{}
'.format(ftrack_message)} + ] self.show_interface(event, items, title) - self.log.error(message) + self.log.error('Fatal error during sync: {}'.format(message)) return - def _launch(self, event): - self.session.reset() - - args = self._translate_event( - self.session, event - ) - - self.launch( - self.session, *args - ) - return - - def _translate_event(self, session, event): - exceptions = [ - 'assetversion', 'job', 'user', 'reviewsessionobject', 'timer', - 'socialfeed', 'timelog' - ] - _selection = event['data'].get('entities', []) - - _entities = list() - for entity in _selection: - if entity['entityType'] in exceptions: - continue - _entities.append( - ( - session.get( - self._get_entity_type(entity), - entity.get('entityId') - ) - ) - ) - - return [_entities, event] - def register(session, **kw): '''Register plugin. Called when used as an plugin.''' diff --git a/pype/ftrack/events/event_test.py b/pype/ftrack/events/event_test.py index ecefc628f3..46e16cbb95 100644 --- a/pype/ftrack/events/event_test.py +++ b/pype/ftrack/events/event_test.py @@ -13,7 +13,7 @@ class Test_Event(BaseEvent): priority = 10000 - def launch(self, session, entities, event): + def launch(self, session, event): '''just a testing event''' diff --git a/pype/ftrack/events/event_thumbnail_updates.py b/pype/ftrack/events/event_thumbnail_updates.py index 62a194d167..50089e26b8 100644 --- a/pype/ftrack/events/event_thumbnail_updates.py +++ b/pype/ftrack/events/event_thumbnail_updates.py @@ -4,7 +4,7 @@ from pype.ftrack import BaseEvent class ThumbnailEvents(BaseEvent): - def launch(self, session, entities, event): + def launch(self, session, event): '''just a testing event''' # self.log.info(event) @@ -23,8 +23,12 @@ class ThumbnailEvents(BaseEvent): parent['name'], task['name'])) # Update task thumbnail from published version - if (entity['entityType'] == 'assetversion' and - entity['action'] == 'encoded'): + # if (entity['entityType'] == 'assetversion' and + # entity['action'] == 'encoded'): + if ( + entity['entityType'] == 'assetversion' + and 'thumbid' in entity['keys'] + ): version = session.get('AssetVersion', entity['entityId']) thumbnail = version.get('thumbnail') @@ -40,6 +44,7 @@ class ThumbnailEvents(BaseEvent): pass + def register(session, **kw): '''Register plugin. Called when used as an plugin.''' if not isinstance(session, ftrack_api.session.Session): diff --git a/pype/ftrack/events/event_version_to_task_statuses.py b/pype/ftrack/events/event_version_to_task_statuses.py index c5c1d9b664..d1393e622e 100644 --- a/pype/ftrack/events/event_version_to_task_statuses.py +++ b/pype/ftrack/events/event_version_to_task_statuses.py @@ -4,7 +4,7 @@ from pype.ftrack import BaseEvent class VersionToTaskStatus(BaseEvent): - def launch(self, session, entities, event): + def launch(self, session, event): '''Propagates status from version to task when changed''' session.commit() @@ -13,7 +13,7 @@ class VersionToTaskStatus(BaseEvent): # Filter non-assetversions if ( entity['entityType'] == 'assetversion' and - 'statusid' in entity['keys'] + 'statusid' in entity.get('keys', []) ): version = session.get('AssetVersion', entity['entityId']) @@ -45,10 +45,9 @@ class VersionToTaskStatus(BaseEvent): task_status = session.query(query).one() except Exception: self.log.info( - 'During update {}: Status {} was not found'.format( - entity['name'], status_to_set - ) - ) + '!!! 
status was not found in Ftrack [ {} ]'.format( + status_to_set + )) continue # Proceed if the task status was set diff --git a/pype/ftrack/ftrack_server/ftrack_server.py b/pype/ftrack/ftrack_server/ftrack_server.py index 831d33c4d7..14dd3d11f7 100644 --- a/pype/ftrack/ftrack_server/ftrack_server.py +++ b/pype/ftrack/ftrack_server/ftrack_server.py @@ -118,15 +118,18 @@ class FtrackServer(): if len(functions) < 1: raise Exception + function_counter = 0 for function in functions: try: function['register'](self.session) + if function_counter % 7 == 0: + time.sleep(0.1) + function_counter += 1 except Exception as e: msg = '"{}" - register was not successful ({})'.format( function['name'], str(e) ) log.warning(msg) - time.sleep(0.05) def run_server(self): self.session = ftrack_api.Session(auto_connect_event_hub=True,)
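A note on the status lookup above: it is a standard ftrack_api name query. A minimal sketch of the same pattern (assuming an authenticated session created from the FTRACK_* environment variables; the status name is illustrative):

import ftrack_api

session = ftrack_api.Session()  # credentials read from FTRACK_* environment variables
status_to_set = 'In Progress'  # illustrative status name
query = 'Status where name is "{}"'.format(status_to_set)
task_status = session.query(query).one()  # .one() raises unless exactly one result matches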
diff --git a/pype/ftrack/lib/avalon_sync.py b/pype/ftrack/lib/avalon_sync.py index 851b6f3ed6..030b0b5b6c 100644 --- a/pype/ftrack/lib/avalon_sync.py +++ b/pype/ftrack/lib/avalon_sync.py @@ -541,3 +541,26 @@ def get_config_data(): log.warning("{} - {}".format(msg, str(e))) return data + +def show_errors(obj, event, errors): + title = 'Hey You! You raised a few Errors! (*look below*)' + items = [] + splitter = {'type': 'label', 'value': '---'} + for error in errors: + for key, message in error.items(): + error_title = { + 'type': 'label', + 'value': '# {}'.format(key) + } + error_message = { + 'type': 'label', + 'value': '{}'.format(message) + } + if len(items) > 0: + items.append(splitter) + items.append(error_title) + items.append(error_message) + obj.log.error( + '{}: {}'.format(key, message) + ) + obj.show_interface(event, items, title) diff --git a/pype/ftrack/lib/ftrack_action_handler.py b/pype/ftrack/lib/ftrack_action_handler.py index 2249611a4b..c6d6181c1f 100644 --- a/pype/ftrack/lib/ftrack_action_handler.py +++ b/pype/ftrack/lib/ftrack_action_handler.py @@ -84,14 +84,20 @@ class BaseAction(BaseHandler): def _handle_result(self, session, result, entities, event): '''Validate the returned result from the action callback''' if isinstance(result, bool): - result = { - 'success': result, - 'message': ( - '{0} launched successfully.'.format( - self.label + if result is True: + result = { + 'success': result, + 'message': ( + '{0} launched successfully.'.format(self.label) ) - ) - } + } + else: + result = { + 'success': result, + 'message': ( + '{0} launch failed.'.format(self.label) + ) + } elif isinstance(result, dict): if 'items' in result: diff --git a/pype/ftrack/lib/ftrack_app_handler.py b/pype/ftrack/lib/ftrack_app_handler.py index 6d07a39f81..7498db5b62 100644 --- a/pype/ftrack/lib/ftrack_app_handler.py +++ b/pype/ftrack/lib/ftrack_app_handler.py @@ -148,6 +148,25 @@ class AppAction(BaseHandler): entity = entities[0] project_name = entity['project']['full_name'] + # Validate Clockify settings if Clockify is required + clockify_timer = os.environ.get('CLOCKIFY_WORKSPACE', None) + if clockify_timer is not None: + from pype.clockify import ClockifyAPI + clockapi = ClockifyAPI() + if clockapi.verify_api() is False: + title = 'Launch message' + header = '# You Can\'t launch **any Application**' + message = ( + 'You don\'t have a Clockify API' + ' key set in Clockify settings
' + ) + items = [ + {'type': 'label', 'value': header}, + {'type': 'label', 'value': message} + ] + self.show_interface(event, items, title) + return False + database = pypelib.get_avalon_database() # Get current environments @@ -188,6 +207,7 @@ class AppAction(BaseHandler): "code": entity['project']['name']}, "task": entity['name'], "asset": entity['parent']['name'], + "app": application["application_dir"], "hierarchy": hierarchy} try: anatomy_filled = anatomy.format(data) @@ -306,6 +326,31 @@ class AppAction(BaseHandler): self.log.info('Starting timer for task: ' + task['name']) user.start_timer(task, force=True) + # RUN TIMER IN Clockify + if clockify_timer is not None: + task_type = task['type']['name'] + project_name = task['project']['full_name'] + + def get_parents(entity): + output = [] + if entity.entity_type.lower() == 'project': + return output + output.extend(get_parents(entity['parent'])) + output.append(entity['name']) + + return output + + desc_items = get_parents(task['parent']) + desc_items.append(task['name']) + description = '/'.join(desc_items) + + project_id = clockapi.get_project_id(project_name) + tag_ids = [] + tag_ids.append(clockapi.get_tag_id(task_type)) + clockapi.start_time_entry( + description, project_id, tag_ids=tag_ids + ) + # Change status of task to In progress config = get_config_data() diff --git a/pype/ftrack/lib/ftrack_base_handler.py b/pype/ftrack/lib/ftrack_base_handler.py index 6d56fcb010..3e08eb122d 100644 --- a/pype/ftrack/lib/ftrack_base_handler.py +++ b/pype/ftrack/lib/ftrack_base_handler.py @@ -5,8 +5,10 @@ from pype.vendor import ftrack_api class MissingPermision(Exception): - def __init__(self): - super().__init__('Missing permission') + def __init__(self, message=None): + if message is None: + message = 'Ftrack' + super().__init__(message) class BaseHandler(object): @@ -64,10 +66,14 @@ class BaseHandler(object): self.log.info(( '{} "{}" - Registered successfully ({:.4f}sec)' ).format(self.type, label, run_time)) - except MissingPermision: + except MissingPermision as MPE: self.log.info(( - '!{} "{}" - You\'re missing required permissions' - ).format(self.type, label)) + '!{} "{}" - You\'re missing required {} permissions' + ).format(self.type, label, str(MPE))) + except AssertionError as ae: + self.log.info(( + '!{} "{}" - {}' + ).format(self.type, label, str(ae))) except NotImplementedError: self.log.error(( '{} "{}" - Register method is not implemented' @@ -97,9 +103,12 @@ class BaseHandler(object): self.log.info(('{} "{}": Finished').format(self.type, label)) return result except Exception as e: - self.log.error('{} "{}": Failed ({})'.format( - self.type, label, str(e)) - ) + msg = '{} "{}": Failed ({})'.format(self.type, label, str(e)) + self.log.error(msg) + return { + 'success': False, + 'message': msg + } return wrapper_launch @property @@ -165,22 +174,31 @@ class BaseHandler(object): '''Return *event* translated structure to be used with the API.''' _entities = event['data'].get('entities_object', None) - if _entities is None: - selection = event['data'].get('selection', []) - _entities = [] - for entity in selection: - _entities.append( - self.session.get( - self._get_entity_type(entity), - entity.get('entityId') - ) - ) + if ( + _entities is None or + _entities[0].get('link', None) == ftrack_api.symbol.NOT_SET + ): + _entities = self._get_entities(event) return [ _entities, event ] + def _get_entities(self, event): + self.session._local_cache.clear() + selection = event['data'].get('selection', []) + _entities = [] + for entity in 
selection: + _entities.append( + self.session.get( + self._get_entity_type(entity), + entity.get('entityId') + ) + ) + event['data']['entities_object'] = _entities + return _entities + def _get_entity_type(self, entity): '''Return translated entity type that can be used with API.''' # Get entity type and make sure it is lower cased. Most places except @@ -248,7 +266,10 @@ class BaseHandler(object): def _interface(self, *args): interface = self.interface(*args) if interface: - if 'items' in interface: + if ( + 'items' in interface or + ('success' in interface and 'message' in interface) + ): return interface return { @@ -273,23 +294,31 @@ class BaseHandler(object): def _handle_result(self, session, result, entities, event): '''Validate the returned result from the action callback''' if isinstance(result, bool): - result = { - 'success': result, - 'message': ( - '{0} launched successfully.'.format( - self.label + if result is True: + result = { + 'success': result, + 'message': ( + '{0} launched successfully.'.format(self.label) ) - ) - } + } + else: + result = { + 'success': result, + 'message': ( + '{0} launch failed.'.format(self.label) + ) + } elif isinstance(result, dict): - for key in ('success', 'message'): - if key in result: - continue + items = 'items' in result + if items is False: + for key in ('success', 'message'): + if key in result: + continue - raise KeyError( - 'Missing required key: {0}.'.format(key) - ) + raise KeyError( - 'Missing required key: {0}.'.format(key) + ) else: self.log.error( diff --git a/pype/ftrack/lib/ftrack_event_handler.py b/pype/ftrack/lib/ftrack_event_handler.py index 2cbc3782b8..c6c91e7428 100644 --- a/pype/ftrack/lib/ftrack_event_handler.py +++ b/pype/ftrack/lib/ftrack_event_handler.py @@ -1,3 +1,4 @@ +import functools from .ftrack_base_handler import BaseHandler @@ -18,6 +19,18 @@ class BaseEvent(BaseHandler): '''Expects a ftrack_api.Session instance''' super().__init__(session) + # Decorator + def launch_log(self, func): + @functools.wraps(func) + def wrapper_launch(*args, **kwargs): + try: + func(*args, **kwargs) + except Exception as e: + self.log.info('{} Failed ({})'.format( + self.__class__.__name__, str(e)) + ) + return wrapper_launch + def register(self): '''Registers the event, subscribing the discover and launch topics.''' self.session.event_hub.subscribe( @@ -27,23 +40,31 @@ class BaseEvent(BaseHandler): ) def _launch(self, event): - args = self._translate_event( - self.session, event - ) + self.session.rollback() + self.session._local_cache.clear() self.launch( - self.session, *args + self.session, event ) return def _translate_event(self, session, event): '''Return *event* translated structure to be used with the API.''' - _selection = event['data'].get('entities', []) + return [ + self._get_entities(session, event), + event + ] + def _get_entities( + self, session, event, ignore=['socialfeed', 'socialnotification'] + ): + _selection = event['data'].get('entities', []) _entities = list() + if isinstance(ignore, str): + ignore = [ignore] for entity in _selection: - if entity['entityType'] in ['socialfeed']: + if entity['entityType'] in ignore: continue _entities.append( ( session.get( self._get_entity_type(entity), entity.get('entityId') ) ) ) - - return [ - _entities, - event - ] + return _entities
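A side note on the `_get_entities` signature above: a mutable list as a default argument is shared between all calls. A safer equivalent signature (a sketch preserving the same behaviour, including normalizing a single string to a one-element list):

    def _get_entities(self, session, event, ignore=None):
        # Avoid the shared mutable default; accept a list or a single string.
        if ignore is None:
            ignore = ['socialfeed', 'socialnotification']
        elif isinstance(ignore, str):
            ignore = [ignore]
        _entities = list()
        for entity in event['data'].get('entities', []):
            if entity['entityType'] in ignore:
                continue
            _entities.append(
                session.get(
                    self._get_entity_type(entity),
                    entity.get('entityId')
                )
            )
        return _entities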
diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index adc5f6b14a..f08e52857e 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -2,7 +2,7 @@ import sys from collections import OrderedDict from pprint import pprint from avalon.vendor.Qt import QtGui -from avalon import api, io +from avalon import api, io, lib import avalon.nuke import pype.api as pype import nuke @@ -37,10 +37,12 @@ def writes_version_sync(): for each in nuke.allNodes(): if each.Class() == 'Write': avalon_knob_data = get_avalon_knob_data(each) - if avalon_knob_data['families'] not in ["render"]: - log.info(avalon_knob_data['families']) - continue + families = avalon_knob_data.get('families') + if families not in ["render"]: + log.info(families) + continue + node_file = each['file'].value() log.info("node_file: {}".format(node_file)) @@ -90,6 +92,7 @@ def create_write_node(name, data): ) nuke_dataflow_writes = get_dataflow(**data) nuke_colorspace_writes = get_colorspace(**data) + application = lib.get_application(os.environ["AVALON_APP_NAME"]) try: anatomy_filled = format_anatomy({ "subset": data["avalon"]["subset"], @@ -99,6 +102,7 @@ def create_write_node(name, data): "project": {"name": pype.get_project_name(), "code": pype.get_project_code()}, "representation": nuke_dataflow_writes.file_type, + "app": application["application_dir"], }) except Exception as e: log.error("problem with resolving anatomy template: {}".format(e)) diff --git a/pype/plugins/global/load/open_djv.py b/pype/plugins/global/load/open_djv.py index 29f8e8ba08..bd49d86d5f 100644 --- a/pype/plugins/global/load/open_djv.py +++ b/pype/plugins/global/load/open_djv.py @@ -81,7 +81,7 @@ class OpenInDJV(api.Loader): '''layer name''' # cmd.append('-file_layer (value)') ''' Proxy scale: 1/2, 1/4, 1/8''' - cmd.append('-file_proxy 1/2') + # cmd.append('-file_proxy 1/2') ''' Cache: True, False.''' cmd.append('-file_cache True') ''' Start in full screen ''' diff --git a/pype/plugins/global/publish/collect_assumed_destination.py b/pype/plugins/global/publish/collect_assumed_destination.py index 058af12340..16a299d524 100644 --- a/pype/plugins/global/publish/collect_assumed_destination.py +++ b/pype/plugins/global/publish/collect_assumed_destination.py @@ -1,5 +1,5 @@ -import pyblish.api import os +import pyblish.api from avalon import io, api @@ -8,7 +8,7 @@ class CollectAssumedDestination(pyblish.api.InstancePlugin): """Generate the assumed destination path where the file will be stored""" label = "Collect Assumed Destination" - order = pyblish.api.CollectorOrder + 0.499 + order = pyblish.api.CollectorOrder + 0.498 exclude_families = ["clip"] def process(self, instance): @@ -76,6 +76,9 @@ class CollectAssumedDestination(pyblish.api.InstancePlugin): Returns: file path (str) """ + if [ef for ef in self.exclude_families + if instance.data["family"] in ef]: + return # get all the stuff from the database subset_name = instance.data["subset"] @@ -87,7 +90,7 @@ projection={"config": True, "data": True}) template = project["config"]["template"]["publish"] - # anatomy = instance.context.data['anatomy'] + anatomy = instance.context.data['anatomy'] asset = io.find_one({"type": "asset", "name": asset_name, @@ -129,5 +132,10 @@ "hierarchy": hierarchy, "representation": "TEMP"} - instance.data["assumedTemplateData"] = template_data instance.data["template"] = template + instance.data["template"] = template + instance.data["assumedTemplateData"] = template_data + + # We take the parent folder of representation 'filepath' + instance.data["assumedDestination"] = os.path.dirname( + (anatomy.format(template_data)).publish.path + ) diff --git a/pype/plugins/global/publish/extract_jpeg.py b/pype/plugins/global/publish/extract_jpeg.py index
a99e6bc787..7720c9d56d 100644 --- a/pype/plugins/global/publish/extract_jpeg.py +++ b/pype/plugins/global/publish/extract_jpeg.py @@ -16,9 +16,10 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin): """ label = "Extract Jpeg EXR" + hosts = ["shell"] order = pyblish.api.ExtractorOrder families = ["imagesequence", "render", "write", "source"] - host = ["shell"] + def process(self, instance): start = instance.data.get("startFrame") diff --git a/pype/plugins/global/publish/extract_quicktime.py b/pype/plugins/global/publish/extract_quicktime.py index a226bf7e2a..621078e3c0 100644 --- a/pype/plugins/global/publish/extract_quicktime.py +++ b/pype/plugins/global/publish/extract_quicktime.py @@ -18,7 +18,7 @@ class ExtractQuicktimeEXR(pyblish.api.InstancePlugin): label = "Extract Quicktime EXR" order = pyblish.api.ExtractorOrder families = ["imagesequence", "render", "write", "source"] - host = ["shell"] + hosts = ["shell"] def process(self, instance): fps = instance.data.get("fps") diff --git a/pype/plugins/global/publish/integrate.py b/pype/plugins/global/publish/integrate.py index be7fc3bcf3..00096a95ee 100644 --- a/pype/plugins/global/publish/integrate.py +++ b/pype/plugins/global/publish/integrate.py @@ -5,6 +5,7 @@ import shutil import errno import pyblish.api from avalon import api, io +from avalon.vendor import filelink log = logging.getLogger(__name__) @@ -91,6 +92,13 @@ class IntegrateAsset(pyblish.api.InstancePlugin): self.log.debug("Establishing staging directory @ %s" % stagingdir) + # Ensure at least one file is set up for transfer in staging dir. + files = instance.data.get("files", []) + assert files, "Instance has no files to transfer" + assert isinstance(files, (list, tuple)), ( + "Instance 'files' must be a list, got: {0}".format(files) + ) + project = io.find_one({"type": "project"}) asset = io.find_one({"type": "asset", @@ -170,6 +178,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # Each should be a single representation (as such, a single extension) representations = [] destination_list = [] + if 'transfers' not in instance.data: + instance.data['transfers'] = [] for files in instance.data["files"]: @@ -271,12 +281,22 @@ class IntegrateAsset(pyblish.api.InstancePlugin): instance: the instance to integrate """ - transfers = instance.data["transfers"] + transfers = instance.data.get("transfers", list()) for src, dest in transfers: self.log.info("Copying file .. {} -> {}".format(src, dest)) self.copy_file(src, dest) + # Produce hardlinked copies + # Note: hardlink can only be produced between two files on the same + # server/disk and editing one of the two will edit both files at once. + # As such it is recommended to only make hardlinks between static files + # to ensure publishes remain safe and non-edited. + hardlinks = instance.data.get("hardlinks", list()) + for src, dest in hardlinks: + self.log.info("Hardlinking file .. 
{} -> {}".format(src, dest)) + self.hardlink_file(src, dest) + def copy_file(self, src, dst): """ Copy given source to destination @@ -299,6 +319,20 @@ class IntegrateAsset(pyblish.api.InstancePlugin): shutil.copy(src, dst) + def hardlink_file(self, src, dst): + + dirname = os.path.dirname(dst) + try: + os.makedirs(dirname) + except OSError as e: + if e.errno == errno.EEXIST: + pass + else: + self.log.critical("An unexpected error occurred.") + raise + + filelink.create(src, dst, filelink.HARDLINK) + def get_subset(self, asset, instance): subset = io.find_one({"type": "subset", @@ -362,7 +396,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): families.append(instance_family) families += current_families - self.log.debug("Registered roor: {}".format(api.registered_root())) + self.log.debug("Registered root: {}".format(api.registered_root())) # create relative source path for DB try: source = instance.data['source'] @@ -382,7 +416,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "fps": context.data.get("fps")} # Include optional data if present in - optionals = ["startFrame", "endFrame", "step", "handles"] + optionals = [ + "startFrame", "endFrame", "step", "handles", "sourceHashes" + ] for key in optionals: if key in instance.data: version_data[key] = instance.data[key] diff --git a/pype/plugins/global/publish/integrate_rendered_frames.py b/pype/plugins/global/publish/integrate_rendered_frames.py index ae11d33348..8e7e2a59c4 100644 --- a/pype/plugins/global/publish/integrate_rendered_frames.py +++ b/pype/plugins/global/publish/integrate_rendered_frames.py @@ -168,6 +168,9 @@ class IntegrateFrames(pyblish.api.InstancePlugin): representations = [] destination_list = [] + if 'transfers' not in instance.data: + instance.data['transfers'] = [] + for files in instance.data["files"]: # Collection # _______ @@ -240,7 +243,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin): instance.data["transfers"].append([src, dst]) - template_data["frame"] = "#####" + template_data["frame"] = "#" * anatomy.render.padding anatomy_filled = anatomy.format(template_data) path_to_save = anatomy_filled.render.path template = anatomy.render.fullpath diff --git a/pype/plugins/launcher/actions/ClockifyStart.py b/pype/plugins/launcher/actions/ClockifyStart.py new file mode 100644 index 0000000000..78a8b4e1b6 --- /dev/null +++ b/pype/plugins/launcher/actions/ClockifyStart.py @@ -0,0 +1,49 @@ +from avalon import api, io +from pype.api import Logger +try: + from pype.clockify import ClockifyAPI +except Exception: + pass + +log = Logger.getLogger(__name__, "clockify_start") + + +class ClockifyStart(api.Action): + + name = "clockify_start_timer" + label = "Clockify - Start Timer" + icon = "clockify_icon" + order = 500 + + exec("try: clockapi = ClockifyAPI()\nexcept: clockapi = None") + + def is_compatible(self, session): + """Return whether the action is compatible with the session""" + if self.clockapi is None: + return False + if "AVALON_TASK" in session: + return True + return False + + def process(self, session, **kwargs): + project_name = session['AVALON_PROJECT'] + asset_name = session['AVALON_ASSET'] + task_name = session['AVALON_TASK'] + + description = asset_name + asset = io.find_one({ + 'type': 'asset', + 'name': asset_name + }) + if asset is not None: + desc_items = asset.get('data', {}).get('parents', []) + desc_items.append(asset_name) + desc_items.append(task_name) + description = '/'.join(desc_items) + + project_id = self.clockapi.get_project_id(project_name) + tag_ids = [] + 
tag_ids.append(self.clockapi.get_tag_id(task_name)) + self.clockapi.start_time_entry( + description, project_id, tag_ids=tag_ids + ) diff --git a/pype/plugins/launcher/actions/ClockifySync.py b/pype/plugins/launcher/actions/ClockifySync.py new file mode 100644 index 0000000000..c50fbc4b25 --- /dev/null +++ b/pype/plugins/launcher/actions/ClockifySync.py @@ -0,0 +1,65 @@ +from avalon import api, io +try: + from pype.clockify import ClockifyAPI +except Exception: + pass +from pype.api import Logger +log = Logger.getLogger(__name__, "clockify_sync") + + +class ClockifySync(api.Action): + + name = "sync_to_clockify" + label = "Sync to Clockify" + icon = "clockify_white_icon" + order = 500 + exec( + "try:\n\tclockapi = ClockifyAPI()" + "\n\thave_permissions = clockapi.validate_workspace_perm()" + "\nexcept:\n\tclockapi = None" + ) + + def is_compatible(self, session): + """Return whether the action is compatible with the session""" + if self.clockapi is None: + return False + return self.have_permissions + + def process(self, session, **kwargs): + project_name = session.get('AVALON_PROJECT', None) + + projects_to_sync = [] + if project_name is None or project_name.strip() == '': + for project in io.projects(): + projects_to_sync.append(project) + else: + project = io.find_one({'type': 'project'}) + projects_to_sync.append(project) + + projects_info = {} + for project in projects_to_sync: + task_types = [task['name'] for task in project['config']['tasks']] + projects_info[project['name']] = task_types + + clockify_projects = self.clockapi.get_projects() + for project_name, task_types in projects_info.items(): + if project_name not in clockify_projects: + response = self.clockapi.add_project(project_name) + if 'id' not in response: + self.log.error('Project {} can\'t be created'.format( + project_name + )) + continue + project_id = response['id'] + else: + project_id = clockify_projects[project_name] + + clockify_workspace_tags = self.clockapi.get_tags() + for task_type in task_types: + if task_type not in clockify_workspace_tags: + response = self.clockapi.add_tag(task_type) + if 'id' not in response: + self.log.error('Task {} can\'t be created'.format( + task_type + )) + continue
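A note on the `exec(...)` initializers used by the two Clockify actions above: a `try`/`except` block is legal directly inside a class body, so the same guarded initialization can be written without `exec`. A sketch, under the same assumption that ClockifyAPI may fail to import or construct:

class ClockifySync(api.Action):
    name = 'sync_to_clockify'
    label = 'Sync to Clockify'
    # Class-body try/except; no exec() required.
    try:
        clockapi = ClockifyAPI()
        have_permissions = clockapi.validate_workspace_perm()
    except Exception:
        clockapi = None
        have_permissions = False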
+ self.data["maketx"] = True diff --git a/pype/plugins/maya/load/load_alembic.py b/pype/plugins/maya/load/load_alembic.py index 9e08702521..9fd4aa2108 100644 --- a/pype/plugins/maya/load/load_alembic.py +++ b/pype/plugins/maya/load/load_alembic.py @@ -1,4 +1,6 @@ import pype.maya.plugin +import os +import json class AbcLoader(pype.maya.plugin.ReferenceLoader): @@ -16,6 +18,12 @@ class AbcLoader(pype.maya.plugin.ReferenceLoader): import maya.cmds as cmds + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "animation" + + groupName = "{}:{}".format(namespace, name) cmds.loadPlugin("AbcImport.mll", quiet=True) nodes = cmds.file(self.fname, namespace=namespace, @@ -25,6 +33,23 @@ class AbcLoader(pype.maya.plugin.ReferenceLoader): reference=True, returnNewNodes=True) + cmds.makeIdentity(groupName, apply=False, rotate=True, + translate=True, scale=True) + + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get(family) + if c is not None: + cmds.setAttr(groupName + ".useOutlinerColor", 1) + cmds.setAttr(groupName + ".outlinerColor", + c[0], c[1], c[2]) + self[:] = nodes return nodes diff --git a/pype/plugins/maya/load/load_ass.py b/pype/plugins/maya/load/load_ass.py index 13ad85473c..c268ce70c5 100644 --- a/pype/plugins/maya/load/load_ass.py +++ b/pype/plugins/maya/load/load_ass.py @@ -2,6 +2,7 @@ from avalon import api import pype.maya.plugin import os import pymel.core as pm +import json class AssProxyLoader(pype.maya.plugin.ReferenceLoader): @@ -21,6 +22,11 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader): from avalon import maya import pymel.core as pm + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "ass" + with maya.maintained_selection(): groupName = "{}:{}".format(namespace, name) @@ -34,7 +40,8 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader): groupReference=True, groupName=groupName) - cmds.makeIdentity(groupName, apply=False, rotate=True, translate=True, scale=True) + cmds.makeIdentity(groupName, apply=False, rotate=True, + translate=True, scale=True) # Set attributes proxyShape = pm.ls(nodes, type="mesh")[0] @@ -43,6 +50,19 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader): proxyShape.dso.set(path) proxyShape.aiOverrideShaders.set(0) + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get(family) + if c is not None: + cmds.setAttr(groupName + ".useOutlinerColor", 1) + cmds.setAttr(groupName + ".outlinerColor", + c[0], c[1], c[2]) self[:] = nodes @@ -132,7 +152,6 @@ class AssStandinLoader(api.Loader): import mtoa.ui.arnoldmenu import pymel.core as pm - asset = context['asset']['name'] namespace = namespace or lib.unique_namespace( asset + "_", @@ -146,6 +165,20 @@ class AssStandinLoader(api.Loader): label = "{}:{}".format(namespace, name) root = pm.group(name=label, empty=True) + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get('ass') + if c is not None: + cmds.setAttr(root + ".useOutlinerColor", 1) + cmds.setAttr(root + ".outlinerColor", + c[0], c[1], c[2]) + # Create transform with shape transform_name = label + 
"_ASS" # transform = pm.createNode("transform", name=transform_name, diff --git a/pype/plugins/maya/load/load_camera.py b/pype/plugins/maya/load/load_camera.py index eb75c3a63d..989e80e979 100644 --- a/pype/plugins/maya/load/load_camera.py +++ b/pype/plugins/maya/load/load_camera.py @@ -1,4 +1,6 @@ import pype.maya.plugin +import os +import json class CameraLoader(pype.maya.plugin.ReferenceLoader): @@ -16,7 +18,13 @@ class CameraLoader(pype.maya.plugin.ReferenceLoader): import maya.cmds as cmds # Get family type from the context + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "camera" + cmds.loadPlugin("AbcImport.mll", quiet=True) + groupName = "{}:{}".format(namespace, name) nodes = cmds.file(self.fname, namespace=namespace, sharedReferenceFile=False, @@ -27,6 +35,20 @@ class CameraLoader(pype.maya.plugin.ReferenceLoader): cameras = cmds.ls(nodes, type="camera") + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get(family) + if c is not None: + cmds.setAttr(groupName + ".useOutlinerColor", 1) + cmds.setAttr(groupName + ".outlinerColor", + c[0], c[1], c[2]) + # Check the Maya version, lockTransform has been introduced since # Maya 2016.5 Ext 2 version = int(cmds.about(version=True)) diff --git a/pype/plugins/maya/load/load_fbx.py b/pype/plugins/maya/load/load_fbx.py index 2ee3e5fdbd..b580257334 100644 --- a/pype/plugins/maya/load/load_fbx.py +++ b/pype/plugins/maya/load/load_fbx.py @@ -1,4 +1,6 @@ import pype.maya.plugin +import os +import json class FBXLoader(pype.maya.plugin.ReferenceLoader): @@ -17,6 +19,11 @@ class FBXLoader(pype.maya.plugin.ReferenceLoader): import maya.cmds as cmds from avalon import maya + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "fbx" + # Ensure FBX plug-in is loaded cmds.loadPlugin("fbxmaya", quiet=True) @@ -28,6 +35,21 @@ class FBXLoader(pype.maya.plugin.ReferenceLoader): groupReference=True, groupName="{}:{}".format(namespace, name)) + groupName = "{}:{}".format(namespace, name) + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get(family) + if c is not None: + cmds.setAttr(groupName + ".useOutlinerColor", 1) + cmds.setAttr(groupName + ".outlinerColor", + c[0], c[1], c[2]) + self[:] = nodes return nodes diff --git a/pype/plugins/maya/load/load_mayaascii.py b/pype/plugins/maya/load/load_mayaascii.py index 6f4c6a63a0..549d1dff4c 100644 --- a/pype/plugins/maya/load/load_mayaascii.py +++ b/pype/plugins/maya/load/load_mayaascii.py @@ -1,4 +1,6 @@ import pype.maya.plugin +import json +import os class MayaAsciiLoader(pype.maya.plugin.ReferenceLoader): @@ -19,6 +21,11 @@ class MayaAsciiLoader(pype.maya.plugin.ReferenceLoader): import maya.cmds as cmds from avalon import maya + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "model" + with maya.maintained_selection(): nodes = cmds.file(self.fname, namespace=namespace, @@ -28,6 +35,20 @@ class MayaAsciiLoader(pype.maya.plugin.ReferenceLoader): groupName="{}:{}".format(namespace, name)) self[:] = nodes + groupName = "{}:{}".format(namespace, name) + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + 
with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get(family) + if c is not None: + cmds.setAttr(groupName + ".useOutlinerColor", 1) + cmds.setAttr(groupName + ".outlinerColor", + c[0], c[1], c[2]) return nodes diff --git a/pype/plugins/maya/load/load_model.py b/pype/plugins/maya/load/load_model.py index f29af65b72..16f3556de7 100644 --- a/pype/plugins/maya/load/load_model.py +++ b/pype/plugins/maya/load/load_model.py @@ -1,5 +1,7 @@ from avalon import api import pype.maya.plugin +import json +import os class ModelLoader(pype.maya.plugin.ReferenceLoader): @@ -19,13 +21,36 @@ class ModelLoader(pype.maya.plugin.ReferenceLoader): import maya.cmds as cmds from avalon import maya + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "model" + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + with maya.maintained_selection(): + + groupName = "{}:{}".format(namespace, name) nodes = cmds.file(self.fname, namespace=namespace, reference=True, returnNewNodes=True, groupReference=True, - groupName="{}:{}".format(namespace, name)) + groupName=groupName) + + cmds.makeIdentity(groupName, apply=False, rotate=True, + translate=True, scale=True) + + c = colors.get(family) + if c is not None: + cmds.setAttr(groupName + ".useOutlinerColor", 1) + cmds.setAttr(groupName + ".outlinerColor", + c[0], c[1], c[2]) self[:] = nodes @@ -64,6 +89,19 @@ class GpuCacheLoader(api.Loader): # Root group label = "{}:{}".format(namespace, name) root = cmds.group(name=label, empty=True) + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get('model') + if c is not None: + cmds.setAttr(root + ".useOutlinerColor", 1) + cmds.setAttr(root + ".outlinerColor", + c[0], c[1], c[2]) # Create transform with shape transform_name = label + "_GPU" @@ -125,6 +163,7 @@ class GpuCacheLoader(api.Loader): except RuntimeError: pass + class AbcModelLoader(pype.maya.plugin.ReferenceLoader): """Specific loader of Alembic for the studio.animation family""" @@ -141,15 +180,36 @@ class AbcModelLoader(pype.maya.plugin.ReferenceLoader): import maya.cmds as cmds + groupName = "{}:{}".format(namespace, name) cmds.loadPlugin("AbcImport.mll", quiet=True) nodes = cmds.file(self.fname, namespace=namespace, sharedReferenceFile=False, groupReference=True, - groupName="{}:{}".format(namespace, name), + groupName=groupName, reference=True, returnNewNodes=True) + namespace = cmds.referenceQuery(nodes[0], namespace=True) + groupName = "{}:{}".format(namespace, name) + + cmds.makeIdentity(groupName, apply=False, rotate=True, + translate=True, scale=True) + + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get('model') + if c is not None: + cmds.setAttr(groupName + ".useOutlinerColor", 1) + cmds.setAttr(groupName + ".outlinerColor", + c[0], c[1], c[2]) + self[:] = nodes return nodes diff --git a/pype/plugins/maya/load/load_rig.py b/pype/plugins/maya/load/load_rig.py index aa40ca3cc2..1dcff45bb9 100644 --- a/pype/plugins/maya/load/load_rig.py +++ b/pype/plugins/maya/load/load_rig.py @@ -2,6 +2,8 @@ from maya import cmds import pype.maya.plugin from avalon 
import api, maya +import os +import json class RigLoader(pype.maya.plugin.ReferenceLoader): @@ -21,12 +23,35 @@ class RigLoader(pype.maya.plugin.ReferenceLoader): def process_reference(self, context, name, namespace, data): + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "rig" + + groupName = "{}:{}".format(namespace, name) nodes = cmds.file(self.fname, namespace=namespace, reference=True, returnNewNodes=True, groupReference=True, - groupName="{}:{}".format(namespace, name)) + groupName=groupName) + + cmds.makeIdentity(groupName, apply=False, rotate=True, + translate=True, scale=True) + + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get(family) + if c is not None: + cmds.setAttr(groupName + ".useOutlinerColor", 1) + cmds.setAttr(groupName + ".outlinerColor", + c[0], c[1], c[2]) # Store for post-process self[:] = nodes diff --git a/pype/plugins/maya/load/load_vdb_to_redshift.py b/pype/plugins/maya/load/load_vdb_to_redshift.py index 8ff8bc0326..169c3bf34a 100644 --- a/pype/plugins/maya/load/load_vdb_to_redshift.py +++ b/pype/plugins/maya/load/load_vdb_to_redshift.py @@ -1,4 +1,6 @@ from avalon import api +import os +import json class LoadVDBtoRedShift(api.Loader): @@ -17,6 +19,11 @@ class LoadVDBtoRedShift(api.Loader): import avalon.maya.lib as lib from avalon.maya.pipeline import containerise + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "vdbcache" + # Check if the plugin for redshift is available on the pc try: cmds.loadPlugin("redshift4maya", quiet=True) @@ -48,6 +55,19 @@ class LoadVDBtoRedShift(api.Loader): # Root group label = "{}:{}".format(namespace, name) root = cmds.group(name=label, empty=True) + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get(family) + if c is not None: + cmds.setAttr(root + ".useOutlinerColor", 1) + cmds.setAttr(root + ".outlinerColor", + c[0], c[1], c[2]) # Create VR volume_node = cmds.createNode("RedshiftVolumeShape", diff --git a/pype/plugins/maya/load/load_vdb_to_vray.py b/pype/plugins/maya/load/load_vdb_to_vray.py index ac20b0eb43..58d6d1b56e 100644 --- a/pype/plugins/maya/load/load_vdb_to_vray.py +++ b/pype/plugins/maya/load/load_vdb_to_vray.py @@ -1,4 +1,6 @@ from avalon import api +import json +import os class LoadVDBtoVRay(api.Loader): @@ -16,6 +18,11 @@ class LoadVDBtoVRay(api.Loader): import avalon.maya.lib as lib from avalon.maya.pipeline import containerise + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "vdbcache" + # Check if viewport drawing engine is Open GL Core (compat) render_engine = None compatible = "OpenGLCoreProfileCompat" @@ -40,6 +47,19 @@ class LoadVDBtoVRay(api.Loader): # Root group label = "{}:{}".format(namespace, name) root = cmds.group(name=label, empty=True) + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get(family) + if c is not None: + cmds.setAttr(root + ".useOutlinerColor", 1) + cmds.setAttr(root + ".outlinerColor", + c[0], c[1], c[2]) # Create VR grid_node = cmds.createNode("VRayVolumeGrid", diff --git 
a/pype/plugins/maya/load/load_vrayproxy.py b/pype/plugins/maya/load/load_vrayproxy.py index 9396e124ce..a3a114440a 100644 --- a/pype/plugins/maya/load/load_vrayproxy.py +++ b/pype/plugins/maya/load/load_vrayproxy.py @@ -1,6 +1,7 @@ from avalon.maya import lib from avalon import api - +import json +import os import maya.cmds as cmds @@ -20,6 +21,19 @@ class VRayProxyLoader(api.Loader): from avalon.maya.pipeline import containerise from pype.maya.lib import namespaced + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "vrayproxy" + + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + asset_name = context['asset']["name"] namespace = namespace or lib.unique_namespace( asset_name + "_", @@ -40,6 +54,12 @@ class VRayProxyLoader(api.Loader): if not nodes: return + c = colors.get(family) + if c is not None: + cmds.setAttr("{0}_{1}.useOutlinerColor".format(name, "GRP"), 1) + cmds.setAttr("{0}_{1}.outlinerColor".format(name, "GRP"), + c[0], c[1], c[2]) + return containerise( name=name, namespace=namespace, @@ -101,7 +121,7 @@ class VRayProxyLoader(api.Loader): # Create nodes vray_mesh = cmds.createNode('VRayMesh', name="{}_VRMS".format(name)) mesh_shape = cmds.createNode("mesh", name="{}_GEOShape".format(name)) - vray_mat = cmds.shadingNode("VRayMeshMaterial", asShader=True, + vray_mat = cmds.shadingNode("VRayMeshMaterial", asShader=True, name="{}_VRMM".format(name)) vray_mat_sg = cmds.sets(name="{}_VRSG".format(name), empty=True, diff --git a/pype/plugins/maya/load/load_yeti_cache.py b/pype/plugins/maya/load/load_yeti_cache.py index 2160924047..b19bed1393 100644 --- a/pype/plugins/maya/load/load_yeti_cache.py +++ b/pype/plugins/maya/load/load_yeti_cache.py @@ -23,6 +23,11 @@ class YetiCacheLoader(api.Loader): def load(self, context, name=None, namespace=None, data=None): + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "yeticache" + # Build namespace asset = context["asset"] if namespace is None: @@ -49,6 +54,19 @@ class YetiCacheLoader(api.Loader): group_name = "{}:{}".format(namespace, name) group_node = cmds.group(nodes, name=group_name) + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get(family) + if c is not None: + cmds.setAttr(group_name + ".useOutlinerColor", 1) + cmds.setAttr(group_name + ".outlinerColor", + c[0], c[1], c[2]) nodes.append(group_node) diff --git a/pype/plugins/maya/load/load_yeti_rig.py b/pype/plugins/maya/load/load_yeti_rig.py index 096b936b41..c821c6ca02 100644 --- a/pype/plugins/maya/load/load_yeti_rig.py +++ b/pype/plugins/maya/load/load_yeti_rig.py @@ -1,4 +1,6 @@ import pype.maya.plugin +import os +import json class YetiRigLoader(pype.maya.plugin.ReferenceLoader): @@ -24,6 +26,20 @@ class YetiRigLoader(pype.maya.plugin.ReferenceLoader): groupReference=True, groupName="{}:{}".format(namespace, name)) + groupName = "{}:{}".format(namespace, name) + preset_file = os.path.join( + os.environ.get('PYPE_STUDIO_TEMPLATES'), + 'presets', 'tools', + 'family_colors.json' + ) + with open(preset_file, 'r') as cfile: + colors = json.load(cfile) + + c = colors.get('yetiRig') + if c is not None: + cmds.setAttr(groupName + ".useOutlinerColor", 1) + cmds.setAttr(groupName + ".outlinerColor", + c[0], 
c[1], c[2]) self[:] = nodes self.log.info("Yeti Rig Connection Manager will be available soon") diff --git a/pype/plugins/maya/publish/collect_look.py b/pype/plugins/maya/publish/collect_look.py index 9c7b17acaa..dfefa15fe5 100644 --- a/pype/plugins/maya/publish/collect_look.py +++ b/pype/plugins/maya/publish/collect_look.py @@ -47,6 +47,8 @@ def get_look_attrs(node): for attr in attrs: if attr in SHAPE_ATTRS: result.append(attr) + elif attr.startswith('ai'): + result.append(attr) return result @@ -218,6 +220,7 @@ class CollectLook(pyblish.api.InstancePlugin): # make ftrack publishable instance.data["families"] = ['ftrack'] + instance.data['maketx'] = True def collect(self, instance): @@ -386,6 +389,8 @@ class CollectLook(pyblish.api.InstancePlugin): # Collect changes to "custom" attributes node_attrs = get_look_attrs(node) + self.log.info(node_attrs) + # Only include if there are any properties we care about if not node_attrs: continue diff --git a/pype/plugins/maya/publish/extract_look.py b/pype/plugins/maya/publish/extract_look.py index a30b1fe7d5..f6fdda8593 100644 --- a/pype/plugins/maya/publish/extract_look.py +++ b/pype/plugins/maya/publish/extract_look.py @@ -2,16 +2,97 @@ import os import json import tempfile import contextlib +import subprocess from collections import OrderedDict from maya import cmds import pyblish.api import avalon.maya +from avalon import io import pype.api import pype.maya.lib as lib +# Modes for transfer +COPY = 1 +HARDLINK = 2 + + +def source_hash(filepath, *args): + """Generate simple identifier for a source file. + This is used to identify whether a source file has previously been + processed into the pipeline, e.g. a texture. + The hash is based on source filepath, modification time and file size. + This is only used to identify whether a specific source file was already + published before from the same location with the same modification date. + We opt to do it this way as opposed to the Avalanche C4 hash as this is much + faster and predictable enough for all our production use cases. + Args: + filepath (str): The source file path. + You can specify additional arguments in the function + to allow for specific 'processing' values to be included. + """ + # We replace dots with comma because . cannot be a key in a pymongo dict. + file_name = os.path.basename(filepath) + time = str(os.path.getmtime(filepath)) + size = str(os.path.getsize(filepath)) + return "|".join([ + file_name, + time, + size + ] + list(args)).replace(".", ",") + + +def find_paths_by_hash(texture_hash): + # Find the texture hash key in the dictionary and all paths that + # originate from it. + key = "data.sourceHashes.{0}".format(texture_hash) + return io.distinct(key, {"type": "version"}) + +
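A worked example of the identifier `source_hash()` returns (illustrative modification time and size):

# source_hash('/textures/wood.exr', 'maketx')
#   file_name = 'wood.exr', mtime = 1546300800.0, size = 1048576  (illustrative)
#   -> 'wood,exr|1546300800,0|1048576|maketx'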
+ """ + + cmd = [ + "maketx", + "-v", # verbose + "-u", # update mode + # unpremultiply before conversion (recommended when alpha present) + "--unpremult", + # use oiio-optimized settings for tile-size, planarconfig, metadata + "--oiio" + ] + cmd.extend(args) + cmd.extend([ + "-o", destination, + source + ]) + + CREATE_NO_WINDOW = 0x08000000 + try: + out = subprocess.check_output( + cmd, + stderr=subprocess.STDOUT, + creationflags=CREATE_NO_WINDOW + ) + except subprocess.CalledProcessError as exc: + print exc + print out + import traceback + traceback.print_exc() + raise + + return out + @contextlib.contextmanager def no_workspace_dir(): @@ -79,12 +160,53 @@ class ExtractLook(pype.api.Extractor): relationships = lookdata["relationships"] sets = relationships.keys() + # Extract the textures to transfer, possibly convert with maketx and + # remap the node paths to the destination path. Note that a source + # might be included more than once amongst the resources as they could + # be the input file to multiple nodes. resources = instance.data["resources"] + do_maketx = instance.data.get("maketx", False) + # Collect all unique files used in the resources + files = set() + for resource in resources: + files.update(os.path.normpath(f) for f in resource["files"]) + + # Process the resource files + transfers = list() + hardlinks = list() + hashes = dict() + for filepath in files: + source, mode, hash = self._process_texture( + filepath, do_maketx, staging=dir_path + ) + destination = self.resource_destination( + instance, source, do_maketx + ) + if mode == COPY: + transfers.append((source, destination)) + elif mode == HARDLINK: + hardlinks.append((source, destination)) + + # Store the hashes from hash to destination to include in the + # database + hashes[hash] = destination + + # Remap the resources to the destination path (change node attributes) + destinations = dict() remap = OrderedDict() # needs to be ordered, see color space values for resource in resources: + source = os.path.normpath(resource["source"]) + if source not in destinations: + # Cache destination as source resource might be included + # multiple times + destinations[source] = self.resource_destination( + instance, source, do_maketx + ) + + # Remap file node filename to destination attr = resource['attribute'] - remap[attr] = resource['destination'] + remap[attr] = destinations[source] # Preserve color space values (force value after filepath change) # This will also trigger in the same order at end of context to @@ -107,15 +229,17 @@ class ExtractLook(pype.api.Extractor): with lib.attribute_values(remap): with avalon.maya.maintained_selection(): cmds.select(sets, noExpand=True) - cmds.file(maya_path, - force=True, - typ="mayaAscii", - exportSelected=True, - preserveReferences=False, - channels=True, - constraints=True, - expressions=True, - constructionHistory=True) + cmds.file( + maya_path, + force=True, + typ="mayaAscii", + exportSelected=True, + preserveReferences=False, + channels=True, + constraints=True, + expressions=True, + constructionHistory=True + ) # Write the JSON data self.log.info("Extract json..") @@ -127,9 +251,90 @@ class ExtractLook(pype.api.Extractor): if "files" not in instance.data: instance.data["files"] = list() + if "hardlinks" not in instance.data: + instance.data["hardlinks"] = list() + if "transfers" not in instance.data: + instance.data["transfers"] = list() instance.data["files"].append(maya_fname) instance.data["files"].append(json_fname) - self.log.info("Extracted instance '%s' to: %s" % 
@contextlib.contextmanager def no_workspace_dir(): @@ -79,12 +160,53 @@ class ExtractLook(pype.api.Extractor): relationships = lookdata["relationships"] sets = relationships.keys() + # Extract the textures to transfer, possibly convert with maketx and + # remap the node paths to the destination path. Note that a source + # might be included more than once amongst the resources as they could + # be the input file to multiple nodes. resources = instance.data["resources"] + do_maketx = instance.data.get("maketx", False) + # Collect all unique files used in the resources + files = set() + for resource in resources: + files.update(os.path.normpath(f) for f in resource["files"]) + + # Process the resource files + transfers = list() + hardlinks = list() + hashes = dict() + for filepath in files: + source, mode, texture_hash = self._process_texture( + filepath, do_maketx, staging=dir_path + ) + destination = self.resource_destination( + instance, source, do_maketx + ) + if mode == COPY: + transfers.append((source, destination)) + elif mode == HARDLINK: + hardlinks.append((source, destination)) + + # Store the mapping from texture hash to destination to include + # in the database + hashes[texture_hash] = destination + + # Remap the resources to the destination path (change node attributes) + destinations = dict() remap = OrderedDict() # needs to be ordered, see color space values for resource in resources: + source = os.path.normpath(resource["source"]) + if source not in destinations: + # Cache destination as source resource might be included + # multiple times + destinations[source] = self.resource_destination( + instance, source, do_maketx + ) + + # Remap file node filename to destination attr = resource['attribute'] - remap[attr] = resource['destination'] + remap[attr] = destinations[source] # Preserve color space values (force value after filepath change) # This will also trigger in the same order at end of context to @@ -107,15 +229,17 @@ class ExtractLook(pype.api.Extractor): with lib.attribute_values(remap): with avalon.maya.maintained_selection(): cmds.select(sets, noExpand=True) - cmds.file(maya_path, - force=True, - typ="mayaAscii", - exportSelected=True, - preserveReferences=False, - channels=True, - constraints=True, - expressions=True, - constructionHistory=True) + cmds.file( + maya_path, + force=True, + typ="mayaAscii", + exportSelected=True, + preserveReferences=False, + channels=True, + constraints=True, + expressions=True, + constructionHistory=True + ) # Write the JSON data self.log.info("Extract json..") @@ -127,9 +251,91 @@ if "files" not in instance.data: instance.data["files"] = list() + if "hardlinks" not in instance.data: + instance.data["hardlinks"] = list() + if "transfers" not in instance.data: + instance.data["transfers"] = list() instance.data["files"].append(maya_fname) instance.data["files"].append(json_fname) - self.log.info("Extracted instance '%s' to: %s" % (instance.name, - maya_path)) + # Set up the resources transfers/links for the integrator + instance.data["transfers"].extend(transfers) + instance.data["hardlinks"].extend(hardlinks) + + # Source hash for the textures + instance.data["sourceHashes"] = hashes + + self.log.info("Extracted instance '%s' to: %s" % ( + instance.name, maya_path) + ) + + def resource_destination(self, instance, filepath, do_maketx): + + # Compute destination location + basename, ext = os.path.splitext(os.path.basename(filepath)) + + # If maketx then the texture will always end with .tx + if do_maketx: + ext = ".tx" + + return os.path.join( + instance.data["assumedDestination"], + "resources", + basename + ext + ) +
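An illustrative trace of `resource_destination()` (assumed values):

# instance.data['assumedDestination'] = '/proj/publish/look/v001'  (assumed)
# resource_destination(instance, '/textures/wood.exr', True)
#   -> '/proj/publish/look/v001/resources/wood.tx'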
+ def _process_texture(self, filepath, do_maketx, staging): + """Process a single texture file on disk for publishing. + This will: + 1. Check whether it's already published, if so it will do hardlink + 2. If not published and maketx is enabled, generate a new .tx file. + 3. Compute the destination path for the source file. + Args: + filepath (str): The source file path to process. + do_maketx (bool): Whether to produce a .tx file + Returns: + (str, int, str): The file to transfer, the transfer mode + (COPY or HARDLINK) and the source hash. + """ + + fname, ext = os.path.splitext(os.path.basename(filepath)) + + args = [] + if do_maketx: + args.append("maketx") + texture_hash = source_hash(filepath, *args) + + # If source has been published before with the same settings, + # then don't reprocess but hardlink from the original + existing = find_paths_by_hash(texture_hash) + if existing: + self.log.info("Found hash in database, preparing hardlink..") + source = next((p for p in existing if os.path.exists(p)), None) + if source: + return source, HARDLINK, texture_hash + else: + self.log.warning( + "Paths not found on disk, " + "skipping hardlink: %s" % (existing,) + ) + + if do_maketx and ext != ".tx": + # Produce .tx file in staging if source file is not .tx + converted = os.path.join( + staging, + "resources", + fname + ".tx" + ) + + # Ensure folder exists + if not os.path.exists(os.path.dirname(converted)): + os.makedirs(os.path.dirname(converted)) + + self.log.info("Generating .tx file for %s .." % filepath) + maketx(filepath, converted, + # Include `source-hash` as string metadata + "--sattrib", "sourceHash", texture_hash) + + return converted, COPY, texture_hash + + return filepath, COPY, texture_hash diff --git a/pype/plugins/maya/publish/increment_current_file_deadline.py b/pype/plugins/maya/publish/increment_current_file_deadline.py index 527f3d781d..6f644adacb 100644 --- a/pype/plugins/maya/publish/increment_current_file_deadline.py +++ b/pype/plugins/maya/publish/increment_current_file_deadline.py @@ -31,10 +31,11 @@ class IncrementCurrentFileDeadline(pyblish.api.ContextPlugin): current_filepath = context.data["currentFile"] new_filepath = version_up(current_filepath) - # Ensure the suffix is .ma because we're saving to `mayaAscii` type - if not new_filepath.endswith(".ma"): - self.log.warning("Refactoring scene to .ma extension") - new_filepath = os.path.splitext(new_filepath)[0] + ".ma" + # Pick the save type matching the scene's current extension + if new_filepath.endswith(".ma"): + fileType = "mayaAscii" + elif new_filepath.endswith(".mb"): + fileType = "mayaBinary" cmds.file(rename=new_filepath) - cmds.file(save=True, force=True, type="mayaAscii") + cmds.file(save=True, force=True, type=fileType) diff --git a/pype/plugins/maya/publish/submit_maya_deadline.py b/pype/plugins/maya/publish/submit_maya_deadline.py index 56e4b1ea32..0a97a9b98f 100644 --- a/pype/plugins/maya/publish/submit_maya_deadline.py +++ b/pype/plugins/maya/publish/submit_maya_deadline.py @@ -238,6 +238,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): # todo: This is a temporary fix for yeti variables "PEREGRINEL_LICENSE", + "SOLIDANGLE_LICENSE", "ARNOLD_LICENSE", "MAYA_MODULE_PATH", "TOOL_ENV" diff --git a/pype/plugins/maya/publish/validate_mesh_overlapping_uvs.py b/pype/plugins/maya/publish/validate_mesh_overlapping_uvs.py new file mode 100644 index 0000000000..3aae97b8fd --- /dev/null +++ b/pype/plugins/maya/publish/validate_mesh_overlapping_uvs.py @@ -0,0 +1,276 @@ +from maya import cmds + +import pyblish.api +import pype.api +import pype.maya.action +import math +import maya.api.OpenMaya as om +from pymel.core import polyUVSet + + +class GetOverlappingUVs(object): + + def _createBoundingCircle(self, meshfn): + """ Represent a face by center and radius + + :param meshfn: MFnMesh class + :type meshfn: :class:`maya.api.OpenMaya.MFnMesh` + :returns: (center, radius) + :rtype: tuple + """ + center = [] + radius = [] + for i in xrange(meshfn.numPolygons): # noqa: F821 + # get uvs from face + uarray = [] + varray = [] + for j in range(len(meshfn.getPolygonVertices(i))): + uv = meshfn.getPolygonUV(i, j) + uarray.append(uv[0]) + varray.append(uv[1]) + + # average the face's uvs to find the bounding-circle center + cu = 0.0 + cv = 0.0 + for j in range(len(uarray)): + cu += uarray[j] + cv += varray[j] + + cu /= len(uarray) + cv /= len(varray) + rsqr = 0.0 + for j in range(len(varray)): + du = uarray[j] - cu + dv = varray[j] - cv + dsqr = du * du + dv * dv + rsqr = dsqr if dsqr > rsqr else rsqr + + center.append(cu) + center.append(cv) + radius.append(math.sqrt(rsqr)) + + return center, radius + + def _createRayGivenFace(self, meshfn, faceId): + """ Represent a face by a series of edges(rays), i.e. + + :param meshfn: MFnMesh class + :type meshfn: :class:`maya.api.OpenMaya.MFnMesh` + :param faceId: face id + :type faceId: int + :returns: ``(True, orig, vec)``, or ``(False, None, None)`` + when the face has no valid UVs + :rtype: tuple + + .. code-block:: python + + orig = [orig1u, orig1v, orig2u, orig2v, ...
] + vec = [vec1u, vec1v, vec2u, vec2v, ... ] + """ + orig = [] + vec = [] + # get uvs + uarray = [] + varray = [] + for i in range(len(meshfn.getPolygonVertices(faceId))): + uv = meshfn.getPolygonUV(faceId, i) + uarray.append(uv[0]) + varray.append(uv[1]) + + if len(uarray) == 0 or len(varray) == 0: + return (False, None, None) + + # loop through all vertices to construct edges/rays + u = uarray[-1] + v = varray[-1] + for i in xrange(len(uarray)): # noqa: F821 + orig.append(uarray[i]) + orig.append(varray[i]) + vec.append(u - uarray[i]) + vec.append(v - varray[i]) + u = uarray[i] + v = varray[i] + + return (True, orig, vec) + + def _checkCrossingEdges(self, + face1Orig, + face1Vec, + face2Orig, + face2Vec): + """ Check if there are crossing edges between two faces. + Return True if there are crossing edges and False otherwise. + + :param face1Orig: origin of face 1 + :type face1Orig: list + :param face1Vec: face 1 edges + :type face1Vec: list + :param face2Orig: origin of face 2 + :type face2Orig: list + :param face2Vec: face 2 edges + :type face2Vec: list + + A face is represented by a series of edges(rays), i.e. + .. code-block:: python + + faceOrig[] = [orig1u, orig1v, orig2u, orig2v, ... ] + faceVec[] = [vec1u, vec1v, vec2u, vec2v, ... ] + """ + face1Size = len(face1Orig) + face2Size = len(face2Orig) + for i in xrange(0, face1Size, 2): # noqa: F821 + o1x = face1Orig[i] + o1y = face1Orig[i+1] + v1x = face1Vec[i] + v1y = face1Vec[i+1] + n1x = v1y + n1y = -v1x + for j in xrange(0, face2Size, 2): # noqa: F821 + # Given ray1(O1, V1) and ray2(O2, V2) + # Normal of ray1 is (V1.y, -V1.x) + o2x = face2Orig[j] + o2y = face2Orig[j+1] + v2x = face2Vec[j] + v2y = face2Vec[j+1] + n2x = v2y + n2y = -v2x + + # Find t for ray2 + # t = [(o1x-o2x)n1x + (o1y-o2y)n1y] / + # (v2x * n1x + v2y * n1y) + denum = v2x * n1x + v2y * n1y + # Edges are parallel if denum is close to 0. + if math.fabs(denum) < 0.000001: + continue + t2 = ((o1x-o2x) * n1x + (o1y-o2y) * n1y) / denum + if (t2 < 0.00001 or t2 > 0.99999): + continue + + # Find t for ray1 + # t = [(o2x-o1x)n2x + # + (o2y-o1y)n2y] / (v1x * n2x + v1y * n2y) + denum = v1x * n2x + v1y * n2y + # Edges are parallel if denum is close to 0.
+                if math.fabs(denum) < 0.000001:
+                    continue
+                t1 = ((o2x-o1x) * n2x + (o2y-o1y) * n2y) / denum
+
+                # Edges intersect
+                if (t1 > 0.00001 and t1 < 0.99999):
+                    return 1
+
+        return 0
+
+    def _getOverlapUVFaces(self, meshName):
+        """ Return overlapping faces
+
+        :param meshName: name of mesh
+        :type meshName: str
+        :returns: list of overlapping faces
+        :rtype: list
+        """
+        faces = []
+        # find polygon mesh node
+        selList = om.MSelectionList()
+        selList.add(meshName)
+        mesh = selList.getDependNode(0)
+        if mesh.apiType() == om.MFn.kTransform:
+            dagPath = selList.getDagPath(0)
+            dagFn = om.MFnDagNode(dagPath)
+            child = dagFn.child(0)
+            if child.apiType() != om.MFn.kMesh:
+                raise Exception("Can't find polygon mesh")
+            mesh = child
+        meshfn = om.MFnMesh(mesh)
+
+        center, radius = self._createBoundingCircle(meshfn)
+        for i in xrange(meshfn.numPolygons):  # noqa: F821
+            rayb1, face1Orig, face1Vec = self._createRayGivenFace(
+                meshfn, i)
+            if not rayb1:
+                continue
+            cui = center[2*i]
+            cvi = center[2*i+1]
+            ri = radius[i]
+            # Exclude the degenerate face
+            # if(area(face1Orig) < 0.000001) continue;
+            # Loop through face j where j != i
+            for j in range(i+1, meshfn.numPolygons):
+                cuj = center[2*j]
+                cvj = center[2*j+1]
+                rj = radius[j]
+                du = cuj - cui
+                dv = cvj - cvi
+                dsqr = du * du + dv * dv
+                # Quick rejection if bounding circles don't overlap
+                if (dsqr >= (ri + rj) * (ri + rj)):
+                    continue
+
+                rayb2, face2Orig, face2Vec = self._createRayGivenFace(
+                    meshfn, j)
+                if not rayb2:
+                    continue
+                # Exclude the degenerate face
+                # if(area(face2Orig) < 0.000001): continue;
+                if self._checkCrossingEdges(face1Orig,
+                                            face1Vec,
+                                            face2Orig,
+                                            face2Vec):
+                    face1 = '%s.f[%d]' % (meshfn.name(), i)
+                    face2 = '%s.f[%d]' % (meshfn.name(), j)
+                    if face1 not in faces:
+                        faces.append(face1)
+                    if face2 not in faces:
+                        faces.append(face2)
+        return faces
+
+
+class ValidateMeshHasOverlappingUVs(pyblish.api.InstancePlugin):
+    """ Validate the current mesh for overlapping UVs.
+
+    It validates whether the current UVs are overlapping or not.
+    It is optional and serves to warn the publisher about it.
+    """
+
+    order = pype.api.ValidateMeshOrder
+    hosts = ['maya']
+    families = ['model']
+    category = 'geometry'
+    label = 'Mesh Has Overlapping UVs'
+    actions = [pype.maya.action.SelectInvalidAction]
+    optional = True
+
+    @classmethod
+    def _has_overlapping_uvs(cls, node):
+        """ Check if mesh has overlapping UVs.
+
+        :param node: node to check
+        :type node: str
+        :returns: True if it has overlapping UVs, False otherwise
+        :rtype: bool
+        """
+        ovl = GetOverlappingUVs()
+
+        for uv in polyUVSet(node, q=1, auv=1):
+            polyUVSet(node, cuv=1, uvSet=uv)
+            of = ovl._getOverlapUVFaces(str(node))
+            if of:
+                return True
+        return False
+
+    @classmethod
+    def get_invalid(cls, instance):
+        invalid = []
+
+        for node in cmds.ls(instance, type='mesh'):
+            if cls._has_overlapping_uvs(node):
+                invalid.append(node)
+
+        return invalid
+
+    def process(self, instance):
+
+        invalid = self.get_invalid(instance)
+        if invalid:
+            raise RuntimeError("Meshes found with overlapping "
+                               "UVs: {0}".format(invalid))
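For anyone wanting to poke at the overlap detection interactively, a minimal sketch (run inside a Maya session; the mesh name `pSphere1` and the import path are assumptions for illustration and may need adjusting to how the plugin directory is exposed on `sys.path`):

    from pype.plugins.maya.publish.validate_mesh_overlapping_uvs import (
        GetOverlappingUVs
    )

    ovl = GetOverlappingUVs()
    # Returns face components such as 'pSphereShape1.f[12]'
    print(ovl._getOverlapUVFaces("pSphere1"))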
diff --git a/pype/plugins/maya/publish/validate_resources.py b/pype/plugins/maya/publish/validate_resources.py
index bc10d3003c..47a94e7529 100644
--- a/pype/plugins/maya/publish/validate_resources.py
+++ b/pype/plugins/maya/publish/validate_resources.py
@@ -1,8 +1,9 @@
+import os
+from collections import defaultdict
+
 import pyblish.api
 import pype.api

-import os
-

 class ValidateResources(pyblish.api.InstancePlugin):
     """Validates mapped resources.
@@ -12,18 +13,45 @@ class ValidateResources(pyblish.api.InstancePlugin):
     media.

     This validates:
-        - The resources are existing files.
-        - The resources have correctly collected the data.
+        - The resources have unique filenames (without extension)

     """

     order = pype.api.ValidateContentsOrder
-    label = "Resources"
+    label = "Resources Unique"

     def process(self, instance):
-        for resource in instance.data.get('resources', []):
-            # Required data
-            assert "source" in resource, "No source found"
-            assert "files" in resource, "No files from source"
-            assert all(os.path.exists(f) for f in resource['files'])
+        resources = instance.data.get("resources", [])
+        if not resources:
+            self.log.debug("No resources to validate..")
+            return
+
+        basenames = defaultdict(set)
+
+        for resource in resources:
+            files = resource.get("files", [])
+            for filename in files:
+
+                # Use normalized paths in comparison and ignore case
+                # sensitivity
+                filename = os.path.normpath(filename).lower()
+
+                basename = os.path.splitext(os.path.basename(filename))[0]
+                basenames[basename].add(filename)
+
+        invalid_resources = list()
+        for basename, sources in basenames.items():
+            if len(sources) > 1:
+                invalid_resources.extend(sources)
+
+                self.log.error(
+                    "Non-unique resource name: {0} "
+                    "(sources: {1})".format(
+                        basename,
+                        list(sources)
+                    )
+                )
+
+        if invalid_resources:
+            raise RuntimeError("Invalid resources in instance.")
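The core of the new check is grouping normalized file paths by their extension-less basename; a standalone sketch of the same logic (plain Python, no pyblish required):

    import os
    from collections import defaultdict

    def duplicated_basenames(files):
        basenames = defaultdict(set)
        for filename in files:
            filename = os.path.normpath(filename).lower()
            basename = os.path.splitext(os.path.basename(filename))[0]
            basenames[basename].add(filename)
        # Keep only basenames that map to more than one source file
        return {k: v for k, v in basenames.items() if len(v) > 1}

    # 'wood.tx' and 'wood.exr' collide on the basename 'wood'
    print(duplicated_basenames(["/tex/wood.tx", "/tex/wood.exr"]))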
- - """ - - order = pype.api.ValidateContentsOrder - label = "Transfers" - - def process(self, instance): - - transfers = instance.data.get("transfers", []) - if not transfers: - return - - # Collect all destination with its sources - collected = defaultdict(set) - for source, destination in transfers: - - # Use normalized paths in comparison and ignore case sensitivity - source = os.path.normpath(source).lower() - destination = os.path.normpath(destination).lower() - - collected[destination].add(source) - - invalid_destinations = list() - for destination, sources in collected.items(): - if len(sources) > 1: - invalid_destinations.append(destination) - - self.log.error("Non-unique file transfer for resources: " - "{0} (sources: {1})".format(destination, - list(sources))) - - if invalid_destinations: - raise RuntimeError("Invalid transfers in queue.") diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py index c82d697541..45cd6e616e 100644 --- a/pype/plugins/nuke/load/load_sequence.py +++ b/pype/plugins/nuke/load/load_sequence.py @@ -101,7 +101,7 @@ class LoadSequence(api.Loader): if namespace is None: namespace = context['asset']['name'] - file = self.fname + file = self.fname.replace("\\", "/") log.info("file: {}\n".format(self.fname)) read_name = "Read_" + context["representation"]["context"]["subset"] @@ -112,7 +112,7 @@ class LoadSequence(api.Loader): r = nuke.createNode( "Read", "name {}".format(read_name)) - r["file"].setValue(self.fname) + r["file"].setValue(file) # Set colorspace defined in version data colorspace = context["version"]["data"].get("colorspace", None) diff --git a/pype/templates.py b/pype/templates.py index 58ae54f466..071426859a 100644 --- a/pype/templates.py +++ b/pype/templates.py @@ -1,18 +1,20 @@ import os import re -from avalon import io -from avalon import api as avalon +import sys +from avalon import io, api as avalon, lib as avalonlib from . 
diff --git a/pype/templates.py b/pype/templates.py
index 58ae54f466..071426859a 100644
--- a/pype/templates.py
+++ b/pype/templates.py
@@ -1,18 +1,20 @@
 import os
 import re
-from avalon import io
-from avalon import api as avalon
+import sys
+from avalon import io, api as avalon, lib as avalonlib
 from . import lib

 # from pypeapp.api import (Templates, Logger, format)
 from pypeapp import Logger

 log = Logger().get_logger(__name__, os.getenv("AVALON_APP", "pype-config"))

-SESSION = None
+
+self = sys.modules[__name__]
+self.SESSION = None


 def set_session():
     lib.set_io_database()
-    SESSION = avalon.session
+    self.SESSION = avalon.session


 def load_data_from_templates():
@@ -104,9 +106,9 @@ def set_project_code(code):
         os.environ[KEY]: project code
         avalon.sesion[KEY]: project code
     """
-    if SESSION is None:
+    if self.SESSION is None:
         set_session()
-    SESSION["AVALON_PROJECTCODE"] = code
+    self.SESSION["AVALON_PROJECTCODE"] = code
     os.environ["AVALON_PROJECTCODE"] = code


@@ -118,9 +120,9 @@ def get_project_name():
         string: project name

     """
-    if SESSION is None:
+    if self.SESSION is None:
         set_session()
-    project_name = SESSION.get("AVALON_PROJECT", None) \
+    project_name = self.SESSION.get("AVALON_PROJECT", None) \
         or os.getenv("AVALON_PROJECT", None)
     assert project_name, log.error("missing `AVALON_PROJECT`"
                                    "in avalon session "
@@ -138,9 +140,9 @@ def get_asset():
     Raises:
         log: error
     """
-    if SESSION is None:
+    if self.SESSION is None:
         set_session()
-    asset = SESSION.get("AVALON_ASSET", None) \
+    asset = self.SESSION.get("AVALON_ASSET", None) \
         or os.getenv("AVALON_ASSET", None)
     log.info("asset: {}".format(asset))
     assert asset, log.error("missing `AVALON_ASSET`"
@@ -159,9 +161,9 @@ def get_task():
     Raises:
         log: error
     """
-    if SESSION is None:
+    if self.SESSION is None:
         set_session()
-    task = SESSION.get("AVALON_TASK", None) \
+    task = self.SESSION.get("AVALON_TASK", None) \
         or os.getenv("AVALON_TASK", None)
     assert task, log.error("missing `AVALON_TASK`"
                            "in avalon session "
@@ -196,9 +198,9 @@ def set_hierarchy(hierarchy):
     Args:
         hierarchy (string): hierarchy path ("silo/folder/seq")
     """
-    if SESSION is None:
+    if self.SESSION is None:
         set_session()
-    SESSION["AVALON_HIERARCHY"] = hierarchy
+    self.SESSION["AVALON_HIERARCHY"] = hierarchy
     os.environ["AVALON_HIERARCHY"] = hierarchy


@@ -219,13 +221,14 @@ def get_context_data(project=None,
         dict: contextual data

     """
-
+    application = avalonlib.get_application(os.environ["AVALON_APP_NAME"])
     data = {
         "task": task or get_task(),
         "asset": asset or get_asset(),
         "project": {"name": project or get_project_name(),
                     "code": get_project_code()},
         "hierarchy": hierarchy or get_hierarchy(),
+        "app": application["application_dir"]
     }
     return data


@@ -248,10 +251,10 @@ def set_avalon_workdir(project=None,
         avalon.session[AVALON_WORKDIR]: workdir path

     """
-    if SESSION is None:
+    if self.SESSION is None:
         set_session()
-    awd = SESSION.get("AVALON_WORKDIR", None) \
-        or os.getenv("AVALON_WORKDIR", None)
+
+    awd = self.SESSION.get("AVALON_WORKDIR", None) or os.getenv("AVALON_WORKDIR", None)

     data = get_context_data(project, hierarchy, asset, task)
     if (not awd) or ("{" not in awd):
@@ -259,7 +262,7 @@ def set_avalon_workdir(project=None,

     awd_filled = os.path.normpath(format(awd, data))

-    SESSION["AVALON_WORKDIR"] = awd_filled
+    self.SESSION["AVALON_WORKDIR"] = awd_filled
     os.environ["AVALON_WORKDIR"] = awd_filled
     log.info("`AVALON_WORKDIR` fixed to: {}".format(awd_filled))
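A note on the `self = sys.modules[__name__]` idiom introduced in templates.py: assigning to a bare global name inside a function only binds a local unless `global` is declared (which is why the old `SESSION = avalon.session` in `set_session` had no effect), whereas attribute assignment on the module object always mutates shared module state. A tiny self-contained illustration (a hypothetical standalone module, not part of pype):

    import sys

    self = sys.modules[__name__]
    self.SESSION = None

    def set_session():
        # Mutates the module attribute; a bare `SESSION = {...}` here
        # would only create a function-local name.
        self.SESSION = {"AVALON_PROJECTCODE": "prj"}

    set_session()
    assert SESSION == {"AVALON_PROJECTCODE": "prj"}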