Fix: Locally copied version of last published workfile is not incremented (#4722)

* Fix: Locally copied version of last published workfile is not incremented

* fix subset first match

* correct anatomy name

* Fix typo and linting

* keep source filepath for further path conformation

* fetch also input dependencies of workfile

* required changes

* lint

* fix case only one subset

* Enhancement: copy last workfile as reusable methods (#6)

* Enhancement: copy last published workfile as reusable methods (WiP)

* Added get_host_extensions method, added subset_id and last_version_doc access, added optional arguments to get_last_published_workfile

* Plugged in the new methods + minor changes

* Added docstrings, last workfile optional argument, and removed unused code

* Using new implementation to get local workfile path. Warning: It adds an extra dot to the extension which I need to fix

* Refactoring and fixed double dots

* Added match subset_id and get representation method, plus clean up

* Removed unused vars

* Fixed some rebasing errors

* delinted unchanged code and renamed get_representation into get_representation_with_task

* This time it's really delinted, I hope...

* Update openpype/modules/sync_server/sync_server.py

reprenation isn't the right spelling (:

Co-authored-by: Félix David <felixg.david@gmail.com>

* Changes based on reviews

* Fixed non-imperative docstring and missing space

* Fixed another non-imperative docstring

* Update openpype/modules/sync_server/sync_server.py

Fixed typo

Co-authored-by: Félix David <felixg.david@gmail.com>

Co-authored-by: Hayley GUILLOT <hayleyguillot@outlook.com>
Co-authored-by: Félix David <felixg.david@gmail.com>

* Fix: syntax error

* fix single subset case

* Restore sync server enabled test in hook

* Python2 syntax

* renaming and missing key case handling

* Fix local workfile overwritten on update in some cases (#7)

* Fix: Local workfile overwrite when local version number is higher than published workfile version number (WiP)

* Changed regex search, clean up

* Re-added mistakenly removed newline

* lint

* remove anticipated functions for cleaner PR

* remove funcs from entities.py

* change to get_last_workfile_with_version

* clean

* Update openpype/modules/sync_server/sync_server.py

Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com>

* removed get_last_published_workfile_path

* moved hook to sync server module

* fix lint

* Refactor - download only if not present

* Refactor - change to list instead of set

* Refactor - removing unnecessary code

last_published_workfile_path must exist or we wouldn't get there. Use the version only from that.

* Refactor - removing unnecessary imports

* Added check for max fail tries

* Refactor - cleaned up how to get last workfile

* Updated docstrings

* Remove unused imports

Co-authored-by: Félix David <felixg.david@gmail.com>

* OP-5466 - run this on more DCCs

* Updated documentation

* Fix - handle hero versions

Skip hero versions; look only at versioned publishes to get the max version id.

* Hound

* Refactor - simplified download_last_published_workfile

Logic should be in pre hook

* Skip if no profile found

* Removed unwanted import

* Use collected project_doc

Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com>

* Use cached project_settings

Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com>

---------

Co-authored-by: Félix David <felixg.david@gmail.com>
Co-authored-by: Sharkitty <81646000+Sharkitty@users.noreply.github.com>
Co-authored-by: Hayley GUILLOT <hayleyguillot@outlook.com>
Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com>
Co-authored-by: Jakub Ježek <jakubjezek001@gmail.com>
Commit fec104de8e by Petr Kalis, 2023-05-02 18:49:02 +02:00, committed via GitHub
4 changed files with 379 additions and 148 deletions


@@ -0,0 +1,186 @@
import os
import shutil

from openpype.client.entities import (
    get_representations,
    get_project
)
from openpype.lib import PreLaunchHook
from openpype.lib.profiles_filtering import filter_profiles
from openpype.modules.sync_server.sync_server import (
    download_last_published_workfile,
)
from openpype.pipeline.template_data import get_template_data
from openpype.pipeline.workfile.path_resolving import (
    get_workfile_template_key,
)
from openpype.settings.lib import get_project_settings


class CopyLastPublishedWorkfile(PreLaunchHook):
    """Copy last published workfile as first workfile.

    Prelaunch hook works only if last workfile leads to a not yet
    existing file.
        - That is possible only if it's the first version.
    """

    # Before `AddLastWorkfileToLaunchArgs`
    order = -1
    # Any DCC can be used, except TrayPublisher and other specials
    app_groups = ["blender", "photoshop", "tvpaint", "aftereffects",
                  "nuke", "nukeassist", "nukex", "hiero", "nukestudio",
                  "maya", "harmony", "celaction", "flame", "fusion",
                  "houdini"]

    def execute(self):
        """Copy the last published workfile if no local workfile exists.

        1- Check if the setting for this feature is enabled
        2- Check if a workfile in the work area doesn't exist yet
        3- Check if a published workfile exists and download it locally
        4- Substitute the copied published workfile as first workfile
           with version incremented by +1

        Returns:
            None: This is a void method.
        """
        sync_server = self.modules_manager.get("sync_server")
        if not sync_server or not sync_server.enabled:
            self.log.debug("Sync server module is not enabled or available")
            return

        # Check there is no workfile available
        last_workfile = self.data.get("last_workfile_path")
        if os.path.exists(last_workfile):
            self.log.debug(
                "Last workfile exists. Skipping {} process.".format(
                    self.__class__.__name__
                )
            )
            return

        # Get data
        project_name = self.data["project_name"]
        asset_name = self.data["asset_name"]
        task_name = self.data["task_name"]
        task_type = self.data["task_type"]
        host_name = self.application.host_name

        # Check settings have enabled it
        project_settings = get_project_settings(project_name)
        profiles = project_settings["global"]["tools"]["Workfiles"][
            "last_workfile_on_startup"
        ]
        filter_data = {
            "tasks": task_name,
            "task_types": task_type,
            "hosts": host_name,
        }
        last_workfile_settings = filter_profiles(profiles, filter_data)
        if not last_workfile_settings:
            return

        use_last_published_workfile = last_workfile_settings.get(
            "use_last_published_workfile"
        )
        if use_last_published_workfile is None:
            self.log.info(
                (
                    "Seems like an old version of settings is used."
                    ' Can\'t access custom templates in host "{}".'.format(
                        host_name
                    )
                )
            )
            return
        elif use_last_published_workfile is False:
            self.log.info(
                (
                    'Project "{}" has turned off using the last published'
                    ' workfile as first workfile for host "{}"'.format(
                        project_name, host_name
                    )
                )
            )
            return

        max_retries = int((sync_server.sync_project_settings[project_name]
                           ["config"]
                           ["retry_cnt"]))

        self.log.info("Trying to fetch last published workfile...")

        asset_doc = self.data.get("asset_doc")
        anatomy = self.data.get("anatomy")

        context_filters = {
            "asset": asset_name,
            "family": "workfile",
            "task": {"name": task_name, "type": task_type}
        }
        workfile_representations = list(get_representations(
            project_name,
            context_filters=context_filters
        ))
        if not workfile_representations:
            self.log.debug(
                'No published workfile for task "{}" and host "{}".'.format(
                    task_name, host_name
                )
            )
            return

        # Skip hero versions: keep only representations carrying a version
        # in their context, then pick the highest one
        filtered_repres = filter(
            lambda r: r["context"].get("version") is not None,
            workfile_representations
        )
        workfile_representation = max(
            filtered_repres, key=lambda r: r["context"]["version"]
        )

        # Copy file and substitute path
        last_published_workfile_path = download_last_published_workfile(
            host_name,
            project_name,
            task_name,
            workfile_representation,
            max_retries,
            anatomy=anatomy
        )
        if not last_published_workfile_path:
            self.log.debug(
                "Couldn't download {}".format(last_published_workfile_path)
            )
            return

        project_doc = self.data["project_doc"]
        project_settings = self.data["project_settings"]
        template_key = get_workfile_template_key(
            task_name, host_name, project_name, project_settings
        )

        # Get workfile data
        workfile_data = get_template_data(
            project_doc, asset_doc, task_name, host_name
        )

        extension = last_published_workfile_path.split(".")[-1]
        workfile_data["version"] = (
            workfile_representation["context"]["version"] + 1)
        workfile_data["ext"] = extension

        anatomy_result = anatomy.format(workfile_data)
        local_workfile_path = anatomy_result[template_key]["path"]

        # Copy last published workfile to local workfile directory
        shutil.copy(
            last_published_workfile_path,
            local_workfile_path,
        )

        self.data["last_workfile_path"] = local_workfile_path
        # Keep source filepath for further path conformation
        self.data["source_filepath"] = last_published_workfile_path
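
To make step 4 concrete, here is a minimal sketch of the version bump the hook performs. All values below are hypothetical, and the anatomy formatting is elided:

# Hypothetical values; in the hook these come from the queried
# representation and from download_last_published_workfile().
workfile_representation = {"context": {"version": 3}}
last_published_workfile_path = "/publish/sh010_workfile_v003.ma"

workfile_data = {}
# Increment the published version by +1 for the new local workfile
workfile_data["version"] = workfile_representation["context"]["version"] + 1
# Reuse the published file's extension (no extra dot)
workfile_data["ext"] = last_published_workfile_path.split(".")[-1]

assert workfile_data["version"] == 4
assert workfile_data["ext"] == "ma"
# anatomy.format(workfile_data)[template_key]["path"] would then resolve
# the local work area path for v004, where the published file is copied.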


@@ -3,10 +3,15 @@ import os
import asyncio
import threading
import concurrent.futures
from time import sleep

from .providers import lib

from openpype.client.entity_links import get_linked_representation_id
from openpype.lib import Logger
from openpype.lib.local_settings import get_local_site_id
from openpype.modules.base import ModulesManager
from openpype.pipeline import Anatomy
from openpype.pipeline.load.utils import get_representation_path_with_anatomy

from .utils import SyncStatus, ResumableError
@@ -189,6 +194,98 @@ def _site_is_working(module, project_name, site_name, site_config):
    return handler.is_active()


def download_last_published_workfile(
    host_name: str,
    project_name: str,
    task_name: str,
    workfile_representation: dict,
    max_retries: int,
    anatomy: Anatomy = None,
) -> str:
    """Download the last published workfile.

    Args:
        host_name (str): Host name.
        project_name (str): Project name.
        task_name (str): Task name.
        workfile_representation (dict): Workfile representation.
        max_retries (int): Declare complete file failure only after this
            many attempts.
        anatomy (Anatomy, optional): Anatomy (used for optimization).
            Defaults to None.

    Returns:
        str: Path of the localized last published workfile.
    """
    if not anatomy:
        anatomy = Anatomy(project_name)

    # Get sync server module
    sync_server = ModulesManager().modules_by_name.get("sync_server")
    if not sync_server or not sync_server.enabled:
        print("Sync server module is disabled or unavailable.")
        return

    if not workfile_representation:
        print(
            "No published workfile for task '{}' and host '{}'.".format(
                task_name, host_name
            )
        )
        return

    last_published_workfile_path = get_representation_path_with_anatomy(
        workfile_representation, anatomy
    )
    if (not last_published_workfile_path or
            not os.path.exists(last_published_workfile_path)):
        return

    # If representation isn't available on remote site, then return.
    if not sync_server.is_representation_on_site(
        project_name,
        workfile_representation["_id"],
        sync_server.get_remote_site(project_name),
    ):
        print(
            "Representation for task '{}' and host '{}' is not available"
            " on the remote site.".format(task_name, host_name)
        )
        return

    # Get local site
    local_site_id = get_local_site_id()

    # Add workfile representation to local site, together with its linked
    # (input dependency) representations
    representation_ids = {workfile_representation["_id"]}
    representation_ids.update(
        get_linked_representation_id(
            project_name, repre_id=workfile_representation["_id"]
        )
    )
    for repre_id in representation_ids:
        if not sync_server.is_representation_on_site(project_name, repre_id,
                                                     local_site_id):
            sync_server.add_site(
                project_name,
                repre_id,
                local_site_id,
                force=True,
                priority=99
            )

    sync_server.reset_timer()

    print("Starting to download: {}".format(last_published_workfile_path))

    # While representation unavailable locally, wait.
    while not sync_server.is_representation_on_site(
        project_name, workfile_representation["_id"], local_site_id,
        max_retries=max_retries
    ):
        sleep(5)

    return last_published_workfile_path


class SyncServerThread(threading.Thread):
    """
    Separate thread running synchronization server with asyncio loop.
@@ -358,7 +455,6 @@ class SyncServerThread(threading.Thread):
                duration = time.time() - start_time
                self.log.debug("One loop took {:.2f}s".format(duration))

                delay = self.module.get_loop_delay(project_name)
                self.log.debug(
                    "Waiting for {} seconds to new loop".format(delay)
@@ -370,8 +466,8 @@ class SyncServerThread(threading.Thread):
            self.log.warning(
                "ConnectionResetError in sync loop, trying next loop",
                exc_info=True)
        except asyncio.exceptions.CancelledError:
            # cancelling timer
            pass
        except ResumableError:
            self.log.warning(
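
Stepping back from the hunks: a hedged sketch of how the prelaunch hook above calls download_last_published_workfile. The representation and anatomy are assumed to be already queried, and the literal values are illustrative only:

# Assumed available in scope: workfile_representation (queried via
# get_representations) and anatomy (from the launch context).
last_published_workfile_path = download_last_published_workfile(
    "maya",                   # host_name
    "MyProject",              # project_name
    "animation",              # task_name
    workfile_representation,  # highest versioned workfile representation
    3,                        # max_retries, from the "retry_cnt" setting
    anatomy=anatomy,          # optional, avoids rebuilding Anatomy
)
if not last_published_workfile_path:
    print("Published workfile could not be localized")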


@@ -838,6 +838,18 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
        return ret_dict

    def get_launch_hook_paths(self):
        """Implementation for applications launch hooks.

        Returns:
            (str): Full absolute path to directory with hooks for the module.
        """
        return os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            "launch_hooks"
        )

    # Needs to be refactored after Settings are updated
    # # Methods for Settings to get appropriate values to fill forms
    # def get_configurable_items(self, scope=None):
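
For orientation, a small sketch of what this method yields. The instance would normally be created by ModulesManager rather than by hand, and the printed path is illustrative:

# Hypothetical: sync_server_module is an already initialized
# SyncServerModule instance obtained from ModulesManager.
hook_dir = sync_server_module.get_launch_hook_paths()
print(hook_dir)
# e.g. ".../openpype/modules/sync_server/launch_hooks", where the
# CopyLastPublishedWorkfile prelaunch hook now lives.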
@@ -1045,9 +1057,23 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
        self.sync_server_thread.reset_timer()

    def is_representation_on_site(
        self, project_name, representation_id, site_name, max_retries=None
    ):
        """Checks if 'representation_id' has all files avail. on 'site_name'

        Args:
            project_name (str)
            representation_id (str)
            site_name (str)
            max_retries (int, optional): Provide only if the method is used
                in a while loop, to bail out eventually.

        Returns:
            (bool): True if 'representation_id' has all its files correctly
                on the 'site_name'.

        Raises:
            (ValueError): Only if 'max_retries' is provided and the
                upload/download failed too many times, to limit an infinite
                loop check.
        """
        representation = get_representation_by_id(project_name,
                                                  representation_id,
                                                  fields=["_id", "files"])
@@ -1060,6 +1086,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
            if site["name"] != site_name:
                continue

            if max_retries:
                tries = self._get_tries_count_from_rec(site)
                if tries >= max_retries:
                    raise ValueError("Failed too many times")

            if (site.get("progress") or site.get("error") or
                    not site.get("created_dt")):
                return False
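
Finally, a sketch of how the new max_retries guard combines with the polling loop in download_last_published_workfile. Here sync_server, repre_id and local_site_id are assumed to exist in the caller's scope:

from time import sleep

# ValueError is raised by is_representation_on_site() once a site record
# has accumulated max_retries failed tries, breaking an otherwise
# endless wait for the download to finish.
try:
    while not sync_server.is_representation_on_site(
        "MyProject", repre_id, local_site_id, max_retries=3
    ):
        sleep(5)
    print("Representation is now available locally")
except ValueError:
    print("Sync failed too many times, giving up")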