Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 12:54:40 +01:00)
Fix conflicts from 'develop'
parent ca78a5ca2a
commit 96f7cb3959
3 changed files with 122 additions and 30 deletions
pype/api.py (22 changed lines)
@@ -6,6 +6,14 @@ from pypeapp import (
     execute
 )

+from pypeapp.lib.mongo import (
+    decompose_url,
+    compose_url,
+    get_default_components
+)
+
+from . import resources
+
 from .plugin import (
     Extractor,
@@ -30,9 +38,11 @@ from .lib import (
     get_hierarchy,
     get_subsets,
     get_version_from_path,
+    get_last_version_from_path,
     modified_environ,
     add_tool_to_environment,
-    source_hash
+    source_hash,
+    get_latest_version
 )

 # Special naming case for subprocess since its a built-in method.
@@ -44,6 +54,12 @@ __all__ = [
     "project_overrides_dir_path",
     "config",
     "execute",
+    "decompose_url",
+    "compose_url",
+    "get_default_components",
+
+    # Resources
+    "resources",

     # plugin classes
     "Extractor",
@@ -68,9 +84,11 @@ __all__ = [
     "get_asset",
     "get_subsets",
     "get_version_from_path",
+    "get_last_version_from_path",
     "modified_environ",
     "add_tool_to_environment",
     "source_hash",

-    "subprocess"
+    "subprocess",
+    "get_latest_version"
 ]
pype/lib.py (84 changed lines)
@@ -469,6 +469,43 @@ def get_version_from_path(file):
     )


+def get_last_version_from_path(path_dir, filter):
+    """
+    Finds last version of given directory content
+
+    Args:
+        path_dir (string): directory path
+        filter (list): list of strings used as file name filter
+
+    Returns:
+        string: file name with last version
+
+    Example:
+        last_version_file = get_last_version_from_path(
+            "/project/shots/shot01/work", ["shot01", "compositing", "nk"])
+    """
+
+    assert os.path.isdir(path_dir), "`path_dir` argument needs to be directory"
+    assert isinstance(filter, list) and (
+        len(filter) != 0), "`filter` argument needs to be list and not empty"
+
+    filtred_files = list()
+
+    # form regex for filtering
+    patern = r".*".join(filter)
+
+    for f in os.listdir(path_dir):
+        if not re.findall(patern, f):
+            continue
+        filtred_files.append(f)
+
+    if filtred_files:
+        sorted(filtred_files)
+        return filtred_files[-1]
+    else:
+        return None
+
+
 def get_avalon_database():
     if io._database is None:
         set_io_database()
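A minimal usage sketch for the new get_last_version_from_path helper; the directory and filter values below are illustrative, not taken from the repository:

    from pype.lib import get_last_version_from_path

    # The filter terms are joined into a single regex ("shot01.*compositing.*nk"),
    # every entry in the directory is matched against it, and the last matching
    # file name is returned (or None when nothing matches).
    last_work_file = get_last_version_from_path(
        "/project/shots/shot01/work", ["shot01", "compositing", "nk"]
    )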
@@ -482,14 +519,6 @@ def set_io_database():
     io.install()


-def get_all_avalon_projects():
-    db = get_avalon_database()
-    projects = []
-    for name in db.collection_names():
-        projects.append(db[name].find_one({'type': 'project'}))
-    return projects
-
-
 def filter_pyblish_plugins(plugins):
     """
     This servers as plugin filter / modifier for pyblish. It will load plugin
@@ -610,7 +639,7 @@ def get_subsets(asset_name,

         if len(repres_out) > 0:
             output_dict[subset["name"]] = {"version": version_sel,
-                                           "representaions": repres_out}
+                                           "representations": repres_out}

     return output_dict

@@ -1350,7 +1379,6 @@ def ffprobe_streams(path_to_file):
     log.debug("FFprobe output: {}".format(popen_output))
     return json.loads(popen_output)["streams"]
-


 def source_hash(filepath, *args):
     """Generate simple identifier for a source file.
     This is used to identify whether a source file has previously been
@@ -1370,3 +1398,39 @@ def source_hash(filepath, *args):
     time = str(os.path.getmtime(filepath))
     size = str(os.path.getsize(filepath))
     return "|".join([file_name, time, size] + list(args)).replace(".", ",")
+
+
+def get_latest_version(asset_name, subset_name):
+    """Retrieve latest version from `asset_name`, and `subset_name`.
+
+    Args:
+        asset_name (str): Name of asset.
+        subset_name (str): Name of subset.
+    """
+    # Get asset
+    asset_name = io.find_one(
+        {"type": "asset", "name": asset_name}, projection={"name": True}
+    )
+
+    subset = io.find_one(
+        {"type": "subset", "name": subset_name, "parent": asset_name["_id"]},
+        projection={"_id": True, "name": True},
+    )
+
+    # Check if subsets actually exists.
+    assert subset, "No subsets found."
+
+    # Get version
+    version_projection = {
+        "name": True,
+        "parent": True,
+    }
+
+    version = io.find_one(
+        {"type": "version", "parent": subset["_id"]},
+        projection=version_projection,
+        sort=[("name", -1)],
+    )
+
+    assert version, "No version found, this is a bug"
+
+    return version
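A minimal usage sketch for the new get_latest_version helper; the asset and subset names are illustrative, and it assumes avalon's io session is already installed for the active project. The returned value is the version document, so the version number itself sits under its "name" key:

    from pype.lib import get_latest_version

    version_doc = get_latest_version("shot01", "renderCompositingMain")
    latest_number = version_doc["name"]  # e.g. 3 for the third published version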
(third changed file, path not captured; the hunks below modify the IntegrateAssetNew publish plugin)
@@ -84,7 +84,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         "fbx",
         "textures",
         "action",
-        "harmony.template"
+        "harmony.template",
+        "harmony.palette",
+        "editorial"
     ]
     exclude_families = ["clip"]
     db_representation_context_keys = [
@@ -111,7 +113,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             self.log.info("instance.data: {}".format(instance.data))
             self.handle_destination_files(self.integrated_file_sizes,
                                           'finalize')
-        except Exception as e:
+        except Exception:
             # clean destination
             self.log.critical("Error when registering", exc_info=True)
             self.handle_destination_files(self.integrated_file_sizes, 'remove')
@@ -155,6 +157,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         if task_name:
             anatomy_data["task"] = task_name

+        anatomy_data["family"] = instance.data.get("family")
+
         stagingdir = instance.data.get("stagingDir")
         if not stagingdir:
             self.log.info((
@@ -398,8 +402,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             dst = "{0}{1}{2}".format(
                 dst_head,
                 dst_padding,
-                dst_tail
-            ).replace("..", ".")
+                dst_tail).replace("..", ".")

             self.log.debug("destination: `{}`".format(dst))
             src = os.path.join(stagingdir, src_file_name)
@@ -606,12 +609,19 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

         # copy file with speedcopy and check if size of files are simetrical
         while True:
+            import shutil
             try:
                 copyfile(src, dst)
-            except OSError as e:
-                self.log.critical("Cannot copy {} to {}".format(src, dst))
-                self.log.critical(e)
-                six.reraise(*sys.exc_info())
+            except shutil.SameFileError as sfe:
+                self.log.critical("files are the same {} to {}".format(src, dst))
+                os.remove(dst)
+                try:
+                    shutil.copyfile(src, dst)
+                    self.log.debug("Copying files with shutil...")
+                except (OSError) as e:
+                    self.log.critical("Cannot copy {} to {}".format(src, dst))
+                    self.log.critical(e)
+                    six.reraise(*sys.exc_info())
             if str(getsize(src)) in str(getsize(dst)):
                 break

@@ -648,7 +658,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             "type": "subset",
             "name": subset_name,
             "data": {
-                "families": instance.data.get('families')
+                "families": instance.data.get("families", [])
             },
             "parent": asset["_id"]
         }).inserted_id
@@ -761,7 +771,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         task_name = io.Session.get("AVALON_TASK")
         family = self.main_family_from_instance(instance)

-        matching_profiles = None
+        matching_profiles = {}
         highest_value = -1
         self.log.info(self.template_name_profiles)
         for name, filters in self.template_name_profiles.items():
@@ -860,13 +870,13 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         for src, dest in resources:
             path = self.get_rootless_path(anatomy, dest)
             dest = self.get_dest_temp_url(dest)
-            hash = pype.api.source_hash(dest)
-            if self.TMP_FILE_EXT and ',{}'.format(self.TMP_FILE_EXT) in hash:
-                hash = hash.replace(',{}'.format(self.TMP_FILE_EXT), '')
+            file_hash = pype.api.source_hash(dest)
+            if self.TMP_FILE_EXT and ',{}'.format(self.TMP_FILE_EXT) in file_hash:
+                file_hash = file_hash.replace(',{}'.format(self.TMP_FILE_EXT), '')

             file_info = self.prepare_file_info(path,
                                                integrated_file_sizes[dest],
-                                               hash)
+                                               file_hash)
             output_resources.append(file_info)

         return output_resources
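The ',{}'.format(self.TMP_FILE_EXT) stripping above relies on how source_hash (pype/lib.py) builds its identifier: file name, mtime and size joined with "|", with every "." replaced by ",". A hedged illustration with made-up values, assuming TMP_FILE_EXT is "tmp":

    # dest still carries the temporary extension, e.g. ".../render_v012.exr.tmp",
    # so the hash contains ",tmp"; stripping it leaves the hash keyed to the
    # final file name.
    file_hash = "render_v012,exr,tmp|1589379741,5|1024"  # illustrative value
    file_hash = file_hash.replace(",tmp", "")
    # -> "render_v012,exr|1589379741,5|1024"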
@@ -885,13 +895,13 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             dest += '.{}'.format(self.TMP_FILE_EXT)
         return dest

-    def prepare_file_info(self, path, size=None, hash=None, sites=None):
+    def prepare_file_info(self, path, size=None, file_hash=None, sites=None):
        """ Prepare information for one file (asset or resource)

        Arguments:
            path: destination url of published file (rootless)
            size(optional): size of file in bytes
-            hash(optional): hash of file for synchronization validation
+            file_hash(optional): hash of file for synchronization validation
            sites(optional): array of published locations, ['studio'] by default
                             expected ['studio', 'site1', 'gdrive1']
        Returns:
@@ -905,8 +915,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         if size:
             rec["size"] = size

-        if hash:
-            rec["hash"] = hash
+        if file_hash:
+            rec["hash"] = file_hash

         if sites:
             rec["sites"] = sites
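For reference, a sketch of the record prepare_file_info assembles from these optional arguments; only the keys visible in this hunk are shown, the rest of the record (including the path handling) is built outside it, and the values are made up:

    # prepare_file_info(path, size=1024, file_hash="...", sites=["studio"])
    # produces a dict along the lines of:
    # {
    #     ...,                  # keys set earlier, e.g. the rootless path
    #     "size": 1024,
    #     "hash": "render_v012,exr|1589379741,5|1024",
    #     "sites": ["studio"]
    # }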