[Automated] Merged develop into main

pypebot 2021-11-20 04:34:27 +01:00 committed by GitHub
commit de6fb66337
28 changed files with 425 additions and 123 deletions

View file

@ -10,6 +10,7 @@ import tempfile
from pathlib import Path
from typing import Union, Callable, List, Tuple
import hashlib
import platform
from zipfile import ZipFile, BadZipFile
@ -196,21 +197,23 @@ class OpenPypeVersion(semver.VersionInfo):
return str(self.finalize_version())
@staticmethod
def version_in_str(string: str) -> Tuple:
def version_in_str(string: str) -> Union[None, OpenPypeVersion]:
"""Find OpenPype version in given string.
Args:
string (str): string to search.
Returns:
tuple: True/False and OpenPypeVersion if found.
OpenPypeVersion: detected version or None.
"""
m = re.search(OpenPypeVersion._VERSION_REGEX, string)
if not m:
return False, None
return None
version = OpenPypeVersion.parse(string[m.start():m.end()])
return True, version
if "staging" in string[m.start():m.end()]:
version.staging = True
return version
@classmethod
def parse(cls, version):
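With this change `version_in_str` returns the `OpenPypeVersion` directly (or `None`) instead of a `(bool, version)` tuple, so callers test the result itself. A minimal sketch of the new calling convention, with a made-up input string:

```python
# Sketch only: the input string and printed output are illustrative.
from igniter.bootstrap_repos import OpenPypeVersion

version = OpenPypeVersion.version_in_str("openpype-v3.6.4-staging.zip")
if version is None:
    print("no OpenPype version found in string")
else:
    # A "staging" substring in the matched part flags the version as staging.
    print(f"found {version}")
```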
@ -531,6 +534,7 @@ class BootstrapRepos:
processed_path = file
self._print(f"- processing {processed_path}")
checksums.append(
(
sha256sum(file.as_posix()),
@ -542,7 +546,10 @@ class BootstrapRepos:
checksums_str = ""
for c in checksums:
checksums_str += "{}:{}\n".format(c[0], c[1])
file_str = c[1]
if platform.system().lower() == "windows":
file_str = c[1].as_posix().replace("\\", "/")
checksums_str += "{}:{}\n".format(c[0], file_str)
zip_file.writestr("checksums", checksums_str)
# test if zip is ok
zip_file.testzip()
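The `checksums` entry written into the zip is a plain-text manifest with one `sha256:relative/path` line per file, and the separator handling above keeps those paths forward-slashed even when the zip is built on Windows. A rough sketch of producing such a manifest for an extracted version directory (the directory name and layout are assumptions):

```python
# Sketch of the manifest format: "<sha256>:<relative path with forward slashes>".
import hashlib
from pathlib import Path

def sha256sum(filename: str) -> str:
    """Hex sha256 of a file's content, read in chunks."""
    h = hashlib.sha256()
    with open(filename, "rb") as f:
        for chunk in iter(lambda: f.read(128 * 1024), b""):
            h.update(chunk)
    return h.hexdigest()

root = Path("openpype-v3.6.4")  # hypothetical extracted version directory
lines = []
for item in sorted(p for p in root.rglob("*") if p.is_file()):
    rel = item.relative_to(root).as_posix()  # as_posix() always yields "/"
    lines.append(f"{sha256sum(str(item))}:{rel}")
checksums_str = "\n".join(lines) + "\n"
```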
@ -563,6 +570,8 @@ class BootstrapRepos:
and string with reason as second.
"""
if os.getenv("OPENPYPE_DONT_VALIDATE_VERSION"):
return True, "Disabled validation"
if not path.exists():
return False, "Path doesn't exist"
@ -589,13 +598,16 @@ class BootstrapRepos:
# calculate and compare checksums in the zip file
for file in checksums:
file_name = file[1]
if platform.system().lower() == "windows":
file_name = file_name.replace("/", "\\")
h = hashlib.sha256()
try:
h.update(zip_file.read(file[1]))
h.update(zip_file.read(file_name))
except FileNotFoundError:
return False, f"Missing file [ {file[1]} ]"
return False, f"Missing file [ {file_name} ]"
if h.hexdigest() != file[0]:
return False, f"Invalid checksum on {file[1]}"
return False, f"Invalid checksum on {file_name}"
# get list of files in zip minus `checksums` file itself
# and turn in to set to compare against list of files
@ -604,7 +616,7 @@ class BootstrapRepos:
files_in_zip = zip_file.namelist()
files_in_zip.remove("checksums")
files_in_zip = set(files_in_zip)
files_in_checksum = set([file[1] for file in checksums])
files_in_checksum = {file[1] for file in checksums}
diff = files_in_zip.difference(files_in_checksum)
if diff:
return False, f"Missing files {diff}"
@ -628,16 +640,19 @@ class BootstrapRepos:
]
files_in_dir.remove("checksums")
files_in_dir = set(files_in_dir)
files_in_checksum = set([file[1] for file in checksums])
files_in_checksum = {file[1] for file in checksums}
for file in checksums:
file_name = file[1]
if platform.system().lower() == "windows":
file_name = file_name.replace("/", "\\")
try:
current = sha256sum((path / file[1]).as_posix())
current = sha256sum((path / file_name).as_posix())
except FileNotFoundError:
return False, f"Missing file [ {file[1]} ]"
return False, f"Missing file [ {file_name} ]"
if file[0] != current:
return False, f"Invalid checksum on {file[1]}"
return False, f"Invalid checksum on {file_name}"
diff = files_in_dir.difference(files_in_checksum)
if diff:
return False, f"Missing files {diff}"
@ -1161,9 +1176,9 @@ class BootstrapRepos:
name = item.name if item.is_dir() else item.stem
result = OpenPypeVersion.version_in_str(name)
if result[0]:
if result:
detected_version: OpenPypeVersion
detected_version = result[1]
detected_version = result
if item.is_dir() and not self._is_openpype_in_dir(
item, detected_version

View file

@ -17,6 +17,7 @@ from .lib import (
version_up,
get_asset,
get_hierarchy,
get_workdir_data,
get_version_from_path,
get_last_version_from_path,
get_app_environments_for_context,

View file

@ -384,3 +384,15 @@ def syncserver(debug, active_site):
if debug:
os.environ['OPENPYPE_DEBUG'] = '3'
PypeCommands().syncserver(active_site)
@main.command()
@click.argument("directory")
def repack_version(directory):
"""Repack OpenPype version from directory.
This command will re-create the zip file from the specified directory,
recalculating file checksums. It will try to use the version detected in
the directory name.
"""
PypeCommands().repack_version(directory)

View file

@ -126,7 +126,8 @@ class CollectFarmRender(openpype.lib.abstract_collect_render.
# because of using 'renderFarm' as a family, replace 'Farm' with
# capitalized task name - issue of avalon-core Creator app
subset_name = node.split("/")[1]
task_name = context.data["anatomyData"]["task"].capitalize()
task_name = context.data["anatomyData"]["task"][
"name"].capitalize()
replace_str = ""
if task_name.lower() not in subset_name.lower():
replace_str = task_name

View file

@ -28,7 +28,7 @@ class CollectPalettes(pyblish.api.ContextPlugin):
# skip collecting if not in allowed task
if self.allowed_tasks:
task_name = context.data["anatomyData"]["task"].lower()
task_name = context.data["anatomyData"]["task"]["name"].lower()
if (not any([re.search(pattern, task_name)
for pattern in self.allowed_tasks])):
return

View file

@ -67,6 +67,16 @@ from avalon.houdini import pipeline
pipeline.reload_pipeline()]]></scriptCode>
</scriptItem>
</subMenu>
<separatorItem/>
<scriptItem id="experimental_tools">
<label>Experimental tools...</label>
<scriptCode><![CDATA[
import hou
from openpype.tools.utils import host_tools
parent = hou.qt.mainWindow()
host_tools.show_experimental_tools_dialog(parent)]]></scriptCode>
</scriptItem>
</subMenu>
</menuBar>
</mainMenu>

View file

@ -18,7 +18,7 @@ from openpype.api import (
BuildWorkfile,
get_version_from_path,
get_anatomy_settings,
get_hierarchy,
get_workdir_data,
get_asset,
get_current_project_settings,
ApplicationManager
@ -268,15 +268,21 @@ def format_anatomy(data):
if not version:
file = script_name()
data["version"] = get_version_from_path(file)
project_document = io.find_one({"type": "project"})
project_doc = io.find_one({"type": "project"})
asset_doc = io.find_one({
"type": "asset",
"name": data["avalon"]["asset"]
})
task_name = os.environ["AVALON_TASK"]
host_name = os.environ["AVALON_APP"]
context_data = get_workdir_data(
project_doc, asset_doc, task_name, host_name
)
data.update(context_data)
data.update({
"subset": data["avalon"]["subset"],
"asset": data["avalon"]["asset"],
"task": os.environ["AVALON_TASK"],
"family": data["avalon"]["family"],
"project": {"name": project_document["name"],
"code": project_document["data"].get("code", '')},
"hierarchy": get_hierarchy(),
"frame": "#" * padding,
})
return anatomy.format(data)

View file

@ -49,10 +49,22 @@ class CollectHarmonyScenes(pyblish.api.InstancePlugin):
# fix anatomy data
anatomy_data_new = copy.deepcopy(anatomy_data)
project_entity = context.data["projectEntity"]
asset_entity = context.data["assetEntity"]
task_type = asset_entity["data"]["tasks"].get(task, {}).get("type")
project_task_types = project_entity["config"]["tasks"]
task_code = project_task_types.get(task_type, {}).get("short_name")
# updating hierarchy data
anatomy_data_new.update({
"asset": asset_data["name"],
"task": task,
"task": {
"name": task,
"type": task_type,
"short": task_code,
},
"subset": subset_name
})

View file

@ -27,6 +27,7 @@ class CollectHarmonyZips(pyblish.api.InstancePlugin):
anatomy_data = instance.context.data["anatomyData"]
repres = instance.data["representations"]
files = repres[0]["files"]
project_entity = context.data["projectEntity"]
if files.endswith(".zip"):
# A zip file was dropped
@ -45,14 +46,24 @@ class CollectHarmonyZips(pyblish.api.InstancePlugin):
self.log.info("Copied data: {}".format(new_instance.data))
task_type = asset_data["data"]["tasks"].get(task, {}).get("type")
project_task_types = project_entity["config"]["tasks"]
task_code = project_task_types.get(task_type, {}).get("short_name")
# fix anatomy data
anatomy_data_new = copy.deepcopy(anatomy_data)
# updating hierarchy data
anatomy_data_new.update({
"asset": asset_data["name"],
"task": task,
"subset": subset_name
})
anatomy_data_new.update(
{
"asset": asset_data["name"],
"task": {
"name": task,
"type": task_type,
"short": task_code,
},
"subset": subset_name
}
)
new_instance.data["label"] = f"{instance_name}"
new_instance.data["subset"] = subset_name

View file

@ -11,7 +11,7 @@ import zipfile
import pyblish.api
from avalon import api, io
import openpype.api
from openpype.lib import get_workfile_template_key_from_context
from openpype.lib import get_workfile_template_key
class ExtractHarmonyZip(openpype.api.Extractor):
@ -31,8 +31,10 @@ class ExtractHarmonyZip(openpype.api.Extractor):
# Presets
create_workfile = True
default_task = "harmonyIngest"
default_task_type = "Ingest"
default_task = {
"name": "harmonyIngest",
"type": "Ingest",
}
default_task_status = "Ingested"
assetversion_status = "Ingested"
@ -219,6 +221,19 @@ class ExtractHarmonyZip(openpype.api.Extractor):
# Setup the data needed to form a valid work path filename
anatomy = openpype.api.Anatomy()
project_entity = instance.context.data["projectEntity"]
asset_entity = io.find_one({
"type": "asset",
"name": instance.data["asset"]
})
task_name = instance.data.get("task")
task_type = asset_entity["data"]["tasks"][task_name].get("type")
if task_type:
task_short = project_entity["config"]["tasks"].get(
task_type, {}).get("short_name")
else:
task_short = None
data = {
"root": api.registered_root(),
@ -229,18 +244,20 @@ class ExtractHarmonyZip(openpype.api.Extractor):
"asset": instance.data["asset"],
"hierarchy": openpype.api.get_hierarchy(instance.data["asset"]),
"family": instance.data["family"],
"task": instance.data.get("task"),
"task": {
"name": task_name,
"type": task_type,
"short": task_short,
},
"subset": instance.data["subset"],
"version": 1,
"ext": "zip",
}
host_name = "harmony"
template_name = get_workfile_template_key_from_context(
instance.data["asset"],
instance.data.get("task"),
template_name = get_workfile_template_key(
instance.data.get("task").get("type"),
host_name,
project_name=project_entity["name"],
dbcon=io
)
# Get a valid work filename first with version 1

View file

@ -989,6 +989,14 @@ class Templates:
invalid_required = []
missing_required = []
replace_keys = []
task_data = data.get("task")
if (
isinstance(task_data, StringType)
and "{task[name]}" in orig_template
):
data["task"] = {"name": task_data}
for group in self.key_pattern.findall(template):
orig_key = group[1:-1]
key = str(orig_key)
@ -1074,6 +1082,10 @@ class Templates:
output = collections.defaultdict(dict)
for key, orig_value in templates.items():
if isinstance(orig_value, StringType):
# Replace {task} by '{task[name]}' for backward compatibility
if '{task}' in orig_value:
orig_value = orig_value.replace('{task}', '{task[name]}')
output[key] = self._format(orig_value, data)
continue
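The shim above keeps old anatomy templates working: a bare `{task}` placeholder is rewritten to `{task[name]}` before formatting, because task data is now a dictionary. A minimal sketch of that rewrite with plain `str.format` (the template and values here are made up, not the project defaults):

```python
# Illustrative only; real templates are filled through Anatomy/Templates.
template = "{project[code]}_{asset}_{task}_v{version:0>3}.{ext}"

# Backward compatibility: rewrite old-style "{task}" to "{task[name]}".
if "{task}" in template:
    template = template.replace("{task}", "{task[name]}")

data = {
    "project": {"code": "prj"},
    "asset": "sh010",
    "task": {"name": "animation", "type": "Animation", "short": "anim"},
    "version": 1,
    "ext": "ma",
}
print(template.format(**data))  # prj_sh010_animation_v001.ma
```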

View file

@ -1280,23 +1280,12 @@ def prepare_context_environments(data):
anatomy = data["anatomy"]
asset_tasks = asset_doc.get("data", {}).get("tasks") or {}
task_info = asset_tasks.get(task_name) or {}
task_type = task_info.get("type")
task_type = workdir_data["task"]["type"]
# Temp solution how to pass task type to `_prepare_last_workfile`
data["task_type"] = task_type
workfile_template_key = get_workfile_template_key(
task_type,
app.host_name,
project_name=project_name,
project_settings=project_settings
)
try:
workdir = get_workdir_with_workdir_data(
workdir_data, anatomy, template_key=workfile_template_key
)
workdir = get_workdir_with_workdir_data(workdir_data, anatomy)
except Exception as exc:
raise ApplicationLaunchFailed(
@ -1329,10 +1318,10 @@ def prepare_context_environments(data):
)
data["env"].update(context_env)
_prepare_last_workfile(data, workdir, workfile_template_key)
_prepare_last_workfile(data, workdir)
def _prepare_last_workfile(data, workdir, workfile_template_key):
def _prepare_last_workfile(data, workdir):
"""last workfile workflow preparation.
Function check if should care about last workfile workflow and tries
@ -1395,6 +1384,10 @@ def _prepare_last_workfile(data, workdir, workfile_template_key):
anatomy = data["anatomy"]
# Find last workfile
file_template = anatomy.templates["work"]["file"]
# Replace {task} by '{task[name]}' for backward compatibility
if '{task}' in file_template:
file_template = file_template.replace('{task}', '{task[name]}')
workdir_data.update({
"version": 1,
"user": get_openpype_username(),

View file

@ -7,6 +7,7 @@ import platform
import logging
import collections
import functools
import getpass
from openpype.settings import get_project_settings
from .anatomy import Anatomy
@ -464,6 +465,7 @@ def get_workfile_template_key(
return default
# TODO rename function as is not just "work" specific
def get_workdir_data(project_doc, asset_doc, task_name, host_name):
"""Prepare data for workdir template filling from entered information.
@ -479,22 +481,31 @@ def get_workdir_data(project_doc, asset_doc, task_name, host_name):
"""
hierarchy = "/".join(asset_doc["data"]["parents"])
task_type = asset_doc['data']['tasks'].get(task_name, {}).get('type')
project_task_types = project_doc["config"]["tasks"]
task_code = project_task_types.get(task_type, {}).get("short_name")
data = {
"project": {
"name": project_doc["name"],
"code": project_doc["data"].get("code")
},
"task": task_name,
"task": {
"name": task_name,
"type": task_type,
"short": task_code,
},
"asset": asset_doc["name"],
"app": host_name,
"hierarchy": hierarchy
"user": getpass.getuser(),
"hierarchy": hierarchy,
}
return data
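`get_workdir_data` now also resolves the task type and its short name from the project config and returns the task as a nested dictionary, which is what the `{task[name]}`-style templates expect. A hedged sketch of the shape of the returned data and how a work folder template consumes it (all values are made up):

```python
# Hypothetical workdir data in the new shape; real data comes from project/asset docs.
workdir_data = {
    "project": {"name": "MyProject", "code": "mp"},
    "task": {"name": "modeling", "type": "Modeling", "short": "mdl"},
    "asset": "chair",
    "app": "maya",
    "user": "artist",
    "hierarchy": "assets/props",
}

# Default work folder template (see the anatomy defaults later in this diff).
folder_template = (
    "{root[work]}/{project[name]}/{hierarchy}/{asset}/work/{task[name]}"
)
print(folder_template.format(root={"work": "/projects"}, **workdir_data))
# -> /projects/MyProject/assets/props/chair/work/modeling
```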
def get_workdir_with_workdir_data(
workdir_data, anatomy=None, project_name=None,
template_key=None, dbcon=None
workdir_data, anatomy=None, project_name=None, template_key=None
):
"""Fill workdir path from entered data and project's anatomy.
@ -529,12 +540,10 @@ def get_workdir_with_workdir_data(
anatomy = Anatomy(project_name)
if not template_key:
template_key = get_workfile_template_key_from_context(
workdir_data["asset"],
workdir_data["task"],
template_key = get_workfile_template_key(
workdir_data["task"]["type"],
workdir_data["app"],
project_name=workdir_data["project"]["name"],
dbcon=dbcon
project_name=workdir_data["project"]["name"]
)
anatomy_filled = anatomy.format(workdir_data)
@ -648,7 +657,7 @@ def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None):
anatomy = Anatomy(project_doc["name"])
# Get workdir path (result is anatomy.TemplateResult)
template_workdir = get_workdir_with_workdir_data(
workdir_data, anatomy, dbcon=dbcon
workdir_data, anatomy
)
template_workdir_path = str(template_workdir).replace("\\", "/")

View file

@ -1,30 +0,0 @@
import pyblish.api
import os
class IntegrateCleanComponentData(pyblish.api.InstancePlugin):
"""
Cleaning up thumbnail and mov files after they have been integrated
"""
order = pyblish.api.IntegratorOrder + 0.5
label = 'Clean component data'
families = ["ftrack"]
optional = True
active = False
def process(self, instance):
for comp in instance.data['representations']:
self.log.debug('component {}'.format(comp))
if "%" in comp['published_path'] or "#" in comp['published_path']:
continue
if comp.get('thumbnail') or ("thumbnail" in comp.get('tags', [])):
os.remove(comp['published_path'])
self.log.info('Thumbnail image was erased')
elif comp.get('preview') or ("preview" in comp.get('tags', [])):
os.remove(comp['published_path'])
self.log.info('Preview mov file was erased')

View file

@ -192,7 +192,7 @@ class SFTPHandler(AbstractProvider):
Format is important for usage of python's format ** approach
"""
# roots cannot be locally overridden
return self.presets['roots']
return self.presets['root']
def get_tree(self):
"""

View file

@ -54,6 +54,12 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin):
if hierarchy_items:
hierarchy = os.path.join(*hierarchy_items)
asset_tasks = asset_entity["data"]["tasks"]
task_type = asset_tasks.get(task_name, {}).get("type")
project_task_types = project_entity["config"]["tasks"]
task_code = project_task_types.get(task_type, {}).get("short_name")
context_data = {
"project": {
"name": project_entity["name"],
@ -61,7 +67,11 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin):
},
"asset": asset_entity["name"],
"hierarchy": hierarchy.replace("\\", "/"),
"task": task_name,
"task": {
"name": task_name,
"type": task_type,
"short": task_code,
},
"username": context.data["user"],
"app": context.data["hostName"]
}

View file

@ -214,6 +214,8 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
project_doc = context.data["projectEntity"]
context_asset_doc = context.data["assetEntity"]
project_task_types = project_doc["config"]["tasks"]
for instance in context:
if self.follow_workfile_version:
version_number = context.data('version')
@ -245,7 +247,18 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
# Task
task_name = instance.data.get("task")
if task_name:
anatomy_updates["task"] = task_name
asset_tasks = asset_doc["data"]["tasks"]
task_type = asset_tasks.get(task_name, {}).get("type")
task_code = (
project_task_types
.get(task_type, {})
.get("short_name")
)
anatomy_updates["task"] = {
"name": task_name,
"type": task_type,
"short": task_code
}
# Additional data
resolution_width = instance.data.get("resolutionWidth")

View file

@ -184,7 +184,9 @@ class ExtractBurnin(openpype.api.Extractor):
for key in self.positions:
value = burnin_def.get(key)
if value:
burnin_values[key] = value
burnin_values[key] = value.replace(
"{task}", "{task[name]}"
)
# Remove "delete" tag from new representation
if "delete" in new_repre["tags"]:

View file

@ -172,21 +172,26 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
anatomy_data["hierarchy"] = hierarchy
# Make sure task name in anatomy data is same as on instance.data
task_name = instance.data.get("task")
if task_name:
anatomy_data["task"] = task_name
else:
# Just set 'task_name' variable to context task
task_name = anatomy_data["task"]
# Find task type for current task name
# - this should be already prepared on instance
asset_tasks = (
asset_entity.get("data", {}).get("tasks")
) or {}
task_info = asset_tasks.get(task_name) or {}
task_type = task_info.get("type")
instance.data["task_type"] = task_type
task_name = instance.data.get("task")
if task_name:
task_info = asset_tasks.get(task_name) or {}
task_type = task_info.get("type")
project_task_types = project_entity["config"]["tasks"]
task_code = project_task_types.get(task_type, {}).get("short_name")
anatomy_data["task"] = {
"name": task_name,
"type": task_type,
"short": task_code
}
else:
# Just set 'task_name' variable to context task
task_name = anatomy_data["task"]["name"]
task_type = anatomy_data["task"]["type"]
# Fill family in anatomy data
anatomy_data["family"] = instance.data.get("family")
@ -804,11 +809,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# - is there a chance that task name is not filled in anatomy
# data?
# - should we use context task in that case?
task_name = (
instance.data["anatomyData"]["task"]
or io.Session["AVALON_TASK"]
)
task_type = instance.data["task_type"]
task_name = instance.data["anatomyData"]["task"]["name"]
task_type = instance.data["anatomyData"]["task"]["type"]
filtering_criteria = {
"families": instance.data["family"],
"hosts": instance.context.data["hostName"],

View file

@ -392,3 +392,10 @@ class PypeCommands:
import time
while True:
time.sleep(1.0)
def repack_version(self, directory):
"""Repacking OpenPype version."""
from openpype.tools.repack_version import VersionRepacker
version_packer = VersionRepacker(directory)
version_packer.process()

View file

@ -6,8 +6,8 @@
"frame": "{frame:0>{@frame_padding}}"
},
"work": {
"folder": "{root[work]}/{project[name]}/{hierarchy}/{asset}/work/{task}",
"file": "{project[code]}_{asset}_{task}_{@version}<_{comment}>.{ext}",
"folder": "{root[work]}/{project[name]}/{hierarchy}/{asset}/work/{task[name]}",
"file": "{project[code]}_{asset}_{task[name]}_{@version}<_{comment}>.{ext}",
"path": "{@folder}/{@file}"
},
"render": {

View file

@ -11,6 +11,10 @@
"type": "dict",
"key": "defaults",
"children": [
{
"type": "label",
"label": "The list of existing placeholders is available here:<br> https://openpype.io/docs/admin_settings_project_anatomy/#available-template-keys "
},
{
"type": "number",
"key": "version_padding",

View file

@ -856,6 +856,7 @@ def get_anatomy_settings(
apply_local_settings_on_anatomy_settings(
result, local_settings, project_name, site_name
)
return result

View file

@ -0,0 +1,164 @@
# -*- coding: utf-8 -*-
"""Script to rehash and repack current version."""
import enlighten
import blessed
from pathlib import Path
import platform
from zipfile import ZipFile
from typing import List
import hashlib
import sys
from igniter.bootstrap_repos import OpenPypeVersion
class VersionRepacker:
def __init__(self, directory: str):
self._term = blessed.Terminal()
self._manager = enlighten.get_manager()
self._last_increment = 0
self.version_path = Path(directory)
self.zip_path = self.version_path.parent
_version = {}
with open(self.version_path / "openpype" / "version.py") as fp:
exec(fp.read(), _version)
self._version_py = _version["__version__"]
del _version
def _print(self, msg: str, message_type: int = 0) -> None:
"""Print message to console.
Args:
msg (str): message to print
message_type (int): type of message (0 info, 1 error, 2 note)
"""
if message_type == 0:
header = self._term.aquamarine3(">>> ")
elif message_type == 1:
header = self._term.orangered2("!!! ")
elif message_type == 2:
header = self._term.tan1("... ")
else:
header = self._term.darkolivegreen3("--- ")
print("{}{}".format(header, msg))
@staticmethod
def sha256sum(filename):
"""Calculate sha256 for content of the file.
Args:
filename (str): Path to file.
Returns:
str: hex encoded sha256
"""
h = hashlib.sha256()
b = bytearray(128 * 1024)
mv = memoryview(b)
with open(filename, 'rb', buffering=0) as f:
for n in iter(lambda: f.readinto(mv), 0):
h.update(mv[:n])
return h.hexdigest()
@staticmethod
def _filter_dir(path: Path, path_filter: List) -> List[Path]:
"""Recursively crawl over path and filter."""
result = []
for item in path.iterdir():
if item.name in path_filter:
continue
if item.name.startswith('.'):
continue
if item.is_dir():
result.extend(VersionRepacker._filter_dir(item, path_filter))
else:
result.append(item)
return result
def process(self):
if (self.version_path / "pyproject.toml").exists():
self._print(
("This cannot run on OpenPype sources. "
"Please run it on extracted version."), 1)
return
self._print(f"Rehashing and zipping {self.version_path}")
version = OpenPypeVersion.version_in_str(self.version_path.name)
if not version:
self._print("Cannot get version from directory", 1)
return
self._print(f"Detected version is {version}")
# replace version in version.py
self._replace_version(version, self.version_path)
self._print("Recalculating checksums ...", 2)
checksums = []
file_list = VersionRepacker._filter_dir(self.version_path, [])
progress_bar = enlighten.Counter(
total=len(file_list), desc="Calculating checksums",
nits="%", color="green")
for file in file_list:
checksums.append((
VersionRepacker.sha256sum(file.as_posix()),
file.resolve().relative_to(self.version_path),
file
))
progress_bar.update()
progress_bar.close()
progress_bar = enlighten.Counter(
total=len(checksums), desc="Zipping directory",
nits="%", color=(56, 211, 159))
zip_filename = self.zip_path / f"openpype-v{version}.zip"
with ZipFile(zip_filename, "w") as zip_file:
for item in checksums:
if item[1].as_posix() == "checksums":
progress_bar.update()
continue
zip_file.write(item[2], item[1])
progress_bar.update()
checksums_str = ""
for c in checksums:
file_str = c[1]
if platform.system().lower() == "windows":
file_str = c[1].as_posix().replace("\\", "/")
checksums_str += "{}:{}\n".format(c[0], file_str)
zip_file.writestr("checksums", checksums_str)
# test if zip is ok
zip_file.testzip()
self._print(f"All done, you can find new zip here: {zip_filename}")
@staticmethod
def _replace_version(version: OpenPypeVersion, path: Path):
"""Replace version in version.py.
Args:
version (OpenPypeVersion): OpenPype version to set
path (Path): Path to unzipped version.
"""
with open(path / "openpype" / "version.py", "r") as op_version_file:
replacement = ""
for line in op_version_file:
stripped_line = line.strip()
if stripped_line.strip().startswith("__version__ ="):
line = f'__version__ = "{version}"\n'
replacement += line
with open(path / "openpype" / "version.py", "w") as op_version_file:
op_version_file.write(replacement)
if __name__ == '__main__':
print(sys.argv[1])
version_packer = VersionRepacker(sys.argv[1])
version_packer.process()

View file

@ -59,20 +59,39 @@ class NameWindow(QtWidgets.QDialog):
# Set work file data for template formatting
asset_name = session["AVALON_ASSET"]
task_name = session["AVALON_TASK"]
project_doc = io.find_one(
{"type": "project"},
{
"name": True,
"data.code": True
"data.code": True,
"config.tasks": True,
}
)
asset_doc = io.find_one(
{
"type": "asset",
"name": asset_name
},
{"data.tasks": True}
)
task_type = asset_doc["data"]["tasks"].get(task_name, {}).get("type")
project_task_types = project_doc["config"]["tasks"]
task_short = project_task_types.get(task_type, {}).get("short_name")
self.data = {
"project": {
"name": project_doc["name"],
"code": project_doc["data"].get("code")
},
"asset": asset_name,
"task": session["AVALON_TASK"],
"task": {
"name": task_name,
"type": task_type,
"short": task_short,
},
"version": 1,
"user": getpass.getuser(),
"comment": "",
@ -640,7 +659,7 @@ class FilesWidget(QtWidgets.QWidget):
self.host.save_file(file_path)
self.set_asset_task(
self._asse_id, self._task_name, self._task_type
self._asset_id, self._task_name, self._task_type
)
pipeline.emit("after.workfile.save", [file_path])

View file

@ -140,7 +140,7 @@ def test_search_string_for_openpype_version(printer):
]
for ver_string in strings:
printer(f"testing {ver_string[0]} should be {ver_string[1]}")
assert OpenPypeVersion.version_in_str(ver_string[0])[0] == \
assert OpenPypeVersion.version_in_str(ver_string[0]) == \
ver_string[1]

View file

@ -32,7 +32,10 @@ For more information [see here](admin_use#run-openpype).
| Command | Description | Arguments |
| --- | --- | :---: |
| tray | Launch OpenPype Tray. | [📑](#tray-arguments)
| contextselection | Open Context selection dialog. | |
| module | Run command line arguments for modules. | |
| repack-version | Tool to re-create version zip. | [📑](#repack-version-arguments) |
| tray | Launch OpenPype Tray. | [📑](#tray-arguments)
| eventserver | This should ideally be used by a system service (such as systemd or upstart on Linux, or a Windows service). | [📑](#eventserver-arguments) |
| launch | Launch application in Pype environment. | [📑](#launch-arguments) |
| publish | Pype takes JSON from the provided path and uses it to publish the data in it. | [📑](#publish-arguments) |
@ -156,4 +159,10 @@ openpypeconsole settings
`standalonepublisher` has no command-line arguments.
```shell
openpype_console standalonepublisher
```
```
### `repack-version` arguments {#repack-version-arguments}
Takes a path to an unzipped and possibly modified OpenPype version. Files will be
zipped, checksums recalculated, and the version determined from the folder name
(and written to `version.py`).
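For example (the version path here is only an illustration):
```shell
openpype_console repack-version /path/to/openpype-v3.6.4
```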

View file

@ -57,7 +57,9 @@ We have a few required anatomy templates for OpenPype to work properly, however
| `project[code]` | Project's code |
| `hierarchy` | All hierarchical parents as subfolders |
| `asset` | Name of asset or shot |
| `task` | Name of task |
| `task[name]` | Name of task |
| `task[type]` | Type of task |
| `task[short]` | Shortname of task |
| `version` | Version number |
| `subset` | Subset name |
| `family` | Main family name |