mirror of https://github.com/ynput/ayon-core.git
synced 2025-12-24 21:04:40 +01:00

commit 19b97780dd
Merge branch 'develop' into feature/1159-maya-safer-handling-of-expected-render-output-names

57 changed files with 1624 additions and 639 deletions
.dockerignore (new file, 146 lines added)
@@ -0,0 +1,146 @@
# Created by .ignore support plugin (hsz.mobi)
### Python template
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

.poetry/
.github/
vendor/bin/
docs/
website/
Dockerfile (new file, 82 lines added)
@@ -0,0 +1,82 @@
# Build Pype docker image
FROM centos:7 AS builder
ARG OPENPYPE_PYTHON_VERSION=3.7.10

LABEL org.opencontainers.image.name="pypeclub/openpype"
LABEL org.opencontainers.image.title="OpenPype Docker Image"
LABEL org.opencontainers.image.url="https://openpype.io/"
LABEL org.opencontainers.image.source="https://github.com/pypeclub/pype"

USER root

# update base
RUN yum -y install deltarpm \
    && yum -y update \
    && yum clean all

# add tools we need
RUN yum -y install https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm \
    && yum -y install centos-release-scl \
    && yum -y install \
    bash \
    which \
    git \
    devtoolset-7-gcc* \
    make \
    cmake \
    curl \
    wget \
    gcc \
    zlib-devel \
    bzip2 \
    bzip2-devel \
    readline-devel \
    sqlite sqlite-devel \
    openssl-devel \
    tk-devel libffi-devel \
    qt5-qtbase-devel \
    patchelf \
    && yum clean all

RUN mkdir /opt/openpype
# RUN useradd -m pype
# RUN chown pype /opt/openpype
# USER pype

RUN curl https://pyenv.run | bash
ENV PYTHON_CONFIGURE_OPTS --enable-shared

RUN echo 'export PATH="$HOME/.pyenv/bin:$PATH"'>> $HOME/.bashrc \
    && echo 'eval "$(pyenv init -)"' >> $HOME/.bashrc \
    && echo 'eval "$(pyenv virtualenv-init -)"' >> $HOME/.bashrc \
    && echo 'eval "$(pyenv init --path)"' >> $HOME/.bashrc
RUN source $HOME/.bashrc && pyenv install ${OPENPYPE_PYTHON_VERSION}

COPY . /opt/openpype/
RUN rm -rf /openpype/.poetry || echo "No Poetry installed yet."
# USER root
# RUN chown -R pype /opt/openpype
RUN chmod +x /opt/openpype/tools/create_env.sh && chmod +x /opt/openpype/tools/build.sh

# USER pype

WORKDIR /opt/openpype

RUN cd /opt/openpype \
    && source $HOME/.bashrc \
    && pyenv local ${OPENPYPE_PYTHON_VERSION}

RUN source $HOME/.bashrc \
    && ./tools/create_env.sh

RUN source $HOME/.bashrc \
    && ./tools/fetch_thirdparty_libs.sh

RUN source $HOME/.bashrc \
    && bash ./tools/build.sh \
    && cp /usr/lib64/libffi* ./build/exe.linux-x86_64-3.7/lib \
    && cp /usr/lib64/libssl* ./build/exe.linux-x86_64-3.7/lib \
    && cp /usr/lib64/libcrypto* ./build/exe.linux-x86_64-3.7/lib

RUN cd /opt/openpype \
    rm -rf ./vendor/bin
@ -67,6 +67,15 @@ def patched_discover(superclass):
|
|||
@import_wrapper
|
||||
def install():
|
||||
"""Install Pype to Avalon."""
|
||||
from pyblish.lib import MessageHandler
|
||||
|
||||
def modified_emit(obj, record):
|
||||
"""Method replacing `emit` in Pyblish's MessageHandler."""
|
||||
record.msg = record.getMessage()
|
||||
obj.records.append(record)
|
||||
|
||||
MessageHandler.emit = modified_emit
|
||||
|
||||
log.info("Registering global plug-ins..")
|
||||
pyblish.register_plugin_path(PUBLISH_PATH)
|
||||
pyblish.register_discovery_filter(filter_pyblish_plugins)
|
||||
|
|
|
|||
|
|
@ -112,38 +112,4 @@ def get_asset_settings():
|
|||
"duration": duration
|
||||
}
|
||||
|
||||
try:
|
||||
# temporary, in pype3 replace with api.get_current_project_settings
|
||||
skip_resolution_check = (
|
||||
api.get_current_project_settings()
|
||||
["plugins"]
|
||||
["aftereffects"]
|
||||
["publish"]
|
||||
["ValidateSceneSettings"]
|
||||
["skip_resolution_check"]
|
||||
)
|
||||
skip_timelines_check = (
|
||||
api.get_current_project_settings()
|
||||
["plugins"]
|
||||
["aftereffects"]
|
||||
["publish"]
|
||||
["ValidateSceneSettings"]
|
||||
["skip_timelines_check"]
|
||||
)
|
||||
except KeyError:
|
||||
skip_resolution_check = ['*']
|
||||
skip_timelines_check = ['*']
|
||||
|
||||
if os.getenv('AVALON_TASK') in skip_resolution_check or \
|
||||
'*' in skip_timelines_check:
|
||||
scene_data.pop("resolutionWidth")
|
||||
scene_data.pop("resolutionHeight")
|
||||
|
||||
if entity_type in skip_timelines_check or '*' in skip_timelines_check:
|
||||
scene_data.pop('fps', None)
|
||||
scene_data.pop('frameStart', None)
|
||||
scene_data.pop('frameEnd', None)
|
||||
scene_data.pop('handleStart', None)
|
||||
scene_data.pop('handleEnd', None)
|
||||
|
||||
return scene_data
|
||||
|
|
|
|||
|
|
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
"""Validate scene settings."""
import os
import re

import pyblish.api

@@ -56,13 +57,26 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
    hosts = ["aftereffects"]
    optional = True

    skip_timelines_check = ["*"]  # * >> skip for all
    skip_resolution_check = ["*"]
    skip_timelines_check = [".*"]  # * >> skip for all
    skip_resolution_check = [".*"]

    def process(self, instance):
        """Plugin entry point."""
        expected_settings = api.get_asset_settings()
        self.log.info("expected_settings::{}".format(expected_settings))
        self.log.info("config from DB::{}".format(expected_settings))

        if any(re.search(pattern, os.getenv('AVALON_TASK'))
               for pattern in self.skip_resolution_check):
            expected_settings.pop("resolutionWidth")
            expected_settings.pop("resolutionHeight")

        if any(re.search(pattern, os.getenv('AVALON_TASK'))
               for pattern in self.skip_timelines_check):
            expected_settings.pop('fps', None)
            expected_settings.pop('frameStart', None)
            expected_settings.pop('frameEnd', None)
            expected_settings.pop('handleStart', None)
            expected_settings.pop('handleEnd', None)

        # handle case where ftrack uses only two decimal places
        # 23.976023976023978 vs. 23.98

@@ -76,6 +90,8 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
        duration = instance.data.get("frameEndHandle") - \
            instance.data.get("frameStartHandle") + 1

        self.log.debug("filtered config::{}".format(expected_settings))

        current_settings = {
            "fps": fps,
            "frameStartHandle": instance.data.get("frameStartHandle"),
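The switch from "*" to ".*" above matters because the skip lists are now treated as regular expressions: each entry is passed to re.search() against the task name, and a bare "*" is not a valid pattern. A minimal sketch of the idiom, with illustrative values (the real ones come from the project settings and the AVALON_TASK environment variable):

import os
import re

# illustrative values only -- the plugin reads these from project settings
# and from the AVALON_TASK environment variable
skip_resolution_check = [".*"]          # ".*" matches any task name
task_name = os.getenv("AVALON_TASK") or "compositing"

# a bare "*" would raise re.error ("nothing to repeat"); ".*" is the
# regex spelling of "match anything"
if any(re.search(pattern, task_name) for pattern in skip_resolution_check):
    print("resolution check skipped for task:", task_name)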
@ -3,6 +3,7 @@
|
|||
import os
|
||||
from pathlib import Path
|
||||
import logging
|
||||
import re
|
||||
|
||||
from openpype import lib
|
||||
from openpype.api import (get_current_project_settings)
|
||||
|
|
@ -63,26 +64,9 @@ def get_asset_settings():
|
|||
"handleStart": handle_start,
|
||||
"handleEnd": handle_end,
|
||||
"resolutionWidth": resolution_width,
|
||||
"resolutionHeight": resolution_height
|
||||
"resolutionHeight": resolution_height,
|
||||
"entityType": entity_type
|
||||
}
|
||||
settings = get_current_project_settings()
|
||||
|
||||
try:
|
||||
skip_resolution_check = \
|
||||
settings["harmony"]["general"]["skip_resolution_check"]
|
||||
skip_timelines_check = \
|
||||
settings["harmony"]["general"]["skip_timelines_check"]
|
||||
except KeyError:
|
||||
skip_resolution_check = []
|
||||
skip_timelines_check = []
|
||||
|
||||
if os.getenv('AVALON_TASK') in skip_resolution_check:
|
||||
scene_data.pop("resolutionWidth")
|
||||
scene_data.pop("resolutionHeight")
|
||||
|
||||
if entity_type in skip_timelines_check:
|
||||
scene_data.pop('frameStart', None)
|
||||
scene_data.pop('frameEnd', None)
|
||||
|
||||
return scene_data
|
||||
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@
|
|||
"""Validate scene settings."""
|
||||
import os
|
||||
import json
|
||||
import re
|
||||
|
||||
import pyblish.api
|
||||
|
||||
|
|
@ -41,22 +42,42 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
|
|||
families = ["workfile"]
|
||||
hosts = ["harmony"]
|
||||
actions = [ValidateSceneSettingsRepair]
|
||||
optional = True
|
||||
|
||||
frame_check_filter = ["_ch_", "_pr_", "_intd_", "_extd_"]
|
||||
# used for skipping resolution validation for render tasks
|
||||
render_check_filter = ["render", "Render"]
|
||||
# skip frameEnd check if asset contains any of:
|
||||
frame_check_filter = ["_ch_", "_pr_", "_intd_", "_extd_"] # regex
|
||||
|
||||
# skip resolution check if Task name matches any of regex patterns
|
||||
skip_resolution_check = ["render", "Render"] # regex
|
||||
|
||||
# skip frameStart, frameEnd check if Task name matches any of regex patt.
|
||||
skip_timelines_check = [] # regex
|
||||
|
||||
def process(self, instance):
|
||||
"""Plugin entry point."""
|
||||
expected_settings = openpype.hosts.harmony.api.get_asset_settings()
|
||||
self.log.info(expected_settings)
|
||||
self.log.info("scene settings from DB:".format(expected_settings))
|
||||
|
||||
expected_settings = _update_frames(dict.copy(expected_settings))
|
||||
expected_settings["frameEndHandle"] = expected_settings["frameEnd"] +\
|
||||
expected_settings["handleEnd"]
|
||||
|
||||
if any(string in instance.context.data['anatomyData']['asset']
|
||||
for string in self.frame_check_filter):
|
||||
if (any(re.search(pattern, os.getenv('AVALON_TASK'))
|
||||
for pattern in self.skip_resolution_check)):
|
||||
expected_settings.pop("resolutionWidth")
|
||||
expected_settings.pop("resolutionHeight")
|
||||
|
||||
entity_type = expected_settings.get("entityType")
|
||||
if (any(re.search(pattern, entity_type)
|
||||
for pattern in self.skip_timelines_check)):
|
||||
expected_settings.pop('frameStart', None)
|
||||
expected_settings.pop('frameEnd', None)
|
||||
|
||||
expected_settings.pop("entityType") # not useful after the check
|
||||
|
||||
asset_name = instance.context.data['anatomyData']['asset']
|
||||
if any(re.search(pattern, asset_name)
|
||||
for pattern in self.frame_check_filter):
|
||||
expected_settings.pop("frameEnd")
|
||||
|
||||
# handle case where ftrack uses only two decimal places
|
||||
|
|
@ -66,13 +87,7 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
|
|||
fps = float(
|
||||
"{:.2f}".format(instance.context.data.get("frameRate")))
|
||||
|
||||
if any(string in instance.context.data['anatomyData']['task']
|
||||
for string in self.render_check_filter):
|
||||
self.log.debug("Render task detected, resolution check skipped")
|
||||
expected_settings.pop("resolutionWidth")
|
||||
expected_settings.pop("resolutionHeight")
|
||||
|
||||
self.log.debug(expected_settings)
|
||||
self.log.debug("filtered settings: {}".format(expected_settings))
|
||||
|
||||
current_settings = {
|
||||
"fps": fps,
|
||||
|
|
@ -84,7 +99,7 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
|
|||
"resolutionWidth": instance.context.data.get("resolutionWidth"),
|
||||
"resolutionHeight": instance.context.data.get("resolutionHeight"),
|
||||
}
|
||||
self.log.debug("curr:: {}".format(current_settings))
|
||||
self.log.debug("current scene settings {}".format(current_settings))
|
||||
|
||||
invalid_settings = []
|
||||
for key, value in expected_settings.items():
|
||||
|
|
|
|||
|
|
@@ -96,19 +96,25 @@ class ExtractPlayblast(openpype.api.Extractor):
        # Remove panel key since it's internal value to capture_gui
        preset.pop("panel", None)


        self.log.info('using viewport preset: {}'.format(preset))

        path = capture.capture(**preset)
        playblast = self._fix_playblast_output_path(path)

        self.log.info("file list {}".format(playblast))
        self.log.debug("playblast path {}".format(path))

        collected_frames = os.listdir(stagingdir)
        collections, remainder = clique.assemble(collected_frames)
        input_path = os.path.join(
            stagingdir, collections[0].format('{head}{padding}{tail}'))
        self.log.info("input {}".format(input_path))
        collected_files = os.listdir(stagingdir)
        collections, remainder = clique.assemble(collected_files)

        self.log.debug("filename {}".format(filename))
        frame_collection = None
        for collection in collections:
            filebase = collection.format('{head}').rstrip(".")
            self.log.debug("collection head {}".format(filebase))
            if filebase in filename:
                frame_collection = collection
                self.log.info(
                    "we found collection of interest {}".format(
                        str(frame_collection)))

        if "representations" not in instance.data:
            instance.data["representations"] = []
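The hunk above replaces the naive collections[0] pick with a search for the collection whose head matches the playblast file name, so stray files in the staging directory can no longer slip into the published frame list. A rough sketch of that matching with clique, using made-up file names:

import clique

# made-up staging content; in the plugin these come from os.listdir(stagingdir)
# and `filename` is derived from the playblast output path
collected_files = ["review.0001.png", "review.0002.png", "thumbnail.png"]
filename = "review"

collections, remainder = clique.assemble(collected_files)

frame_collection = None
for collection in collections:
    # '{head}' is "review." here, so strip the trailing dot before comparing
    filebase = collection.format('{head}').rstrip(".")
    if filebase in filename:
        frame_collection = collection

print(list(frame_collection))   # ['review.0001.png', 'review.0002.png']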
@ -119,12 +125,11 @@ class ExtractPlayblast(openpype.api.Extractor):
|
|||
|
||||
# Add camera node name to representation data
|
||||
camera_node_name = pm.ls(camera)[0].getTransform().name()
|
||||
|
||||
|
||||
representation = {
|
||||
'name': 'png',
|
||||
'ext': 'png',
|
||||
'files': collected_frames,
|
||||
'files': list(frame_collection),
|
||||
"stagingDir": stagingdir,
|
||||
"frameStart": start,
|
||||
"frameEnd": end,
|
||||
|
|
@ -135,44 +140,6 @@ class ExtractPlayblast(openpype.api.Extractor):
|
|||
}
|
||||
instance.data["representations"].append(representation)
|
||||
|
||||
def _fix_playblast_output_path(self, filepath):
|
||||
"""Workaround a bug in maya.cmds.playblast to return correct filepath.
|
||||
|
||||
When the `viewer` argument is set to False and maya.cmds.playblast
|
||||
does not automatically open the playblasted file the returned
|
||||
filepath does not have the file's extension added correctly.
|
||||
|
||||
To workaround this we just glob.glob() for any file extensions and
|
||||
assume the latest modified file is the correct file and return it.
|
||||
"""
|
||||
# Catch cancelled playblast
|
||||
if filepath is None:
|
||||
self.log.warning("Playblast did not result in output path. "
|
||||
"Playblast is probably interrupted.")
|
||||
return None
|
||||
|
||||
# Fix: playblast not returning correct filename (with extension)
|
||||
# Lets assume the most recently modified file is the correct one.
|
||||
if not os.path.exists(filepath):
|
||||
directory = os.path.dirname(filepath)
|
||||
filename = os.path.basename(filepath)
|
||||
# check if the filepath is has frame based filename
|
||||
# example : capture.####.png
|
||||
parts = filename.split(".")
|
||||
if len(parts) == 3:
|
||||
query = os.path.join(directory, "{}.*.{}".format(parts[0],
|
||||
parts[-1]))
|
||||
files = glob.glob(query)
|
||||
else:
|
||||
files = glob.glob("{}.*".format(filepath))
|
||||
|
||||
if not files:
|
||||
raise RuntimeError("Couldn't find playblast from: "
|
||||
"{0}".format(filepath))
|
||||
filepath = max(files, key=os.path.getmtime)
|
||||
|
||||
return filepath
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def maintained_time():
|
||||
|
|
|
|||
|
|
@ -7,6 +7,8 @@ log = Logger().get_logger("SyncServer")
|
|||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class AbstractProvider:
|
||||
CODE = ''
|
||||
LABEL = ''
|
||||
|
||||
def __init__(self, project_name, site_name, tree=None, presets=None):
|
||||
self.presets = None
|
||||
|
|
@ -25,6 +27,17 @@ class AbstractProvider:
|
|||
(boolean)
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
@abc.abstractmethod
|
||||
def get_configurable_items(cls):
|
||||
"""
|
||||
Returns filtered dict of editable properties
|
||||
|
||||
|
||||
Returns:
|
||||
(dict)
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def upload_file(self, source_path, path,
|
||||
server, collection, file, representation, site,
|
||||
|
|
|
|||
|
|
@ -1,22 +1,33 @@
|
|||
from __future__ import print_function
|
||||
import os.path
|
||||
from googleapiclient.discovery import build
|
||||
import google.oauth2.service_account as service_account
|
||||
from googleapiclient import errors
|
||||
from .abstract_provider import AbstractProvider
|
||||
from googleapiclient.http import MediaFileUpload, MediaIoBaseDownload
|
||||
import time
|
||||
import sys
|
||||
import six
|
||||
import platform
|
||||
|
||||
from openpype.api import Logger
|
||||
from openpype.api import get_system_settings
|
||||
from ..utils import time_function, ResumableError
|
||||
import time
|
||||
from .abstract_provider import AbstractProvider
|
||||
from ..utils import time_function, ResumableError, EditableScopes
|
||||
|
||||
log = Logger().get_logger("SyncServer")
|
||||
|
||||
try:
|
||||
from googleapiclient.discovery import build
|
||||
import google.oauth2.service_account as service_account
|
||||
from googleapiclient import errors
|
||||
from googleapiclient.http import MediaFileUpload, MediaIoBaseDownload
|
||||
except (ImportError, SyntaxError):
|
||||
if six.PY3:
|
||||
six.reraise(*sys.exc_info())
|
||||
|
||||
# handle imports from Python 2 hosts - in those only basic methods are used
|
||||
log.warning("Import failed, imported from Python 2, operations will fail.")
|
||||
|
||||
SCOPES = ['https://www.googleapis.com/auth/drive.metadata.readonly',
|
||||
'https://www.googleapis.com/auth/drive.file',
|
||||
'https://www.googleapis.com/auth/drive.readonly'] # for write|delete
|
||||
|
||||
log = Logger().get_logger("SyncServer")
|
||||
|
||||
|
||||
class GDriveHandler(AbstractProvider):
|
||||
"""
|
||||
|
|
@ -42,15 +53,20 @@ class GDriveHandler(AbstractProvider):
|
|||
}
|
||||
}
|
||||
"""
|
||||
CODE = 'gdrive'
|
||||
LABEL = 'Google Drive'
|
||||
|
||||
FOLDER_STR = 'application/vnd.google-apps.folder'
|
||||
MY_DRIVE_STR = 'My Drive' # name of root folder of regular Google drive
|
||||
CHUNK_SIZE = 2097152 # must be divisible by 256!
|
||||
CHUNK_SIZE = 2097152 # must be divisible by 256! used for upload chunks
|
||||
|
||||
def __init__(self, project_name, site_name, tree=None, presets=None):
|
||||
self.presets = None
|
||||
self.active = False
|
||||
self.project_name = project_name
|
||||
self.site_name = site_name
|
||||
self.service = None
|
||||
self.root = None
|
||||
|
||||
self.presets = presets
|
||||
if not self.presets:
|
||||
|
|
@ -58,18 +74,15 @@ class GDriveHandler(AbstractProvider):
|
|||
format(site_name))
|
||||
return
|
||||
|
||||
if not os.path.exists(self.presets["credentials_url"]):
|
||||
log.info("Sync Server: No credentials for Gdrive provider! ")
|
||||
cred_path = self.presets.get("credentials_url", {}).\
|
||||
get(platform.system().lower()) or ''
|
||||
if not os.path.exists(cred_path):
|
||||
msg = "Sync Server: No credentials for gdrive provider " + \
|
||||
"for '{}' on path '{}'!".format(site_name, cred_path)
|
||||
log.info(msg)
|
||||
return
|
||||
|
||||
self.service = self._get_gd_service()
|
||||
try:
|
||||
self.root = self._prepare_root_info()
|
||||
except errors.HttpError:
|
||||
log.warning("HttpError in sync loop, "
|
||||
"trying next loop",
|
||||
exc_info=True)
|
||||
raise ResumableError
|
||||
self.service = self._get_gd_service(cred_path)
|
||||
|
||||
self._tree = tree
|
||||
self.active = True
|
||||
|
|
@ -80,7 +93,34 @@ class GDriveHandler(AbstractProvider):
|
|||
Returns:
|
||||
(boolean)
|
||||
"""
|
||||
return self.active
|
||||
return self.service is not None
|
||||
|
||||
@classmethod
|
||||
def get_configurable_items(cls):
|
||||
"""
|
||||
Returns filtered dict of editable properties.
|
||||
|
||||
|
||||
Returns:
|
||||
(dict)
|
||||
"""
|
||||
# {platform} tells that value is multiplatform and only specific OS
|
||||
# should be returned
|
||||
editable = {
|
||||
# credentials could be override on Project or User level
|
||||
'credentials_url': {
|
||||
'scope': [EditableScopes.PROJECT,
|
||||
EditableScopes.LOCAL],
|
||||
'label': "Credentials url",
|
||||
'type': 'text',
|
||||
'namespace': '{project_settings}/global/sync_server/sites/{site}/credentials_url/{platform}' # noqa: E501
|
||||
},
|
||||
# roots could be override only on Project leve, User cannot
|
||||
'root': {'scope': [EditableScopes.PROJECT],
|
||||
'label': "Roots",
|
||||
'type': 'dict'}
|
||||
}
|
||||
return editable
|
||||
|
||||
def get_roots_config(self, anatomy=None):
|
||||
"""
|
||||
|
|
@ -537,7 +577,7 @@ class GDriveHandler(AbstractProvider):
|
|||
return
|
||||
return provider_presets
|
||||
|
||||
def _get_gd_service(self):
|
||||
def _get_gd_service(self, credentials_path):
|
||||
"""
|
||||
Authorize client with 'credentials.json', uses service account.
|
||||
Service account needs to have target folder shared with.
|
||||
|
|
@ -546,11 +586,18 @@ class GDriveHandler(AbstractProvider):
|
|||
Returns:
|
||||
None
|
||||
"""
|
||||
creds = service_account.Credentials.from_service_account_file(
|
||||
self.presets["credentials_url"],
|
||||
scopes=SCOPES)
|
||||
service = build('drive', 'v3',
|
||||
credentials=creds, cache_discovery=False)
|
||||
service = None
|
||||
try:
|
||||
creds = service_account.Credentials.from_service_account_file(
|
||||
credentials_path,
|
||||
scopes=SCOPES)
|
||||
service = build('drive', 'v3',
|
||||
credentials=creds, cache_discovery=False)
|
||||
except Exception:
|
||||
log.error("Connection failed, " +
|
||||
"check '{}' credentials file".format(credentials_path),
|
||||
exc_info=True)
|
||||
|
||||
return service
|
||||
|
||||
def _prepare_root_info(self):
|
||||
|
|
@ -562,39 +609,47 @@ class GDriveHandler(AbstractProvider):
|
|||
|
||||
Returns:
|
||||
(dicts) of dicts where root folders are keys
|
||||
throws ResumableError in case of errors.HttpError
|
||||
"""
|
||||
roots = {}
|
||||
config_roots = self.get_roots_config()
|
||||
for path in config_roots.values():
|
||||
if self.MY_DRIVE_STR in path:
|
||||
roots[self.MY_DRIVE_STR] = self.service.files()\
|
||||
.get(fileId='root').execute()
|
||||
else:
|
||||
shared_drives = []
|
||||
page_token = None
|
||||
try:
|
||||
for path in config_roots.values():
|
||||
if self.MY_DRIVE_STR in path:
|
||||
roots[self.MY_DRIVE_STR] = self.service.files()\
|
||||
.get(fileId='root')\
|
||||
.execute()
|
||||
else:
|
||||
shared_drives = []
|
||||
page_token = None
|
||||
|
||||
while True:
|
||||
response = self.service.drives().list(
|
||||
pageSize=100,
|
||||
pageToken=page_token).execute()
|
||||
shared_drives.extend(response.get('drives', []))
|
||||
page_token = response.get('nextPageToken', None)
|
||||
if page_token is None:
|
||||
break
|
||||
while True:
|
||||
response = self.service.drives().list(
|
||||
pageSize=100,
|
||||
pageToken=page_token).execute()
|
||||
shared_drives.extend(response.get('drives', []))
|
||||
page_token = response.get('nextPageToken', None)
|
||||
if page_token is None:
|
||||
break
|
||||
|
||||
folders = path.split('/')
|
||||
if len(folders) < 2:
|
||||
raise ValueError("Wrong root folder definition {}".
|
||||
format(path))
|
||||
folders = path.split('/')
|
||||
if len(folders) < 2:
|
||||
raise ValueError("Wrong root folder definition {}".
|
||||
format(path))
|
||||
|
||||
for shared_drive in shared_drives:
|
||||
if folders[1] in shared_drive["name"]:
|
||||
roots[shared_drive["name"]] = {
|
||||
"name": shared_drive["name"],
|
||||
"id": shared_drive["id"]}
|
||||
if self.MY_DRIVE_STR not in roots: # add My Drive always
|
||||
roots[self.MY_DRIVE_STR] = self.service.files() \
|
||||
.get(fileId='root').execute()
|
||||
for shared_drive in shared_drives:
|
||||
if folders[1] in shared_drive["name"]:
|
||||
roots[shared_drive["name"]] = {
|
||||
"name": shared_drive["name"],
|
||||
"id": shared_drive["id"]}
|
||||
if self.MY_DRIVE_STR not in roots: # add My Drive always
|
||||
roots[self.MY_DRIVE_STR] = self.service.files() \
|
||||
.get(fileId='root').execute()
|
||||
except errors.HttpError:
|
||||
log.warning("HttpError in sync loop, "
|
||||
"trying next loop",
|
||||
exc_info=True)
|
||||
raise ResumableError
|
||||
|
||||
return roots
|
||||
|
||||
|
|
@ -615,6 +670,9 @@ class GDriveHandler(AbstractProvider):
|
|||
(dictionary) path as a key, folder id as a value
|
||||
"""
|
||||
log.debug("build_tree len {}".format(len(folders)))
|
||||
if not self.root: # build only when necessary, could be expensive
|
||||
self.root = self._prepare_root_info()
|
||||
|
||||
root_ids = []
|
||||
default_root_id = None
|
||||
tree = {}
|
||||
|
|
|
|||
|
|
@@ -65,6 +65,17 @@ class ProviderFactory:
        info = self._get_creator_info(provider)
        return info[1]

    def get_provider_configurable_items(self, provider):
        """
        Returns dict of modifiable properties for 'provider'.

        Provider contains information which its properties and on what
        level could be override
        """
        provider_info = self._get_creator_info(provider)

        return provider_info[0].get_configurable_items()

    def _get_creator_info(self, provider):
        """
        Collect all necessary info for provider. Currently only creator

@@ -91,5 +102,5 @@ factory = ProviderFactory()
# there is implementing 'GDriveHandler' class
# 7 denotes number of files that could be synced in single loop - learned by
# trial and error
factory.register_provider('gdrive', GDriveHandler, 7)
factory.register_provider('local_drive', LocalDriveHandler, 50)
factory.register_provider(GDriveHandler.CODE, GDriveHandler, 7)
factory.register_provider(LocalDriveHandler.CODE, LocalDriveHandler, 50)
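Registering providers through their CODE constants instead of repeating string literals keeps the factory key and the class attribute from drifting apart. A stripped-down sketch of the same pattern, with a hypothetical provider class standing in for the real handlers:

class DummyHandler:
    CODE = 'dummy'            # plays the role of GDriveHandler.CODE
    LABEL = 'Dummy provider'


class SimpleFactory:
    def __init__(self):
        self.providers = {}

    def register_provider(self, provider_code, creator, batch_limit):
        # keyed by the class constant, so renaming the code in one place
        # updates both the handler and the factory lookup
        self.providers[provider_code] = (creator, batch_limit)


factory = SimpleFactory()
factory.register_provider(DummyHandler.CODE, DummyHandler, 10)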
@ -7,22 +7,43 @@ import time
|
|||
from openpype.api import Logger, Anatomy
|
||||
from .abstract_provider import AbstractProvider
|
||||
|
||||
from ..utils import EditableScopes
|
||||
|
||||
log = Logger().get_logger("SyncServer")
|
||||
|
||||
|
||||
class LocalDriveHandler(AbstractProvider):
|
||||
CODE = 'local_drive'
|
||||
LABEL = 'Local drive'
|
||||
|
||||
""" Handles required operations on mounted disks with OS """
|
||||
def __init__(self, project_name, site_name, tree=None, presets=None):
|
||||
self.presets = None
|
||||
self.active = False
|
||||
self.project_name = project_name
|
||||
self.site_name = site_name
|
||||
self._editable_properties = {}
|
||||
|
||||
self.active = self.is_active()
|
||||
|
||||
def is_active(self):
|
||||
return True
|
||||
|
||||
@classmethod
|
||||
def get_configurable_items(cls):
|
||||
"""
|
||||
Returns filtered dict of editable properties
|
||||
|
||||
Returns:
|
||||
(dict)
|
||||
"""
|
||||
editable = {
|
||||
'root': {'scope': [EditableScopes.LOCAL],
|
||||
'label': "Roots",
|
||||
'type': 'dict'}
|
||||
}
|
||||
return editable
|
||||
|
||||
def upload_file(self, source_path, target_path,
|
||||
server, collection, file, representation, site,
|
||||
overwrite=False, direction="Upload"):
|
||||
|
|
|
|||
|
|
@ -206,14 +206,14 @@ def _get_configured_sites_from_setting(module, project_name, project_setting):
|
|||
all_sites = module._get_default_site_configs()
|
||||
all_sites.update(project_setting.get("sites"))
|
||||
for site_name, config in all_sites.items():
|
||||
handler = initiated_handlers. \
|
||||
get((config["provider"], site_name))
|
||||
provider = module.get_provider_for_site(site=site_name)
|
||||
handler = initiated_handlers.get((provider, site_name))
|
||||
if not handler:
|
||||
handler = lib.factory.get_provider(config["provider"],
|
||||
handler = lib.factory.get_provider(provider,
|
||||
project_name,
|
||||
site_name,
|
||||
presets=config)
|
||||
initiated_handlers[(config["provider"], site_name)] = \
|
||||
initiated_handlers[(provider, site_name)] = \
|
||||
handler
|
||||
|
||||
if handler.is_active():
|
||||
|
|
@ -274,6 +274,9 @@ class SyncServerThread(threading.Thread):
|
|||
self.module.set_sync_project_settings() # clean cache
|
||||
for collection, preset in self.module.sync_project_settings.\
|
||||
items():
|
||||
if collection not in self.module.get_enabled_projects():
|
||||
continue
|
||||
|
||||
start_time = time.time()
|
||||
local_site, remote_site = self._working_sites(collection)
|
||||
if not all([local_site, remote_site]):
|
||||
|
|
@ -295,13 +298,14 @@ class SyncServerThread(threading.Thread):
|
|||
processed_file_path = set()
|
||||
|
||||
site_preset = preset.get('sites')[remote_site]
|
||||
remote_provider = site_preset['provider']
|
||||
remote_provider = \
|
||||
self.module.get_provider_for_site(site=remote_site)
|
||||
handler = lib.factory.get_provider(remote_provider,
|
||||
collection,
|
||||
remote_site,
|
||||
presets=site_preset)
|
||||
limit = lib.factory.get_provider_batch_limit(
|
||||
site_preset['provider'])
|
||||
remote_provider)
|
||||
# first call to get_provider could be expensive, its
|
||||
# building folder tree structure in memory
|
||||
# call only if needed, eg. DO_UPLOAD or DO_DOWNLOAD
|
||||
|
|
@ -451,8 +455,9 @@ class SyncServerThread(threading.Thread):
|
|||
remote_site))
|
||||
return None, None
|
||||
|
||||
if not all([site_is_working(self.module, collection, local_site),
|
||||
site_is_working(self.module, collection, remote_site)]):
|
||||
configured_sites = _get_configured_sites(self.module, collection)
|
||||
if not all([local_site in configured_sites,
|
||||
remote_site in configured_sites]):
|
||||
log.debug("Some of the sites {} - {} is not ".format(local_site,
|
||||
remote_site) +
|
||||
"working properly")
|
||||
|
|
|
|||
|
|
@ -2,6 +2,8 @@ import os
|
|||
from bson.objectid import ObjectId
|
||||
from datetime import datetime
|
||||
import threading
|
||||
import platform
|
||||
import copy
|
||||
|
||||
from avalon.api import AvalonMongoDB
|
||||
|
||||
|
|
@ -9,12 +11,18 @@ from .. import PypeModule, ITrayModule
|
|||
from openpype.api import (
|
||||
Anatomy,
|
||||
get_project_settings,
|
||||
get_system_settings,
|
||||
get_local_site_id)
|
||||
from openpype.lib import PypeLogger
|
||||
from openpype.settings.lib import (
|
||||
get_default_project_settings,
|
||||
get_default_anatomy_settings,
|
||||
get_anatomy_settings)
|
||||
|
||||
from .providers.local_drive import LocalDriveHandler
|
||||
from .providers import lib
|
||||
|
||||
from .utils import time_function, SyncStatus
|
||||
from .utils import time_function, SyncStatus, EditableScopes
|
||||
|
||||
|
||||
log = PypeLogger().get_logger("SyncServer")
|
||||
|
|
@ -340,18 +348,6 @@ class SyncServerModule(PypeModule, ITrayModule):
|
|||
|
||||
return self._get_enabled_sites_from_settings(sync_settings)
|
||||
|
||||
def get_configurable_items_for_site(self, project_name, site_name):
|
||||
"""
|
||||
Returns list of items that should be configurable by User
|
||||
|
||||
Returns:
|
||||
(list of dict)
|
||||
[{key:"root", label:"root", value:"valueFromSettings"}]
|
||||
"""
|
||||
# if project_name is None: ..for get_default_project_settings
|
||||
# return handler.get_configurable_items()
|
||||
pass
|
||||
|
||||
def get_active_site(self, project_name):
|
||||
"""
|
||||
Returns active (mine) site for 'project_name' from settings
|
||||
|
|
@ -402,6 +398,205 @@ class SyncServerModule(PypeModule, ITrayModule):
|
|||
|
||||
return remote_site
|
||||
|
||||
def get_local_settings_schema(self):
|
||||
"""Wrapper for Local settings - all projects incl. Default"""
|
||||
return self.get_configurable_items(EditableScopes.LOCAL)
|
||||
|
||||
def get_configurable_items(self, scope=None):
|
||||
"""
|
||||
Returns list of sites that could be configurable for all projects.
|
||||
|
||||
Could be filtered by 'scope' argument (list)
|
||||
|
||||
Args:
|
||||
scope (list of utils.EditableScope)
|
||||
|
||||
Returns:
|
||||
(dict of list of dict)
|
||||
{
|
||||
siteA : [
|
||||
{
|
||||
key:"root", label:"root",
|
||||
"value":"{'work': 'c:/projects'}",
|
||||
"type": "dict",
|
||||
"children":[
|
||||
{ "key": "work",
|
||||
"type": "text",
|
||||
"value": "c:/projects"}
|
||||
]
|
||||
},
|
||||
{
|
||||
key:"credentials_url", label:"Credentials url",
|
||||
"value":"'c:/projects/cred.json'", "type": "text",
|
||||
"namespace": "{project_setting}/global/sync_server/
|
||||
sites"
|
||||
}
|
||||
]
|
||||
}
|
||||
"""
|
||||
editable = {}
|
||||
applicable_projects = list(self.connection.projects())
|
||||
applicable_projects.append(None)
|
||||
for project in applicable_projects:
|
||||
project_name = None
|
||||
if project:
|
||||
project_name = project["name"]
|
||||
|
||||
items = self.get_configurable_items_for_project(project_name,
|
||||
scope)
|
||||
editable.update(items)
|
||||
|
||||
return editable
|
||||
|
||||
def get_local_settings_schema_for_project(self, project_name):
|
||||
"""Wrapper for Local settings - for specific 'project_name'"""
|
||||
return self.get_configurable_items_for_project(project_name,
|
||||
EditableScopes.LOCAL)
|
||||
|
||||
def get_configurable_items_for_project(self, project_name=None,
|
||||
scope=None):
|
||||
"""
|
||||
Returns list of items that could be configurable for specific
|
||||
'project_name'
|
||||
|
||||
Args:
|
||||
project_name (str) - None > default project,
|
||||
scope (list of utils.EditableScope)
|
||||
(optional, None is all scopes, default is LOCAL)
|
||||
|
||||
Returns:
|
||||
(dict of list of dict)
|
||||
{
|
||||
siteA : [
|
||||
{
|
||||
key:"root", label:"root",
|
||||
"type": "dict",
|
||||
"children":[
|
||||
{ "key": "work",
|
||||
"type": "text",
|
||||
"value": "c:/projects"}
|
||||
]
|
||||
},
|
||||
{
|
||||
key:"credentials_url", label:"Credentials url",
|
||||
"value":"'c:/projects/cred.json'", "type": "text",
|
||||
"namespace": "{project_setting}/global/sync_server/
|
||||
sites"
|
||||
}
|
||||
]
|
||||
}
|
||||
"""
|
||||
allowed_sites = set()
|
||||
sites = self.get_all_site_configs(project_name)
|
||||
if project_name:
|
||||
# Local Settings can select only from allowed sites for project
|
||||
allowed_sites.update(set(self.get_active_sites(project_name)))
|
||||
allowed_sites.update(set(self.get_remote_sites(project_name)))
|
||||
|
||||
editable = {}
|
||||
for site_name in sites.keys():
|
||||
if allowed_sites and site_name not in allowed_sites:
|
||||
continue
|
||||
|
||||
items = self.get_configurable_items_for_site(project_name,
|
||||
site_name,
|
||||
scope)
|
||||
editable[site_name] = items
|
||||
|
||||
return editable
|
||||
|
||||
def get_local_settings_schema_for_site(self, project_name, site_name):
|
||||
"""Wrapper for Local settings - for particular 'site_name and proj."""
|
||||
return self.get_configurable_items_for_site(project_name,
|
||||
site_name,
|
||||
EditableScopes.LOCAL)
|
||||
|
||||
def get_configurable_items_for_site(self, project_name=None,
|
||||
site_name=None,
|
||||
scope=None):
|
||||
"""
|
||||
Returns list of items that could be configurable.
|
||||
|
||||
Args:
|
||||
project_name (str) - None > default project
|
||||
site_name (str)
|
||||
scope (list of utils.EditableScope)
|
||||
(optional, None is all scopes)
|
||||
|
||||
Returns:
|
||||
(list)
|
||||
[
|
||||
{
|
||||
key:"root", label:"root", type:"dict",
|
||||
"children":[
|
||||
{ "key": "work",
|
||||
"type": "text",
|
||||
"value": "c:/projects"}
|
||||
]
|
||||
}, ...
|
||||
]
|
||||
"""
|
||||
provider_name = self.get_provider_for_site(site=site_name)
|
||||
items = lib.factory.get_provider_configurable_items(provider_name)
|
||||
|
||||
if project_name:
|
||||
sync_s = self.get_sync_project_setting(project_name,
|
||||
exclude_locals=True,
|
||||
cached=False)
|
||||
else:
|
||||
sync_s = get_default_project_settings(exclude_locals=True)
|
||||
sync_s = sync_s["global"]["sync_server"]
|
||||
sync_s["sites"].update(
|
||||
self._get_default_site_configs(self.enabled))
|
||||
|
||||
editable = []
|
||||
if type(scope) is not list:
|
||||
scope = [scope]
|
||||
scope = set(scope)
|
||||
for key, properties in items.items():
|
||||
if scope is None or scope.intersection(set(properties["scope"])):
|
||||
val = sync_s.get("sites", {}).get(site_name, {}).get(key)
|
||||
|
||||
item = {
|
||||
"key": key,
|
||||
"label": properties["label"],
|
||||
"type": properties["type"]
|
||||
}
|
||||
|
||||
if properties.get("namespace"):
|
||||
item["namespace"] = properties.get("namespace")
|
||||
if "platform" in item["namespace"]:
|
||||
try:
|
||||
if val:
|
||||
val = val[platform.system().lower()]
|
||||
except KeyError:
|
||||
st = "{}'s field value {} should be".format(key, val) # noqa: E501
|
||||
log.error(st + " multiplatform dict")
|
||||
|
||||
item["namespace"] = item["namespace"].replace('{site}',
|
||||
site_name)
|
||||
children = []
|
||||
if properties["type"] == "dict":
|
||||
if val:
|
||||
for val_key, val_val in val.items():
|
||||
child = {
|
||||
"type": "text",
|
||||
"key": val_key,
|
||||
"value": val_val
|
||||
}
|
||||
children.append(child)
|
||||
|
||||
if properties["type"] == "dict":
|
||||
item["children"] = children
|
||||
else:
|
||||
item["value"] = val
|
||||
|
||||
|
||||
|
||||
editable.append(item)
|
||||
|
||||
return editable
|
||||
|
||||
def reset_timer(self):
|
||||
"""
|
||||
Called when waiting for next loop should be skipped.
|
||||
|
|
@ -418,7 +613,7 @@ class SyncServerModule(PypeModule, ITrayModule):
|
|||
for project in self.connection.projects():
|
||||
project_name = project["name"]
|
||||
project_settings = self.get_sync_project_setting(project_name)
|
||||
if project_settings:
|
||||
if project_settings and project_settings.get("enabled"):
|
||||
enabled_projects.append(project_name)
|
||||
|
||||
return enabled_projects
|
||||
|
|
@ -570,75 +765,145 @@ class SyncServerModule(PypeModule, ITrayModule):
|
|||
|
||||
return self._sync_project_settings
|
||||
|
||||
def set_sync_project_settings(self):
|
||||
def set_sync_project_settings(self, exclude_locals=False):
|
||||
"""
|
||||
Set sync_project_settings for all projects (caching)
|
||||
|
||||
Args:
|
||||
exclude_locals (bool): ignore overrides from Local Settings
|
||||
For performance
|
||||
"""
|
||||
sync_project_settings = {}
|
||||
|
||||
for collection in self.connection.database.collection_names(False):
|
||||
sync_settings = self._parse_sync_settings_from_settings(
|
||||
get_project_settings(collection))
|
||||
if sync_settings:
|
||||
default_sites = self._get_default_site_configs()
|
||||
sync_settings['sites'].update(default_sites)
|
||||
sync_project_settings[collection] = sync_settings
|
||||
|
||||
if not sync_project_settings:
|
||||
log.info("No enabled and configured projects for sync.")
|
||||
sync_project_settings = self._prepare_sync_project_settings(
|
||||
exclude_locals)
|
||||
|
||||
self._sync_project_settings = sync_project_settings
|
||||
|
||||
def get_sync_project_setting(self, project_name):
|
||||
def _prepare_sync_project_settings(self, exclude_locals):
|
||||
sync_project_settings = {}
|
||||
system_sites = self.get_all_site_configs()
|
||||
for collection in self.connection.database.collection_names(False):
|
||||
sites = copy.deepcopy(system_sites) # get all configured sites
|
||||
proj_settings = self._parse_sync_settings_from_settings(
|
||||
get_project_settings(collection,
|
||||
exclude_locals=exclude_locals))
|
||||
sites.update(self._get_default_site_configs(
|
||||
proj_settings["enabled"], collection))
|
||||
sites.update(proj_settings['sites'])
|
||||
proj_settings["sites"] = sites
|
||||
|
||||
sync_project_settings[collection] = proj_settings
|
||||
if not sync_project_settings:
|
||||
log.info("No enabled and configured projects for sync.")
|
||||
return sync_project_settings
|
||||
|
||||
def get_sync_project_setting(self, project_name, exclude_locals=False,
|
||||
cached=True):
|
||||
""" Handles pulling sync_server's settings for enabled 'project_name'
|
||||
|
||||
Args:
|
||||
project_name (str): used in project settings
|
||||
exclude_locals (bool): ignore overrides from Local Settings
|
||||
cached (bool): use pre-cached values, or return fresh ones
|
||||
cached values needed for single loop (with all overrides)
|
||||
fresh values needed for Local settings (without overrides)
|
||||
Returns:
|
||||
(dict): settings dictionary for the enabled project,
|
||||
empty if no settings or sync is disabled
|
||||
"""
|
||||
# presets set already, do not call again and again
|
||||
# self.log.debug("project preset {}".format(self.presets))
|
||||
if self.sync_project_settings and \
|
||||
self.sync_project_settings.get(project_name):
|
||||
return self.sync_project_settings.get(project_name)
|
||||
if not cached:
|
||||
return self._prepare_sync_project_settings(exclude_locals)\
|
||||
[project_name]
|
||||
|
||||
settings = get_project_settings(project_name)
|
||||
return self._parse_sync_settings_from_settings(settings)
|
||||
if not self.sync_project_settings or \
|
||||
not self.sync_project_settings.get(project_name):
|
||||
self.set_sync_project_settings(exclude_locals)
|
||||
|
||||
return self.sync_project_settings.get(project_name)
|
||||
|
||||
def _parse_sync_settings_from_settings(self, settings):
|
||||
""" settings from api.get_project_settings, TOOD rename """
|
||||
sync_settings = settings.get("global").get("sync_server")
|
||||
if not sync_settings:
|
||||
log.info("No project setting not syncing.")
|
||||
return {}
|
||||
if sync_settings.get("enabled"):
|
||||
return sync_settings
|
||||
|
||||
return {}
|
||||
return sync_settings
|
||||
|
||||
def _get_default_site_configs(self):
|
||||
def get_all_site_configs(self, project_name=None):
|
||||
"""
|
||||
Returns skeleton settings for 'studio' and user's local site
|
||||
Returns (dict) with all sites configured system wide.
|
||||
|
||||
Args:
|
||||
project_name (str)(optional): if present, check if not disabled
|
||||
|
||||
Returns:
|
||||
(dict): {'studio': {'provider':'local_drive'...},
|
||||
'MY_LOCAL': {'provider':....}}
|
||||
"""
|
||||
default_config = {'provider': 'local_drive'}
|
||||
all_sites = {self.DEFAULT_SITE: default_config,
|
||||
get_local_site_id(): default_config}
|
||||
sys_sett = get_system_settings()
|
||||
sync_sett = sys_sett["modules"].get("sync_server")
|
||||
|
||||
project_enabled = True
|
||||
if project_name:
|
||||
project_enabled = project_name in self.get_enabled_projects()
|
||||
sync_enabled = sync_sett["enabled"] and project_enabled
|
||||
|
||||
system_sites = {}
|
||||
if sync_enabled:
|
||||
for site, detail in sync_sett.get("sites", {}).items():
|
||||
system_sites[site] = detail
|
||||
|
||||
system_sites.update(self._get_default_site_configs(sync_enabled,
|
||||
project_name))
|
||||
|
||||
return system_sites
|
||||
|
||||
def _get_default_site_configs(self, sync_enabled=True, project_name=None):
|
||||
"""
|
||||
Returns settings for 'studio' and user's local site
|
||||
|
||||
Returns base values from setting, not overriden by Local Settings,
|
||||
eg. value used to push TO LS not to get actual value for syncing.
|
||||
"""
|
||||
if not project_name:
|
||||
anatomy_sett = get_default_anatomy_settings(exclude_locals=True)
|
||||
else:
|
||||
anatomy_sett = get_anatomy_settings(project_name,
|
||||
exclude_locals=True)
|
||||
roots = {}
|
||||
for root, config in anatomy_sett["roots"].items():
|
||||
roots[root] = config[platform.system().lower()]
|
||||
studio_config = {
|
||||
'provider': 'local_drive',
|
||||
"root": roots
|
||||
}
|
||||
all_sites = {self.DEFAULT_SITE: studio_config}
|
||||
if sync_enabled:
|
||||
all_sites['local'] = {'provider': 'local_drive'}
|
||||
return all_sites
|
||||
|
||||
def get_provider_for_site(self, project_name, site):
|
||||
def get_provider_for_site(self, project_name=None, site=None):
|
||||
"""
|
||||
Return provider name for site.
|
||||
Return provider name for site (unique name across all projects.
|
||||
"""
|
||||
site_preset = self.get_sync_project_setting(project_name)["sites"].\
|
||||
get(site)
|
||||
if site_preset:
|
||||
return site_preset["provider"]
|
||||
sites = {self.DEFAULT_SITE: "local_drive",
|
||||
self.LOCAL_SITE: "local_drive",
|
||||
get_local_site_id(): "local_drive"}
|
||||
|
||||
return "NA"
|
||||
if site in sites.keys():
|
||||
return sites[site]
|
||||
|
||||
if project_name: # backward compatibility
|
||||
proj_settings = self.get_sync_project_setting(project_name)
|
||||
provider = proj_settings.get("sites", {}).get(site, {}).\
|
||||
get("provider")
|
||||
if provider:
|
||||
return provider
|
||||
|
||||
sys_sett = get_system_settings()
|
||||
sync_sett = sys_sett["modules"].get("sync_server")
|
||||
for site, detail in sync_sett.get("sites", {}).items():
|
||||
sites[site] = detail.get("provider")
|
||||
|
||||
return sites.get(site, 'N/A')
|
||||
|
||||
@time_function
|
||||
def get_sync_representations(self, collection, active_site, remote_site):
|
||||
|
|
@ -757,6 +1022,15 @@ class SyncServerModule(PypeModule, ITrayModule):
|
|||
Always is comparing local record, eg. site with
|
||||
'name' == self.presets[PROJECT_NAME]['config']["active_site"]
|
||||
|
||||
This leads to trigger actual upload or download, there is
|
||||
a use case 'studio' <> 'remote' where user should publish
|
||||
to 'studio', see progress in Tray GUI, but do not do
|
||||
physical upload/download
|
||||
(as multiple user would be doing that).
|
||||
|
||||
Do physical U/D only when any of the sites is user's local, in that
|
||||
case only user has the data and must U/D.
|
||||
|
||||
Args:
|
||||
file (dictionary): of file from representation in Mongo
|
||||
local_site (string): - local side of compare (usually 'studio')
|
||||
|
|
@ -766,8 +1040,12 @@ class SyncServerModule(PypeModule, ITrayModule):
|
|||
(string) - one of SyncStatus
|
||||
"""
|
||||
sites = file.get("sites") or []
|
||||
# if isinstance(sites, list): # temporary, old format of 'sites'
|
||||
# return SyncStatus.DO_NOTHING
|
||||
|
||||
if get_local_site_id() not in (local_site, remote_site):
|
||||
# don't do upload/download for studio sites
|
||||
log.debug("No local site {} - {}".format(local_site, remote_site))
|
||||
return SyncStatus.DO_NOTHING
|
||||
|
||||
_, remote_rec = self._get_site_rec(sites, remote_site) or {}
|
||||
if remote_rec: # sync remote target
|
||||
created_dt = remote_rec.get("created_dt")
|
||||
|
|
@ -1116,7 +1394,7 @@ class SyncServerModule(PypeModule, ITrayModule):
|
|||
format(site_name))
|
||||
return
|
||||
|
||||
provider_name = self.get_provider_for_site(collection, site_name)
|
||||
provider_name = self.get_provider_for_site(site=site_name)
|
||||
|
||||
if provider_name == 'local_drive':
|
||||
query = {
|
||||
|
|
|
|||
|
|
@ -158,7 +158,7 @@ def translate_provider_for_icon(sync_server, project, site):
|
|||
"""
|
||||
if site == sync_server.DEFAULT_SITE:
|
||||
return sync_server.DEFAULT_SITE
|
||||
return sync_server.get_provider_for_site(project, site)
|
||||
return sync_server.get_provider_for_site(site=site)
|
||||
|
||||
|
||||
def get_item_by_id(model, object_id):
|
||||
|
|
|
|||
|
|
@ -236,7 +236,7 @@ class _SyncRepresentationWidget(QtWidgets.QWidget):
|
|||
|
||||
for site, progress in {active_site: local_progress,
|
||||
remote_site: remote_progress}.items():
|
||||
provider = self.sync_server.get_provider_for_site(project, site)
|
||||
provider = self.sync_server.get_provider_for_site(site=site)
|
||||
if provider == 'local_drive':
|
||||
if 'studio' in site:
|
||||
txt = " studio version"
|
||||
|
|
|
|||
|
|
@@ -33,3 +33,9 @@ def time_function(method):
        return result

    return timed


class EditableScopes:
    SYSTEM = 0
    PROJECT = 1
    LOCAL = 2
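EditableScopes is what the configurable-item dictionaries key their "scope" lists on; the Local Settings wrappers then keep only items whose scope intersects the requested one. A small sketch of that filtering, reusing the EditableScopes class from the hunk above (item names are illustrative, modelled on GDriveHandler.get_configurable_items):

items = {
    'credentials_url': {'scope': [EditableScopes.PROJECT, EditableScopes.LOCAL],
                        'label': "Credentials url", 'type': 'text'},
    'root': {'scope': [EditableScopes.PROJECT],
             'label': "Roots", 'type': 'dict'},
}

requested = {EditableScopes.LOCAL}      # e.g. building the Local Settings schema

editable = [
    {'key': key, 'label': props['label'], 'type': props['type']}
    for key, props in items.items()
    if requested.intersection(props['scope'])
]
# only 'credentials_url' survives -- 'root' is editable on project level only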
@ -55,7 +55,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
profiles = None
|
||||
|
||||
def process(self, instance):
|
||||
self.log.debug(instance.data["representations"])
|
||||
self.log.debug(str(instance.data["representations"]))
|
||||
# Skip review when requested.
|
||||
if not instance.data.get("review", True):
|
||||
return
|
||||
|
|
|
|||
|
|
@ -0,0 +1,18 @@
|
|||
{
|
||||
"publish": {
|
||||
"ValidateSceneSettings": {
|
||||
"enabled": true,
|
||||
"optional": true,
|
||||
"active": true,
|
||||
"skip_resolution_check": [".*"],
|
||||
"skip_timelines_check": [".*"]
|
||||
},
|
||||
"AfterEffectsSubmitDeadline": {
|
||||
"use_published": true,
|
||||
"priority": 50,
|
||||
"primary_pool": "",
|
||||
"secondary_pool": "",
|
||||
"chunk_size": 1000000
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -267,13 +267,6 @@
|
|||
"remote_site": "studio"
|
||||
},
|
||||
"sites": {
|
||||
"gdrive": {
|
||||
"provider": "gdrive",
|
||||
"credentials_url": "",
|
||||
"root": {
|
||||
"work": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"project_plugins": {
|
||||
|
|
|
|||
|
|
@ -1,14 +1,18 @@
|
|||
{
|
||||
"general": {
|
||||
"skip_resolution_check": [],
|
||||
"skip_timelines_check": []
|
||||
},
|
||||
"publish": {
|
||||
"CollectPalettes": {
|
||||
"allowed_tasks": [
|
||||
"."
|
||||
".*"
|
||||
]
|
||||
},
|
||||
"ValidateSceneSettings": {
|
||||
"enabled": true,
|
||||
"optional": true,
|
||||
"active": true,
|
||||
"frame_check_filter": [],
|
||||
"skip_resolution_check": [],
|
||||
"skip_timelines_check": []
|
||||
},
|
||||
"HarmonySubmitDeadline": {
|
||||
"use_published": false,
|
||||
"priority": 50,
|
||||
|
|
|
|||
|
|
@ -135,7 +135,8 @@
|
|||
"workspace_name": ""
|
||||
},
|
||||
"sync_server": {
|
||||
"enabled": false
|
||||
"enabled": false,
|
||||
"sites": {}
|
||||
},
|
||||
"deadline": {
|
||||
"enabled": true,
|
||||
|
|
|
|||
|
|
@ -101,7 +101,8 @@ from .enum_entity import (
|
|||
BaseEnumEntity,
|
||||
EnumEntity,
|
||||
AppsEnumEntity,
|
||||
ToolsEnumEntity
|
||||
ToolsEnumEntity,
|
||||
ProvidersEnum
|
||||
)
|
||||
|
||||
from .list_entity import ListEntity
|
||||
|
|
@ -149,6 +150,7 @@ __all__ = (
|
|||
"EnumEntity",
|
||||
"AppsEnumEntity",
|
||||
"ToolsEnumEntity",
|
||||
"ProvidersEnum",
|
||||
|
||||
"ListEntity",
|
||||
|
||||
|
|
|
|||
|
|
@ -217,3 +217,41 @@ class ToolsEnumEntity(BaseEnumEntity):
|
|||
if key in self.valid_keys:
|
||||
new_value.append(key)
|
||||
self._current_value = new_value
|
||||
|
||||
|
||||
class ProvidersEnum(BaseEnumEntity):
|
||||
schema_types = ["providers-enum"]
|
||||
|
||||
def _item_initalization(self):
|
||||
self.multiselection = False
|
||||
self.value_on_not_set = ""
|
||||
self.enum_items = []
|
||||
self.valid_keys = set()
|
||||
self.valid_value_types = (str, )
|
||||
self.placeholder = None
|
||||
|
||||
def _get_enum_values(self):
|
||||
from openpype.modules.sync_server.providers import lib as lib_providers
|
||||
|
||||
providers = lib_providers.factory.providers
|
||||
|
||||
valid_keys = set()
|
||||
valid_keys.add('')
|
||||
enum_items = [{'': 'Choose Provider'}]
|
||||
for provider_code, provider_info in providers.items():
|
||||
provider, _ = provider_info
|
||||
enum_items.append({provider_code: provider.LABEL})
|
||||
valid_keys.add(provider_code)
|
||||
|
||||
return enum_items, valid_keys
|
||||
|
||||
def set_override_state(self, *args, **kwargs):
|
||||
super(ProvidersEnum, self).set_override_state(*args, **kwargs)
|
||||
|
||||
self.enum_items, self.valid_keys = self._get_enum_values()
|
||||
|
||||
value_on_not_set = list(self.valid_keys)[0]
|
||||
if self._current_value is NOT_SET:
|
||||
self._current_value = value_on_not_set
|
||||
|
||||
self.value_on_not_set = value_on_not_set
|
||||
|
|
|
|||
|
|
@ -78,6 +78,10 @@
|
|||
"type": "schema",
|
||||
"name": "schema_project_hiero"
|
||||
},
|
||||
{
|
||||
"type": "schema",
|
||||
"name": "schema_project_aftereffects"
|
||||
},
|
||||
{
|
||||
"type": "schema",
|
||||
"name": "schema_project_harmony"
|
||||
|
|
|
|||
|
|
@ -0,0 +1,90 @@
|
|||
{
|
||||
"type": "dict",
|
||||
"collapsible": true,
|
||||
"key": "aftereffects",
|
||||
"label": "AfterEffects",
|
||||
"is_file": true,
|
||||
"children": [
|
||||
{
|
||||
"type": "dict",
|
||||
"collapsible": true,
|
||||
"key": "publish",
|
||||
"label": "Publish plugins",
|
||||
"children": [
|
||||
{
|
||||
"type": "dict",
|
||||
"collapsible": true,
|
||||
"key": "ValidateSceneSettings",
|
||||
"label": "Validate Scene Settings",
|
||||
"checkbox_key": "enabled",
|
||||
"children": [
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "enabled",
|
||||
"label": "Enabled"
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "optional",
|
||||
"label": "Optional"
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "active",
|
||||
"label": "Active"
|
||||
},
|
||||
{
|
||||
"type": "label",
|
||||
"label": "Validate if FPS and Resolution match shot data"
|
||||
},
|
||||
{
|
||||
"type": "list",
|
||||
"key": "skip_resolution_check",
|
||||
"object_type": "text",
|
||||
"label": "Skip Resolution Check for Tasks"
|
||||
},
|
||||
{
|
||||
"type": "list",
|
||||
"key": "skip_timelines_check",
|
||||
"object_type": "text",
|
||||
"label": "Skip Timeline Check for Tasks"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "dict",
|
||||
"collapsible": true,
|
||||
"key": "AfterEffectsSubmitDeadline",
|
||||
"label": "AfterEffects Submit to Deadline",
|
||||
"children": [
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "use_published",
|
||||
"label": "Use Published scene"
|
||||
},
|
||||
{
|
||||
"type": "number",
|
||||
"key": "priority",
|
||||
"label": "Priority"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"key": "primary_pool",
|
||||
"label": "Primary Pool"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"key": "secondary_pool",
|
||||
"label": "Secondary Pool"
|
||||
},
|
||||
{
|
||||
"type": "number",
|
||||
"key": "chunk_size",
|
||||
"label": "Frames Per Task"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
@@ -5,26 +5,6 @@
    "label": "Harmony",
    "is_file": true,
    "children": [
        {
            "type": "dict",
            "collapsible": true,
            "key": "general",
            "label": "General",
            "children": [
                {
                    "type": "list",
                    "key": "skip_resolution_check",
                    "object_type": "text",
                    "label": "Skip Resolution Check for Tasks"
                },
                {
                    "type": "list",
                    "key": "skip_timelines_check",
                    "object_type": "text",
                    "label": "Skip Timeliene Check for Tasks"
                }
            ]
        },
        {
            "type": "dict",
            "collapsible": true,
@@ -45,6 +25,52 @@
            }
        ]
    },
    {
        "type": "dict",
        "collapsible": true,
        "key": "ValidateSceneSettings",
        "label": "Validate Scene Settings",
        "checkbox_key": "enabled",
        "children": [
            {
                "type": "boolean",
                "key": "enabled",
                "label": "Enabled"
            },
            {
                "type": "boolean",
                "key": "optional",
                "label": "Optional"
            },
            {
                "type": "boolean",
                "key": "active",
                "label": "Active"
            },
            {
                "type": "label",
                "label": "Validate if FrameStart, FrameEnd and Resolution match shot data"
            },
            {
                "type": "list",
                "key": "frame_check_filter",
                "label": "Skip Frame check for Assets with",
                "object_type": "text"
            },
            {
                "type": "list",
                "key": "skip_resolution_check",
                "object_type": "text",
                "label": "Skip Resolution Check for Tasks"
            },
            {
                "type": "list",
                "key": "skip_timelines_check",
                "object_type": "text",
                "label": "Skip Timeline Check for Tasks"
            }
        ]
    },
    {
        "type": "dict",
        "collapsible": true,
@@ -59,7 +85,7 @@
            {
                "type": "number",
                "key": "priority",
                "label": "priority"
                "label": "Priority"
            },
            {
                "type": "text",
@@ -74,7 +100,7 @@
            {
                "type": "number",
                "key": "chunk_size",
                "label": "Chunk Size"
                "label": "Frames Per Task"
            }
        ]
    }
@@ -50,14 +50,10 @@
    "type": "dict",
    "children": [
        {
            "type": "text",
            "key": "provider",
            "label": "Provider"
        },
        {
            "type": "text",
            "type": "path",
            "key": "credentials_url",
            "label": "Credentials url"
            "label": "Credentials url",
            "multiplatform": true
        },
        {
            "type": "dict-modifiable",
@@ -85,11 +85,32 @@
    "label": "Site Sync",
    "collapsible": true,
    "checkbox_key": "enabled",
    "children": [{
        "type": "boolean",
        "key": "enabled",
        "label": "Enabled"
    }]
    "children": [
        {
            "type": "boolean",
            "key": "enabled",
            "label": "Enabled"
        },
        {
            "type": "dict-modifiable",
            "collapsible": true,
            "key": "sites",
            "label": "Sites",
            "collapsible_key": false,
            "is_file": true,
            "object_type":
            {
                "type": "dict",
                "children": [
                    {
                        "type": "providers-enum",
                        "key": "provider",
                        "label": "Provider"
                    }
                ]
            }
        }
    ]
},{
    "type": "dict",
    "key": "deadline",
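For illustration only, a hedged sketch of the kind of data a filled-in Site Sync setting like the schema above could yield. The site names and provider codes here are invented for the example, not defaults shipped with OpenPype:

```python
# Hypothetical filled-in Site Sync setting matching the schema above:
# "sites" is a modifiable dict whose values carry a "provider" enum value.
site_sync_settings = {
    "enabled": True,
    "sites": {
        "studio_gdrive": {"provider": "gdrive"},           # invented site / code
        "workstation_cache": {"provider": "local_drive"},  # invented site / code
    },
}

# A value is valid only if its provider code is one of the enum's valid keys.
valid_provider_codes = {"", "gdrive", "local_drive"}  # assumed, not exhaustive
assert all(
    site["provider"] in valid_provider_codes
    for site in site_sync_settings["sites"].values()
)
```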
@@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring Pype version."""
__version__ = "3.0.0-rc3"
__version__ = "3.0.0-rc4"
@@ -1,6 +1,6 @@
[tool.poetry]
name = "OpenPype"
version = "3.0.0-rc3"
version = "3.0.0-rc4"
description = "Open VFX and Animation pipeline with support."
authors = ["OpenPype Team <info@openpype.io>"]
license = "MIT License"
@@ -97,9 +97,9 @@ url = "https://distribute.openpype.io/thirdparty/oiio_tools-2.2.0-windows.zip"
hash = "fd2e00278e01e85dcee7b4a6969d1a16f13016ec16700fb0366dbb1b1f3c37ad"

[openpype.thirdparty.oiio.linux]
url = "https://distribute.openpype.io/thirdparty/oiio-2.2.0-linux.tgz"
hash = "sha256:..."
url = "https://distribute.openpype.io/thirdparty/oiio_tools-2.2.12-linux.tgz"
hash = "de63a8bf7f6c45ff59ecafeba13123f710c2cbc1783ec9e0b938e980d4f5c37f"

[openpype.thirdparty.oiio.darwin]
url = "https://distribute.openpype.io/thirdparty/oiio-2.2.0-darwin.tgz"
hash = "sha256:..."
hash = "sha256:..."
@ -5,72 +5,75 @@ import sys
|
|||
from collections import namedtuple
|
||||
from pathlib import Path
|
||||
from zipfile import ZipFile
|
||||
from uuid import uuid4
|
||||
|
||||
import appdirs
|
||||
import pytest
|
||||
|
||||
from igniter.bootstrap_repos import BootstrapRepos
|
||||
from igniter.bootstrap_repos import PypeVersion
|
||||
from pype.lib import OpenPypeSettingsRegistry
|
||||
from igniter.bootstrap_repos import OpenPypeVersion
|
||||
from igniter.user_settings import OpenPypeSettingsRegistry
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def fix_bootstrap(tmp_path, pytestconfig):
|
||||
"""This will fix BoostrapRepos with temp paths."""
|
||||
bs = BootstrapRepos()
|
||||
bs.live_repo_dir = pytestconfig.rootpath / 'repos'
|
||||
bs.data_dir = tmp_path
|
||||
return bs
|
||||
|
||||
|
||||
def test_pype_version():
|
||||
v1 = PypeVersion(1, 2, 3)
|
||||
def test_openpype_version():
|
||||
"""Test determination of OpenPype versions."""
|
||||
v1 = OpenPypeVersion(1, 2, 3)
|
||||
assert str(v1) == "1.2.3"
|
||||
|
||||
v2 = PypeVersion(1, 2, 3, client="x")
|
||||
v2 = OpenPypeVersion(1, 2, 3, client="x")
|
||||
assert str(v2) == "1.2.3-x"
|
||||
assert v1 < v2
|
||||
|
||||
v3 = PypeVersion(1, 2, 3, variant="staging")
|
||||
v3 = OpenPypeVersion(1, 2, 3, variant="staging")
|
||||
assert str(v3) == "1.2.3-staging"
|
||||
|
||||
v4 = PypeVersion(1, 2, 3, variant="staging", client="client")
|
||||
v4 = OpenPypeVersion(1, 2, 3, variant="staging", client="client")
|
||||
assert str(v4) == "1.2.3-client-staging"
|
||||
assert v3 < v4
|
||||
assert v1 < v4
|
||||
|
||||
v5 = PypeVersion(1, 2, 3, variant="foo", client="x")
|
||||
v5 = OpenPypeVersion(1, 2, 3, variant="foo", client="x")
|
||||
assert str(v5) == "1.2.3-x"
|
||||
assert v4 < v5
|
||||
|
||||
v6 = PypeVersion(1, 2, 3, variant="foo")
|
||||
v6 = OpenPypeVersion(1, 2, 3, variant="foo")
|
||||
assert str(v6) == "1.2.3"
|
||||
|
||||
v7 = PypeVersion(2, 0, 0)
|
||||
v7 = OpenPypeVersion(2, 0, 0)
|
||||
assert v1 < v7
|
||||
|
||||
v8 = PypeVersion(0, 1, 5)
|
||||
v8 = OpenPypeVersion(0, 1, 5)
|
||||
assert v8 < v7
|
||||
|
||||
v9 = PypeVersion(1, 2, 4)
|
||||
v9 = OpenPypeVersion(1, 2, 4)
|
||||
assert v9 > v1
|
||||
|
||||
v10 = PypeVersion(1, 2, 2)
|
||||
v10 = OpenPypeVersion(1, 2, 2)
|
||||
assert v10 < v1
|
||||
|
||||
v11 = PypeVersion(1, 2, 3, path=Path("/foo/bar"))
|
||||
v11 = OpenPypeVersion(1, 2, 3, path=Path("/foo/bar"))
|
||||
assert v10 < v11
|
||||
|
||||
assert v5 == v2
|
||||
|
||||
sort_versions = [
|
||||
PypeVersion(3, 2, 1),
|
||||
PypeVersion(1, 2, 3),
|
||||
PypeVersion(0, 0, 1),
|
||||
PypeVersion(4, 8, 10),
|
||||
PypeVersion(4, 8, 20),
|
||||
PypeVersion(4, 8, 9),
|
||||
PypeVersion(1, 2, 3, variant="staging"),
|
||||
PypeVersion(1, 2, 3, client="client")
|
||||
OpenPypeVersion(3, 2, 1),
|
||||
OpenPypeVersion(1, 2, 3),
|
||||
OpenPypeVersion(0, 0, 1),
|
||||
OpenPypeVersion(4, 8, 10),
|
||||
OpenPypeVersion(4, 8, 20),
|
||||
OpenPypeVersion(4, 8, 9),
|
||||
OpenPypeVersion(1, 2, 3, variant="staging"),
|
||||
OpenPypeVersion(1, 2, 3, client="client")
|
||||
]
|
||||
res = sorted(sort_versions)
|
||||
|
||||
|
|
@ -88,25 +91,22 @@ def test_pype_version():
|
|||
"5.6.3",
|
||||
"5.6.3-staging"
|
||||
]
|
||||
res_versions = []
|
||||
for v in str_versions:
|
||||
res_versions.append(PypeVersion(version=v))
|
||||
|
||||
res_versions = [OpenPypeVersion(version=v) for v in str_versions]
|
||||
sorted_res_versions = sorted(res_versions)
|
||||
|
||||
assert str(sorted_res_versions[0]) == str_versions[0]
|
||||
assert str(sorted_res_versions[-1]) == str_versions[5]
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
_ = PypeVersion()
|
||||
_ = OpenPypeVersion()
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
_ = PypeVersion(major=1)
|
||||
_ = OpenPypeVersion(major=1)
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
_ = PypeVersion(version="booobaa")
|
||||
_ = OpenPypeVersion(version="booobaa")
|
||||
|
||||
v11 = PypeVersion(version="4.6.7-client-staging")
|
||||
v11 = OpenPypeVersion(version="4.6.7-client-staging")
|
||||
assert v11.major == 4
|
||||
assert v11.minor == 6
|
||||
assert v11.subversion == 7
|
||||
|
|
@ -115,15 +115,15 @@ def test_pype_version():
|
|||
|
||||
|
||||
def test_get_main_version():
|
||||
ver = PypeVersion(1, 2, 3, variant="staging", client="foo")
|
||||
ver = OpenPypeVersion(1, 2, 3, variant="staging", client="foo")
|
||||
assert ver.get_main_version() == "1.2.3"
|
||||
|
||||
|
||||
def test_get_version_path_from_list():
|
||||
versions = [
|
||||
PypeVersion(1, 2, 3, path=Path('/foo/bar')),
|
||||
PypeVersion(3, 4, 5, variant="staging", path=Path("/bar/baz")),
|
||||
PypeVersion(6, 7, 8, client="x", path=Path("boo/goo"))
|
||||
OpenPypeVersion(1, 2, 3, path=Path('/foo/bar')),
|
||||
OpenPypeVersion(3, 4, 5, variant="staging", path=Path("/bar/baz")),
|
||||
OpenPypeVersion(6, 7, 8, client="x", path=Path("boo/goo"))
|
||||
]
|
||||
path = BootstrapRepos.get_version_path_from_list(
|
||||
"3.4.5-staging", versions)
|
||||
|
|
@ -131,7 +131,7 @@ def test_get_version_path_from_list():
|
|||
assert path == Path("/bar/baz")
|
||||
|
||||
|
||||
def test_search_string_for_pype_version(printer):
|
||||
def test_search_string_for_openpype_version(printer):
|
||||
strings = [
|
||||
("3.0.1", True),
|
||||
("foo-3.0", False),
|
||||
|
|
@ -142,106 +142,112 @@ def test_search_string_for_pype_version(printer):
|
|||
]
|
||||
for ver_string in strings:
|
||||
printer(f"testing {ver_string[0]} should be {ver_string[1]}")
|
||||
assert PypeVersion.version_in_str(ver_string[0])[0] == ver_string[1]
|
||||
assert OpenPypeVersion.version_in_str(ver_string[0])[0] == \
|
||||
ver_string[1]
|
||||
|
||||
|
||||
@pytest.mark.slow
|
||||
def test_install_live_repos(fix_bootstrap, printer):
|
||||
pype_version = fix_bootstrap.create_version_from_live_code()
|
||||
def test_install_live_repos(fix_bootstrap, printer, monkeypatch, pytestconfig):
|
||||
monkeypatch.setenv("OPENPYPE_ROOT", pytestconfig.rootpath.as_posix())
|
||||
monkeypatch.setenv("OPENPYPE_DATABASE_NAME", str(uuid4()))
|
||||
openpype_version = fix_bootstrap.create_version_from_live_code()
|
||||
sep = os.path.sep
|
||||
expected_paths = [
|
||||
f"{pype_version.path}{sep}repos{sep}avalon-core",
|
||||
f"{pype_version.path}{sep}repos{sep}avalon-unreal-integration",
|
||||
f"{pype_version.path}"
|
||||
f"{openpype_version.path}{sep}repos{sep}avalon-core",
|
||||
f"{openpype_version.path}{sep}repos{sep}avalon-unreal-integration",
|
||||
f"{openpype_version.path}"
|
||||
]
|
||||
printer("testing zip creation")
|
||||
assert os.path.exists(pype_version.path), "zip archive was not created"
|
||||
fix_bootstrap.add_paths_from_archive(pype_version.path)
|
||||
assert os.path.exists(openpype_version.path), "zip archive was not created"
|
||||
fix_bootstrap.add_paths_from_archive(openpype_version.path)
|
||||
for ep in expected_paths:
|
||||
assert ep in sys.path, f"{ep} not set correctly"
|
||||
|
||||
printer("testing pype imported")
|
||||
del sys.modules["pype"]
|
||||
import pype # noqa: F401
|
||||
printer("testing openpype imported")
|
||||
try:
|
||||
del sys.modules["openpype"]
|
||||
except KeyError:
|
||||
# wasn't imported before
|
||||
pass
|
||||
import openpype # noqa: F401
|
||||
|
||||
# test if pype is imported from specific location in zip
|
||||
assert "pype" in sys.modules.keys(), "Pype not imported"
|
||||
assert sys.modules["pype"].__file__ == \
|
||||
f"{pype_version.path}{sep}pype{sep}__init__.py"
|
||||
# test if openpype is imported from specific location in zip
|
||||
assert "openpype" in sys.modules.keys(), "OpenPype not imported"
|
||||
assert sys.modules["openpype"].__file__ == \
|
||||
f"{openpype_version.path}{sep}openpype{sep}__init__.py"
|
||||
|
||||
|
||||
def test_find_pype(fix_bootstrap, tmp_path_factory, monkeypatch, printer):
|
||||
|
||||
test_pype = namedtuple("Pype", "prefix version suffix type valid")
|
||||
def test_find_openpype(fix_bootstrap, tmp_path_factory, monkeypatch, printer):
|
||||
test_openpype = namedtuple("OpenPype", "prefix version suffix type valid")
|
||||
|
||||
test_versions_1 = [
|
||||
test_pype(prefix="foo-v", version="5.5.1",
|
||||
suffix=".zip", type="zip", valid=False),
|
||||
test_pype(prefix="bar-v", version="5.5.2-client",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_pype(prefix="baz-v", version="5.5.3-client-strange",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_pype(prefix="bum-v", version="5.5.4-staging",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_pype(prefix="zum-v", version="5.5.5-client-staging",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_pype(prefix="fam-v", version="5.6.3",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_pype(prefix="foo-v", version="5.6.3-staging",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_pype(prefix="fim-v", version="5.6.3",
|
||||
suffix=".zip", type="zip", valid=False),
|
||||
test_pype(prefix="foo-v", version="5.6.4",
|
||||
suffix=".txt", type="txt", valid=False),
|
||||
test_pype(prefix="foo-v", version="5.7.1",
|
||||
suffix="", type="dir", valid=False),
|
||||
test_openpype(prefix="foo-v", version="5.5.1",
|
||||
suffix=".zip", type="zip", valid=False),
|
||||
test_openpype(prefix="bar-v", version="5.5.2-client",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_openpype(prefix="baz-v", version="5.5.3-client-strange",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_openpype(prefix="bum-v", version="5.5.4-staging",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_openpype(prefix="zum-v", version="5.5.5-client-staging",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_openpype(prefix="fam-v", version="5.6.3",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_openpype(prefix="foo-v", version="5.6.3-staging",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_openpype(prefix="fim-v", version="5.6.3",
|
||||
suffix=".zip", type="zip", valid=False),
|
||||
test_openpype(prefix="foo-v", version="5.6.4",
|
||||
suffix=".txt", type="txt", valid=False),
|
||||
test_openpype(prefix="foo-v", version="5.7.1",
|
||||
suffix="", type="dir", valid=False),
|
||||
]
|
||||
|
||||
test_versions_2 = [
|
||||
test_pype(prefix="foo-v", version="10.0.0",
|
||||
suffix=".txt", type="txt", valid=False),
|
||||
test_pype(prefix="lom-v", version="7.2.6",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_pype(prefix="bom-v", version="7.2.7-client",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_pype(prefix="woo-v", version="7.2.8-client-strange",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_pype(prefix="loo-v", version="7.2.10-client-staging",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_pype(prefix="kok-v", version="7.0.1",
|
||||
suffix=".zip", type="zip", valid=True)
|
||||
test_openpype(prefix="foo-v", version="10.0.0",
|
||||
suffix=".txt", type="txt", valid=False),
|
||||
test_openpype(prefix="lom-v", version="7.2.6",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_openpype(prefix="bom-v", version="7.2.7-client",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_openpype(prefix="woo-v", version="7.2.8-client-strange",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_openpype(prefix="loo-v", version="7.2.10-client-staging",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_openpype(prefix="kok-v", version="7.0.1",
|
||||
suffix=".zip", type="zip", valid=True)
|
||||
]
|
||||
|
||||
test_versions_3 = [
|
||||
test_pype(prefix="foo-v", version="3.0.0",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_pype(prefix="goo-v", version="3.0.1",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_pype(prefix="hoo-v", version="4.1.0",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_pype(prefix="foo-v", version="4.1.2",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_pype(prefix="foo-v", version="3.0.1-client",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_pype(prefix="foo-v", version="3.0.1-client-strange",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_pype(prefix="foo-v", version="3.0.1-staging",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_pype(prefix="foo-v", version="3.0.1-client-staging",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_pype(prefix="foo-v", version="3.2.0",
|
||||
suffix=".zip", type="zip", valid=True)
|
||||
test_openpype(prefix="foo-v", version="3.0.0",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_openpype(prefix="goo-v", version="3.0.1",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_openpype(prefix="hoo-v", version="4.1.0",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_openpype(prefix="foo-v", version="4.1.2",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_openpype(prefix="foo-v", version="3.0.1-client",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_openpype(prefix="foo-v", version="3.0.1-client-strange",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_openpype(prefix="foo-v", version="3.0.1-staging",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_openpype(prefix="foo-v", version="3.0.1-client-staging",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_openpype(prefix="foo-v", version="3.2.0",
|
||||
suffix=".zip", type="zip", valid=True)
|
||||
]
|
||||
|
||||
test_versions_4 = [
|
||||
test_pype(prefix="foo-v", version="10.0.0",
|
||||
suffix="", type="dir", valid=True),
|
||||
test_pype(prefix="lom-v", version="11.2.6",
|
||||
suffix=".zip", type="dir", valid=False),
|
||||
test_pype(prefix="bom-v", version="7.2.7-client",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_pype(prefix="woo-v", version="7.2.8-client-strange",
|
||||
suffix=".zip", type="txt", valid=False)
|
||||
test_openpype(prefix="foo-v", version="10.0.0",
|
||||
suffix="", type="dir", valid=True),
|
||||
test_openpype(prefix="lom-v", version="11.2.6",
|
||||
suffix=".zip", type="dir", valid=False),
|
||||
test_openpype(prefix="bom-v", version="7.2.7-client",
|
||||
suffix=".zip", type="zip", valid=True),
|
||||
test_openpype(prefix="woo-v", version="7.2.8-client-strange",
|
||||
suffix=".zip", type="txt", valid=False)
|
||||
]
|
||||
|
||||
def _create_invalid_zip(path: Path):
|
||||
|
|
@ -251,7 +257,7 @@ def test_find_pype(fix_bootstrap, tmp_path_factory, monkeypatch, printer):
|
|||
def _create_valid_zip(path: Path, version: str):
|
||||
with ZipFile(path, "w") as zf:
|
||||
zf.writestr(
|
||||
"pype/version.py", f"__version__ = '{version}'\n\n")
|
||||
"openpype/version.py", f"__version__ = '{version}'\n\n")
|
||||
|
||||
def _create_invalid_dir(path: Path):
|
||||
path.mkdir(parents=True, exist_ok=True)
|
||||
|
|
@ -259,9 +265,9 @@ def test_find_pype(fix_bootstrap, tmp_path_factory, monkeypatch, printer):
|
|||
fp.write("invalid")
|
||||
|
||||
def _create_valid_dir(path: Path, version: str):
|
||||
pype_path = path / "pype"
|
||||
version_path = pype_path / "version.py"
|
||||
pype_path.mkdir(parents=True, exist_ok=True)
|
||||
openpype_path = path / "openpype"
|
||||
version_path = openpype_path / "version.py"
|
||||
openpype_path.mkdir(parents=True, exist_ok=True)
|
||||
with open(version_path, "w") as fp:
|
||||
fp.write(f"__version__ = '{version}'\n\n")
|
||||
|
||||
|
|
@ -283,15 +289,15 @@ def test_find_pype(fix_bootstrap, tmp_path_factory, monkeypatch, printer):
|
|||
with open(test_path, "w") as fp:
|
||||
fp.write("foo")
|
||||
|
||||
# in PYPE_PATH
|
||||
# in OPENPYPE_PATH
|
||||
e_path = tmp_path_factory.mktemp("environ")
|
||||
|
||||
# create files and directories for test
|
||||
for test_file in test_versions_1:
|
||||
_build_test_item(e_path, test_file)
|
||||
|
||||
# in pypePath registry
|
||||
p_path = tmp_path_factory.mktemp("pypePath")
|
||||
# in openPypePath registry
|
||||
p_path = tmp_path_factory.mktemp("openPypePath")
|
||||
for test_file in test_versions_2:
|
||||
_build_test_item(p_path, test_file)
|
||||
|
||||
|
|
@ -310,10 +316,10 @@ def test_find_pype(fix_bootstrap, tmp_path_factory, monkeypatch, printer):
|
|||
for test_file in test_versions_4:
|
||||
_build_test_item(dir_path, test_file)
|
||||
|
||||
printer("testing finding Pype in given path ...")
|
||||
result = fix_bootstrap.find_pype(g_path, include_zips=True)
|
||||
printer("testing finding OpenPype in given path ...")
|
||||
result = fix_bootstrap.find_openpype(g_path, include_zips=True)
|
||||
# we should have results as file were created
|
||||
assert result is not None, "no Pype version found"
|
||||
assert result is not None, "no OpenPype version found"
|
||||
# latest item in `result` should be latest version found.
|
||||
expected_path = Path(
|
||||
g_path / "{}{}{}".format(
|
||||
|
|
@ -323,13 +329,14 @@ def test_find_pype(fix_bootstrap, tmp_path_factory, monkeypatch, printer):
|
|||
)
|
||||
)
|
||||
assert result, "nothing found"
|
||||
assert result[-1].path == expected_path, "not a latest version of Pype 3"
|
||||
assert result[-1].path == expected_path, ("not a latest version of "
|
||||
"OpenPype 3")
|
||||
|
||||
monkeypatch.setenv("PYPE_PATH", e_path.as_posix())
|
||||
monkeypatch.setenv("OPENPYPE_PATH", e_path.as_posix())
|
||||
|
||||
result = fix_bootstrap.find_pype(include_zips=True)
|
||||
result = fix_bootstrap.find_openpype(include_zips=True)
|
||||
# we should have results as file were created
|
||||
assert result is not None, "no Pype version found"
|
||||
assert result is not None, "no OpenPype version found"
|
||||
# latest item in `result` should be latest version found.
|
||||
expected_path = Path(
|
||||
e_path / "{}{}{}".format(
|
||||
|
|
@ -339,21 +346,23 @@ def test_find_pype(fix_bootstrap, tmp_path_factory, monkeypatch, printer):
|
|||
)
|
||||
)
|
||||
assert result, "nothing found"
|
||||
assert result[-1].path == expected_path, "not a latest version of Pype 1"
|
||||
assert result[-1].path == expected_path, ("not a latest version of "
|
||||
"OpenPype 1")
|
||||
|
||||
monkeypatch.delenv("PYPE_PATH", raising=False)
|
||||
monkeypatch.delenv("OPENPYPE_PATH", raising=False)
|
||||
|
||||
# mock appdirs user_data_dir
|
||||
def mock_user_data_dir(*args, **kwargs):
|
||||
"""Mock local app data dir."""
|
||||
return d_path.as_posix()
|
||||
|
||||
monkeypatch.setattr(appdirs, "user_data_dir", mock_user_data_dir)
|
||||
fix_bootstrap.registry = OpenPypeSettingsRegistry()
|
||||
fix_bootstrap.registry.set_item("pypePath", d_path.as_posix())
|
||||
fix_bootstrap.registry.set_item("openPypePath", d_path.as_posix())
|
||||
|
||||
result = fix_bootstrap.find_pype(include_zips=True)
|
||||
result = fix_bootstrap.find_openpype(include_zips=True)
|
||||
# we should have results as file were created
|
||||
assert result is not None, "no Pype version found"
|
||||
assert result is not None, "no OpenPype version found"
|
||||
# latest item in `result` should be latest version found.
|
||||
expected_path = Path(
|
||||
d_path / "{}{}{}".format(
|
||||
|
|
@ -363,10 +372,11 @@ def test_find_pype(fix_bootstrap, tmp_path_factory, monkeypatch, printer):
|
|||
)
|
||||
)
|
||||
assert result, "nothing found"
|
||||
assert result[-1].path == expected_path, "not a latest version of Pype 2"
|
||||
assert result[-1].path == expected_path, ("not a latest version of "
|
||||
"OpenPype 2")
|
||||
|
||||
result = fix_bootstrap.find_pype(e_path, include_zips=True)
|
||||
assert result is not None, "no Pype version found"
|
||||
result = fix_bootstrap.find_openpype(e_path, include_zips=True)
|
||||
assert result is not None, "no OpenPype version found"
|
||||
expected_path = Path(
|
||||
e_path / "{}{}{}".format(
|
||||
test_versions_1[5].prefix,
|
||||
|
|
@ -374,10 +384,11 @@ def test_find_pype(fix_bootstrap, tmp_path_factory, monkeypatch, printer):
|
|||
test_versions_1[5].suffix
|
||||
)
|
||||
)
|
||||
assert result[-1].path == expected_path, "not a latest version of Pype 1"
|
||||
assert result[-1].path == expected_path, ("not a latest version of "
|
||||
"OpenPype 1")
|
||||
|
||||
result = fix_bootstrap.find_pype(dir_path, include_zips=True)
|
||||
assert result is not None, "no Pype versions found"
|
||||
result = fix_bootstrap.find_openpype(dir_path, include_zips=True)
|
||||
assert result is not None, "no OpenPype versions found"
|
||||
expected_path = Path(
|
||||
dir_path / "{}{}{}".format(
|
||||
test_versions_4[0].prefix,
|
||||
|
|
@ -385,4 +396,5 @@ def test_find_pype(fix_bootstrap, tmp_path_factory, monkeypatch, printer):
|
|||
test_versions_4[0].suffix
|
||||
)
|
||||
)
|
||||
assert result[-1].path == expected_path, "not a latest version of Pype 4"
|
||||
assert result[-1].path == expected_path, ("not a latest version of "
|
||||
"OpenPype 4")
|
||||
|
|
|
|||
|
|
@ -1,5 +1,7 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Test suite for User Settings."""
|
||||
import pytest
|
||||
from pype.lib import (
|
||||
from igniter.user_settings import (
|
||||
IniSettingRegistry,
|
||||
JSONSettingRegistry,
|
||||
OpenPypeSecureRegistry
|
||||
|
|
@ -9,9 +11,9 @@ import configparser
|
|||
|
||||
|
||||
@pytest.fixture
|
||||
def secure_registry(tmpdir):
|
||||
def secure_registry():
|
||||
name = "pypetest_{}".format(str(uuid4()))
|
||||
r = OpenPypeSecureRegistry(name, tmpdir)
|
||||
r = OpenPypeSecureRegistry(name)
|
||||
yield r
|
||||
|
||||
|
||||
|
|
@ -74,11 +74,19 @@ function Install-Poetry() {
|
|||
|
||||
$art = @"
|
||||
|
||||
▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
|
||||
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
|
||||
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
|
||||
.---= [ by Pype Club ] =---.
|
||||
https://openpype.io
|
||||
. . .. . ..
|
||||
_oOOP3OPP3Op_. .
|
||||
.PPpo~· ·· ~2p. ·· ···· · ·
|
||||
·Ppo · .pPO3Op.· · O:· · · ·
|
||||
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
|
||||
·~OP 3PO· .Op3 : · ·· _____ _____ _____
|
||||
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
|
||||
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
|
||||
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
|
||||
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
|
||||
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
|
||||
~P3·OPPPO3OP~ · ·· ·
|
||||
· ' '· · ·· · · · ·· ·
|
||||
|
||||
"@
|
||||
|
||||
|
|
|
|||
|
|
@ -6,11 +6,19 @@
|
|||
art () {
|
||||
cat <<-EOF
|
||||
|
||||
▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
|
||||
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
|
||||
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
|
||||
.---= [ by Pype Club ] =---.
|
||||
https://openpype.io
|
||||
. . .. . ..
|
||||
_oOOP3OPP3Op_. .
|
||||
.PPpo~· ·· ~2p. ·· ···· · ·
|
||||
·Ppo · .pPO3Op.· · O:· · · ·
|
||||
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
|
||||
·~OP 3PO· .Op3 : · ·· _____ _____ _____
|
||||
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
|
||||
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
|
||||
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
|
||||
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
|
||||
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
|
||||
~P3·OPPPO3OP~ · ·· ·
|
||||
· ' '· · ·· · · · ·· ·
|
||||
|
||||
EOF
|
||||
}
|
||||
|
|
@ -65,7 +73,7 @@ detect_python () {
|
|||
local version_command
|
||||
version_command="import sys;print('{0}.{1}'.format(sys.version_info[0], sys.version_info[1]))"
|
||||
local python_version
|
||||
python_version="$(python3 <<< ${version_command})"
|
||||
python_version="$(python <<< ${version_command})"
|
||||
oIFS="$IFS"
|
||||
IFS=.
|
||||
set -- $python_version
|
||||
|
|
@ -77,7 +85,7 @@ detect_python () {
|
|||
echo -e "${BIWhite}[${RST} ${BIGreen}$1.$2${RST} ${BIWhite}]${RST}"
|
||||
fi
|
||||
else
|
||||
command -v python3 >/dev/null 2>&1 || { echo -e "${BIRed}$1.$2$ - ${BIRed}FAILED${RST} ${BIYellow}Version is old and unsupported${RST}"; return 1; }
|
||||
command -v python >/dev/null 2>&1 || { echo -e "${BIRed}$1.$2$ - ${BIRed}FAILED${RST} ${BIYellow}Version is old and unsupported${RST}"; return 1; }
|
||||
fi
|
||||
}
|
||||
|
||||
|
|
@ -123,7 +131,7 @@ realpath () {
|
|||
install_poetry () {
|
||||
echo -e "${BIGreen}>>>${RST} Installing Poetry ..."
|
||||
command -v curl >/dev/null 2>&1 || { echo -e "${BIRed}!!!${RST}${BIYellow} Missing ${RST}${BIBlue}curl${BIYellow} command.${RST}"; return 1; }
|
||||
curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python3 -
|
||||
curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python -
|
||||
}
|
||||
|
||||
# Main
|
||||
|
|
@ -138,7 +146,7 @@ main () {
|
|||
pushd "$openpype_root" > /dev/null || return > /dev/null
|
||||
|
||||
version_command="import os;exec(open(os.path.join('$openpype_root', 'openpype', 'version.py')).read());print(__version__);"
|
||||
openpype_version="$(python3 <<< ${version_command})"
|
||||
openpype_version="$(python <<< ${version_command})"
|
||||
|
||||
_inside_openpype_tool="1"
|
||||
|
||||
|
|
@ -169,11 +177,11 @@ main () {
|
|||
|
||||
echo -e "${BIGreen}>>>${RST} Building ..."
|
||||
if [[ "$OSTYPE" == "linux-gnu"* ]]; then
|
||||
poetry run python3 "$openpype_root/setup.py" build > "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return; }
|
||||
poetry run python "$openpype_root/setup.py" build > "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return; }
|
||||
elif [[ "$OSTYPE" == "darwin"* ]]; then
|
||||
poetry run python3 "$openpype_root/setup.py" bdist_mac > "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return; }
|
||||
poetry run python "$openpype_root/setup.py" bdist_mac > "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return; }
|
||||
fi
|
||||
poetry run python3 "$openpype_root/tools/build_dependencies.py"
|
||||
poetry run python "$openpype_root/tools/build_dependencies.py"
|
||||
|
||||
if [[ "$OSTYPE" == "darwin"* ]]; then
|
||||
# fix code signing issue
|
||||
|
|
|
|||
|
|
@ -95,11 +95,21 @@ Set-Location -Path $openpype_root
|
|||
|
||||
$art = @"
|
||||
|
||||
▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
|
||||
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
|
||||
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
|
||||
.---= [ by Pype Club ] =---.
|
||||
https://openpype.io
|
||||
|
||||
. . .. . ..
|
||||
_oOOP3OPP3Op_. .
|
||||
.PPpo~· ·· ~2p. ·· ···· · ·
|
||||
·Ppo · .pPO3Op.· · O:· · · ·
|
||||
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
|
||||
·~OP 3PO· .Op3 : · ·· _____ _____ _____
|
||||
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
|
||||
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
|
||||
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
|
||||
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
|
||||
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
|
||||
~P3·OPPPO3OP~ · ·· ·
|
||||
· ' '· · ·· · · · ·· ·
|
||||
|
||||
|
||||
"@
|
||||
if (-not (Test-Path 'env:_INSIDE_OPENPYPE_TOOL')) {
|
||||
|
|
|
|||
|
|
@ -7,11 +7,19 @@
|
|||
art () {
|
||||
cat <<-EOF
|
||||
|
||||
▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
|
||||
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
|
||||
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
|
||||
.---= [ by Pype Club ] =---.
|
||||
https://openpype.io
|
||||
. . .. . ..
|
||||
_oOOP3OPP3Op_. .
|
||||
.PPpo~· ·· ~2p. ·· ···· · ·
|
||||
·Ppo · .pPO3Op.· · O:· · · ·
|
||||
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
|
||||
·~OP 3PO· .Op3 : · ·· _____ _____ _____
|
||||
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
|
||||
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
|
||||
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
|
||||
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
|
||||
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
|
||||
~P3·OPPPO3OP~ · ·· ·
|
||||
· ' '· · ·· · · · ·· ·
|
||||
|
||||
EOF
|
||||
}
|
||||
|
|
@ -81,7 +89,7 @@ done
|
|||
detect_python () {
|
||||
echo -e "${BIGreen}>>>${RST} Using python \c"
|
||||
local version_command="import sys;print('{0}.{1}'.format(sys.version_info[0], sys.version_info[1]))"
|
||||
local python_version="$(python3 <<< ${version_command})"
|
||||
local python_version="$(python <<< ${version_command})"
|
||||
oIFS="$IFS"
|
||||
IFS=.
|
||||
set -- $python_version
|
||||
|
|
@ -93,15 +101,16 @@ detect_python () {
|
|||
echo -e "${BIWhite}[${RST} ${BIGreen}$1.$2${RST} ${BIWhite}]${RST}"
|
||||
fi
|
||||
else
|
||||
command -v python3 >/dev/null 2>&1 || { echo -e "${BIRed}$1.$2$ - ${BIRed}FAILED${RST} ${BIYellow}Version is old and unsupported${RST}"; return 1; }
|
||||
command -v python >/dev/null 2>&1 || { echo -e "${BIRed}$1.$2$ - ${BIRed}FAILED${RST} ${BIYellow}Version is old and unsupported${RST}"; return 1; }
|
||||
fi
|
||||
}
|
||||
|
||||
install_poetry () {
|
||||
echo -e "${BIGreen}>>>${RST} Installing Poetry ..."
|
||||
export POETRY_HOME="$openpype_root/.poetry"
|
||||
command -v curl >/dev/null 2>&1 || { echo -e "${BIRed}!!!${RST}${BIYellow} Missing ${RST}${BIBlue}curl${BIYellow} command.${RST}"; return 1; }
|
||||
curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python3 -
|
||||
export PATH="$PATH:$HOME/.poetry/bin"
|
||||
curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python -
|
||||
export PATH="$PATH:$POETRY_HOME/bin"
|
||||
}
|
||||
|
||||
##############################################################################
|
||||
|
|
@ -177,7 +186,7 @@ main () {
|
|||
# cx_freeze will crash on missing __pychache__ on these but
|
||||
# reinstalling them solves the problem.
|
||||
echo -e "${BIGreen}>>>${RST} Fixing pycache bug ..."
|
||||
poetry run python -m pip install --upgrade pip
|
||||
poetry run python -m pip install --force-reinstall pip
|
||||
poetry run pip install --force-reinstall setuptools
|
||||
poetry run pip install --force-reinstall wheel
|
||||
poetry run python -m pip install --force-reinstall pip
|
||||
|
|
|
|||
|
|
@ -50,11 +50,19 @@ Set-Location -Path $openpype_root
|
|||
|
||||
$art = @"
|
||||
|
||||
▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
|
||||
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
|
||||
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
|
||||
.---= [ by Pype Club ] =---.
|
||||
https://openpype.io
|
||||
. . .. . ..
|
||||
_oOOP3OPP3Op_. .
|
||||
.PPpo~· ·· ~2p. ·· ···· · ·
|
||||
·Ppo · .pPO3Op.· · O:· · · ·
|
||||
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
|
||||
·~OP 3PO· .Op3 : · ·· _____ _____ _____
|
||||
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
|
||||
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
|
||||
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
|
||||
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
|
||||
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
|
||||
~P3·OPPPO3OP~ · ·· ·
|
||||
· ' '· · ·· · · · ·· ·
|
||||
|
||||
"@
|
||||
|
||||
|
|
|
|||
|
|
@ -8,11 +8,19 @@
|
|||
art () {
|
||||
cat <<-EOF
|
||||
|
||||
▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
|
||||
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
|
||||
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
|
||||
.---= [ by Pype Club ] =---.
|
||||
https://openpype.io
|
||||
. . .. . ..
|
||||
_oOOP3OPP3Op_. .
|
||||
.PPpo~· ·· ~2p. ·· ···· · ·
|
||||
·Ppo · .pPO3Op.· · O:· · · ·
|
||||
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
|
||||
·~OP 3PO· .Op3 : · ·· _____ _____ _____
|
||||
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
|
||||
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
|
||||
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
|
||||
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
|
||||
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
|
||||
~P3·OPPPO3OP~ · ·· ·
|
||||
· ' '· · ·· · · · ·· ·
|
||||
|
||||
EOF
|
||||
}
|
||||
|
|
|
|||
65
tools/docker_build.sh
Executable file
@@ -0,0 +1,65 @@
#!/usr/bin/env bash

# Colors for terminal

RST='\033[0m'             # Text Reset
BIGreen='\033[1;92m'      # Green
BIYellow='\033[1;93m'     # Yellow
BIRed='\033[1;91m'        # Red

##############################################################################
# Return absolute path
# Globals:
#   None
# Arguments:
#   Path to resolve
# Returns:
#   None
###############################################################################
realpath () {
  echo $(cd $(dirname "$1"); pwd)/$(basename "$1")
}

# Main
main () {
  openpype_root=$(realpath $(dirname $(dirname "${BASH_SOURCE[0]}")))
  pushd "$openpype_root" > /dev/null || return > /dev/null

  echo -e "${BIYellow}---${RST} Cleaning build directory ..."
  rm -rf "$openpype_root/build" && mkdir "$openpype_root/build" > /dev/null

  version_command="import os;exec(open(os.path.join('$openpype_root', 'openpype', 'version.py')).read());print(__version__);"
  openpype_version="$(python3 <<< ${version_command})"

  echo -e "${BIGreen}>>>${RST} Running docker build ..."
  docker build --pull --no-cache -t pypeclub/openpype:$openpype_version .
  if [ $? -ne 0 ] ; then
    echo -e "${BIRed}!!!${RST} Docker build failed."
    return 1
  fi

  echo -e "${BIGreen}>>>${RST} Copying build from container ..."
  echo -e "${BIYellow}---${RST} Creating container from pypeclub/openpype:$openpype_version ..."
  id="$(docker create -ti pypeclub/openpype:$openpype_version bash)"
  if [ $? -ne 0 ] ; then
    echo -e "${BIRed}!!!${RST} Cannot create just built container."
    return 1
  fi
  echo -e "${BIYellow}---${RST} Copying ..."
  docker cp "$id:/opt/openpype/build/exe.linux-x86_64-3.7" "$openpype_root/build"
  if [ $? -ne 0 ] ; then
    echo -e "${BIRed}!!!${RST} Copying failed."
    return 1
  fi

  echo -e "${BIGreen}>>>${RST} Fixing user ownership ..."
  username="$(logname)"
  chown -R $username ./build

  echo -e "${BIGreen}>>>${RST} All done, you can delete container:"
  echo -e "${BIYellow}$id${RST}"
}

return_code=0
main || return_code=$?
exit $return_code
@ -5,13 +5,20 @@
|
|||
|
||||
art () {
|
||||
cat <<-EOF
|
||||
____________
|
||||
/\\ ___ \\
|
||||
\\ \\ \\/_\\ \\
|
||||
\\ \\ _____/ ______ ___ ___ ___
|
||||
\\ \\ \\___/ /\\ \\ \\ \\\\ \\\\ \\
|
||||
\\ \\____\\ \\ \\_____\\ \\__\\\\__\\\\__\\
|
||||
\\/____/ \\/_____/ . PYPE Club .
|
||||
|
||||
. . .. . ..
|
||||
_oOOP3OPP3Op_. .
|
||||
.PPpo~· ·· ~2p. ·· ···· · ·
|
||||
·Ppo · .pPO3Op.· · O:· · · ·
|
||||
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
|
||||
·~OP 3PO· .Op3 : · ·· _____ _____ _____
|
||||
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
|
||||
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
|
||||
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
|
||||
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
|
||||
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
|
||||
~P3·OPPPO3OP~ · ·· ·
|
||||
· ' '· · ·· · · · ·· ·
|
||||
|
||||
EOF
|
||||
}
|
||||
|
|
@ -51,53 +58,6 @@ BICyan='\033[1;96m' # Cyan
|
|||
BIWhite='\033[1;97m' # White
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Detect required version of python
|
||||
# Globals:
|
||||
# colors
|
||||
# PYTHON
|
||||
# Arguments:
|
||||
# None
|
||||
# Returns:
|
||||
# None
|
||||
###############################################################################
|
||||
detect_python () {
|
||||
echo -e "${BIGreen}>>>${RST} Using python \c"
|
||||
local version_command="import sys;print('{0}.{1}'.format(sys.version_info[0], sys.version_info[1]))"
|
||||
local python_version="$(python3 <<< ${version_command})"
|
||||
oIFS="$IFS"
|
||||
IFS=.
|
||||
set -- $python_version
|
||||
IFS="$oIFS"
|
||||
if [ "$1" -ge "3" ] && [ "$2" -ge "6" ] ; then
|
||||
if [ "$2" -gt "7" ] ; then
|
||||
echo -e "${BIWhite}[${RST} ${BIRed}$1.$2 ${BIWhite}]${RST} - ${BIRed}FAILED${RST} ${BIYellow}Version is new and unsupported, use${RST} ${BIPurple}3.7.x${RST}"; return 1;
|
||||
else
|
||||
echo -e "${BIWhite}[${RST} ${BIGreen}$1.$2${RST} ${BIWhite}]${RST}"
|
||||
fi
|
||||
PYTHON="python3"
|
||||
else
|
||||
command -v python3 >/dev/null 2>&1 || { echo -e "${BIRed}$1.$2$ - ${BIRed}FAILED${RST} ${BIYellow}Version is old and unsupported${RST}"; return 1; }
|
||||
fi
|
||||
}
|
||||
|
||||
##############################################################################
|
||||
# Clean pyc files in specified directory
|
||||
# Globals:
|
||||
# None
|
||||
# Arguments:
|
||||
# Optional path to clean
|
||||
# Returns:
|
||||
# None
|
||||
###############################################################################
|
||||
clean_pyc () {
|
||||
local path
|
||||
path=$pype_root
|
||||
echo -e "${BIGreen}>>>${RST} Cleaning pyc at [ ${BIWhite}$path${RST} ] ... \c"
|
||||
find "$path" -regex '^.*\(__pycache__\|\.py[co]\)$' -delete
|
||||
echo -e "${BIGreen}DONE${RST}"
|
||||
}
|
||||
|
||||
##############################################################################
|
||||
# Return absolute path
|
||||
# Globals:
|
||||
|
|
@ -140,7 +100,7 @@ main () {
|
|||
pushd "$openpype_root" > /dev/null || return > /dev/null
|
||||
|
||||
echo -e "${BIGreen}>>>${RST} Running Pype tool ..."
|
||||
poetry run python3 "$openpype_root/tools/fetch_thirdparty_libs.py"
|
||||
poetry run python "$openpype_root/tools/fetch_thirdparty_libs.py"
|
||||
}
|
||||
|
||||
main
|
||||
|
|
@ -30,12 +30,19 @@ Set-Location -Path $openpype_root
|
|||
|
||||
$art = @"
|
||||
|
||||
|
||||
▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
|
||||
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
|
||||
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
|
||||
.---= [ by Pype Club ] =---.
|
||||
https://openpype.io
|
||||
. . .. . ..
|
||||
_oOOP3OPP3Op_. .
|
||||
.PPpo~· ·· ~2p. ·· ···· · ·
|
||||
·Ppo · .pPO3Op.· · O:· · · ·
|
||||
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
|
||||
·~OP 3PO· .Op3 : · ·· _____ _____ _____
|
||||
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
|
||||
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
|
||||
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
|
||||
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
|
||||
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
|
||||
~P3·OPPPO3OP~ · ·· ·
|
||||
· ' '· · ·· · · · ·· ·
|
||||
|
||||
"@
|
||||
|
||||
|
|
|
|||
|
|
@ -7,11 +7,20 @@
|
|||
art () {
|
||||
cat <<-EOF
|
||||
|
||||
▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
|
||||
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
|
||||
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
|
||||
.---= [ by Pype Club ] =---.
|
||||
https://openpype.io
|
||||
. . .. . ..
|
||||
_oOOP3OPP3Op_. .
|
||||
.PPpo~· ·· ~2p. ·· ···· · ·
|
||||
·Ppo · .pPO3Op.· · O:· · · ·
|
||||
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
|
||||
·~OP 3PO· .Op3 : · ·· _____ _____ _____
|
||||
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
|
||||
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
|
||||
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
|
||||
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
|
||||
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
|
||||
~P3·OPPPO3OP~ · ·· ·
|
||||
· ' '· · ·· · · · ·· ·
|
||||
|
||||
EOF
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -13,11 +13,19 @@ PS> .\run_mongo.ps1
|
|||
|
||||
$art = @"
|
||||
|
||||
▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
|
||||
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
|
||||
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
|
||||
.---= [ by Pype Club ] =---.
|
||||
https://openpype.io
|
||||
. . .. . ..
|
||||
_oOOP3OPP3Op_. .
|
||||
.PPpo~· ·· ~2p. ·· ···· · ·
|
||||
·Ppo · .pPO3Op.· · O:· · · ·
|
||||
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
|
||||
·~OP 3PO· .Op3 : · ·· _____ _____ _____
|
||||
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
|
||||
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
|
||||
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
|
||||
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
|
||||
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
|
||||
~P3·OPPPO3OP~ · ·· ·
|
||||
· ' '· · ·· · · · ·· ·
|
||||
|
||||
"@
|
||||
|
||||
|
|
|
|||
|
|
@ -7,11 +7,19 @@
|
|||
art () {
|
||||
cat <<-EOF
|
||||
|
||||
▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
|
||||
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
|
||||
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
|
||||
.---= [ by Pype Club ] =---.
|
||||
https://openpype.io
|
||||
. . .. . ..
|
||||
_oOOP3OPP3Op_. .
|
||||
.PPpo~· ·· ~2p. ·· ···· · ·
|
||||
·Ppo · .pPO3Op.· · O:· · · ·
|
||||
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
|
||||
·~OP 3PO· .Op3 : · ·· _____ _____ _____
|
||||
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
|
||||
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
|
||||
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
|
||||
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
|
||||
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
|
||||
~P3·OPPPO3OP~ · ·· ·
|
||||
· ' '· · ·· · · · ·· ·
|
||||
|
||||
EOF
|
||||
}
|
||||
|
|
|
|||
|
|
@ -6,11 +6,19 @@
|
|||
art () {
|
||||
cat <<-EOF
|
||||
|
||||
▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
|
||||
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
|
||||
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
|
||||
.---= [ by Pype Club ] =---.
|
||||
https://openpype.io
|
||||
. . .. . ..
|
||||
_oOOP3OPP3Op_. .
|
||||
.PPpo~· ·· ~2p. ·· ···· · ·
|
||||
·Ppo · .pPO3Op.· · O:· · · ·
|
||||
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
|
||||
·~OP 3PO· .Op3 : · ·· _____ _____ _____
|
||||
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
|
||||
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
|
||||
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
|
||||
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
|
||||
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
|
||||
~P3·OPPPO3OP~ · ·· ·
|
||||
· ' '· · ·· · · · ·· ·
|
||||
|
||||
EOF
|
||||
}
|
||||
|
|
@ -50,23 +58,6 @@ BICyan='\033[1;96m' # Cyan
|
|||
BIWhite='\033[1;97m' # White
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Clean pyc files in specified directory
|
||||
# Globals:
|
||||
# None
|
||||
# Arguments:
|
||||
# Optional path to clean
|
||||
# Returns:
|
||||
# None
|
||||
###############################################################################
|
||||
clean_pyc () {
|
||||
local path
|
||||
path=$openpype_root
|
||||
echo -e "${BIGreen}>>>${RST} Cleaning pyc at [ ${BIWhite}$path${RST} ] ... \c"
|
||||
find "$path" -regex '^.*\(__pycache__\|\.py[co]\)$' -delete
|
||||
echo -e "${BIGreen}DONE${RST}"
|
||||
}
|
||||
|
||||
##############################################################################
|
||||
# Return absolute path
|
||||
# Globals:
|
||||
|
|
@ -82,9 +73,6 @@ realpath () {
|
|||
|
||||
# Main
|
||||
main () {
|
||||
echo -e "${BGreen}"
|
||||
art
|
||||
echo -e "${RST}"
|
||||
|
||||
# Directories
|
||||
openpype_root=$(realpath $(dirname $(dirname "${BASH_SOURCE[0]}")))
|
||||
|
|
|
|||
|
|
@ -32,11 +32,19 @@ function Show-PSWarning() {
|
|||
|
||||
$art = @"
|
||||
|
||||
▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
|
||||
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
|
||||
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
|
||||
.---= [ by Pype Club ] =---.
|
||||
https://openpype.io
|
||||
. . .. . ..
|
||||
_oOOP3OPP3Op_. .
|
||||
.PPpo~· ·· ~2p. ·· ···· · ·
|
||||
·Ppo · .pPO3Op.· · O:· · · ·
|
||||
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
|
||||
·~OP 3PO· .Op3 : · ·· _____ _____ _____
|
||||
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
|
||||
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
|
||||
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
|
||||
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
|
||||
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
|
||||
~P3·OPPPO3OP~ · ·· ·
|
||||
· ' '· · ·· · · · ·· ·
|
||||
|
||||
"@
|
||||
|
||||
|
|
|
|||
|
|
@ -6,11 +6,19 @@
|
|||
art () {
|
||||
cat <<-EOF
|
||||
|
||||
▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
|
||||
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
|
||||
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
|
||||
.---= [ by Pype Club ] =---.
|
||||
https://openpype.io
|
||||
. . .. . ..
|
||||
_oOOP3OPP3Op_. .
|
||||
.PPpo~· ·· ~2p. ·· ···· · ·
|
||||
·Ppo · .pPO3Op.· · O:· · · ·
|
||||
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
|
||||
·~OP 3PO· .Op3 : · ·· _____ _____ _____
|
||||
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
|
||||
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
|
||||
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
|
||||
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
|
||||
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
|
||||
~P3·OPPPO3OP~ · ·· ·
|
||||
· ' '· · ·· · · · ·· ·
|
||||
|
||||
EOF
|
||||
}
|
||||
|
|
@ -114,5 +122,4 @@ main () {
|
|||
PYTHONPATH=$original_pythonpath
|
||||
}
|
||||
|
||||
|
||||
|
||||
main
|
||||
|
|
|
|||
|
|
@ -1,20 +1,6 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# Run OpenPype Tray
|
||||
|
||||
|
||||
art () {
|
||||
cat <<-EOF
|
||||
|
||||
▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
|
||||
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
|
||||
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
|
||||
.---= [ by Pype Club ] =---.
|
||||
https://openpype.io
|
||||
|
||||
EOF
|
||||
}
|
||||
|
||||
# Colors for terminal
|
||||
|
||||
RST='\033[0m' # Text Reset
|
||||
|
|
@ -49,22 +35,6 @@ BIPurple='\033[1;95m' # Purple
|
|||
BICyan='\033[1;96m' # Cyan
|
||||
BIWhite='\033[1;97m' # White
|
||||
|
||||
##############################################################################
|
||||
# Clean pyc files in specified directory
|
||||
# Globals:
|
||||
# None
|
||||
# Arguments:
|
||||
# Optional path to clean
|
||||
# Returns:
|
||||
# None
|
||||
###############################################################################
|
||||
clean_pyc () {
|
||||
local path
|
||||
path=$openpype_root
|
||||
echo -e "${BIGreen}>>>${RST} Cleaning pyc at [ ${BIWhite}$path${RST} ] ... \c"
|
||||
find "$path" -regex '^.*\(__pycache__\|\.py[co]\)$' -delete
|
||||
echo -e "${BIGreen}DONE${RST}"
|
||||
}
|
||||
|
||||
##############################################################################
|
||||
# Return absolute path
|
||||
|
|
@ -81,11 +51,6 @@ realpath () {
|
|||
|
||||
# Main
|
||||
main () {
|
||||
echo -e "${BGreen}"
|
||||
art
|
||||
echo -e "${RST}"
|
||||
detect_python || return 1
|
||||
|
||||
# Directories
|
||||
openpype_root=$(realpath $(dirname $(dirname "${BASH_SOURCE[0]}")))
|
||||
|
||||
|
|
|
|||
|
|
@ -10,11 +10,19 @@ PS> .\update_submodules.ps1
|
|||
|
||||
$art = @"
|
||||
|
||||
▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
|
||||
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
|
||||
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
|
||||
.---= [ by Pype Club ] =---.
|
||||
https://openpype.io
|
||||
. . .. . ..
|
||||
_oOOP3OPP3Op_. .
|
||||
.PPpo~· ·· ~2p. ·· ···· · ·
|
||||
·Ppo · .pPO3Op.· · O:· · · ·
|
||||
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
|
||||
·~OP 3PO· .Op3 : · ·· _____ _____ _____
|
||||
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
|
||||
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
|
||||
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
|
||||
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
|
||||
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
|
||||
~P3·OPPPO3OP~ · ·· ·
|
||||
· ' '· · ·· · · · ·· ·
|
||||
|
||||
"@
|
||||
|
||||
|
|
|
|||
|
|
@ -6,10 +6,19 @@
|
|||
art () {
|
||||
cat <<-EOF
|
||||
|
||||
▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
|
||||
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
|
||||
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
|
||||
.---= [ by Pype Club ] =---.
|
||||
. . .. . ..
|
||||
_oOOP3OPP3Op_. .
|
||||
.PPpo~· ·· ~2p. ·· ···· · ·
|
||||
·Ppo · .pPO3Op.· · O:· · · ·
|
||||
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
|
||||
·~OP 3PO· .Op3 : · ·· _____ _____ _____
|
||||
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
|
||||
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
|
||||
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
|
||||
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
|
||||
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
|
||||
~P3·OPPPO3OP~ · ·· ·
|
||||
· ' '· · ·· · · · ·· ·
|
||||
|
||||
EOF
|
||||
}
|
||||
|
|
|
|||
39
website/docs/admin_hosts_aftereffects.md
Normal file
@@ -0,0 +1,39 @@
---
id: admin_hosts_aftereffects
title: AfterEffects Settings
sidebar_label: AfterEffects
---

import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';

## AfterEffects settings

There are a couple of settings that configure the publishing process for **AfterEffects**.
All of them are project based, i.e. each project can have a different configuration.

Location: Settings > Project > AfterEffects

![AfterEffects-admin_setting](assets/admin_hosts_aftereffects_settings.png)

## Publish plugins

### Validate Scene Settings

#### Skip Resolution Check for Tasks

Set regex pattern(s) to look for in a Task name to skip the resolution check against values from the DB. A short illustrative sketch of this kind of matching follows the settings list below.

#### Skip Timeline Check for Tasks

Set regex pattern(s) to look for in a Task name to skip the `frameStart`, `frameEnd` check against values from the DB.

### AfterEffects Submit to Deadline

* `Use Published scene` - Set to True (green) when Deadline should take the published scene as a source instead of the uploaded local one.
* `Priority` - priority of the job on the farm.
* `Primary Pool` - list of pools fetched from the server that you can select from.
* `Secondary Pool`
* `Frames Per Task` - number of frames per task (chunk) into which the render sequence is divided, making one job on the farm.
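To make the regex-driven skip lists above concrete, here is a small illustrative Python sketch. The pattern values and task names are invented; the real validator plugins live in the OpenPype codebase and may differ in detail:

```python
import re

# Hypothetical values an admin might enter in "Skip Resolution Check for Tasks".
skip_resolution_check = [r"^compositing$", r"review.*"]

def should_skip(task_name, patterns):
    """Return True if any configured regex matches the task name."""
    return any(re.search(pattern, task_name) for pattern in patterns)

print(should_skip("compositing", skip_resolution_check))   # True
print(should_skip("animation", skip_resolution_check))     # False
```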
51
website/docs/admin_hosts_harmony.md
Normal file
@@ -0,0 +1,51 @@
---
id: admin_hosts_harmony
title: ToonBoom Harmony Settings
sidebar_label: ToonBoom Harmony
---

import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';

## ToonBoom Harmony settings

There are a couple of settings that configure the publishing process for **ToonBoom Harmony**.
All of them are project based, i.e. each project can have a different configuration.

Location: Settings > Project > Harmony

![Harmony-admin_setting](assets/admin_hosts_harmony_settings.png)

## Publish plugins

### Collect Palettes

#### Allowed tasks

Set regex pattern(s) for the task names for which publishing of Palettes should occur.

Use ".*" to publish Palettes for ALL tasks. A small illustrative sketch of this allow-list matching is shown after the settings below.

### Validate Scene Settings

#### Skip Frame check for Assets with

Set regex pattern(s) for filtering Asset names that should skip validation of the `frameEnd` value from the DB.

#### Skip Resolution Check for Tasks

Set regex pattern(s) for filtering Asset names that should skip validation of the `Resolution` value from the DB.

#### Skip Timeline Check for Tasks

Set regex pattern(s) for filtering Task names that should skip the `frameStart`, `frameEnd` check against values from the DB.

### Harmony Submit to Deadline

* `Use Published scene` - Set to True (green) when Deadline should take the published scene as a source instead of the uploaded local one.
* `Priority` - priority of the job on the farm.
* `Primary Pool` - list of pools fetched from the server that you can select from.
* `Secondary Pool`
* `Frames Per Task` - number of frames per task (chunk) into which the render sequence is divided, making one job on the farm.
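As a rough illustration of the Collect Palettes allow-list described above; task names and patterns are invented, and the real collector plugin may differ:

```python
import re

# Hypothetical "Allowed tasks" patterns; ".*" would allow every task.
allowed_tasks = [r"^ink_and_paint$", r"color.*"]

def palettes_allowed(task_name, patterns):
    """Publish palettes only when some configured regex matches the task."""
    return any(re.search(pattern, task_name) for pattern in patterns)

print(palettes_allowed("color_keys", allowed_tasks))  # True
print(palettes_allowed("layout", allowed_tasks))      # False
```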
BIN
website/docs/assets/admin_hosts_aftereffects_settings.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 31 KiB |
BIN
website/docs/assets/admin_hosts_harmony_settings.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 38 KiB |
@@ -1,6 +1,6 @@
---
id: dev_build
title: Build openPYPE from source
title: Build OpenPYPE from source
sidebar_label: Build
---
@@ -45,12 +45,12 @@ To start OpenPype from source you need to
2) Run `.\tools\run_tray.ps1` if you have all required dependencies on your machine you should be greeted with OpenPype igniter window and once you give it your Mongo URL, with OpenPype icon in the system tray.


### To build openPype:
### To build OpenPype:

1) Run `.\tools\create_env.ps1` to create virtual environment in `.\venv`
2) Run `.\tools\build.ps1` to build pype executables in `.\build\`

To create distributable openPype versions, run `./tools/create_zip.ps1` - that will
To create distributable OpenPype versions, run `./tools/create_zip.ps1` - that will
create zip file with name `pype-vx.x.x.zip` parsed from current pype repository and
copy it to user data dir. You can specify `--path /path/to/zip` to force it into a different
location. This can be used to prepare new version releases for artists in the studio environment
@@ -61,7 +85,24 @@ without the need to re-build the whole package
</TabItem>
<TabItem value="linux">

To build pype on linux you wil need:
#### Docker
You can use Docker to build OpenPype. Just run:
```sh
sudo ./tools/docker_build.sh
```
and you should have OpenPype built in the `build` directory. It uses **CentOS 7**
as a base image.

You can pull the image:

```sh
# replace 3.0.0 tag with version you want
docker pull pypeclub/openpype:3.0.0
```
See https://hub.docker.com/r/pypeclub/openpype/tag for more.

#### Manual build
To build OpenPype on Linux you will need:

- **[curl](https://curl.se)** on systems that don't have it preinstalled.
- Python header files installed (**python3-dev** on Ubuntu for example).
@@ -143,7 +160,7 @@ pyenv local 3.7.9
To build pype on MacOS you will need:

- **[Homebrew](https://brew.sh)** - an easy way to install everything necessary.
- **[CMake](https://cmake.org/)** to build some external openPype dependencies.
- **[CMake](https://cmake.org/)** to build some external OpenPype dependencies.
- **XCode Command Line Tools** (or some other build system)

1) Install **Homebrew**:
@@ -85,8 +85,9 @@ module.exports = {
        items: [
          "admin_hosts_blender",
          "admin_hosts_maya",
          "admin_hosts_resolve"

          "admin_hosts_resolve",
          "admin_hosts_harmony",
          "admin_hosts_aftereffects"
        ],
      },
      {