[Automated] Merged develop into main

pypebot 2022-02-05 04:28:45 +01:00 committed by GitHub
commit b09dbf5d31
14 changed files with 463 additions and 68 deletions

View file

@@ -1,13 +1,18 @@
# Build Pype docker image
FROM debian:bookworm-slim AS builder
FROM ubuntu:focal AS builder
ARG OPENPYPE_PYTHON_VERSION=3.7.12
ARG BUILD_DATE
ARG VERSION
LABEL maintainer="info@openpype.io"
LABEL description="Docker Image to build and run OpenPype"
LABEL description="Docker Image to build and run OpenPype under Ubuntu 20.04"
LABEL org.opencontainers.image.name="pypeclub/openpype"
LABEL org.opencontainers.image.title="OpenPype Docker Image"
LABEL org.opencontainers.image.url="https://openpype.io/"
LABEL org.opencontainers.image.source="https://github.com/pypeclub/pype"
LABEL org.opencontainers.image.source="https://github.com/pypeclub/OpenPype"
LABEL org.opencontainers.image.documentation="https://openpype.io/docs/system_introduction"
LABEL org.opencontainers.image.created=$BUILD_DATE
LABEL org.opencontainers.image.version=$VERSION
USER root
@@ -42,14 +47,19 @@ RUN apt-get update \
SHELL ["/bin/bash", "-c"]
RUN mkdir /opt/openpype
# download and install pyenv
RUN curl https://pyenv.run | bash \
&& echo 'export PATH="$HOME/.pyenv/bin:$PATH"'>> $HOME/.bashrc \
&& echo 'eval "$(pyenv init -)"' >> $HOME/.bashrc \
&& echo 'eval "$(pyenv virtualenv-init -)"' >> $HOME/.bashrc \
&& echo 'eval "$(pyenv init --path)"' >> $HOME/.bashrc \
&& source $HOME/.bashrc && pyenv install ${OPENPYPE_PYTHON_VERSION}
&& echo 'export PATH="$HOME/.pyenv/bin:$PATH"'>> $HOME/init_pyenv.sh \
&& echo 'eval "$(pyenv init -)"' >> $HOME/init_pyenv.sh \
&& echo 'eval "$(pyenv virtualenv-init -)"' >> $HOME/init_pyenv.sh \
&& echo 'eval "$(pyenv init --path)"' >> $HOME/init_pyenv.sh
# install python with pyenv
RUN source $HOME/init_pyenv.sh \
&& pyenv install ${OPENPYPE_PYTHON_VERSION}
COPY . /opt/openpype/
@@ -57,13 +67,16 @@ RUN chmod +x /opt/openpype/tools/create_env.sh && chmod +x /opt/openpype/tools/b
WORKDIR /opt/openpype
# set local python version
RUN cd /opt/openpype \
&& source $HOME/.bashrc \
&& source $HOME/init_pyenv.sh \
&& pyenv local ${OPENPYPE_PYTHON_VERSION}
RUN source $HOME/.bashrc \
# fetch third party tools/libraries
RUN source $HOME/init_pyenv.sh \
&& ./tools/create_env.sh \
&& ./tools/fetch_thirdparty_libs.sh
RUN source $HOME/.bashrc \
# build openpype
RUN source $HOME/init_pyenv.sh \
&& bash ./tools/build.sh

View file

@@ -1,11 +1,15 @@
# Build Pype docker image
FROM centos:7 AS builder
ARG OPENPYPE_PYTHON_VERSION=3.7.10
ARG OPENPYPE_PYTHON_VERSION=3.7.12
LABEL org.opencontainers.image.name="pypeclub/openpype"
LABEL org.opencontainers.image.title="OpenPype Docker Image"
LABEL org.opencontainers.image.url="https://openpype.io/"
LABEL org.opencontainers.image.source="https://github.com/pypeclub/pype"
LABEL org.opencontainers.image.documentation="https://openpype.io/docs/system_introduction"
LABEL org.opencontainers.image.created=$BUILD_DATE
LABEL org.opencontainers.image.version=$VERSION
USER root

View file

@@ -412,3 +412,23 @@ def repack_version(directory):
directory name.
"""
PypeCommands().repack_version(directory)
@main.command()
@click.option("--project", help="Project name")
@click.option(
"--dirpath", help="Directory where package is stored", default=None
)
def pack_project(project, dirpath):
"""Create a package of project with all files and database dump."""
PypeCommands().pack_project(project, dirpath)
@main.command()
@click.option("--zipfile", help="Path to zip file")
@click.option(
"--root", help="Replace root which was stored in project", default=None
)
def unpack_project(zipfile, root):
"""Create a package of project with all files and database dump."""
PypeCommands().unpack_project(zipfile, root)
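Not part of this commit, but for context: the two new commands can be driven from Python via subprocess. The "openpype_console" entry-point name is an assumption and may differ per installation; the project name and paths are illustrative.

    import subprocess

    # Package "MyProject" into <project root>/MyProject.zip
    subprocess.run(
        ["openpype_console", "pack_project", "--project", "MyProject"],
        check=True,
    )

    # Recreate it elsewhere, replacing the root stored in the package
    subprocess.run(
        ["openpype_console", "unpack_project",
         "--zipfile", "/tmp/MyProject.zip", "--root", "/mnt/other_root"],
        check=True,
    )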

View file

@@ -29,6 +29,10 @@ from .lib import (
maintained_selection
)
from .utils import (
colorspace_exists_on_node,
get_colorspace_list
)
__all__ = (
"file_extensions",
@@ -54,4 +58,7 @@ __all__ = (
"update_container",
"maintained_selection",
"colorspace_exists_on_node",
"get_colorspace_list"
)

View file

@@ -82,3 +82,50 @@ def bake_gizmos_recursively(in_group=None):
if node.Class() == "Group":
bake_gizmos_recursively(node)
def colorspace_exists_on_node(node, colorspace_name):
""" Check if colorspace exists on node
Look through all options in the colorpsace knob, and see if we have an
exact match to one of the items.
Args:
node (nuke.Node): nuke node object
colorspace_name (str): color profile name
Returns:
bool: True if exists
"""
try:
colorspace_knob = node['colorspace']
except ValueError:
# knob is not available on input node
return False
all_clrs = get_colorspace_list(colorspace_knob)
return colorspace_name in all_clrs
def get_colorspace_list(colorspace_knob):
"""Get available colorspace profile names
Args:
colorspace_knob (nuke.Knob): nuke knob object
Returns:
list: list of profile name strings
"""
all_clrs = list(colorspace_knob.values())
reduced_clrs = []
if not colorspace_knob.getFlag(nuke.STRIP_CASCADE_PREFIX):
return all_clrs
# strip colorspace with nested path
for clrs in all_clrs:
clrs = clrs.split('/')[-1]
reduced_clrs.append(clrs)
return reduced_clrs
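A minimal usage sketch of the two helpers above (not part of the diff; assumes it runs inside Nuke's Python, and the profile name is illustrative):

    import nuke
    from openpype.hosts.nuke.api import (
        colorspace_exists_on_node,
        get_colorspace_list
    )

    read_node = nuke.createNode("Read")
    if colorspace_exists_on_node(read_node, "ACES - ACEScg"):
        read_node["colorspace"].setValue("ACES - ACEScg")
    else:
        # fall back to inspecting what the knob actually offers
        print(get_colorspace_list(read_node["colorspace"]))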

View file

@@ -9,7 +9,8 @@ from openpype.hosts.nuke.api.lib import (
from openpype.hosts.nuke.api import (
containerise,
update_container,
viewer_update_and_undo_stop
viewer_update_and_undo_stop,
colorspace_exists_on_node
)
from openpype.hosts.nuke.api import plugin
@@ -66,11 +67,11 @@ class LoadClip(plugin.NukeLoader):
)
def load(self, context, name, namespace, options):
repre = context["representation"]
# reset container id so it is always unique for each instance
self.reset_container_id()
is_sequence = len(context["representation"]["files"]) > 1
is_sequence = len(repre["files"]) > 1
file = self.fname.replace("\\", "/")
@@ -79,14 +80,13 @@ class LoadClip(plugin.NukeLoader):
version = context['version']
version_data = version.get("data", {})
repr_id = context["representation"]["_id"]
colorspace = version_data.get("colorspace")
iio_colorspace = get_imageio_input_colorspace(file)
repr_cont = context["representation"]["context"]
repre_id = repre["_id"]
repre_cont = repre["context"]
self.log.info("version_data: {}\n".format(version_data))
self.log.debug(
"Representation id `{}` ".format(repr_id))
"Representation id `{}` ".format(repre_id))
self.handle_start = version_data.get("handleStart", 0)
self.handle_end = version_data.get("handleEnd", 0)
@@ -101,7 +101,7 @@ class LoadClip(plugin.NukeLoader):
first = 1
last = first + duration
elif "#" not in file:
frame = repr_cont.get("frame")
frame = repre_cont.get("frame")
assert frame, "Representation is not sequence"
padding = len(frame)
@@ -113,10 +113,10 @@ class LoadClip(plugin.NukeLoader):
if not file:
self.log.warning(
"Representation id `{}` is failing to load".format(repr_id))
"Representation id `{}` is failing to load".format(repre_id))
return
read_name = self._get_node_name(context["representation"])
read_name = self._get_node_name(repre)
# Create the Loader with the filename path set
read_node = nuke.createNode(
@@ -128,11 +128,8 @@ class LoadClip(plugin.NukeLoader):
with viewer_update_and_undo_stop():
read_node["file"].setValue(file)
# Set colorspace defined in version data
if colorspace:
read_node["colorspace"].setValue(str(colorspace))
elif iio_colorspace is not None:
read_node["colorspace"].setValue(iio_colorspace)
used_colorspace = self._set_colorspace(
read_node, version_data, repre["data"])
self._set_range_to_node(read_node, first, last, start_at_workfile)
@@ -145,6 +142,12 @@ class LoadClip(plugin.NukeLoader):
for k in add_keys:
if k == 'version':
data_imprint.update({k: context["version"]['name']})
elif k == 'colorspace':
colorspace = repre["data"].get(k)
colorspace = colorspace or version_data.get(k)
data_imprint["db_colorspace"] = colorspace
if used_colorspace:
data_imprint["used_colorspace"] = used_colorspace
else:
data_imprint.update(
{k: context["version"]['data'].get(k, str(None))})
@@ -192,10 +195,13 @@ class LoadClip(plugin.NukeLoader):
"_id": representation["parent"]
})
version_data = version.get("data", {})
repr_id = representation["_id"]
colorspace = version_data.get("colorspace")
iio_colorspace = get_imageio_input_colorspace(file)
repr_cont = representation["context"]
repre_id = representation["_id"]
repre_cont = representation["context"]
# colorspace profile
colorspace = representation["data"].get("colorspace")
colorspace = colorspace or version_data.get("colorspace")
self.handle_start = version_data.get("handleStart", 0)
self.handle_end = version_data.get("handleEnd", 0)
@@ -210,7 +216,7 @@ class LoadClip(plugin.NukeLoader):
first = 1
last = first + duration
elif "#" not in file:
frame = repr_cont.get("frame")
frame = repre_cont.get("frame")
assert frame, "Representation is not sequence"
padding = len(frame)
@@ -218,7 +224,7 @@ class LoadClip(plugin.NukeLoader):
if not file:
self.log.warning(
"Representation id `{}` is failing to load".format(repr_id))
"Representation id `{}` is failing to load".format(repre_id))
return
read_name = self._get_node_name(representation)
@@ -229,12 +235,9 @@ class LoadClip(plugin.NukeLoader):
# to avoid multiple undo steps for rest of process
# we will switch off undo-ing
with viewer_update_and_undo_stop():
# Set colorspace defined in version data
if colorspace:
read_node["colorspace"].setValue(str(colorspace))
elif iio_colorspace is not None:
read_node["colorspace"].setValue(iio_colorspace)
used_colorspace = self._set_colorspace(
read_node, version_data, representation["data"],
path=file)
self._set_range_to_node(read_node, first, last, start_at_workfile)
@@ -243,7 +246,7 @@ class LoadClip(plugin.NukeLoader):
"frameStart": str(first),
"frameEnd": str(last),
"version": str(version.get("name")),
"colorspace": colorspace,
"db_colorspace": colorspace,
"source": version_data.get("source"),
"handleStart": str(self.handle_start),
"handleEnd": str(self.handle_end),
@@ -251,6 +254,10 @@ class LoadClip(plugin.NukeLoader):
"author": version_data.get("author")
}
# add used colorspace if any was found
if used_colorspace:
updated_dict["used_colorspace"] = used_colorspace
# change color of read_node
# get all versions in list
versions = io.find({
@@ -365,14 +372,37 @@ class LoadClip(plugin.NukeLoader):
def _get_node_name(self, representation):
repr_cont = representation["context"]
repre_cont = representation["context"]
name_data = {
"asset": repr_cont["asset"],
"subset": repr_cont["subset"],
"asset": repre_cont["asset"],
"subset": repre_cont["subset"],
"representation": representation["name"],
"ext": repr_cont["representation"],
"ext": repre_cont["representation"],
"id": representation["_id"],
"class_name": self.__class__.__name__
}
return self.node_name_template.format(**name_data)
def _set_colorspace(self, node, version_data, repre_data, path=None):
output_color = None
path = path or self.fname.replace("\\", "/")
# get colorspace
colorspace = repre_data.get("colorspace")
colorspace = colorspace or version_data.get("colorspace")
# colorspace from `project_anatomy/imageio/nuke/regexInputs`
iio_colorspace = get_imageio_input_colorspace(path)
# Set colorspace defined in version data
if (
colorspace is not None
and colorspace_exists_on_node(node, str(colorspace))
):
node["colorspace"].setValue(str(colorspace))
output_color = str(colorspace)
elif iio_colorspace is not None:
node["colorspace"].setValue(iio_colorspace)
output_color = iio_colorspace
return output_color

View file

@@ -9,7 +9,7 @@ from openpype.hosts.photoshop import api as photoshop
class CollectRemoteInstances(pyblish.api.ContextPlugin):
"""Gather instances configured color code of a layer.
"""Creates instances for configured color code of a layer.
Used in remote publishing when artists marks publishable layers by color-
coding.
@@ -46,6 +46,11 @@ class CollectRemoteInstances(pyblish.api.ContextPlugin):
stub = photoshop.stub()
layers = stub.get_layers()
existing_subset_names = []
for instance in context:
if instance.data.get('publish'):
existing_subset_names.append(instance.data.get('subset'))
asset, task_name, task_type = get_batch_asset_task_info(
task_data["context"])
@@ -55,6 +60,10 @@ class CollectRemoteInstances(pyblish.api.ContextPlugin):
instance_names = []
for layer in layers:
self.log.debug("Layer:: {}".format(layer))
if layer.parents:
self.log.debug("!!! Not a top layer, skip")
continue
resolved_family, resolved_subset_template = self._resolve_mapping(
layer
)
@@ -66,8 +75,19 @@ class CollectRemoteInstances(pyblish.api.ContextPlugin):
self.log.debug("!!! Not found family or template, skip")
continue
if layer.parents:
self.log.debug("!!! Not a top layer, skip")
fill_pairs = {
"variant": variant,
"family": resolved_family,
"task": task_name,
"layer": layer.name
}
subset = resolved_subset_template.format(
**prepare_template_data(fill_pairs))
if subset in existing_subset_names:
self.log.info(
"Subset {} already created, skipping.".format(subset))
continue
instance = context.create_instance(layer.name)
@@ -76,15 +96,6 @@ class CollectRemoteInstances(pyblish.api.ContextPlugin):
instance.data["publish"] = layer.visible
instance.data["asset"] = asset
instance.data["task"] = task_name
fill_pairs = {
"variant": variant,
"family": instance.data["family"],
"task": instance.data["task"],
"layer": layer.name
}
subset = resolved_subset_template.format(
**prepare_template_data(fill_pairs))
instance.data["subset"] = subset
instance_names.append(layer.name)
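For reference, a standalone sketch of the subset-name resolution above. The import path and the capitalization variants produced by prepare_template_data are assumptions; the template and values are illustrative:

    from openpype.lib import prepare_template_data  # assumed import path

    fill_pairs = {
        "variant": "Main",
        "family": "image",
        "task": "art",
        "layer": "BG_01"
    }
    # hypothetical template; real templates come from plugin settings
    subset = "{family}{Variant}".format(**prepare_template_data(fill_pairs))
    # -> "imageMain", assuming capitalized variants are generated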

View file

@@ -1,3 +1,4 @@
import collections
import pyblish.api
import openpype.api
@@ -16,11 +17,14 @@ class ValidateSubsetUniqueness(pyblish.api.ContextPlugin):
subset_names = []
for instance in context:
self.log.info("instance:: {}".format(instance.data))
if instance.data.get('publish'):
subset_names.append(instance.data.get('subset'))
msg = (
"Instance subset names are not unique. " +
"Remove duplicates via SubsetManager."
)
assert len(subset_names) == len(set(subset_names)), msg
non_unique = \
[item
for item, count in collections.Counter(subset_names).items()
if count > 1]
msg = ("Instance subset names {} are not unique. ".format(non_unique) +
"Remove duplicates via SubsetManager.")
assert not non_unique, msg
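The duplicate detection itself is plain stdlib; a standalone illustration of what the validator now reports:

    import collections

    subset_names = ["imageMain", "imageBg", "imageMain"]
    non_unique = [
        name for name, count in collections.Counter(subset_names).items()
        if count > 1
    ]
    assert non_unique == ["imageMain"]  # named in the validation message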

View file

@@ -5,7 +5,7 @@ from openpype.hosts.tvpaint.api import lib, plugin
class ImportImage(plugin.Loader):
"""Load image or image sequence to TVPaint as new layer."""
families = ["render", "image", "background", "plate"]
families = ["render", "image", "background", "plate", "review"]
representations = ["*"]
label = "Import Image"

View file

@@ -7,7 +7,7 @@ from openpype.hosts.tvpaint.api import lib, pipeline, plugin
class LoadImage(plugin.Loader):
"""Load image or image sequence to TVPaint as new layer."""
families = ["render", "image", "background", "plate"]
families = ["render", "image", "background", "plate", "review"]
representations = ["*"]
label = "Load Image"

View file

@@ -0,0 +1,247 @@
"""These lib functions are primarily for development purposes.
WARNING: This is not meant for production data.
The goal is to package the current state of a project, with the related
documents from mongo and the files from disk, into a zip file, and then be
able to recreate the project from that zip.
This gives the ability to create a project where changes and tests can be
done.
Keep in mind that creating a project package has a few requirements; they
are listed in the 'pack_project' function.
"""
import os
import json
import platform
import tempfile
import shutil
import datetime
import zipfile
from bson.json_util import (
loads,
dumps,
CANONICAL_JSON_OPTIONS
)
from avalon.api import AvalonMongoDB
DOCUMENTS_FILE_NAME = "database"
METADATA_FILE_NAME = "metadata"
PROJECT_FILES_DIR = "project_files"
def add_timestamp(filepath):
"""Add timestamp string to a file."""
base, ext = os.path.splitext(filepath)
timestamp = datetime.datetime.now().strftime("%y%m%d_%H%M%S")
new_base = "{}_{}".format(base, timestamp)
return new_base + ext
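# For example (illustrative, not part of this module; the timestamp
# depends on when the call is made):
#     add_timestamp("/tmp/MyProject.zip")
#     -> "/tmp/MyProject_220205_042845.zip"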
def pack_project(project_name, destination_dir=None):
"""Make a package of a project with mongo documents and files.
This function has a few restrictions:
- project must have only one root
- project must have all templates starting with
"{root[...]}/{project[name]}"
Args:
project_name(str): Project that should be packaged.
destination_dir(str): Optional path where the zip will be stored. The
project's root is used if not passed.
"""
print("Creating package of project \"{}\"".format(project_name))
# Validate existence of project
dbcon = AvalonMongoDB()
dbcon.Session["AVALON_PROJECT"] = project_name
project_doc = dbcon.find_one({"type": "project"})
if not project_doc:
raise ValueError("Project \"{}\" was not found in database".format(
project_name
))
roots = project_doc["config"]["roots"]
# Determine root directory of project
source_root = None
source_root_name = None
for root_name, root_value in roots.items():
if source_root is not None:
raise ValueError(
"Packaging is supported only for single root projects"
)
source_root = root_value
source_root_name = root_name
root_path = source_root[platform.system().lower()]
print("Using root \"{}\" with path \"{}\"".format(
source_root_name, root_path
))
project_source_path = os.path.join(root_path, project_name)
if not os.path.exists(project_source_path):
raise ValueError("Didn't find source of project files")
# Determine zip filepath where data will be stored
if not destination_dir:
destination_dir = root_path
destination_dir = os.path.normpath(destination_dir)
if not os.path.exists(destination_dir):
os.makedirs(destination_dir)
zip_path = os.path.join(destination_dir, project_name + ".zip")
print("Project will be packaged into \"{}\"".format(zip_path))
# Rename already existing zip
if os.path.exists(zip_path):
dst_filepath = add_timestamp(zip_path)
os.rename(zip_path, dst_filepath)
# We can add more data
metadata = {
"project_name": project_name,
"root": source_root,
"version": 1
}
# Create temp json file where metadata are stored
with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as s:
temp_metadata_json = s.name
with open(temp_metadata_json, "w") as stream:
json.dump(metadata, stream)
# Create temp json file where database documents are stored
with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as s:
temp_docs_json = s.name
# Query all project documents and store them to temp json
docs = list(dbcon.find({}))
data = dumps(
docs, json_options=CANONICAL_JSON_OPTIONS
)
with open(temp_docs_json, "w") as stream:
stream.write(data)
print("Packing files into zip")
# Write all to zip file
with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zip_stream:
# Add metadata file
zip_stream.write(temp_metadata_json, METADATA_FILE_NAME + ".json")
# Add database documents
zip_stream.write(temp_docs_json, DOCUMENTS_FILE_NAME + ".json")
# Add project files to zip
for root, _, filenames in os.walk(project_source_path):
for filename in filenames:
filepath = os.path.join(root, filename)
# TODO add one more folder
archive_name = os.path.join(
PROJECT_FILES_DIR,
os.path.relpath(filepath, root_path)
)
zip_stream.write(filepath, archive_name)
print("Cleaning up")
# Cleanup
os.remove(temp_docs_json)
os.remove(temp_metadata_json)
dbcon.uninstall()
print("*** Packing finished ***")
def unpack_project(path_to_zip, new_root=None):
"""Unpack project zip file to recreate project.
Args:
path_to_zip(str): Path to zip which was created using 'pack_project'
function.
new_root(str): Optional replacement root path for the unpacked
project.
"""
print("Unpacking project from zip {}".format(path_to_zip))
if not os.path.exists(path_to_zip):
print("Zip file does not exists: {}".format(path_to_zip))
return
tmp_dir = tempfile.mkdtemp(prefix="unpack_")
print("Zip is extracted to temp: {}".format(tmp_dir))
with zipfile.ZipFile(path_to_zip, "r") as zip_stream:
zip_stream.extractall(tmp_dir)
metadata_json_path = os.path.join(tmp_dir, METADATA_FILE_NAME + ".json")
with open(metadata_json_path, "r") as stream:
metadata = json.load(stream)
docs_json_path = os.path.join(tmp_dir, DOCUMENTS_FILE_NAME + ".json")
with open(docs_json_path, "r") as stream:
content = stream.readlines()
docs = loads("".join(content))
low_platform = platform.system().lower()
project_name = metadata["project_name"]
source_root = metadata["root"]
root_path = source_root[low_platform]
# Drop existing collection
dbcon = AvalonMongoDB()
database = dbcon.database
if project_name in database.list_collection_names():
database.drop_collection(project_name)
print("Removed existing project collection")
print("Creating project documents ({})".format(len(docs)))
# Create new collection with loaded docs
collection = database[project_name]
collection.insert_many(docs)
# Skip change of root if it is the same as the one stored in metadata
if (
new_root
and (os.path.normpath(new_root) == os.path.normpath(root_path))
):
new_root = None
if new_root:
print("Using different root path {}".format(new_root))
root_path = new_root
project_doc = collection.find_one({"type": "project"})
roots = project_doc["config"]["roots"]
key = tuple(roots.keys())[0]
update_key = "config.roots.{}.{}".format(key, low_platform)
collection.update_one(
{"_id": project_doc["_id"]},
{"$set": {
update_key: new_root
}}
)
# Make sure root path exists
if not os.path.exists(root_path):
os.makedirs(root_path)
src_project_files_dir = os.path.join(
tmp_dir, PROJECT_FILES_DIR, project_name
)
dst_project_files_dir = os.path.normpath(
os.path.join(root_path, project_name)
)
if os.path.exists(dst_project_files_dir):
new_path = add_timestamp(dst_project_files_dir)
print("Project folder already exists. Renamed \"{}\" -> \"{}\"".format(
dst_project_files_dir, new_path
))
os.rename(dst_project_files_dir, new_path)
print("Moving project files from temp \"{}\" -> \"{}\"".format(
src_project_files_dir, dst_project_files_dir
))
shutil.move(src_project_files_dir, dst_project_files_dir)
# Cleanup
print("Cleaning up")
shutil.rmtree(tmp_dir)
dbcon.uninstall()
print("*** Unpack finished ***")

View file

@@ -88,7 +88,6 @@ def publish(log, close_plugin_name=None):
if close_plugin: # close host app explicitly after error
context = pyblish.api.Context()
close_plugin().process(context)
sys.exit(1)
def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None):
@@ -137,7 +136,7 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None):
if close_plugin: # close host app explicitly after error
context = pyblish.api.Context()
close_plugin().process(context)
sys.exit(1)
return
elif processed % log_every == 0:
# pyblish returns progress in 0.0 - 2.0
progress = min(round(result["progress"] / 2 * 100), 99)

View file

@@ -251,7 +251,10 @@ class PypeCommands:
data = {
"last_workfile_path": workfile_path,
"start_last_workfile": True
"start_last_workfile": True,
"project_name": project,
"asset_name": asset,
"task_name": task_name
}
launched_app = application_manager.launch(app_name, **data)
@@ -433,3 +436,13 @@ class PypeCommands:
version_packer = VersionRepacker(directory)
version_packer.process()
def pack_project(self, project_name, dirpath):
from openpype.lib.project_backpack import pack_project
pack_project(project_name, dirpath)
def unpack_project(self, zip_filepath, new_root):
from openpype.lib.project_backpack import unpack_project
unpack_project(zip_filepath, new_root)

View file

@@ -68,7 +68,7 @@ main () {
echo -e "${BIGreen}>>>${RST} Running docker build ..."
# docker build --pull --no-cache -t pypeclub/openpype:$openpype_version .
docker build --pull --iidfile $openpype_root/build/docker-image.id -t pypeclub/openpype:$openpype_version -f $dockerfile .
docker build --pull --iidfile $openpype_root/build/docker-image.id --build-arg BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ') --build-arg VERSION=$openpype_version -t pypeclub/openpype:$openpype_version -f $dockerfile .
if [ $? -ne 0 ] ; then
echo $?
echo -e "${BIRed}!!!${RST} Docker build failed."