From 94725b56469095b572a6eec4af93bd9cc97ec947 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 8 Oct 2021 18:37:23 +0200 Subject: [PATCH 001/211] implemented base of server --- .../hosts_job_server/job_server/server.py | 143 ++++++++++++++++++ 1 file changed, 143 insertions(+) create mode 100644 openpype/modules/default_modules/hosts_job_server/job_server/server.py diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/server.py b/openpype/modules/default_modules/hosts_job_server/job_server/server.py new file mode 100644 index 0000000000..08a8d04a3a --- /dev/null +++ b/openpype/modules/default_modules/hosts_job_server/job_server/server.py @@ -0,0 +1,143 @@ +import threading +import asyncio +import logging + +from aiohttp import web + +log = logging.getLogger(__name__) + + +class WebServerManager: + """Manger that care about web server thread.""" + def __init__(self, port, host, loop=None): + self.port = port + self.host = host + self.app = web.Application() + if loop is None: + loop = asyncio.new_event_loop() + + # add route with multiple methods for single "external app" + self.webserver_thread = WebServerThread(self, loop) + + @property + def url(self): + return "http://{}:{}".format(self.host, self.port) + + def add_route(self, *args, **kwargs): + self.app.router.add_route(*args, **kwargs) + + def add_static(self, *args, **kwargs): + self.app.router.add_static(*args, **kwargs) + + def start_server(self): + if self.webserver_thread and not self.webserver_thread.is_alive(): + self.webserver_thread.start() + + def stop_server(self): + if not self.is_running: + return + + try: + log.debug("Stopping Web server") + self.webserver_thread.stop() + + except Exception as exc: + print("Errored", str(exc)) + log.warning( + "Error has happened during Killing Web server", + exc_info=True + ) + + @property + def is_running(self): + if self.webserver_thread is not None: + return self.webserver_thread.is_running + return False + + +class 
WebServerThread(threading.Thread): + """ Listener for requests in thread.""" + def __init__(self, manager, loop): + super(WebServerThread, self).__init__() + + self._is_running = False + self._stopped = False + self.manager = manager + self.loop = loop + self.runner = None + self.site = None + + @property + def port(self): + return self.manager.port + + @property + def host(self): + return self.manager.host + + @property + def stopped(self): + return self._stopped + + @property + def is_running(self): + return self._is_running + + def run(self): + self._is_running = True + + try: + log.info("Starting WebServer server") + asyncio.set_event_loop(self.loop) + self.loop.run_until_complete(self.start_server()) + + asyncio.ensure_future(self.check_shutdown(), loop=self.loop) + self.loop.run_forever() + + except Exception: + log.warning( + "Web Server service has failed", exc_info=True + ) + finally: + self.loop.close() + + self._is_running = False + log.info("Web server stopped") + + async def start_server(self): + """ Starts runner and TCPsite """ + self.runner = web.AppRunner(self.manager.app) + await self.runner.setup() + self.site = web.TCPSite(self.runner, self.host, self.port) + await self.site.start() + + def stop(self): + """Sets _stopped flag to True, 'check_shutdown' shuts server down""" + self._stopped = True + + async def check_shutdown(self): + """ Future that is running and checks if server should be running + periodically. 
+ """ + while not self._stopped: + await asyncio.sleep(0.5) + + print("Starting shutdown") + print("Stopping site") + await self.site.stop() + print("Site stopped") + await self.runner.cleanup() + + print("Runner stopped") + tasks = [ + task + for task in asyncio.all_tasks() + if task is not asyncio.current_task() + ] + list(map(lambda task: task.cancel(), tasks)) # cancel all the tasks + results = await asyncio.gather(*tasks, return_exceptions=True) + log.debug(f'Finished awaiting cancelled tasks, results: {results}...') + await self.loop.shutdown_asyncgens() + # to really make sure everything else has time to stop + await asyncio.sleep(0.07) + self.loop.stop() From c59abcce5dd773de6a79ba9ab44952efff80d7c7 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 8 Oct 2021 18:38:54 +0200 Subject: [PATCH 002/211] base implementation of job object --- .../hosts_job_server/job_server/jobs.py | 98 +++++++++++++++++++ 1 file changed, 98 insertions(+) create mode 100644 openpype/modules/default_modules/hosts_job_server/job_server/jobs.py diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py b/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py new file mode 100644 index 0000000000..dfd5f3f4ec --- /dev/null +++ b/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py @@ -0,0 +1,98 @@ +import datetime +from uuid import uuid4 + + +class Job: + """Job related to specific host name. + + Data must contain everything needed to finish the job. 
+ """ + # Remove done jobs each n days to clear memory + keep_in_memory_days = 3 + + def __init__(self, host_name, data, job_id=None, created_time=None): + if job_id is None: + job_id = str(uuid4()) + self._id = job_id + if created_time is None: + created_time = datetime.datetime.now() + self._created_time = created_time + self._started_time = None + self._done_time = None + self.host_name = host_name + self.data = data + self._result_data = None + + self._started = False + self._done = False + self._errored = False + self._message = None + self._deleted = False + + def keep_in_memory(self): + if self._done_time is None: + return True + + now = datetime.datetime.now() + delta = now - self._done_time + return delta.days < self.keep_in_memory_days + + @property + def id(self): + return self._id + + @property + def done(self): + return self._done + + def reset(self): + self._started = False + self._started_time = None + self._done = False + self._done_time = None + self._errored = False + self._message = None + + @property + def started(self): + return self._started + + @property + def deleted(self): + return self._deleted + + def set_deleted(self): + self._deleted = True + + def set_started(self): + self._started_time = datetime.datetime.now() + self._started = True + + def set_done(self, success=True, message=None, data=None): + self._done = True + self._done_time = datetime.datetime.now() + self._errored = not success + self._message = message + self._result_data = data + + def status(self): + output = {} + if self._message: + output["message"] = self._message + + state = "waiting" + if self._deleted: + state = "deleted" + elif self._errored: + state = "error" + elif self._done: + state = "done" + elif self._started: + state = "started" + + if self.done: + output["result"] = self._result_data + + output["state"] = state + + return output From 663d2bf8d377d0d54865621301ac3a58e646750e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 8 Oct 2021 18:40:17 +0200 
Subject: [PATCH 003/211] base of jobs queue --- .../hosts_job_server/job_server/jobs.py | 53 +++++++++++++++++++ 1 file changed, 53 insertions(+) diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py b/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py index dfd5f3f4ec..7782d84f31 100644 --- a/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py +++ b/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py @@ -96,3 +96,56 @@ class Job: output["state"] = state return output + + +class JobQueue: + """Queue holds jobs that should be done and workers that can do them. + + Also asign jobs to a worker. + """ + old_jobs_check_minutes_interval = 30 + + def __init__(self): + self._last_old_jobs_check = datetime.datetime.now() + self._jobs_by_id = {} + self._job_queue_by_host_name = collections.defaultdict( + collections.deque + ) + + def get_job(self, job_id): + """Job by it's id.""" + return self._jobs_by_id.get(job_id) + + def create_job(self, host_name, job_data): + """Create new job from passed data and add it to queue.""" + job = Job(host_name, job_data) + self._jobs_by_id[job.id] = job + self._job_queue_by_host_name[host_name].append(job) + return job + + def _remove_old_jobs(self): + """Once in specific time look if should remove old finished jobs.""" + delta = datetime.datetime.now() - self._last_old_jobs_check + if delta.seconds < self.old_jobs_check_minutes_interval: + return + + for job_id in tuple(self._jobs_by_id.keys()): + job = self._jobs_by_id[job_id] + if not job.keep_in_memory(): + self._jobs_by_id.pop(job_id) + + def remove_job(self, job_id): + """Delete job and eventually stop it.""" + job = self._jobs_by_id.get(job_id) + if job is None: + return + + job.set_deleted() + self._jobs_by_id.pop(job.id) + + def get_job_status(self, job_id): + """Job's status based on id.""" + job = self._jobs_by_id.get(job_id) + if job is None: + return {} + return job.status() From 
d7f5797bae59f840ddcf77906f11da2eb2a5fc1a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 8 Oct 2021 18:43:29 +0200 Subject: [PATCH 004/211] Base of worker --- .../hosts_job_server/job_server/workers.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 openpype/modules/default_modules/hosts_job_server/job_server/workers.py diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/workers.py b/openpype/modules/default_modules/hosts_job_server/job_server/workers.py new file mode 100644 index 0000000000..b8b7c1974c --- /dev/null +++ b/openpype/modules/default_modules/hosts_job_server/job_server/workers.py @@ -0,0 +1,14 @@ +from uuid import uuid4 + + +class Worker: + """Worker that can handle jobs of specific host.""" + def __init__(self, host_name): + self._id = None + self.host_name = host_name + + @property + def id(self): + if self._id is None: + self._id = str(uuid4()) + return self._id From a66c7ef3606f2bd1c9c28a8bad03928586e78a05 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 8 Oct 2021 18:44:29 +0200 Subject: [PATCH 005/211] added worker states --- .../hosts_job_server/job_server/workers.py | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/workers.py b/openpype/modules/default_modules/hosts_job_server/job_server/workers.py index b8b7c1974c..21cd8f4828 100644 --- a/openpype/modules/default_modules/hosts_job_server/job_server/workers.py +++ b/openpype/modules/default_modules/hosts_job_server/job_server/workers.py @@ -1,14 +1,37 @@ from uuid import uuid4 +class WorkerState: + IDLE = object() + JOB_ASSIGNED = object() + JOB_SENT = object() + + class Worker: """Worker that can handle jobs of specific host.""" def __init__(self, host_name): self._id = None self.host_name = host_name + self._state = WorkerState.IDLE @property def id(self): if self._id is None: self._id = str(uuid4()) return self._id + + @property + def state(self): + return 
self._state + + def is_idle(self): + return self._state is WorkerState.IDLE + + def job_assigned(self): + return ( + self._state is WorkerState.JOB_ASSIGNED + or self._state is WorkerState.JOB_SENT + ) + + def is_working(self): + return self._state is WorkerState.JOB_SENT From a948684d166054b461bbec246288bb73bbd3c3b0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 8 Oct 2021 18:44:57 +0200 Subject: [PATCH 006/211] worker can have assigned job --- .../hosts_job_server/job_server/workers.py | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/workers.py b/openpype/modules/default_modules/hosts_job_server/job_server/workers.py index 21cd8f4828..61d76bdf48 100644 --- a/openpype/modules/default_modules/hosts_job_server/job_server/workers.py +++ b/openpype/modules/default_modules/hosts_job_server/job_server/workers.py @@ -13,6 +13,8 @@ class Worker: self._id = None self.host_name = host_name self._state = WorkerState.IDLE + self._job = None + @property def id(self): @@ -24,6 +26,10 @@ class Worker: def state(self): return self._state + @property + def current_job(self): + return self._job + def is_idle(self): return self._state is WorkerState.IDLE @@ -35,3 +41,21 @@ class Worker: def is_working(self): return self._state is WorkerState.JOB_SENT + + def set_current_job(self, job): + if job is self._job: + return + + self._job = job + if job is None: + self._set_idle() + else: + self._state = WorkerState.JOB_ASSIGNED + job.set_worker(self) + + def _set_idle(self): + self._job = None + self._state = WorkerState.IDLE + + def set_working(self): + self._state = WorkerState.JOB_SENT From 7748cf997c18335b2b644b07d3752fa7bfaea815 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 8 Oct 2021 18:45:36 +0200 Subject: [PATCH 007/211] job can have assigned worker --- .../hosts_job_server/job_server/jobs.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git 
a/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py b/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py index 7782d84f31..e444f24224 100644 --- a/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py +++ b/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py @@ -29,6 +29,8 @@ class Job: self._message = None self._deleted = False + self._worker = None + def keep_in_memory(self): if self._done_time is None: return True @@ -53,6 +55,8 @@ class Job: self._errored = False self._message = None + self._worker = None + @property def started(self): return self._started @@ -63,6 +67,18 @@ class Job: def set_deleted(self): self._deleted = True + self.set_worker(None) + + def set_worker(self, worker): + if worker is self._worker: + return + + if self._worker is not None: + self._worker.set_current_job(None) + + self._worker = worker + if worker is not None: + worker.set_current_job(self) def set_started(self): self._started_time = datetime.datetime.now() @@ -74,6 +90,8 @@ class Job: self._errored = not success self._message = message self._result_data = data + if self._worker is not None: + self._worker.set_current_job(None) def status(self): output = {} From b54b212082767fca7c752386db98c14ce7fae30f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 8 Oct 2021 18:46:01 +0200 Subject: [PATCH 008/211] added workers to jobqueue --- .../hosts_job_server/job_server/jobs.py | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py b/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py index e444f24224..d2f0c58858 100644 --- a/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py +++ b/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py @@ -129,6 +129,39 @@ class JobQueue: self._job_queue_by_host_name = collections.defaultdict( collections.deque ) + self._workers_by_id = {} + 
self._workers_by_host_name = collections.defaultdict(list) + + def workers(self): + """All currently registered workers.""" + return self._workers_by_id.values() + + def add_worker(self, worker): + host_name = worker.host_name + print("Added new worker for \"{}\"".format(host_name)) + self._workers_by_id[worker.id] = worker + self._workers_by_host_name[host_name].append(worker) + + def get_worker(self, worker_id): + return self._workers_by_id.get(worker_id) + + def remove_worker(self, worker): + # Look if worker had assigned job to do + job = worker.current_job + if job is not None and not job.done: + # Reset job + job.set_worker(None) + job.reset() + # Add job back to queue + self._job_queue_by_host_name[job.host_name].appendleft(job) + + # Remove worker from registered workers + self._workers_by_id.pop(worker.id, None) + host_name = worker.host_name + if worker in self._workers_by_host_name[host_name]: + self._workers_by_host_name[host_name].remove(worker) + + print("Removed worker for \"{}\"".format(host_name)) def get_job(self, job_id): """Job by it's id.""" From ebe07ab8c61c80222c1ae6eb9605fd371ad8e65b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 8 Oct 2021 18:46:28 +0200 Subject: [PATCH 009/211] added assign jobs method to assign not assigned jobs to idle workers --- .../hosts_job_server/job_server/jobs.py | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py b/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py index d2f0c58858..239ce0d3ad 100644 --- a/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py +++ b/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py @@ -163,6 +163,35 @@ class JobQueue: print("Removed worker for \"{}\"".format(host_name)) + def assign_jobs(self): + """Try to assign job for each idle worker. + + Error all jobs without needed worker. 
+ """ + available_host_names = set() + for worker in self._workers_by_id.values(): + host_name = worker.host_name + available_host_names.add(host_name) + if worker.is_idle(): + jobs = self._job_queue_by_host_name[host_name] + while jobs: + job = jobs.popleft() + if not job.deleted: + worker.set_current_job(job) + break + + for host_name in tuple(self._job_queue_by_host_name.keys()): + if host_name in available_host_names: + continue + + jobs_deque = self._job_queue_by_host_name[host_name] + message = ("Not available workers for \"{}\"").format(host_name) + while jobs_deque: + job = jobs_deque.popleft() + if not job.deleted: + job.set_done(False, message) + self._remove_old_jobs() + def get_job(self, job_id): """Job by it's id.""" return self._jobs_by_id.get(job_id) From 7628ddf9cd61169a9d3ddfe33426ecd992777295 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 8 Oct 2021 18:46:34 +0200 Subject: [PATCH 010/211] added missing import --- .../modules/default_modules/hosts_job_server/job_server/jobs.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py b/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py index 239ce0d3ad..f565ec5066 100644 --- a/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py +++ b/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py @@ -1,4 +1,5 @@ import datetime +import collections from uuid import uuid4 From 353ca75974b58c5e5539a7e49086d9adb7716ebb Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 8 Oct 2021 18:47:12 +0200 Subject: [PATCH 011/211] worker have access to connection on his object --- .../hosts_job_server/job_server/workers.py | 63 ++++++++++++++++++- 1 file changed, 62 insertions(+), 1 deletion(-) diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/workers.py b/openpype/modules/default_modules/hosts_job_server/job_server/workers.py index 61d76bdf48..4c22bbf43c 100644 --- 
a/openpype/modules/default_modules/hosts_job_server/job_server/workers.py +++ b/openpype/modules/default_modules/hosts_job_server/job_server/workers.py @@ -1,4 +1,7 @@ +import asyncio from uuid import uuid4 +from aiohttp import WSCloseCode +from aiohttp_json_rpc.protocol import encode_request class WorkerState: @@ -9,12 +12,57 @@ class WorkerState: class Worker: """Worker that can handle jobs of specific host.""" - def __init__(self, host_name): + def __init__(self, host_name, http_request): self._id = None self.host_name = host_name + self._http_request = http_request self._state = WorkerState.IDLE self._job = None + # Give ability to send requests to worker + http_request.request_id = str(uuid4()) + http_request.pending_requests = {} + + async def send_job(self): + if self._job is not None: + data = { + "job_id": self._job.id, + "data": self._job.data + } + return await self.call("start_job", data) + return False + + async def call(self, method, params=None, timeout=None): + """Call method on worker's side.""" + request_id = self._http_request.request_id + self._http_request.request_id = str(uuid4()) + pending_requests = self._http_request.pending_requests + pending_requests[request_id] = asyncio.Future() + + request = encode_request(method, id=request_id, params=params) + + await self._http_request.ws.send_str(request) + + if timeout: + await asyncio.wait_for( + pending_requests[request_id], + timeout=timeout + ) + + else: + await pending_requests[request_id] + + result = pending_requests[request_id].result() + del pending_requests[request_id] + + return result + + async def close(self): + # TODO disconnect without calling a method + return await self.ws.close( + code=WSCloseCode.GOING_AWAY, + message="Server shutdown" + ) @property def id(self): @@ -30,6 +78,19 @@ class Worker: def current_job(self): return self._job + @property + def http_request(self): + return self._http_request + + @property + def ws(self): + return self.http_request.ws + + def 
connection_is_alive(self): + if self.ws.closed or self.ws._writer.transport.is_closing(): + return False + return True + def is_idle(self): return self._state is WorkerState.IDLE From 187811f8be9b1182e57e3315cab8691a3cc29dd0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 8 Oct 2021 18:48:00 +0200 Subject: [PATCH 012/211] implemented route for creating and getting jobs --- .../job_server/job_queue_route.py | 59 +++++++++++++++++++ .../hosts_job_server/job_server/server.py | 6 ++ 2 files changed, 65 insertions(+) create mode 100644 openpype/modules/default_modules/hosts_job_server/job_server/job_queue_route.py diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/job_queue_route.py b/openpype/modules/default_modules/hosts_job_server/job_server/job_queue_route.py new file mode 100644 index 0000000000..fd9c27f055 --- /dev/null +++ b/openpype/modules/default_modules/hosts_job_server/job_server/job_queue_route.py @@ -0,0 +1,59 @@ +import json + +from aiohttp.web_response import Response + + +class JobQueueResource: + def __init__(self, job_queue, server_manager): + self.server_manager = server_manager + + self._prefix = "/api" + + self._job_queue = job_queue + + self.endpoint_defs = ( + ("POST", "/jobs", self.post_job), + ("GET", "/jobs", self.get_jobs), + ("GET", "/jobs/{job_id}", self.get_job) + ) + + self.register() + + def register(self): + for methods, url, callback in self.endpoint_defs: + final_url = self._prefix + url + self.server_manager.add_route( + methods, final_url, callback + ) + + async def get_jobs(self, request): + return Response(status=200) + + async def post_job(self, request): + data = await request.json() + host_name = data.get("host_name") + if not host_name: + return Response( + status=400, message="Key \"host_name\" not filled." 
+ ) + + job = self._job_queue.create_job(host_name, data) + return Response(status=201, text=job.id) + + async def get_job(self, request): + job_id = request.match_info["job_id"] + content = self._job_queue.get_job_status(job_id) + if content is None: + content = {} + return Response( + status=200, + body=self.encode(content), + content_type="application/json" + ) + + @classmethod + def encode(cls, data): + return json.dumps( + data, + indent=4 + ).encode("utf-8") diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/server.py b/openpype/modules/default_modules/hosts_job_server/job_server/server.py index 08a8d04a3a..1521445a4d 100644 --- a/openpype/modules/default_modules/hosts_job_server/job_server/server.py +++ b/openpype/modules/default_modules/hosts_job_server/job_server/server.py @@ -6,6 +6,9 @@ from aiohttp import web log = logging.getLogger(__name__) +from .jobs import JobQueue +from .job_queue_route import JobQueueResource + class WebServerManager: """Manger that care about web server thread.""" @@ -67,6 +70,9 @@ class WebServerThread(threading.Thread): self.runner = None self.site = None + job_queue = JobQueue() + self.job_queue_route = JobQueueResource(job_queue, manager) + @property def port(self): return self.manager.port From 41bf917757bee7156d30310fd1bd82fedeb62c57 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 8 Oct 2021 18:48:19 +0200 Subject: [PATCH 013/211] implemented workers websocket route --- .../hosts_job_server/job_server/server.py | 5 + .../job_server/workers_rpc_route.py | 124 ++++++++++++++++++ 2 files changed, 129 insertions(+) create mode 100644 openpype/modules/default_modules/hosts_job_server/job_server/workers_rpc_route.py diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/server.py b/openpype/modules/default_modules/hosts_job_server/job_server/server.py index 1521445a4d..e12e582e1c 100644 --- a/openpype/modules/default_modules/hosts_job_server/job_server/server.py +++ 
b/openpype/modules/default_modules/hosts_job_server/job_server/server.py @@ -8,6 +8,7 @@ log = logging.getLogger(__name__) from .jobs import JobQueue from .job_queue_route import JobQueueResource +from .workers_rpc_route import WorkerRpc class WebServerManager: @@ -72,6 +73,7 @@ class WebServerThread(threading.Thread): job_queue = JobQueue() self.job_queue_route = JobQueueResource(job_queue, manager) + self.workers_route = WorkerRpc(job_queue, manager, loop=loop) @property def port(self): @@ -129,6 +131,9 @@ class WebServerThread(threading.Thread): await asyncio.sleep(0.5) print("Starting shutdown") + if self.workers_route: + await self.workers_route.stop() + print("Stopping site") await self.site.stop() print("Site stopped") diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/workers_rpc_route.py b/openpype/modules/default_modules/hosts_job_server/job_server/workers_rpc_route.py new file mode 100644 index 0000000000..60ad6d24bf --- /dev/null +++ b/openpype/modules/default_modules/hosts_job_server/job_server/workers_rpc_route.py @@ -0,0 +1,124 @@ +import asyncio + +import aiohttp +from aiohttp_json_rpc import JsonRpc +from aiohttp_json_rpc.protocol import ( + encode_request, encode_error, decode_msg, JsonRpcMsgTyp +) +from aiohttp_json_rpc.exceptions import RpcError +from .workers import Worker + + +class WorkerRpc(JsonRpc): + def __init__(self, job_queue, manager, **kwargs): + super().__init__(**kwargs) + + self._job_queue = job_queue + self._manager = manager + + self._stopped = False + + # Register methods + self.add_methods( + ("", self.register_worker), + ("", self.job_done) + ) + asyncio.ensure_future(self._rpc_loop(), loop=self.loop) + + self._manager.add_route( + "*", "/ws", self.handle_request + ) + + # Panel routes for tools + async def register_worker(self, request, host_name): + worker = Worker(host_name, request.http_request) + self._job_queue.add_worker(worker) + return worker.id + + async def _rpc_loop(self): + while 
self.loop.is_running(): + if self._stopped: + break + + for worker in tuple(self._job_queue.workers()): + if not worker.connection_is_alive(): + self._job_queue.remove_worker(worker) + self._job_queue.assign_jobs() + + await self.send_jobs() + await asyncio.sleep(5) + + async def job_done(self, worker_id, job_id, success, message, data): + worker = self._job_queue.get_worker(worker_id) + if worker is not None: + worker.set_current_job(None) + + job = self._job_queue.get_job(job_id) + if job is not None: + job.set_done(success, message, data) + return True + + async def send_jobs(self): + invalid_workers = [] + for worker in self._job_queue.workers(): + if worker.job_assigned() and not worker.is_working(): + try: + await worker.send_job() + + except ConnectionResetError: + invalid_workers.append(worker) + + for worker in invalid_workers: + self._job_queue.remove_worker(worker) + + async def handle_websocket_request(self, http_request): + """Overide this method to catch CLOSING messages.""" + http_request.msg_id = 0 + http_request.pending = {} + + # prepare and register websocket + ws = aiohttp.web_ws.WebSocketResponse() + await ws.prepare(http_request) + http_request.ws = ws + self.clients.append(http_request) + + while not ws.closed: + self.logger.debug('waiting for messages') + raw_msg = await ws.receive() + + if raw_msg.type == aiohttp.WSMsgType.TEXT: + self.logger.debug('raw msg received: %s', raw_msg.data) + self.loop.create_task( + self._handle_rpc_msg(http_request, raw_msg) + ) + + elif raw_msg.type == aiohttp.WSMsgType.CLOSING: + break + + self.clients.remove(http_request) + return ws + + async def _handle_rpc_msg(self, http_request, raw_msg): + # This is duplicated code from super but there is no way how to do it + # to be able handle server->client requests + try: + _raw_message = raw_msg.data + msg = decode_msg(_raw_message) + + except RpcError as error: + await self._ws_send_str(http_request, encode_error(error)) + return + + if msg.type in 
(JsonRpcMsgTyp.RESULT, JsonRpcMsgTyp.ERROR): + request_id = msg.data["id"] + if request_id in http_request.pending_requests: + future = http_request.pending_requests[request_id] + future.set_result(msg.data["result"]) + return + + return await super()._handle_rpc_msg(http_request, raw_msg) + + async def stop(self): + self._stopped = True + for worker in tuple(self._job_queue.workers()): + await worker.close() From 0fbc49b64fd4677f8964f44f93d6a642d6d1adab Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 8 Oct 2021 18:48:45 +0200 Subject: [PATCH 014/211] implemented main function for job server --- .../hosts_job_server/job_server/__init__.py | 8 +++ .../hosts_job_server/job_server/utils.py | 52 +++++++++++++++++++ 2 files changed, 60 insertions(+) create mode 100644 openpype/modules/default_modules/hosts_job_server/job_server/__init__.py create mode 100644 openpype/modules/default_modules/hosts_job_server/job_server/utils.py diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/__init__.py b/openpype/modules/default_modules/hosts_job_server/job_server/__init__.py new file mode 100644 index 0000000000..c73d830257 --- /dev/null +++ b/openpype/modules/default_modules/hosts_job_server/job_server/__init__.py @@ -0,0 +1,8 @@ +from .server import WebServerManager +from .utils import main + + +__all__ = ( + "WebServerManager", + "main" +) diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/utils.py b/openpype/modules/default_modules/hosts_job_server/job_server/utils.py new file mode 100644 index 0000000000..8082de68f5 --- /dev/null +++ b/openpype/modules/default_modules/hosts_job_server/job_server/utils.py @@ -0,0 +1,52 @@ +import sys +import signal +import time +import socket + +from .server import WebServerManager + + +class SharedObjects: + stopped = False + + @classmethod + def stop(cls): + cls.stopped = True + + +def main(port=None, host=None): + def signal_handler(sig, frame): + print("Signal to kill process received. 
Termination starts.") + SharedObjects.stop() + + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGTERM, signal_handler) + + port = int(port or 8079) + host = str(host or "localhost") + + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as con: + print(con, type(con)) + result_of_check = con.connect_ex((host, port)) + + print(result_of_check) + if result_of_check == 0: + print(( + "Server {}:{} is already running or address is occupied." + ).format(host, port)) + return 1 + + manager = WebServerManager(port, host) + manager.start_server() + + stopped = False + while manager.is_running: + if not stopped and SharedObjects.stopped: + stopped = True + manager.stop_server() + time.sleep(0.1) + return 0 + + +if __name__ == "__main__": + sys.exit(main()) From 3932532b683c936bc7fdec9498a21a8ec6f58294 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 8 Oct 2021 18:49:35 +0200 Subject: [PATCH 015/211] implemented base worker client --- .../hosts_job_server/job_workers/__init__.py | 5 + .../job_workers/base_worker.py | 165 ++++++++++++++++++ 2 files changed, 170 insertions(+) create mode 100644 openpype/modules/default_modules/hosts_job_server/job_workers/__init__.py create mode 100644 openpype/modules/default_modules/hosts_job_server/job_workers/base_worker.py diff --git a/openpype/modules/default_modules/hosts_job_server/job_workers/__init__.py b/openpype/modules/default_modules/hosts_job_server/job_workers/__init__.py new file mode 100644 index 0000000000..f771797aea --- /dev/null +++ b/openpype/modules/default_modules/hosts_job_server/job_workers/__init__.py @@ -0,0 +1,5 @@ +from .base_worker import WorkerJobsConnection + +__all__ = ( + "WorkerJobsConnection", +) diff --git a/openpype/modules/default_modules/hosts_job_server/job_workers/base_worker.py b/openpype/modules/default_modules/hosts_job_server/job_workers/base_worker.py new file mode 100644 index 0000000000..3e9dbbfd7f --- /dev/null +++ 
b/openpype/modules/default_modules/hosts_job_server/job_workers/base_worker.py @@ -0,0 +1,165 @@ +import sys +import datetime +import asyncio + +from aiohttp_json_rpc import JsonRpcClient + + +class WorkerClient(JsonRpcClient): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + self.add_methods( + ("", self.start_job), + ) + self.current_job = None + self._id = None + + def set_id(self, worker_id): + self._id = worker_id + + async def start_job(self, job_data): + if self.current_job is not None: + return False + + print("Got new job {}".format(str(job_data))) + self.current_job = job_data + return True + + def finish_job(self, success, message, data): + self._loop.create_task(self._finish_job(success, message, data)) + + async def _finish_job(self, success, message, data): + job_id = self.current_job["job_id"] + self.current_job = None + + return await self.call( + "job_done", [self._id, job_id, success, message, data] + ) + + +class WorkerJobsConnection: + retry_time_seconds = 5 + + def __init__(self, server_url, host_name, loop=None): + self.client = None + self._loop = loop + + self._host_name = host_name + self._server_url = server_url + + self._is_running = False + self._connecting = False + self._connected = False + self._stopped = False + + def stop(self): + print("Stopping worker") + self._stopped = True + + @property + def is_running(self): + return self._is_running + + @property + def current_job(self): + if self.client is not None: + return self.client.current_job + return None + + def finish_job(self, success=True, message=None, data=None): + if self.client is None: + print(( + "Couldn't sent job status to server because" + " client is not connected." 
+ )) + else: + self.client.finish_job(success, message, data) + + async def main_loop(self): + self._is_running = True + + while not self._stopped: + start_time = datetime.datetime.now() + await self._connection_loop() + delta = datetime.datetime.now() - start_time + print("Client was connected {}".format(str(delta))) + # Check if was stopped and stop while loop in that case + if self._stopped: + break + + if delta.seconds < 60: + print(( + "Can't connect to server will try in {} seconds." + ).format(self.retry_time_seconds)) + + await asyncio.sleep(self.retry_time_seconds) + self._is_running = False + + async def _connect(self): + self.client = WorkerClient() + print("Connecting to {}".format(self._server_url)) + await self.client.connect_url(self._server_url) + + async def _connection_loop(self): + self._connecting = True + asyncio.run_coroutine_threadsafe( + self._connect(), loop=self._loop + ) + + while self._connecting: + if self.client is None: + await asyncio.sleep(0.07) + continue + session = getattr(self.client, "_session", None) + ws = getattr(self.client, "_ws", None) + if session is not None: + if session.closed: + self._connecting = False + self._connected = False + break + + elif ws is not None: + self._connecting = False + self._connected = True + + if self._stopped: + break + + await asyncio.sleep(0.07) + + if not self._connected: + self.client = None + return + + worker_id = await self.client.call( + "register_worker", [self._host_name] + ) + self.client.set_id(worker_id) + print( + "Registered as worker with id {}".format(worker_id) + ) + counter = 0 + while self._connected and self._loop.is_running(): + if self._stopped or ws.closed: + break + + if self.client.current_job: + if counter == 3: + counter = 0 + self.finish_job() + else: + counter += 1 + + await asyncio.sleep(0.3) + + await self._stop_cleanup() + + async def _stop_cleanup(self): + print("Cleanup after stop") + if self.client is not None and hasattr(self.client, "_ws"): + await 
self.client.disconnect() + + self.client = None + self._connecting = False + self._connected = False From c5962fd35ebab960eacbe3d734a02598c0fa806d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 8 Oct 2021 18:50:06 +0200 Subject: [PATCH 016/211] initial tvpaint worker connection --- .../job_workers/tvpaint_worker.py | 93 +++++++++++++++++++ 1 file changed, 93 insertions(+) create mode 100644 openpype/modules/default_modules/hosts_job_server/job_workers/tvpaint_worker.py diff --git a/openpype/modules/default_modules/hosts_job_server/job_workers/tvpaint_worker.py b/openpype/modules/default_modules/hosts_job_server/job_workers/tvpaint_worker.py new file mode 100644 index 0000000000..0cb6d4a7a1 --- /dev/null +++ b/openpype/modules/default_modules/hosts_job_server/job_workers/tvpaint_worker.py @@ -0,0 +1,93 @@ +import signal +import time +import asyncio + +from avalon.tvpaint.communication_server import ( + BaseCommunicator, + CommunicationWrapper +) +from .base_worker import WorkerJobsConnection + + +class WorkerCommunicator(BaseCommunicator): + def __init__(self, server_url): + super().__init__(self) + + self._server_url = server_url + self._worker_connection = None + + def _start_webserver(self): + loop = self.websocket_server.loop + self._worker_connection = WorkerJobsConnection( + self._server_url, "tvpaint", loop + ) + asyncio.ensure_future( + self._worker_connection.main_loop(), loop=loop + ) + + super()._start_webserver() + + def stop(self): + self._worker_connection.stop() + super().stop() + + @property + def current_job(self): + if self._worker_connection: + return self._worker_connection.current_job + return None + + def _check_process(self): + if self.process is None: + return True + + if self.process.poll() is not None: + asyncio.ensure_future( + self._worker_connection.disconnect(), + loop=self.websocket_server.loop + ) + self._exit() + return False + return True + + def _process_job(self): + job = self.current_job + if job is None: + return + + 
print(job) + self._worker_connection.finish_job() + + def main_loop(self): + while self.server_is_running: + if self._check_process(): + self._process_job() + time.sleep(1) + + return self.return_code + + +def _start_tvpaint(tvpaint_executable_path, server_url): + communicator = WorkerCommunicator(server_url) + CommunicationWrapper.set_communicator(communicator) + communicator.launch([tvpaint_executable_path]) + + +def main(tvpaint_executable_path, server_url): + # Register terminal signal handler + def signal_handler(*_args): + print("Termination signal received. Stopping.") + if CommunicationWrapper.communicator is not None: + CommunicationWrapper.communicator.stop() + + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGTERM, signal_handler) + + _start_tvpaint(tvpaint_executable_path, server_url) + + communicator = CommunicationWrapper.communicator + if communicator is None: + print("Communicator is not set") + return 1 + + return communicator.main_loop() From bdd57980b359522f1ee3087827a75df129caf9ae Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 8 Oct 2021 18:50:28 +0200 Subject: [PATCH 017/211] added module implementation --- .../hosts_job_server/__init__.py.py | 5 +++ .../hosts_job_server/module.py | 43 +++++++++++++++++++ 2 files changed, 48 insertions(+) create mode 100644 openpype/modules/default_modules/hosts_job_server/__init__.py.py create mode 100644 openpype/modules/default_modules/hosts_job_server/module.py diff --git a/openpype/modules/default_modules/hosts_job_server/__init__.py.py b/openpype/modules/default_modules/hosts_job_server/__init__.py.py new file mode 100644 index 0000000000..f82987baff --- /dev/null +++ b/openpype/modules/default_modules/hosts_job_server/__init__.py.py @@ -0,0 +1,5 @@ +from .module import HostsJobServer + +__all__ = ( + "HostsJobServer", +) diff --git a/openpype/modules/default_modules/hosts_job_server/module.py b/openpype/modules/default_modules/hosts_job_server/module.py new file mode 100644 
index 0000000000..7f2a17c5c7 --- /dev/null +++ b/openpype/modules/default_modules/hosts_job_server/module.py @@ -0,0 +1,43 @@ +from openpype.modules import OpenPypeModule + + +class HostsJobServer(OpenPypeModule): + name = "hosts_job_server" + + def initialize(self, modules_settings): + self._server_url = modules_settings.get("server_url") + self.enabled = True + + def start_server(self, port=None, host=None): + from .job_server import main + + return main(port, host) + + def start_worker(self, app_name, server_url=None): + from openpype.lib import ApplicationManager + + if server_url is None: + server_url = self._server_url + + app_manager = ApplicationManager() + app = app_manager.applications.get(app_name) + if app is None: + raise ValueError( + "Didn't find application \"{}\" in settings.".format(app_name) + ) + + if app.host_name == "tvpaint": + return self._start_tvpaint_worker(app, server_url) + raise ValueError("Unknown host \"{}\"".format(app.host_name)) + + def _start_tvpaint_worker(self, app, server_url): + from .job_workers.tvpaint_worker import main + + executable = app.find_executable() + if not executable: + raise ValueError(( + "Executable for app \"{}\" is not set" + " or accessible on this workstation." 
+ ).format(app.full_name)) + + return main(executable, server_url) From 28391e5bb85ebcf056557dbda5ab9044c0fec930 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 11 Oct 2021 09:59:20 +0200 Subject: [PATCH 018/211] send also worker id --- .../default_modules/hosts_job_server/job_server/workers.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/workers.py b/openpype/modules/default_modules/hosts_job_server/job_server/workers.py index 4c22bbf43c..b3ad3e2f71 100644 --- a/openpype/modules/default_modules/hosts_job_server/job_server/workers.py +++ b/openpype/modules/default_modules/hosts_job_server/job_server/workers.py @@ -27,6 +27,7 @@ class Worker: if self._job is not None: data = { "job_id": self._job.id, + "worker_id": self.id, "data": self._job.data } return await self.call("start_job", data) From 9f68ee1aacb313104ee58373c3c16b9a52adbb7d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 11 Oct 2021 09:59:50 +0200 Subject: [PATCH 019/211] removed todo --- .../default_modules/hosts_job_server/job_server/workers.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/workers.py b/openpype/modules/default_modules/hosts_job_server/job_server/workers.py index b3ad3e2f71..28ca649c03 100644 --- a/openpype/modules/default_modules/hosts_job_server/job_server/workers.py +++ b/openpype/modules/default_modules/hosts_job_server/job_server/workers.py @@ -59,7 +59,6 @@ class Worker: return result async def close(self): - # TODO disconnect without calling a method return await self.ws.close( code=WSCloseCode.GOING_AWAY, message="Server shutdown" From 6973584ca2c80ae17850bf384c901f414a007ce1 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 11 Oct 2021 12:07:31 +0200 Subject: [PATCH 020/211] added base of tvpaint worker implementation --- openpype/hosts/tvpaint/worker/__init__.py | 14 + openpype/hosts/tvpaint/worker/worker_job.py | 242 ++++++++++++++++++ 
.../job_workers/tvpaint_worker.py | 15 +- 3 files changed, 269 insertions(+), 2 deletions(-) create mode 100644 openpype/hosts/tvpaint/worker/__init__.py create mode 100644 openpype/hosts/tvpaint/worker/worker_job.py diff --git a/openpype/hosts/tvpaint/worker/__init__.py b/openpype/hosts/tvpaint/worker/__init__.py new file mode 100644 index 0000000000..4dca6754ab --- /dev/null +++ b/openpype/hosts/tvpaint/worker/__init__.py @@ -0,0 +1,14 @@ +from .worker_job import ( + ExecuteSimpleGeorgeScript, + ExecuteGeorgeScript, + ExecuteGeorgeScriptWithResult, + TVPaintCommands +) + + +__all__ = ( + "ExecuteSimpleGeorgeScript", + "ExecuteGeorgeScript", + "ExecuteGeorgeScriptWithResult", + "TVPaintCommands" +) diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py new file mode 100644 index 0000000000..44633a27dd --- /dev/null +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -0,0 +1,242 @@ +import os +import tempfile +import inspect +import copy +from uuid import uuid4 +from abc import ABCMeta, abstractmethod, abstractproperty + +import six + + +TMP_FILE_PREFIX = "opw_tvp_" + + +@six.add_metaclass(ABCMeta) +class BaseCommand: + @abstractproperty + def name(self): + """Command name (must be unique).""" + pass + + def __init__(self, parent, data): + if data is None: + data = {} + else: + data = copy.deepcopy(data) + + command_id = data.get("id") + if command_id is None: + command_id = str(uuid4()) + data["id"] = command_id + data["command"] = self.name + + self._parent = parent + self._result = None + self._command_data = data + self._done = False + + @property + def id(self): + return self._command_data["id"] + + @property + def parent(self): + return self._parent + + @property + def done(self): + return self._done + + def set_done(self): + self._done = True + + def set_result(self, result): + self._result = result + + def result(self): + return { + "id": self.id, + "result": self._result, + "done": self._done + } + + def 
command_data(self): + return copy.deepcopy(self._command_data) + + @abstractmethod + def execute(self): + pass + + @classmethod + @abstractmethod + def from_existing(cls, data): + pass + + def execute_george(self, george_script): + return self.parent.execute_george(george_script) + + def execute_george_through_file(self, george_script): + return self.parent.execute_george_through_file(george_script) + + +class ExecuteSimpleGeorgeScript(BaseCommand): + name = "execute_george_simple" + + def __init__(self, parent, script, data=None): + data = data or {} + data["script"] = script + self._script = script + super().__init__(parent, data) + + def execute(self): + self._result = self.execute_george(self._script) + + @classmethod + def from_existing(cls, parent, data): + script = data.pop("script") + return cls(parent, script, data) + + +class ExecuteGeorgeScript(BaseCommand): + name = "execute_george_through_file" + + def __init__(self, parent, script, data=None): + data = data or {} + data["script"] = script + self._script = script + super().__init__(parent, data) + + def execute(self): + self.execute_george_through_file(self._script) + + @classmethod + def from_existing(cls, parent, data): + script = data.pop("script") + return cls(parent, script, data) + + +class ExecuteGeorgeScriptWithResult(BaseCommand): + name = "execute_george_through_file_result" + + def __init__(self, parent, script, tmp_file_keys, data=None): + data = data or {} + data["script"] = script + data["tmp_file_keys"] = tmp_file_keys + self._script = script + self._tmp_file_keys = tmp_file_keys + super().__init__(parent, data) + + def execute(self): + filepath_by_key = {} + for key in self._tmp_file_keys: + output_file = tempfile.NamedTemporaryFile( + mode="w", prefix=TMP_FILE_PREFIX, suffix=".txt", delete=False + ) + output_file.close() + filepath_by_key[key] = output_file.name.replace("\\", "/") + + formatted_script = self._script.format(**filepath_by_key) + 
self.execute_george_through_file(formatted_script) + + result = {} + for key, filepath in filepath_by_key.items(): + with open(filepath, "r") as stream: + data = stream.read() + result[key] = data + os.remove(filepath) + + self._result = result + + @classmethod + def from_existing(cls, parent, data): + script = data.pop("script") + tmp_file_keys = data.pop("tmp_file_keys") + return cls(parent, script, tmp_file_keys, data) + + +class TVPaintCommands: + def __init__(self, workfile, commands=None, communicator=None): + if not commands: + commands = [] + + self._workfile = workfile + self._commands = [] + self._communicator = communicator + self._command_classes_by_name = None + + self.commands_from_data(commands) + + @property + def communicator(self): + return self._communicator + + @property + def classes_by_name(self): + if self._command_classes_by_name is None: + command_classes_by_name = {} + for attr in globals().values(): + if ( + not inspect.isclass(attr) + or not issubclass(attr, BaseCommand) + or attr is BaseCommand + ): + continue + + if inspect.isabstract(attr): + print("Skipping abstract class {}".format(attr.__name__)) + command_classes_by_name[attr.name] = attr + self._command_classes_by_name = command_classes_by_name + + return self._command_classes_by_name + + def commands_from_data(self, commands_data): + for command_data in commands_data: + command_name = command_data["command"] + + klass = self.classes_by_name[command_name] + command = klass.from_existing(command_data) + self.add_command(command) + + def add_command(self, command): + self._commands.append(command) + + def _open_workfile(self): + george_script = "tv_LoadProject '\"'\"{}\"'\"'".format( + self._workfile.replace("\\", "/") + ) + self.execute_george_through_file(george_script) + + def _close_workfile(self): + pass + + def execute(self): + self._open_workfile() + for command in self._commands: + command.execute() + command.set_done() + self._close_workfile() + + def commands_data(self): 
+ return [ + command.command_data() + for command in self._commands + ] + + def result(self): + return [ + command.result() + for command in self._commands + ] + + def execute_george(self, george_script): + return self.communicator.execute_george(george_script) + + def execute_george_through_file(self, george_script): + temporary_file = tempfile.NamedTemporaryFile( + mode="w", prefix=TMP_FILE_PREFIX, suffix=".grg", delete=False + ) + temporary_file.write(george_script) + temporary_file.close() + temp_file_path = temporary_file.name.replace("\\", "/") + self.execute_george("tv_runscript {}".format(temp_file_path)) + os.remove(temp_file_path) diff --git a/openpype/modules/default_modules/hosts_job_server/job_workers/tvpaint_worker.py b/openpype/modules/default_modules/hosts_job_server/job_workers/tvpaint_worker.py index 0cb6d4a7a1..c08203b0c2 100644 --- a/openpype/modules/default_modules/hosts_job_server/job_workers/tvpaint_worker.py +++ b/openpype/modules/default_modules/hosts_job_server/job_workers/tvpaint_worker.py @@ -2,6 +2,7 @@ import signal import time import asyncio +from openpype.hosts.tvpaint.worker import TVPaintCommands from avalon.tvpaint.communication_server import ( BaseCommunicator, CommunicationWrapper @@ -55,8 +56,18 @@ class WorkerCommunicator(BaseCommunicator): if job is None: return - print(job) - self._worker_connection.finish_job() + success = False + message = "Unknown function" + data = None + workfile = job["workfile"] + if job.data.get("function") == "commands": + commands = TVPaintCommands(workfile, job.data["commands"]) + commands.execute() + success = True + message = "Executed" + data = commands.result() + + self._worker_connection.finish_job(success, message, data) def main_loop(self): while self.server_is_running: From 35da55ab4591e49bf84bee6850ed6fe4a81c044c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 11 Oct 2021 12:07:46 +0200 Subject: [PATCH 021/211] added send/get job methods in module --- .../hosts_job_server/module.py | 
20 ++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/openpype/modules/default_modules/hosts_job_server/module.py b/openpype/modules/default_modules/hosts_job_server/module.py index 7f2a17c5c7..af62f5d9a7 100644 --- a/openpype/modules/default_modules/hosts_job_server/module.py +++ b/openpype/modules/default_modules/hosts_job_server/module.py @@ -5,9 +5,27 @@ class HostsJobServer(OpenPypeModule): name = "hosts_job_server" def initialize(self, modules_settings): - self._server_url = modules_settings.get("server_url") + server_url = modules_settings.get("server_url") + while server_url.endswith("/"): + server_url = server_url[:-1] + self._server_url = server_url self.enabled = True + def send_job(self, host_name, job_data): + import requests + + job_data = job_data or {} + job_data["host_name"] = host_name + api_path = "{}/api/jobs".format(self._server_url) + job_id = requests.post(api_path, data=job_data) + return job_id + + def get_job_status(self, job_id): + import requests + + api_path = "{}/api/jobs/{}".format(self._server_url, job_id) + return requests.get(api_path) + def start_server(self, port=None, host=None): from .job_server import main From f4609f4ebfcd82a6eea634264042c9f69690a30f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 11 Oct 2021 12:10:55 +0200 Subject: [PATCH 022/211] close workfile implemented --- openpype/hosts/tvpaint/worker/worker_job.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index 44633a27dd..84f1be2a10 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -207,7 +207,7 @@ class TVPaintCommands: self.execute_george_through_file(george_script) def _close_workfile(self): - pass + self.execute_george_through_file("tv_projectclose") def execute(self): self._open_workfile() From 5ad722b1978321c2bb7f1112cb4166d6dc586b07 Mon Sep 17 00:00:00 
2001 From: iLLiCiTiT Date: Mon, 11 Oct 2021 17:23:14 +0200 Subject: [PATCH 023/211] fix init file --- .../hosts_job_server/{__init__.py.py => __init__.py} | 1 + 1 file changed, 1 insertion(+) rename openpype/modules/default_modules/hosts_job_server/{__init__.py.py => __init__.py} (98%) diff --git a/openpype/modules/default_modules/hosts_job_server/__init__.py.py b/openpype/modules/default_modules/hosts_job_server/__init__.py similarity index 98% rename from openpype/modules/default_modules/hosts_job_server/__init__.py.py rename to openpype/modules/default_modules/hosts_job_server/__init__.py index f82987baff..7ef92cf4d5 100644 --- a/openpype/modules/default_modules/hosts_job_server/__init__.py.py +++ b/openpype/modules/default_modules/hosts_job_server/__init__.py @@ -1,5 +1,6 @@ from .module import HostsJobServer + __all__ = ( "HostsJobServer", ) From 936c76c414cbeeef9b8661ca586d5f8c120ba421 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 11 Oct 2021 17:23:24 +0200 Subject: [PATCH 024/211] make sure server_url is string --- openpype/modules/default_modules/hosts_job_server/module.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/default_modules/hosts_job_server/module.py b/openpype/modules/default_modules/hosts_job_server/module.py index af62f5d9a7..f26dbe7d92 100644 --- a/openpype/modules/default_modules/hosts_job_server/module.py +++ b/openpype/modules/default_modules/hosts_job_server/module.py @@ -5,7 +5,7 @@ class HostsJobServer(OpenPypeModule): name = "hosts_job_server" def initialize(self, modules_settings): - server_url = modules_settings.get("server_url") + server_url = modules_settings.get("server_url") or "" while server_url.endswith("/"): server_url = server_url[:-1] self._server_url = server_url From 16d3b3997894fc3bfd28a05dcb115cbd8b407235 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Oct 2021 10:09:47 +0200 Subject: [PATCH 025/211] added server_url property --- 
openpype/modules/default_modules/hosts_job_server/module.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/modules/default_modules/hosts_job_server/module.py b/openpype/modules/default_modules/hosts_job_server/module.py index f26dbe7d92..51bf76f6bf 100644 --- a/openpype/modules/default_modules/hosts_job_server/module.py +++ b/openpype/modules/default_modules/hosts_job_server/module.py @@ -11,6 +11,10 @@ class HostsJobServer(OpenPypeModule): self._server_url = server_url self.enabled = True + @property + def server_url(self): + return self._server_url + def send_job(self, host_name, job_data): import requests From 41bb148caeb8067605020db3c1567ad158b85bf8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Oct 2021 10:11:27 +0200 Subject: [PATCH 026/211] converted start method to class methods --- .../hosts_job_server/module.py | 30 +++++++++++++++---- 1 file changed, 25 insertions(+), 5 deletions(-) diff --git a/openpype/modules/default_modules/hosts_job_server/module.py b/openpype/modules/default_modules/hosts_job_server/module.py index 51bf76f6bf..086b5a09f9 100644 --- a/openpype/modules/default_modules/hosts_job_server/module.py +++ b/openpype/modules/default_modules/hosts_job_server/module.py @@ -1,4 +1,5 @@ from openpype.modules import OpenPypeModule +from openpype.api import get_system_settings class HostsJobServer(OpenPypeModule): @@ -30,16 +31,34 @@ class HostsJobServer(OpenPypeModule): api_path = "{}/api/jobs/{}".format(self._server_url, job_id) return requests.get(api_path) - def start_server(self, port=None, host=None): + @classmethod + def get_server_url_from_settings(cls): + module_settings = get_system_settings()["modules"] + return ( + module_settings + .get("hosts_job_server", {}) + .get("server_url") + ) + + @classmethod + def start_server(cls, port=None, host=None): from .job_server import main return main(port, host) - def start_worker(self, app_name, server_url=None): + @classmethod + def start_worker(cls, app_name, 
server_url=None): + import requests from openpype.lib import ApplicationManager if server_url is None: - server_url = self._server_url + server_url = cls.get_server_url_from_settings() + + if not server_url: + raise ValueError("Server url is not set.") + + # Validate url + requests.get(server_url) app_manager = ApplicationManager() app = app_manager.applications.get(app_name) @@ -49,10 +68,11 @@ class HostsJobServer(OpenPypeModule): ) if app.host_name == "tvpaint": - return self._start_tvpaint_worker(app, server_url) + return cls._start_tvpaint_worker(app, server_url) raise ValueError("Unknown host \"{}\"".format(app.host_name)) - def _start_tvpaint_worker(self, app, server_url): + @classmethod + def _start_tvpaint_worker(cls, app, server_url): from .job_workers.tvpaint_worker import main executable = app.find_executable() From 55c25452ccb660ef6b79554ddec0db73c9771b35 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Oct 2021 10:11:54 +0200 Subject: [PATCH 027/211] added module command to openpype cli --- openpype/cli.py | 8 +++++++- openpype/pype_commands.py | 11 +++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/openpype/cli.py b/openpype/cli.py index c69407e295..b79055d474 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -57,7 +57,13 @@ def tray(debug=False): PypeCommands().launch_tray(debug) -@main.command() +@PypeCommands.add_modules +@main.group(help="Run command line arguments of OpenPype modules") +@click.pass_context +def module(ctx): + pass + + @click.option("-d", "--debug", is_flag=True, help="Print debug messages") @click.option("--ftrack-url", envvar="FTRACK_SERVER", help="Ftrack server url") diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 5288749e8b..d6895bcb2c 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -3,6 +3,7 @@ import os import sys import json +import click from datetime import datetime from openpype.lib import PypeLogger @@ -33,6 +34,16 @@ class 
PypeCommands: user_role = "manager" settings.main(user_role) + @staticmethod + def add_modules(click_func): + from openpype.modules import ModulesManager + + manager = ModulesManager() + for module in manager.modules: + if hasattr(module, "cli"): + module.cli(click_func) + return click_func + @staticmethod def launch_eventservercli(*args): from openpype_modules.ftrack.ftrack_server.event_server_cli import ( From 0b1cdcaf26a8efe2b6f8e39007e7419357546e70 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Oct 2021 10:13:03 +0200 Subject: [PATCH 028/211] added cli commands to hosts job server --- .../hosts_job_server/module.py | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/openpype/modules/default_modules/hosts_job_server/module.py b/openpype/modules/default_modules/hosts_job_server/module.py index 086b5a09f9..89fa7149f0 100644 --- a/openpype/modules/default_modules/hosts_job_server/module.py +++ b/openpype/modules/default_modules/hosts_job_server/module.py @@ -1,3 +1,4 @@ +import click from openpype.modules import OpenPypeModule from openpype.api import get_system_settings @@ -31,6 +32,9 @@ class HostsJobServer(OpenPypeModule): api_path = "{}/api/jobs/{}".format(self._server_url, job_id) return requests.get(api_path) + def cli(self, click_group): + click_group.add_command(cli_main) + @classmethod def get_server_url_from_settings(cls): module_settings = get_system_settings()["modules"] @@ -83,3 +87,32 @@ class HostsJobServer(OpenPypeModule): ).format(app.full_name)) return main(executable, server_url) + + +@click.group( + "hosts_job_server", + help="Application job server. Can be used as render farm." +) +def cli_main(): + pass + + +@cli_main.command( + "start_server", + help="Start server handling workers and their jobs." 
+) +@click.option("--host", help="Server host (ip address)") +@click.option("--port", help="Server port") +def cli_start_server(host, port): + HostsJobServer.start_server(host, port) + + +@cli_main.command( + "start_worker", help=( + "Start a worker for a specific application. (e.g. \"tvpaint/11.5\")" + ) +) +@click.argument("app_name") +@click.option("--server_url", help="Server url which handle workers and jobs.") +def cli_start_worker(app_name, server_url): + HostsJobServer.start_worker(app_name, server_url) From 5255f0bffa81614ff6c3e3af0e01026273be70f0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Oct 2021 10:26:44 +0200 Subject: [PATCH 029/211] renamed module to job_queue --- .../{hosts_job_server => job_queue}/__init__.py | 0 .../job_server/__init__.py | 0 .../job_server/job_queue_route.py | 0 .../job_server/jobs.py | 0 .../job_server/server.py | 0 .../job_server/utils.py | 0 .../job_server/workers.py | 0 .../job_server/workers_rpc_route.py | 0 .../job_workers/__init__.py | 0 .../job_workers/base_worker.py | 0 .../job_workers/tvpaint_worker.py | 0 .../{hosts_job_server => job_queue}/module.py | 12 ++++++------ 12 files changed, 6 insertions(+), 6 deletions(-) rename openpype/modules/default_modules/{hosts_job_server => job_queue}/__init__.py (100%) rename openpype/modules/default_modules/{hosts_job_server => job_queue}/job_server/__init__.py (100%) rename openpype/modules/default_modules/{hosts_job_server => job_queue}/job_server/job_queue_route.py (100%) rename openpype/modules/default_modules/{hosts_job_server => job_queue}/job_server/jobs.py (100%) rename openpype/modules/default_modules/{hosts_job_server => job_queue}/job_server/server.py (100%) rename openpype/modules/default_modules/{hosts_job_server => job_queue}/job_server/utils.py (100%) rename openpype/modules/default_modules/{hosts_job_server => job_queue}/job_server/workers.py (100%) rename openpype/modules/default_modules/{hosts_job_server => job_queue}/job_server/workers_rpc_route.py 
(100%) rename openpype/modules/default_modules/{hosts_job_server => job_queue}/job_workers/__init__.py (100%) rename openpype/modules/default_modules/{hosts_job_server => job_queue}/job_workers/base_worker.py (100%) rename openpype/modules/default_modules/{hosts_job_server => job_queue}/job_workers/tvpaint_worker.py (100%) rename openpype/modules/default_modules/{hosts_job_server => job_queue}/module.py (93%) diff --git a/openpype/modules/default_modules/hosts_job_server/__init__.py b/openpype/modules/default_modules/job_queue/__init__.py similarity index 100% rename from openpype/modules/default_modules/hosts_job_server/__init__.py rename to openpype/modules/default_modules/job_queue/__init__.py diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/__init__.py b/openpype/modules/default_modules/job_queue/job_server/__init__.py similarity index 100% rename from openpype/modules/default_modules/hosts_job_server/job_server/__init__.py rename to openpype/modules/default_modules/job_queue/job_server/__init__.py diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/job_queue_route.py b/openpype/modules/default_modules/job_queue/job_server/job_queue_route.py similarity index 100% rename from openpype/modules/default_modules/hosts_job_server/job_server/job_queue_route.py rename to openpype/modules/default_modules/job_queue/job_server/job_queue_route.py diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/jobs.py b/openpype/modules/default_modules/job_queue/job_server/jobs.py similarity index 100% rename from openpype/modules/default_modules/hosts_job_server/job_server/jobs.py rename to openpype/modules/default_modules/job_queue/job_server/jobs.py diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/server.py b/openpype/modules/default_modules/job_queue/job_server/server.py similarity index 100% rename from openpype/modules/default_modules/hosts_job_server/job_server/server.py rename to 
openpype/modules/default_modules/job_queue/job_server/server.py diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/utils.py b/openpype/modules/default_modules/job_queue/job_server/utils.py similarity index 100% rename from openpype/modules/default_modules/hosts_job_server/job_server/utils.py rename to openpype/modules/default_modules/job_queue/job_server/utils.py diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/workers.py b/openpype/modules/default_modules/job_queue/job_server/workers.py similarity index 100% rename from openpype/modules/default_modules/hosts_job_server/job_server/workers.py rename to openpype/modules/default_modules/job_queue/job_server/workers.py diff --git a/openpype/modules/default_modules/hosts_job_server/job_server/workers_rpc_route.py b/openpype/modules/default_modules/job_queue/job_server/workers_rpc_route.py similarity index 100% rename from openpype/modules/default_modules/hosts_job_server/job_server/workers_rpc_route.py rename to openpype/modules/default_modules/job_queue/job_server/workers_rpc_route.py diff --git a/openpype/modules/default_modules/hosts_job_server/job_workers/__init__.py b/openpype/modules/default_modules/job_queue/job_workers/__init__.py similarity index 100% rename from openpype/modules/default_modules/hosts_job_server/job_workers/__init__.py rename to openpype/modules/default_modules/job_queue/job_workers/__init__.py diff --git a/openpype/modules/default_modules/hosts_job_server/job_workers/base_worker.py b/openpype/modules/default_modules/job_queue/job_workers/base_worker.py similarity index 100% rename from openpype/modules/default_modules/hosts_job_server/job_workers/base_worker.py rename to openpype/modules/default_modules/job_queue/job_workers/base_worker.py diff --git a/openpype/modules/default_modules/hosts_job_server/job_workers/tvpaint_worker.py b/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py similarity index 100% rename from 
openpype/modules/default_modules/hosts_job_server/job_workers/tvpaint_worker.py rename to openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py diff --git a/openpype/modules/default_modules/hosts_job_server/module.py b/openpype/modules/default_modules/job_queue/module.py similarity index 93% rename from openpype/modules/default_modules/hosts_job_server/module.py rename to openpype/modules/default_modules/job_queue/module.py index 89fa7149f0..ec5ea1b3f0 100644 --- a/openpype/modules/default_modules/hosts_job_server/module.py +++ b/openpype/modules/default_modules/job_queue/module.py @@ -3,8 +3,8 @@ from openpype.modules import OpenPypeModule from openpype.api import get_system_settings -class HostsJobServer(OpenPypeModule): - name = "hosts_job_server" +class JobQueueModule(OpenPypeModule): + name = "job_queue" def initialize(self, modules_settings): server_url = modules_settings.get("server_url") or "" @@ -40,7 +40,7 @@ class HostsJobServer(OpenPypeModule): module_settings = get_system_settings()["modules"] return ( module_settings - .get("hosts_job_server", {}) + .get(cls.name, {}) .get("server_url") ) @@ -90,7 +90,7 @@ class HostsJobServer(OpenPypeModule): @click.group( - "hosts_job_server", + JobQueueModule.name, help="Application job server. Can be used as render farm." 
) def cli_main(): @@ -104,7 +104,7 @@ def cli_main(): @click.option("--host", help="Server host (ip address)") @click.option("--port", help="Server port") def cli_start_server(host, port): - HostsJobServer.start_server(host, port) + JobQueueModule.start_server(host, port) @cli_main.command( @@ -115,4 +115,4 @@ def cli_start_server(host, port): @click.argument("app_name") @click.option("--server_url", help="Server url which handle workers and jobs.") def cli_start_worker(app_name, server_url): - HostsJobServer.start_worker(app_name, server_url) + JobQueueModule.start_worker(app_name, server_url) From 9ee98eb344d9918a16207758c15f4b6f774fb563 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Oct 2021 10:28:27 +0200 Subject: [PATCH 030/211] added settings for job_queue module --- .../settings/defaults/system_settings/modules.json | 3 +++ .../schemas/system_schema/schema_modules.json | 14 ++++++++++++++ 2 files changed, 17 insertions(+) diff --git a/openpype/settings/defaults/system_settings/modules.json b/openpype/settings/defaults/system_settings/modules.json index beb1eb4f24..43aeea7885 100644 --- a/openpype/settings/defaults/system_settings/modules.json +++ b/openpype/settings/defaults/system_settings/modules.json @@ -178,5 +178,8 @@ }, "slack": { "enabled": false + }, + "job_queue": { + "server_url": "" } } \ No newline at end of file diff --git a/openpype/settings/entities/schemas/system_schema/schema_modules.json b/openpype/settings/entities/schemas/system_schema/schema_modules.json index a2b31772e9..5a163d380b 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_modules.json +++ b/openpype/settings/entities/schemas/system_schema/schema_modules.json @@ -237,6 +237,20 @@ } ] }, + { + "type": "dict", + "key": "job_queue", + "label": "Job Queue", + "require_restart": true, + "collapsible": true, + "children": [ + { + "type": "text", + "key": "server_url", + "label": "Server Rest URL" + } + ] + }, { "type": "dynamic_schema", "name": 
"system_settings/modules" From 3a4ea103cdc94f9386de313094c7292dc498ac5f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Oct 2021 10:42:26 +0200 Subject: [PATCH 031/211] fox import of module --- openpype/modules/default_modules/job_queue/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/default_modules/job_queue/__init__.py b/openpype/modules/default_modules/job_queue/__init__.py index 7ef92cf4d5..6f2cec1b97 100644 --- a/openpype/modules/default_modules/job_queue/__init__.py +++ b/openpype/modules/default_modules/job_queue/__init__.py @@ -1,6 +1,6 @@ -from .module import HostsJobServer +from .module import JobQueueModule __all__ = ( - "HostsJobServer", + "JobQueueModule", ) From aea428aa938151c6a760e6787062230ea7146f33 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Oct 2021 10:42:37 +0200 Subject: [PATCH 032/211] added server url conversion --- .../default_modules/job_queue/module.py | 31 ++++++++++++++++--- 1 file changed, 26 insertions(+), 5 deletions(-) diff --git a/openpype/modules/default_modules/job_queue/module.py b/openpype/modules/default_modules/job_queue/module.py index ec5ea1b3f0..1c58b56a10 100644 --- a/openpype/modules/default_modules/job_queue/module.py +++ b/openpype/modules/default_modules/job_queue/module.py @@ -1,3 +1,10 @@ +import sys + +if sys.version_info[0] == 2: + from urlparse import urlsplit, urlunsplit +else: + from urllib.parse import urlsplit, urlunsplit + import click from openpype.modules import OpenPypeModule from openpype.api import get_system_settings @@ -8,11 +15,22 @@ class JobQueueModule(OpenPypeModule): def initialize(self, modules_settings): server_url = modules_settings.get("server_url") or "" - while server_url.endswith("/"): - server_url = server_url[:-1] - self._server_url = server_url + + self._server_url = self.url_conversion(server_url) self.enabled = True + @staticmethod + def url_conversion(url): + if not url: + return url + + url_parts = 
list(urlsplit(url)) + if not url_parts[0]: + url = "http://{}".format(url) + url_parts = list(urlsplit(url)) + + return urlunsplit(url_parts) + @property def server_url(self): return self._server_url @@ -38,7 +56,7 @@ class JobQueueModule(OpenPypeModule): @classmethod def get_server_url_from_settings(cls): module_settings = get_system_settings()["modules"] - return ( + return cls.url_conversion( module_settings .get(cls.name, {}) .get("server_url") @@ -55,12 +73,15 @@ class JobQueueModule(OpenPypeModule): import requests from openpype.lib import ApplicationManager - if server_url is None: + if not server_url: server_url = cls.get_server_url_from_settings() + server_url = "localhost:8079" if not server_url: raise ValueError("Server url is not set.") + server_url = cls.url_conversion(server_url) + # Validate url requests.get(server_url) From f79b377b8893efcfb7d2145363925de43cd0e733 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Oct 2021 12:23:51 +0200 Subject: [PATCH 033/211] fix order or argumnets --- openpype/modules/default_modules/job_queue/module.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/modules/default_modules/job_queue/module.py b/openpype/modules/default_modules/job_queue/module.py index 1c58b56a10..ea8397616b 100644 --- a/openpype/modules/default_modules/job_queue/module.py +++ b/openpype/modules/default_modules/job_queue/module.py @@ -122,10 +122,10 @@ def cli_main(): "start_server", help="Start server handling workers and their jobs." 
) -@click.option("--host", help="Server host (ip address)") @click.option("--port", help="Server port") -def cli_start_server(host, port): - JobQueueModule.start_server(host, port) +@click.option("--host", help="Server host (ip address)") +def cli_start_server(port, host): + JobQueueModule.start_server(port, host) @cli_main.command( From c32705bb39d23328e6db12f25149115256ddbad9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Oct 2021 12:24:09 +0200 Subject: [PATCH 034/211] pass executable as string --- openpype/modules/default_modules/job_queue/module.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/default_modules/job_queue/module.py b/openpype/modules/default_modules/job_queue/module.py index ea8397616b..7d6e3c7d1b 100644 --- a/openpype/modules/default_modules/job_queue/module.py +++ b/openpype/modules/default_modules/job_queue/module.py @@ -107,7 +107,7 @@ class JobQueueModule(OpenPypeModule): " or accessible on this workstation." ).format(app.full_name)) - return main(executable, server_url) + return main(str(executable), server_url) @click.group( From f72cdd712229470696d2102f5d0169c5b523db84 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Oct 2021 12:24:19 +0200 Subject: [PATCH 035/211] fix init and add return code --- .../default_modules/job_queue/job_workers/tvpaint_worker.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py b/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py index c08203b0c2..14f91858b2 100644 --- a/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py +++ b/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py @@ -12,8 +12,9 @@ from .base_worker import WorkerJobsConnection class WorkerCommunicator(BaseCommunicator): def __init__(self, server_url): - super().__init__(self) + super().__init__() + self.return_code = 1 self._server_url = 
server_url self._worker_connection = None @@ -30,6 +31,7 @@ class WorkerCommunicator(BaseCommunicator): def stop(self): self._worker_connection.stop() + self.return_code = 0 super().stop() @property From 19bbdc1257c2a1c64771494eaacc71c7e8fcab67 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Oct 2021 12:24:35 +0200 Subject: [PATCH 036/211] fix client base connection --- .../job_queue/job_workers/base_worker.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/openpype/modules/default_modules/job_queue/job_workers/base_worker.py b/openpype/modules/default_modules/job_queue/job_workers/base_worker.py index 3e9dbbfd7f..5dc1df5eb0 100644 --- a/openpype/modules/default_modules/job_queue/job_workers/base_worker.py +++ b/openpype/modules/default_modules/job_queue/job_workers/base_worker.py @@ -1,6 +1,7 @@ import sys import datetime import asyncio +import traceback from aiohttp_json_rpc import JsonRpcClient @@ -83,7 +84,7 @@ class WorkerJobsConnection: start_time = datetime.datetime.now() await self._connection_loop() delta = datetime.datetime.now() - start_time - print("Client was connected {}".format(str(delta))) + print("Connection loop took {}s".format(str(delta))) # Check if was stopped and stop while loop in that case if self._stopped: break @@ -99,18 +100,25 @@ class WorkerJobsConnection: async def _connect(self): self.client = WorkerClient() print("Connecting to {}".format(self._server_url)) - await self.client.connect_url(self._server_url) + try: + await self.client.connect_url(self._server_url) + except KeyboardInterrupt: + raise + except Exception: + traceback.print_exception(*sys.exc_info()) + async def _connection_loop(self): self._connecting = True - asyncio.run_coroutine_threadsafe( + future = asyncio.run_coroutine_threadsafe( self._connect(), loop=self._loop ) while self._connecting: - if self.client is None: + if not future.done(): await asyncio.sleep(0.07) continue + session = getattr(self.client, "_session", 
None) ws = getattr(self.client, "_ws", None) if session is not None: @@ -155,6 +163,9 @@ class WorkerJobsConnection: await self._stop_cleanup() + async def disconnect(self): + await self._stop_cleanup() + async def _stop_cleanup(self): print("Cleanup after stop") if self.client is not None and hasattr(self.client, "_ws"): From 8c0f7e0e3098c935d2279698e3f1abbca1125dce Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Oct 2021 12:24:45 +0200 Subject: [PATCH 037/211] added more usefull prints in server --- .../modules/default_modules/job_queue/job_server/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/default_modules/job_queue/job_server/utils.py b/openpype/modules/default_modules/job_queue/job_server/utils.py index 8082de68f5..09d401a9c2 100644 --- a/openpype/modules/default_modules/job_queue/job_server/utils.py +++ b/openpype/modules/default_modules/job_queue/job_server/utils.py @@ -25,17 +25,17 @@ def main(port=None, host=None): port = int(port or 8079) host = str(host or "localhost") + print(host, port) with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as con: - print(con, type(con)) result_of_check = con.connect_ex((host, port)) - print(result_of_check) if result_of_check == 0: print(( "Server {}:{} is already running or address is occupied." 
).format(host, port)) return 1 + print("Running server {}:{}".format(host, port)) manager = WebServerManager(port, host) manager.start_server() From 8199cf74f50258ebcb0d7fbba47c80a2f1ba58b1 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Oct 2021 14:14:53 +0200 Subject: [PATCH 038/211] added more url parse methods --- .../default_modules/job_queue/module.py | 32 +++++++++++++++---- 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/openpype/modules/default_modules/job_queue/module.py b/openpype/modules/default_modules/job_queue/module.py index 7d6e3c7d1b..4c71618a7f 100644 --- a/openpype/modules/default_modules/job_queue/module.py +++ b/openpype/modules/default_modules/job_queue/module.py @@ -20,15 +20,32 @@ class JobQueueModule(OpenPypeModule): self.enabled = True @staticmethod - def url_conversion(url): + def url_conversion(url, ws=False): if not url: return url url_parts = list(urlsplit(url)) - if not url_parts[0]: - url = "http://{}".format(url) + scheme = url_parts[0] + if not scheme: + if ws: + url = "ws://{}".format(url) + else: + url = "http://{}".format(url) url_parts = list(urlsplit(url)) + elif ws: + if scheme not in ("ws", "wss"): + if scheme == "https": + url_parts[0] = "wss" + else: + url_parts[0] = "ws" + + elif scheme not in ("http", "https"): + if scheme == "wss": + url_parts[0] = "https" + else: + url_parts[0] = "http" + return urlunsplit(url_parts) @property @@ -76,14 +93,15 @@ class JobQueueModule(OpenPypeModule): if not server_url: server_url = cls.get_server_url_from_settings() - server_url = "localhost:8079" if not server_url: raise ValueError("Server url is not set.") - server_url = cls.url_conversion(server_url) + http_server_url = cls.url_conversion(server_url) # Validate url - requests.get(server_url) + requests.get(http_server_url) + + ws_server_url = cls.url_conversion(server_url) + "/ws" app_manager = ApplicationManager() app = app_manager.applications.get(app_name) @@ -93,7 +111,7 @@ class 
JobQueueModule(OpenPypeModule): ) if app.host_name == "tvpaint": - return cls._start_tvpaint_worker(app, server_url) + return cls._start_tvpaint_worker(app, ws_server_url) raise ValueError("Unknown host \"{}\"".format(app.host_name)) @classmethod From eb39271ffb8ca03e2ca9539c791a81b8a19079e1 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Oct 2021 14:15:04 +0200 Subject: [PATCH 039/211] added more jobs info to status --- .../modules/default_modules/job_queue/job_server/jobs.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/openpype/modules/default_modules/job_queue/job_server/jobs.py b/openpype/modules/default_modules/job_queue/job_server/jobs.py index f565ec5066..f8c4752138 100644 --- a/openpype/modules/default_modules/job_queue/job_server/jobs.py +++ b/openpype/modules/default_modules/job_queue/job_server/jobs.py @@ -95,7 +95,13 @@ class Job: self._worker.set_current_job(None) def status(self): - output = {} + worker_id = None + if self._worker is not None: + worker_id = self._worker.id + output = { + "id": self.id, + "worker_id": worker_id + } if self._message: output["message"] = self._message From 5fb0a87a6b2d30af5ded5a0b21a979abe5b1c9d6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Oct 2021 14:15:13 +0200 Subject: [PATCH 040/211] implemented get jobs --- .../default_modules/job_queue/job_server/job_queue_route.py | 5 ++++- .../modules/default_modules/job_queue/job_server/jobs.py | 3 +++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/openpype/modules/default_modules/job_queue/job_server/job_queue_route.py b/openpype/modules/default_modules/job_queue/job_server/job_queue_route.py index fd9c27f055..8929e64dc5 100644 --- a/openpype/modules/default_modules/job_queue/job_server/job_queue_route.py +++ b/openpype/modules/default_modules/job_queue/job_server/job_queue_route.py @@ -27,7 +27,10 @@ class JobQueueResource: ) async def get_jobs(self, request): - return Response(status=200) + jobs_data = [] + for job 
in self._job_queue.get_jobs(): + jobs_data.append(job.status()) + return Response(status=200, body=self.encode(jobs_data)) async def post_job(self, request): data = await request.json() diff --git a/openpype/modules/default_modules/job_queue/job_server/jobs.py b/openpype/modules/default_modules/job_queue/job_server/jobs.py index f8c4752138..f8731cc8b7 100644 --- a/openpype/modules/default_modules/job_queue/job_server/jobs.py +++ b/openpype/modules/default_modules/job_queue/job_server/jobs.py @@ -199,6 +199,9 @@ class JobQueue: job.set_done(False, message) self._remove_old_jobs() + def get_jobs(self): + return self._jobs_by_id.values() + def get_job(self, job_id): """Job by it's id.""" return self._jobs_by_id.get(job_id) From 578c1034f0ef4c8c57a45761ab5a46b0be7b12ec Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Oct 2021 14:21:58 +0200 Subject: [PATCH 041/211] fixed request methods --- openpype/modules/default_modules/job_queue/module.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/modules/default_modules/job_queue/module.py b/openpype/modules/default_modules/job_queue/module.py index 4c71618a7f..39daa961b1 100644 --- a/openpype/modules/default_modules/job_queue/module.py +++ b/openpype/modules/default_modules/job_queue/module.py @@ -58,14 +58,14 @@ class JobQueueModule(OpenPypeModule): job_data = job_data or {} job_data["host_name"] = host_name api_path = "{}/api/jobs".format(self._server_url) - job_id = requests.post(api_path, data=job_data) - return job_id + post_request = requests.post(api_path, data=job_data) + return str(post_request.content.decode()) def get_job_status(self, job_id): import requests api_path = "{}/api/jobs/{}".format(self._server_url, job_id) - return requests.get(api_path) + return requests.get(api_path).json() def cli(self, click_group): click_group.add_command(cli_main) From 81cd89b8b8150b2c3c1dfc930962877ba61c7a97 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Oct 2021 15:08:15 +0200 
Subject: [PATCH 042/211] fixed job data access --- .../default_modules/job_queue/job_workers/base_worker.py | 1 - .../job_queue/job_workers/tvpaint_worker.py | 9 ++++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/openpype/modules/default_modules/job_queue/job_workers/base_worker.py b/openpype/modules/default_modules/job_queue/job_workers/base_worker.py index 5dc1df5eb0..0daf17764b 100644 --- a/openpype/modules/default_modules/job_queue/job_workers/base_worker.py +++ b/openpype/modules/default_modules/job_queue/job_workers/base_worker.py @@ -107,7 +107,6 @@ class WorkerJobsConnection: except Exception: traceback.print_exception(*sys.exc_info()) - async def _connection_loop(self): self._connecting = True future = asyncio.run_coroutine_threadsafe( diff --git a/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py b/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py index 14f91858b2..74bce1c47f 100644 --- a/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py +++ b/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py @@ -61,9 +61,12 @@ class WorkerCommunicator(BaseCommunicator): success = False message = "Unknown function" data = None - workfile = job["workfile"] - if job.data.get("function") == "commands": - commands = TVPaintCommands(workfile, job.data["commands"]) + job_data = job["data"] + workfile = job_data["workfile"] + if job_data.get("function") == "commands": + commands = TVPaintCommands( + workfile, job_data["commands"], self + ) commands.execute() success = True message = "Executed" From 6c37f14e8ec28fb60d9e2d5f6cab32725a6fb56a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Oct 2021 15:08:43 +0200 Subject: [PATCH 043/211] fixed from_existing --- openpype/hosts/tvpaint/worker/worker_job.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/tvpaint/worker/worker_job.py 
b/openpype/hosts/tvpaint/worker/worker_job.py index 84f1be2a10..0aabc74d05 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -69,7 +69,7 @@ class BaseCommand: @classmethod @abstractmethod - def from_existing(cls, data): + def from_existing(cls, parent, data): pass def execute_george(self, george_script): @@ -194,23 +194,25 @@ class TVPaintCommands: command_name = command_data["command"] klass = self.classes_by_name[command_name] - command = klass.from_existing(command_data) + command = klass.from_existing(self, command_data) self.add_command(command) def add_command(self, command): self._commands.append(command) def _open_workfile(self): - george_script = "tv_LoadProject '\"'\"{}\"'\"'".format( - self._workfile.replace("\\", "/") - ) + workfile = self._workfile.replace("\\", "/") + print("Opening workfile {}".format(workfile)) + george_script = "tv_LoadProject '\"'\"{}\"'\"'".format(workfile) self.execute_george_through_file(george_script) def _close_workfile(self): + print("Closing workfile") self.execute_george_through_file("tv_projectclose") def execute(self): self._open_workfile() + print("Commands execution started ({})".format(len(self._commands))) for command in self._commands: command.execute() command.set_done() From b458f270357f360ad75b6ddc53154c3280224c7b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Oct 2021 15:31:30 +0200 Subject: [PATCH 044/211] removed dump finish job --- .../job_queue/job_workers/base_worker.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/openpype/modules/default_modules/job_queue/job_workers/base_worker.py b/openpype/modules/default_modules/job_queue/job_workers/base_worker.py index 0daf17764b..9469a4d305 100644 --- a/openpype/modules/default_modules/job_queue/job_workers/base_worker.py +++ b/openpype/modules/default_modules/job_queue/job_workers/base_worker.py @@ -28,9 +28,13 @@ class WorkerClient(JsonRpcClient): return True 
def finish_job(self, success, message, data): - self._loop.create_task(self._finish_job(success, message, data)) + asyncio.ensure_future( + self._finish_job(success, message, data), + loop=self._loop + ) async def _finish_job(self, success, message, data): + print("Current job", self.current_job) job_id = self.current_job["job_id"] self.current_job = None @@ -146,18 +150,10 @@ class WorkerJobsConnection: print( "Registered as worker with id {}".format(worker_id) ) - counter = 0 while self._connected and self._loop.is_running(): if self._stopped or ws.closed: break - if self.client.current_job: - if counter == 3: - counter = 0 - self.finish_job() - else: - counter += 1 - await asyncio.sleep(0.3) await self._stop_cleanup() From 8634d4f3d60afcaa6caab6c8a74c04545e4dfedf Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 12 Oct 2021 15:44:57 +0200 Subject: [PATCH 045/211] register as worker after tvpaint first steps --- .../job_queue/job_server/utils.py | 1 - .../job_queue/job_workers/base_worker.py | 29 ++++++++++++------- .../job_queue/job_workers/tvpaint_worker.py | 7 ++++- 3 files changed, 25 insertions(+), 12 deletions(-) diff --git a/openpype/modules/default_modules/job_queue/job_server/utils.py b/openpype/modules/default_modules/job_queue/job_server/utils.py index 09d401a9c2..127ca5f090 100644 --- a/openpype/modules/default_modules/job_queue/job_server/utils.py +++ b/openpype/modules/default_modules/job_queue/job_server/utils.py @@ -25,7 +25,6 @@ def main(port=None, host=None): port = int(port or 8079) host = str(host or "localhost") - print(host, port) with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as con: result_of_check = con.connect_ex((host, port)) diff --git a/openpype/modules/default_modules/job_queue/job_workers/base_worker.py b/openpype/modules/default_modules/job_queue/job_workers/base_worker.py index 9469a4d305..2336f91be2 100644 --- a/openpype/modules/default_modules/job_queue/job_workers/base_worker.py +++ 
b/openpype/modules/default_modules/job_queue/job_workers/base_worker.py @@ -81,12 +81,12 @@ class WorkerJobsConnection: else: self.client.finish_job(success, message, data) - async def main_loop(self): + async def main_loop(self, register_worker=True): self._is_running = True while not self._stopped: start_time = datetime.datetime.now() - await self._connection_loop() + await self._connection_loop(register_worker) delta = datetime.datetime.now() - start_time print("Connection loop took {}s".format(str(delta))) # Check if was stopped and stop while loop in that case @@ -111,7 +111,7 @@ class WorkerJobsConnection: except Exception: traceback.print_exception(*sys.exc_info()) - async def _connection_loop(self): + async def _connection_loop(self, register_worker): self._connecting = True future = asyncio.run_coroutine_threadsafe( self._connect(), loop=self._loop @@ -143,13 +143,10 @@ class WorkerJobsConnection: self.client = None return - worker_id = await self.client.call( - "register_worker", [self._host_name] - ) - self.client.set_id(worker_id) - print( - "Registered as worker with id {}".format(worker_id) - ) + print("Connected to job queue server") + if register_worker: + self.register_as_worker() + while self._connected and self._loop.is_running(): if self._stopped or ws.closed: break @@ -158,6 +155,18 @@ class WorkerJobsConnection: await self._stop_cleanup() + def register_as_worker(self): + asyncio.ensure_future(self._register_as_worker(), loop=self._loop) + + async def _register_as_worker(self): + worker_id = await self.client.call( + "register_worker", [self._host_name] + ) + self.client.set_id(worker_id) + print( + "Registered as worker with id {}".format(worker_id) + ) + async def disconnect(self): await self._stop_cleanup() diff --git a/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py b/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py index 74bce1c47f..463ec7cc99 100644 --- 
a/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py +++ b/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py @@ -24,11 +24,16 @@ class WorkerCommunicator(BaseCommunicator): self._server_url, "tvpaint", loop ) asyncio.ensure_future( - self._worker_connection.main_loop(), loop=loop + self._worker_connection.main_loop(register_worker=False), + loop=loop ) super()._start_webserver() + def _on_client_connect(self, *args, **kwargs): + super()._on_client_connect(*args, **kwargs) + self._worker_connection.register_as_worker() + def stop(self): self._worker_connection.stop() self.return_code = 0 From ad97e77ab42e3354598d6fa2750cde342732b117 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 14 Oct 2021 12:11:46 +0200 Subject: [PATCH 046/211] added conversion to job data --- openpype/hosts/tvpaint/worker/worker_job.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index 0aabc74d05..ed6d0670de 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -224,6 +224,13 @@ class TVPaintCommands: for command in self._commands ] + def to_job_data(self): + return { + "workfile": self._workfile, + "function": "commands", + "commands": self.commands_data() + } + def result(self): return [ command.result() From d07406bb0934979d08522086692a56ec7581bcb0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 14 Oct 2021 12:14:46 +0200 Subject: [PATCH 047/211] merged ExecuteGeorgeScript and ExecuteGeorgeScriptWithResult into one command --- openpype/hosts/tvpaint/worker/worker_job.py | 44 ++++++++++----------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index ed6d0670de..7ae382ca9b 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ 
-100,30 +100,22 @@ class ExecuteSimpleGeorgeScript(BaseCommand): class ExecuteGeorgeScript(BaseCommand): name = "execute_george_through_file" - def __init__(self, parent, script, data=None): + def __init__( + self, parent, script, tmp_file_keys=None, output_dirs=None, data=None + ): data = data or {} - data["script"] = script - self._script = script - super().__init__(parent, data) + if not tmp_file_keys: + tmp_file_keys = data.get("tmp_file_keys") or [] - def execute(self): - self.execute_george_through_file(self._script) + if not output_dirs: + output_dirs = data.get("output_dirs") or {} - @classmethod - def from_existing(cls, parent, data): - script = data.pop("script") - return cls(parent, script, data) - - -class ExecuteGeorgeScriptWithResult(BaseCommand): - name = "execute_george_through_file_result" - - def __init__(self, parent, script, tmp_file_keys, data=None): - data = data or {} data["script"] = script data["tmp_file_keys"] = tmp_file_keys + data["output_dirs"] = output_dirs self._script = script self._tmp_file_keys = tmp_file_keys + self._output_dirs = output_dirs super().__init__(parent, data) def execute(self): @@ -133,10 +125,17 @@ class ExecuteGeorgeScriptWithResult(BaseCommand): mode="w", prefix=TMP_FILE_PREFIX, suffix=".txt", delete=False ) output_file.close() - filepath_by_key[key] = output_file.name.replace("\\", "/") + format_key = "{" + key + "}" + output_path = output_file.name.replace("\\", "/") + self._script.replace(format_key, output_path) + filepath_by_key[key] = output_path - formatted_script = self._script.format(**filepath_by_key) - self.execute_george_through_file(formatted_script) + for key, dir_path in self._output_dirs.items(): + format_key = "{" + key + "}" + dir_path = dir_path.replace("\\", "/") + self._script.replace(format_key, dir_path) + + self.execute_george_through_file(self._script) result = {} for key, filepath in filepath_by_key.items(): @@ -150,8 +149,9 @@ class ExecuteGeorgeScriptWithResult(BaseCommand): 
@classmethod def from_existing(cls, parent, data): script = data.pop("script") - tmp_file_keys = data.pop("tmp_file_keys") - return cls(parent, script, tmp_file_keys, data) + tmp_file_keys = data.pop("tmp_file_keys", None) + output_dirs = data.pop("output_dirs", None) + return cls(parent, script, tmp_file_keys, output_dirs, data) class TVPaintCommands: From 9cfbea38eb4338a30ab01ea92681dfe7cea9d11a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 14 Oct 2021 12:15:27 +0200 Subject: [PATCH 048/211] parent is not passed with arguments --- openpype/hosts/tvpaint/worker/worker_job.py | 28 ++++++++++++--------- 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index 7ae382ca9b..32044108d5 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -18,7 +18,7 @@ class BaseCommand: """Command name (must be unique).""" pass - def __init__(self, parent, data): + def __init__(self, data=None): if data is None: data = {} else: @@ -30,11 +30,14 @@ class BaseCommand: data["id"] = command_id data["command"] = self.name - self._parent = parent + self._parent = None self._result = None self._command_data = data self._done = False + def set_parent(self, parent): + self._parent = parent + @property def id(self): return self._command_data["id"] @@ -69,7 +72,7 @@ class BaseCommand: @classmethod @abstractmethod - def from_existing(cls, parent, data): + def from_existing(cls, data): pass def execute_george(self, george_script): @@ -82,26 +85,26 @@ class BaseCommand: class ExecuteSimpleGeorgeScript(BaseCommand): name = "execute_george_simple" - def __init__(self, parent, script, data=None): + def __init__(self, script, data=None): data = data or {} data["script"] = script self._script = script - super().__init__(parent, data) + super().__init__(data) def execute(self): self._result = self.execute_george(self._script) @classmethod - 
def from_existing(cls, parent, data): + def from_existing(cls, data): script = data.pop("script") - return cls(parent, script, data) + return cls(script, data) class ExecuteGeorgeScript(BaseCommand): name = "execute_george_through_file" def __init__( - self, parent, script, tmp_file_keys=None, output_dirs=None, data=None + self, script, tmp_file_keys=None, output_dirs=None, data=None ): data = data or {} if not tmp_file_keys: @@ -116,7 +119,7 @@ class ExecuteGeorgeScript(BaseCommand): self._script = script self._tmp_file_keys = tmp_file_keys self._output_dirs = output_dirs - super().__init__(parent, data) + super().__init__(data) def execute(self): filepath_by_key = {} @@ -147,11 +150,11 @@ class ExecuteGeorgeScript(BaseCommand): self._result = result @classmethod - def from_existing(cls, parent, data): + def from_existing(cls, data): script = data.pop("script") tmp_file_keys = data.pop("tmp_file_keys", None) output_dirs = data.pop("output_dirs", None) - return cls(parent, script, tmp_file_keys, output_dirs, data) + return cls(script, tmp_file_keys, output_dirs, data) class TVPaintCommands: @@ -194,10 +197,11 @@ class TVPaintCommands: command_name = command_data["command"] klass = self.classes_by_name[command_name] - command = klass.from_existing(self, command_data) + command = klass.from_existing(command_data) self.add_command(command) def add_command(self, command): + command.set_parent(self) self._commands.append(command) def _open_workfile(self): From 06995cc2dbecb0529efa267921b83e8ec50879b2 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 14 Oct 2021 12:15:37 +0200 Subject: [PATCH 049/211] command has access to communicator --- openpype/hosts/tvpaint/worker/worker_job.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index 32044108d5..46d5b00b2c 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -46,6 
+46,10 @@ class BaseCommand: def parent(self): return self._parent + @property + def communicator(self): + return self._parent.communicator + @property def done(self): return self._done From 26245ec93c4b3fa56efe0b8d9a3ce532dcf927dd Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 14 Oct 2021 12:16:01 +0200 Subject: [PATCH 050/211] added command which collect all scene data --- openpype/hosts/tvpaint/worker/worker_job.py | 45 +++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index 46d5b00b2c..c74bfa484d 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -161,6 +161,51 @@ class ExecuteGeorgeScript(BaseCommand): return cls(script, tmp_file_keys, output_dirs, data) +class CollectSceneData(BaseCommand): + name = "collect_scene_data" + + def execute(self): + from avalon.tvpaint.lib import ( + get_layers_data, + get_groups_data, + get_layers_pre_post_behavior, + get_layers_exposure_frames, + get_scene_data + ) + + groups_data = get_groups_data(communicator=self.communicator) + layers_data = get_layers_data(communicator=self.communicator) + layers_by_id = { + layer_data["layer_id"]: layer_data + for layer_data in layers_data + } + layer_ids = tuple(layers_by_id.keys()) + pre_post_beh = get_layers_pre_post_behavior( + layer_ids, communicator=self.communicator + ) + exposure_frames = get_layers_exposure_frames( + layer_ids, layers_data, communicator=self.communicator + ) + output_layers_data = [] + for layer_data in layers_data: + layer_id = layer_data["layer_id"] + layer_data["exposure_frames"] = exposure_frames[layer_id] + behaviors = pre_post_beh[layer_id] + for key, value in behaviors.items(): + layer_data[key] = value + output_layers_data.append(layer_data) + + self._result = { + "layers_data": output_layers_data, + "groups_data": groups_data, + "scene_data": get_scene_data(self.communicator) + } + + 
@classmethod + def from_existing(cls, data): + return cls(data) + + class TVPaintCommands: def __init__(self, workfile, commands=None, communicator=None): if not commands: From 44aee660f449a56c9c30491b8c870a24ffd4230f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 14 Oct 2021 12:22:41 +0200 Subject: [PATCH 051/211] added done key to job status --- openpype/modules/default_modules/job_queue/job_server/jobs.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/default_modules/job_queue/job_server/jobs.py b/openpype/modules/default_modules/job_queue/job_server/jobs.py index f8731cc8b7..91d484de3b 100644 --- a/openpype/modules/default_modules/job_queue/job_server/jobs.py +++ b/openpype/modules/default_modules/job_queue/job_server/jobs.py @@ -100,7 +100,8 @@ class Job: worker_id = self._worker.id output = { "id": self.id, - "worker_id": worker_id + "worker_id": worker_id, + "done": self._done } if self._message: output["message"] = self._message From cdce52a33958a71dd79646e033a5d1b5116e0ce4 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 14 Oct 2021 12:29:11 +0200 Subject: [PATCH 052/211] added dummy tvpaint file collector to webpublisher --- .../publish/collect_tvpaint_workfile_data.py | 53 +++++++++++++++++++ 1 file changed, 53 insertions(+) create mode 100644 openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py new file mode 100644 index 0000000000..3e865bb603 --- /dev/null +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py @@ -0,0 +1,53 @@ +""" +Requires: + CollectPublishedFiles + CollectModules + +Provides: + Instance +""" +import pyblish.api +from openpype.hosts.tvpaint.worker import ( + TVPaintCommands, + CollectSceneData +) +from avalon.tvpaint import CommunicationWrapper + + +class 
CollectTVPaintWorkfileData(pyblish.api.InstancePlugin): + label = "Collect TVPaint Workfile data" + order = pyblish.api.CollectorOrder + 0.1 + hosts = ["webpublisher"] + # TODO add families filter + + def process(self, instance): + # TODO change 'tvpaint_workfile' this is just dummy access + workfile = instance.data["tvpaint_workfile"] + # Get JobQueue module + modules = instance.context.data["openPypeModules"] + job_queue_module = modules["job_queue"] + + # Prepare tvpaint command + commands = TVPaintCommands(workfile, CommunicationWrapper.communicator) + commands.append(CollectSceneData()) + + # Send job data to job queue server + job_data = commands.to_job_data() + job_id = job_queue_module.send_job("tvpaint", job_data) + # Wait for job to be finished + while True: + job_status = job_queue_module.get_job_status(job_id) + if job_status["done"]: + break + + # Check if job state is done + if job_status["state"] != "done": + message = job_status["message"] or "Unknown issue" + raise ValueError( + "Job didn't finish properly." 
+ " Job state: \"{}\" | Job message: \"{}\"".format( + job_status["state"], + message + ) + ) + instance.data["sceneData"] = job_status["result"] From b966ba135a7464ad03db51ac4e7fdf417c80ec5a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 14 Oct 2021 12:43:49 +0200 Subject: [PATCH 053/211] job status always contain message and result --- .../modules/default_modules/job_queue/job_server/jobs.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/openpype/modules/default_modules/job_queue/job_server/jobs.py b/openpype/modules/default_modules/job_queue/job_server/jobs.py index 91d484de3b..0fc3c381d4 100644 --- a/openpype/modules/default_modules/job_queue/job_server/jobs.py +++ b/openpype/modules/default_modules/job_queue/job_server/jobs.py @@ -103,8 +103,7 @@ class Job: "worker_id": worker_id, "done": self._done } - if self._message: - output["message"] = self._message + output["message"] = self._message or None state = "waiting" if self._deleted: @@ -116,8 +115,7 @@ class Job: elif self._started: state = "started" - if self.done: - output["result"] = self._result_data + output["result"] = self._result_data output["state"] = state From 04847820108d057b33a30526bec80749a7a82e6f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 14 Oct 2021 12:50:59 +0200 Subject: [PATCH 054/211] added logs to collector --- .../publish/collect_tvpaint_workfile_data.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py index 3e865bb603..699d4e5f47 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py @@ -6,6 +6,7 @@ Requires: Provides: Instance """ +import json import pyblish.api from openpype.hosts.tvpaint.worker import ( TVPaintCommands, @@ -33,7 +34,14 @@ class 
CollectTVPaintWorkfileData(pyblish.api.InstancePlugin): # Send job data to job queue server job_data = commands.to_job_data() + self.debug("Sending job to JobQueue server.\n{}".format( + json.dumps(job_data, indent=4) + )) job_id = job_queue_module.send_job("tvpaint", job_data) + self.log.info(( + "Job sent to JobQueue server and got id \"{}\"." + " Waiting for finishing the job." + ).format(job_id)) # Wait for job to be finished while True: job_status = job_queue_module.get_job_status(job_id) @@ -50,4 +58,7 @@ class CollectTVPaintWorkfileData(pyblish.api.InstancePlugin): message ) ) - instance.data["sceneData"] = job_status["result"] + job_result = job_status["result"] + + self.log.debug("Job is done with result.\n{}".format(job_result)) + instance.data["sceneData"] = job_result From 0a0aab8b12b6885615f103199f87850f866c4440 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 14 Oct 2021 12:57:58 +0200 Subject: [PATCH 055/211] fix imports --- openpype/hosts/tvpaint/worker/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/tvpaint/worker/__init__.py b/openpype/hosts/tvpaint/worker/__init__.py index 4dca6754ab..3d8d5de24e 100644 --- a/openpype/hosts/tvpaint/worker/__init__.py +++ b/openpype/hosts/tvpaint/worker/__init__.py @@ -1,7 +1,7 @@ from .worker_job import ( ExecuteSimpleGeorgeScript, ExecuteGeorgeScript, - ExecuteGeorgeScriptWithResult, + CollectSceneData, TVPaintCommands ) @@ -9,6 +9,6 @@ from .worker_job import ( __all__ = ( "ExecuteSimpleGeorgeScript", "ExecuteGeorgeScript", - "ExecuteGeorgeScriptWithResult", + "CollectSceneData", "TVPaintCommands" ) From c52959adc3b8136eca8c3f905ae7c9148642b147 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 14 Oct 2021 13:05:59 +0200 Subject: [PATCH 056/211] fix send job method --- openpype/modules/default_modules/job_queue/module.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/default_modules/job_queue/module.py 
b/openpype/modules/default_modules/job_queue/module.py index 39daa961b1..cdcfafdce1 100644 --- a/openpype/modules/default_modules/job_queue/module.py +++ b/openpype/modules/default_modules/job_queue/module.py @@ -1,5 +1,5 @@ import sys - +import json if sys.version_info[0] == 2: from urlparse import urlsplit, urlunsplit else: @@ -58,7 +58,7 @@ class JobQueueModule(OpenPypeModule): job_data = job_data or {} job_data["host_name"] = host_name api_path = "{}/api/jobs".format(self._server_url) - post_request = requests.post(api_path, data=job_data) + post_request = requests.post(api_path, data=json.dumps(job_data)) return str(post_request.content.decode()) def get_job_status(self, job_id): From bb02a57acd3b83e5bebbea9347b72b143f8c0338 Mon Sep 17 00:00:00 2001 From: davidlatwe Date: Mon, 1 Nov 2021 02:45:58 +0800 Subject: [PATCH 057/211] collect and integrate workfile centric dependency links --- .../publish/collect_scene_loaded_versions.py | 61 +++++++++++++ .../plugins/publish/integrate_inputlinks.py | 90 +++++++++++++++++++ 2 files changed, 151 insertions(+) create mode 100644 openpype/plugins/publish/collect_scene_loaded_versions.py create mode 100644 openpype/plugins/publish/integrate_inputlinks.py diff --git a/openpype/plugins/publish/collect_scene_loaded_versions.py b/openpype/plugins/publish/collect_scene_loaded_versions.py new file mode 100644 index 0000000000..199a467dec --- /dev/null +++ b/openpype/plugins/publish/collect_scene_loaded_versions.py @@ -0,0 +1,61 @@ + +import pyblish.api + + +class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): + + order = pyblish.api.CollectorOrder + 0.0001 + label = "Collect Versions Loaded in Scene" + hosts = [ + "aftereffects", + "blender", + "celaction", + "fusion", + "harmony", + "hiero", + "houdini", + "maya", + "nuke", + "photoshop", + "resolve", + "tvpaint" + ] + + def process(self, context): + from avalon import api, io + + current_file = context.data.get("currentFile") + if not current_file: + self.log.warn("No 
work file collected.") + return + + host = api.registered_host() + if host is None: + self.log.warn("No registered host.") + return + + if not hasattr(host, "ls"): + host_name = host.__name__ + self.log.warn("Host %r doesn't have ls() implemented." % host_name) + return + + loaded_versions = [] + _containers = list(host.ls()) + _repr_ids = [io.ObjectId(c["representation"]) for c in _containers] + version_by_repr = { + str(doc["_id"]): doc["parent"] for doc in + io.find({"_id": {"$in": _repr_ids}}, projection={"parent": 1}) + } + + for con in _containers: + # NOTE: + # may have more then one representation that are same version + version = { + "objectName": con["objectName"], # container node name + "subsetName": con["name"], + "representation": io.ObjectId(con["representation"]), + "version": version_by_repr[con["representation"]], # _id + } + loaded_versions.append(version) + + context.data["loadedVersions"] = loaded_versions diff --git a/openpype/plugins/publish/integrate_inputlinks.py b/openpype/plugins/publish/integrate_inputlinks.py new file mode 100644 index 0000000000..fbd8738927 --- /dev/null +++ b/openpype/plugins/publish/integrate_inputlinks.py @@ -0,0 +1,90 @@ + +import pyblish.api + + +class IntegrateInputLinks(pyblish.api.ContextPlugin): + """Connecting version level dependency links""" + + order = pyblish.api.IntegratorOrder + 0.2 + label = "Connect Dependency InputLinks" + + def process(self, context): + workfile = None + publishing = [] + + for instance in context: + version_doc = instance.data.get("versionEntity") + if not version_doc: + self.log.debug("Instance %s doesn't have version." 
% instance) + continue + + version_data = version_doc.get("data", {}) + families = version_data.get("families", []) + self.log.debug(families) + + if "workfile" in families: + workfile = instance + else: + publishing.append(instance) + + if workfile is None: + self.log.warn("No workfile in this publish session.") + else: + workfile_version_doc = workfile.data["versionEntity"] + # link all loaded versions in scene into workfile + for version in context.data.get("loadedVersions", []): + self.add_link( + link_type="reference", + input_id=version["version"], + version_doc=workfile_version_doc, + ) + # link workfile to all publishing versions + for instance in publishing: + self.add_link( + link_type="generative", + input_id=workfile_version_doc["_id"], + version_doc=instance.data["versionEntity"], + ) + + # link versions as dependencies to the instance + for instance in publishing: + for input_version in instance.data.get("inputVersions") or []: + self.add_link( + link_type="generative", + input_id=input_version, + version_doc=instance.data["versionEntity"], + ) + + self.write_links_to_database(context) + + def add_link(self, link_type, input_id, version_doc): + from collections import OrderedDict + from avalon import io + # NOTE: + # using OrderedDict() here is just for ensuring field order between + # python versions, if we ever need to use mongodb operation '$addToSet' + # to update and avoid duplicating elements in 'inputLinks' array in the + # future. 
+ link = OrderedDict() + link["type"] = link_type + link["input"] = io.ObjectId(input_id) + link["linkedBy"] = "publish" + + if "inputLinks" not in version_doc["data"]: + version_doc["data"]["inputLinks"] = [] + version_doc["data"]["inputLinks"].append(link) + + def write_links_to_database(self, context): + from avalon import io + + for instance in context: + version_doc = instance.data.get("versionEntity") + if version_doc is None: + continue + + input_links = version_doc["data"].get("inputLinks") + if input_links is None: + continue + + io.update_one({"_id": version_doc["_id"]}, + {"$set": {"data.inputLinks": input_links}}) From 323472f002fad535dfac9c3b78be34187233259d Mon Sep 17 00:00:00 2001 From: davidlatwe Date: Mon, 1 Nov 2021 02:48:01 +0800 Subject: [PATCH 058/211] improve plugin name to differ from the new 'Versions Loaded in Scene' plugin --- openpype/plugins/publish/collect_scene_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_scene_version.py b/openpype/plugins/publish/collect_scene_version.py index ca12f2900c..8ed6e25e66 100644 --- a/openpype/plugins/publish/collect_scene_version.py +++ b/openpype/plugins/publish/collect_scene_version.py @@ -10,7 +10,7 @@ class CollectSceneVersion(pyblish.api.ContextPlugin): """ order = pyblish.api.CollectorOrder - label = 'Collect Version' + label = 'Collect Scene Version' hosts = [ "aftereffects", "blender", From cbe8a83067aff29dfc577c766bec4d64d38d953c Mon Sep 17 00:00:00 2001 From: davidlatwe Date: Mon, 1 Nov 2021 03:16:59 +0800 Subject: [PATCH 059/211] remove unused debug message --- openpype/plugins/publish/integrate_inputlinks.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_inputlinks.py b/openpype/plugins/publish/integrate_inputlinks.py index fbd8738927..b012427aec 100644 --- a/openpype/plugins/publish/integrate_inputlinks.py +++ b/openpype/plugins/publish/integrate_inputlinks.py @@ -20,7 +20,6 @@ class 
IntegrateInputLinks(pyblish.api.ContextPlugin): version_data = version_doc.get("data", {}) families = version_data.get("families", []) - self.log.debug(families) if "workfile" in families: workfile = instance From 74f3f80bb6dfd3bf5dbb8be2fa1e96f1f3152577 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 4 Nov 2021 14:47:34 +0100 Subject: [PATCH 060/211] initial commit of lib for tvpaint which contains render logic --- openpype/hosts/tvpaint/lib.py | 410 ++++++++++++++++++++++++++++++++++ 1 file changed, 410 insertions(+) create mode 100644 openpype/hosts/tvpaint/lib.py diff --git a/openpype/hosts/tvpaint/lib.py b/openpype/hosts/tvpaint/lib.py new file mode 100644 index 0000000000..507374442f --- /dev/null +++ b/openpype/hosts/tvpaint/lib.py @@ -0,0 +1,410 @@ +def backwards_id_conversion(data_by_layer_id): + """Convert layer ids to strings from integers.""" + for key in tuple(data_by_layer_id.keys()): + if not isinstance(str): + data_by_layer_id[str(key)] = data_by_layer_id.pop(key) + + +def get_base_filename_template(frame_end, ext=None): + """Get filetemplate for rendered files. + + This is simple template contains `{frame}{ext}` for sequential outputs + and `single_file{ext}` for single file output. Output is rendered to + temporary folder so filename should not matter as integrator change + them. + """ + frame_padding = 4 + frame_end_str_len = len(str(frame_end)) + if frame_end_str_len > frame_padding: + frame_padding = frame_end_str_len + + if ext is None: + ext = ".png" + return "{{frame:0>{}}}{}".format(frame_padding, ext) + + +def get_layer_filename_template(base_template): + return "pos_{pos}." + base_template + + +def _calculate_pre_behavior_copy( + range_start, exposure_frames, pre_beh, + layer_frame_start, layer_frame_end, + output_idx_by_frame_idx +): + """Calculate frames before first exposure frame based on pre behavior. + + Function may skip whole processing if first exposure frame is before + layer's first frame. 
In that case pre behavior does not make sense. + + Args: + range_start(int): First frame of range which should be rendered. + exposure_frames(list): List of all exposure frames on layer. + pre_beh(str): Pre behavior of layer (enum of 4 strings). + layer_frame_start(int): First frame of layer. + layer_frame_end(int): Last frame of layer. + output_idx_by_frame_idx(dict): References to already prepared frames + and where result will be stored. + """ + # Check if last layer frame is after range end + if layer_frame_start < range_start: + return + + first_exposure_frame = min(exposure_frames) + # Skip if last exposure frame is after range end + if first_exposure_frame < range_start: + return + + # Calculate frame count of layer + frame_count = layer_frame_end - layer_frame_start + 1 + + if pre_beh == "none": + # Just fill all frames from last exposure frame to range end with None + for frame_idx in range(range_start, layer_frame_start): + output_idx_by_frame_idx[frame_idx] = None + + elif pre_beh == "hold": + # Keep first frame for whole time + for frame_idx in range(range_start, layer_frame_start): + output_idx_by_frame_idx[frame_idx] = first_exposure_frame + + elif pre_beh in ("loop", "repeat"): + # Loop backwards from last frame of layer + for frame_idx in reversed(range(range_start, layer_frame_start)): + eq_frame_idx_offset = ( + (layer_frame_end - frame_idx) % frame_count + ) + eq_frame_idx = layer_frame_end - eq_frame_idx_offset + output_idx_by_frame_idx[frame_idx] = eq_frame_idx + + elif pre_beh == "pingpong": + half_seq_len = frame_count - 1 + seq_len = half_seq_len * 2 + for frame_idx in reversed(range(range_start, layer_frame_start)): + eq_frame_idx_offset = (layer_frame_start - frame_idx) % seq_len + if eq_frame_idx_offset > half_seq_len: + eq_frame_idx_offset = (seq_len - eq_frame_idx_offset) + eq_frame_idx = layer_frame_start + eq_frame_idx_offset + output_idx_by_frame_idx[frame_idx] = eq_frame_idx + + +def _calculate_post_behavior_copy( + range_end, 
exposure_frames, post_beh, + layer_frame_start, layer_frame_end, + output_idx_by_frame_idx +): + """Calculate frames after last frame of layer based on post behavior. + + Function may skip whole processing if last layer frame is after range_end. + In that case post behavior does not make sense. + + Args: + range_end(int): Last frame of range which should be rendered. + exposure_frames(list): List of all exposure frames on layer. + post_beh(str): Post behavior of layer (enum of 4 strings). + layer_frame_start(int): First frame of layer. + layer_frame_end(int): Last frame of layer. + output_idx_by_frame_idx(dict): References to already prepared frames + and where result will be stored. + """ + # Check if last layer frame is after range end + if layer_frame_end >= range_end: + return + + last_exposure_frame = max(exposure_frames) + # Skip if last exposure frame is after range end + # - this is probably irrelevant with layer frame end check? + if last_exposure_frame >= range_end: + return + + # Calculate frame count of layer + frame_count = layer_frame_end - layer_frame_start + 1 + + if post_beh == "none": + # Just fill all frames from last exposure frame to range end with None + for frame_idx in range(layer_frame_end + 1, range_end + 1): + output_idx_by_frame_idx[frame_idx] = None + + elif post_beh == "hold": + # Keep last exposure frame to the end + for frame_idx in range(layer_frame_end + 1, range_end + 1): + output_idx_by_frame_idx[frame_idx] = last_exposure_frame + + elif post_beh in ("loop", "repeat"): + # Loop backwards from last frame of layer + for frame_idx in range(layer_frame_end + 1, range_end + 1): + eq_frame_idx = frame_idx % frame_count + output_idx_by_frame_idx[frame_idx] = eq_frame_idx + + elif post_beh == "pingpong": + half_seq_len = frame_count - 1 + seq_len = half_seq_len * 2 + for frame_idx in range(layer_frame_end + 1, range_end + 1): + eq_frame_idx_offset = (frame_idx - layer_frame_end) % seq_len + if eq_frame_idx_offset > half_seq_len: + 
eq_frame_idx_offset = seq_len - eq_frame_idx_offset + eq_frame_idx = layer_frame_end - eq_frame_idx_offset + output_idx_by_frame_idx[frame_idx] = eq_frame_idx + + +def _calculate_in_range_frames( + range_start, range_end, + exposure_frames, layer_frame_end, + output_idx_by_frame_idx +): + """Calculate frame references in defined range. + + Function may skip whole processing if last layer frame is after range_end. + In that case post behavior does not make sense. + + Args: + range_start(int): First frame of range which should be rendered. + range_end(int): Last frame of range which should be rendered. + exposure_frames(list): List of all exposure frames on layer. + layer_frame_end(int): Last frame of layer. + output_idx_by_frame_idx(dict): References to already prepared frames + and where result will be stored. + """ + # Calculate in range frames + in_range_frames = [] + for frame_idx in exposure_frames: + if range_start <= frame_idx <= range_end: + output_idx_by_frame_idx[frame_idx] = frame_idx + in_range_frames.append(frame_idx) + + if in_range_frames: + first_in_range_frame = min(in_range_frames) + # Calculate frames from first exposure frames to range end or last + # frame of layer (post behavior should be calculated since that time) + previous_exposure = first_in_range_frame + for frame_idx in range(first_in_range_frame, range_end + 1): + if frame_idx > layer_frame_end: + break + + if frame_idx in exposure_frames: + previous_exposure = frame_idx + else: + output_idx_by_frame_idx[frame_idx] = previous_exposure + + # There can be frames before first exposure frame in range + # First check if we don't alreade have first range frame filled + if range_start in output_idx_by_frame_idx: + return + + first_exposure_frame = max(exposure_frames) + last_exposure_frame = max(exposure_frames) + # Check if is first exposure frame smaller than defined range + # if not then skip + if first_exposure_frame >= range_start: + return + + # Check is if last exposure frame is also 
before range start + # in that case we can't use fill frames before out range + if last_exposure_frame < range_start: + return + + closest_exposure_frame = first_exposure_frame + for frame_idx in exposure_frames: + if frame_idx >= range_start: + break + if frame_idx > closest_exposure_frame: + closest_exposure_frame = frame_idx + + output_idx_by_frame_idx[closest_exposure_frame] = closest_exposure_frame + for frame_idx in range(range_start, range_end + 1): + if frame_idx in output_idx_by_frame_idx: + break + output_idx_by_frame_idx[frame_idx] = closest_exposure_frame + + +def _cleanup_frame_references(output_idx_by_frame_idx): + """Cleanup frame references to frame reference. + + Cleanup not direct references to rendered frame. + ``` + // Example input + { + 1: 1, + 2: 1, + 3: 2 + } + // Result + { + 1: 1, + 2: 1, + 3: 1 // Changed reference to final rendered frame + } + ``` + Result is dictionary where keys leads to frame that should be rendered. + """ + for frame_idx in tuple(output_idx_by_frame_idx.keys()): + reference_idx = output_idx_by_frame_idx[frame_idx] + if reference_idx == frame_idx: + continue + + real_reference_idx = reference_idx + _tmp_reference_idx = reference_idx + while True: + _temp = output_idx_by_frame_idx[_tmp_reference_idx] + if _temp == _tmp_reference_idx: + real_reference_idx = _tmp_reference_idx + break + _tmp_reference_idx = _temp + + if real_reference_idx != reference_idx: + output_idx_by_frame_idx[frame_idx] = real_reference_idx + + +def calculate_layer_frame_references( + range_start, range_end, + layer_frame_start, + layer_frame_end, + exposure_frames, + pre_beh, post_beh +): + """Calculate frame references for one layer based on it's data. + + Output is dictionary where key is frame index referencing to rendered frame + index. If frame index should be rendered then is referencing to self. 
+ + ``` + // Example output + { + 1: 1, // Reference to self - will be rendered + 2: 1, // Reference to frame 1 - will be copied + 3: 1, // Reference to frame 1 - will be copied + 4: 4, // Reference to self - will be rendered + ... + 20: 4 // Reference to frame 4 - will be copied + 21: None // Has reference to None - transparent image + } + ``` + + Args: + range_start(int): First frame of range which should be rendered. + range_end(int): Last frame of range which should be rendered. + layer_frame_start(int)L First frame of layer. + layer_frame_end(int): Last frame of layer. + exposure_frames(list): List of all exposure frames on layer. + pre_beh(str): Pre behavior of layer (enum of 4 strings). + post_beh(str): Post behavior of layer (enum of 4 strings). + """ + # Output variable + output_idx_by_frame_idx = {} + # Skip if layer does not have any exposure frames + if not exposure_frames: + return output_idx_by_frame_idx + + # First calculate in range frames + _calculate_in_range_frames( + range_start, range_end, + exposure_frames, layer_frame_end, + output_idx_by_frame_idx + ) + # Calculate frames by pre behavior of layer + _calculate_pre_behavior_copy( + range_start, exposure_frames, pre_beh, + layer_frame_start, layer_frame_end, + output_idx_by_frame_idx + ) + # Calculate frames by post behavior of layer + _calculate_post_behavior_copy( + range_end, exposure_frames, post_beh, + layer_frame_start, layer_frame_end, + output_idx_by_frame_idx + ) + # Cleanup of referenced frames + _cleanup_frame_references(output_idx_by_frame_idx) + + return output_idx_by_frame_idx + + +def calculate_layers_extraction_data( + layers_data, + exposure_frames_by_id, + behavior_by_layer_id, + range_start, + range_end, + skip_not_visible=True +): + """Calculate extraction data for passed layers data. + + Args: + layers_data(list): Layers data loaded from TVPaint. + exposure_frames_by_id(dict): Exposure frames of layers stored by + layer id. 
+ behavior_by_layer_id(dict): Pre and Post behavior of layers stored by + layer id. + range_start(int): First frame of rendered range. + range_end(int): Last frame of rendered range. + skip_not_visible(bool): Skip calculations for hidden layers (Skipped + by default). + + Returns: + dict: Prepared data for rendering by layer position. + """ + # Make sure layer ids are strings + # backwards compatibility when layer ids were integers + backwards_id_conversion(exposure_frames_by_id) + backwards_id_conversion(behavior_by_layer_id) + + base_template = get_base_filename_template(range_end) + layer_template = get_layer_filename_template(base_template) + output = {} + for layer_data in layers_data: + if skip_not_visible and not layer_data["visible"]: + continue + + layer_id = str(layer_data["layer_id"]) + + # Skip if does not have any exposure frames (empty layer) + exposure_frames = exposure_frames_by_id[layer_id] + if not exposure_frames: + continue + + layer_position = layer_data["position"] + layer_frame_start = layer_data["frame_start"] + layer_frame_end = layer_data["frame_end"] + + layer_behavior = behavior_by_layer_id[layer_id] + + pre_behavior = layer_behavior["pre"] + post_behavior = layer_behavior["post"] + + frame_references = calculate_layer_frame_references( + range_start, range_end, + layer_frame_start, + layer_frame_end, + exposure_frames, + pre_behavior, post_behavior + ) + # All values in 'frame_references' reference to a frame that must be + # rendered out + frames_to_render = set(frame_references.values()) + # Remove 'None' reference (transparent image) + if None in frames_to_render: + frames_to_render.remove(None) + + # Skip layer if has nothing to render + if not frames_to_render: + continue + + # All filenames that should be as output (not final output) + filename_frames = ( + set(range(range_start, range_end + 1)) + | frames_to_render + ) + filenames_by_frame_index = {} + for frame_idx in filename_frames: + filenames_by_frame_index[frame_idx] = 
layer_template.format( + pos=layer_position, + frame=frame_idx + ) + + # Store objects under the layer id + # so if layer is skipped at any part they will be there + output[layer_position] = { + "frame_references": frame_references, + "filenames_by_frame_index": filenames_by_frame_index + } + return output From f7bdcb1f8d1e1882be9efa3fd2275288758bc24b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 4 Nov 2021 16:20:42 +0100 Subject: [PATCH 061/211] store result by layer id --- openpype/hosts/tvpaint/lib.py | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/tvpaint/lib.py b/openpype/hosts/tvpaint/lib.py index 507374442f..e6fc382a3d 100644 --- a/openpype/hosts/tvpaint/lib.py +++ b/openpype/hosts/tvpaint/lib.py @@ -329,6 +329,22 @@ def calculate_layers_extraction_data( ): """Calculate extraction data for passed layers data. + ``` + { + : { + "frame_references": {...}, + "filenames_by_frame_index": {...} + }, + ... + } + ``` + + Frame references contains frame index reference to rendered frame index. + + Filename by frame index represents filename under which should be frame + stored. Directory is not handled here because each usage may need different + approach. + Args: layers_data(list): Layers data loaded from TVPaint. 
exposure_frames_by_id(dict): Exposure frames of layers stored by @@ -355,7 +371,8 @@ def calculate_layers_extraction_data( if skip_not_visible and not layer_data["visible"]: continue - layer_id = str(layer_data["layer_id"]) + orig_layer_id = layer_data["layer_id"] + layer_id = str(orig_layer_id) # Skip if does not have any exposure frames (empty layer) exposure_frames = exposure_frames_by_id[layer_id] @@ -402,8 +419,7 @@ def calculate_layers_extraction_data( ) # Store objects under the layer id - # so if layer is skipped at any part they will be there - output[layer_position] = { + output[orig_layer_id] = { "frame_references": frame_references, "filenames_by_frame_index": filenames_by_frame_index } From bfa0bb53bbfbb8273ae0e4bab53482f16b18202b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 4 Nov 2021 19:05:11 +0100 Subject: [PATCH 062/211] renamed get_base_filename_template to get_frame_filename_template --- openpype/hosts/tvpaint/lib.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/tvpaint/lib.py b/openpype/hosts/tvpaint/lib.py index e6fc382a3d..33ff3c7b77 100644 --- a/openpype/hosts/tvpaint/lib.py +++ b/openpype/hosts/tvpaint/lib.py @@ -5,8 +5,8 @@ def backwards_id_conversion(data_by_layer_id): data_by_layer_id[str(key)] = data_by_layer_id.pop(key) -def get_base_filename_template(frame_end, ext=None): - """Get filetemplate for rendered files. +def get_frame_filename_template(frame_end, filename_prefix=None, ext=None): + """Get file template with frame key for rendered files. This is simple template contains `{frame}{ext}` for sequential outputs and `single_file{ext}` for single file output. 
Output is rendered to @@ -321,11 +321,13 @@ def calculate_layer_frame_references( def calculate_layers_extraction_data( layers_data, - exposure_frames_by_id, + exposure_frames_by_layer_id, behavior_by_layer_id, range_start, range_end, - skip_not_visible=True + skip_not_visible=True, + filename_prefix=None, + ext=None ): """Calculate extraction data for passed layers data. @@ -364,8 +366,10 @@ def calculate_layers_extraction_data( backwards_id_conversion(exposure_frames_by_id) backwards_id_conversion(behavior_by_layer_id) - base_template = get_base_filename_template(range_end) - layer_template = get_layer_filename_template(base_template) + frame_template = get_frame_filename_template( + range_end, filename_prefix, ext + ) + layer_template = get_layer_filename_template(frame_template) output = {} for layer_data in layers_data: if skip_not_visible and not layer_data["visible"]: From d1a3361d837438a507b9159c8ecdba87a9c27cf5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 4 Nov 2021 19:10:28 +0100 Subject: [PATCH 063/211] changed how position template is created --- openpype/hosts/tvpaint/lib.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/tvpaint/lib.py b/openpype/hosts/tvpaint/lib.py index 33ff3c7b77..f6ce38462f 100644 --- a/openpype/hosts/tvpaint/lib.py +++ b/openpype/hosts/tvpaint/lib.py @@ -18,13 +18,16 @@ def get_frame_filename_template(frame_end, filename_prefix=None, ext=None): if frame_end_str_len > frame_padding: frame_padding = frame_end_str_len - if ext is None: - ext = ".png" - return "{{frame:0>{}}}{}".format(frame_padding, ext) + ext = ext or ".png" + filename_prefix = filename_prefix or "" + + return "{}{{frame:0>{}}}{}".format(filename_prefix, frame_padding, ext) -def get_layer_filename_template(base_template): - return "pos_{pos}." 
+ base_template +def get_layer_pos_filename_template(range_end, filename_prefix=None, ext=None): + filename_prefix = filename_prefix or "" + new_filename_prefix = filename_prefix + "pos_{pos}." + return get_frame_filename_template(range_end, new_filename_prefix, ext) def _calculate_pre_behavior_copy( @@ -366,10 +369,9 @@ def calculate_layers_extraction_data( backwards_id_conversion(exposure_frames_by_id) backwards_id_conversion(behavior_by_layer_id) - frame_template = get_frame_filename_template( + layer_template = get_layer_pos_filename_template( range_end, filename_prefix, ext ) - layer_template = get_layer_filename_template(frame_template) output = {} for layer_data in layers_data: if skip_not_visible and not layer_data["visible"]: From 1b925901c035d64a516afffd8e5edb0a940e993e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 4 Nov 2021 19:11:06 +0100 Subject: [PATCH 064/211] added function to create transparent copy of source image --- openpype/hosts/tvpaint/lib.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/openpype/hosts/tvpaint/lib.py b/openpype/hosts/tvpaint/lib.py index f6ce38462f..af01994934 100644 --- a/openpype/hosts/tvpaint/lib.py +++ b/openpype/hosts/tvpaint/lib.py @@ -1,3 +1,6 @@ +from PIL import Image, ImageDraw + + def backwards_id_conversion(data_by_layer_id): """Convert layer ids to strings from integers.""" for key in tuple(data_by_layer_id.keys()): @@ -430,3 +433,11 @@ def calculate_layers_extraction_data( "filenames_by_frame_index": filenames_by_frame_index } return output + + +def create_transparent_image_from_source(src_filepath, dst_filepath): + """Create transparent image of same type and size as source image.""" + img_obj = Image.open(src_filepath) + painter = ImageDraw.Draw(img_obj) + painter.rectangle((0, 0, *img_obj.size), fill=(0, 0, 0, 0)) + img_obj.save(dst_filepath) From 2dddd974bea3a558412f6b2b797d3ec5f0af7a25 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 4 Nov 2021 19:11:31 +0100 Subject: [PATCH 
065/211] added function to fill referenced frames

---
 openpype/hosts/tvpaint/lib.py | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)

diff --git a/openpype/hosts/tvpaint/lib.py b/openpype/hosts/tvpaint/lib.py
index af01994934..1c9c303cc5 100644
--- a/openpype/hosts/tvpaint/lib.py
+++ b/openpype/hosts/tvpaint/lib.py
@@ -1,3 +1,5 @@
+import os
+import shutil
 from PIL import Image, ImageDraw
 
 
@@ -441,3 +443,21 @@ def create_transparent_image_from_source(src_filepath, dst_filepath):
     painter = ImageDraw.Draw(img_obj)
     painter.rectangle((0, 0, *img_obj.size), fill=(0, 0, 0, 0))
     img_obj.save(dst_filepath)
+
+
+def fill_reference_frames(frame_references, filepaths_by_frame):
+    # Store path to first transparent image if there is any
+    for frame_idx, ref_idx in frame_references.items():
+        # Frame referencing to self should be rendered and used as source
+        # and reference indexes with None can't be filled
+        if ref_idx is None or frame_idx == ref_idx:
+            continue
+
+        # Get destination filepath
+        src_filepath = filepaths_by_frame[ref_idx]
+        dst_filepath = filepaths_by_frame[frame_idx]
+
+        if hasattr(os, "link"):
+            os.link(src_filepath, dst_filepath)
+        else:
+            shutil.copy(src_filepath, dst_filepath)

From 2879c62b07f2db5f7267fd704e13cd222672ca8d Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 4 Nov 2021 19:11:54 +0100
Subject: [PATCH 066/211] added helper function for copying files

---
 openpype/hosts/tvpaint/lib.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/openpype/hosts/tvpaint/lib.py b/openpype/hosts/tvpaint/lib.py
index 1c9c303cc5..20f19e010c 100644
--- a/openpype/hosts/tvpaint/lib.py
+++ b/openpype/hosts/tvpaint/lib.py
@@ -461,3 +461,11 @@ def fill_reference_frames(frame_references, filepaths_by_frame):
             os.link(src_filepath, dst_filepath)
         else:
             shutil.copy(src_filepath, dst_filepath)
+
+
+def copy_render_file(src_path, dst_path):
+    """Create copy file of an image."""
+    if hasattr(os, "link"):
+        os.link(src_path, dst_path)
+    else:
+        
shutil.copy(src_path, dst_path) From 6093f2d26a0a2612b99dbccac75abd04e778b447 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 4 Nov 2021 19:12:29 +0100 Subject: [PATCH 067/211] created copy of composite_images from tvpaint api --- openpype/hosts/tvpaint/lib.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/openpype/hosts/tvpaint/lib.py b/openpype/hosts/tvpaint/lib.py index 20f19e010c..b2e27325b2 100644 --- a/openpype/hosts/tvpaint/lib.py +++ b/openpype/hosts/tvpaint/lib.py @@ -469,3 +469,22 @@ def copy_render_file(src_path, dst_path): os.link(src_path, dst_path) else: shutil.copy(src_path, dst_path) + + +def composite_images(input_image_paths, output_filepath): + """Composite images in order from passed list. + + Raises: + ValueError: When entered list is empty. + """ + if not input_image_paths: + raise ValueError("Nothing to composite.") + + img_obj = None + for image_filepath in input_image_paths: + _img_obj = Image.open(image_filepath) + if img_obj is None: + img_obj = _img_obj + else: + img_obj.alpha_composite(_img_obj) + img_obj.save(output_filepath) From bfbbbbb22ffa51e0e3990de91b0014c649afc5a9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 4 Nov 2021 19:12:52 +0100 Subject: [PATCH 068/211] added function to remove all rendered frames --- openpype/hosts/tvpaint/lib.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/openpype/hosts/tvpaint/lib.py b/openpype/hosts/tvpaint/lib.py index b2e27325b2..e9ddb8ec84 100644 --- a/openpype/hosts/tvpaint/lib.py +++ b/openpype/hosts/tvpaint/lib.py @@ -471,6 +471,19 @@ def copy_render_file(src_path, dst_path): shutil.copy(src_path, dst_path) +def cleanup_rendered_layers(filepaths_by_layer_id): + """Delete all files for each individual layer files after compositing.""" + # Collect all filepaths from data + all_filepaths = [] + for filepaths_by_frame in filepaths_by_layer_id.values(): + all_filepaths.extend(filepaths_by_frame.values()) + + # Loop over loop + for filepath in 
set(all_filepaths): + if filepath is not None and os.path.exists(filepath): + os.remove(filepath) + + def composite_images(input_image_paths, output_filepath): """Composite images in order from passed list. From 2fa39d1f679a980b8815a1cb65db85401ab7add9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 4 Nov 2021 19:13:05 +0100 Subject: [PATCH 069/211] implemented compositing function --- openpype/hosts/tvpaint/lib.py | 83 +++++++++++++++++++++++++++++++++++ 1 file changed, 83 insertions(+) diff --git a/openpype/hosts/tvpaint/lib.py b/openpype/hosts/tvpaint/lib.py index e9ddb8ec84..67e7f67980 100644 --- a/openpype/hosts/tvpaint/lib.py +++ b/openpype/hosts/tvpaint/lib.py @@ -484,6 +484,89 @@ def cleanup_rendered_layers(filepaths_by_layer_id): os.remove(filepath) +def composite_rendered_layers( + layers_data, filepaths_by_layer_id, + range_start, range_end, + dst_filepaths_by_frame, cleanup=True +): + """Composite multiple rendered layers by their position. + + Result is single frame sequence with transparency matching content + created in TVPaint. Missing source filepaths are replaced with transparent + images but at least one image must be rendered and exist. + + Function can be used even if single layer was created to fill transparent + filepaths. + + Args: + layers_data(list): Layers data loaded from TVPaint. + filepaths_by_layer_id(dict): Rendered filepaths stored by frame index + per layer id. Used as source for compositing. + range_start(int): First frame of rendered range. + range_end(int): Last frame of rendered range. + dst_filepaths_by_frame(dict): Output filepaths by frame where final + image after compositing will be stored. Path must not clash with + source filepaths. + cleanup(bool): Remove all source filepaths when done with compositing. 
+ """ + # Prepare layers by their position + # - position tells in which order will compositing happen + layer_ids_by_position = {} + for layer in layers_data: + layer_position = layer["position"] + layer_ids_by_position[layer_position] = layer["layer_id"] + + # Sort layer positions + sorted_positions = tuple(sorted(layer_ids_by_position.keys())) + # Prepare variable where filepaths without any rendered content + # - transparent will be created + transparent_filepaths = set() + # Store first final filepath + first_dst_filepath = None + for frame_idx in range(range_start, range_end + 1): + dst_filepath = dst_filepaths_by_frame[frame_idx] + src_filepaths = [] + for layer_position in sorted_positions: + layer_id = layer_ids_by_position[layer_position] + filepaths_by_frame = filepaths_by_layer_id[layer_id] + src_filepath = filepaths_by_frame.get(frame_idx) + if src_filepath is not None: + src_filepaths.append(src_filepath) + + if not src_filepaths: + transparent_filepaths.add(dst_filepath) + continue + + # Store first destionation filepath to be used for transparent images + if first_dst_filepath is None: + first_dst_filepath = dst_filepath + + if len(src_filepaths) == 1: + src_filepath = src_filepaths[0] + if cleanup: + os.rename(src_filepath, dst_filepath) + else: + copy_render_file(src_filepath, dst_filepath) + + else: + composite_images(src_filepaths, dst_filepath) + + # Store first transparent filepath to be able copy it + transparent_filepath = None + for dst_filepath in transparent_filepaths: + if transparent_filepath is None: + create_transparent_image_from_source( + first_dst_filepath, dst_filepath + ) + transparent_filepath = dst_filepath + else: + copy_render_file(transparent_filepath, dst_filepath) + + # Remove all files that were used as source for compositing + if cleanup: + cleanup_rendered_layers(filepaths_by_layer_id) + + def composite_images(input_image_paths, output_filepath): """Composite images in order from passed list. 
From c2dbfbb4e2d34224c43fdc9ec8ae6a21a79a0074 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 4 Nov 2021 19:13:14 +0100 Subject: [PATCH 070/211] fixed variable names --- openpype/hosts/tvpaint/lib.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/tvpaint/lib.py b/openpype/hosts/tvpaint/lib.py index 67e7f67980..63386aec7d 100644 --- a/openpype/hosts/tvpaint/lib.py +++ b/openpype/hosts/tvpaint/lib.py @@ -357,7 +357,7 @@ def calculate_layers_extraction_data( Args: layers_data(list): Layers data loaded from TVPaint. - exposure_frames_by_id(dict): Exposure frames of layers stored by + exposure_frames_by_layer_id(dict): Exposure frames of layers stored by layer id. behavior_by_layer_id(dict): Pre and Post behavior of layers stored by layer id. @@ -365,13 +365,15 @@ def calculate_layers_extraction_data( range_end(int): Last frame of rendered range. skip_not_visible(bool): Skip calculations for hidden layers (Skipped by default). + filename_prefix(str): Prefix before filename. + ext(str): Extension which filenames will have ('.png' is default). Returns: dict: Prepared data for rendering by layer position. 
""" # Make sure layer ids are strings # backwards compatibility when layer ids were integers - backwards_id_conversion(exposure_frames_by_id) + backwards_id_conversion(exposure_frames_by_layer_id) backwards_id_conversion(behavior_by_layer_id) layer_template = get_layer_pos_filename_template( @@ -386,7 +388,7 @@ def calculate_layers_extraction_data( layer_id = str(orig_layer_id) # Skip if does not have any exposure frames (empty layer) - exposure_frames = exposure_frames_by_id[layer_id] + exposure_frames = exposure_frames_by_layer_id[layer_id] if not exposure_frames: continue From 4b276c7bb1cccbd225ebaa99b9ff9bb1e30f3d95 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 5 Nov 2021 11:15:40 +0100 Subject: [PATCH 071/211] added function renaming rendered output to final frame frange --- openpype/hosts/tvpaint/lib.py | 40 +++++++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/openpype/hosts/tvpaint/lib.py b/openpype/hosts/tvpaint/lib.py index 63386aec7d..7624658bca 100644 --- a/openpype/hosts/tvpaint/lib.py +++ b/openpype/hosts/tvpaint/lib.py @@ -586,3 +586,43 @@ def composite_images(input_image_paths, output_filepath): else: img_obj.alpha_composite(_img_obj) img_obj.save(output_filepath) + + +def rename_filepaths_by_frame_start( + filepaths_by_frame, range_start, range_end, new_frame_start +): + """Change frames in filenames of finished images to new frame start.""" + # Skip if source first frame is same as destination first frame + if range_start == new_frame_start: + return + + # Calculate frame end + new_frame_end = range_end + (new_frame_start - range_start) + # Create filename template + filename_template = get_frame_filename_template( + max(range_end, new_frame_end) + ) + + # Use differnet ranges based on Mark In and output Frame Start values + # - this is to make sure that filename renaming won't affect files that + # are not renamed yet + if range_start < new_frame_start: + source_range = range(range_end, range_start - 1, -1) + 
output_range = range(new_frame_end, new_frame_start - 1, -1) + else: + # This is less possible situation as frame start will be in most + # cases higher than Mark In. + source_range = range(range_start, range_end + 1) + output_range = range(new_frame_start, new_frame_end + 1) + + new_dst_filepaths = {} + for src_frame, dst_frame in zip(source_range, output_range): + src_filepath = filepaths_by_frame[src_frame] + src_dirpath = os.path.dirname(src_filepath) + dst_filename = filename_template.format(frame=dst_frame) + dst_filepath = os.path.join(src_dirpath, dst_filename) + + os.rename(src_filepath, dst_filepath) + + new_dst_filepaths[dst_frame] = dst_filepath + return new_dst_filepaths From ebc56eb9a45e5112f3b1d21f3406255eb99f80cd Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 5 Nov 2021 11:30:48 +0100 Subject: [PATCH 072/211] use lib code in extractor --- .../plugins/publish/extract_sequence.py | 556 ++++-------------- 1 file changed, 105 insertions(+), 451 deletions(-) diff --git a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py index c45ff53c3c..729f631029 100644 --- a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py +++ b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py @@ -6,6 +6,13 @@ import tempfile import pyblish.api from avalon.tvpaint import lib from openpype.hosts.tvpaint.api.lib import composite_images +from openpype.hosts.tvpaint.lib import ( + calculate_layers_extraction_data, + get_frame_filename_template, + fill_reference_frames, + composite_rendered_layers, + rename_filepaths_by_frame_start +) from PIL import Image, ImageDraw @@ -111,14 +118,6 @@ class ExtractSequence(pyblish.api.Extractor): # ------------------------------------------------------------------- - filename_template = self._get_filename_template( - # Use the biggest number - max(mark_out, frame_end) - ) - ext = os.path.splitext(filename_template)[1].replace(".", "") - - self.log.debug("Using 
file template \"{}\"".format(filename_template)) - # Save to staging dir output_dir = instance.data.get("stagingDir") if not output_dir: @@ -133,30 +132,30 @@ class ExtractSequence(pyblish.api.Extractor): ) if instance.data["family"] == "review": - output_filenames, thumbnail_fullpath = self.render_review( - filename_template, output_dir, mark_in, mark_out, - scene_bg_color + result = self.render_review( + output_dir, mark_in, mark_out, scene_bg_color ) else: # Render output - output_filenames, thumbnail_fullpath = self.render( - filename_template, output_dir, - mark_in, mark_out, - filtered_layers + result = self.render( + output_dir, mark_in, mark_out, filtered_layers ) + output_filepaths_by_frame_idx, thumbnail_fullpath = result + # Change scene frame Start back to previous value lib.execute_george("tv_startframe {}".format(scene_start_frame)) # Sequence of one frame - if not output_filenames: + if not output_filepaths_by_frame_idx: self.log.warning("Extractor did not create any output.") return repre_files = self._rename_output_files( - filename_template, output_dir, - mark_in, mark_out, - output_frame_start, output_frame_end + output_filepaths_by_frame_idx, + mark_in, + mark_out, + output_frame_start ) # Fill tags and new families @@ -169,9 +168,11 @@ class ExtractSequence(pyblish.api.Extractor): if single_file: repre_files = repre_files[0] + # Extension is harcoded + # - changing extension would require change code new_repre = { - "name": ext, - "ext": ext, + "name": "png", + "ext": "png", "files": repre_files, "stagingDir": output_dir, "tags": tags @@ -206,69 +207,28 @@ class ExtractSequence(pyblish.api.Extractor): } instance.data["representations"].append(thumbnail_repre) - def _get_filename_template(self, frame_end): - """Get filetemplate for rendered files. - - This is simple template contains `{frame}{ext}` for sequential outputs - and `single_file{ext}` for single file output. 
Output is rendered to - temporary folder so filename should not matter as integrator change - them. - """ - frame_padding = 4 - frame_end_str_len = len(str(frame_end)) - if frame_end_str_len > frame_padding: - frame_padding = frame_end_str_len - - return "{{frame:0>{}}}".format(frame_padding) + ".png" - def _rename_output_files( - self, filename_template, output_dir, - mark_in, mark_out, output_frame_start, output_frame_end + self, filepaths_by_frame, mark_in, mark_out, output_frame_start ): - # Use differnet ranges based on Mark In and output Frame Start values - # - this is to make sure that filename renaming won't affect files that - # are not renamed yet - mark_start_is_less = bool(mark_in < output_frame_start) - if mark_start_is_less: - marks_range = range(mark_out, mark_in - 1, -1) - frames_range = range(output_frame_end, output_frame_start - 1, -1) - else: - # This is less possible situation as frame start will be in most - # cases higher than Mark In. - marks_range = range(mark_in, mark_out + 1) - frames_range = range(output_frame_start, output_frame_end + 1) + new_filepaths_by_frame = rename_filepaths_by_frame_start( + filepaths_by_frame, mark_in, mark_out, output_frame_start + ) - repre_filepaths = [] - for mark, frame in zip(marks_range, frames_range): - new_filename = filename_template.format(frame=frame) - new_filepath = os.path.join(output_dir, new_filename) + repre_filenames = [] + for filepath in new_filepaths_by_frame.values(): + repre_filenames.append(os.path.basename(filepath)) - repre_filepaths.append(new_filepath) + if mark_in < output_frame_start: + repre_filenames = list(reversed(repre_filenames)) - if mark != frame: - old_filename = filename_template.format(frame=mark) - old_filepath = os.path.join(output_dir, old_filename) - os.rename(old_filepath, new_filepath) - - # Reverse repre files order if output - if mark_start_is_less: - repre_filepaths = list(reversed(repre_filepaths)) - - return [ - os.path.basename(path) - for path in 
repre_filepaths - ] + return repre_filenames def render_review( - self, filename_template, output_dir, mark_in, mark_out, scene_bg_color + self, output_dir, mark_in, mark_out, scene_bg_color ): """ Export images from TVPaint using `tv_savesequence` command. Args: - filename_template (str): Filename template of an output. Template - should already contain extension. Template may contain only - keyword argument `{frame}` or index argument (for same value). - Extension in template must match `save_mode`. output_dir (str): Directory where files will be stored. mark_in (int): Starting frame index from which export will begin. mark_out (int): On which frame index export will end. @@ -279,6 +239,8 @@ class ExtractSequence(pyblish.api.Extractor): tuple: With 2 items first is list of filenames second is path to thumbnail. """ + filename_template = get_frame_filename_template(mark_out) + self.log.debug("Preparing data for rendering.") first_frame_filepath = os.path.join( output_dir, @@ -313,12 +275,13 @@ class ExtractSequence(pyblish.api.Extractor): lib.execute_george_through_file("\n".join(george_script_lines)) first_frame_filepath = None - output_filenames = [] - for frame in range(mark_in, mark_out + 1): - filename = filename_template.format(frame=frame) - output_filenames.append(filename) - + output_filepaths_by_frame_idx = {} + for frame_idx in range(mark_in, mark_out + 1): + filename = filename_template.format(frame=frame_idx) filepath = os.path.join(output_dir, filename) + + output_filepaths_by_frame_idx[frame_idx] = filepath + if not os.path.exists(filepath): raise AssertionError( "Output was not rendered. 
File was not found {}".format( @@ -337,16 +300,12 @@ class ExtractSequence(pyblish.api.Extractor): source_img = source_img.convert("RGB") source_img.save(thumbnail_filepath) - return output_filenames, thumbnail_filepath + return output_filepaths_by_frame_idx, thumbnail_filepath - def render(self, filename_template, output_dir, mark_in, mark_out, layers): + def render(self, output_dir, mark_in, mark_out, layers): """ Export images from TVPaint. Args: - filename_template (str): Filename template of an output. Template - should already contain extension. Template may contain only - keyword argument `{frame}` or index argument (for same value). - Extension in template must match `save_mode`. output_dir (str): Directory where files will be stored. mark_in (int): Starting frame index from which export will begin. mark_out (int): On which frame index export will end. @@ -358,14 +317,18 @@ class ExtractSequence(pyblish.api.Extractor): """ self.log.debug("Preparing data for rendering.") + calculate_layers_extraction_data() # Map layers by position layers_by_position = {} + layers_by_id = {} layer_ids = [] for layer in layers: + layer_id = layer["layer_id"] position = layer["position"] layers_by_position[position] = layer + layers_by_id[layer_id] = layer - layer_ids.append(layer["layer_id"]) + layer_ids.append(layer_id) # Sort layer positions in reverse order sorted_positions = list(reversed(sorted(layers_by_position.keys()))) @@ -374,59 +337,45 @@ class ExtractSequence(pyblish.api.Extractor): self.log.debug("Collecting pre/post behavior of individual layers.") behavior_by_layer_id = lib.get_layers_pre_post_behavior(layer_ids) - - tmp_filename_template = "pos_{pos}." 
+ filename_template - - files_by_position = {} - for position in sorted_positions: - layer = layers_by_position[position] - behavior = behavior_by_layer_id[layer["layer_id"]] - - files_by_frames = self._render_layer( - layer, - tmp_filename_template, - output_dir, - behavior, - mark_in, - mark_out - ) - if files_by_frames: - files_by_position[position] = files_by_frames - else: - self.log.warning(( - "Skipped layer \"{}\". Probably out of Mark In/Out range." - ).format(layer["name"])) - - if not files_by_position: - layer_names = set(layer["name"] for layer in layers) - joined_names = ", ".join( - ["\"{}\"".format(name) for name in layer_names] - ) - self.log.warning( - "Layers {} do not have content in range {} - {}".format( - joined_names, mark_in, mark_out - ) - ) - return [], None - - output_filepaths = self._composite_files( - files_by_position, - mark_in, - mark_out, - filename_template, - output_dir + exposure_frames_by_layer_id = lib.get_layers_exposure_frames( + layer_ids, layers ) - self._cleanup_tmp_files(files_by_position) - - output_filenames = [ - os.path.basename(filepath) - for filepath in output_filepaths - ] + extraction_data_by_layer_id = calculate_layers_extraction_data( + layers, + exposure_frames_by_layer_id, + behavior_by_layer_id, + mark_in, + mark_out + ) + # Render layers + filepaths_by_layer_id = {} + for layer_id, render_data in extraction_data_by_layer_id.items(): + layer = layers_by_id[layer_id] + filepaths_by_layer_id = self._render_layer( + render_data, layer, output_dir + ) + # Prepare final filepaths where compositing should store result + output_filepaths_by_frame = {} thumbnail_src_filepath = None - if output_filepaths: - thumbnail_src_filepath = output_filepaths[0] + finale_template = get_frame_filename_template(mark_out) + for frame_idx in range(mark_in, mark_out + 1): + filename = finale_template.format(frame=frame_idx) + filepath = os.path.join(output_dir, filename) + output_filepaths_by_frame[frame_idx] = filepath + + if 
thumbnail_src_filepath is None: + thumbnail_src_filepath = filepath + + self.log.info("Started compositing of layer frames.") + composite_rendered_layers( + layers, filepaths_by_layer_id, + mark_in, mark_out, + output_filepaths_by_frame + ) + + self.log.info("Compositing finished") thumbnail_filepath = None if thumbnail_src_filepath and os.path.exists(thumbnail_src_filepath): source_img = Image.open(thumbnail_src_filepath) @@ -449,7 +398,7 @@ class ExtractSequence(pyblish.api.Extractor): ).format(source_img.mode)) source_img.save(thumbnail_filepath) - return output_filenames, thumbnail_filepath + return output_filepaths_by_frame, thumbnail_filepath def _get_review_bg_color(self): red = green = blue = 255 @@ -460,338 +409,43 @@ class ExtractSequence(pyblish.api.Extractor): red, green, blue = self.review_bg return (red, green, blue) - def _render_layer( - self, - layer, - tmp_filename_template, - output_dir, - behavior, - mark_in_index, - mark_out_index - ): + def _render_layer(self, render_data, layer, output_dir): + frame_references = render_data["frame_references"] + filenames_by_frame_index = render_data["filenames_by_frame_index"] + layer_id = layer["layer_id"] - frame_start_index = layer["frame_start"] - frame_end_index = layer["frame_end"] - - pre_behavior = behavior["pre"] - post_behavior = behavior["post"] - - # Check if layer is before mark in - if frame_end_index < mark_in_index: - # Skip layer if post behavior is "none" - if post_behavior == "none": - return {} - - # Check if layer is after mark out - elif frame_start_index > mark_out_index: - # Skip layer if pre behavior is "none" - if pre_behavior == "none": - return {} - - exposure_frames = lib.get_exposure_frames( - layer_id, frame_start_index, frame_end_index - ) - - if frame_start_index not in exposure_frames: - exposure_frames.append(frame_start_index) - - layer_files_by_frame = {} george_script_lines = [ + "tv_layerset {}".format(layer_id), "tv_SaveMode \"PNG\"" ] - layer_position = 
layer["position"]
-        for frame_idx in exposure_frames:
-            filename = tmp_filename_template.format(
-                pos=layer_position,
-                frame=frame_idx
-            )
+        filepaths_by_frame = {}
+        frames_to_render = []
+        for frame_idx, ref_idx in frame_references.items():
+            # None reference is skipped because does not have source
+            if ref_idx is None:
+                filepaths_by_frame[frame_idx] = None
+                continue
+            filename = filenames_by_frame_index[frame_idx]
             dst_path = "/".join([output_dir, filename])
-            layer_files_by_frame[frame_idx] = os.path.normpath(dst_path)
+            filepaths_by_frame[frame_idx] = dst_path
+            if frame_idx != ref_idx:
+                continue
+            frames_to_render.append(frame_idx)
             # Go to frame
             george_script_lines.append("tv_layerImage {}".format(frame_idx))
             # Store image to output
             george_script_lines.append("tv_saveimage \"{}\"".format(dst_path))
 
         self.log.debug("Rendering Exposure frames {} of layer {} ({})".format(
-            str(exposure_frames), layer_id, layer["name"]
+            ",".join(map(str, frames_to_render)), layer_id, layer["name"]
         ))
         # Let TVPaint render layer's image
         lib.execute_george_through_file("\n".join(george_script_lines))
 
         # Fill frames between `frame_start_index` and `frame_end_index`
-        self.log.debug((
-            "Filling frames between first and last frame of layer ({} - {})."
- ).format(frame_start_index + 1, frame_end_index + 1)) + self.log.debug("Filling frames not rendered frames.") + fill_reference_frames(frame_references, filepaths_by_frame) - _debug_filled_frames = [] - prev_filepath = None - for frame_idx in range(frame_start_index, frame_end_index + 1): - if frame_idx in layer_files_by_frame: - prev_filepath = layer_files_by_frame[frame_idx] - continue - - if prev_filepath is None: - raise ValueError("BUG: First frame of layer was not rendered!") - _debug_filled_frames.append(frame_idx) - filename = tmp_filename_template.format( - pos=layer_position, - frame=frame_idx - ) - new_filepath = "/".join([output_dir, filename]) - self._copy_image(prev_filepath, new_filepath) - layer_files_by_frame[frame_idx] = new_filepath - - self.log.debug("Filled frames {}".format(str(_debug_filled_frames))) - - # Fill frames by pre/post behavior of layer - self.log.debug(( - "Completing image sequence of layer by pre/post behavior." - " PRE: {} | POST: {}" - ).format(pre_behavior, post_behavior)) - - # Pre behavior - self._fill_frame_by_pre_behavior( - layer, - pre_behavior, - mark_in_index, - layer_files_by_frame, - tmp_filename_template, - output_dir - ) - self._fill_frame_by_post_behavior( - layer, - post_behavior, - mark_out_index, - layer_files_by_frame, - tmp_filename_template, - output_dir - ) - return layer_files_by_frame - - def _fill_frame_by_pre_behavior( - self, - layer, - pre_behavior, - mark_in_index, - layer_files_by_frame, - filename_template, - output_dir - ): - layer_position = layer["position"] - frame_start_index = layer["frame_start"] - frame_end_index = layer["frame_end"] - frame_count = frame_end_index - frame_start_index + 1 - if mark_in_index >= frame_start_index: - self.log.debug(( - "Skipping pre-behavior." - " All frames after Mark In are rendered." 
- )) - return - - if pre_behavior == "none": - # Empty frames are handled during `_composite_files` - pass - - elif pre_behavior == "hold": - # Keep first frame for whole time - eq_frame_filepath = layer_files_by_frame[frame_start_index] - for frame_idx in range(mark_in_index, frame_start_index): - filename = filename_template.format( - pos=layer_position, - frame=frame_idx - ) - new_filepath = "/".join([output_dir, filename]) - self._copy_image(eq_frame_filepath, new_filepath) - layer_files_by_frame[frame_idx] = new_filepath - - elif pre_behavior in ("loop", "repeat"): - # Loop backwards from last frame of layer - for frame_idx in reversed(range(mark_in_index, frame_start_index)): - eq_frame_idx_offset = ( - (frame_end_index - frame_idx) % frame_count - ) - eq_frame_idx = frame_end_index - eq_frame_idx_offset - eq_frame_filepath = layer_files_by_frame[eq_frame_idx] - - filename = filename_template.format( - pos=layer_position, - frame=frame_idx - ) - new_filepath = "/".join([output_dir, filename]) - self._copy_image(eq_frame_filepath, new_filepath) - layer_files_by_frame[frame_idx] = new_filepath - - elif pre_behavior == "pingpong": - half_seq_len = frame_count - 1 - seq_len = half_seq_len * 2 - for frame_idx in reversed(range(mark_in_index, frame_start_index)): - eq_frame_idx_offset = (frame_start_index - frame_idx) % seq_len - if eq_frame_idx_offset > half_seq_len: - eq_frame_idx_offset = (seq_len - eq_frame_idx_offset) - eq_frame_idx = frame_start_index + eq_frame_idx_offset - - eq_frame_filepath = layer_files_by_frame[eq_frame_idx] - - filename = filename_template.format( - pos=layer_position, - frame=frame_idx - ) - new_filepath = "/".join([output_dir, filename]) - self._copy_image(eq_frame_filepath, new_filepath) - layer_files_by_frame[frame_idx] = new_filepath - - def _fill_frame_by_post_behavior( - self, - layer, - post_behavior, - mark_out_index, - layer_files_by_frame, - filename_template, - output_dir - ): - layer_position = layer["position"] - 
frame_start_index = layer["frame_start"] - frame_end_index = layer["frame_end"] - frame_count = frame_end_index - frame_start_index + 1 - if mark_out_index <= frame_end_index: - self.log.debug(( - "Skipping post-behavior." - " All frames up to Mark Out are rendered." - )) - return - - if post_behavior == "none": - # Empty frames are handled during `_composite_files` - pass - - elif post_behavior == "hold": - # Keep first frame for whole time - eq_frame_filepath = layer_files_by_frame[frame_end_index] - for frame_idx in range(frame_end_index + 1, mark_out_index + 1): - filename = filename_template.format( - pos=layer_position, - frame=frame_idx - ) - new_filepath = "/".join([output_dir, filename]) - self._copy_image(eq_frame_filepath, new_filepath) - layer_files_by_frame[frame_idx] = new_filepath - - elif post_behavior in ("loop", "repeat"): - # Loop backwards from last frame of layer - for frame_idx in range(frame_end_index + 1, mark_out_index + 1): - eq_frame_idx = frame_idx % frame_count - eq_frame_filepath = layer_files_by_frame[eq_frame_idx] - - filename = filename_template.format( - pos=layer_position, - frame=frame_idx - ) - new_filepath = "/".join([output_dir, filename]) - self._copy_image(eq_frame_filepath, new_filepath) - layer_files_by_frame[frame_idx] = new_filepath - - elif post_behavior == "pingpong": - half_seq_len = frame_count - 1 - seq_len = half_seq_len * 2 - for frame_idx in range(frame_end_index + 1, mark_out_index + 1): - eq_frame_idx_offset = (frame_idx - frame_end_index) % seq_len - if eq_frame_idx_offset > half_seq_len: - eq_frame_idx_offset = seq_len - eq_frame_idx_offset - eq_frame_idx = frame_end_index - eq_frame_idx_offset - - eq_frame_filepath = layer_files_by_frame[eq_frame_idx] - - filename = filename_template.format( - pos=layer_position, - frame=frame_idx - ) - new_filepath = "/".join([output_dir, filename]) - self._copy_image(eq_frame_filepath, new_filepath) - layer_files_by_frame[frame_idx] = new_filepath - - def _composite_files( 
- self, files_by_position, frame_start, frame_end, - filename_template, output_dir - ): - """Composite frames when more that one layer was exported. - - This method is used when more than one layer is rendered out so and - output should be composition of each frame of rendered layers. - Missing frames are filled with transparent images. - """ - self.log.debug("Preparing files for compisiting.") - # Prepare paths to images by frames into list where are stored - # in order of compositing. - images_by_frame = {} - for frame_idx in range(frame_start, frame_end + 1): - images_by_frame[frame_idx] = [] - for position in sorted(files_by_position.keys(), reverse=True): - position_data = files_by_position[position] - if frame_idx in position_data: - filepath = position_data[frame_idx] - images_by_frame[frame_idx].append(filepath) - - output_filepaths = [] - missing_frame_paths = [] - random_frame_path = None - for frame_idx in sorted(images_by_frame.keys()): - image_filepaths = images_by_frame[frame_idx] - output_filename = filename_template.format(frame=frame_idx) - output_filepath = os.path.join(output_dir, output_filename) - output_filepaths.append(output_filepath) - - # Store information about missing frame and skip - if not image_filepaths: - missing_frame_paths.append(output_filepath) - continue - - # Just rename the file if is no need of compositing - if len(image_filepaths) == 1: - os.rename(image_filepaths[0], output_filepath) - - # Composite images - else: - composite_images(image_filepaths, output_filepath) - - # Store path of random output image that will 100% exist after all - # multiprocessing as mockup for missing frames - if random_frame_path is None: - random_frame_path = output_filepath - - self.log.debug( - "Creating transparent images for frames without render {}.".format( - str(missing_frame_paths) - ) - ) - # Fill the sequence with transparent frames - transparent_filepath = None - for filepath in missing_frame_paths: - if transparent_filepath is None: 
- img_obj = Image.open(random_frame_path) - painter = ImageDraw.Draw(img_obj) - painter.rectangle((0, 0, *img_obj.size), fill=(0, 0, 0, 0)) - img_obj.save(filepath) - transparent_filepath = filepath - else: - self._copy_image(transparent_filepath, filepath) - return output_filepaths - - def _cleanup_tmp_files(self, files_by_position): - """Remove temporary files that were used for compositing.""" - for data in files_by_position.values(): - for filepath in data.values(): - if os.path.exists(filepath): - os.remove(filepath) - - def _copy_image(self, src_path, dst_path): - """Create a copy of an image. - - This was added to be able easier change copy method. - """ - # Create hardlink of image instead of copying if possible - if hasattr(os, "link"): - os.link(src_path, dst_path) - else: - shutil.copy(src_path, dst_path) + return filepaths_by_frame From 251f80a75c9d5b92b62fe92753a6f0800006aae5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 5 Nov 2021 17:17:14 +0100 Subject: [PATCH 073/211] removed forgotten line --- openpype/hosts/tvpaint/plugins/publish/extract_sequence.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py index 729f631029..ff0c65475b 100644 --- a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py +++ b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py @@ -317,7 +317,6 @@ class ExtractSequence(pyblish.api.Extractor): """ self.log.debug("Preparing data for rendering.") - calculate_layers_extraction_data() # Map layers by position layers_by_position = {} layers_by_id = {} From c8d2500c8f3dfd390d2ccfa6c164c3232a8d096c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 5 Nov 2021 17:44:02 +0100 Subject: [PATCH 074/211] few smaller fixes --- openpype/hosts/tvpaint/lib.py | 6 +++--- openpype/hosts/tvpaint/plugins/publish/extract_sequence.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git 
a/openpype/hosts/tvpaint/lib.py b/openpype/hosts/tvpaint/lib.py index 7624658bca..c34afb00c4 100644 --- a/openpype/hosts/tvpaint/lib.py +++ b/openpype/hosts/tvpaint/lib.py @@ -6,7 +6,7 @@ from PIL import Image, ImageDraw def backwards_id_conversion(data_by_layer_id): """Convert layer ids to strings from integers.""" for key in tuple(data_by_layer_id.keys()): - if not isinstance(str): + if not isinstance(key, str): data_by_layer_id[str(key)] = data_by_layer_id.pop(key) @@ -247,7 +247,7 @@ def _cleanup_frame_references(output_idx_by_frame_idx): """ for frame_idx in tuple(output_idx_by_frame_idx.keys()): reference_idx = output_idx_by_frame_idx[frame_idx] - if reference_idx == frame_idx: + if reference_idx is None or reference_idx == frame_idx: continue real_reference_idx = reference_idx @@ -457,7 +457,7 @@ def fill_reference_frames(frame_references, filepaths_by_frame): # Get destination filepath src_filepath = filepaths_by_frame[ref_idx] - dst_filepath = filepaths_by_frame[ref_idx] + dst_filepath = filepaths_by_frame[frame_idx] if hasattr(os, "link"): os.link(src_filepath, dst_filepath) diff --git a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py index ff0c65475b..916d8ee73b 100644 --- a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py +++ b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py @@ -350,7 +350,7 @@ class ExtractSequence(pyblish.api.Extractor): filepaths_by_layer_id = {} for layer_id, render_data in extraction_data_by_layer_id.items(): layer = layers_by_id[layer_id] - filepaths_by_layer_id = self._render_layer( + filepaths_by_layer_id[layer_id] = self._render_layer( render_data, layer, output_dir ) @@ -431,7 +431,7 @@ class ExtractSequence(pyblish.api.Extractor): if frame_idx != ref_idx: continue - frames_to_render.append(frame_idx) + frames_to_render.append(str(frame_idx)) # Go to frame george_script_lines.append("tv_layerImage {}".format(frame_idx)) # Store image 
to output From 9f266b7a3e7737581942adcf68abbb6ea8a5f3f1 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 5 Nov 2021 19:09:10 +0100 Subject: [PATCH 075/211] added JobFailed exception --- openpype/hosts/tvpaint/worker/__init__.py | 2 ++ openpype/hosts/tvpaint/worker/worker_job.py | 15 +++++++++++++++ 2 files changed, 17 insertions(+) diff --git a/openpype/hosts/tvpaint/worker/__init__.py b/openpype/hosts/tvpaint/worker/__init__.py index 3d8d5de24e..4d3f706187 100644 --- a/openpype/hosts/tvpaint/worker/__init__.py +++ b/openpype/hosts/tvpaint/worker/__init__.py @@ -1,4 +1,5 @@ from .worker_job import ( + JobFailed, ExecuteSimpleGeorgeScript, ExecuteGeorgeScript, CollectSceneData, @@ -7,6 +8,7 @@ from .worker_job import ( __all__ = ( + "JobFailed", "ExecuteSimpleGeorgeScript", "ExecuteGeorgeScript", "CollectSceneData", diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index c74bfa484d..49de74fd1a 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -11,6 +11,21 @@ import six TMP_FILE_PREFIX = "opw_tvp_" +class JobFailed(Exception): + """Raised when job was sent and finished unsuccessfully.""" + def __init__(self, job_status): + job_state = job_status["state"] + job_message = job_status["message"] or "Unknown issue" + error_msg = ( + "Job didn't finish properly." 
+ " Job state: \"{}\" | Job message: \"{}\"" + ).format(job_state, job_message) + + self.job_status = job_status + + super().__init__(error_msg) + + @six.add_metaclass(ABCMeta) class BaseCommand: @abstractproperty From 9d6e6958fef1ae72136f1d6d04516b22f885115b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 5 Nov 2021 19:09:50 +0100 Subject: [PATCH 076/211] split TVPaintCommands into sender and processor and added few features --- openpype/hosts/tvpaint/worker/__init__.py | 6 +- openpype/hosts/tvpaint/worker/worker_job.py | 154 ++++++++++++++------ 2 files changed, 113 insertions(+), 47 deletions(-) diff --git a/openpype/hosts/tvpaint/worker/__init__.py b/openpype/hosts/tvpaint/worker/__init__.py index 4d3f706187..b60ce83ada 100644 --- a/openpype/hosts/tvpaint/worker/__init__.py +++ b/openpype/hosts/tvpaint/worker/__init__.py @@ -3,7 +3,8 @@ from .worker_job import ( ExecuteSimpleGeorgeScript, ExecuteGeorgeScript, CollectSceneData, - TVPaintCommands + SenderTVPaintCommands, + ProcessTVPaintCommands ) @@ -12,5 +13,6 @@ __all__ = ( "ExecuteSimpleGeorgeScript", "ExecuteGeorgeScript", "CollectSceneData", - "TVPaintCommands" + "SenderTVPaintCommands", + "ProcessTVPaintCommands" ) diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index 49de74fd1a..f9d4467b26 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -2,11 +2,15 @@ import os import tempfile import inspect import copy +import json +import time from uuid import uuid4 from abc import ABCMeta, abstractmethod, abstractproperty import six +from openpype.api import PypeLogger + TMP_FILE_PREFIX = "opw_tvp_" @@ -222,20 +226,17 @@ class CollectSceneData(BaseCommand): class TVPaintCommands: - def __init__(self, workfile, commands=None, communicator=None): - if not commands: - commands = [] - + def __init__(self, workfile): + self._log = None self._workfile = workfile self._commands = [] - self._communicator = 
communicator self._command_classes_by_name = None - self.commands_from_data(commands) - @property - def communicator(self): - return self._communicator + def log(self): + if self._log is None: + self._log = PypeLogger.get_logger(self.__class__.__name__) + return self._log @property def classes_by_name(self): @@ -250,12 +251,98 @@ class TVPaintCommands: continue if inspect.isabstract(attr): - print("Skipping abstract class {}".format(attr.__name__)) + self.log.debug( + "Skipping abstract class {}".format(attr.__name__) + ) command_classes_by_name[attr.name] = attr self._command_classes_by_name = command_classes_by_name return self._command_classes_by_name + def add_command(self, command): + command.set_parent(self) + self._commands.append(command) + + def result(self): + return [ + command.result() + for command in self._commands + ] + + +class SenderTVPaintCommand(TVPaintCommands): + def __init__(self, workfile, job_queue_module): + super().__init__(workfile) + + self._job_queue_module = job_queue_module + + def commands_data(self): + return [ + command.command_data() + for command in self._commands + ] + + def to_job_data(self): + return { + "workfile": self._workfile, + "function": "commands", + "commands": self.commands_data() + } + + def set_result(self, result): + commands_by_id = { + command.id: command + for command in self._commands + } + + for item in result: + command = commands_by_id[item["id"]] + command.set_result(item["result"]) + command.set_done() + + def _send_job(self): + # Send job data to job queue server + job_data = self.to_job_data() + self.log.debug("Sending job to JobQueue server.\n{}".format( + json.dumps(job_data, indent=4) + )) + job_id = self._job_queue_module.send_job("tvpaint", job_data) + self.log.info(( + "Job sent to JobQueue server and got id \"{}\"." + " Waiting for finishing the job." 
+ ).format(job_id)) + + return job_id + + def send_job_and_wait(self): + job_id = self._send_job() + while True: + job_status = self._job_queue_module.get_job_status(job_id) + if job_status["done"]: + break + time.sleep(0.3) + + # Check if job state is done + if job_status["state"] != "done": + raise JobFailed(job_status) + + self.set_result(job_status["result"]) + + self.log.debug("Job is done and result is stored.") + + +class ProcessTVPaintCommands(TVPaintCommands): + def __init__(self, workfile, commands, communicator): + super(ProcessTVPaintCommands, self).__init__(workfile) + + self._communicator = communicator + + self.commands_from_data(commands) + + @property + def communicator(self): + return self._communicator + def commands_from_data(self, commands_data): for command_data in commands_data: command_name = command_data["command"] @@ -264,9 +351,18 @@ class TVPaintCommands: command = klass.from_existing(command_data) self.add_command(command) - def add_command(self, command): - command.set_parent(self) - self._commands.append(command) + def execute_george(self, george_script): + return self.communicator.execute_george(george_script) + + def execute_george_through_file(self, george_script): + temporary_file = tempfile.NamedTemporaryFile( + mode="w", prefix=TMP_FILE_PREFIX, suffix=".grg", delete=False + ) + temporary_file.write(george_script) + temporary_file.close() + temp_file_path = temporary_file.name.replace("\\", "/") + self.execute_george("tv_runscript {}".format(temp_file_path)) + os.remove(temp_file_path) def _open_workfile(self): workfile = self._workfile.replace("\\", "/") @@ -285,35 +381,3 @@ class TVPaintCommands: command.execute() command.set_done() self._close_workfile() - - def commands_data(self): - return [ - command.command_data() - for command in self._commands - ] - - def to_job_data(self): - return { - "workfile": self._workfile, - "function": "commands", - "commands": self.commands_data() - } - - def result(self): - return [ - 
command.result() - for command in self._commands - ] - - def execute_george(self, george_script): - return self.communicator.execute_george(george_script) - - def execute_george_through_file(self, george_script): - temporary_file = tempfile.NamedTemporaryFile( - mode="w", prefix=TMP_FILE_PREFIX, suffix=".grg", delete=False - ) - temporary_file.write(george_script) - temporary_file.close() - temp_file_path = temporary_file.name.replace("\\", "/") - self.execute_george("tv_runscript {}".format(temp_file_path)) - os.remove(temp_file_path) From 067f148d8c21bf60521fa09b883393732c83326b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 5 Nov 2021 19:10:06 +0100 Subject: [PATCH 077/211] modified collect workfile plugin based on changes in tvpaint worker --- .../publish/collect_tvpaint_workfile_data.py | 43 +++++-------------- 1 file changed, 10 insertions(+), 33 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py index 699d4e5f47..3674b74a5d 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py @@ -7,12 +7,12 @@ Provides: Instance """ import json +import time import pyblish.api from openpype.hosts.tvpaint.worker import ( - TVPaintCommands, + SenderTVPaintCommands, CollectSceneData ) -from avalon.tvpaint import CommunicationWrapper class CollectTVPaintWorkfileData(pyblish.api.InstancePlugin): @@ -29,36 +29,13 @@ class CollectTVPaintWorkfileData(pyblish.api.InstancePlugin): job_queue_module = modules["job_queue"] # Prepare tvpaint command - commands = TVPaintCommands(workfile, CommunicationWrapper.communicator) - commands.append(CollectSceneData()) + collect_scene_data_command = CollectSceneData() + # Create TVPaint sender commands + commands = SenderTVPaintCommands(workfile, job_queue_module) + 
commands.add_command(collect_scene_data_command) - # Send job data to job queue server - job_data = commands.to_job_data() - self.debug("Sending job to JobQueue server.\n{}".format( - json.dumps(job_data, indent=4) - )) - job_id = job_queue_module.send_job("tvpaint", job_data) - self.log.info(( - "Job sent to JobQueue server and got id \"{}\"." - " Waiting for finishing the job." - ).format(job_id)) - # Wait for job to be finished - while True: - job_status = job_queue_module.get_job_status(job_id) - if job_status["done"]: - break + # Send job and wait for answer + commands.send_job_and_wait() - # Check if job state is done - if job_status["state"] != "done": - message = job_status["message"] or "Unknown issue" - raise ValueError( - "Job didn't finish properly." - " Job state: \"{}\" | Job message: \"{}\"".format( - job_status["state"], - message - ) - ) - job_result = job_status["result"] - - self.log.debug("Job is done with result.\n{}".format(job_result)) - instance.data["sceneData"] = job_result + # Store result + instance.data["sceneData"] = collect_scene_data_command.result() From 27009cb54fb4e3c4e4e58a67ab4dba786de62095 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 5 Nov 2021 19:10:25 +0100 Subject: [PATCH 078/211] use ProcessTVPaintCommands in worker --- .../default_modules/job_queue/job_workers/tvpaint_worker.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py b/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py index 463ec7cc99..b44325c1db 100644 --- a/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py +++ b/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py @@ -2,7 +2,7 @@ import signal import time import asyncio -from openpype.hosts.tvpaint.worker import TVPaintCommands +from openpype.hosts.tvpaint.worker import ProcessTVPaintCommands from avalon.tvpaint.communication_server import ( 
BaseCommunicator, CommunicationWrapper @@ -69,7 +69,7 @@ class WorkerCommunicator(BaseCommunicator): job_data = job["data"] workfile = job_data["workfile"] if job_data.get("function") == "commands": - commands = TVPaintCommands( + commands = ProcessTVPaintCommands( workfile, job_data["commands"], self ) commands.execute() From 9ddd898eab58702b343a290ede0be3a21f3a53ef Mon Sep 17 00:00:00 2001 From: davidlatwe Date: Sun, 7 Nov 2021 23:24:01 +0800 Subject: [PATCH 079/211] update docstring --- .../plugins/publish/integrate_inputlinks.py | 38 +++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/openpype/plugins/publish/integrate_inputlinks.py b/openpype/plugins/publish/integrate_inputlinks.py index b012427aec..00cd459da9 100644 --- a/openpype/plugins/publish/integrate_inputlinks.py +++ b/openpype/plugins/publish/integrate_inputlinks.py @@ -9,6 +9,27 @@ class IntegrateInputLinks(pyblish.api.ContextPlugin): label = "Connect Dependency InputLinks" def process(self, context): + """Connect dependency links for all instances, globally + + Code steps: + * filter out instances that has "versionEntity" entry in data + * find workfile instance within context + * if workfile found: + - link all `loadedVersions` as input of the workfile + - link workfile as input of all publishing instances + * else: + - show "no workfile" warning + * link instances' inputs if it's data has "inputVersions" entry + * Write into database + + inputVersions: + The "inputVersions" in instance.data should be a list of + version document's Id (str or ObjectId), which are the + dependencies of the publishing instance that should be + extracted from working scene by the DCC specific publish + plugin. 
+ + """ workfile = None publishing = [] @@ -57,6 +78,17 @@ class IntegrateInputLinks(pyblish.api.ContextPlugin): self.write_links_to_database(context) def add_link(self, link_type, input_id, version_doc): + """Add dependency link data into version document + + Args: + link_type (str): Type of link, one of 'reference' or 'generative' + input_id (str or ObjectId): Document Id of input version + version_doc (dict): The version document that takes the input + + Returns: + None + + """ from collections import OrderedDict from avalon import io # NOTE: @@ -74,6 +106,12 @@ class IntegrateInputLinks(pyblish.api.ContextPlugin): version_doc["data"]["inputLinks"].append(link) def write_links_to_database(self, context): + """Iter instances in context to update database + + If `versionEntity.data.inputLinks` not None in `instance.data`, doc + in database will be updated. + + """ from avalon import io for instance in context: From 2f883567bfb507e9d002290f0ede9b06c22eb21a Mon Sep 17 00:00:00 2001 From: davidlatwe Date: Mon, 8 Nov 2021 02:23:53 +0800 Subject: [PATCH 080/211] skip inactive instances --- openpype/plugins/publish/integrate_inputlinks.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/integrate_inputlinks.py b/openpype/plugins/publish/integrate_inputlinks.py index 00cd459da9..745b09d8e0 100644 --- a/openpype/plugins/publish/integrate_inputlinks.py +++ b/openpype/plugins/publish/integrate_inputlinks.py @@ -34,6 +34,10 @@ class IntegrateInputLinks(pyblish.api.ContextPlugin): publishing = [] for instance in context: + if not instance.data.get("publish", True): + # Skip inactive instances + continue + version_doc = instance.data.get("versionEntity") if not version_doc: self.log.debug("Instance %s doesn't have version." 
% instance) @@ -75,7 +79,8 @@ class IntegrateInputLinks(pyblish.api.ContextPlugin): version_doc=instance.data["versionEntity"], ) - self.write_links_to_database(context) + publishing.append(workfile) + self.write_links_to_database(publishing) def add_link(self, link_type, input_id, version_doc): """Add dependency link data into version document @@ -105,7 +110,7 @@ class IntegrateInputLinks(pyblish.api.ContextPlugin): version_doc["data"]["inputLinks"] = [] version_doc["data"]["inputLinks"].append(link) - def write_links_to_database(self, context): + def write_links_to_database(self, instances): """Iter instances in context to update database If `versionEntity.data.inputLinks` not None in `instance.data`, doc @@ -114,7 +119,7 @@ class IntegrateInputLinks(pyblish.api.ContextPlugin): """ from avalon import io - for instance in context: + for instance in instances: version_doc = instance.data.get("versionEntity") if version_doc is None: continue From 46ba5a9161434d67b431c842c832da69d57c61ed Mon Sep 17 00:00:00 2001 From: davidlatwe Date: Mon, 8 Nov 2021 03:00:28 +0800 Subject: [PATCH 081/211] embed simple dependency view --- openpype/tools/assetlinks/__init__.py | 0 openpype/tools/assetlinks/widgets.py | 85 +++++++++++++++++++++++++++ openpype/tools/loader/widgets.py | 17 ++++-- 3 files changed, 97 insertions(+), 5 deletions(-) create mode 100644 openpype/tools/assetlinks/__init__.py create mode 100644 openpype/tools/assetlinks/widgets.py diff --git a/openpype/tools/assetlinks/__init__.py b/openpype/tools/assetlinks/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/tools/assetlinks/widgets.py b/openpype/tools/assetlinks/widgets.py new file mode 100644 index 0000000000..e0087e9d47 --- /dev/null +++ b/openpype/tools/assetlinks/widgets.py @@ -0,0 +1,85 @@ + +from Qt import QtWidgets, QtCore, QtGui + + +class SimpleLinkView(QtWidgets.QWidget): + + def __init__(self, dbcon, parent=None): + super(SimpleLinkView, self).__init__(parent=parent) 
+ self.dbcon = dbcon + + # TODO: display selected target + + in_text = QtWidgets.QLabel("Inputs") + in_view = QtWidgets.QListWidget(parent=self) + out_text = QtWidgets.QLabel("Outputs") + out_view = QtWidgets.QListWidget(parent=self) + + layout = QtWidgets.QGridLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.addWidget(in_text, 0, 0) + layout.addWidget(in_view, 1, 0) + layout.addWidget(out_text, 0, 1) + layout.addWidget(out_view, 1, 1) + + self._in_view = in_view + self._out_view = out_view + + def clear(self): + self._in_view.clear() + self._out_view.clear() + + def set_version(self, version_doc): + self.clear() + if not version_doc or not self.isVisible(): + return + + # inputs + # + for link in version_doc["data"].get("inputLinks", []): + version = self.dbcon.find_one( + {"_id": link["input"], "type": "version"}, + projection={"name": 1, "parent": 1} + ) + if not version: + continue + subset = self.dbcon.find_one( + {"_id": version["parent"], "type": "subset"}, + projection={"name": 1, "parent": 1} + ) + if not subset: + continue + asset = self.dbcon.find_one( + {"_id": subset["parent"], "type": "asset"}, + projection={"name": 1} + ) + + self._in_view.addItem("{asset} {subset} v{version:0>3}".format( + asset=asset["name"], + subset=subset["name"], + version=version["name"], + )) + + # outputs + # + outputs = self.dbcon.find( + {"type": "version", "data.inputLinks.input": version_doc["_id"]}, + projection={"name": 1, "parent": 1} + ) + for version in outputs or []: + subset = self.dbcon.find_one( + {"_id": version["parent"], "type": "subset"}, + projection={"name": 1, "parent": 1} + ) + if not subset: + continue + asset = self.dbcon.find_one( + {"_id": subset["parent"], "type": "asset"}, + projection={"name": 1} + ) + + self._out_view.addItem("{asset} {subset} v{version:0>3}".format( + asset=asset["name"], + subset=subset["name"], + version=version["name"], + )) diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index 
4c075382ac..c370e5bdc1 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -21,6 +21,7 @@ from openpype.tools.utils.views import ( TreeViewSpinner, DeselectableTreeView ) +from openpype.tools.assetlinks.widgets import SimpleLinkView from .model import ( SubsetsModel, @@ -830,19 +831,25 @@ class VersionWidget(QtWidgets.QWidget): def __init__(self, dbcon, parent=None): super(VersionWidget, self).__init__(parent=parent) - layout = QtWidgets.QVBoxLayout(self) - layout.setContentsMargins(0, 0, 0, 0) - label = QtWidgets.QLabel("Version", self) data = VersionTextEdit(dbcon, self) data.setReadOnly(True) - layout.addWidget(label) - layout.addWidget(data) + depend_widget = SimpleLinkView(dbcon, self) + + tab = QtWidgets.QTabWidget() + tab.addTab(data, "Version Info") + tab.addTab(depend_widget, "Dependency") + + layout = QtWidgets.QVBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.addWidget(tab) self.data = data + self.depend_widget = depend_widget def set_version(self, version_doc): self.data.set_version(version_doc) + self.depend_widget.set_version(version_doc) class FamilyModel(QtGui.QStandardItemModel): From 07f3593e99b3f46ca352fefde05c574add193564 Mon Sep 17 00:00:00 2001 From: davidlatwe Date: Mon, 8 Nov 2021 03:02:04 +0800 Subject: [PATCH 082/211] cleanup unused import --- openpype/tools/assetlinks/widgets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/assetlinks/widgets.py b/openpype/tools/assetlinks/widgets.py index e0087e9d47..9a136462b0 100644 --- a/openpype/tools/assetlinks/widgets.py +++ b/openpype/tools/assetlinks/widgets.py @@ -1,5 +1,5 @@ -from Qt import QtWidgets, QtCore, QtGui +from Qt import QtWidgets class SimpleLinkView(QtWidgets.QWidget): From c6484e4930d6dac71e0d344699b804ece350ac4e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 8 Nov 2021 11:21:45 +0100 Subject: [PATCH 083/211] added cleanup of frames that are out of range --- openpype/hosts/tvpaint/lib.py | 
54 +++++++++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) diff --git a/openpype/hosts/tvpaint/lib.py b/openpype/hosts/tvpaint/lib.py index c34afb00c4..513bb2d952 100644 --- a/openpype/hosts/tvpaint/lib.py +++ b/openpype/hosts/tvpaint/lib.py @@ -1,5 +1,6 @@ import os import shutil +import collections from PIL import Image, ImageDraw @@ -247,6 +248,7 @@ def _cleanup_frame_references(output_idx_by_frame_idx): """ for frame_idx in tuple(output_idx_by_frame_idx.keys()): reference_idx = output_idx_by_frame_idx[frame_idx] + # Skip transparent frames if reference_idx is None or reference_idx == frame_idx: continue @@ -263,6 +265,55 @@ def _cleanup_frame_references(output_idx_by_frame_idx): output_idx_by_frame_idx[frame_idx] = real_reference_idx +def _cleanup_out_range_frames(output_idx_by_frame_idx, range_start, range_end): + """Cleanup frame references to frames out of passed range. + + First available frame in range is used + ``` + // Example input. Range 2-3 + { + 1: 1, + 2: 1, + 3: 1 + } + // Result + { + 2: 2, // Redirect to self as is first that refence out range + 3: 2 // Redirect to first redirected frame + } + ``` + Result is dictionary where keys leads to frame that should be rendered. 
+ """ + in_range_frames_by_out_frames = collections.defaultdict(set) + out_range_frames = set() + for frame_idx in tuple(output_idx_by_frame_idx.keys()): + # Skip frames that are already out of range + if frame_idx < range_start or frame_idx > range_end: + out_range_frames.add(frame_idx) + continue + + reference_idx = output_idx_by_frame_idx[frame_idx] + # Skip transparent frames + if reference_idx is None: + continue + + # Skip references in range + if reference_idx < range_start or reference_idx > range_end: + in_range_frames_by_out_frames[reference_idx].add(frame_idx) + + for reference_idx in tuple(in_range_frames_by_out_frames.keys()): + frame_indexes = in_range_frames_by_out_frames.pop(reference_idx) + new_reference = None + for frame_idx in frame_indexes: + if new_reference is None: + new_reference = frame_idx + output_idx_by_frame_idx[frame_idx] = new_reference + + # Finally remove out of range frames + for frame_idx in out_range_frames: + output_idx_by_frame_idx.pop(frame_idx) + + def calculate_layer_frame_references( range_start, range_end, layer_frame_start, @@ -324,6 +375,9 @@ def calculate_layer_frame_references( # Cleanup of referenced frames _cleanup_frame_references(output_idx_by_frame_idx) + # Remove frames out of range + _cleanup_out_range_frames(output_idx_by_frame_idx, range_start, range_end) + return output_idx_by_frame_idx From c22483f4e963985cdd9913fd38632ac111468cdc Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 8 Nov 2021 11:42:41 +0100 Subject: [PATCH 084/211] modified how data are stored --- openpype/hosts/tvpaint/worker/worker_job.py | 23 +++++++-------------- 1 file changed, 8 insertions(+), 15 deletions(-) diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index f9d4467b26..e458c1e272 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -194,28 +194,21 @@ class CollectSceneData(BaseCommand): groups_data = 
get_groups_data(communicator=self.communicator) layers_data = get_layers_data(communicator=self.communicator) - layers_by_id = { - layer_data["layer_id"]: layer_data + layer_ids = [ + layer_data["layer_id"] for layer_data in layers_data - } - layer_ids = tuple(layers_by_id.keys()) - pre_post_beh = get_layers_pre_post_behavior( + ] + pre_post_beh_by_layer_id = get_layers_pre_post_behavior( layer_ids, communicator=self.communicator ) - exposure_frames = get_layers_exposure_frames( + exposure_frames_by_layer_id = get_layers_exposure_frames( layer_ids, layers_data, communicator=self.communicator ) - output_layers_data = [] - for layer_data in layers_data: - layer_id = layer_data["layer_id"] - layer_data["exposure_frames"] = exposure_frames[layer_id] - behaviors = pre_post_beh[layer_id] - for key, value in behaviors.items(): - layer_data[key] = value - output_layers_data.append(layer_data) self._result = { - "layers_data": output_layers_data, + "layers_data": layers_data, + "exposure_frames_by_layer_id": exposure_frames_by_layer_id, + "pre_post_beh_by_layer_id": pre_post_beh_by_layer_id, "groups_data": groups_data, "scene_data": get_scene_data(self.communicator) } From 06a165116ea8adff5d99debc5702b03454698ece Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 8 Nov 2021 11:43:53 +0100 Subject: [PATCH 085/211] modified webpublisher collector --- .../publish/collect_tvpaint_workfile_data.py | 37 +++++++++++++++++-- 1 file changed, 33 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py index 3674b74a5d..f4562c6f63 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py @@ -6,8 +6,6 @@ Requires: Provides: Instance """ -import json -import time import pyblish.api from openpype.hosts.tvpaint.worker import ( 
SenderTVPaintCommands, @@ -37,5 +35,36 @@ class CollectTVPaintWorkfileData(pyblish.api.InstancePlugin): # Send job and wait for answer commands.send_job_and_wait() - # Store result - instance.data["sceneData"] = collect_scene_data_command.result() + collected_data = collect_scene_data_command.result() + layers_data = collected_data["layers_data"] + groups_data = collected_data["groups_data"] + scene_data = collected_data["scene_data"] + exposure_frames_by_layer_id = ( + collected_data["exposure_frames_by_layer_id"] + ) + pre_post_beh_by_layer_id = ( + collected_data["pre_post_beh_by_layer_id"] + ) + + # Store results + # scene data store the same way as TVPaint collector + instance.data["sceneData"] = { + "sceneWidth": scene_data["width"], + "sceneHeight": scene_data["height"], + "scenePixelAspect": scene_data["pixel_aspect"], + "sceneFps": scene_data["fps"], + "sceneFieldOrder": scene_data["field_order"], + "sceneMarkIn": scene_data["mark_in"], + # scene_data["mark_in_state"], + "sceneMarkInState": scene_data["mark_in_set"], + "sceneMarkOut": scene_data["mark_out"], + # scene_data["mark_out_state"], + "sceneMarkOutState": scene_data["mark_out_set"], + "sceneStartFrame": scene_data["start_frame"], + "sceneBgColor": scene_data["bg_color"] + } + # Store only raw data + instance.data["groupsData"] = groups_data + instance.data["layersData"] = layers_data + instance.data["layersExposureFrames"] = exposure_frames_by_layer_id + instance.data["layersPrePostBehavior"] = pre_post_beh_by_layer_id From 23c68fa3686f9c4aaf135107a719fbd5f7ee1110 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 8 Nov 2021 11:44:00 +0100 Subject: [PATCH 086/211] fix import order --- .../modules/default_modules/job_queue/job_server/server.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/default_modules/job_queue/job_server/server.py b/openpype/modules/default_modules/job_queue/job_server/server.py index e12e582e1c..cc0968b6b6 100644 --- 
a/openpype/modules/default_modules/job_queue/job_server/server.py +++ b/openpype/modules/default_modules/job_queue/job_server/server.py @@ -4,12 +4,12 @@ import logging from aiohttp import web -log = logging.getLogger(__name__) - from .jobs import JobQueue from .job_queue_route import JobQueueResource from .workers_rpc_route import WorkerRpc +log = logging.getLogger(__name__) + class WebServerManager: """Manger that care about web server thread.""" From a77ab7982b26e744a1f0a038560f0e4e780f2aca Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 8 Nov 2021 19:03:23 +0100 Subject: [PATCH 087/211] initial commit of extractor --- .../publish/extract_tvpaint_workfile.py | 492 ++++++++++++++++++ 1 file changed, 492 insertions(+) create mode 100644 openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py diff --git a/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py b/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py new file mode 100644 index 0000000000..3659e9f0df --- /dev/null +++ b/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py @@ -0,0 +1,492 @@ +import os +import copy +import tempfile + +from openpype.hosts.tvpaint.worker import ( + SenderTVPaintCommands, + ExecuteSimpleGeorgeScript, + ExecuteGeorgeScript +) + +import pyblish.api +from avalon.tvpaint import lib +from openpype.hosts.tvpaint.api.lib import composite_images +from openpype.hosts.tvpaint.lib import ( + calculate_layers_extraction_data, + get_frame_filename_template, + fill_reference_frames, + composite_rendered_layers, + rename_filepaths_by_frame_start +) +from PIL import Image + + +class ExtractTVPaintSequences(pyblish.api.Extractor): + label = "Extract TVPaint Sequences" + hosts = ["webpublisher"] + families = ["review", "renderPass", "renderLayer"] + + # Modifiable with settings + review_bg = [255, 255, 255, 255] + + def process(self, context): + scene_data = context["sceneData"] + scene_mark_in = 
scene_data["sceneMarkIn"] + scene_mark_out = scene_data["sceneMarkOut"] + scene_start_frame = scene_data["sceneStartFrame"] + scene_bg_color = scene_data["sceneBgColor"] + + behavior_by_layer_id = context.data["layersPrePostBehavior"] + exposure_frames_by_layer_id = context.data["layersExposureFrames"] + + # Handles are not stored per instance but on Context + handle_start = context.data["handleStart"] + handle_end = context.data["handleEnd"] + + tvpaint_commands = SenderTVPaintCommands() + + # Change scene Start Frame to 0 to prevent frame index issues + # - issue is that TVPaint versions deal with frame indexes in a + # different way when Start Frame is not `0` + # NOTE It will be set back after rendering + tvpaint_commands.add_command( + ExecuteSimpleGeorgeScript("tv_startframe 0") + ) + + after_render_instances = [] + for instance in context: + self.log.info("* Preparing commands for instance \"{}\"".format( + instance.data["label"] + )) + # Get all layers and filter out not visible + layers = instance.data["layers"] + filtered_layers = [layer for layer in layers if layer["visible"]] + if not filtered_layers: + self.log.info( + "None of the layers from the instance" + " are visible. Extraction skipped." 
+ ) + continue + + joined_layer_names = ", ".join([ + "\"{}\"".format(str(layer["name"])) + for layer in filtered_layers + ]) + self.log.debug( + "Instance has {} layers with names: {}".format( + len(filtered_layers), joined_layer_names + ) + ) + + # TODO handle this whole staging dir properly + # Staging dir must be created during collection + output_dir = instance.data["stagingDir"] + src_root = "c:/" + dst_root = "{worker_root}" + work_output_dir = output_dir.replace(src_root, dst_root) + + # Frame start/end may be stored as float + frame_start = int(instance.data["frameStart"]) + frame_end = int(instance.data["frameEnd"]) + + # Prepare output frames + output_frame_start = frame_start - handle_start + output_frame_end = frame_end + handle_end + + # Change output frame start to 0 if handles cause it's negative + # number + if output_frame_start < 0: + self.log.warning(( + "Frame start with handles has negative value." + " Changed to \"0\". Frames start: {}, Handle Start: {}" + ).format(frame_start, handle_start)) + output_frame_start = 0 + + # Create copy of scene Mark In/Out + mark_in, mark_out = scene_mark_in, scene_mark_out + + # Fix possible changes of output frame + mark_out, output_frame_end = self._fix_range_changes( + mark_in, mark_out, output_frame_start, output_frame_end + ) + filename_template = get_frame_filename_template( + max(scene_mark_out, output_frame_end) + ) + + # ----------------------------------------------------------------- + self.log.debug( + "Files will be rendered to folder: {}".format(output_dir) + ) + + output_filepaths_by_frame_idx = {} + for frame_idx in range(mark_in, mark_out + 1): + filename = filename_template.format(frame=frame_idx) + filepath = os.path.join(output_dir, filename) + output_filepaths_by_frame_idx[frame_idx] = filepath + + # Prepare data for post render processing + post_render_data = { + "output_dir": output_dir, + "layers": filtered_layers, + "output_filepaths_by_frame_idx": output_filepaths_by_frame_idx, + 
"instance": instance, + "is_layers_render": False, + "output_frame_start": output_frame_start, + "output_frame_end": output_frame_end + } + # Store them to list + after_render_instances.append(post_render_data) + + # Review rendering + if instance.data["family"] == "review": + self.add_render_review_command( + tvpaint_commands, mark_in, mark_out, scene_bg_color, + work_output_dir, filename_template + ) + continue + + # Layers rendering + extraction_data_by_layer_id = calculate_layers_extraction_data( + filtered_layers, + exposure_frames_by_layer_id, + behavior_by_layer_id, + mark_in, + mark_out + ) + filepaths_by_layer_id = self.add_render_command( + tvpaint_commands, + work_output_dir, + filtered_layers, + extraction_data_by_layer_id + ) + # Add more data to post render processing + post_render_data.update({ + "is_layers_render": True, + "extraction_data_by_layer_id": extraction_data_by_layer_id, + "filepaths_by_layer_id": filepaths_by_layer_id + }) + + # Change scene frame Start back to previous value + tvpaint_commands.add_command( + ExecuteSimpleGeorgeScript( + "tv_startframe {}".format(scene_start_frame) + ) + ) + self.log.info("Sending the job and waiting for response...") + tvpaint_commands.send_job_and_wait() + self.log.info("Render job finished") + + for post_render_data in after_render_instances: + self._post_render_processing(post_render_data, mark_in, mark_out) + + def _fix_range_changes( + self, mark_in, mark_out, output_frame_start, output_frame_end + ): + # Check Marks range and output range + output_range = output_frame_end - output_frame_start + marks_range = mark_out - mark_in + + # Lower Mark Out if mark range is bigger than output + # - do not rendered not used frames + if output_range < marks_range: + new_mark_out = mark_out - (marks_range - output_range) + self.log.warning(( + "Lowering render range to {} frames. 
Changed Mark Out {} -> {}" + ).format(marks_range + 1, mark_out, new_mark_out)) + # Assign new mark out to variable + mark_out = new_mark_out + + # Lower output frame end so representation has right `frameEnd` value + elif output_range > marks_range: + new_output_frame_end = ( + output_frame_end - (output_range - marks_range) + ) + self.log.warning(( + "Lowering representation range to {} frames." + " Changed frame end {} -> {}" + ).format(output_range + 1, mark_out, new_output_frame_end)) + output_frame_end = new_output_frame_end + return mark_out, output_frame_end + + def _post_render_processing(self, post_render_data, mark_in, mark_out): + # Unpack values + instance = post_render_data["instance"] + output_filepaths_by_frame_idx = ( + post_render_data["output_filepaths_by_frame_idx"] + ) + is_layers_render = post_render_data["is_layers_render"] + output_dir = post_render_data["output_dir"] + layers = post_render_data["layers"] + output_frame_start = post_render_data["output_frame_start"] + output_frame_end = post_render_data["output_frame_end"] + + # Trigger post processing of layers rendering + # - only few frames were rendered this will complete the sequence + # - multiple layers can be in single instance they must be composite + # over each other + if is_layers_render: + self._finish_layer_render( + layers, + post_render_data["extraction_data_by_layer_id"], + post_render_data["filepaths_by_layer_id"], + mark_in, + mark_out, + output_filepaths_by_frame_idx + ) + + # Create thumbnail + thumbnail_filepath = os.path.join(output_dir, "thumbnail.jpg") + thumbnail_src_path = output_filepaths_by_frame_idx[mark_in] + self._create_thumbnail(thumbnail_src_path, thumbnail_filepath) + + # Rename filepaths to final frames + repre_files = self._rename_output_files( + output_filepaths_by_frame_idx, + mark_in, + mark_out, + output_frame_start + ) + + # Fill tags and new families + family_lowered = instance.data["family"].lower() + tags = [] + if family_lowered in ("review", 
"renderlayer"): + tags.append("review") + + # Sequence of one frame + single_file = len(repre_files) == 1 + if single_file: + repre_files = repre_files[0] + + # Extension is harcoded + # - changing extension would require change code + new_repre = { + "name": "png", + "ext": "png", + "files": repre_files, + "stagingDir": output_dir, + "tags": tags + } + + if not single_file: + new_repre["frameStart"] = output_frame_start + new_repre["frameEnd"] = output_frame_end + + self.log.debug("Creating new representation: {}".format(new_repre)) + + instance.data["representations"].append(new_repre) + + if family_lowered in ("renderpass", "renderlayer"): + # Change family to render + instance.data["family"] = "render" + + thumbnail_ext = os.path.splitext(thumbnail_filepath)[1] + # Create thumbnail representation + thumbnail_repre = { + "name": "thumbnail", + "ext": thumbnail_ext.replace(".", ""), + "outputName": "thumb", + "files": os.path.basename(thumbnail_filepath), + "stagingDir": output_dir, + "tags": ["thumbnail"] + } + instance.data["representations"].append(thumbnail_repre) + + def _rename_output_files( + self, filepaths_by_frame, mark_in, mark_out, output_frame_start + ): + new_filepaths_by_frame = rename_filepaths_by_frame_start( + filepaths_by_frame, mark_in, mark_out, output_frame_start + ) + + repre_filenames = [] + for filepath in new_filepaths_by_frame.values(): + repre_filenames.append(os.path.basename(filepath)) + + if mark_in < output_frame_start: + repre_filenames = list(reversed(repre_filenames)) + + return repre_filenames + + def add_render_review_command( + self, + tvpaint_commands, + mark_in, + mark_out, + scene_bg_color, + work_output_dir, + filename_template + ): + """ Export images from TVPaint using `tv_savesequence` command. + + Args: + output_dir (str): Directory where files will be stored. + mark_in (int): Starting frame index from which export will begin. + mark_out (int): On which frame index export will end. 
+ scene_bg_color (list): Bg color set in scene. Result of george + script command `tv_background`. + """ + self.log.debug("Preparing data for rendering.") + bg_color = self._get_review_bg_color() + first_frame_filepath = "/".join([ + work_output_dir, + filename_template.format(frame=mark_in) + ]).replace("\\", "/") + + george_script_lines = [ + # Change bg color to color from settings + "tv_background \"color\" {} {} {}".format(*bg_color), + "tv_SaveMode \"PNG\"", + "export_path = \"{}\"".format(first_frame_filepath), + "tv_savesequence '\"'export_path'\"' {} {}".format( + mark_in, mark_out + ) + ] + if scene_bg_color: + # Change bg color back to previous scene bg color + _scene_bg_color = copy.deepcopy(scene_bg_color) + bg_type = _scene_bg_color.pop(0) + orig_color_command = [ + "tv_background", + "\"{}\"".format(bg_type) + ] + orig_color_command.extend(_scene_bg_color) + + george_script_lines.append(" ".join(orig_color_command)) + + tvpaint_commands.add_command( + ExecuteGeorgeScript("\n".join(george_script_lines)) + ) + + def add_render_command( + self, + tvpaint_commands, + work_output_dir, + layers, + extraction_data_by_layer_id + ): + """ Export images from TVPaint. + + Args: + output_dir (str): Directory where files will be stored. + mark_in (int): Starting frame index from which export will begin. + mark_out (int): On which frame index export will end. + layers (list): List of layers to be exported. + + Retruns: + tuple: With 2 items first is list of filenames second is path to + thumbnail. 
+ """ + # Map layers by position + layers_by_id = { + layer["layer_id"]: layer + for layer in layers + } + + # Render layers + filepaths_by_layer_id = {} + for layer_id, render_data in extraction_data_by_layer_id.items(): + layer = layers_by_id[layer_id] + frame_references = render_data["frame_references"] + filenames_by_frame_index = render_data["filenames_by_frame_index"] + + filepaths_by_frame = {} + for frame_idx, ref_idx in frame_references.items(): + # None reference is skipped because does not have source + if ref_idx is None: + filepaths_by_frame[frame_idx] = None + continue + filename = filenames_by_frame_index[frame_idx] + dst_path = "/".join([work_output_dir, filename]) + filepaths_by_frame[frame_idx] = dst_path + if frame_idx != ref_idx: + continue + + filepaths_by_layer_id[layer_id] = self._add_render_layer_command( + tvpaint_commands, layer, filepaths_by_frame + ) + + return filepaths_by_layer_id + + def _add_render_layer_command( + self, tvpaint_commands, layer, filepaths_by_frame + ): + george_script_lines = [ + # Set current layer by position + "tv_layergetid {}".format(layer["position"]), + "layer_id = result", + "tv_layerset layer_id", + "tv_SaveMode \"PNG\"" + ] + + filepaths_by_frame = {} + for frame_idx, filepath in filepaths_by_frame.items(): + if filepath is None: + continue + + # Go to frame + george_script_lines.append("tv_layerImage {}".format(frame_idx)) + # Store image to output + george_script_lines.append("tv_saveimage \"{}\"".format(filepath)) + + tvpaint_commands.add_command( + ExecuteGeorgeScript("\n".join(george_script_lines)) + ) + return filepaths_by_frame + + def _finish_layer_render( + self, + layers, + extraction_data_by_layer_id, + filepaths_by_layer_id, + mark_in, + mark_out, + output_filepaths_by_frame_idx + ): + # Fill frames between `frame_start_index` and `frame_end_index` + self.log.debug("Filling frames not rendered frames.") + for layer_id, render_data in extraction_data_by_layer_id.items(): + frame_references = 
render_data["frame_references"] + filepaths_by_frame = filepaths_by_layer_id[layer_id] + fill_reference_frames(frame_references, filepaths_by_frame) + + # Prepare final filepaths where compositing should store result + self.log.info("Started compositing of layer frames.") + composite_rendered_layers( + layers, filepaths_by_layer_id, + mark_in, mark_out, + output_filepaths_by_frame_idx + ) + + def _create_thumbnail(self, thumbnail_src_path, thumbnail_filepath): + if not os.path.exists(thumbnail_src_path): + return + + source_img = Image.open(thumbnail_src_path) + + # Composite background only on rgba images + # - just making sure + if source_img.mode.lower() == "rgba": + bg_color = self._get_review_bg_color() + self.log.debug("Adding thumbnail background color {}.".format( + " ".join([str(val) for val in bg_color]) + )) + bg_image = Image.new("RGBA", source_img.size, bg_color) + thumbnail_obj = Image.alpha_composite(bg_image, source_img) + thumbnail_obj.convert("RGB").save(thumbnail_filepath) + + else: + self.log.info(( + "Source for thumbnail has mode \"{}\" (Expected: RGBA)." + " Can't use thubmanail background color." 
+ ).format(source_img.mode)) + source_img.save(thumbnail_filepath) + + def _get_review_bg_color(self): + red = green = blue = 255 + if self.review_bg: + if len(self.review_bg) == 4: + red, green, blue, _ = self.review_bg + elif len(self.review_bg) == 3: + red, green, blue = self.review_bg + return (red, green, blue) From a16993541de5702e374477f8d6332fbd1216130d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 9 Nov 2021 09:47:13 +0100 Subject: [PATCH 088/211] added work root to settings --- .../defaults/system_settings/modules.json | 7 ++++++- .../schemas/system_schema/schema_modules.json | 18 ++++++++++++++++++ 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/system_settings/modules.json b/openpype/settings/defaults/system_settings/modules.json index 43aeea7885..1c4277af34 100644 --- a/openpype/settings/defaults/system_settings/modules.json +++ b/openpype/settings/defaults/system_settings/modules.json @@ -180,6 +180,11 @@ "enabled": false }, "job_queue": { - "server_url": "" + "server_url": "", + "work_root": { + "windows": "", + "darwin": "", + "linux": "" + } } } \ No newline at end of file diff --git a/openpype/settings/entities/schemas/system_schema/schema_modules.json b/openpype/settings/entities/schemas/system_schema/schema_modules.json index 5a163d380b..905d53a87d 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_modules.json +++ b/openpype/settings/entities/schemas/system_schema/schema_modules.json @@ -244,10 +244,28 @@ "require_restart": true, "collapsible": true, "children": [ + { + "type": "label", + "label": "Address of machine where job queue server is running." + }, { "type": "text", "key": "server_url", "label": "Server Rest URL" + }, + { + "type": "separator" + }, + { + "type": "label", + "label": "Work root is used as temporary directory for workers where source is copied and render output can be stored." 
+ }, + { + "key": "work_root", + "label": "Work root", + "type": "path", + "multipath": false, + "multiplatform": true } ] }, From 65a77361a9285c150e049d3e0b2c3f3f7946e86d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 9 Nov 2021 12:39:59 +0100 Subject: [PATCH 089/211] adding baking colorspace knob to render nodes --- openpype/hosts/nuke/api/lib.py | 397 ++------------------------------- 1 file changed, 17 insertions(+), 380 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 9ee3a4464b..e1e417df4d 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -478,6 +478,7 @@ def create_write_node(name, data, input=None, prenodes=None, if review: add_review_knob(GN) + add_bake_colorspace_knob(GN) # add divider GN.addKnob(nuke.Text_Knob('', 'Rendering')) @@ -571,6 +572,22 @@ def add_review_knob(node): return node +def add_bake_colorspace_knob(node): + ''' Adds additional bake colorspace knob to given node + + Arguments: + node (obj): nuke node object to be fixed + + Return: + node (obj): with added knob + ''' + if "bake_colorspace" not in node.knobs(): + knob = nuke.Boolean_Knob("bake_colorspace", "Bake colorspace") + knob.setValue(True) + node.addKnob(knob) + return node + + def add_deadline_tab(node): node.addKnob(nuke.Tab_Knob("Deadline")) @@ -1160,386 +1177,6 @@ def get_write_node_template_attr(node): return anlib.fix_data_for_node_create(correct_data) -class ExporterReview: - """ - Base class object for generating review data from Nuke - - Args: - klass (pyblish.plugin): pyblish plugin parent - instance (pyblish.instance): instance of pyblish context - - """ - _temp_nodes = [] - data = dict({ - "representations": list() - }) - - def __init__(self, - klass, - instance - ): - - self.log = klass.log - self.instance = instance - self.path_in = self.instance.data.get("path", None) - self.staging_dir = self.instance.data["stagingDir"] - self.collection = self.instance.data.get("collection", 
None) - - def get_file_info(self): - if self.collection: - self.log.debug("Collection: `{}`".format(self.collection)) - # get path - self.fname = os.path.basename(self.collection.format( - "{head}{padding}{tail}")) - self.fhead = self.collection.format("{head}") - - # get first and last frame - self.first_frame = min(self.collection.indexes) - self.last_frame = max(self.collection.indexes) - if "slate" in self.instance.data["families"]: - self.first_frame += 1 - else: - self.fname = os.path.basename(self.path_in) - self.fhead = os.path.splitext(self.fname)[0] + "." - self.first_frame = self.instance.data.get("frameStartHandle", None) - self.last_frame = self.instance.data.get("frameEndHandle", None) - - if "#" in self.fhead: - self.fhead = self.fhead.replace("#", "")[:-1] - - def get_representation_data(self, tags=None, range=False): - add_tags = [] - if tags: - add_tags = tags - - repre = { - 'name': self.name, - 'ext': self.ext, - 'files': self.file, - "stagingDir": self.staging_dir, - "tags": [self.name.replace("_", "-")] + add_tags - } - - if range: - repre.update({ - "frameStart": self.first_frame, - "frameEnd": self.last_frame, - }) - - self.data["representations"].append(repre) - - def get_view_process_node(self): - """ - Will get any active view process. 
- - Arguments: - self (class): in object definition - - Returns: - nuke.Node: copy node of Input Process node - """ - anlib.reset_selection() - ipn_orig = None - for v in nuke.allNodes(filter="Viewer"): - ip = v['input_process'].getValue() - ipn = v['input_process_node'].getValue() - if "VIEWER_INPUT" not in ipn and ip: - ipn_orig = nuke.toNode(ipn) - ipn_orig.setSelected(True) - - if ipn_orig: - # copy selected to clipboard - nuke.nodeCopy('%clipboard%') - # reset selection - anlib.reset_selection() - # paste node and selection is on it only - nuke.nodePaste('%clipboard%') - # assign to variable - ipn = nuke.selectedNode() - - return ipn - - def clean_nodes(self): - for node in self._temp_nodes: - nuke.delete(node) - self._temp_nodes = [] - self.log.info("Deleted nodes...") - - -class ExporterReviewLut(ExporterReview): - """ - Generator object for review lut from Nuke - - Args: - klass (pyblish.plugin): pyblish plugin parent - instance (pyblish.instance): instance of pyblish context - - - """ - - def __init__(self, - klass, - instance, - name=None, - ext=None, - cube_size=None, - lut_size=None, - lut_style=None): - # initialize parent class - ExporterReview.__init__(self, klass, instance) - self._temp_nodes = [] - - # deal with now lut defined in viewer lut - if hasattr(klass, "viewer_lut_raw"): - self.viewer_lut_raw = klass.viewer_lut_raw - else: - self.viewer_lut_raw = False - - self.name = name or "baked_lut" - self.ext = ext or "cube" - self.cube_size = cube_size or 32 - self.lut_size = lut_size or 1024 - self.lut_style = lut_style or "linear" - - # set frame start / end and file name to self - self.get_file_info() - - self.log.info("File info was set...") - - self.file = self.fhead + self.name + ".{}".format(self.ext) - self.path = os.path.join( - self.staging_dir, self.file).replace("\\", "/") - - def generate_lut(self): - # ---------- start nodes creation - - # CMSTestPattern - cms_node = nuke.createNode("CMSTestPattern") - 
cms_node["cube_size"].setValue(self.cube_size) - # connect - self._temp_nodes.append(cms_node) - self.previous_node = cms_node - self.log.debug("CMSTestPattern... `{}`".format(self._temp_nodes)) - - # Node View Process - ipn = self.get_view_process_node() - if ipn is not None: - # connect - ipn.setInput(0, self.previous_node) - self._temp_nodes.append(ipn) - self.previous_node = ipn - self.log.debug("ViewProcess... `{}`".format(self._temp_nodes)) - - if not self.viewer_lut_raw: - # OCIODisplay - dag_node = nuke.createNode("OCIODisplay") - # connect - dag_node.setInput(0, self.previous_node) - self._temp_nodes.append(dag_node) - self.previous_node = dag_node - self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes)) - - # GenerateLUT - gen_lut_node = nuke.createNode("GenerateLUT") - gen_lut_node["file"].setValue(self.path) - gen_lut_node["file_type"].setValue(".{}".format(self.ext)) - gen_lut_node["lut1d"].setValue(self.lut_size) - gen_lut_node["style1d"].setValue(self.lut_style) - # connect - gen_lut_node.setInput(0, self.previous_node) - self._temp_nodes.append(gen_lut_node) - self.log.debug("GenerateLUT... `{}`".format(self._temp_nodes)) - - # ---------- end nodes creation - - # Export lut file - nuke.execute( - gen_lut_node.name(), - int(self.first_frame), - int(self.first_frame)) - - self.log.info("Exported...") - - # ---------- generate representation data - self.get_representation_data() - - self.log.debug("Representation... 
`{}`".format(self.data)) - - # ---------- Clean up - self.clean_nodes() - - return self.data - - -class ExporterReviewMov(ExporterReview): - """ - Metaclass for generating review mov files - - Args: - klass (pyblish.plugin): pyblish plugin parent - instance (pyblish.instance): instance of pyblish context - - """ - - def __init__(self, - klass, - instance, - name=None, - ext=None, - ): - # initialize parent class - ExporterReview.__init__(self, klass, instance) - - # passing presets for nodes to self - if hasattr(klass, "nodes"): - self.nodes = klass.nodes - else: - self.nodes = {} - - # deal with now lut defined in viewer lut - self.viewer_lut_raw = klass.viewer_lut_raw - self.bake_colorspace_fallback = klass.bake_colorspace_fallback - self.bake_colorspace_main = klass.bake_colorspace_main - self.write_colorspace = instance.data["colorspace"] - - self.name = name or "baked" - self.ext = ext or "mov" - - # set frame start / end and file name to self - self.get_file_info() - - self.log.info("File info was set...") - - self.file = self.fhead + self.name + ".{}".format(self.ext) - self.path = os.path.join( - self.staging_dir, self.file).replace("\\", "/") - - def render(self, render_node_name): - self.log.info("Rendering... ") - # Render Write node - nuke.execute( - render_node_name, - int(self.first_frame), - int(self.last_frame)) - - self.log.info("Rendered...") - - def save_file(self): - import shutil - with anlib.maintained_selection(): - self.log.info("Saving nodes as file... 
") - # create nk path - path = os.path.splitext(self.path)[0] + ".nk" - # save file to the path - shutil.copyfile(self.instance.context.data["currentFile"], path) - - self.log.info("Nodes exported...") - return path - - def generate_mov(self, farm=False): - # ---------- start nodes creation - - # Read node - r_node = nuke.createNode("Read") - r_node["file"].setValue(self.path_in) - r_node["first"].setValue(self.first_frame) - r_node["origfirst"].setValue(self.first_frame) - r_node["last"].setValue(self.last_frame) - r_node["origlast"].setValue(self.last_frame) - r_node["colorspace"].setValue(self.write_colorspace) - - # connect - self._temp_nodes.append(r_node) - self.previous_node = r_node - self.log.debug("Read... `{}`".format(self._temp_nodes)) - - # View Process node - ipn = self.get_view_process_node() - if ipn is not None: - # connect - ipn.setInput(0, self.previous_node) - self._temp_nodes.append(ipn) - self.previous_node = ipn - self.log.debug("ViewProcess... `{}`".format(self._temp_nodes)) - - if not self.viewer_lut_raw: - colorspaces = [ - self.bake_colorspace_main, self.bake_colorspace_fallback - ] - - if any(colorspaces): - # OCIOColorSpace with controled output - dag_node = nuke.createNode("OCIOColorSpace") - self._temp_nodes.append(dag_node) - for c in colorspaces: - test = dag_node["out_colorspace"].setValue(str(c)) - if test: - self.log.info( - "Baking in colorspace... `{}`".format(c)) - break - - if not test: - dag_node = nuke.createNode("OCIODisplay") - else: - # OCIODisplay - dag_node = nuke.createNode("OCIODisplay") - - # connect - dag_node.setInput(0, self.previous_node) - self._temp_nodes.append(dag_node) - self.previous_node = dag_node - self.log.debug("OCIODisplay... 
`{}`".format(self._temp_nodes)) - - # Write node - write_node = nuke.createNode("Write") - self.log.debug("Path: {}".format(self.path)) - write_node["file"].setValue(self.path) - write_node["file_type"].setValue(self.ext) - - # Knobs `meta_codec` and `mov64_codec` are not available on centos. - # TODO change this to use conditions, if possible. - try: - write_node["meta_codec"].setValue("ap4h") - except Exception: - self.log.info("`meta_codec` knob was not found") - - try: - write_node["mov64_codec"].setValue("ap4h") - except Exception: - self.log.info("`mov64_codec` knob was not found") - write_node["mov64_write_timecode"].setValue(1) - write_node["raw"].setValue(1) - # connect - write_node.setInput(0, self.previous_node) - self._temp_nodes.append(write_node) - self.log.debug("Write... `{}`".format(self._temp_nodes)) - # ---------- end nodes creation - - # ---------- render or save to nk - if farm: - nuke.scriptSave() - path_nk = self.save_file() - self.data.update({ - "bakeScriptPath": path_nk, - "bakeWriteNodeName": write_node.name(), - "bakeRenderPath": self.path - }) - else: - self.render(write_node.name()) - # ---------- generate representation data - self.get_representation_data( - tags=["review", "delete"], - range=True - ) - - self.log.debug("Representation... `{}`".format(self.data)) - - # ---------- Clean up - self.clean_nodes() - nuke.scriptSave() - return self.data - - def get_dependent_nodes(nodes): """Get all dependent nodes connected to the list of nodes. 
From 46f808c5bcd7dfde7dd9e4a42cc33e2909d34b25 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 9 Nov 2021 12:40:46 +0100 Subject: [PATCH 090/211] moving plugin based funcs to right module --- openpype/hosts/nuke/api/plugin.py | 385 ++++++++++++++++++ .../publish/extract_review_data_lut.py | 6 +- .../publish/extract_review_data_mov.py | 6 +- 3 files changed, 391 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 62eadecaf4..9801e19126 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -1,3 +1,4 @@ +import os import random import string @@ -94,3 +95,387 @@ class NukeLoader(api.Loader): nuke.delete(member) return dependent_nodes + + + +class ExporterReview: + """ + Base class object for generating review data from Nuke + + Args: + klass (pyblish.plugin): pyblish plugin parent + instance (pyblish.instance): instance of pyblish context + + """ + _temp_nodes = [] + data = dict({ + "representations": list() + }) + + def __init__(self, + klass, + instance + ): + + self.log = klass.log + self.instance = instance + self.bake_colorspace = instance.data["bakeColorspace"] + self.path_in = self.instance.data.get("path", None) + self.staging_dir = self.instance.data["stagingDir"] + self.collection = self.instance.data.get("collection", None) + + def get_file_info(self): + if self.collection: + self.log.debug("Collection: `{}`".format(self.collection)) + # get path + self.fname = os.path.basename(self.collection.format( + "{head}{padding}{tail}")) + self.fhead = self.collection.format("{head}") + + # get first and last frame + self.first_frame = min(self.collection.indexes) + self.last_frame = max(self.collection.indexes) + if "slate" in self.instance.data["families"]: + self.first_frame += 1 + else: + self.fname = os.path.basename(self.path_in) + self.fhead = os.path.splitext(self.fname)[0] + "." 
+ self.first_frame = self.instance.data.get("frameStartHandle", None) + self.last_frame = self.instance.data.get("frameEndHandle", None) + + if "#" in self.fhead: + self.fhead = self.fhead.replace("#", "")[:-1] + + def get_representation_data(self, tags=None, range=False): + add_tags = [] + if tags: + add_tags = tags + + repre = { + 'name': self.name, + 'ext': self.ext, + 'files': self.file, + "stagingDir": self.staging_dir, + "tags": [self.name.replace("_", "-")] + add_tags + } + + if range: + repre.update({ + "frameStart": self.first_frame, + "frameEnd": self.last_frame, + }) + + self.data["representations"].append(repre) + + def get_view_process_node(self): + """ + Will get any active view process. + + Arguments: + self (class): in object definition + + Returns: + nuke.Node: copy node of Input Process node + """ + anlib.reset_selection() + ipn_orig = None + for v in nuke.allNodes(filter="Viewer"): + ip = v['input_process'].getValue() + ipn = v['input_process_node'].getValue() + if "VIEWER_INPUT" not in ipn and ip: + ipn_orig = nuke.toNode(ipn) + ipn_orig.setSelected(True) + + if ipn_orig: + # copy selected to clipboard + nuke.nodeCopy('%clipboard%') + # reset selection + anlib.reset_selection() + # paste node and selection is on it only + nuke.nodePaste('%clipboard%') + # assign to variable + ipn = nuke.selectedNode() + + return ipn + + def clean_nodes(self): + for node in self._temp_nodes: + nuke.delete(node) + self._temp_nodes = [] + self.log.info("Deleted nodes...") + + +class ExporterReviewLut(ExporterReview): + """ + Generator object for review lut from Nuke + + Args: + klass (pyblish.plugin): pyblish plugin parent + instance (pyblish.instance): instance of pyblish context + + + """ + + def __init__(self, + klass, + instance, + name=None, + ext=None, + cube_size=None, + lut_size=None, + lut_style=None): + # initialize parent class + ExporterReview.__init__(self, klass, instance) + self._temp_nodes = [] + + # deal with now lut defined in viewer lut + if 
hasattr(klass, "viewer_lut_raw"): + self.viewer_lut_raw = klass.viewer_lut_raw + else: + self.viewer_lut_raw = False + + self.name = name or "baked_lut" + self.ext = ext or "cube" + self.cube_size = cube_size or 32 + self.lut_size = lut_size or 1024 + self.lut_style = lut_style or "linear" + + # set frame start / end and file name to self + self.get_file_info() + + self.log.info("File info was set...") + + self.file = self.fhead + self.name + ".{}".format(self.ext) + self.path = os.path.join( + self.staging_dir, self.file).replace("\\", "/") + + def generate_lut(self): + # ---------- start nodes creation + + # CMSTestPattern + cms_node = nuke.createNode("CMSTestPattern") + cms_node["cube_size"].setValue(self.cube_size) + # connect + self._temp_nodes.append(cms_node) + self.previous_node = cms_node + self.log.debug("CMSTestPattern... `{}`".format(self._temp_nodes)) + + # Node View Process + ipn = self.get_view_process_node() + if ipn is not None: + # connect + ipn.setInput(0, self.previous_node) + self._temp_nodes.append(ipn) + self.previous_node = ipn + self.log.debug("ViewProcess... `{}`".format(self._temp_nodes)) + + if not self.viewer_lut_raw: + # OCIODisplay + dag_node = nuke.createNode("OCIODisplay") + # connect + dag_node.setInput(0, self.previous_node) + self._temp_nodes.append(dag_node) + self.previous_node = dag_node + self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes)) + + # GenerateLUT + gen_lut_node = nuke.createNode("GenerateLUT") + gen_lut_node["file"].setValue(self.path) + gen_lut_node["file_type"].setValue(".{}".format(self.ext)) + gen_lut_node["lut1d"].setValue(self.lut_size) + gen_lut_node["style1d"].setValue(self.lut_style) + # connect + gen_lut_node.setInput(0, self.previous_node) + self._temp_nodes.append(gen_lut_node) + self.log.debug("GenerateLUT... 
`{}`".format(self._temp_nodes)) + + # ---------- end nodes creation + + # Export lut file + nuke.execute( + gen_lut_node.name(), + int(self.first_frame), + int(self.first_frame)) + + self.log.info("Exported...") + + # ---------- generate representation data + self.get_representation_data() + + self.log.debug("Representation... `{}`".format(self.data)) + + # ---------- Clean up + self.clean_nodes() + + return self.data + + +class ExporterReviewMov(ExporterReview): + """ + Metaclass for generating review mov files + + Args: + klass (pyblish.plugin): pyblish plugin parent + instance (pyblish.instance): instance of pyblish context + + """ + + def __init__(self, + klass, + instance, + name=None, + ext=None, + ): + # initialize parent class + ExporterReview.__init__(self, klass, instance) + + # passing presets for nodes to self + if hasattr(klass, "nodes"): + self.nodes = klass.nodes + else: + self.nodes = {} + + # deal with now lut defined in viewer lut + self.viewer_lut_raw = klass.viewer_lut_raw + self.bake_colorspace_fallback = klass.bake_colorspace_fallback + self.bake_colorspace_main = klass.bake_colorspace_main + self.write_colorspace = instance.data["colorspace"] + + self.name = name or "baked" + self.ext = ext or "mov" + + # set frame start / end and file name to self + self.get_file_info() + + self.log.info("File info was set...") + + self.file = self.fhead + self.name + ".{}".format(self.ext) + self.path = os.path.join( + self.staging_dir, self.file).replace("\\", "/") + + def render(self, render_node_name): + self.log.info("Rendering... ") + # Render Write node + nuke.execute( + render_node_name, + int(self.first_frame), + int(self.last_frame)) + + self.log.info("Rendered...") + + def save_file(self): + import shutil + with anlib.maintained_selection(): + self.log.info("Saving nodes as file... 
") + # create nk path + path = os.path.splitext(self.path)[0] + ".nk" + # save file to the path + shutil.copyfile(self.instance.context.data["currentFile"], path) + + self.log.info("Nodes exported...") + return path + + def generate_mov(self, farm=False): + # ---------- start nodes creation + + # Read node + r_node = nuke.createNode("Read") + r_node["file"].setValue(self.path_in) + r_node["first"].setValue(self.first_frame) + r_node["origfirst"].setValue(self.first_frame) + r_node["last"].setValue(self.last_frame) + r_node["origlast"].setValue(self.last_frame) + r_node["colorspace"].setValue(self.write_colorspace) + + # connect + self._temp_nodes.append(r_node) + self.previous_node = r_node + self.log.debug("Read... `{}`".format(self._temp_nodes)) + + # only create colorspace baking if toggled on + if self.bake_colorspace: + # View Process node + ipn = self.get_view_process_node() + if ipn is not None: + # connect + ipn.setInput(0, self.previous_node) + self._temp_nodes.append(ipn) + self.previous_node = ipn + self.log.debug("ViewProcess... `{}`".format(self._temp_nodes)) + + if not self.viewer_lut_raw: + colorspaces = [ + self.bake_colorspace_main, self.bake_colorspace_fallback + ] + + if any(colorspaces): + # OCIOColorSpace with controled output + dag_node = nuke.createNode("OCIOColorSpace") + self._temp_nodes.append(dag_node) + for c in colorspaces: + test = dag_node["out_colorspace"].setValue(str(c)) + if test: + self.log.info( + "Baking in colorspace... `{}`".format(c)) + break + + if not test: + dag_node = nuke.createNode("OCIODisplay") + else: + # OCIODisplay + dag_node = nuke.createNode("OCIODisplay") + + # connect + dag_node.setInput(0, self.previous_node) + self._temp_nodes.append(dag_node) + self.previous_node = dag_node + self.log.debug("OCIODisplay... 
`{}`".format(self._temp_nodes)) + + # Write node + write_node = nuke.createNode("Write") + self.log.debug("Path: {}".format(self.path)) + write_node["file"].setValue(self.path) + write_node["file_type"].setValue(self.ext) + + # Knobs `meta_codec` and `mov64_codec` are not available on centos. + # TODO change this to use conditions, if possible. + try: + write_node["meta_codec"].setValue("ap4h") + except Exception: + self.log.info("`meta_codec` knob was not found") + + try: + write_node["mov64_codec"].setValue("ap4h") + except Exception: + self.log.info("`mov64_codec` knob was not found") + write_node["mov64_write_timecode"].setValue(1) + write_node["raw"].setValue(1) + # connect + write_node.setInput(0, self.previous_node) + self._temp_nodes.append(write_node) + self.log.debug("Write... `{}`".format(self._temp_nodes)) + # ---------- end nodes creation + + # ---------- render or save to nk + if farm: + nuke.scriptSave() + path_nk = self.save_file() + self.data.update({ + "bakeScriptPath": path_nk, + "bakeWriteNodeName": write_node.name(), + "bakeRenderPath": self.path + }) + else: + self.render(write_node.name()) + # ---------- generate representation data + self.get_representation_data( + tags=["review", "delete"], + range=True + ) + + self.log.debug("Representation... 
`{}`".format(self.data)) + + # ---------- Clean up + self.clean_nodes() + nuke.scriptSave() + return self.data diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data_lut.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_lut.py index a0f1c9a087..d21cb0eef9 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_lut.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_lut.py @@ -1,7 +1,7 @@ import os import pyblish.api from avalon.nuke import lib as anlib -from openpype.hosts.nuke.api import lib as pnlib +from openpype.hosts.nuke.api import plugin import openpype try: @@ -9,7 +9,7 @@ try: except ImportError: from importlib import reload -reload(pnlib) +reload(plugin) class ExtractReviewDataLut(openpype.api.Extractor): @@ -45,7 +45,7 @@ class ExtractReviewDataLut(openpype.api.Extractor): # generate data with anlib.maintained_selection(): - exporter = pnlib.ExporterReviewLut( + exporter = plugin.ExporterReviewLut( self, instance ) data = exporter.generate_lut() diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py index f4fbc2d0e4..a3fb00bd0e 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py @@ -1,7 +1,7 @@ import os import pyblish.api from avalon.nuke import lib as anlib -from openpype.hosts.nuke.api import lib as pnlib +from openpype.hosts.nuke.api import plugin import openpype try: @@ -9,7 +9,7 @@ try: except ImportError: from importlib import reload -reload(pnlib) +reload(plugin) class ExtractReviewDataMov(openpype.api.Extractor): @@ -47,7 +47,7 @@ class ExtractReviewDataMov(openpype.api.Extractor): # generate data with anlib.maintained_selection(): - exporter = pnlib.ExporterReviewMov( + exporter = plugin.ExporterReviewMov( self, instance) if "render.farm" in families: From 9c21c826a79b97be9654f17e41b663c63ac8d558 Mon Sep 17 00:00:00 
2001 From: Jakub Jezek Date: Tue, 9 Nov 2021 12:41:08 +0100 Subject: [PATCH 091/211] dealing with baking colorspace in publishing collector --- .../hosts/nuke/plugins/publish/precollect_instances.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/publish/precollect_instances.py b/openpype/hosts/nuke/plugins/publish/precollect_instances.py index 5c30df9a62..7bd3b83818 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_instances.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_instances.py @@ -74,6 +74,11 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): if review: families.append("review") + # deside if to bake or not to bake + baking = True + if "bake_colorspace" in node.knobs(): + baking = node["bake_colorspace"].value() + # Add all nodes in group instances. if node.Class() == "Group": # only alter families for render family @@ -142,7 +147,8 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): "resolutionWidth": resolution_width, "resolutionHeight": resolution_height, "pixelAspect": pixel_aspect, - "review": review + "review": review, + "bakeColorspace": baking }) self.log.info("collected instance: {}".format(instance.data)) From f52a017591170488bea2102bf73a41e15d8498e9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 9 Nov 2021 15:05:13 +0100 Subject: [PATCH 092/211] removed unused imports --- openpype/hosts/tvpaint/plugins/publish/extract_sequence.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py index 916d8ee73b..6235b6211d 100644 --- a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py +++ b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py @@ -1,5 +1,4 @@ import os -import shutil import copy import tempfile @@ -13,7 +12,7 @@ from openpype.hosts.tvpaint.lib import ( composite_rendered_layers, 
rename_filepaths_by_frame_start ) -from PIL import Image, ImageDraw +from PIL import Image class ExtractSequence(pyblish.api.Extractor): From 2a8a85ac2efecfaa5f3f3622af3aef7d03186aad Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 9 Nov 2021 15:05:29 +0100 Subject: [PATCH 093/211] modified collect fps to not override instance fps if is already set --- openpype/hosts/webpublisher/plugins/publish/collect_fps.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_fps.py b/openpype/hosts/webpublisher/plugins/publish/collect_fps.py index 79fe53176a..b5e665c761 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_fps.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_fps.py @@ -20,9 +20,8 @@ class CollectFPS(pyblish.api.InstancePlugin): hosts = ["webpublisher"] def process(self, instance): - fps = instance.context.data["fps"] + instance_fps = instance.data.get("fps") + if instance_fps is None: + instance.data["fps"] = instance.context.data["fps"] - instance.data.update({ - "fps": fps - }) self.log.debug(f"instance.data: {pformat(instance.data)}") From d270f4cc0aa2f80700d7a37057958a8528cec698 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 9 Nov 2021 15:06:01 +0100 Subject: [PATCH 094/211] formatting changes --- .../plugins/publish/integrate_context_to_log.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/integrate_context_to_log.py b/openpype/hosts/webpublisher/plugins/publish/integrate_context_to_log.py index 419c065e16..261b7e9e0d 100644 --- a/openpype/hosts/webpublisher/plugins/publish/integrate_context_to_log.py +++ b/openpype/hosts/webpublisher/plugins/publish/integrate_context_to_log.py @@ -28,11 +28,11 @@ class IntegrateContextToLog(pyblish.api.ContextPlugin): "batch_id": instance.data.get("batch_id"), "status": "in_progress" }, - {"$set": - { + { + "$set": { "path": 
instance.data.get("ctx_path") - - }} + } + } ) return From 500d2c66e46415c1a5b79bf04b9410fa4ffa3bd0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 9 Nov 2021 15:20:53 +0100 Subject: [PATCH 095/211] added new batch collector which collect batch data --- .../plugins/publish/collect_batch_data.py | 84 +++++++++++++++++++ 1 file changed, 84 insertions(+) create mode 100644 openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py new file mode 100644 index 0000000000..a710fcb3e8 --- /dev/null +++ b/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py @@ -0,0 +1,84 @@ +"""Loads batch context from json and continues in publish process. + +Provides: + context -> Loaded batch file. +""" + +import os + +import pyblish.api +from avalon import io +from openpype.lib.plugin_tools import ( + parse_json, + get_batch_asset_task_info +) +from openpype.lib.remote_publish import get_webpublish_conn + + +class CollectBatchData(pyblish.api.ContextPlugin): + """Collect batch data from json stored in 'OPENPYPE_PUBLISH_DATA' env dir. + + The directory must contain 'manifest.json' file where batch data should be + stored. + """ + # must be really early, context values are only in json file + order = pyblish.api.CollectorOrder - 0.495 + label = "Collect batch data" + host = ["webpublisher"] + + def process(self, context): + batch_dir = os.environ.get("OPENPYPE_PUBLISH_DATA") + + assert batch_dir, ( + "Missing `OPENPYPE_PUBLISH_DATA`") + + assert os.path.exists(batch_dir), \ + "Folder {} doesn't exist".format(batch_dir) + + project_name = os.environ.get("AVALON_PROJECT") + if project_name is None: + raise AssertionError( + "Environment `AVALON_PROJECT` was not found." + "Could not set project `root` which may cause issues." 
+ ) + + batch_data = parse_json(os.path.join(batch_dir, "manifest.json")) + + context.data["batchDir"] = batch_dir + context.data["batchData"] = batch_data + + asset_name, task_name, task_type = get_batch_asset_task_info( + batch_data["context"] + ) + + os.environ["AVALON_ASSET"] = asset_name + io.Session["AVALON_ASSET"] = asset_name + os.environ["AVALON_TASK"] = task_name + io.Session["AVALON_TASK"] = task_name + + context.data["asset"] = asset_name + context.data["task"] = task_name + context.data["taskType"] = task_type + + self._set_ctx_path(batch_data) + + def _set_ctx_path(self, batch_data): + dbcon = get_webpublish_conn() + + batch_id = batch_data["batch"] + ctx_path = batch_data["context"]["path"] + self.log.info("ctx_path: {}".format(ctx_path)) + self.log.info("batch_id: {}".format(batch_id)) + if ctx_path and batch_id: + self.log.info("Updating log record") + dbcon.update_one( + { + "batch_id": batch_id, + "status": "in_progress" + }, + { + "$set": { + "path": ctx_path + } + } + ) From b7a3921cd7dd511f288f06fb19db50a6082d7fbf Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 9 Nov 2021 15:21:22 +0100 Subject: [PATCH 096/211] modified collect published files to use data from collect batch data --- .../publish/collect_published_files.py | 78 +++++++------------ 1 file changed, 26 insertions(+), 52 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index ecd65ebae4..ac2f4d6e54 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -1,20 +1,21 @@ -"""Loads publishing context from json and continues in publish process. +"""Create instances from batch data and continues in publish process. 
Requires: - anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11) + CollectBatchData Provides: context, instances -> All data from previous publishing process. """ import os -import json import clique import tempfile - -import pyblish.api from avalon import io -from openpype.lib import prepare_template_data +import pyblish.api +from openpype.lib import ( + prepare_template_data, + OpenPypeMongoConnection +) from openpype.lib.plugin_tools import parse_json, get_batch_asset_task_info @@ -29,27 +30,26 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): label = "Collect rendered frames" host = ["webpublisher"] - _context = None - # from Settings task_type_to_family = {} - def _process_batch(self, dir_url): - task_subfolders = [ - os.path.join(dir_url, o) - for o in os.listdir(dir_url) - if os.path.isdir(os.path.join(dir_url, o))] + def process(self, context): + batch_dir = context.data["batchDir"] + task_subfolders = [] + for folder_name in os.listdir(batch_dir): + full_path = os.path.join(batch_dir, folder_name) + if os.path.isdir(full_path): + task_subfolders.append(full_path) + self.log.info("task_sub:: {}".format(task_subfolders)) + + asset_name = context.data["asset"] + task_name = context.data["task"] + task_type = context.data["taskType"] for task_dir in task_subfolders: task_data = parse_json(os.path.join(task_dir, "manifest.json")) self.log.info("task_data:: {}".format(task_data)) - ctx = task_data["context"] - - asset, task_name, task_type = get_batch_asset_task_info(ctx) - - if task_name: - os.environ["AVALON_TASK"] = task_name is_sequence = len(task_data["files"]) > 1 @@ -60,26 +60,20 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): is_sequence, extension.replace(".", '')) - subset = self._get_subset_name(family, subset_template, task_name, - task_data["variant"]) + subset = self._get_subset_name( + family, subset_template, task_name, task_data["variant"] + ) + version = self._get_last_version(asset_name, subset) + 1 - 
os.environ["AVALON_ASSET"] = asset - io.Session["AVALON_ASSET"] = asset - - instance = self._context.create_instance(subset) - instance.data["asset"] = asset + instance = context.create_instance(subset) + instance.data["asset"] = asset_name instance.data["subset"] = subset instance.data["family"] = family instance.data["families"] = families - instance.data["version"] = \ - self._get_last_version(asset, subset) + 1 + instance.data["version"] = version instance.data["stagingDir"] = tempfile.mkdtemp() instance.data["source"] = "webpublisher" - # to store logging info into DB openpype.webpublishes - instance.data["ctx_path"] = ctx["path"] - instance.data["batch_id"] = task_data["batch"] - # to convert from email provided into Ftrack username instance.data["user_email"] = task_data["user"] @@ -230,23 +224,3 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): return version[0].get("version") or 0 else: return 0 - - def process(self, context): - self._context = context - - batch_dir = os.environ.get("OPENPYPE_PUBLISH_DATA") - - assert batch_dir, ( - "Missing `OPENPYPE_PUBLISH_DATA`") - - assert os.path.exists(batch_dir), \ - "Folder {} doesn't exist".format(batch_dir) - - project_name = os.environ.get("AVALON_PROJECT") - if project_name is None: - raise AssertionError( - "Environment `AVALON_PROJECT` was not found." - "Could not set project `root` which may cause issues." 
- ) - - self._process_batch(batch_dir) From ad395595a833df68c2f724a5ad8ed859c7cd4a4b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 9 Nov 2021 15:52:23 +0100 Subject: [PATCH 097/211] extended post of webpublish service to be able run different commands and add different arguments based on extensions filter for studio publishing --- .../webserver_service/webpublish_routes.py | 89 +++++++++++++------ 1 file changed, 62 insertions(+), 27 deletions(-) diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index 920ed042dc..79015c2521 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -176,23 +176,56 @@ class TaskNode(Node): class WebpublisherBatchPublishEndpoint(_RestApiEndpoint): """Triggers headless publishing of batch.""" async def post(self, request) -> Response: - # for postprocessing in host, currently only PS - host_map = {"photoshop": [".psd", ".psb"]} + # Validate existence of openpype executable + openpype_app = self.resource.executable + if not openpype_app or not os.path.exists(openpype_app): + msg = "Non existent OpenPype executable {}".format(openpype_app) + raise RuntimeError(msg) + # for postprocessing in host, currently only PS output = {} log.info("WebpublisherBatchPublishEndpoint called") content = await request.json() - batch_path = os.path.join(self.resource.upload_dir, - content["batch"]) + # Each filter have extensions which are checked on first task item + # - first filter with extensions that are on first task is used + # - filter defines command and can extend arguments dictionary + # This is used only if 'studio_processing' is enabled on batch + studio_processing_filters = [ + # TVPaint filter + { + "extensions": [".tvpp"], + "command": "remotepublish", + "arguments": { + "targets": ["tvpaint"] + } + }, + # Photoshop filter + { + 
"extensions": [".psd", ".psb"], + "command": "remotepublishfromapp", + "arguments": { + # Command 'remotepublishfromapp' requires --host argument + "host": "photoshop", + # Make sure targets are set to None for cases that default + # would change + # - targets argument is not used in 'remotepublishfromapp' + "targets": None + } + } + ] - add_args = { - "host": "webpublisher", - "project": content["project_name"], - "user": content["user"] - } + batch_path = os.path.join(self.resource.upload_dir, content["batch"]) + # Default command and arguments command = "remotepublish" + add_args = { + # All commands need 'project' and 'user' + "project": content["project_name"], + "user": content["user"], + + "targets": None + } if content.get("studio_processing"): log.info("Post processing called") @@ -208,32 +241,34 @@ class WebpublisherBatchPublishEndpoint(_RestApiEndpoint): raise ValueError( "Cannot parse batch meta in {} folder".format(task_data)) - command = "remotepublishfromapp" - for host, extensions in host_map.items(): - for ext in extensions: - for file_name in task_data["files"]: - if ext in file_name: - add_args["host"] = host - break + for process_filter in studio_processing_filters: + filter_extensions = process_filter.get("extensions") or [] + for file_name in task_data["files"]: + file_ext = os.path.splitext(file_name)[-1].lower() + if file_ext in filter_extensions: + # Change command + command = process_filter["command"] + # Update arguments + add_args.update( + process_filter.get("arguments") or {} + ) + break - if not add_args.get("host"): - raise ValueError( - "Couldn't discern host from {}".format(task_data["files"])) - - openpype_app = self.resource.executable args = [ openpype_app, command, batch_path ] - if not openpype_app or not os.path.exists(openpype_app): - msg = "Non existent OpenPype executable {}".format(openpype_app) - raise RuntimeError(msg) - for key, value in add_args.items(): - args.append("--{}".format(key)) - args.append(value) + # Skip 
key values where value is None + if value is not None: + args.append("--{}".format(key)) + # Extend list into arguments (targets can be a list) + if isinstance(value, (tuple, list)): + args.extend(value) + else: + args.append(value) log.info("args:: {}".format(args)) From e41741d036ae41e5628eccfee365153b1adc89c8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 9 Nov 2021 15:52:54 +0100 Subject: [PATCH 098/211] added targets for default webpublisher --- .../hosts/webpublisher/webserver_service/webpublish_routes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index 79015c2521..70d791b07f 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -224,7 +224,7 @@ class WebpublisherBatchPublishEndpoint(_RestApiEndpoint): "project": content["project_name"], "user": content["user"], - "targets": None + "targets": ["filespublish"] } if content.get("studio_processing"): From 3b33e2f1aa51f766cd9ef2afe750ae83f3769254 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 9 Nov 2021 15:53:05 +0100 Subject: [PATCH 099/211] added targets to collect published files --- .../webpublisher/plugins/publish/collect_published_files.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index ac2f4d6e54..5d6fe69c8d 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -29,6 +29,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder - 0.490 label = "Collect rendered frames" host = ["webpublisher"] + targets = ["filespublish"] # from Settings 
task_type_to_family = {} From 98ced1932a96c782e8e4c116d115886dd189f7ec Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 9 Nov 2021 15:54:35 +0100 Subject: [PATCH 100/211] reorganized args order in remotepublishfromapp to match called function --- openpype/cli.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/cli.py b/openpype/cli.py index 8afa6b2c75..be505120f4 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -172,7 +172,7 @@ def publish(debug, paths, targets): @click.option("-p", "--project", help="Project") @click.option("-t", "--targets", help="Targets", default=None, multiple=True) -def remotepublishfromapp(debug, project, path, host, targets=None, user=None): +def remotepublishfromapp(debug, project, path, host, user=None, targets=None): """Start CLI publishing. Publish collects json from paths provided as an argument. @@ -180,8 +180,10 @@ def remotepublishfromapp(debug, project, path, host, targets=None, user=None): """ if debug: os.environ['OPENPYPE_DEBUG'] = '3' - PypeCommands.remotepublishfromapp(project, path, host, user, - targets=targets) + PypeCommands.remotepublishfromapp( + project, path, host, user, targets=targets + ) + @main.command() @click.argument("path") From 54545478aa0794f8ae1541ffce61d0e4215547db Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 9 Nov 2021 15:55:42 +0100 Subject: [PATCH 101/211] do not trigger install/uninstall in 'remotepublishfromapp' as it's not needed --- openpype/pype_commands.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 769b173087..0267f321ce 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -147,16 +147,13 @@ class PypeCommands: SLEEP = 5 # seconds for another loop check for concurrently runs WAIT_FOR = 300 # seconds to wait for conc. 
runs - from openpype import install, uninstall from openpype.api import Logger + from openpype.lib import ApplicationManager log = Logger.get_logger() log.info("remotepublishphotoshop command") - install() - - from openpype.lib import ApplicationManager application_manager = ApplicationManager() found_variant_key = find_variant_key(application_manager, host) @@ -230,8 +227,6 @@ class PypeCommands: while launched_app.poll() is None: time.sleep(0.5) - uninstall() - @staticmethod def remotepublish(project, batch_path, host, user, targets=None): """Start headless publishing. From 5c1b388292b65da9b948001cf3a0b9edba1ca912 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 9 Nov 2021 15:56:05 +0100 Subject: [PATCH 102/211] use already imported logger --- openpype/pype_commands.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 0267f321ce..43d7c45365 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -252,13 +252,12 @@ class PypeCommands: raise RuntimeError("No publish paths specified") from openpype import install, uninstall - from openpype.api import Logger # Register target and host import pyblish.api import pyblish.util - log = Logger.get_logger() + log = PypeLogger.get_logger() log.info("remotepublish command") From 76769104ef706373ee1c822f7819fb0bac7c148a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 9 Nov 2021 16:03:42 +0100 Subject: [PATCH 103/211] host name in 'remotepublish command is always "webpublisher" --- openpype/pype_commands.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 43d7c45365..0aeadf5d55 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -251,8 +251,6 @@ class PypeCommands: if not batch_path: raise RuntimeError("No publish paths specified") - from openpype import install, uninstall - # Register target and host import 
pyblish.api import pyblish.util @@ -261,10 +259,10 @@ class PypeCommands: log.info("remotepublish command") - install() + host_name = "webpublisher" + os.environ["AVALON_APP"] = host_name - if host: - pyblish.api.register_host(host) + pyblish.api.register_host(host_name) if targets: if isinstance(targets, str): @@ -274,7 +272,6 @@ class PypeCommands: os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path os.environ["AVALON_PROJECT"] = project - os.environ["AVALON_APP"] = host import avalon.api from openpype.hosts.webpublisher import api as webpublisher @@ -290,7 +287,6 @@ class PypeCommands: publish_and_log(dbcon, _id, log) log.info("Publish finished.") - uninstall() @staticmethod def extractenvironments(output_json_path, project, asset, task, app): From 350d1c0f008024f7be2edc64712fa0177e74d1f9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 9 Nov 2021 16:04:06 +0100 Subject: [PATCH 104/211] removed host argument from 'remotepublish' command --- openpype/cli.py | 5 ++--- openpype/pype_commands.py | 7 +++---- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/openpype/cli.py b/openpype/cli.py index be505120f4..050a80444f 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -188,12 +188,11 @@ def remotepublishfromapp(debug, project, path, host, user=None, targets=None): @main.command() @click.argument("path") @click.option("-d", "--debug", is_flag=True, help="Print debug messages") -@click.option("-h", "--host", help="Host") @click.option("-u", "--user", help="User email address") @click.option("-p", "--project", help="Project") @click.option("-t", "--targets", help="Targets", default=None, multiple=True) -def remotepublish(debug, project, path, host, targets=None, user=None): +def remotepublish(debug, project, path, user=None, targets=None): """Start CLI publishing. Publish collects json from paths provided as an argument. 
@@ -201,7 +200,7 @@ def remotepublish(debug, project, path, host, targets=None, user=None): """ if debug: os.environ['OPENPYPE_DEBUG'] = '3' - PypeCommands.remotepublish(project, path, host, user, targets=targets) + PypeCommands.remotepublish(project, path, user, targets=targets) @main.command() diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 0aeadf5d55..220dabd0e7 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -228,7 +228,7 @@ class PypeCommands: time.sleep(0.5) @staticmethod - def remotepublish(project, batch_path, host, user, targets=None): + def remotepublish(project, batch_path, user, targets=None): """Start headless publishing. Used to publish rendered assets, workfiles etc. @@ -240,10 +240,9 @@ class PypeCommands: per call of remotepublish batch_path (str): Path batch folder. Contains subfolders with resources (workfile, another subfolder 'renders' etc.) - targets (string): What module should be targeted - (to choose validator for example) - host (string) user (string): email address for webpublisher + targets (list): Pyblish targets + (to choose validator for example) Raises: RuntimeError: When there is no path to process. 
From 9b43fcb2c84e724e75a79f3d25df18f4a8160f38 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 9 Nov 2021 16:04:30 +0100 Subject: [PATCH 105/211] set all context information at one place --- openpype/pype_commands.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 220dabd0e7..24a4279715 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -253,12 +253,16 @@ class PypeCommands: # Register target and host import pyblish.api import pyblish.util + import avalon.api + from openpype.hosts.webpublisher import api as webpublisher log = PypeLogger.get_logger() log.info("remotepublish command") host_name = "webpublisher" + os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path + os.environ["AVALON_PROJECT"] = project os.environ["AVALON_APP"] = host_name pyblish.api.register_host(host_name) @@ -269,12 +273,6 @@ class PypeCommands: for target in targets: pyblish.api.register_target(target) - os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path - os.environ["AVALON_PROJECT"] = project - - import avalon.api - from openpype.hosts.webpublisher import api as webpublisher - avalon.api.install(webpublisher) log.info("Running publish ...") From f55c2f44a0eafe535b98eea5b6ffb2d2f44fc57c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 9 Nov 2021 16:08:28 +0100 Subject: [PATCH 106/211] removed unused import --- .../default_modules/job_queue/job_server/workers_rpc_route.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/default_modules/job_queue/job_server/workers_rpc_route.py b/openpype/modules/default_modules/job_queue/job_server/workers_rpc_route.py index 60ad6d24bf..0800ca0d4d 100644 --- a/openpype/modules/default_modules/job_queue/job_server/workers_rpc_route.py +++ b/openpype/modules/default_modules/job_queue/job_server/workers_rpc_route.py @@ -3,7 +3,7 @@ import asyncio import aiohttp from aiohttp_json_rpc import JsonRpc from 
aiohttp_json_rpc.protocol import ( - encode_request, encode_error, decode_msg, JsonRpcMsgTyp + encode_error, decode_msg, JsonRpcMsgTyp ) from aiohttp_json_rpc.exceptions import RpcError from .workers import Worker From 948daa5b76c8a87f2ed10404cba55faf5ffbdb41 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 9 Nov 2021 18:34:02 +0100 Subject: [PATCH 107/211] added targets to tvpaint related plugins --- .../plugins/publish/collect_tvpaint_workfile_data.py | 1 + .../webpublisher/plugins/publish/extract_tvpaint_workfile.py | 1 + 2 files changed, 2 insertions(+) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py index f4562c6f63..593816114e 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py @@ -17,6 +17,7 @@ class CollectTVPaintWorkfileData(pyblish.api.InstancePlugin): label = "Collect TVPaint Workfile data" order = pyblish.api.CollectorOrder + 0.1 hosts = ["webpublisher"] + targets = ["tvpaint"] # TODO add families filter def process(self, instance): diff --git a/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py b/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py index 3659e9f0df..c012841f31 100644 --- a/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py +++ b/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py @@ -25,6 +25,7 @@ class ExtractTVPaintSequences(pyblish.api.Extractor): label = "Extract TVPaint Sequences" hosts = ["webpublisher"] families = ["review", "renderPass", "renderLayer"] + targets = ["tvpaint"] # Modifiable with settings review_bg = [255, 255, 255, 255] From c4de365d0650c4f87f61c007ea6f457086bc5862 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 10 Nov 2021 10:57:55 +0100 Subject: [PATCH 108/211] adding baking toggle 
knobs to render node bake colorspace, bake viewer process --- openpype/hosts/nuke/api/lib.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index e1e417df4d..bea2fe47a9 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -544,8 +544,7 @@ def add_rendering_knobs(node, farm=True): Return: node (obj): with added knobs ''' - knob_options = [ - "Use existing frames", "Local"] + knob_options = ["Use existing frames", "Local"] if farm: knob_options.append("On farm") @@ -568,6 +567,7 @@ def add_review_knob(node): if "review" not in node.knobs(): knob = nuke.Boolean_Knob("review", "Review") knob.setValue(True) + knob.setFlag(nuke.STARTLINE) node.addKnob(knob) return node @@ -585,6 +585,10 @@ def add_bake_colorspace_knob(node): knob = nuke.Boolean_Knob("bake_colorspace", "Bake colorspace") knob.setValue(True) node.addKnob(knob) + if "bake_viewer_input" not in node.knobs(): + knob = nuke.Boolean_Knob("bake_viewer_input", "Bake viewer input") + knob.setValue(True) + node.addKnob(knob) return node From 3a63f587bf32055b6895c8875091588dc3eef9ad Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 10 Nov 2021 10:59:10 +0100 Subject: [PATCH 109/211] improving extractor for multiple processing --- openpype/hosts/nuke/api/plugin.py | 111 ++++++++++++++++-------------- 1 file changed, 61 insertions(+), 50 deletions(-) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 9801e19126..b753bc0965 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -97,8 +97,7 @@ class NukeLoader(api.Loader): return dependent_nodes - -class ExporterReview: +class ExporterReview(object): """ Base class object for generating review data from Nuke @@ -107,7 +106,6 @@ class ExporterReview: instance (pyblish.instance): instance of pyblish context """ - _temp_nodes = [] data = dict({ "representations": list() }) @@ 
-120,6 +118,7 @@ class ExporterReview: self.log = klass.log self.instance = instance self.bake_colorspace = instance.data["bakeColorspace"] + self.bake_viewer_input = instance.data["bakeViewerInput"] self.path_in = self.instance.data.get("path", None) self.staging_dir = self.instance.data["stagingDir"] self.collection = self.instance.data.get("collection", None) @@ -198,12 +197,6 @@ class ExporterReview: return ipn - def clean_nodes(self): - for node in self._temp_nodes: - nuke.delete(node) - self._temp_nodes = [] - self.log.info("Deleted nodes...") - class ExporterReviewLut(ExporterReview): """ @@ -215,6 +208,7 @@ class ExporterReviewLut(ExporterReview): """ + _temp_nodes = [] def __init__(self, klass, @@ -225,8 +219,7 @@ class ExporterReviewLut(ExporterReview): lut_size=None, lut_style=None): # initialize parent class - ExporterReview.__init__(self, klass, instance) - self._temp_nodes = [] + super(ExporterReviewLut, self).__init__(klass, instance) # deal with now lut defined in viewer lut if hasattr(klass, "viewer_lut_raw"): @@ -249,6 +242,12 @@ class ExporterReviewLut(ExporterReview): self.path = os.path.join( self.staging_dir, self.file).replace("\\", "/") + def clean_nodes(self): + for node in self._temp_nodes: + nuke.delete(node) + self._temp_nodes = [] + self.log.info("Deleted nodes...") + def generate_lut(self): # ---------- start nodes creation @@ -260,23 +259,26 @@ class ExporterReviewLut(ExporterReview): self.previous_node = cms_node self.log.debug("CMSTestPattern... `{}`".format(self._temp_nodes)) - # Node View Process - ipn = self.get_view_process_node() - if ipn is not None: - # connect - ipn.setInput(0, self.previous_node) - self._temp_nodes.append(ipn) - self.previous_node = ipn - self.log.debug("ViewProcess... 
`{}`".format(self._temp_nodes)) + if self.bake_colorspace: + # Node View Process + if self.bake_viewer_input: + ipn = self.get_view_process_node() + if ipn is not None: + # connect + ipn.setInput(0, self.previous_node) + self._temp_nodes.append(ipn) + self.previous_node = ipn + self.log.debug( + "ViewProcess... `{}`".format(self._temp_nodes)) - if not self.viewer_lut_raw: - # OCIODisplay - dag_node = nuke.createNode("OCIODisplay") - # connect - dag_node.setInput(0, self.previous_node) - self._temp_nodes.append(dag_node) - self.previous_node = dag_node - self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes)) + if not self.viewer_lut_raw: + # OCIODisplay + dag_node = nuke.createNode("OCIODisplay") + # connect + dag_node.setInput(0, self.previous_node) + self._temp_nodes.append(dag_node) + self.previous_node = dag_node + self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes)) # GenerateLUT gen_lut_node = nuke.createNode("GenerateLUT") @@ -319,6 +321,7 @@ class ExporterReviewMov(ExporterReview): instance (pyblish.instance): instance of pyblish context """ + _temp_nodes = {} def __init__(self, klass, @@ -327,13 +330,9 @@ class ExporterReviewMov(ExporterReview): ext=None, ): # initialize parent class - ExporterReview.__init__(self, klass, instance) - + super(ExporterReviewMov, self).__init__(klass, instance) # passing presets for nodes to self - if hasattr(klass, "nodes"): - self.nodes = klass.nodes - else: - self.nodes = {} + self.nodes = klass.nodes if hasattr(klass, "nodes") else {} # deal with now lut defined in viewer lut self.viewer_lut_raw = klass.viewer_lut_raw @@ -353,6 +352,12 @@ class ExporterReviewMov(ExporterReview): self.path = os.path.join( self.staging_dir, self.file).replace("\\", "/") + def clean_nodes(self, node_name): + for node in self._temp_nodes[node_name]: + nuke.delete(node) + self._temp_nodes[node_name] = [] + self.log.info("Deleted nodes...") + def render(self, render_node_name): self.log.info("Rendering... 
") # Render Write node @@ -376,6 +381,8 @@ class ExporterReviewMov(ExporterReview): return path def generate_mov(self, farm=False): + subset = self.instance.data["subset"] + self._temp_nodes[subset] = [] # ---------- start nodes creation # Read node @@ -388,20 +395,23 @@ class ExporterReviewMov(ExporterReview): r_node["colorspace"].setValue(self.write_colorspace) # connect - self._temp_nodes.append(r_node) + self._temp_nodes[subset].append(r_node) self.previous_node = r_node - self.log.debug("Read... `{}`".format(self._temp_nodes)) + self.log.debug("Read... `{}`".format(self._temp_nodes[subset])) # only create colorspace baking if toggled on if self.bake_colorspace: - # View Process node - ipn = self.get_view_process_node() - if ipn is not None: - # connect - ipn.setInput(0, self.previous_node) - self._temp_nodes.append(ipn) - self.previous_node = ipn - self.log.debug("ViewProcess... `{}`".format(self._temp_nodes)) + if self.bake_viewer_input: + # View Process node + ipn = self.get_view_process_node() + if ipn is not None: + # connect + ipn.setInput(0, self.previous_node) + self._temp_nodes[subset].append(ipn) + self.previous_node = ipn + self.log.debug( + "ViewProcess... `{}`".format( + self._temp_nodes[subset])) if not self.viewer_lut_raw: colorspaces = [ @@ -411,7 +421,7 @@ class ExporterReviewMov(ExporterReview): if any(colorspaces): # OCIOColorSpace with controled output dag_node = nuke.createNode("OCIOColorSpace") - self._temp_nodes.append(dag_node) + self._temp_nodes[subset].append(dag_node) for c in colorspaces: test = dag_node["out_colorspace"].setValue(str(c)) if test: @@ -427,9 +437,10 @@ class ExporterReviewMov(ExporterReview): # connect dag_node.setInput(0, self.previous_node) - self._temp_nodes.append(dag_node) + self._temp_nodes[subset].append(dag_node) self.previous_node = dag_node - self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes)) + self.log.debug("OCIODisplay... 
`{}`".format( + self._temp_nodes[subset])) # Write node write_node = nuke.createNode("Write") @@ -452,8 +463,8 @@ class ExporterReviewMov(ExporterReview): write_node["raw"].setValue(1) # connect write_node.setInput(0, self.previous_node) - self._temp_nodes.append(write_node) - self.log.debug("Write... `{}`".format(self._temp_nodes)) + self._temp_nodes[subset].append(write_node) + self.log.debug("Write... `{}`".format(self._temp_nodes[subset])) # ---------- end nodes creation # ---------- render or save to nk @@ -475,7 +486,7 @@ class ExporterReviewMov(ExporterReview): self.log.debug("Representation... `{}`".format(self.data)) - # ---------- Clean up - self.clean_nodes() + self.clean_nodes(subset) nuke.scriptSave() + return self.data From 83b62aad14d34ffa776c40b4c5b985cb73cc1786 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 10 Nov 2021 11:00:17 +0100 Subject: [PATCH 110/211] better pep8 --- openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py index a3fb00bd0e..83275f9716 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py @@ -35,7 +35,7 @@ class ExtractReviewDataMov(openpype.api.Extractor): self.log.info("Creating staging dir...") if "representations" not in instance.data: - instance.data["representations"] = list() + instance.data["representations"] = [] staging_dir = os.path.normpath( os.path.dirname(instance.data['path'])) From ab68b5ee4fdfe3df4ca01cbad6d6cedf95705eb7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 10 Nov 2021 11:01:00 +0100 Subject: [PATCH 111/211] baking toggles to precollect instances --- .../nuke/plugins/publish/precollect_instances.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git 
a/openpype/hosts/nuke/plugins/publish/precollect_instances.py b/openpype/hosts/nuke/plugins/publish/precollect_instances.py index 7bd3b83818..c34e314a79 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_instances.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_instances.py @@ -79,6 +79,16 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): if "bake_colorspace" in node.knobs(): baking = node["bake_colorspace"].value() + if baking: + families.append("bake_viewer") + + viewer_input = True + if "bake_viewer_input" in node.knobs(): + viewer_input = node["bake_viewer_input"].value() + + if viewer_input: + families.append("bake_viewer_input") + # Add all nodes in group instances. if node.Class() == "Group": # only alter families for render family @@ -148,7 +158,8 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): "resolutionHeight": resolution_height, "pixelAspect": pixel_aspect, "review": review, - "bakeColorspace": baking + "bakeColorspace": baking, + "bakeViewerInput": viewer_input }) self.log.info("collected instance: {}".format(instance.data)) From 8de9cfa8574257cc08bf86746b175e5c17a2118e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 10 Nov 2021 14:24:33 +0100 Subject: [PATCH 112/211] added workfile collection with copying the workfile to job queue root --- .../publish/collect_tvpaint_workfile_data.py | 81 ++++++++++++++++--- 1 file changed, 68 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py index 593816114e..a56eb36270 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py @@ -4,9 +4,18 @@ Requires: CollectModules Provides: - Instance + workfilePath - Path to tvpaint workfile + sceneData - Scene data loaded from the workfile + groupsData - + layersData + 
layersExposureFrames + layersPrePostBehavior """ +import os +import uuid +import shutil import pyblish.api +from openpype.lib.plugin_tools import parse_json from openpype.hosts.tvpaint.worker import ( SenderTVPaintCommands, CollectSceneData @@ -15,22 +24,28 @@ from openpype.hosts.tvpaint.worker import ( class CollectTVPaintWorkfileData(pyblish.api.InstancePlugin): label = "Collect TVPaint Workfile data" - order = pyblish.api.CollectorOrder + 0.1 + order = pyblish.api.CollectorOrder hosts = ["webpublisher"] targets = ["tvpaint"] - # TODO add families filter - def process(self, instance): - # TODO change 'tvpaint_workfile' this is just dummy access - workfile = instance.data["tvpaint_workfile"] + def process(self, context): # Get JobQueue module - modules = instance.context.data["openPypeModules"] + modules = context.context.data["openPypeModules"] job_queue_module = modules["job_queue"] + context_staging_dir = self._create_context_staging_dir( + context, job_queue_module + ) + workfile_path = self._extract_workfile_path( + context, context_staging_dir + ) + context.data["contextStagingDir"] = context_staging_dir + context.data["workfilePath"] = workfile_path + # Prepare tvpaint command collect_scene_data_command = CollectSceneData() # Create TVPaint sender commands - commands = SenderTVPaintCommands(workfile, job_queue_module) + commands = SenderTVPaintCommands(workfile_path, job_queue_module) commands.add_command(collect_scene_data_command) # Send job and wait for answer @@ -49,7 +64,7 @@ class CollectTVPaintWorkfileData(pyblish.api.InstancePlugin): # Store results # scene data store the same way as TVPaint collector - instance.data["sceneData"] = { + context.data["sceneData"] = { "sceneWidth": scene_data["width"], "sceneHeight": scene_data["height"], "scenePixelAspect": scene_data["pixel_aspect"], @@ -65,7 +80,47 @@ class CollectTVPaintWorkfileData(pyblish.api.InstancePlugin): "sceneBgColor": scene_data["bg_color"] } # Store only raw data - 
instance.data["groupsData"] = groups_data - instance.data["layersData"] = layers_data - instance.data["layersExposureFrames"] = exposure_frames_by_layer_id - instance.data["layersPrePostBehavior"] = pre_post_beh_by_layer_id + context.data["groupsData"] = groups_data + context.data["layersData"] = layers_data + context.data["layersExposureFrames"] = exposure_frames_by_layer_id + context.data["layersPrePostBehavior"] = pre_post_beh_by_layer_id + + def _create_context_staging_dir(self, context, job_queue_module): + work_root = job_queue_module.get_work_root() + if not work_root: + raise ValueError("Work root in JobQueue module is not set.") + + if not os.path.exists(work_root): + os.makedirs(work_root) + + random_folder_name = str(uuid.uuid4()) + full_path = os.path.join(work_root, random_folder_name) + if not os.path.exists(full_path): + os.makedirs(full_path) + return full_path + + def _extract_workfile_path(self, context, context_staging_dir): + """Find first TVPaint file in tasks and use it.""" + batch_dir = context.data["batchDir"] + batch_data = context.data["batchData"] + src_workfile_path = None + for task_id in batch_data["tasks"]: + if src_workfile_path is not None: + break + task_dir = os.path.join(batch_dir, task_id) + task_manifest_path = os.path.join(task_dir, "manifest.json") + task_data = parse_json(task_manifest_path) + task_files = task_data["files"] + for filename in task_files: + _, ext = os.path.splitext(filename) + if ext.lower() == ".tvpp": + src_workfile_path = os.path.join(task_dir, filename) + break + + # Copy workfile to job queue work root + new_workfile_path = os.path.join( + context_staging_dir, os.path.basename(src_workfile_path) + ) + shutil.copy(src_workfile_path, new_workfile_path) + + return new_workfile_path From 2d513073c8fcebe4d1b180817b012ca05f883593 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 10 Nov 2021 14:25:04 +0100 Subject: [PATCH 113/211] use workfile in extraction --- .../plugins/publish/extract_tvpaint_workfile.py 
| 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py b/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py index c012841f31..2118982917 100644 --- a/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py +++ b/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py @@ -31,12 +31,16 @@ class ExtractTVPaintSequences(pyblish.api.Extractor): review_bg = [255, 255, 255, 255] def process(self, context): - scene_data = context["sceneData"] + # Get workfle path + workfile_path = context.data["workfilePath"] + # Prepare scene data + scene_data = context.data["sceneData"] scene_mark_in = scene_data["sceneMarkIn"] scene_mark_out = scene_data["sceneMarkOut"] scene_start_frame = scene_data["sceneStartFrame"] scene_bg_color = scene_data["sceneBgColor"] + # Prepare layers behavior behavior_by_layer_id = context.data["layersPrePostBehavior"] exposure_frames_by_layer_id = context.data["layersExposureFrames"] @@ -44,7 +48,13 @@ class ExtractTVPaintSequences(pyblish.api.Extractor): handle_start = context.data["handleStart"] handle_end = context.data["handleEnd"] - tvpaint_commands = SenderTVPaintCommands() + # Get JobQueue module + modules = context.context.data["openPypeModules"] + job_queue_module = modules["job_queue"] + + tvpaint_commands = SenderTVPaintCommands( + workfile_path, job_queue_module + ) # Change scene Start Frame to 0 to prevent frame index issues # - issue is that TVPaint versions deal with frame indexes in a From e10d69cf759a1f767e3f3a60c8e03478af72274d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 10 Nov 2021 14:25:15 +0100 Subject: [PATCH 114/211] implemented collection of instance in tvpaint --- .../publish/collect_tvpaint_instances.py | 234 ++++++++++++++++++ 1 file changed, 234 insertions(+) create mode 100644 openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py diff --git 
a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py new file mode 100644 index 0000000000..7c574fde5b --- /dev/null +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py @@ -0,0 +1,234 @@ +""" +Requires: + CollectTVPaintWorkfileData + +Provides: + Instance +""" +import os +import re +import copy +import pyblish.api + +from openpype.lib import get_subset_name_with_asset_doc + + +class CollectTVPaintWorkfileData(pyblish.api.InstancePlugin): + label = "Collect TVPaint Workfile data" + order = pyblish.api.CollectorOrder + 0.1 + hosts = ["webpublisher"] + targets = ["tvpaint"] + + workfile_family = "workfile" + workfile_variant = "" + review_family = "review" + review_variant = "Main" + render_pass_family = "renderPass" + render_layer_family = "renderLayer" + render_layer_pass_name = "beauty" + + # Set by settings + # Regex must constain 'layer' and 'variant' groups which are extracted from + # name when instances are created + layer_name_regex = r"(?PL[0-9]{3}_\w+)_(?P.+)" + + def process(self, context): + # Prepare compiled regex + layer_name_regex = re.compile(self.layer_name_regex) + + layers_data = context.data["layersData"] + + host_name = "tvpaint" + task_name = context.data.get("task") + asset_doc = context.data["assetEntity"] + project_doc = context.data["projectEntity"] + project_name = project_doc["name"] + + new_instances = [] + + # Workfile instance + workfile_subset_name = get_subset_name_with_asset_doc( + self.workfile_family, + self.workfile_variant, + task_name, + asset_doc, + project_name, + host_name + ) + workfile_instance = self._create_workfile_instance( + context, workfile_subset_name + ) + new_instances.append(workfile_instance) + + # Review instance + review_subset_name = get_subset_name_with_asset_doc( + self.review_family, + self.review_variant, + task_name, + asset_doc, + project_name, + host_name + ) + review_instance = 
self._create_review_instance( + context, review_subset_name + ) + new_instances.append(review_instance) + + layers_by_render_layer = {} + for layer in layers_data: + # Filter only visible layers + if not layer["visible"]: + continue + + result = layer_name_regex.search(layer["name"]) + render_layer = result.group("layer") + variant = result.group("variant") + + if render_layer not in layers_by_render_layer: + layers_by_render_layer[render_layer] = [] + layers_by_render_layer[render_layer].append(copy.deepcopy(layer)) + dynamic_data = { + "render_pass": variant, + "render_layer": render_layer, + # Override family for subset name + "family": "render" + } + + subset_name = get_subset_name_with_asset_doc( + self.render_pass_family, + variant, + task_name, + asset_doc, + project_name, + host_name, + dynamic_data=dynamic_data + ) + + instance = self._create_render_pass_instance( + context, layer, subset_name + ) + new_instances.append(instance) + + for render_layer, layers in layers_by_render_layer.items(): + variant = render_layer + dynamic_data = { + "render_pass": self.render_layer_pass_name, + "render_layer": render_layer, + # Override family for subset name + "family": "render" + } + subset_name = get_subset_name_with_asset_doc( + self.render_pass_family, + variant, + task_name, + asset_doc, + project_name, + host_name, + dynamic_data=dynamic_data + ) + instance = self._create_render_layer_instance( + context, subset_name, layers + ) + new_instances.append(instance) + + # Set data same for all instances + scene_fps = context.data["sceneFps"] + frame_start = context.data.get("frameStart") + frame_end = context.data.get("frameEnd") + + for instance in new_instances: + if instance.data.get("fps") is None: + instance.data["fps"] = scene_fps + + if ( + instance.data.get("frameStart") is None + or instance.data.get("frameEnd") is None + ): + instance.data["frameStart"] = frame_start + instance.data["frameEnd"] = frame_end + + if instance.data.get("asset") is None: + 
instance.data["asset"] = asset_doc["name"] + + if instance.data.get("task") is None: + instance.data["task"] = task_name + + def _create_workfile_instance(self, context, subset_name): + workfile_path = context.data["workfilePath"] + staging_dir = os.path.dirname(workfile_path) + filename = os.path.basename(workfile_path) + ext = os.path.splitext(filename)[-1] + + return context.create_instance(**{ + "name": subset_name, + "label": subset_name, + "subset": subset_name, + "family": self.workfile_family, + "families": [self.workfile_family], + "stagingDir": staging_dir, + "representations": [{ + "name": ext.lstrip("."), + "ext": ext.lstrip("."), + "files": filename, + "stagingDir": staging_dir + }] + }) + + def _create_review_instance(self, context, subset_name): + context_staging_dir = context.data["contextStagingDir"] + staging_dir = os.path.join(context_staging_dir, subset_name) + layers_data = context.data["layersData"] + # Filter hidden layers + filtered_layers_data = [ + copy.deepcopy(layer) + for layer in layers_data + if layer["visible"] + ] + return context.create_instance(**{ + "name": subset_name, + "label": subset_name, + "subset": subset_name, + "family": self.review_family, + "layers": filtered_layers_data, + "stagingDir": staging_dir + }) + + def _create_render_pass_instance(self, context, layer, subset_name): + # Global instance data modifications + # Fill families + instance_label = "{} [{}-{}]".format( + subset_name, + context.data["sceneMarkIn"] + 1, + context.data["sceneMarkOut"] + 1 + ) + + return context.create_instance(**{ + "subset": subset_name, + "label": instance_label, + "family": self.render_pass_family, + # Add `review` family for thumbnail integration + "families": [self.render_pass_family, "review"], + "fps": context.data["sceneFps"], + "representations": [], + "layers": [layer] + }) + + def _create_render_layer_instance(self, context, layers, subset_name): + # Global instance data modifications + # Fill families + instance_label = "{} 
[{}-{}]".format( + subset_name, + context.data["sceneMarkIn"] + 1, + context.data["sceneMarkOut"] + 1 + ) + + return context.create_instance(**{ + "subset": subset_name, + "label": instance_label, + "family": self.render_pass_family, + # Add `review` family for thumbnail integration + "families": [self.render_pass_family, "review"], + "fps": context.data["sceneFps"], + "representations": [], + "layers": layers + }) From 40c9791e4a61e418aa70b830e23f5b83a282b0f0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 10 Nov 2021 14:25:30 +0100 Subject: [PATCH 115/211] added getter methods for work roots --- .../default_modules/job_queue/module.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/openpype/modules/default_modules/job_queue/module.py b/openpype/modules/default_modules/job_queue/module.py index cdcfafdce1..69005b3162 100644 --- a/openpype/modules/default_modules/job_queue/module.py +++ b/openpype/modules/default_modules/job_queue/module.py @@ -1,5 +1,7 @@ import sys import json +import copy +import platform if sys.version_info[0] == 2: from urlparse import urlsplit, urlunsplit else: @@ -17,6 +19,17 @@ class JobQueueModule(OpenPypeModule): server_url = modules_settings.get("server_url") or "" self._server_url = self.url_conversion(server_url) + work_root_mapping = modules_settings.get("work_root") + if not work_root_mapping: + work_root_mapping = { + "windows": "", + "linux": "", + "darwin": "" + } + self._work_root_mapping = work_root_mapping + + # Is always enabled + # - the module does nothing until is used self.enabled = True @staticmethod @@ -48,6 +61,12 @@ class JobQueueModule(OpenPypeModule): return urlunsplit(url_parts) + def get_work_root_mapping(self): + return copy.deepcopy(self._work_root_mapping) + + def get_work_root(self): + self._work_root_mapping.get(platform.system().lower()) + @property def server_url(self): return self._server_url From 9578a4844eedbd450ff076b8d0f359aba676da8e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT 
Date: Wed, 10 Nov 2021 14:43:23 +0100 Subject: [PATCH 116/211] be able to get latest system settings roots settings --- openpype/modules/default_modules/job_queue/module.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/openpype/modules/default_modules/job_queue/module.py b/openpype/modules/default_modules/job_queue/module.py index 69005b3162..85eede20c7 100644 --- a/openpype/modules/default_modules/job_queue/module.py +++ b/openpype/modules/default_modules/job_queue/module.py @@ -67,6 +67,18 @@ class JobQueueModule(OpenPypeModule): def get_work_root(self): self._work_root_mapping.get(platform.system().lower()) + @classmethod + def get_work_root_from_settings(cls): + module_settings = get_system_settings()["modules"] + work_root_mapping = module_settings.get(cls.name, {}).get("work_root") + if not work_root_mapping: + work_root_mapping = { + "windows": "", + "linux": "", + "darwin": "" + } + return work_root_mapping.get(platform.system().lower()) + @property def server_url(self): return self._server_url From fa233315869cac532ef19cae17d5e1a645c3e386 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 10 Nov 2021 14:44:48 +0100 Subject: [PATCH 117/211] replaced output_dirs with root dir key --- openpype/hosts/tvpaint/worker/worker_job.py | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index e458c1e272..8890abc98c 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -127,21 +127,18 @@ class ExecuteGeorgeScript(BaseCommand): name = "execute_george_through_file" def __init__( - self, script, tmp_file_keys=None, output_dirs=None, data=None + self, script, tmp_file_keys=None, root_dir_key=None, data=None ): data = data or {} if not tmp_file_keys: tmp_file_keys = data.get("tmp_file_keys") or [] - if not output_dirs: - output_dirs = data.get("output_dirs") or {} - 
data["script"] = script data["tmp_file_keys"] = tmp_file_keys - data["output_dirs"] = output_dirs + data["root_dir_key"] = root_dir_key self._script = script self._tmp_file_keys = tmp_file_keys - self._output_dirs = output_dirs + self._root_dir_key = root_dir_key super().__init__(data) def execute(self): @@ -156,10 +153,6 @@ class ExecuteGeorgeScript(BaseCommand): self._script.replace(format_key, output_path) filepath_by_key[key] = output_path - for key, dir_path in self._output_dirs.items(): - format_key = "{" + key + "}" - dir_path = dir_path.replace("\\", "/") - self._script.replace(format_key, dir_path) self.execute_george_through_file(self._script) @@ -176,8 +169,8 @@ class ExecuteGeorgeScript(BaseCommand): def from_existing(cls, data): script = data.pop("script") tmp_file_keys = data.pop("tmp_file_keys", None) - output_dirs = data.pop("output_dirs", None) - return cls(script, tmp_file_keys, output_dirs, data) + root_dir_key = data.pop("root_dir_key", None) + return cls(script, tmp_file_keys, root_dir_key, data) class CollectSceneData(BaseCommand): From 2f979569c0b0124524a01ad072519e4ad5583c9b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 10 Nov 2021 14:45:13 +0100 Subject: [PATCH 118/211] all TVPaintCommands have job queue module --- openpype/hosts/tvpaint/worker/worker_job.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index 8890abc98c..b2513d3740 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -10,6 +10,7 @@ from abc import ABCMeta, abstractmethod, abstractproperty import six from openpype.api import PypeLogger +from openpype.modules import ModulesManager TMP_FILE_PREFIX = "opw_tvp_" @@ -212,11 +213,15 @@ class CollectSceneData(BaseCommand): class TVPaintCommands: - def __init__(self, workfile): + def __init__(self, workfile, job_queue_module=None): self._log = None 
self._workfile = workfile self._commands = [] self._command_classes_by_name = None + if job_queue_module is None: + manager = ModulesManager() + job_queue_module = manager.modules_by_name["job_queue"] + self._job_queue_module = job_queue_module @property def log(self): @@ -257,11 +262,6 @@ class TVPaintCommands: class SenderTVPaintCommand(TVPaintCommands): - def __init__(self, workfile, job_queue_module): - super().__init__(workfile) - - self._job_queue_module = job_queue_module - def commands_data(self): return [ command.command_data() From 3c88483475fbe368001927b1595675a470d90896 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 10 Nov 2021 14:45:17 +0100 Subject: [PATCH 119/211] enhancing settings for baking multiple profiles with defined imageio profile --- .../defaults/project_anatomy/imageio.json | 3 + .../defaults/project_settings/nuke.json | 17 +++++- .../schemas/schema_anatomy_imageio.json | 15 ++++- .../schemas/schema_nuke_publish.json | 55 +++++++++++++++++++ 4 files changed, 86 insertions(+), 4 deletions(-) diff --git a/openpype/settings/defaults/project_anatomy/imageio.json b/openpype/settings/defaults/project_anatomy/imageio.json index fc34ef6813..09ab398c37 100644 --- a/openpype/settings/defaults/project_anatomy/imageio.json +++ b/openpype/settings/defaults/project_anatomy/imageio.json @@ -28,6 +28,9 @@ "viewer": { "viewerProcess": "sRGB" }, + "baking": { + "viewerProcess": "rec709" + }, "workfile": { "colorManagement": "Nuke", "OCIO_config": "nuke-default", diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index e3c7834e4a..67446ca6b9 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -103,7 +103,19 @@ }, "ExtractReviewDataMov": { "enabled": true, - "viewer_lut_raw": false + "viewer_lut_raw": false, + "outputs": { + "baking": { + "filter": { + "task_types": [], + "families": [] + }, + 
"viewer_process_override": "", + "bake_viewer_process": true, + "bake_viewer_input_process": true, + "add_taggs": [] + } + } }, "ExtractSlateFrame": { "viewer_lut_raw": false @@ -130,8 +142,7 @@ }, "LoadClip": { "enabled": true, - "_representations": [ - ], + "_representations": [], "node_name_template": "{class_name}_{ext}" } }, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json index 7423d6fd3e..380ea4a83d 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json @@ -131,6 +131,19 @@ } ] }, + { + "key": "baking", + "type": "dict", + "label": "Extract-review baking profile", + "collapsible": false, + "children": [ + { + "type": "text", + "key": "viewerProcess", + "label": "Viewer Process" + } + ] + }, { "key": "workfile", "type": "dict", @@ -363,7 +376,7 @@ "key": "maya", "type": "dict", "label": "Maya", - "children": [ + "children": [ { "key": "colorManagementPreference", "type": "dict", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json index 74b2592d29..1bdd15c650 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json @@ -167,7 +167,62 @@ "type": "boolean", "key": "viewer_lut_raw", "label": "Viewer LUT raw" + }, + { + "key": "outputs", + "label": "Output Definitions", + "type": "dict-modifiable", + "highlight_content": true, + "object_type": { + "type": "dict", + "children": [ + { + "type": "dict", + "collapsible": false, + "key": "filter", + "label": "Filtering", + "children": [ + { + "key": "task_types", + "label": "Task types", + 
"type": "task-types-enum" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + } + ] + }, + { + "type": "separator" + }, + { + "type": "text", + "key": "viewer_process_override", + "label": "Viewer Process colorspace profile override" + }, + { + "type": "boolean", + "key": "bake_viewer_process", + "label": "Bake Viewer Process" + }, + { + "type": "boolean", + "key": "bake_viewer_input_process", + "label": "Bake Viewer Input Process (LUTs)" + }, + { + "key": "add_taggs", + "label": "Add tags to representations", + "type": "list", + "object_type": "text" + } + ] + } } + ] }, { From a5898c4359b87de04a2c4922314a7e5f443d6d16 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 10 Nov 2021 14:45:35 +0100 Subject: [PATCH 120/211] commands and tvpaint commands has access to work root from job queue settings --- openpype/hosts/tvpaint/worker/worker_job.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index b2513d3740..3403a40e33 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -55,6 +55,11 @@ class BaseCommand: self._command_data = data self._done = False + def work_root(self): + if self._parent is None: + return None + return self._parent.work_root() + def set_parent(self, parent): self._parent = parent @@ -223,6 +228,9 @@ class TVPaintCommands: job_queue_module = manager.modules_by_name["job_queue"] self._job_queue_module = job_queue_module + def work_root(self): + return self._job_queue_module.get_work_root_from_settings() + @property def log(self): if self._log is None: From b3de979cf87b5d4618f5d182e61609630edb145f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 10 Nov 2021 14:46:59 +0100 Subject: [PATCH 121/211] replace work root in use work root in execute goerge script --- openpype/hosts/tvpaint/worker/worker_job.py | 4 ++++ 1 file changed, 4 insertions(+) diff 
--git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index 3403a40e33..a45775b4c3 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -159,6 +159,10 @@ class ExecuteGeorgeScript(BaseCommand): self._script.replace(format_key, output_path) filepath_by_key[key] = output_path + if self._root_dir_key: + work_root = self.work_root() + format_key = "{" + self._root_dir_key + "}" + self._script.replace(format_key, work_root.replace("\\", "/")) self.execute_george_through_file(self._script) From 42e46ded6c4f6314587e6f2ab7187486cb4e6659 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 10 Nov 2021 14:52:00 +0100 Subject: [PATCH 122/211] renamed 'work_root' to 'job_queue_root' --- openpype/hosts/tvpaint/worker/worker_job.py | 12 ++++----- .../publish/collect_tvpaint_workfile_data.py | 14 +++++----- .../default_modules/job_queue/module.py | 26 +++++++++---------- .../defaults/system_settings/modules.json | 2 +- .../schemas/system_schema/schema_modules.json | 6 ++--- 5 files changed, 30 insertions(+), 30 deletions(-) diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index a45775b4c3..477404b74c 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -55,10 +55,10 @@ class BaseCommand: self._command_data = data self._done = False - def work_root(self): + def job_queue_root(self): if self._parent is None: return None - return self._parent.work_root() + return self._parent.job_queue_root() def set_parent(self, parent): self._parent = parent @@ -160,9 +160,9 @@ class ExecuteGeorgeScript(BaseCommand): filepath_by_key[key] = output_path if self._root_dir_key: - work_root = self.work_root() + job_queue_root = self.job_queue_root() format_key = "{" + self._root_dir_key + "}" - self._script.replace(format_key, work_root.replace("\\", "/")) + self._script.replace(format_key, 
job_queue_root.replace("\\", "/")) self.execute_george_through_file(self._script) @@ -232,8 +232,8 @@ class TVPaintCommands: job_queue_module = manager.modules_by_name["job_queue"] self._job_queue_module = job_queue_module - def work_root(self): - return self._job_queue_module.get_work_root_from_settings() + def job_queue_root(self): + return self._job_queue_module.get_jobs_root_from_settings() @property def log(self): diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py index a56eb36270..206d7538c9 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py @@ -34,7 +34,7 @@ class CollectTVPaintWorkfileData(pyblish.api.InstancePlugin): job_queue_module = modules["job_queue"] context_staging_dir = self._create_context_staging_dir( - context, job_queue_module + job_queue_module ) workfile_path = self._extract_workfile_path( context, context_staging_dir @@ -85,16 +85,16 @@ class CollectTVPaintWorkfileData(pyblish.api.InstancePlugin): context.data["layersExposureFrames"] = exposure_frames_by_layer_id context.data["layersPrePostBehavior"] = pre_post_beh_by_layer_id - def _create_context_staging_dir(self, context, job_queue_module): - work_root = job_queue_module.get_work_root() - if not work_root: + def _create_context_staging_dir(self, job_queue_module): + jobs_root = job_queue_module.get_jobs_root() + if not jobs_root: raise ValueError("Work root in JobQueue module is not set.") - if not os.path.exists(work_root): - os.makedirs(work_root) + if not os.path.exists(jobs_root): + os.makedirs(jobs_root) random_folder_name = str(uuid.uuid4()) - full_path = os.path.join(work_root, random_folder_name) + full_path = os.path.join(jobs_root, random_folder_name) if not os.path.exists(full_path): os.makedirs(full_path) return full_path diff --git 
a/openpype/modules/default_modules/job_queue/module.py b/openpype/modules/default_modules/job_queue/module.py index 85eede20c7..4fee48c1dc 100644 --- a/openpype/modules/default_modules/job_queue/module.py +++ b/openpype/modules/default_modules/job_queue/module.py @@ -19,14 +19,14 @@ class JobQueueModule(OpenPypeModule): server_url = modules_settings.get("server_url") or "" self._server_url = self.url_conversion(server_url) - work_root_mapping = modules_settings.get("work_root") - if not work_root_mapping: - work_root_mapping = { + jobs_root_mapping = modules_settings.get("jobs_root") + if not jobs_root_mapping: + jobs_root_mapping = { "windows": "", "linux": "", "darwin": "" } - self._work_root_mapping = work_root_mapping + self._jobs_root_mapping = jobs_root_mapping # Is always enabled # - the module does nothing until is used @@ -61,23 +61,23 @@ class JobQueueModule(OpenPypeModule): return urlunsplit(url_parts) - def get_work_root_mapping(self): - return copy.deepcopy(self._work_root_mapping) + def get_jobs_root_mapping(self): + return copy.deepcopy(self._jobs_root_mapping) - def get_work_root(self): - self._work_root_mapping.get(platform.system().lower()) + def get_jobs_root(self): + self._jobs_root_mapping.get(platform.system().lower()) @classmethod - def get_work_root_from_settings(cls): + def get_jobs_root_from_settings(cls): module_settings = get_system_settings()["modules"] - work_root_mapping = module_settings.get(cls.name, {}).get("work_root") - if not work_root_mapping: - work_root_mapping = { + jobs_root_mapping = module_settings.get(cls.name, {}).get("jobs_root") + if not jobs_root_mapping: + jobs_root_mapping = { "windows": "", "linux": "", "darwin": "" } - return work_root_mapping.get(platform.system().lower()) + return jobs_root_mapping.get(platform.system().lower()) @property def server_url(self): diff --git a/openpype/settings/defaults/system_settings/modules.json b/openpype/settings/defaults/system_settings/modules.json index 
1c4277af34..507d7751e5 100644 --- a/openpype/settings/defaults/system_settings/modules.json +++ b/openpype/settings/defaults/system_settings/modules.json @@ -181,7 +181,7 @@ }, "job_queue": { "server_url": "", - "work_root": { + "jobs_root": { "windows": "", "darwin": "", "linux": "" diff --git a/openpype/settings/entities/schemas/system_schema/schema_modules.json b/openpype/settings/entities/schemas/system_schema/schema_modules.json index 905d53a87d..ba184f8e82 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_modules.json +++ b/openpype/settings/entities/schemas/system_schema/schema_modules.json @@ -258,11 +258,11 @@ }, { "type": "label", - "label": "Work root is used as temporary directory for workers where source is copied and render output can be stored." + "label": "Jobs root is used as temporary directory for workers where source is copied and render output can be stored." }, { - "key": "work_root", - "label": "Work root", + "key": "jobs_root", + "label": "Jobs root", "type": "path", "multipath": false, "multiplatform": true From c19d311c7564af702f8aa283656ab4f8362211f7 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 10 Nov 2021 16:41:38 +0100 Subject: [PATCH 123/211] added settings for layer name regex --- .../publish/collect_tvpaint_instances.py | 6 +++--- .../project_settings/webpublisher.json | 3 +++ .../schema_project_webpublisher.json | 19 ++++++++++++++++++- 3 files changed, 24 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py index 7c574fde5b..ab23b5ff84 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py @@ -3,7 +3,7 @@ Requires: CollectTVPaintWorkfileData Provides: - Instance + Instances """ import os import re @@ -13,8 +13,8 @@ import pyblish.api from openpype.lib import 
get_subset_name_with_asset_doc -class CollectTVPaintWorkfileData(pyblish.api.InstancePlugin): - label = "Collect TVPaint Workfile data" +class CollectTVPaintInstances(pyblish.api.InstancePlugin): + label = "Collect TVPaint Instances" order = pyblish.api.CollectorOrder + 0.1 hosts = ["webpublisher"] targets = ["tvpaint"] diff --git a/openpype/settings/defaults/project_settings/webpublisher.json b/openpype/settings/defaults/project_settings/webpublisher.json index f57b79a609..d0fd7ffaf3 100644 --- a/openpype/settings/defaults/project_settings/webpublisher.json +++ b/openpype/settings/defaults/project_settings/webpublisher.json @@ -115,6 +115,9 @@ "default_task_type": "Default task type" } } + }, + "CollectTVPaintInstances": { + "layer_name_regex": "(?PL[0-9]{3}_\\w+)_(?P.+)" } } } \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json index 91337da2b2..62d4780513 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json @@ -62,8 +62,25 @@ } } ] + }, + { + "type": "dict", + "collapsible": true, + "key": "CollectTVPaintInstances", + "label": "Collect TVPaint Instances", + "children": [ + { + "type": "label", + "label": "Regex helps to extract render layer and pass names from TVPaint layer name.
The regex must contain named groups 'layer' and 'variant' which are used for creation of RenderPass instances.

Example layer name: \"L001_Person_Hand\"
Example regex: \"(?P<layer>L[0-9]{3}_\\w+)_(?P<variant>.+)\"
Extracted layer: \"L001_Person\"
Extracted variant: \"Hand\"" + }, + { + "type": "text", + "key": "layer_name_regex", + "label": "Layer name regex" + } + ] } ] } ] -} \ No newline at end of file +} From a4c62600d74e6e7f7bb20666991addbab1b29efb Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 10 Nov 2021 18:51:24 +0100 Subject: [PATCH 124/211] fix class name --- openpype/hosts/tvpaint/worker/worker_job.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index 477404b74c..72f8365287 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -273,7 +273,7 @@ class TVPaintCommands: ] -class SenderTVPaintCommand(TVPaintCommands): +class SenderTVPaintCommands(TVPaintCommands): def commands_data(self): return [ command.command_data() From 8e8945ce32afc99a6d829706c7da8764ee6ee7a3 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 10 Nov 2021 19:01:12 +0100 Subject: [PATCH 125/211] collect openpype modules earlier --- openpype/plugins/publish/collect_modules.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_modules.py b/openpype/plugins/publish/collect_modules.py index bec0c2b436..8defe1a17b 100644 --- a/openpype/plugins/publish/collect_modules.py +++ b/openpype/plugins/publish/collect_modules.py @@ -7,7 +7,7 @@ import pyblish.api class CollectModules(pyblish.api.ContextPlugin): """Collect OpenPype modules.""" - order = pyblish.api.CollectorOrder + order = pyblish.api.CollectorOrder - 0.4 label = "OpenPype Modules" def process(self, context): From b296ae2d2908c979d6037f0b76f2f95274f8d9bb Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 10 Nov 2021 19:01:43 +0100 Subject: [PATCH 126/211] changed tvpaint related collectors to context collectors --- .../webpublisher/plugins/publish/collect_tvpaint_instances.py | 2 +- .../plugins/publish/collect_tvpaint_workfile_data.py | 4 ++-- 2 files changed, 3 
insertions(+), 3 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py index ab23b5ff84..c1da8646d8 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py @@ -13,7 +13,7 @@ import pyblish.api from openpype.lib import get_subset_name_with_asset_doc -class CollectTVPaintInstances(pyblish.api.InstancePlugin): +class CollectTVPaintInstances(pyblish.api.ContextPlugin): label = "Collect TVPaint Instances" order = pyblish.api.CollectorOrder + 0.1 hosts = ["webpublisher"] diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py index 206d7538c9..dd99d095fe 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py @@ -22,7 +22,7 @@ from openpype.hosts.tvpaint.worker import ( ) -class CollectTVPaintWorkfileData(pyblish.api.InstancePlugin): +class CollectTVPaintWorkfileData(pyblish.api.ContextPlugin): label = "Collect TVPaint Workfile data" order = pyblish.api.CollectorOrder hosts = ["webpublisher"] @@ -30,7 +30,7 @@ class CollectTVPaintWorkfileData(pyblish.api.InstancePlugin): def process(self, context): # Get JobQueue module - modules = context.context.data["openPypeModules"] + modules = context.data["openPypeModules"] job_queue_module = modules["job_queue"] context_staging_dir = self._create_context_staging_dir( From 7aa1cb43efb854015c18846559fc58a6b097dc3c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 10 Nov 2021 19:48:09 +0100 Subject: [PATCH 127/211] few minor fixes --- .../publish/collect_tvpaint_instances.py | 30 +++++++------------ .../publish/collect_tvpaint_workfile_data.py | 4 +-- 
.../publish/extract_tvpaint_workfile.py | 4 +-- openpype/plugins/publish/collect_modules.py | 2 +- 4 files changed, 16 insertions(+), 24 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py index c1da8646d8..890f3cc6af 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py @@ -15,7 +15,7 @@ from openpype.lib import get_subset_name_with_asset_doc class CollectTVPaintInstances(pyblish.api.ContextPlugin): label = "Collect TVPaint Instances" - order = pyblish.api.CollectorOrder + 0.1 + order = pyblish.api.CollectorOrder - 0.35 hosts = ["webpublisher"] targets = ["tvpaint"] @@ -81,6 +81,10 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): continue result = layer_name_regex.search(layer["name"]) + # Layer name not matching layer name regex + # should raise an exception? 
+ if result is None: + continue render_layer = result.group("layer") variant = result.group("variant") @@ -127,12 +131,12 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): dynamic_data=dynamic_data ) instance = self._create_render_layer_instance( - context, subset_name, layers + context, layers, subset_name ) new_instances.append(instance) # Set data same for all instances - scene_fps = context.data["sceneFps"] + scene_fps = context.data["sceneData"]["sceneFps"] frame_start = context.data.get("frameStart") frame_end = context.data.get("frameEnd") @@ -196,19 +200,13 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): def _create_render_pass_instance(self, context, layer, subset_name): # Global instance data modifications # Fill families - instance_label = "{} [{}-{}]".format( - subset_name, - context.data["sceneMarkIn"] + 1, - context.data["sceneMarkOut"] + 1 - ) - return context.create_instance(**{ + "name": subset_name, "subset": subset_name, - "label": instance_label, + "label": subset_name, "family": self.render_pass_family, # Add `review` family for thumbnail integration "families": [self.render_pass_family, "review"], - "fps": context.data["sceneFps"], "representations": [], "layers": [layer] }) @@ -216,19 +214,13 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): def _create_render_layer_instance(self, context, layers, subset_name): # Global instance data modifications # Fill families - instance_label = "{} [{}-{}]".format( - subset_name, - context.data["sceneMarkIn"] + 1, - context.data["sceneMarkOut"] + 1 - ) - return context.create_instance(**{ + "name": subset_name, "subset": subset_name, - "label": instance_label, + "label": subset_name, "family": self.render_pass_family, # Add `review` family for thumbnail integration "families": [self.render_pass_family, "review"], - "fps": context.data["sceneFps"], "representations": [], "layers": layers }) diff --git 
a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py index dd99d095fe..3cfad7c7bd 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py @@ -24,7 +24,7 @@ from openpype.hosts.tvpaint.worker import ( class CollectTVPaintWorkfileData(pyblish.api.ContextPlugin): label = "Collect TVPaint Workfile data" - order = pyblish.api.CollectorOrder + order = pyblish.api.CollectorOrder - 0.4 hosts = ["webpublisher"] targets = ["tvpaint"] @@ -88,7 +88,7 @@ class CollectTVPaintWorkfileData(pyblish.api.ContextPlugin): def _create_context_staging_dir(self, job_queue_module): jobs_root = job_queue_module.get_jobs_root() if not jobs_root: - raise ValueError("Work root in JobQueue module is not set.") + raise ValueError("Job Queue root is not set.") if not os.path.exists(jobs_root): os.makedirs(jobs_root) diff --git a/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py b/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py index 2118982917..a3db74b66f 100644 --- a/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py +++ b/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py @@ -49,7 +49,7 @@ class ExtractTVPaintSequences(pyblish.api.Extractor): handle_end = context.data["handleEnd"] # Get JobQueue module - modules = context.context.data["openPypeModules"] + modules = context.data["openPypeModules"] job_queue_module = modules["job_queue"] tvpaint_commands = SenderTVPaintCommands( @@ -66,7 +66,7 @@ class ExtractTVPaintSequences(pyblish.api.Extractor): after_render_instances = [] for instance in context: - self.log.info("* Preparing commands for instance \"{}\"".format( + self.log.info("* Preparing commands for instance \"{}\"".format( instance.data["label"] )) # Get all layers and filter out 
not visible diff --git a/openpype/plugins/publish/collect_modules.py b/openpype/plugins/publish/collect_modules.py index 8defe1a17b..2f6cb1ef0e 100644 --- a/openpype/plugins/publish/collect_modules.py +++ b/openpype/plugins/publish/collect_modules.py @@ -7,7 +7,7 @@ import pyblish.api class CollectModules(pyblish.api.ContextPlugin): """Collect OpenPype modules.""" - order = pyblish.api.CollectorOrder - 0.4 + order = pyblish.api.CollectorOrder - 0.45 label = "OpenPype Modules" def process(self, context): From 827d7fee0dce4eead14c15bea1d1b0c84be1e70c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 10 Nov 2021 19:48:20 +0100 Subject: [PATCH 128/211] modified called method on worker sender --- openpype/hosts/tvpaint/worker/worker_job.py | 11 ++++++++++- .../job_queue/job_workers/tvpaint_worker.py | 2 +- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index 72f8365287..0d343a6bea 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -86,6 +86,9 @@ class BaseCommand: self._result = result def result(self): + return copy.deepcopy(self._result) + + def response_data(self): return { "id": self.id, "result": self._result, @@ -272,6 +275,12 @@ class TVPaintCommands: for command in self._commands ] + def response_data(self): + return [ + command.response_data() + for command in self._commands + ] + class SenderTVPaintCommands(TVPaintCommands): def commands_data(self): @@ -318,7 +327,7 @@ class SenderTVPaintCommands(TVPaintCommands): job_status = self._job_queue_module.get_job_status(job_id) if job_status["done"]: break - time.sleep(0.3) + time.sleep(1) # Check if job state is done if job_status["state"] != "done": diff --git a/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py b/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py index b44325c1db..cec0213d5b 100644 --- 
a/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py +++ b/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py @@ -75,7 +75,7 @@ class WorkerCommunicator(BaseCommunicator): commands.execute() success = True message = "Executed" - data = commands.result() + data = commands.response_data() self._worker_connection.finish_job(success, message, data) From 4c2debb007ef9bf7511d56f782b324eea667ca2a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 11 Nov 2021 11:40:27 +0100 Subject: [PATCH 129/211] major fixes to work --- openpype/hosts/tvpaint/worker/worker_job.py | 22 +++-- .../publish/collect_tvpaint_instances.py | 33 ++++--- .../publish/collect_tvpaint_workfile_data.py | 15 ++-- .../publish/extract_tvpaint_workfile.py | 89 +++++++++++++------ 4 files changed, 106 insertions(+), 53 deletions(-) diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index 0d343a6bea..1f30777901 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -136,22 +136,26 @@ class ExecuteGeorgeScript(BaseCommand): name = "execute_george_through_file" def __init__( - self, script, tmp_file_keys=None, root_dir_key=None, data=None + self, script_lines, tmp_file_keys=None, root_dir_key=None, data=None ): data = data or {} if not tmp_file_keys: tmp_file_keys = data.get("tmp_file_keys") or [] - data["script"] = script + data["script_lines"] = script_lines data["tmp_file_keys"] = tmp_file_keys data["root_dir_key"] = root_dir_key - self._script = script + self._script_lines = script_lines self._tmp_file_keys = tmp_file_keys self._root_dir_key = root_dir_key super().__init__(data) def execute(self): filepath_by_key = {} + script = self._script_lines + if isinstance(script, list): + script = "\n".join(script) + for key in self._tmp_file_keys: output_file = tempfile.NamedTemporaryFile( mode="w", prefix=TMP_FILE_PREFIX, suffix=".txt", delete=False @@ -159,15 
+163,17 @@ class ExecuteGeorgeScript(BaseCommand): output_file.close() format_key = "{" + key + "}" output_path = output_file.name.replace("\\", "/") - self._script.replace(format_key, output_path) + script = script.replace(format_key, output_path) filepath_by_key[key] = output_path if self._root_dir_key: job_queue_root = self.job_queue_root() format_key = "{" + self._root_dir_key + "}" - self._script.replace(format_key, job_queue_root.replace("\\", "/")) + script = script.replace( + format_key, job_queue_root.replace("\\", "/") + ) - self.execute_george_through_file(self._script) + self.execute_george_through_file(script) result = {} for key, filepath in filepath_by_key.items(): @@ -180,10 +186,10 @@ class ExecuteGeorgeScript(BaseCommand): @classmethod def from_existing(cls, data): - script = data.pop("script") + script_lines = data.pop("script_lines") tmp_file_keys = data.pop("tmp_file_keys", None) root_dir_key = data.pop("root_dir_key", None) - return cls(script, tmp_file_keys, root_dir_key, data) + return cls(script_lines, tmp_file_keys, root_dir_key, data) class CollectSceneData(BaseCommand): diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py index 890f3cc6af..dd0d421447 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py @@ -15,7 +15,7 @@ from openpype.lib import get_subset_name_with_asset_doc class CollectTVPaintInstances(pyblish.api.ContextPlugin): label = "Collect TVPaint Instances" - order = pyblish.api.CollectorOrder - 0.35 + order = pyblish.api.CollectorOrder + 0.2 hosts = ["webpublisher"] targets = ["tvpaint"] @@ -136,14 +136,10 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): new_instances.append(instance) # Set data same for all instances - scene_fps = context.data["sceneData"]["sceneFps"] frame_start = 
context.data.get("frameStart") frame_end = context.data.get("frameEnd") for instance in new_instances: - if instance.data.get("fps") is None: - instance.data["fps"] = scene_fps - if ( instance.data.get("frameStart") is None or instance.data.get("frameEnd") is None @@ -157,6 +153,12 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): if instance.data.get("task") is None: instance.data["task"] = task_name + if "representations" not in instance.data: + instance.data["representations"] = [] + + if "source" not in instance.data: + instance.data["source"] = "webpublisher" + def _create_workfile_instance(self, context, subset_name): workfile_path = context.data["workfilePath"] staging_dir = os.path.dirname(workfile_path) @@ -168,7 +170,7 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): "label": subset_name, "subset": subset_name, "family": self.workfile_family, - "families": [self.workfile_family], + "families": [], "stagingDir": staging_dir, "representations": [{ "name": ext.lstrip("."), @@ -179,8 +181,7 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): }) def _create_review_instance(self, context, subset_name): - context_staging_dir = context.data["contextStagingDir"] - staging_dir = os.path.join(context_staging_dir, subset_name) + staging_dir = self._create_staging_dir(context, subset_name) layers_data = context.data["layersData"] # Filter hidden layers filtered_layers_data = [ @@ -193,11 +194,13 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): "label": subset_name, "subset": subset_name, "family": self.review_family, + "families": [], "layers": filtered_layers_data, "stagingDir": staging_dir }) def _create_render_pass_instance(self, context, layer, subset_name): + staging_dir = self._create_staging_dir(context, subset_name) # Global instance data modifications # Fill families return context.create_instance(**{ @@ -208,10 +211,12 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): # Add `review` family for thumbnail 
integration "families": [self.render_pass_family, "review"], "representations": [], - "layers": [layer] + "layers": [layer], + "stagingDir": staging_dir }) def _create_render_layer_instance(self, context, layers, subset_name): + staging_dir = self._create_staging_dir(context, subset_name) # Global instance data modifications # Fill families return context.create_instance(**{ @@ -222,5 +227,13 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): # Add `review` family for thumbnail integration "families": [self.render_pass_family, "review"], "representations": [], - "layers": layers + "layers": layers, + "stagingDir": staging_dir }) + + def _create_staging_dir(self, context, subset_name): + context_staging_dir = context.data["contextStagingDir"] + staging_dir = os.path.join(context_staging_dir, subset_name) + if not os.path.exists(staging_dir): + os.makedirs(staging_dir) + return staging_dir diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py index 3cfad7c7bd..147b6f3d88 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py @@ -32,10 +32,13 @@ class CollectTVPaintWorkfileData(pyblish.api.ContextPlugin): # Get JobQueue module modules = context.data["openPypeModules"] job_queue_module = modules["job_queue"] + jobs_root = job_queue_module.get_jobs_root() + if not jobs_root: + raise ValueError("Job Queue root is not set.") - context_staging_dir = self._create_context_staging_dir( - job_queue_module - ) + context.data["jobsRoot"] = jobs_root + + context_staging_dir = self._create_context_staging_dir(jobs_root) workfile_path = self._extract_workfile_path( context, context_staging_dir ) @@ -85,11 +88,7 @@ class CollectTVPaintWorkfileData(pyblish.api.ContextPlugin): context.data["layersExposureFrames"] = exposure_frames_by_layer_id 
context.data["layersPrePostBehavior"] = pre_post_beh_by_layer_id - def _create_context_staging_dir(self, job_queue_module): - jobs_root = job_queue_module.get_jobs_root() - if not jobs_root: - raise ValueError("Job Queue root is not set.") - + def _create_context_staging_dir(self, jobs_root): if not os.path.exists(jobs_root): os.makedirs(jobs_root) diff --git a/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py b/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py index a3db74b66f..3016e9683e 100644 --- a/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py +++ b/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py @@ -24,15 +24,22 @@ from PIL import Image class ExtractTVPaintSequences(pyblish.api.Extractor): label = "Extract TVPaint Sequences" hosts = ["webpublisher"] - families = ["review", "renderPass", "renderLayer"] targets = ["tvpaint"] + # Context plugin does not have families filtering + families_filter = ["review", "renderPass", "renderLayer"] + + job_queue_root_key = "jobs_root" + # Modifiable with settings review_bg = [255, 255, 255, 255] def process(self, context): # Get workfle path workfile_path = context.data["workfilePath"] + jobs_root = context.data["jobsRoot"] + jobs_root_slashed = jobs_root.replace("\\", "/") + # Prepare scene data scene_data = context.data["sceneData"] scene_mark_in = scene_data["sceneMarkIn"] @@ -64,8 +71,20 @@ class ExtractTVPaintSequences(pyblish.api.Extractor): ExecuteSimpleGeorgeScript("tv_startframe 0") ) + root_key_replacement = "{" + self.job_queue_root_key + "}" after_render_instances = [] for instance in context: + instance_families = set(instance.data.get("families", [])) + instance_families.add(instance.data["family"]) + valid = False + for family in instance_families: + if family in self.families_filter: + valid = True + break + + if not valid: + continue + self.log.info("* Preparing commands for instance \"{}\"".format( 
instance.data["label"] )) @@ -89,12 +108,12 @@ class ExtractTVPaintSequences(pyblish.api.Extractor): ) ) - # TODO handle this whole staging dir properly # Staging dir must be created during collection - output_dir = instance.data["stagingDir"] - src_root = "c:/" - dst_root = "{worker_root}" - work_output_dir = output_dir.replace(src_root, dst_root) + staging_dir = instance.data["stagingDir"].replace("\\", "/") + + job_root_template = staging_dir.replace( + jobs_root_slashed, root_key_replacement + ) # Frame start/end may be stored as float frame_start = int(instance.data["frameStart"]) @@ -126,18 +145,18 @@ class ExtractTVPaintSequences(pyblish.api.Extractor): # ----------------------------------------------------------------- self.log.debug( - "Files will be rendered to folder: {}".format(output_dir) + "Files will be rendered to folder: {}".format(staging_dir) ) output_filepaths_by_frame_idx = {} for frame_idx in range(mark_in, mark_out + 1): filename = filename_template.format(frame=frame_idx) - filepath = os.path.join(output_dir, filename) + filepath = os.path.join(staging_dir, filename) output_filepaths_by_frame_idx[frame_idx] = filepath # Prepare data for post render processing post_render_data = { - "output_dir": output_dir, + "output_dir": staging_dir, "layers": filtered_layers, "output_filepaths_by_frame_idx": output_filepaths_by_frame_idx, "instance": instance, @@ -152,7 +171,7 @@ class ExtractTVPaintSequences(pyblish.api.Extractor): if instance.data["family"] == "review": self.add_render_review_command( tvpaint_commands, mark_in, mark_out, scene_bg_color, - work_output_dir, filename_template + job_root_template, filename_template ) continue @@ -166,7 +185,8 @@ class ExtractTVPaintSequences(pyblish.api.Extractor): ) filepaths_by_layer_id = self.add_render_command( tvpaint_commands, - work_output_dir, + job_root_template, + staging_dir, filtered_layers, extraction_data_by_layer_id ) @@ -325,7 +345,7 @@ class ExtractTVPaintSequences(pyblish.api.Extractor): 
mark_in, mark_out, scene_bg_color, - work_output_dir, + job_root_template, filename_template ): """ Export images from TVPaint using `tv_savesequence` command. @@ -340,15 +360,17 @@ class ExtractTVPaintSequences(pyblish.api.Extractor): self.log.debug("Preparing data for rendering.") bg_color = self._get_review_bg_color() first_frame_filepath = "/".join([ - work_output_dir, + job_root_template, filename_template.format(frame=mark_in) - ]).replace("\\", "/") + ]) george_script_lines = [ # Change bg color to color from settings "tv_background \"color\" {} {} {}".format(*bg_color), "tv_SaveMode \"PNG\"", - "export_path = \"{}\"".format(first_frame_filepath), + "export_path = \"{}\"".format( + first_frame_filepath.replace("\\", "/") + ), "tv_savesequence '\"'export_path'\"' {} {}".format( mark_in, mark_out ) @@ -366,13 +388,17 @@ class ExtractTVPaintSequences(pyblish.api.Extractor): george_script_lines.append(" ".join(orig_color_command)) tvpaint_commands.add_command( - ExecuteGeorgeScript("\n".join(george_script_lines)) + ExecuteGeorgeScript( + george_script_lines, + root_dir_key=self.job_queue_root_key + ) ) def add_render_command( self, tvpaint_commands, - work_output_dir, + job_root_template, + staging_dir, layers, extraction_data_by_layer_id ): @@ -402,20 +428,26 @@ class ExtractTVPaintSequences(pyblish.api.Extractor): filenames_by_frame_index = render_data["filenames_by_frame_index"] filepaths_by_frame = {} + command_filepath_by_frame = {} for frame_idx, ref_idx in frame_references.items(): # None reference is skipped because does not have source if ref_idx is None: filepaths_by_frame[frame_idx] = None continue filename = filenames_by_frame_index[frame_idx] - dst_path = "/".join([work_output_dir, filename]) - filepaths_by_frame[frame_idx] = dst_path - if frame_idx != ref_idx: - continue - filepaths_by_layer_id[layer_id] = self._add_render_layer_command( - tvpaint_commands, layer, filepaths_by_frame + filepaths_by_frame[frame_idx] = os.path.join( + staging_dir, 
filename + ) + if frame_idx == ref_idx: + command_filepath_by_frame[frame_idx] = "/".join( + [job_root_template, filename] + ) + + self._add_render_layer_command( + tvpaint_commands, layer, command_filepath_by_frame ) + filepaths_by_layer_id[layer_id] = filepaths_by_frame return filepaths_by_layer_id @@ -430,7 +462,6 @@ class ExtractTVPaintSequences(pyblish.api.Extractor): "tv_SaveMode \"PNG\"" ] - filepaths_by_frame = {} for frame_idx, filepath in filepaths_by_frame.items(): if filepath is None: continue @@ -438,12 +469,16 @@ class ExtractTVPaintSequences(pyblish.api.Extractor): # Go to frame george_script_lines.append("tv_layerImage {}".format(frame_idx)) # Store image to output - george_script_lines.append("tv_saveimage \"{}\"".format(filepath)) + george_script_lines.append( + "tv_saveimage \"{}\"".format(filepath.replace("\\", "/")) + ) tvpaint_commands.add_command( - ExecuteGeorgeScript("\n".join(george_script_lines)) + ExecuteGeorgeScript( + george_script_lines, + root_dir_key=self.job_queue_root_key + ) ) - return filepaths_by_frame def _finish_layer_render( self, From 340285148516d653673723efd7cf6d81323d8b5b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 11 Nov 2021 12:00:22 +0100 Subject: [PATCH 130/211] added cleaunp plugin which removed content of job root --- .../publish/others_cleanup_job_root.py | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 openpype/hosts/webpublisher/plugins/publish/others_cleanup_job_root.py diff --git a/openpype/hosts/webpublisher/plugins/publish/others_cleanup_job_root.py b/openpype/hosts/webpublisher/plugins/publish/others_cleanup_job_root.py new file mode 100644 index 0000000000..fc5cd1ea9a --- /dev/null +++ b/openpype/hosts/webpublisher/plugins/publish/others_cleanup_job_root.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- +"""Cleanup leftover files from publish.""" +import os +import shutil +import pyblish.api + + +class CleanUpJobRoot(pyblish.api.ContextPlugin): + """Cleans up the job root 
directory after a successful publish. + + Remove all files in job root as all of them should be published. + """ + + order = pyblish.api.IntegratorOrder + 1 + label = "Clean Up Job Root" + optional = True + active = True + + def process(self, context): + context_staging_dir = context.data.get("contextStagingDir") + if not context_staging_dir: + self.log.info("Key 'contextStagingDir' is empty.") + + elif not os.path.exists(context_staging_dir): + self.log.info(( + "Job root directory for this publish does not" + " exists anymore \"{}\"." + ).format(context_staging_dir)) + else: + self.log.info("Deleting job root with all files.") + shutil.rmtree(context_staging_dir) From 57058f1ea4e3f075ca002195fe7c35d92f6acd7b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 11 Nov 2021 12:29:57 +0100 Subject: [PATCH 131/211] added frame range validation --- .../publish/validate_tvpaint_workfile_data.py | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py diff --git a/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py new file mode 100644 index 0000000000..b70145e838 --- /dev/null +++ b/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py @@ -0,0 +1,37 @@ +import json + +import pyblish.api + + +class ValidateWorkfileData(pyblish.api.ContextPlugin): + """Validate mark in and out are enabled and it's duration. + + Mark In/Out does not have to match frameStart and frameEnd but duration is + important. 
+ """ + + label = "Validate Workfile Data" + order = pyblish.api.ValidatorOrder + + def process(self, context): + # Data collected in `CollectAvalonEntities` + frame_start = context.data["frameStart"] + frame_end = context.data["frameEnd"] + handle_start = context.data["handleStart"] + handle_end = context.data["handleEnd"] + + scene_data = context.data["sceneData"] + scene_mark_in = scene_data["sceneMarkIn"] + scene_mark_out = scene_data["sceneMarkIn"] + + expected_range = ( + (frame_end - frame_start + 1) + + handle_start + + handle_end + ) + marks_range = scene_mark_out - scene_mark_in + 1 + if expected_range != marks_range: + raise AssertionError(( + "Wrong Mark In/Out range." + " Expected range is {} frames got {} frames" + ).format(expected_range, marks_range)) From e0e3ae1d1913faa8bbe466f10ac8bbc6cd85b5e0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 11 Nov 2021 12:39:35 +0100 Subject: [PATCH 132/211] removed unused import --- .../plugins/publish/validate_tvpaint_workfile_data.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py index b70145e838..652209a83e 100644 --- a/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py @@ -1,5 +1,3 @@ -import json - import pyblish.api From 3ee344ecb56023f24fa5acacce456fb9940f44c8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 11 Nov 2021 14:47:36 +0100 Subject: [PATCH 133/211] fixed hound imports --- .../plugins/publish/extract_tvpaint_workfile.py | 3 --- openpype/modules/default_modules/job_queue/module.py | 9 +++++---- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py b/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py index 
3016e9683e..18f47fdade 100644 --- a/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py +++ b/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py @@ -1,6 +1,5 @@ import os import copy -import tempfile from openpype.hosts.tvpaint.worker import ( SenderTVPaintCommands, @@ -9,8 +8,6 @@ from openpype.hosts.tvpaint.worker import ( ) import pyblish.api -from avalon.tvpaint import lib -from openpype.hosts.tvpaint.api.lib import composite_images from openpype.hosts.tvpaint.lib import ( calculate_layers_extraction_data, get_frame_filename_template, diff --git a/openpype/modules/default_modules/job_queue/module.py b/openpype/modules/default_modules/job_queue/module.py index 4fee48c1dc..4e47df63fa 100644 --- a/openpype/modules/default_modules/job_queue/module.py +++ b/openpype/modules/default_modules/job_queue/module.py @@ -2,10 +2,6 @@ import sys import json import copy import platform -if sys.version_info[0] == 2: - from urlparse import urlsplit, urlunsplit -else: - from urllib.parse import urlsplit, urlunsplit import click from openpype.modules import OpenPypeModule @@ -34,6 +30,11 @@ class JobQueueModule(OpenPypeModule): @staticmethod def url_conversion(url, ws=False): + if sys.version_info[0] == 2: + from urlparse import urlsplit, urlunsplit + else: + from urllib.parse import urlsplit, urlunsplit + if not url: return url From bedb58daa79568ed0c7a6697ff154a1d201ee86b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 11 Nov 2021 15:10:17 +0100 Subject: [PATCH 134/211] moved tvpaint worker to tvpaint host --- openpype/hosts/tvpaint/worker/__init__.py | 5 ++- .../tvpaint/worker/worker.py} | 41 ++++++++++++++----- .../default_modules/job_queue/module.py | 2 +- 3 files changed, 35 insertions(+), 13 deletions(-) rename openpype/{modules/default_modules/job_queue/job_workers/tvpaint_worker.py => hosts/tvpaint/worker/worker.py} (69%) diff --git a/openpype/hosts/tvpaint/worker/__init__.py b/openpype/hosts/tvpaint/worker/__init__.py index 
b60ce83ada..69208a7566 100644 --- a/openpype/hosts/tvpaint/worker/__init__.py +++ b/openpype/hosts/tvpaint/worker/__init__.py @@ -7,6 +7,7 @@ from .worker_job import ( ProcessTVPaintCommands ) +from .worker import main __all__ = ( "JobFailed", @@ -14,5 +15,7 @@ __all__ = ( "ExecuteGeorgeScript", "CollectSceneData", "SenderTVPaintCommands", - "ProcessTVPaintCommands" + "ProcessTVPaintCommands", + + "main" ) diff --git a/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py b/openpype/hosts/tvpaint/worker/worker.py similarity index 69% rename from openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py rename to openpype/hosts/tvpaint/worker/worker.py index cec0213d5b..738656fa91 100644 --- a/openpype/modules/default_modules/job_queue/job_workers/tvpaint_worker.py +++ b/openpype/hosts/tvpaint/worker/worker.py @@ -2,15 +2,20 @@ import signal import time import asyncio -from openpype.hosts.tvpaint.worker import ProcessTVPaintCommands from avalon.tvpaint.communication_server import ( BaseCommunicator, CommunicationWrapper ) -from .base_worker import WorkerJobsConnection +from openpype_modules.job_queue.job_workers import WorkerJobsConnection + +from .worker_job import ProcessTVPaintCommands -class WorkerCommunicator(BaseCommunicator): +class TVPaintWorkerCommunicator(BaseCommunicator): + """Modified commuicator which cares about processing jobs. + + Received jobs are send to TVPaint by parsing 'ProcessTVPaintCommands'. 
+ """ def __init__(self, server_url): super().__init__() @@ -19,6 +24,7 @@ class WorkerCommunicator(BaseCommunicator): self._worker_connection = None def _start_webserver(self): + """Create connection to workers server before TVPaint server.""" loop = self.websocket_server.loop self._worker_connection = WorkerJobsConnection( self._server_url, "tvpaint", loop @@ -32,15 +38,18 @@ class WorkerCommunicator(BaseCommunicator): def _on_client_connect(self, *args, **kwargs): super()._on_client_connect(*args, **kwargs) + # Register as "ready to work" worker self._worker_connection.register_as_worker() def stop(self): + """Stop worker connection and TVPaint server.""" self._worker_connection.stop() self.return_code = 0 super().stop() @property def current_job(self): + """Retrieve job which should be processed.""" if self._worker_connection: return self._worker_connection.current_job return None @@ -63,23 +72,33 @@ class WorkerCommunicator(BaseCommunicator): if job is None: return + # Prepare variables used for sendig success = False message = "Unknown function" data = None job_data = job["data"] workfile = job_data["workfile"] + # Currently can process only "commands" function if job_data.get("function") == "commands": - commands = ProcessTVPaintCommands( - workfile, job_data["commands"], self - ) - commands.execute() - success = True - message = "Executed" - data = commands.response_data() + try: + commands = ProcessTVPaintCommands( + workfile, job_data["commands"], self + ) + commands.execute() + data = commands.response_data() + success = True + message = "Executed" + + except Exception as exc: + message = "Error on worker: {}".format(str(exc)) self._worker_connection.finish_job(success, message, data) def main_loop(self): + """Main loop where jobs are processed. + + Server is stopped by killing this process or TVPaint process. 
+ """ while self.server_is_running: if self._check_process(): self._process_job() @@ -89,7 +108,7 @@ class WorkerCommunicator(BaseCommunicator): def _start_tvpaint(tvpaint_executable_path, server_url): - communicator = WorkerCommunicator(server_url) + communicator = TVPaintWorkerCommunicator(server_url) CommunicationWrapper.set_communicator(communicator) communicator.launch([tvpaint_executable_path]) diff --git a/openpype/modules/default_modules/job_queue/module.py b/openpype/modules/default_modules/job_queue/module.py index 4e47df63fa..96047e87d0 100644 --- a/openpype/modules/default_modules/job_queue/module.py +++ b/openpype/modules/default_modules/job_queue/module.py @@ -148,7 +148,7 @@ class JobQueueModule(OpenPypeModule): @classmethod def _start_tvpaint_worker(cls, app, server_url): - from .job_workers.tvpaint_worker import main + from openpype.hosts.tvpaint.worker import main executable = app.find_executable() if not executable: From 83ba1115a4cecdd4e71f8f7746b27348fd065236 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 11 Nov 2021 16:08:05 +0100 Subject: [PATCH 135/211] added few docstrings --- openpype/hosts/tvpaint/worker/worker_job.py | 145 +++++++++++++++++- .../job_queue/job_workers/base_worker.py | 10 ++ 2 files changed, 153 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index 1f30777901..308fbf3bd7 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -33,6 +33,17 @@ class JobFailed(Exception): @six.add_metaclass(ABCMeta) class BaseCommand: + """Abstract TVPaint command which can be executed through worker. + + Each command must have unique name and implemented 'execute' and + 'from_existing' methods. + + Command also have id which is created on command creation. 
+ + The idea is that command is just a data container on sender side send + througth server to a worker where is replicated one by one, executed and + result sent back to sender through server. + """ @abstractproperty def name(self): """Command name (must be unique).""" @@ -44,6 +55,7 @@ class BaseCommand: else: data = copy.deepcopy(data) + # Use 'id' from data when replicating on process side command_id = data.get("id") if command_id is None: command_id = str(uuid4()) @@ -56,6 +68,11 @@ class BaseCommand: self._done = False def job_queue_root(self): + """Access to job queue root. + + Job queue root is shared access point to files shared across senders + and workers. + """ if self._parent is None: return None return self._parent.job_queue_root() @@ -65,30 +82,41 @@ class BaseCommand: @property def id(self): + """Command id.""" return self._command_data["id"] @property def parent(self): + """Parent of command expected type of 'TVPaintCommands'.""" return self._parent @property def communicator(self): + """TVPaint communicator. + + Available only on worker side. 
+ """ return self._parent.communicator @property def done(self): + """Is command done.""" return self._done def set_done(self): + """Change state of done.""" self._done = True def set_result(self, result): + """Set result of executed command.""" self._result = result def result(self): + """Result of command.""" return copy.deepcopy(self._result) def response_data(self): + """Data send as response to sender.""" return { "id": self.id, "result": self._result, @@ -96,25 +124,35 @@ class BaseCommand: } def command_data(self): + """Raw command data.""" return copy.deepcopy(self._command_data) @abstractmethod def execute(self): + """Execute command on worker side.""" pass @classmethod @abstractmethod def from_existing(cls, data): + """Recreate object based on passed data.""" pass def execute_george(self, george_script): + """Execute george script in TVPaint.""" return self.parent.execute_george(george_script) def execute_george_through_file(self, george_script): + """Execute george script through temp file in TVPaint.""" return self.parent.execute_george_through_file(george_script) class ExecuteSimpleGeorgeScript(BaseCommand): + """Execute simple george script in TVPaint. + + Args: + script(str): Script that will be executed. + """ name = "execute_george_simple" def __init__(self, script, data=None): @@ -133,6 +171,18 @@ class ExecuteSimpleGeorgeScript(BaseCommand): class ExecuteGeorgeScript(BaseCommand): + """Execute multiline george script in TVPaint. + + Args: + script_lines(list): Lines that will be executed in george script + through temp george file. + tmp_file_keys(list): List of formatting keys in george script that + require replacement with path to a temp file where result will be + stored. The content of file is stored to result by the key. + root_dir_key(str): Formatting key that will be replaced in george + script with job queue root which can be different on worker side. + data(dict): Raw data about command. 
+ """ name = "execute_george_through_file" def __init__( @@ -156,6 +206,7 @@ class ExecuteGeorgeScript(BaseCommand): if isinstance(script, list): script = "\n".join(script) + # Replace temporary files in george script for key in self._tmp_file_keys: output_file = tempfile.NamedTemporaryFile( mode="w", prefix=TMP_FILE_PREFIX, suffix=".txt", delete=False @@ -166,6 +217,7 @@ class ExecuteGeorgeScript(BaseCommand): script = script.replace(format_key, output_path) filepath_by_key[key] = output_path + # Replace job queue root in script if self._root_dir_key: job_queue_root = self.job_queue_root() format_key = "{" + self._root_dir_key + "}" @@ -173,8 +225,10 @@ class ExecuteGeorgeScript(BaseCommand): format_key, job_queue_root.replace("\\", "/") ) + # Execute the script self.execute_george_through_file(script) + # Store result of temporary files result = {} for key, filepath in filepath_by_key.items(): with open(filepath, "r") as stream: @@ -186,6 +240,7 @@ class ExecuteGeorgeScript(BaseCommand): @classmethod def from_existing(cls, data): + """Recreate the object from data.""" script_lines = data.pop("script_lines") tmp_file_keys = data.pop("tmp_file_keys", None) root_dir_key = data.pop("root_dir_key", None) @@ -193,6 +248,11 @@ class ExecuteGeorgeScript(BaseCommand): class CollectSceneData(BaseCommand): + """Helper command which will collect all usefull info about workfile. + + Result is dictionary with all layers data, exposure frames by layer ids + pre/post behavior of layers by their ids, group information and scene data. + """ name = "collect_scene_data" def execute(self): @@ -230,10 +290,22 @@ class CollectSceneData(BaseCommand): return cls(data) +@six.add_metaclass(ABCMeta) class TVPaintCommands: + """Wrapper around TVPaint commands to be able send multiple commands. + + Commands may send one or multiple commands at once. Also gives api access + for commands info. + + Base for sender and receiver which are extending the logic for their + purposes. 
One of differences is preparation of workfile path. + + Args: + workfile(str): Path to workfile. + job_queue_module(JobQueueModule): Object of OpenPype module JobQueue. + """ def __init__(self, workfile, job_queue_module=None): self._log = None - self._workfile = workfile self._commands = [] self._command_classes_by_name = None if job_queue_module is None: @@ -241,17 +313,31 @@ class TVPaintCommands: job_queue_module = manager.modules_by_name["job_queue"] self._job_queue_module = job_queue_module + self._workfile = self._prepare_workfile(workfile) + + @abstractmethod + def _prepare_workfile(self, workfile): + """Modification of workfile path on initialization to match platorm.""" + pass + def job_queue_root(self): + """Job queue root for current platform using current settings.""" return self._job_queue_module.get_jobs_root_from_settings() @property def log(self): + """Access to logger object.""" if self._log is None: self._log = PypeLogger.get_logger(self.__class__.__name__) return self._log @property def classes_by_name(self): + """Prepare commands classes for validation and recreation of commands. + + It is expected that all commands are defined in this python file so + we're looking for all implementation of BaseCommand in globals. 
+ """ if self._command_classes_by_name is None: command_classes_by_name = {} for attr in globals().values(): @@ -272,16 +358,19 @@ class TVPaintCommands: return self._command_classes_by_name def add_command(self, command): + """Add command to process.""" command.set_parent(self) self._commands.append(command) def result(self): + """Result of commands in list in which they were processed.""" return [ command.result() for command in self._commands ] def response_data(self): + """Data which should be send from worker.""" return [ command.response_data() for command in self._commands @@ -289,13 +378,30 @@ class TVPaintCommands: class SenderTVPaintCommands(TVPaintCommands): + """Sender implementation of TVPaint Commands.""" + def _prepare_workfile(self, workfile): + """Remove job queue root from workfile path. + + It is expected that worker will add it's root before passed workfile. + """ + new_workfile = workfile.replace("\\", "/") + job_queue_root = self.job_queue_root.replace("\\", "/") + if job_queue_root not in new_workfile: + raise ValueError(( + "Workfile is not located in JobQueue root." + " Workfile path: \"{}\". JobQueue root: \"{}\"" + ).format(workfile, job_queue_root)) + return new_workfile.replace(job_queue_root, "") + def commands_data(self): + """Commands data to be able recreate them.""" return [ command.command_data() for command in self._commands ] def to_job_data(self): + """Convert commands to job data before sending to workers server.""" return { "workfile": self._workfile, "function": "commands", @@ -314,6 +420,7 @@ class SenderTVPaintCommands(TVPaintCommands): command.set_done() def _send_job(self): + """Send job to a workers server.""" # Send job data to job queue server job_data = self.to_job_data() self.log.debug("Sending job to JobQueue server.\n{}".format( @@ -328,6 +435,13 @@ class SenderTVPaintCommands(TVPaintCommands): return job_id def send_job_and_wait(self): + """Send job to workers server and wait for response. 
+ + Result of job is stored into the object. + + Raises: + JobFailed: When job was finished but not successfully. + """ job_id = self._send_job() while True: job_status = self._job_queue_module.get_job_status(job_id) @@ -345,6 +459,13 @@ class SenderTVPaintCommands(TVPaintCommands): class ProcessTVPaintCommands(TVPaintCommands): + """Worker side of TVPaint Commands. + + It is expected this object is created only on worker's side from existing + data loaded from job. + + Workfile path logic is based on 'SenderTVPaintCommands'. + """ def __init__(self, workfile, commands, communicator): super(ProcessTVPaintCommands, self).__init__(workfile) @@ -352,11 +473,22 @@ class ProcessTVPaintCommands(TVPaintCommands): self.commands_from_data(commands) + def _prepare_workfile(self, workfile): + """Preprend job queue root before passed workfile.""" + workfile = workfile.replace("\\", "/") + job_queue_root = self.job_queue_root.replace("\\", "/") + new_workfile = "/".join([job_queue_root, workfile]) + while "//" in new_workfile: + new_workfile = new_workfile.replace("//", "/") + return os.path.normpath(new_workfile) + @property def communicator(self): + """Access to TVPaint communicator.""" return self._communicator def commands_from_data(self, commands_data): + """Recreate command from passed data.""" for command_data in commands_data: command_name = command_data["command"] @@ -365,9 +497,11 @@ class ProcessTVPaintCommands(TVPaintCommands): self.add_command(command) def execute_george(self, george_script): + """Helper method to execute george script.""" return self.communicator.execute_george(george_script) def execute_george_through_file(self, george_script): + """Helper method to execute george script through temp file.""" temporary_file = tempfile.NamedTemporaryFile( mode="w", prefix=TMP_FILE_PREFIX, suffix=".grg", delete=False ) @@ -378,19 +512,26 @@ class ProcessTVPaintCommands(TVPaintCommands): os.remove(temp_file_path) def _open_workfile(self): - workfile = 
self._workfile.replace("\\", "/") + """Open workfile in TVPaint.""" + workfile = self._workfile print("Opening workfile {}".format(workfile)) george_script = "tv_LoadProject '\"'\"{}\"'\"'".format(workfile) self.execute_george_through_file(george_script) def _close_workfile(self): + """Close workfile in TVPaint.""" print("Closing workfile") self.execute_george_through_file("tv_projectclose") def execute(self): + """Execute commands.""" + # First open the workfile self._open_workfile() + # Execute commands one by one + # TODO maybe stop processing when command fails? print("Commands execution started ({})".format(len(self._commands))) for command in self._commands: command.execute() command.set_done() + # Finally close workfile self._close_workfile() diff --git a/openpype/modules/default_modules/job_queue/job_workers/base_worker.py b/openpype/modules/default_modules/job_queue/job_workers/base_worker.py index 2336f91be2..85506565f4 100644 --- a/openpype/modules/default_modules/job_queue/job_workers/base_worker.py +++ b/openpype/modules/default_modules/job_queue/job_workers/base_worker.py @@ -44,6 +44,13 @@ class WorkerClient(JsonRpcClient): class WorkerJobsConnection: + """WS connection to Job server. + + Helper class to create a connection to process jobs from job server. + + To be able receive jobs is needed to create a connection and then register + as worker for specific host. 
+ """ retry_time_seconds = 5 def __init__(self, server_url, host_name, loop=None): @@ -73,6 +80,7 @@ class WorkerJobsConnection: return None def finish_job(self, success=True, message=None, data=None): + """Worker finished job and sets the result which is send to server.""" if self.client is None: print(( "Couldn't sent job status to server because" @@ -82,6 +90,7 @@ class WorkerJobsConnection: self.client.finish_job(success, message, data) async def main_loop(self, register_worker=True): + """Main loop of connection which keep connection to server alive.""" self._is_running = True while not self._stopped: @@ -156,6 +165,7 @@ class WorkerJobsConnection: await self._stop_cleanup() def register_as_worker(self): + """Register as worker ready to work on server side.""" asyncio.ensure_future(self._register_as_worker(), loop=self._loop) async def _register_as_worker(self): From 1ac6b90f066389247248770aaf72c51d7120dd28 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 11 Nov 2021 16:23:09 +0100 Subject: [PATCH 136/211] added docstring to job queue module --- .../default_modules/job_queue/module.py | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/openpype/modules/default_modules/job_queue/module.py b/openpype/modules/default_modules/job_queue/module.py index 96047e87d0..5eb2d9ca28 100644 --- a/openpype/modules/default_modules/job_queue/module.py +++ b/openpype/modules/default_modules/job_queue/module.py @@ -1,3 +1,40 @@ +"""Job queue OpenPype module was created for remote execution of commands. + +## Why is needed +Primarily created for hosts which are not easilly controlled from command line +or in headless mode and is easier to keep one process of host running listening +for jobs to do. + +### Example +One of examples is TVPaint which does not have headless mode, can run only one +process at one time and it's impossible to know what should be executed inside +TVPaint before we know all data about the file that should be processed. 
+ +## Idea +Idea is that there is a server, workers and workstation/s which need to process +something on a worker. All of them must have access to job queue root which +can be set in settings. Root is used as temp where files needed for job are +stored before sending it or result files are stored when job is done. + +Workers and workstation/s must have access to server through ip adress to it's +running instance. Workers use WebSockets and workstations are using HTTP calls. + +Server's address must be set in settings when is running so workers and +workstations know where to send or receive jobs. + +## Command line commands +### start_server +- start server which is handles jobs +- it is possible to specify port and host address (default is localhost:8079) + +### start_worker +- start worker which will process jobs +- has required possitional argument which is application name from OpenPype + settings e.g. 'tvpaint/11-5' ('tvpaint' is group '11-5' is variant) +- it is possible to specify server url but url from settings is used when not + passed (this is added mainly for developing purposes) +""" + import sys import json import copy From 9a4d2e8f4d9349fb6c243cada53e42acc2cdd859 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 11 Nov 2021 16:45:52 +0100 Subject: [PATCH 137/211] modified docstring --- openpype/modules/default_modules/job_queue/module.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/openpype/modules/default_modules/job_queue/module.py b/openpype/modules/default_modules/job_queue/module.py index 5eb2d9ca28..665f18f7a7 100644 --- a/openpype/modules/default_modules/job_queue/module.py +++ b/openpype/modules/default_modules/job_queue/module.py @@ -12,12 +12,13 @@ TVPaint before we know all data about the file that should be processed. ## Idea Idea is that there is a server, workers and workstation/s which need to process -something on a worker. All of them must have access to job queue root which -can be set in settings. 
Root is used as temp where files needed for job are -stored before sending it or result files are stored when job is done. +something on a worker. -Workers and workstation/s must have access to server through ip adress to it's +Workers and workstation/s must have access to server through adress to it's running instance. Workers use WebSockets and workstations are using HTTP calls. +Also both of them must have access to job queue root which is set in +settings. Root is used as temp where files needed for job can be stored before +sending the job or where result files are stored when job is done. Server's address must be set in settings when is running so workers and workstations know where to send or receive jobs. From 3d9244a39929cafd235acdac72cd5d3b53bb2b47 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 12 Nov 2021 11:21:41 +0100 Subject: [PATCH 138/211] nuke: reversing baking toggles from write node --- openpype/hosts/nuke/api/lib.py | 35 +++++++--------------------------- 1 file changed, 7 insertions(+), 28 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index bea2fe47a9..2c91ebc5ae 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -37,6 +37,10 @@ opnl.workfiles_launched = False opnl._node_tab_name = "{}".format(os.getenv("AVALON_LABEL") or "Avalon") +def get_nuke_imageio_settings(): + return get_anatomy_settings(opnl.project_name)["imageio"]["nuke"] + + def get_created_node_imageio_setting(**kwarg): ''' Get preset data for dataflow (fileType, compression, bitDepth) ''' @@ -47,8 +51,7 @@ def get_created_node_imageio_setting(**kwarg): assert any([creator, nodeclass]), nuke.message( "`{}`: Missing mandatory kwargs `host`, `cls`".format(__file__)) - imageio = get_anatomy_settings(opnl.project_name)["imageio"] - imageio_nodes = imageio["nuke"]["nodes"]["requiredNodes"] + imageio_nodes = get_nuke_imageio_settings()["nodes"]["requiredNodes"] imageio_node = None for node in 
imageio_nodes: @@ -66,8 +69,7 @@ def get_imageio_input_colorspace(filename): ''' Get input file colorspace based on regex in settings. ''' imageio_regex_inputs = ( - get_anatomy_settings(opnl.project_name) - ["imageio"]["nuke"]["regexInputs"]["inputs"]) + get_nuke_imageio_settings()["regexInputs"]["inputs"]) preset_clrsp = None for regexInput in imageio_regex_inputs: @@ -478,7 +480,6 @@ def create_write_node(name, data, input=None, prenodes=None, if review: add_review_knob(GN) - add_bake_colorspace_knob(GN) # add divider GN.addKnob(nuke.Text_Knob('', 'Rendering')) @@ -567,27 +568,6 @@ def add_review_knob(node): if "review" not in node.knobs(): knob = nuke.Boolean_Knob("review", "Review") knob.setValue(True) - knob.setFlag(nuke.STARTLINE) - node.addKnob(knob) - return node - - -def add_bake_colorspace_knob(node): - ''' Adds additional bake colorspace knob to given node - - Arguments: - node (obj): nuke node object to be fixed - - Return: - node (obj): with added knob - ''' - if "bake_colorspace" not in node.knobs(): - knob = nuke.Boolean_Knob("bake_colorspace", "Bake colorspace") - knob.setValue(True) - node.addKnob(knob) - if "bake_viewer_input" not in node.knobs(): - knob = nuke.Boolean_Knob("bake_viewer_input", "Bake viewer input") - knob.setValue(True) node.addKnob(knob) return node @@ -923,8 +903,7 @@ class WorkfileSettings(object): ''' Setting colorpace following presets ''' # get imageio - imageio = get_anatomy_settings(opnl.project_name)["imageio"] - nuke_colorspace = imageio["nuke"] + nuke_colorspace = get_nuke_imageio_settings() try: self.set_root_colorspace(nuke_colorspace["workfile"]) From 9f86e8d3e322bb3fc3f531271f01301feca89b0e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 12 Nov 2021 11:23:20 +0100 Subject: [PATCH 139/211] Nuke: adding settings to plugin orchestrating baking workflow also removing rest of the on write node toggles --- openpype/hosts/nuke/api/plugin.py | 86 +++++++++--------- .../publish/extract_review_data_mov.py | 87 
++++++++++++++----- .../plugins/publish/precollect_instances.py | 19 +--- .../defaults/project_settings/nuke.json | 4 +- .../schemas/schema_nuke_publish.json | 15 +++- 5 files changed, 129 insertions(+), 82 deletions(-) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index b753bc0965..7b8af96df0 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -117,8 +117,6 @@ class ExporterReview(object): self.log = klass.log self.instance = instance - self.bake_colorspace = instance.data["bakeColorspace"] - self.bake_viewer_input = instance.data["bakeViewerInput"] self.path_in = self.instance.data.get("path", None) self.staging_dir = self.instance.data["stagingDir"] self.collection = self.instance.data.get("collection", None) @@ -146,11 +144,10 @@ class ExporterReview(object): self.fhead = self.fhead.replace("#", "")[:-1] def get_representation_data(self, tags=None, range=False): - add_tags = [] - if tags: - add_tags = tags + add_tags = tags or [] repre = { + 'outputName': self.name, 'name': self.name, 'ext': self.ext, 'files': self.file, @@ -166,7 +163,7 @@ class ExporterReview(object): self.data["representations"].append(repre) - def get_view_process_node(self): + def get_view_input_process_node(self): """ Will get any active view process. @@ -197,6 +194,11 @@ class ExporterReview(object): return ipn + def get_imageio_baking_profile(self): + from . 
import lib as opnlib + nuke_imageio = opnlib.get_nuke_imageio_settings() + return nuke_imageio["baking"]["viewerProcess"] + class ExporterReviewLut(ExporterReview): """ @@ -249,6 +251,10 @@ class ExporterReviewLut(ExporterReview): self.log.info("Deleted nodes...") def generate_lut(self): + bake_viewer_process = kwargs["bake_viewer_process"] + bake_viewer_input_process_node = kwargs[ + "bake_viewer_input_process"] + # ---------- start nodes creation # CMSTestPattern @@ -259,10 +265,10 @@ class ExporterReviewLut(ExporterReview): self.previous_node = cms_node self.log.debug("CMSTestPattern... `{}`".format(self._temp_nodes)) - if self.bake_colorspace: + if bake_viewer_process: # Node View Process - if self.bake_viewer_input: - ipn = self.get_view_process_node() + if bake_viewer_input_process_node: + ipn = self.get_view_input_process_node() if ipn is not None: # connect ipn.setInput(0, self.previous_node) @@ -336,8 +342,6 @@ class ExporterReviewMov(ExporterReview): # deal with now lut defined in viewer lut self.viewer_lut_raw = klass.viewer_lut_raw - self.bake_colorspace_fallback = klass.bake_colorspace_fallback - self.bake_colorspace_main = klass.bake_colorspace_main self.write_colorspace = instance.data["colorspace"] self.name = name or "baked" @@ -380,7 +384,26 @@ class ExporterReviewMov(ExporterReview): self.log.info("Nodes exported...") return path - def generate_mov(self, farm=False): + def generate_mov(self, farm=False, **kwargs): + bake_viewer_process = kwargs["bake_viewer_process"] + bake_viewer_input_process_node = kwargs[ + "bake_viewer_input_process"] + viewer_process_override = kwargs[ + "viewer_process_override"] + + baking_view_profile = ( + viewer_process_override or self.get_imageio_baking_profile()) + + fps = self.instance.context.data["fps"] + + self.log.debug(">> baking_view_profile `{}`".format( + baking_view_profile)) + + add_tags = kwargs.get("add_tags", []) + + self.log.info( + "__ add_tags: `{0}`".format(add_tags)) + subset = 
self.instance.data["subset"] self._temp_nodes[subset] = [] # ---------- start nodes creation @@ -400,10 +423,10 @@ class ExporterReviewMov(ExporterReview): self.log.debug("Read... `{}`".format(self._temp_nodes[subset])) # only create colorspace baking if toggled on - if self.bake_colorspace: - if self.bake_viewer_input: + if bake_viewer_process: + if bake_viewer_input_process_node: # View Process node - ipn = self.get_view_process_node() + ipn = self.get_view_input_process_node() if ipn is not None: # connect ipn.setInput(0, self.previous_node) @@ -414,26 +437,9 @@ class ExporterReviewMov(ExporterReview): self._temp_nodes[subset])) if not self.viewer_lut_raw: - colorspaces = [ - self.bake_colorspace_main, self.bake_colorspace_fallback - ] - - if any(colorspaces): - # OCIOColorSpace with controled output - dag_node = nuke.createNode("OCIOColorSpace") - self._temp_nodes[subset].append(dag_node) - for c in colorspaces: - test = dag_node["out_colorspace"].setValue(str(c)) - if test: - self.log.info( - "Baking in colorspace... `{}`".format(c)) - break - - if not test: - dag_node = nuke.createNode("OCIODisplay") - else: - # OCIODisplay - dag_node = nuke.createNode("OCIODisplay") + # OCIODisplay + dag_node = nuke.createNode("OCIODisplay") + dag_node["view"].setValue(str(baking_view_profile)) # connect dag_node.setInput(0, self.previous_node) @@ -445,11 +451,11 @@ class ExporterReviewMov(ExporterReview): # Write node write_node = nuke.createNode("Write") self.log.debug("Path: {}".format(self.path)) - write_node["file"].setValue(self.path) - write_node["file_type"].setValue(self.ext) + write_node["file"].setValue(str(self.path)) + write_node["file_type"].setValue(str(self.ext)) # Knobs `meta_codec` and `mov64_codec` are not available on centos. - # TODO change this to use conditions, if possible. + # TODO should't this come from settings on outputs? 
try: write_node["meta_codec"].setValue("ap4h") except Exception: @@ -457,8 +463,10 @@ class ExporterReviewMov(ExporterReview): try: write_node["mov64_codec"].setValue("ap4h") + write_node["mov64_fps"].setValue(float(fps)) except Exception: self.log.info("`mov64_codec` knob was not found") + write_node["mov64_write_timecode"].setValue(1) write_node["raw"].setValue(1) # connect @@ -480,7 +488,7 @@ class ExporterReviewMov(ExporterReview): self.render(write_node.name()) # ---------- generate representation data self.get_representation_data( - tags=["review", "delete"], + tags=["review", "delete"] + add_tags, range=True ) diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py index 83275f9716..f092700e3b 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py @@ -27,11 +27,11 @@ class ExtractReviewDataMov(openpype.api.Extractor): # presets viewer_lut_raw = None - bake_colorspace_fallback = None - bake_colorspace_main = None + outputs = {} def process(self, instance): families = instance.data["families"] + task_type = instance.context.data["taskType"] self.log.info("Creating staging dir...") if "representations" not in instance.data: @@ -45,28 +45,71 @@ class ExtractReviewDataMov(openpype.api.Extractor): self.log.info( "StagingDir `{0}`...".format(instance.data["stagingDir"])) + self.log.info(self.outputs) + # generate data with anlib.maintained_selection(): - exporter = plugin.ExporterReviewMov( - self, instance) + for o_name, o_data in self.outputs.items(): + f_families = o_data["filter"]["families"] + f_task_types = o_data["filter"]["task_types"] - if "render.farm" in families: - instance.data["families"].remove("review") - data = exporter.generate_mov(farm=True) + test_families = any([ + bool(set(families).intersection(f_families)), + bool(not f_families) + ]) + + test_task_types = any([ + 
bool(task_type in f_task_types), + bool(not f_task_types) + ]) + + test_all = all([ + test_families, + test_task_types + ]) + + if not test_all: + continue + + self.log.info( + "Baking output `{}` with settings: {}".format( + o_name, o_data)) + + # add additional families + add_families = o_data["add_families"] + for adf in add_families: + if adf in instance.data["families"]: + continue + instance.data["families"].append(adf) + + # create exporter instance + exporter = plugin.ExporterReviewMov( + self, instance, o_name, o_data["extension"]) + + if "render.farm" in families: + if "review" in instance.data["families"]: + instance.data["families"].remove("review") + + data = exporter.generate_mov(farm=True, **o_data) + + self.log.debug( + "_ data: {}".format(data)) + + if not instance.data.get("bakingNukeScripts"): + instance.data["bakingNukeScripts"] = [] + + instance.data["bakingNukeScripts"].append({ + "bakeRenderPath": data.get("bakeRenderPath"), + "bakeScriptPath": data.get("bakeScriptPath"), + "bakeWriteNodeName": data.get("bakeWriteNodeName") + }) + else: + data = exporter.generate_mov(**o_data) + + self.log.info(data["representations"]) + + # assign to representations + instance.data["representations"] += data["representations"] self.log.debug( - "_ data: {}".format(data)) - - instance.data.update({ - "bakeRenderPath": data.get("bakeRenderPath"), - "bakeScriptPath": data.get("bakeScriptPath"), - "bakeWriteNodeName": data.get("bakeWriteNodeName") - }) - else: - data = exporter.generate_mov() - - # assign to representations - instance.data["representations"] += data["representations"] - - self.log.debug( - "_ representations: {}".format(instance.data["representations"])) + "_ representations: {}".format(instance.data["representations"])) diff --git a/openpype/hosts/nuke/plugins/publish/precollect_instances.py b/openpype/hosts/nuke/plugins/publish/precollect_instances.py index c34e314a79..5c30df9a62 100644 --- 
a/openpype/hosts/nuke/plugins/publish/precollect_instances.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_instances.py @@ -74,21 +74,6 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): if review: families.append("review") - # deside if to bake or not to bake - baking = True - if "bake_colorspace" in node.knobs(): - baking = node["bake_colorspace"].value() - - if baking: - families.append("bake_viewer") - - viewer_input = True - if "bake_viewer_input" in node.knobs(): - viewer_input = node["bake_viewer_input"].value() - - if viewer_input: - families.append("bake_viewer_input") - # Add all nodes in group instances. if node.Class() == "Group": # only alter families for render family @@ -157,9 +142,7 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): "resolutionWidth": resolution_width, "resolutionHeight": resolution_height, "pixelAspect": pixel_aspect, - "review": review, - "bakeColorspace": baking, - "bakeViewerInput": viewer_input + "review": review }) self.log.info("collected instance: {}".format(instance.data)) diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index 67446ca6b9..ed0db6a000 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -110,10 +110,12 @@ "task_types": [], "families": [] }, + "extension": "mov", "viewer_process_override": "", "bake_viewer_process": true, "bake_viewer_input_process": true, - "add_taggs": [] + "add_tags": [], + "add_families": [] } } }, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json index 1bdd15c650..b1f640e951 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json @@ -198,6 +198,11 @@ { 
"type": "separator" }, + { + "type": "text", + "key": "extension", + "label": "File extension" + }, { "type": "text", "key": "viewer_process_override", @@ -214,8 +219,14 @@ "label": "Bake Viewer Input Process (LUTs)" }, { - "key": "add_taggs", - "label": "Add tags to representations", + "key": "add_families", + "label": "Add additional families to instance", + "type": "list", + "object_type": "text" + }, + { + "key": "add_tags", + "label": "Add additional tags to representations", "type": "list", "object_type": "text" } From 162d3864a11a541f5516e2a335bd64cb2f34c05d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 12 Nov 2021 11:23:56 +0100 Subject: [PATCH 140/211] nuke: adding parent to workfile tool (it is not working yet) --- openpype/hosts/nuke/api/menu.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/api/menu.py b/openpype/hosts/nuke/api/menu.py index 3e74893589..d92a99a965 100644 --- a/openpype/hosts/nuke/api/menu.py +++ b/openpype/hosts/nuke/api/menu.py @@ -1,6 +1,7 @@ import os import nuke from avalon.api import Session +from avalon.nuke.pipeline import get_main_window from .lib import WorkfileSettings from openpype.api import Logger, BuildWorkfile, get_current_project_settings @@ -25,7 +26,7 @@ def install(): menu.removeItem(rm_item[1].name()) menu.addCommand( name, - host_tools.show_workfiles, + lambda: host_tools.show_workfiles(parent=get_main_window()), index=2 ) menu.addSeparator(index=3) From 49890910504906f30d885806f0e2d157a3495bcf Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 12 Nov 2021 11:24:44 +0100 Subject: [PATCH 141/211] Global: adding burnin profiles linking to extract review profile --- openpype/settings/defaults/project_settings/global.json | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 45c1a59d17..0eacfdd768 100644 --- 
a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -49,6 +49,7 @@ "burnin", "ftrackreview" ], + "burnins": [], "ffmpeg_args": { "video_filters": [], "audio_filters": [], From 28e28db6c76aec511ae3c0da398666e3681386d1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 12 Nov 2021 11:25:26 +0100 Subject: [PATCH 142/211] Global: implementing burnin profiles linking to extract review --- openpype/plugins/publish/extract_burnin.py | 47 +++++++++++++++---- openpype/plugins/publish/extract_review.py | 19 +++++++- .../schemas/schema_global_publish.json | 6 +++ 3 files changed, 63 insertions(+), 9 deletions(-) diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index 06eb85c593..8015e5b54b 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -110,6 +110,9 @@ class ExtractBurnin(openpype.api.Extractor): ).format(host_name, family, task_name)) return + self.log.debug("profile: {}".format( + profile)) + # Pre-filter burnin definitions by instance families burnin_defs = self.filter_burnins_defs(profile, instance) if not burnin_defs: @@ -126,18 +129,44 @@ class ExtractBurnin(openpype.api.Extractor): anatomy = instance.context.data["anatomy"] scriptpath = self.burnin_script_path() + # Executable args that will execute the script # [pype executable, *pype script, "run"] executable_args = get_pype_execute_args("run", scriptpath) + from pprint import pformat + self.log.debug(pformat(instance.data["representations"])) + for idx, repre in enumerate(tuple(instance.data["representations"])): self.log.debug("repre ({}): `{}`".format(idx + 1, repre["name"])) + + repre_burnin_links = repre.get("burnins", []) + if not self.repres_is_valid(repre): continue + self.log.debug("repre_burnin_links: {}".format( + repre_burnin_links)) + + self.log.debug("burnin_defs.keys(): {}".format( + burnin_defs.keys())) + + # Filter 
output definition by `burnin` represetation key + repre_linked_burnins = { + name: output for name, output in burnin_defs.items() + if name in repre_burnin_links + } + self.log.debug("repre_linked_burnins: {}".format( + repre_linked_burnins)) + + # if any match then replace burnin defs and follow tag filtering + _burnin_defs = copy.deepcopy(burnin_defs) + if repre_linked_burnins: + _burnin_defs = repre_linked_burnins + # Filter output definition by representation tags (optional) repre_burnin_defs = self.filter_burnins_by_tags( - burnin_defs, repre["tags"] + _burnin_defs, repre["tags"] ) if not repre_burnin_defs: self.log.info(( @@ -281,14 +310,16 @@ class ExtractBurnin(openpype.api.Extractor): # NOTE we maybe can keep source representation if necessary instance.data["representations"].remove(repre) - # Delete input files - for filepath in files_to_delete: - if os.path.exists(filepath): - os.remove(filepath) - self.log.debug("Removed: \"{}\"".format(filepath)) + self.log.debug("Files to delete: {}".format(files_to_delete)) - if do_decompress and os.path.exists(decompressed_dir): - shutil.rmtree(decompressed_dir) + # Delete input files + for filepath in files_to_delete: + if os.path.exists(filepath): + os.remove(filepath) + self.log.debug("Removed: \"{}\"".format(filepath)) + + if do_decompress and os.path.exists(decompressed_dir): + shutil.rmtree(decompressed_dir) def _get_burnin_options(self): # Prepare burnin options diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 7284483f5f..81225cec62 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -180,6 +180,9 @@ class ExtractReview(pyblish.api.InstancePlugin): if "tags" not in output_def: output_def["tags"] = [] + if "burnins" not in output_def: + output_def["burnins"] = [] + # Create copy of representation new_repre = copy.deepcopy(repre) @@ -192,6 +195,17 @@ class ExtractReview(pyblish.api.InstancePlugin): 
if tag not in new_repre["tags"]: new_repre["tags"].append(tag) + # Add burnin link from output definition to representation + for burnin in output_def["burnins"]: + if burnin not in new_repre.get("burnins", []): + if not new_repre.get("burnins"): + new_repre["burnins"] = [] + new_repre["burnins"].append(str(burnin)) + + self.log.debug( + "Linked burnins: `{}`".format(new_repre["burnins"]) + ) + self.log.debug( "New representation tags: `{}`".format(new_repre["tags"]) ) @@ -232,7 +246,10 @@ class ExtractReview(pyblish.api.InstancePlugin): for f in files_to_clean: os.unlink(f) - output_name = output_def["filename_suffix"] + output_name = new_repre.get("outputName", "") + if output_name: + output_name += "_" + output_name += output_def["filename_suffix"] if temp_data["without_handles"]: output_name += "_noHandles" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index c50f383f02..db83ba1192 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -198,6 +198,12 @@ "type": "schema", "name": "schema_representation_tags" }, + { + "key": "burnins", + "label": "Link to a burnin by name", + "type": "list", + "object_type": "text" + }, { "key": "ffmpeg_args", "label": "FFmpeg arguments", From 4eb71bc5764fa6fb6cf0ec8a4d56b2f08a5ac63d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 12 Nov 2021 11:25:54 +0100 Subject: [PATCH 143/211] Nuke/Global: new baking workflow to Deadline plugins --- .../plugins/publish/submit_nuke_deadline.py | 35 ++++++++++--------- .../plugins/publish/submit_publish_job.py | 12 +++---- 2 files changed, 25 insertions(+), 22 deletions(-) diff --git a/openpype/modules/default_modules/deadline/plugins/publish/submit_nuke_deadline.py 
b/openpype/modules/default_modules/deadline/plugins/publish/submit_nuke_deadline.py index 4cba35963c..a064a0aa86 100644 --- a/openpype/modules/default_modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/default_modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -94,24 +94,27 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): render_path).replace("\\", "/") instance.data["publishJobState"] = "Suspended" - if instance.data.get("bakeScriptPath"): - render_path = instance.data.get("bakeRenderPath") - script_path = instance.data.get("bakeScriptPath") - exe_node_name = instance.data.get("bakeWriteNodeName") + if instance.data.get("bakingNukeScripts"): + for baking_script in instance.data["bakingNukeScripts"]: + render_path = baking_script["bakeRenderPath"] + script_path = baking_script["bakeScriptPath"] + exe_node_name = baking_script["bakeWriteNodeName"] - # exception for slate workflow - if "slate" in instance.data["families"]: - self._frame_start += 1 + # exception for slate workflow + if "slate" in instance.data["families"]: + self._frame_start += 1 - resp = self.payload_submit(instance, - script_path, - render_path, - exe_node_name, - response.json() - ) - # Store output dir for unified publisher (filesequence) - instance.data["deadlineSubmissionJob"] = resp.json() - instance.data["publishJobState"] = "Suspended" + resp = self.payload_submit( + instance, + script_path, + render_path, + exe_node_name, + response.json() + ) + + # Store output dir for unified publisher (filesequence) + instance.data["deadlineSubmissionJob"] = resp.json() + instance.data["publishJobState"] = "Suspended" # redefinition of families if "render.farm" in families: diff --git a/openpype/modules/default_modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/default_modules/deadline/plugins/publish/submit_publish_job.py index 6b07749819..7c7e1f4025 100644 --- 
a/openpype/modules/default_modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/default_modules/deadline/plugins/publish/submit_publish_job.py @@ -142,8 +142,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): instance_transfer = { "slate": ["slateFrame"], "review": ["lutPath"], - "render2d": ["bakeScriptPath", "bakeRenderPath", - "bakeWriteNodeName", "version"], + "render2d": ["bakingNukeScripts", "version"], "renderlayer": ["convertToScanline"] } @@ -505,9 +504,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): """ representations = [] collections, remainders = clique.assemble(exp_files) - bake_render_path = instance.get("bakeRenderPath", []) + bake_renders = instance.get("bakingNukeScripts", []) - # create representation for every collected sequence + # create representation for every collected sequento ce for collection in collections: ext = collection.tail.lstrip(".") preview = False @@ -523,7 +522,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): preview = True break - if bake_render_path: + if bake_renders: preview = False staging = os.path.dirname(list(collection)[0]) @@ -595,7 +594,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): }) self._solve_families(instance, True) - if remainder in bake_render_path: + if (bake_renders + and remainder in bake_renders[0]["bakeRenderPath"]): rep.update({ "fps": instance.get("fps"), "tags": ["review", "delete"] From 48debb498333e764362879ee0447a1be69854192 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 15 Nov 2021 10:17:56 +0100 Subject: [PATCH 144/211] expect that regex can produce multiple layers for single pass --- .../publish/collect_tvpaint_instances.py | 70 ++++++++++++------- 1 file changed, 43 insertions(+), 27 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py index dd0d421447..3115f39793 100644 --- 
a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py @@ -30,7 +30,7 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): # Set by settings # Regex must constain 'layer' and 'variant' groups which are extracted from # name when instances are created - layer_name_regex = r"(?P<layer>L[0-9]{3}_\w+)_(?P<variant>.+)" + layer_name_regex = r"(?P<layer>L[0-9]{3}_\w+)_(?P<pass>.+)" def process(self, context): # Prepare compiled regex @@ -74,7 +74,9 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): ) new_instances.append(review_instance) - layers_by_render_layer = {} + # Get render layers and passes from TVPaint layers + # - it's based on regex extraction + layers_by_layer_and_pass = {} for layer in layers_data: # Filter only visible layers if not layer["visible"]: @@ -86,32 +88,46 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): if result is None: continue render_layer = result.group("layer") - variant = result.group("variant") + render_pass = result.group("pass") - if render_layer not in layers_by_render_layer: - layers_by_render_layer[render_layer] = [] - layers_by_render_layer[render_layer].append(copy.deepcopy(layer)) - dynamic_data = { - "render_pass": variant, - "render_layer": render_layer, - # Override family for subset name - "family": "render" - } - - subset_name = get_subset_name_with_asset_doc( - self.render_pass_family, - variant, - task_name, - asset_doc, - project_name, - host_name, - dynamic_data=dynamic_data + render_pass_maping = layers_by_layer_and_pass.get( + render_layer ) + if render_pass_maping is None: + render_pass_maping = {} + layers_by_layer_and_pass[render_layer] = render_pass_maping - instance = self._create_render_pass_instance( - context, layer, subset_name - ) - new_instances.append(instance) + if render_pass not in render_pass_maping: + render_pass_maping[render_pass] = [] + render_pass_maping[render_pass].append(copy.deepcopy(layer)) + + 
layers_by_render_layer = {} + for render_layer, render_passes in layers_by_layer_and_pass.items(): + render_layer_layers = [] + layers_by_render_layer[render_layer] = render_layer_layers + for render_pass, layers in render_passes.items(): + render_layer_layers.extend(copy.deepcopy(layers)) + dynamic_data = { + "render_pass": render_pass, + "render_layer": render_layer, + # Override family for subset name + "family": "render" + } + + subset_name = get_subset_name_with_asset_doc( + self.render_pass_family, + render_pass, + task_name, + asset_doc, + project_name, + host_name, + dynamic_data=dynamic_data + ) + + instance = self._create_render_pass_instance( + context, layers, subset_name + ) + new_instances.append(instance) for render_layer, layers in layers_by_render_layer.items(): variant = render_layer @@ -199,7 +215,7 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): "stagingDir": staging_dir }) - def _create_render_pass_instance(self, context, layer, subset_name): + def _create_render_pass_instance(self, context, layers, subset_name): staging_dir = self._create_staging_dir(context, subset_name) # Global instance data modifications # Fill families @@ -211,7 +227,7 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): # Add `review` family for thumbnail integration "families": [self.render_pass_family, "review"], "representations": [], - "layers": [layer], + "layers": layers, "stagingDir": staging_dir }) From af43b09c074801f2602833e02a96d41dd9306aa3 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 15 Nov 2021 10:18:18 +0100 Subject: [PATCH 145/211] renamed 'variant' regex group to 'pass' --- openpype/settings/defaults/project_settings/webpublisher.json | 2 +- .../schemas/projects_schema/schema_project_webpublisher.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/settings/defaults/project_settings/webpublisher.json b/openpype/settings/defaults/project_settings/webpublisher.json index d0fd7ffaf3..9db98acd5a 100644 
--- a/openpype/settings/defaults/project_settings/webpublisher.json +++ b/openpype/settings/defaults/project_settings/webpublisher.json @@ -117,7 +117,7 @@ } }, "CollectTVPaintInstances": { - "layer_name_regex": "(?P<layer>L[0-9]{3}_\\w+)_(?P<variant>.+)" + "layer_name_regex": "(?P<layer>L[0-9]{3}_\\w+)_(?P<pass>.+)" } } } \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json index 62d4780513..78f38f111d 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json @@ -71,7 +71,7 @@ "children": [ { "type": "label", - "label": "Regex helps to extract render layer and pass names from TVPaint layer name.
The regex must contain named groups 'layer' and 'variant' which are used for creation of RenderPass instances.

Example layer name: \"L001_Person_Hand\"
Example regex: \"(?P<layer>L[0-9]{3}_\\w+)_(?P<variant>.+)\"
Extracted layer: \"L001_Person\"
Extracted variant: \"Hand\"" + "label": "Regex helps to extract render layer and pass names from TVPaint layer name.
The regex must contain named groups 'layer' and 'pass' which are used for creation of RenderPass instances.

Example layer name: \"L001_Person_Hand\"
Example regex: \"(?P<layer>L[0-9]{3}_\\w+)_(?P<pass>.+)\"
Extracted layer: \"L001_Person\"
Extracted pass: \"Hand\"" }, { "type": "text", From bf308ebecd36113f0ccc4c59ff1889c58aaa6a2a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 15 Nov 2021 13:12:12 +0100 Subject: [PATCH 146/211] improving the output name with ext and upstream outputName value --- openpype/plugins/publish/extract_review.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index da1d493067..e500bb361d 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -247,6 +247,7 @@ class ExtractReview(pyblish.api.InstancePlugin): os.unlink(f) output_name = new_repre.get("outputName", "") + output_ext = new_repre["ext"] if output_name: output_name += "_" output_name += output_def["filename_suffix"] @@ -254,7 +255,7 @@ class ExtractReview(pyblish.api.InstancePlugin): output_name += "_noHandles" new_repre.update({ - "name": output_def["filename_suffix"], + "name": "{}_{}".format(output_name, output_ext), "outputName": output_name, "outputDef": output_def, "frameStartFtrack": temp_data["output_frame_start"], From 2f9ce16f174f286b7c8eaee9e9c5deb3b9fedf7f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 15 Nov 2021 16:17:42 +0100 Subject: [PATCH 147/211] otio burnin script supporting mxf source timecode --- openpype/scripts/otio_burnin.py | 25 ++++++++++++++++--------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/openpype/scripts/otio_burnin.py b/openpype/scripts/otio_burnin.py index 206abfc0b4..341604763f 100644 --- a/openpype/scripts/otio_burnin.py +++ b/openpype/scripts/otio_burnin.py @@ -37,7 +37,7 @@ TIMECODE_KEY = "{timecode}" SOURCE_TIMECODE_KEY = "{source_timecode}" -def _streams(source): +def _ffprobe_data(source): """Reimplemented from otio burnins to be able use full path to ffprobe :param str source: source media file :rtype: [{}, ...] 
@@ -47,7 +47,7 @@ def _streams(source): out = proc.communicate()[0] if proc.returncode != 0: raise RuntimeError("Failed to run: %s" % command) - return json.loads(out)['streams'] + return json.loads(out) def get_fps(str_value): @@ -244,15 +244,16 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): } def __init__( - self, source, streams=None, options_init=None, first_frame=None + self, source, ffprobe_data=None, options_init=None, first_frame=None ): - if not streams: - streams = _streams(source) + if not ffprobe_data: + ffprobe_data = _ffprobe_data(source) + self.ffprobe_data = ffprobe_data self.first_frame = first_frame self.input_args = [] - super().__init__(source, streams) + super().__init__(source, ffprobe_data["streams"]) if options_init: self.options_init.update(options_init) @@ -564,11 +565,11 @@ def burnins_from_data( "shot": "sh0010" } """ - streams = None + ffprobe_data = None if full_input_path: - streams = _streams(full_input_path) + ffprobe_data = _ffprobe_data(full_input_path) - burnin = ModifiedBurnins(input_path, streams, options, first_frame) + burnin = ModifiedBurnins(input_path, ffprobe_data, options, first_frame) frame_start = data.get("frame_start") frame_end = data.get("frame_end") @@ -595,6 +596,12 @@ def burnins_from_data( if source_timecode is None: source_timecode = stream.get("tags", {}).get("timecode") + if source_timecode is None: + input_format = burnin.ffprobe_data.get("format") or {} + source_timecode = input_format.get("timecode") + if source_timecode is None: + source_timecode = input_format.get("tags", {}).get("timecode") + if source_timecode is not None: data[SOURCE_TIMECODE_KEY[1:-1]] = SOURCE_TIMECODE_KEY From c3274cc555f1845738b99b03319254d2bafd1afb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 16 Nov 2021 10:49:03 +0100 Subject: [PATCH 148/211] hound suggestions --- openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py | 3 ++- openpype/plugins/publish/extract_burnin.py | 3 --- 2 files changed, 2 
insertions(+), 4 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py index f092700e3b..cd918afe39 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py @@ -112,4 +112,5 @@ class ExtractReviewDataMov(openpype.api.Extractor): instance.data["representations"] += data["representations"] self.log.debug( - "_ representations: {}".format(instance.data["representations"])) + "_ representations: {}".format( + instance.data["representations"])) diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index 8015e5b54b..b1ba4ddeac 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -134,9 +134,6 @@ class ExtractBurnin(openpype.api.Extractor): # [pype executable, *pype script, "run"] executable_args = get_pype_execute_args("run", scriptpath) - from pprint import pformat - self.log.debug(pformat(instance.data["representations"])) - for idx, repre in enumerate(tuple(instance.data["representations"])): self.log.debug("repre ({}): `{}`".format(idx + 1, repre["name"])) From 8839abd0f97c6a680d532dd80c4d21ad7127b8f6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 16 Nov 2021 10:49:38 +0100 Subject: [PATCH 149/211] Nuke: settings removing additional family --- openpype/settings/defaults/project_settings/nuke.json | 3 +-- .../projects_schema/schemas/schema_nuke_publish.json | 6 ------ 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index 9b7a0c3090..c3e229b8e8 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -121,8 +121,7 @@ "viewer_process_override": "", "bake_viewer_process": true, 
"bake_viewer_input_process": true, - "add_tags": [], - "add_families": [] + "add_tags": [] } } }, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json index b1f640e951..d6fc30c315 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json @@ -218,12 +218,6 @@ "key": "bake_viewer_input_process", "label": "Bake Viewer Input Process (LUTs)" }, - { - "key": "add_families", - "label": "Add additional families to instance", - "type": "list", - "object_type": "text" - }, { "key": "add_tags", "label": "Add additional tags to representations", From 172a143ecbdf39b18c36148f086d6b641d59cf9b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 16 Nov 2021 11:56:20 +0100 Subject: [PATCH 150/211] plugin_tools: adding exception because 'mxf` is not supported by oiio --- openpype/lib/plugin_tools.py | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/openpype/lib/plugin_tools.py b/openpype/lib/plugin_tools.py index aa9e0c9b57..12c77abead 100644 --- a/openpype/lib/plugin_tools.py +++ b/openpype/lib/plugin_tools.py @@ -531,12 +531,20 @@ def should_decompress(file_url): and we can decompress (oiiotool supported) """ if oiio_supported(): - output = run_subprocess([ - get_oiio_tools_path(), - "--info", "-v", file_url]) - return "compression: \"dwaa\"" in output or \ - "compression: \"dwab\"" in output - + try: + output = run_subprocess([ + get_oiio_tools_path(), + "--info", "-v", file_url]) + return "compression: \"dwaa\"" in output or \ + "compression: \"dwab\"" in output + except RuntimeError as _E: + _name, ext = os.path.splitext(file_url) + if ext in [".mxf"]: + # TODO: should't the list of allowed extensions be + # taken from an OIIO variable of supported formats + return False + else: + 
raise RuntimeError(_E) return False From 7755a0cf74d71d4f4f9053d3b4c404bf8e955fc0 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 16 Nov 2021 11:56:42 +0100 Subject: [PATCH 151/211] nuke: fix removing adding families form plugin --- .../hosts/nuke/plugins/publish/extract_review_data_mov.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py index cd918afe39..00159a81bc 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py @@ -75,13 +75,6 @@ class ExtractReviewDataMov(openpype.api.Extractor): "Baking output `{}` with settings: {}".format( o_name, o_data)) - # add additional families - add_families = o_data["add_families"] - for adf in add_families: - if adf in instance.data["families"]: - continue - instance.data["families"].append(adf) - # create exporter instance exporter = plugin.ExporterReviewMov( self, instance, o_name, o_data["extension"]) From ea76c1c346aca130a2c187143e26ff8eb156d89f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Tue, 16 Nov 2021 14:45:00 +0100 Subject: [PATCH 152/211] Update openpype/lib/plugin_tools.py improving exception behavior Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/lib/plugin_tools.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/lib/plugin_tools.py b/openpype/lib/plugin_tools.py index 12c77abead..891163e3ae 100644 --- a/openpype/lib/plugin_tools.py +++ b/openpype/lib/plugin_tools.py @@ -537,14 +537,14 @@ def should_decompress(file_url): "--info", "-v", file_url]) return "compression: \"dwaa\"" in output or \ "compression: \"dwab\"" in output - except RuntimeError as _E: + except RuntimeError: _name, ext = os.path.splitext(file_url) - if ext in [".mxf"]: - # TODO: should't the list of allowed 
extensions be - # taken from an OIIO variable of supported formats - return False - else: - raise RuntimeError(_E) + # TODO: should't the list of allowed extensions be + # taken from an OIIO variable of supported formats + if ext not in [".mxf"]: + # Reraise exception + raise + return False return False From dd27953068efe35579ae77fb3401e539ffaef041 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 18 Nov 2021 10:51:02 +0100 Subject: [PATCH 153/211] reverse commit: removing representation after all presets are used --- openpype/plugins/publish/extract_burnin.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index b1ba4ddeac..961c7f50d1 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -307,16 +307,16 @@ class ExtractBurnin(openpype.api.Extractor): # NOTE we maybe can keep source representation if necessary instance.data["representations"].remove(repre) - self.log.debug("Files to delete: {}".format(files_to_delete)) + self.log.debug("Files to delete: {}".format(files_to_delete)) - # Delete input files - for filepath in files_to_delete: - if os.path.exists(filepath): - os.remove(filepath) - self.log.debug("Removed: \"{}\"".format(filepath)) + # Delete input files + for filepath in files_to_delete: + if os.path.exists(filepath): + os.remove(filepath) + self.log.debug("Removed: \"{}\"".format(filepath)) - if do_decompress and os.path.exists(decompressed_dir): - shutil.rmtree(decompressed_dir) + if do_decompress and os.path.exists(decompressed_dir): + shutil.rmtree(decompressed_dir) def _get_burnin_options(self): # Prepare burnin options From 25ac9344c421dfcf4b016cf8a29d287d71c49659 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 18 Nov 2021 13:25:26 +0100 Subject: [PATCH 154/211] fix tvpaint route --- .../hosts/webpublisher/webserver_service/webpublish_routes.py | 3 ++- 1 file 
changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index e9c399ad7b..256f144aef 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -204,7 +204,8 @@ class WebpublisherBatchPublishEndpoint(_RestApiEndpoint): "command": "remotepublish", "arguments": { "targets": ["tvpaint"] - } + }, + "add_to_queue": False }, # Photoshop filter { From a9a94b283e1e0dbf7fd6fc161d207eb7d8d676d4 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 22 Nov 2021 17:58:15 +0100 Subject: [PATCH 155/211] fix function calls --- openpype/hosts/tvpaint/worker/worker_job.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index 308fbf3bd7..c3893b6f2e 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -385,7 +385,7 @@ class SenderTVPaintCommands(TVPaintCommands): It is expected that worker will add it's root before passed workfile. """ new_workfile = workfile.replace("\\", "/") - job_queue_root = self.job_queue_root.replace("\\", "/") + job_queue_root = self.job_queue_root().replace("\\", "/") if job_queue_root not in new_workfile: raise ValueError(( "Workfile is not located in JobQueue root." 
@@ -476,7 +476,7 @@ class ProcessTVPaintCommands(TVPaintCommands): def _prepare_workfile(self, workfile): """Preprend job queue root before passed workfile.""" workfile = workfile.replace("\\", "/") - job_queue_root = self.job_queue_root.replace("\\", "/") + job_queue_root = self.job_queue_root().replace("\\", "/") new_workfile = "/".join([job_queue_root, workfile]) while "//" in new_workfile: new_workfile = new_workfile.replace("//", "/") From 0023d94d3084e85c99e3b6527f2a1d3ea631f9b5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 22 Nov 2021 17:58:24 +0100 Subject: [PATCH 156/211] fix validation key of mark out --- .../plugins/publish/validate_tvpaint_workfile_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py index 652209a83e..eec6ef1004 100644 --- a/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py @@ -20,7 +20,7 @@ class ValidateWorkfileData(pyblish.api.ContextPlugin): scene_data = context.data["sceneData"] scene_mark_in = scene_data["sceneMarkIn"] - scene_mark_out = scene_data["sceneMarkIn"] + scene_mark_out = scene_data["sceneMarkOut"] expected_range = ( (frame_end - frame_start + 1) From d33f952df266eb3824dce9d1f985b2d2132b2153 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 22 Nov 2021 17:59:18 +0100 Subject: [PATCH 157/211] method get_jobs_root returns a value --- openpype/modules/default_modules/job_queue/module.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/default_modules/job_queue/module.py b/openpype/modules/default_modules/job_queue/module.py index 665f18f7a7..284aa80fe9 100644 --- a/openpype/modules/default_modules/job_queue/module.py +++ b/openpype/modules/default_modules/job_queue/module.py @@ -104,7 +104,7 @@ 
class JobQueueModule(OpenPypeModule): return copy.deepcopy(self._jobs_root_mapping) def get_jobs_root(self): - self._jobs_root_mapping.get(platform.system().lower()) + return self._jobs_root_mapping.get(platform.system().lower()) @classmethod def get_jobs_root_from_settings(cls): From 0e7a20787003906f82aade29295b9ab209310b85 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 22 Nov 2021 18:15:16 +0100 Subject: [PATCH 158/211] changed target name from "tvpaint" to "tvpaint_worker" --- .../webpublisher/plugins/publish/collect_tvpaint_instances.py | 2 +- .../plugins/publish/collect_tvpaint_workfile_data.py | 2 +- .../webpublisher/plugins/publish/extract_tvpaint_workfile.py | 2 +- .../hosts/webpublisher/webserver_service/webpublish_routes.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py index 3115f39793..c533403e5f 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py @@ -17,7 +17,7 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): label = "Collect TVPaint Instances" order = pyblish.api.CollectorOrder + 0.2 hosts = ["webpublisher"] - targets = ["tvpaint"] + targets = ["tvpaint_worker"] workfile_family = "workfile" workfile_variant = "" diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py index 147b6f3d88..9006032192 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py @@ -26,7 +26,7 @@ class CollectTVPaintWorkfileData(pyblish.api.ContextPlugin): label = "Collect TVPaint Workfile data" order = pyblish.api.CollectorOrder - 0.4 hosts = ["webpublisher"] 
- targets = ["tvpaint"] + targets = ["tvpaint_worker"] def process(self, context): # Get JobQueue module diff --git a/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py b/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py index 18f47fdade..85c8526c83 100644 --- a/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py +++ b/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py @@ -21,7 +21,7 @@ from PIL import Image class ExtractTVPaintSequences(pyblish.api.Extractor): label = "Extract TVPaint Sequences" hosts = ["webpublisher"] - targets = ["tvpaint"] + targets = ["tvpaint_worker"] # Context plugin does not have families filtering families_filter = ["review", "renderPass", "renderLayer"] diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index 256f144aef..30399a6ba7 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -203,7 +203,7 @@ class WebpublisherBatchPublishEndpoint(_RestApiEndpoint): "extensions": [".tvpp"], "command": "remotepublish", "arguments": { - "targets": ["tvpaint"] + "targets": ["tvpaint_worker"] }, "add_to_queue": False }, From 97a2584fc2f7e93d6638ecc3aca35e48e731ff23 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 22 Nov 2021 18:15:31 +0100 Subject: [PATCH 159/211] added "tvpaint_worker" target to ftrack collect username --- .../default_modules/ftrack/plugins/publish/collect_username.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/default_modules/ftrack/plugins/publish/collect_username.py b/openpype/modules/default_modules/ftrack/plugins/publish/collect_username.py index a5187dd52b..7ea1c1f323 100644 --- a/openpype/modules/default_modules/ftrack/plugins/publish/collect_username.py +++ 
b/openpype/modules/default_modules/ftrack/plugins/publish/collect_username.py @@ -27,7 +27,7 @@ class CollectUsername(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder - 0.488 label = "Collect ftrack username" hosts = ["webpublisher", "photoshop"] - targets = ["remotepublish", "filespublish"] + targets = ["remotepublish", "filespublish", "tvpaint_worker"] _context = None From 17b32059e434392625ebe285da99a3a810cc7e49 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 22 Nov 2021 18:29:52 +0100 Subject: [PATCH 160/211] make sure all roots end without slashes --- .../default_modules/job_queue/module.py | 42 ++++++++++++------- 1 file changed, 28 insertions(+), 14 deletions(-) diff --git a/openpype/modules/default_modules/job_queue/module.py b/openpype/modules/default_modules/job_queue/module.py index 284aa80fe9..719d7c8f38 100644 --- a/openpype/modules/default_modules/job_queue/module.py +++ b/openpype/modules/default_modules/job_queue/module.py @@ -53,19 +53,37 @@ class JobQueueModule(OpenPypeModule): server_url = modules_settings.get("server_url") or "" self._server_url = self.url_conversion(server_url) - jobs_root_mapping = modules_settings.get("jobs_root") - if not jobs_root_mapping: - jobs_root_mapping = { - "windows": "", - "linux": "", - "darwin": "" - } + jobs_root_mapping = self._roots_mapping_conversion( + modules_settings.get("jobs_root") + ) + self._jobs_root_mapping = jobs_root_mapping # Is always enabled # - the module does nothing until is used self.enabled = True + @classmethod + def _root_conversion(cls, root_path): + """Make sure root path does not end with slash.""" + # Return empty string if path is invalid + if not root_path: + return "" + + # Remove all slashes + while root_path.endswith("/") or root_path.endswith("\\"): + root_path = root_path[:-1] + return root_path + + @classmethod + def _roots_mapping_conversion(cls, roots_mapping): + roots_mapping = roots_mapping or {} + for platform_name in ("windows", "linux", "darwin"): + 
roots_mapping[platform_name] = cls._root_conversion( + roots_mapping.get(platform_name) + ) + return roots_mapping + @staticmethod def url_conversion(url, ws=False): if sys.version_info[0] == 2: @@ -110,13 +128,9 @@ class JobQueueModule(OpenPypeModule): def get_jobs_root_from_settings(cls): module_settings = get_system_settings()["modules"] jobs_root_mapping = module_settings.get(cls.name, {}).get("jobs_root") - if not jobs_root_mapping: - jobs_root_mapping = { - "windows": "", - "linux": "", - "darwin": "" - } - return jobs_root_mapping.get(platform.system().lower()) + converted_mapping = cls._roots_mapping_conversion(jobs_root_mapping) + + return converted_mapping[platform.system().lower()] @property def server_url(self): From 22ed6f758918a92a0ba5bf5c517565f324a1ee86 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 22 Nov 2021 18:36:50 +0100 Subject: [PATCH 161/211] added debug log --- .../publish/collect_tvpaint_workfile_data.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py index 9006032192..f0f29260a2 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py @@ -13,6 +13,7 @@ Provides: """ import os import uuid +import json import shutil import pyblish.api from openpype.lib.plugin_tools import parse_json @@ -67,7 +68,7 @@ class CollectTVPaintWorkfileData(pyblish.api.ContextPlugin): # Store results # scene data store the same way as TVPaint collector - context.data["sceneData"] = { + scene_data = { "sceneWidth": scene_data["width"], "sceneHeight": scene_data["height"], "scenePixelAspect": scene_data["pixel_aspect"], @@ -82,12 +83,28 @@ class CollectTVPaintWorkfileData(pyblish.api.ContextPlugin): "sceneStartFrame": scene_data["start_frame"], 
"sceneBgColor": scene_data["bg_color"] } + context.data["sceneData"] = scene_data # Store only raw data context.data["groupsData"] = groups_data context.data["layersData"] = layers_data context.data["layersExposureFrames"] = exposure_frames_by_layer_id context.data["layersPrePostBehavior"] = pre_post_beh_by_layer_id + self.log.debug( + ( + "Collected data" + "\nScene data: {}" + "\nLayers data: {}" + "\nExposure frames: {}" + "\nPre/Post behavior: {}" + ).format( + json.dumps(scene_data, indent=4), + json.dumps(layers_data, indent=4), + json.dumps(exposure_frames_by_layer_id, indent=4), + json.dumps(pre_post_beh_by_layer_id, indent=4) + ) + ) + def _create_context_staging_dir(self, jobs_root): if not os.path.exists(jobs_root): os.makedirs(jobs_root) From e9234600806c5bedde9b6c1d4dd7188c01561b7e Mon Sep 17 00:00:00 2001 From: David Lai Date: Tue, 23 Nov 2021 19:37:11 +0800 Subject: [PATCH 162/211] remove redundant check --- openpype/plugins/publish/collect_scene_loaded_versions.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/openpype/plugins/publish/collect_scene_loaded_versions.py b/openpype/plugins/publish/collect_scene_loaded_versions.py index 199a467dec..0c6f0821d5 100644 --- a/openpype/plugins/publish/collect_scene_loaded_versions.py +++ b/openpype/plugins/publish/collect_scene_loaded_versions.py @@ -24,11 +24,6 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): def process(self, context): from avalon import api, io - current_file = context.data.get("currentFile") - if not current_file: - self.log.warn("No work file collected.") - return - host = api.registered_host() if host is None: self.log.warn("No registered host.") From 039ffdf5d5139d1a94c409b6a67d4122b96dbccb Mon Sep 17 00:00:00 2001 From: David Lai Date: Tue, 23 Nov 2021 19:39:09 +0800 Subject: [PATCH 163/211] move import statements to top --- openpype/plugins/publish/collect_scene_loaded_versions.py | 3 +-- openpype/plugins/publish/integrate_inputlinks.py | 6 ++---- 2 files 
changed, 3 insertions(+), 6 deletions(-) diff --git a/openpype/plugins/publish/collect_scene_loaded_versions.py b/openpype/plugins/publish/collect_scene_loaded_versions.py index 0c6f0821d5..c26b322df2 100644 --- a/openpype/plugins/publish/collect_scene_loaded_versions.py +++ b/openpype/plugins/publish/collect_scene_loaded_versions.py @@ -1,5 +1,6 @@ import pyblish.api +from avalon import api, io class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): @@ -22,8 +23,6 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): ] def process(self, context): - from avalon import api, io - host = api.registered_host() if host is None: self.log.warn("No registered host.") diff --git a/openpype/plugins/publish/integrate_inputlinks.py b/openpype/plugins/publish/integrate_inputlinks.py index 745b09d8e0..e8a8b2296c 100644 --- a/openpype/plugins/publish/integrate_inputlinks.py +++ b/openpype/plugins/publish/integrate_inputlinks.py @@ -1,4 +1,6 @@ +from collections import OrderedDict +from avalon import io import pyblish.api @@ -94,8 +96,6 @@ class IntegrateInputLinks(pyblish.api.ContextPlugin): None """ - from collections import OrderedDict - from avalon import io # NOTE: # using OrderedDict() here is just for ensuring field order between # python versions, if we ever need to use mongodb operation '$addToSet' @@ -117,8 +117,6 @@ class IntegrateInputLinks(pyblish.api.ContextPlugin): in database will be updated. 
""" - from avalon import io - for instance in instances: version_doc = instance.data.get("versionEntity") if version_doc is None: From 0713bc901e91fb5d848c20f4d4dbf0dcfa67ef33 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 23 Nov 2021 13:03:10 +0100 Subject: [PATCH 164/211] Fix - validate remote_site If Site Sync was enabled, but both sites were same it failes with NPE --- openpype/plugins/publish/integrate_new.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 1611bd4afd..1b0b8da2ff 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -1071,10 +1071,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): already_attached_sites[meta["name"]] = meta["created_dt"] if sync_project_presets and sync_project_presets["enabled"]: - # add remote - meta = {"name": remote_site.strip()} - rec["sites"].append(meta) - already_attached_sites[meta["name"]] = None + if remote_site and \ + remote_site not in already_attached_sites.keys(): + # add remote + meta = {"name": remote_site.strip()} + rec["sites"].append(meta) + already_attached_sites[meta["name"]] = None # add skeleton for site where it should be always synced to for always_on_site in always_accesible: @@ -1102,8 +1104,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): local_site = local_site_id remote_site = sync_project_presets["config"].get("remote_site") - if remote_site == local_site: - remote_site = None if remote_site == 'local': remote_site = local_site_id From 824571a2fc30c067661cd862a1a4d11572f5b24f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 24 Nov 2021 11:03:12 +0100 Subject: [PATCH 165/211] fixed imports --- openpype/tools/project_manager/project_manager/model.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/tools/project_manager/project_manager/model.py 
b/openpype/tools/project_manager/project_manager/model.py index b7ab9e40d0..0c02872b4c 100644 --- a/openpype/tools/project_manager/project_manager/model.py +++ b/openpype/tools/project_manager/project_manager/model.py @@ -3,6 +3,10 @@ import copy import json from uuid import uuid4 +from pymongo import UpdateOne, DeleteOne + +from Qt import QtCore, QtGui + from .constants import ( IDENTIFIER_ROLE, ITEM_TYPE_ROLE, @@ -15,9 +19,6 @@ from .constants import ( from .style import ResourceCache from openpype.lib import CURRENT_DOC_SCHEMAS -from pymongo import UpdateOne, DeleteOne -from avalon.vendor import qtawesome -from Qt import QtCore, QtGui class ProjectModel(QtGui.QStandardItemModel): From a6ab267f3baeb0ae5c8e7209f705eef60aff008b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 24 Nov 2021 11:08:47 +0100 Subject: [PATCH 166/211] implemented dialog for confirmation of project deletion --- .../project_manager/widgets.py | 122 ++++++++++++++++++ 1 file changed, 122 insertions(+) diff --git a/openpype/tools/project_manager/project_manager/widgets.py b/openpype/tools/project_manager/project_manager/widgets.py index 8c2f693f11..84cb637920 100644 --- a/openpype/tools/project_manager/project_manager/widgets.py +++ b/openpype/tools/project_manager/project_manager/widgets.py @@ -288,3 +288,125 @@ class CreateProjectDialog(QtWidgets.QDialog): project_codes.add(project_code) return project_names, project_codes + + +class _SameSizeBtns(QtWidgets.QPushButton): + """Button that keep width of all button added as related. + + This happens without changing min/max/fix size of button. Which is + welcomed for multidisplay desktops with different resolution. + """ + def __init__(self, *args, **kwargs): + super(_SameSizeBtns, self).__init__(*args, **kwargs) + self._related_btns = [] + + def add_related_btn(self, btn): + """Add related button which should be checked for width. + + Args: + btn (_SameSizeBtns): Other object of _SameSizeBtns. 
+ """ + self._related_btns.append(btn) + + def hint_width(self): + """Get size hint of button not related to others.""" + return super(_SameSizeBtns, self).sizeHint().width() + + def sizeHint(self): + """Calculate size hint based on size hint of this button and related. + + If width is lower than any other button it is changed to higher. + """ + result = super(_SameSizeBtns, self).sizeHint() + width = result.width() + for btn in self._related_btns: + btn_width = btn.hint_width() + if btn_width > width: + width = btn_width + + result.setWidth(width) + return result + + +class ConfirmProjectDeletion(QtWidgets.QDialog): + """Dialog which confirms deletion of a project.""" + def __init__(self, project_name, parent): + super(ConfirmProjectDeletion, self).__init__(parent) + + self.setWindowTitle("Delete project?") + + message = ( + "Project \"{}\" with all related data will be" + " permanently removed from the database (This actions won't remove" + " any files on disk)." + ).format(project_name) + message_label = QtWidgets.QLabel(message, self) + message_label.setWordWrap(True) + + question_label = QtWidgets.QLabel("Are you sure?", self) + + confirm_input = QtWidgets.QLineEdit(self) + confirm_input.setPlaceholderText("Type \"Delete\" to confirm...") + + cancel_btn = _SameSizeBtns("Cancel", self) + cancel_btn.setToolTip("Cancel deletion of the project") + confirm_btn = _SameSizeBtns("Delete", self) + confirm_btn.setEnabled(False) + confirm_btn.setToolTip("Confirm deletion") + + cancel_btn.add_related_btn(confirm_btn) + confirm_btn.add_related_btn(cancel_btn) + + btns_layout = QtWidgets.QHBoxLayout() + btns_layout.addStretch(1) + btns_layout.addWidget(cancel_btn, 0) + btns_layout.addWidget(confirm_btn, 0) + + layout = QtWidgets.QVBoxLayout(self) + layout.addWidget(message_label, 0) + layout.addStretch(1) + layout.addWidget(question_label, 0) + layout.addWidget(confirm_input, 0) + layout.addLayout(btns_layout) + + cancel_btn.clicked.connect(self._on_cancel_click) + 
confirm_btn.clicked.connect(self._on_confirm_click) + confirm_input.textChanged.connect(self._on_confirm_text_change) + confirm_input.returnPressed.connect(self._on_enter_clicked) + + self._cancel_btn = cancel_btn + self._confirm_btn = confirm_btn + self._confirm_input = confirm_input + self._result = 0 + + self.setMinimumWidth(450) + self.setMaximumWidth(650) + self.setMaximumHeight(250) + + def exec_(self, *args, **kwargs): + super(ConfirmProjectDeletion, self).exec_(*args, **kwargs) + return self._result + + def showEvent(self, event): + """Reset result on show.""" + super(ConfirmProjectDeletion, self).showEvent(event) + self._result = 0 + + def result(self): + """Get result of dialog 1 for confirm 0 for cancel.""" + return self._result + + def _on_cancel_click(self): + self.close() + + def _on_confirm_click(self): + self._result = 1 + self.close() + + def _on_enter_clicked(self): + if self._confirm_btn.isEnabled(): + self._on_confirm_click() + + def _on_confirm_text_change(self): + enabled = self._confirm_input.text().lower() == "delete" + self._confirm_btn.setEnabled(enabled) From 1bcab11fc821b9524b90060923b5e73a30045e93 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 24 Nov 2021 11:09:05 +0100 Subject: [PATCH 167/211] added bin image to images in project manager --- .../project_manager/images/bin.png | Bin 0 -> 6231 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 openpype/tools/project_manager/project_manager/images/bin.png diff --git a/openpype/tools/project_manager/project_manager/images/bin.png b/openpype/tools/project_manager/project_manager/images/bin.png new file mode 100644 index 0000000000000000000000000000000000000000..6bc4046ca48386345ee6d8836237571ae59dcfce GIT binary patch literal 6231 zcmYkBdpOho`^R^{9I9a$&0)^um_pRdYEp!ch(zS96mpzHvmA1qP$@aBmGsG}Pzagi z^l=zTlG8$&jOJ{Pzc=51et&H5*R}h4Jn#E{zV6rS+O;M9V`+Mj|1duY1Uh*B9R4B* z1O}F15HAn#a0-0o1w8ly&N&2uKmv%p7Z~(3R~!Tq=R1$b*%ET+@?ce$25RG@Kc7>` 
zy7or@Cf!fn${o&|{PwQf;zJd=%Wf$LMc-ac`X%HpqV}G2xXH)%_&Nv_U_mapg3~G)( za=Ho{;T=9Sfo=C(GFysH5u}Q$r+X5z$~AVHhhIgFZjDzNL!&KEcZCIa1wLwtzGm~w zu)h*4`}@B_v0YhzeBAX_#iPj52hosDRMdE#)FTY!d zy9RGO+EL~2J6qd5fhG0THY{6NC+eL;7GI?%?SRfsr>I{w-%*eE31VGekoox9<8LO` zT=s|L+|RErzCoTH1%-mQ(pJ~!qy4fkE_$otb3VE3cE4GNJ71bmhK8I!SB1&~inuyZ zaq-`RS3by_4?TUdP!$(?7ClhiOJcisi?2ApqJ5xb-+YIeiR@o{<%1?VN0qD7uWTt@ zdiZThqIk|>G(@E%*+I!+E9MA)W|D)QHu!D!fKvBaEh_b4$FPImQR_rl2(IYo0y3(T2@HEZfbSCM!jEQ<29N^-n<-NahN>znS; zoF=hGbn>SzV??Ed>w*APr7QQko1VJ{5hoI#?Q7B1YI??0E0L;wZ0z$*rOR6}D*VU7 za-MXT{*Tg7^6EdKqN)-oON3@+(bl*OMpp-X?G)D^Zh!v{lh5D<2d)47OfiURKG_|YsUW|AtfJ{ z`#!X+k8?NOXS`vE<9hG*8NH)8_e=X97Ul3AP64(?&CDp}31u<6Me75V5T(Vh^Y`-B zC$;2h3q%m7_kMpxR$@8vTUK%=?1F%^+|f

!VCp7pvvqgQ^84!#a){p3AY+QffO@ndAi8~5YS+WYd?#ppcpYGw7G>QHG-@3rINtUQUgtH*Sz&pRQ!WitmtNWY7 zPbhGr4LO|b90cb(iY$v9yk;?}wM?J|8Kmr*VZ_-+D&IM0d^r-G^-n|?Kc2c+<6~!T zY{j1~`gnmiptNoGtkdBDXT9E`B}dwjfg8N$(#a{l-5vEuF{En~=Kr+8L2{S2kDdFS3E`kL`{b9=sh zOgpRk7B(m&u4+u9?Dwobt!n}LKRfdAN_H?x;HCGoX(-V6YbLZu?K^pq(eZA8$ zeHhuppv|fu0lh)872ZboV|y&W`qH8w1sD~u3}3|+tDMIIWVoVJBf9R}5}t2bIW~7( z>t~ec&^s$Wd+W^k)~hTXc$ASW5SF*ZO%_7j?~y zZ@MTp*X791DCMD$(!C%{KrQTHz@0~|EA(TExdjIUiDhw1P>(@H^P1$$H!@|93@)u9 zD||7YUf;0DDW~5}zeK1-<5f;?;);DhMo%8RV^}?6r*7PdxWPC5>KXch`H4pSvij1S zi^mhMw>q*LAF-nnnc4K+hXF*{c21da&tG9oCkxhxep3E=O!H^BR^PD#o$3K9@m6hZ z+_9hw>+*`HOk!(>2?9pXNEas6Nco*lq*GkThd|9)(cNX$ujFs4J|qK+8j^s zPpxBi-pzmBp1)EL*)Z&n$&KdQC^K1=dwkq8X*Yf8J#=ajPsC0DqE(3`s4Y%4RvxQIzr}uk=i(w5volv1b8s*G3<-o`!5)pJ zuZp@J4Zve^`wq2sCZ<`%q{)F1tnn6MVF(3as zwpfNKpep%36aGVhfa()!c&j2D@b7}yj$QB3*D`EG?_jpjRO%|kQqIsBWSGpQJ$y|m zCe3=AR1ovArwi>}g-kb){C&LFT7ls+sd38ijm~SCHy1j4O8ysWVrl~+k^>((?faEf z<`cSl?;uB=R^8Pr`g=6K!Pqndggx=;%K9L#&0k1ri6gljBKN23zbJRtkMEe5KG_@> zkM{4nvaVA;AZPTCOBQ?k2n~s;?rplUTdGbc8(U?N2FROt|9~RSE<=gj9}K&|IV+TE zuQeMo}mGDbLUGR*FUV?ztu?;j}P^wg;@?99g#lBJG69TO^sgd$tKc%kpBK`kZOI za)vvHfGw>3kAPyd?Mb2+KR?~RQYyWiHF}LYskq1S0vv8Xog8VFiYxQcVp!wbTu5nh zvTZ1TC?#n2`34vXbs91O+HN3qjm19)EEW81PkUf&`g6} zl(=QxF`g5s;M6}b7NkZ~XzB-7jUf^t)t5k%lkLimMDADFP9m4>XVb;Ebpwj&@5t5Q zR?=Pr=8!UT?pi9{-cQM}jNdRhPFtU4v0r{*nCr}5*KBF3{Ne=@g;si5Wm&l;ND8R& zG1%$x!9Ww4HyE?UC9oPZBp^fFE~BNH;sRJ1NSa;T&t{uBev(pvW`uP}8~?VV$~+7E&Dr`J-Q?S#25|Bb;yx2YRod$5x|P)h{(q|mk|Y>O*tDYaTi z46+4;GSH>^q@v+*?%3!BXxTo%?pvu@|&{#4xMX7|Q2vde@WR z7%-3n@ESeF;e}cL9|ASdzT4)2Ci{9r3Hc3XFP0eP=%7VIT-%|G3^H?Z{9nD zYYaP2MgDNNXi2zx}G7No#bfjGRvD%9a=IkdZKbbbTyFG^z}>`!?%|OXZ&-! 
zwYis3XBIoX9cfDm;Cv(~zo%275d#dui|X5Y&Dj;@RBb;sf(tn#z*miD0v8?xMLFOXgo8|0E_XX<WRV!|&4&ph_kL~Mgn21O(lW%^GVsg?QM};Tz0U6<&q39|s7|>m z&dx+&fa~mXrp!f0%^3EEQtM0OvzGyzc7VA`U?r|XD$bV3j$7gdcl0|RXAx~E?$n`e z(k3Jg+MWi09x*z(wLMN>C~@#a;h4A; z$&W>(ZG+oz9;U&c54-0TKt}QCR7hGQ$*-T!k8=Pb340*b=WN_M@Hf&JpmvG*5$+Th zM<5>zlm^S(kCod@0L?%k7#FndhLn`z- zjl2*gSI`V8D3i=+eD|mk=v<^w4 zAb#%W{8rE|uJ;vA0(aMN=1BU`eL3;++io}0Zh`beu?%a{WS@uW3uL-(Z4C+-E>{ft zw9-(;Cip&Ba)?{Wf+X+j)kFn`q1AtsBijEp2P*NtX&xpCIU#KtijM%p(S-0$k;T{c zwjk`Q2NAIccVl*}mVyy%~v0@hmne09o^e{wd&sD1dMFQ0S!h^e*E*()0d19cfK^RQE* z0jJ2U?@|Hv@r)29hT@SJ|E;emuG-I^=3zG>k~!^Z=d{q17exc_owns_CBNq*A?suW zYkLfHS&7RKBQy37I9pis?n2$FJPA1~Y>sILx4EQCWtd;0BkDtWfI69;Gy3hew3Jae zfrrs;G>Axs6&Leo8XM%_l*i0oWfEn((j`Mdofh?MfpFriP>4i z=jyh-NkCdl9Cq3sk9~oBqt-Nw0r1p8jqS4>X+{eRb_&UM?@>1XF~O%us^$(x7^L%ld=Dag|xHa~F}e1je=={S%SyOw-_Vo&;-I9~DOs zszLQH5%qs?JbupSme0(OtSJ$$as!*uRHO3&vzHD3x>QqXL$-mm&FkDgi~u0hD;agH zF5rOwr!xufiBN(Pf$eOAI#b=&&#}IuJh8M{T#P;DF@*R$kr)OcnhMsZhu^2Vv3zIr z`7btvz?+KmJ3)<|Nkn}xQCYZt1G0y{$OcfA{fsxN%YX~Hft1>5=C~(&(+dCthtr1O z1Vu)eXdj#d{R4>^{6TvBO1#)%gRCRWK75A#dRm_}?n9MXe46>QT4c|b3BVTg-<0fa zk~^reIEmOk`yPcj$>A>K-fMm<%i)jjbA6C=SLJFu zCx@JMD~c-P-V&D|n8iI!x!8EtBuzObIWrpCrI2!7%ayDp!Zunc)Qf2STAAS|jFa`&@22~jD-j-cJ=JQbedj?#LISRBFI zibT_M11swVP8__zdI(=k*IBVyjY_W**)?Oub^RkK+LGY*azwoVVhNc3g0ZMXbpgIx za`Ra#iv|me{yt&e=$icTnAa*)ZpAZotRdn9GI&$7X@(JFN&R%7Vj1PN5Cf1jNB$26 zlSxFANU+4@*qS<1;m8FU0JT$LR*5$Ko9`&_!Jw^sIQ5!Vft}w~<-D}Q$#`KIG6EkQmKvKEXPtU)6?`_L zi!KY@0_`5o4eS@ebhol#x&QQbyOPz2)SYGcH;}Pn+4| zQLbU;)wj0>tTT#+hLwV^s}_p}xF5LKUDPhYj}6RQRH9gddfL{*LFH7-cj1sWl>;vZUgcp*DYAlt`Y+lxzO;665G z<`(tpM6=FweTp!u9EY5Hs90I=wUF~qQb&qi-9yD6M^O%Qe8J0Jwly4u&JRj)1M`n7 z$2-+-b3Wbh$HqPBzSb`TnUi(VFMlUEP8-M0!tZkQiJydKWJTt93t;pQFN@ha%{LM% z;+`lEdxZB-=$C$ZvL3>;xR3~0&jV|HO&qH?t0ikq(B(aknzYZ+wb!1 z8$c4Cl6&jAzt9nK2h6U{*f#iUjo{K}0rjY$85C}1LfOztJCB9nGQ2Ve>Cm-9-H>SD z4Et`uC)7&yTFqlmnegu9UTaWK@TzUUacq6}#yc<~BJZ*h@xt(K`byB|nBDO&H>3`j kbY71C->;?AhNWnpo012IEfgYwe+oe7O)T+cr?18QALaeg*8l(j literal 0 HcmV?d00001 
From 025b9ccb15b147eb017344bc94974f1c15afb8a0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 24 Nov 2021 11:09:27 +0100 Subject: [PATCH 168/211] add disabled color for buttons --- openpype/tools/project_manager/project_manager/style.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/openpype/tools/project_manager/project_manager/style.py b/openpype/tools/project_manager/project_manager/style.py index 17e269c1f6..b138072cfd 100644 --- a/openpype/tools/project_manager/project_manager/style.py +++ b/openpype/tools/project_manager/project_manager/style.py @@ -2,8 +2,10 @@ from avalon.vendor import qtawesome class ResourceCache: + # TODO use colors from OpenPype style colors = { "standard": "#bfccd6", + "disabled": "#969696", "new": "#2d9a4c", "warning": "#c83232" } @@ -58,8 +60,9 @@ class ResourceCache: }, "refresh": qtawesome.icon( "fa.refresh", - color=cls.colors["standard"] - ) + color=cls.colors["standard"], + color_disabled=cls.colors["disabled"] + ), } return cls.icons From 75545e86058bda6cefcc1faebdf8fbfa48a73b5b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 24 Nov 2021 11:09:54 +0100 Subject: [PATCH 169/211] added loading of bin image to resources cache --- .../project_manager/project_manager/style.py | 53 +++++++++++++++++++ 1 file changed, 53 insertions(+) diff --git a/openpype/tools/project_manager/project_manager/style.py b/openpype/tools/project_manager/project_manager/style.py index b138072cfd..d3d6857a63 100644 --- a/openpype/tools/project_manager/project_manager/style.py +++ b/openpype/tools/project_manager/project_manager/style.py @@ -1,3 +1,6 @@ +import os +from Qt import QtCore, QtGui + from avalon.vendor import qtawesome @@ -63,9 +66,59 @@ class ResourceCache: color=cls.colors["standard"], color_disabled=cls.colors["disabled"] ), + "remove": cls.get_remove_icon() } return cls.icons @classmethod def get_color(cls, color_name): return cls.colors[color_name] + + @classmethod + def get_remove_icon(cls): + src_image = 
get_remove_image() + normal_pix = paint_image_with_color( + src_image, + QtGui.QColor(cls.colors["standard"]) + ) + disabled_pix = paint_image_with_color( + src_image, + QtGui.QColor(cls.colors["disabled"]) + ) + icon = QtGui.QIcon(normal_pix) + icon.addPixmap(disabled_pix, QtGui.QIcon.Disabled, QtGui.QIcon.On) + icon.addPixmap(disabled_pix, QtGui.QIcon.Disabled, QtGui.QIcon.Off) + return icon + + +def get_remove_image(): + image_path = os.path.join( + os.path.dirname(os.path.abspath(__file__)), + "images", + "bin.png" + ) + return QtGui.QImage(image_path) + + +def paint_image_with_color(image, color): + """TODO: This function should be imported from utils. + + At the moment of creation is not available yet. + """ + width = image.width() + height = image.height() + + alpha_mask = image.createAlphaMask() + alpha_region = QtGui.QRegion(QtGui.QBitmap.fromImage(alpha_mask)) + + pixmap = QtGui.QPixmap(width, height) + pixmap.fill(QtCore.Qt.transparent) + + painter = QtGui.QPainter(pixmap) + painter.setClipRegion(alpha_region) + painter.setPen(QtCore.Qt.NoPen) + painter.setBrush(color) + painter.drawRect(QtCore.QRect(0, 0, width, height)) + painter.end() + + return pixmap From 8c61afba1169a30581c894da38eb4ff916da0358 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 24 Nov 2021 11:10:17 +0100 Subject: [PATCH 170/211] added deletion button to main window with confirmation --- .../project_manager/project_manager/window.py | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/openpype/tools/project_manager/project_manager/window.py b/openpype/tools/project_manager/project_manager/window.py index a19031ceda..a05811e813 100644 --- a/openpype/tools/project_manager/project_manager/window.py +++ b/openpype/tools/project_manager/project_manager/window.py @@ -11,6 +11,7 @@ from . 
import ( CreateProjectDialog, PROJECT_NAME_ROLE ) +from .widgets import ConfirmProjectDeletion from .style import ResourceCache from openpype.style import load_stylesheet from openpype.lib import is_admin_password_required @@ -77,6 +78,10 @@ class ProjectManagerWindow(QtWidgets.QWidget): ) create_folders_btn.setEnabled(False) + remove_projects_btn = QtWidgets.QPushButton(project_widget) + remove_projects_btn.setIcon(ResourceCache.get_icon("remove")) + remove_projects_btn.setObjectName("IconBtn") + project_layout = QtWidgets.QHBoxLayout(project_widget) project_layout.setContentsMargins(0, 0, 0, 0) project_layout.addWidget(project_combobox, 0) @@ -84,6 +89,7 @@ class ProjectManagerWindow(QtWidgets.QWidget): project_layout.addWidget(create_project_btn, 0) project_layout.addWidget(create_folders_btn) project_layout.addStretch(1) + project_layout.addWidget(remove_projects_btn) # Helper buttons helper_btns_widget = QtWidgets.QWidget(top_part_widget) @@ -145,11 +151,13 @@ class ProjectManagerWindow(QtWidgets.QWidget): refresh_projects_btn.clicked.connect(self._on_project_refresh) create_project_btn.clicked.connect(self._on_project_create) create_folders_btn.clicked.connect(self._on_create_folders) + remove_projects_btn.clicked.connect(self._on_remove_project) project_combobox.currentIndexChanged.connect(self._on_project_change) save_btn.clicked.connect(self._on_save_click) add_asset_btn.clicked.connect(self._on_add_asset) add_task_btn.clicked.connect(self._on_add_task) + self._dbcon = dbcon self._project_model = project_model self._project_proxy_model = project_proxy @@ -162,6 +170,7 @@ class ProjectManagerWindow(QtWidgets.QWidget): self._project_combobox = project_combobox self._create_project_btn = create_project_btn self._create_folders_btn = create_folders_btn + self._remove_projects_btn = remove_projects_btn self._add_asset_btn = add_asset_btn self._add_task_btn = add_task_btn @@ -171,6 +180,7 @@ class ProjectManagerWindow(QtWidgets.QWidget): def _set_project(self, 
project_name=None): self._create_folders_btn.setEnabled(project_name is not None) + self._remove_projects_btn.setEnabled(project_name is not None) self._project_proxy_model.set_filter_default(project_name is not None) self.hierarchy_view.set_project(project_name) @@ -252,6 +262,19 @@ class ProjectManagerWindow(QtWidgets.QWidget): exc_info=True ) + def _on_remove_project(self): + project_name = self._current_project() + dialog = ConfirmProjectDeletion(project_name, self) + result = dialog.exec_() + if result != 1: + return + + database = self._dbcon.database + if project_name in database.collection_names(): + collection = database[project_name] + collection.drop() + self.refresh_projects() + def show_message(self, message): # TODO add nicer message pop self.message_label.setText(message) From a098d621162bd02d8d237605c363587550122391 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 24 Nov 2021 11:34:52 +0100 Subject: [PATCH 171/211] changed sizes of dialog --- openpype/tools/project_manager/project_manager/widgets.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/tools/project_manager/project_manager/widgets.py b/openpype/tools/project_manager/project_manager/widgets.py index 84cb637920..b4d791b6d5 100644 --- a/openpype/tools/project_manager/project_manager/widgets.py +++ b/openpype/tools/project_manager/project_manager/widgets.py @@ -379,7 +379,7 @@ class ConfirmProjectDeletion(QtWidgets.QDialog): self._confirm_input = confirm_input self._result = 0 - self.setMinimumWidth(450) + self.setMinimumWidth(480) self.setMaximumWidth(650) self.setMaximumHeight(250) @@ -391,6 +391,8 @@ class ConfirmProjectDeletion(QtWidgets.QDialog): """Reset result on show.""" super(ConfirmProjectDeletion, self).showEvent(event) self._result = 0 + minimum_size_hint = self.minimumSizeHint() + self.resize(self.width(), minimum_size_hint.height() + 30) def result(self): """Get result of dialog 1 for confirm 0 for cancel.""" From 
0ccd1c4e0e67c00e79f90d6b6d37417aabd592eb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 24 Nov 2021 13:14:59 +0100 Subject: [PATCH 172/211] explaining how filter of preset works in comments --- .../plugins/publish/extract_review_data_mov.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py index 00159a81bc..37e98c0d35 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py @@ -53,21 +53,36 @@ class ExtractReviewDataMov(openpype.api.Extractor): f_families = o_data["filter"]["families"] f_task_types = o_data["filter"]["task_types"] + # test if family found in context test_families = any([ + # first if exact family set is mathing + # make sure only interesetion of list is correct bool(set(families).intersection(f_families)), + # and if famiies are set at all + # if not then return True because we want this preset + # to be active if nothig is set bool(not f_families) ]) + # test task types from filter test_task_types = any([ + # check if actual task type is defined in task types + # set in preset's filter bool(task_type in f_task_types), + # and if taskTypes are defined in preset filter + # if not then return True, because we want this filter + # to be active if no taskType is set bool(not f_task_types) ]) + # we need all filters to be positive for this + # preset to be activated test_all = all([ test_families, test_task_types ]) + # if it is not positive then skip this preset if not test_all: continue From b3d581dc474ce1a3e3eb217660eb5acea35ea7cb Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 24 Nov 2021 13:43:42 +0100 Subject: [PATCH 173/211] make sure paths are available at any platform --- openpype/lib/delivery.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/openpype/lib/delivery.py 
b/openpype/lib/delivery.py index c89e2e7ae0..01fcc907ed 100644 --- a/openpype/lib/delivery.py +++ b/openpype/lib/delivery.py @@ -60,12 +60,13 @@ def path_from_representation(representation, anatomy): path = pipeline.format_template_with_optional_keys( context, template ) + path = os.path.normpath(path.replace("/", "\\")) except KeyError: # Template references unavailable data return None - return os.path.normpath(path) + return path def copy_file(src_path, dst_path): @@ -179,9 +180,11 @@ def process_single_file( Returns: (collections.defaultdict , int) """ + # Make sure path is valid for all platforms + src_path = os.path.normpath(src_path.replace("\\", "/")) + if not os.path.exists(src_path): - msg = "{} doesn't exist for {}".format(src_path, - repre["_id"]) + msg = "{} doesn't exist for {}".format(src_path, repre["_id"]) report_items["Source file was not found"].append(msg) return report_items, 0 @@ -192,8 +195,10 @@ def process_single_file( else: delivery_path = anatomy_filled["delivery"][template_name] - # context.representation could be .psd + # Backwards compatibility when extension contained `.` delivery_path = delivery_path.replace("..", ".") + # Make sure path is valid for all platforms + delivery_path = os.path.normpath(delivery_path.replace("\\", "/")) delivery_folder = os.path.dirname(delivery_path) if not os.path.exists(delivery_folder): @@ -230,14 +235,14 @@ def process_sequence( Returns: (collections.defaultdict , int) """ + src_path = os.path.normpath(src_path.replace("\\", "/")) def hash_path_exist(myPath): res = myPath.replace('#', '*') glob_search_results = glob.glob(res) if len(glob_search_results) > 0: return True - else: - return False + return False if not hash_path_exist(src_path): msg = "{} doesn't exist for {}".format(src_path, @@ -307,6 +312,7 @@ def process_sequence( else: delivery_path = anatomy_filled["delivery"][template_name] + delivery_path = os.path.normpath(delivery_path.replace("\\", "/")) delivery_folder = 
os.path.dirname(delivery_path) dst_head, dst_tail = delivery_path.split(frame_indicator) dst_padding = src_collection.padding From 6eba64e409fed50348b35e56190d476efbd38bef Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 24 Nov 2021 15:29:45 +0100 Subject: [PATCH 174/211] use different bin image --- .../project_manager/images/bin.png | Bin 6231 -> 149 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/openpype/tools/project_manager/project_manager/images/bin.png b/openpype/tools/project_manager/project_manager/images/bin.png index 6bc4046ca48386345ee6d8836237571ae59dcfce..9e5bc7a943d37cc5d6ab5c6b2624461a1b5963c4 100644 GIT binary patch literal 149 zcmeAS@N?(olHy`uVBq!ia0vp^1|ZDA0wn)(8}b0D7*7|+kch)?FC64Opuod&@O5+2 z%416pcFQej^f<_;Ao%uIy>g_IRd87Sjy>i7gfBL;Z{Yjke)~Q%P%j7+a0bMDKFU9X t+rWOpm1WOoPZCk|`C7W?RMKac&AeYDWt-j~`LP|O(9_k=Wt~$(69A9nHMsx) literal 6231 zcmYkBdpOho`^R^{9I9a$&0)^um_pRdYEp!ch(zS96mpzHvmA1qP$@aBmGsG}Pzagi z^l=zTlG8$&jOJ{Pzc=51et&H5*R}h4Jn#E{zV6rS+O;M9V`+Mj|1duY1Uh*B9R4B* z1O}F15HAn#a0-0o1w8ly&N&2uKmv%p7Z~(3R~!Tq=R1$b*%ET+@?ce$25RG@Kc7>` zy7or@Cf!fn${o&|{PwQf;zJd=%Wf$LMc-ac`X%HpqV}G2xXH)%_&Nv_U_mapg3~G)( za=Ho{;T=9Sfo=C(GFysH5u}Q$r+X5z$~AVHhhIgFZjDzNL!&KEcZCIa1wLwtzGm~w zu)h*4`}@B_v0YhzeBAX_#iPj52hosDRMdE#)FTY!d zy9RGO+EL~2J6qd5fhG0THY{6NC+eL;7GI?%?SRfsr>I{w-%*eE31VGekoox9<8LO` zT=s|L+|RErzCoTH1%-mQ(pJ~!qy4fkE_$otb3VE3cE4GNJ71bmhK8I!SB1&~inuyZ zaq-`RS3by_4?TUdP!$(?7ClhiOJcisi?2ApqJ5xb-+YIeiR@o{<%1?VN0qD7uWTt@ zdiZThqIk|>G(@E%*+I!+E9MA)W|D)QHu!D!fKvBaEh_b4$FPImQR_rl2(IYo0y3(T2@HEZfbSCM!jEQ<29N^-n<-NahN>znS; zoF=hGbn>SzV??Ed>w*APr7QQko1VJ{5hoI#?Q7B1YI??0E0L;wZ0z$*rOR6}D*VU7 za-MXT{*Tg7^6EdKqN)-oON3@+(bl*OMpp-X?G)D^Zh!v{lh5D<2d)47OfiURKG_|YsUW|AtfJ{ z`#!X+k8?NOXS`vE<9hG*8NH)8_e=X97Ul3AP64(?&CDp}31u<6Me75V5T(Vh^Y`-B zC$;2h3q%m7_kMpxR$@8vTUK%=?1F%^+|f

!VCp7pvvqgQ^84!#a){p3AY+QffO@ndAi8~5YS+WYd?#ppcpYGw7G>QHG-@3rINtUQUgtH*Sz&pRQ!WitmtNWY7 zPbhGr4LO|b90cb(iY$v9yk;?}wM?J|8Kmr*VZ_-+D&IM0d^r-G^-n|?Kc2c+<6~!T zY{j1~`gnmiptNoGtkdBDXT9E`B}dwjfg8N$(#a{l-5vEuF{En~=Kr+8L2{S2kDdFS3E`kL`{b9=sh zOgpRk7B(m&u4+u9?Dwobt!n}LKRfdAN_H?x;HCGoX(-V6YbLZu?K^pq(eZA8$ zeHhuppv|fu0lh)872ZboV|y&W`qH8w1sD~u3}3|+tDMIIWVoVJBf9R}5}t2bIW~7( z>t~ec&^s$Wd+W^k)~hTXc$ASW5SF*ZO%_7j?~y zZ@MTp*X791DCMD$(!C%{KrQTHz@0~|EA(TExdjIUiDhw1P>(@H^P1$$H!@|93@)u9 zD||7YUf;0DDW~5}zeK1-<5f;?;);DhMo%8RV^}?6r*7PdxWPC5>KXch`H4pSvij1S zi^mhMw>q*LAF-nnnc4K+hXF*{c21da&tG9oCkxhxep3E=O!H^BR^PD#o$3K9@m6hZ z+_9hw>+*`HOk!(>2?9pXNEas6Nco*lq*GkThd|9)(cNX$ujFs4J|qK+8j^s zPpxBi-pzmBp1)EL*)Z&n$&KdQC^K1=dwkq8X*Yf8J#=ajPsC0DqE(3`s4Y%4RvxQIzr}uk=i(w5volv1b8s*G3<-o`!5)pJ zuZp@J4Zve^`wq2sCZ<`%q{)F1tnn6MVF(3as zwpfNKpep%36aGVhfa()!c&j2D@b7}yj$QB3*D`EG?_jpjRO%|kQqIsBWSGpQJ$y|m zCe3=AR1ovArwi>}g-kb){C&LFT7ls+sd38ijm~SCHy1j4O8ysWVrl~+k^>((?faEf z<`cSl?;uB=R^8Pr`g=6K!Pqndggx=;%K9L#&0k1ri6gljBKN23zbJRtkMEe5KG_@> zkM{4nvaVA;AZPTCOBQ?k2n~s;?rplUTdGbc8(U?N2FROt|9~RSE<=gj9}K&|IV+TE zuQeMo}mGDbLUGR*FUV?ztu?;j}P^wg;@?99g#lBJG69TO^sgd$tKc%kpBK`kZOI za)vvHfGw>3kAPyd?Mb2+KR?~RQYyWiHF}LYskq1S0vv8Xog8VFiYxQcVp!wbTu5nh zvTZ1TC?#n2`34vXbs91O+HN3qjm19)EEW81PkUf&`g6} zl(=QxF`g5s;M6}b7NkZ~XzB-7jUf^t)t5k%lkLimMDADFP9m4>XVb;Ebpwj&@5t5Q zR?=Pr=8!UT?pi9{-cQM}jNdRhPFtU4v0r{*nCr}5*KBF3{Ne=@g;si5Wm&l;ND8R& zG1%$x!9Ww4HyE?UC9oPZBp^fFE~BNH;sRJ1NSa;T&t{uBev(pvW`uP}8~?VV$~+7E&Dr`J-Q?S#25|Bb;yx2YRod$5x|P)h{(q|mk|Y>O*tDYaTi z46+4;GSH>^q@v+*?%3!BXxTo%?pvu@|&{#4xMX7|Q2vde@WR z7%-3n@ESeF;e}cL9|ASdzT4)2Ci{9r3Hc3XFP0eP=%7VIT-%|G3^H?Z{9nD zYYaP2MgDNNXi2zx}G7No#bfjGRvD%9a=IkdZKbbbTyFG^z}>`!?%|OXZ&-! 
zwYis3XBIoX9cfDm;Cv(~zo%275d#dui|X5Y&Dj;@RBb;sf(tn#z*miD0v8?xMLFOXgo8|0E_XX<WRV!|&4&ph_kL~Mgn21O(lW%^GVsg?QM};Tz0U6<&q39|s7|>m z&dx+&fa~mXrp!f0%^3EEQtM0OvzGyzc7VA`U?r|XD$bV3j$7gdcl0|RXAx~E?$n`e z(k3Jg+MWi09x*z(wLMN>C~@#a;h4A; z$&W>(ZG+oz9;U&c54-0TKt}QCR7hGQ$*-T!k8=Pb340*b=WN_M@Hf&JpmvG*5$+Th zM<5>zlm^S(kCod@0L?%k7#FndhLn`z- zjl2*gSI`V8D3i=+eD|mk=v<^w4 zAb#%W{8rE|uJ;vA0(aMN=1BU`eL3;++io}0Zh`beu?%a{WS@uW3uL-(Z4C+-E>{ft zw9-(;Cip&Ba)?{Wf+X+j)kFn`q1AtsBijEp2P*NtX&xpCIU#KtijM%p(S-0$k;T{c zwjk`Q2NAIccVl*}mVyy%~v0@hmne09o^e{wd&sD1dMFQ0S!h^e*E*()0d19cfK^RQE* z0jJ2U?@|Hv@r)29hT@SJ|E;emuG-I^=3zG>k~!^Z=d{q17exc_owns_CBNq*A?suW zYkLfHS&7RKBQy37I9pis?n2$FJPA1~Y>sILx4EQCWtd;0BkDtWfI69;Gy3hew3Jae zfrrs;G>Axs6&Leo8XM%_l*i0oWfEn((j`Mdofh?MfpFriP>4i z=jyh-NkCdl9Cq3sk9~oBqt-Nw0r1p8jqS4>X+{eRb_&UM?@>1XF~O%us^$(x7^L%ld=Dag|xHa~F}e1je=={S%SyOw-_Vo&;-I9~DOs zszLQH5%qs?JbupSme0(OtSJ$$as!*uRHO3&vzHD3x>QqXL$-mm&FkDgi~u0hD;agH zF5rOwr!xufiBN(Pf$eOAI#b=&&#}IuJh8M{T#P;DF@*R$kr)OcnhMsZhu^2Vv3zIr z`7btvz?+KmJ3)<|Nkn}xQCYZt1G0y{$OcfA{fsxN%YX~Hft1>5=C~(&(+dCthtr1O z1Vu)eXdj#d{R4>^{6TvBO1#)%gRCRWK75A#dRm_}?n9MXe46>QT4c|b3BVTg-<0fa zk~^reIEmOk`yPcj$>A>K-fMm<%i)jjbA6C=SLJFu zCx@JMD~c-P-V&D|n8iI!x!8EtBuzObIWrpCrI2!7%ayDp!Zunc)Qf2STAAS|jFa`&@22~jD-j-cJ=JQbedj?#LISRBFI zibT_M11swVP8__zdI(=k*IBVyjY_W**)?Oub^RkK+LGY*azwoVVhNc3g0ZMXbpgIx za`Ra#iv|me{yt&e=$icTnAa*)ZpAZotRdn9GI&$7X@(JFN&R%7Vj1PN5Cf1jNB$26 zlSxFANU+4@*qS<1;m8FU0JT$LR*5$Ko9`&_!Jw^sIQ5!Vft}w~<-D}Q$#`KIG6EkQmKvKEXPtU)6?`_L zi!KY@0_`5o4eS@ebhol#x&QQbyOPz2)SYGcH;}Pn+4| zQLbU;)wj0>tTT#+hLwV^s}_p}xF5LKUDPhYj}6RQRH9gddfL{*LFH7-cj1sWl>;vZUgcp*DYAlt`Y+lxzO;665G z<`(tpM6=FweTp!u9EY5Hs90I=wUF~qQb&qi-9yD6M^O%Qe8J0Jwly4u&JRj)1M`n7 z$2-+-b3Wbh$HqPBzSb`TnUi(VFMlUEP8-M0!tZkQiJydKWJTt93t;pQFN@ha%{LM% z;+`lEdxZB-=$C$ZvL3>;xR3~0&jV|HO&qH?t0ikq(B(aknzYZ+wb!1 z8$c4Cl6&jAzt9nK2h6U{*f#iUjo{K}0rjY$85C}1LfOztJCB9nGQ2Ve>Cm-9-H>SD z4Et`uC)7&yTFqlmnegu9UTaWK@TzUUacq6}#yc<~BJZ*h@xt(K`byB|nBDO&H>3`j kbY71C->;?AhNWnpo012IEfgYwe+oe7O)T+cr?18QALaeg*8l(j From 
c74de7864856c3f07e53693b5908e38442ca52c6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 24 Nov 2021 18:02:45 +0100 Subject: [PATCH 175/211] fixing representation duplicity --- .../nuke/plugins/publish/extract_review_data_mov.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py index 37e98c0d35..f7d0102b42 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py @@ -116,9 +116,9 @@ class ExtractReviewDataMov(openpype.api.Extractor): self.log.info(data["representations"]) - # assign to representations - instance.data["representations"] += data["representations"] + # assign to representations + instance.data["representations"] += data["representations"] - self.log.debug( - "_ representations: {}".format( - instance.data["representations"])) + self.log.debug( + "_ representations: {}".format( + instance.data["representations"])) From 005919f45c7f454f422800c47c12497e7aaa1dc4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 24 Nov 2021 18:04:26 +0100 Subject: [PATCH 176/211] fixing burnin missing key --- openpype/plugins/publish/extract_review.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index e500bb361d..ba6ef17072 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -203,11 +203,12 @@ class ExtractReview(pyblish.api.InstancePlugin): new_repre["burnins"].append(str(burnin)) self.log.debug( - "Linked burnins: `{}`".format(new_repre["burnins"]) + "Linked burnins: `{}`".format(new_repre.get("burnins")) ) self.log.debug( - "New representation tags: `{}`".format(new_repre["tags"]) + "New representation tags: `{}`".format( + new_repre.get("tags")) ) 
temp_data = self.prepare_temp_data( From 2acf100f152442823154d5bcfd53a795a0f8e82f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 24 Nov 2021 21:09:31 +0100 Subject: [PATCH 177/211] removed unused TasksModel and TasksProxyModel --- openpype/tools/utils/models.py | 160 --------------------------------- 1 file changed, 160 deletions(-) diff --git a/openpype/tools/utils/models.py b/openpype/tools/utils/models.py index c488743f36..819eda00b4 100644 --- a/openpype/tools/utils/models.py +++ b/openpype/tools/utils/models.py @@ -654,163 +654,3 @@ class ProjectSortFilterProxy(QtCore.QSortFilterProxyModel): def set_filter_enabled(self, value): self._filter_enabled = value self.invalidateFilter() - - -class TasksModel(QtGui.QStandardItemModel): - """A model listing the tasks combined for a list of assets""" - def __init__(self, dbcon, parent=None): - super(TasksModel, self).__init__(parent=parent) - self.dbcon = dbcon - self._default_icon = qtawesome.icon( - "fa.male", - color=style.colors.default - ) - self._no_tasks_icon = qtawesome.icon( - "fa.exclamation-circle", - color=style.colors.mid - ) - self._cached_icons = {} - self._project_task_types = {} - - self._last_asset_id = None - - self.refresh() - - def refresh(self): - if self.dbcon.Session.get("AVALON_PROJECT"): - self._refresh_task_types() - self.set_asset_id(self._last_asset_id) - else: - self.clear() - - def _refresh_task_types(self): - # Get the project configured icons from database - project = self.dbcon.find_one( - {"type": "project"}, - {"config.tasks"} - ) - tasks = project["config"].get("tasks") or {} - self._project_task_types = tasks - - def _try_get_awesome_icon(self, icon_name): - icon = None - if icon_name: - try: - icon = qtawesome.icon( - "fa.{}".format(icon_name), - color=style.colors.default - ) - - except Exception: - pass - return icon - - def headerData(self, section, orientation, role): - # Show nice labels in the header - if ( - role == QtCore.Qt.DisplayRole - and orientation == 
QtCore.Qt.Horizontal - ): - if section == 0: - return "Tasks" - - return super(TasksModel, self).headerData(section, orientation, role) - - def _get_icon(self, task_icon, task_type_icon): - if task_icon in self._cached_icons: - return self._cached_icons[task_icon] - - icon = self._try_get_awesome_icon(task_icon) - if icon is not None: - self._cached_icons[task_icon] = icon - return icon - - if task_type_icon in self._cached_icons: - icon = self._cached_icons[task_type_icon] - self._cached_icons[task_icon] = icon - return icon - - icon = self._try_get_awesome_icon(task_type_icon) - if icon is None: - icon = self._default_icon - - self._cached_icons[task_icon] = icon - self._cached_icons[task_type_icon] = icon - - return icon - - def set_asset_id(self, asset_id): - asset_doc = None - if asset_id: - asset_doc = self.dbcon.find_one( - {"_id": asset_id}, - {"data.tasks": True} - ) - self.set_asset(asset_doc) - - def set_asset(self, asset_doc): - """Set assets to track by their database id - - Arguments: - asset_doc (dict): Asset document from MongoDB. 
- """ - self.clear() - - if not asset_doc: - self._last_asset_id = None - return - - self._last_asset_id = asset_doc["_id"] - - asset_tasks = asset_doc.get("data", {}).get("tasks") or {} - items = [] - for task_name, task_info in asset_tasks.items(): - task_icon = task_info.get("icon") - task_type = task_info.get("type") - task_order = task_info.get("order") - task_type_info = self._project_task_types.get(task_type) or {} - task_type_icon = task_type_info.get("icon") - icon = self._get_icon(task_icon, task_type_icon) - - label = "{} ({})".format(task_name, task_type or "type N/A") - item = QtGui.QStandardItem(label) - item.setData(task_name, TASK_NAME_ROLE) - item.setData(task_type, TASK_TYPE_ROLE) - item.setData(task_order, TASK_ORDER_ROLE) - item.setData(icon, QtCore.Qt.DecorationRole) - item.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable) - items.append(item) - - if not items: - item = QtGui.QStandardItem("No task") - item.setData(self._no_tasks_icon, QtCore.Qt.DecorationRole) - item.setFlags(QtCore.Qt.NoItemFlags) - items.append(item) - - self.invisibleRootItem().appendRows(items) - - -class TasksProxyModel(QtCore.QSortFilterProxyModel): - def lessThan(self, x_index, y_index): - x_order = x_index.data(TASK_ORDER_ROLE) - y_order = y_index.data(TASK_ORDER_ROLE) - if x_order is not None and y_order is not None: - if x_order < y_order: - return True - if x_order > y_order: - return False - - elif x_order is None and y_order is not None: - return True - - elif y_order is None and x_order is not None: - return False - - x_name = x_index.data(QtCore.Qt.DisplayRole) - y_name = y_index.data(QtCore.Qt.DisplayRole) - if x_name == y_name: - return True - - if x_name == tuple(sorted((x_name, y_name)))[0]: - return True - return False From 3ac669d9a54da86151e8ac13f72ac49f93740f64 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 24 Nov 2021 21:10:14 +0100 Subject: [PATCH 178/211] moved task roles to tasks_widget as it's only place where are used --- 
openpype/tools/utils/constants.py | 4 ---- openpype/tools/utils/models.py | 5 +---- openpype/tools/utils/tasks_widget.py | 10 +++++----- 3 files changed, 6 insertions(+), 13 deletions(-) diff --git a/openpype/tools/utils/constants.py b/openpype/tools/utils/constants.py index 33bdf43c08..8f12c57321 100644 --- a/openpype/tools/utils/constants.py +++ b/openpype/tools/utils/constants.py @@ -5,10 +5,6 @@ DEFAULT_PROJECT_LABEL = "< Default >" PROJECT_NAME_ROLE = QtCore.Qt.UserRole + 101 PROJECT_IS_ACTIVE_ROLE = QtCore.Qt.UserRole + 102 -TASK_NAME_ROLE = QtCore.Qt.UserRole + 301 -TASK_TYPE_ROLE = QtCore.Qt.UserRole + 302 -TASK_ORDER_ROLE = QtCore.Qt.UserRole + 303 - LOCAL_PROVIDER_ROLE = QtCore.Qt.UserRole + 500 # provider of active site REMOTE_PROVIDER_ROLE = QtCore.Qt.UserRole + 501 # provider of remote site LOCAL_PROGRESS_ROLE = QtCore.Qt.UserRole + 502 # percentage downld on active diff --git a/openpype/tools/utils/models.py b/openpype/tools/utils/models.py index 819eda00b4..ffcdc7a820 100644 --- a/openpype/tools/utils/models.py +++ b/openpype/tools/utils/models.py @@ -11,10 +11,7 @@ from . 
import lib from .constants import ( PROJECT_IS_ACTIVE_ROLE, PROJECT_NAME_ROLE, - DEFAULT_PROJECT_LABEL, - TASK_ORDER_ROLE, - TASK_TYPE_ROLE, - TASK_NAME_ROLE + DEFAULT_PROJECT_LABEL ) log = logging.getLogger(__name__) diff --git a/openpype/tools/utils/tasks_widget.py b/openpype/tools/utils/tasks_widget.py index 513402b455..419e77c780 100644 --- a/openpype/tools/utils/tasks_widget.py +++ b/openpype/tools/utils/tasks_widget.py @@ -4,11 +4,11 @@ from avalon import style from avalon.vendor import qtawesome from .views import DeselectableTreeView -from .constants import ( - TASK_ORDER_ROLE, - TASK_TYPE_ROLE, - TASK_NAME_ROLE -) + + +TASK_NAME_ROLE = QtCore.Qt.UserRole + 1 +TASK_TYPE_ROLE = QtCore.Qt.UserRole + 2 +TASK_ORDER_ROLE = QtCore.Qt.UserRole + 3 class TasksModel(QtGui.QStandardItemModel): From 145e335a48a75b29bc3bf2c494dff67d4607f89c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 24 Nov 2021 21:33:22 +0100 Subject: [PATCH 179/211] added double clicked signal emitting --- openpype/tools/utils/assets_widget.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/tools/utils/assets_widget.py b/openpype/tools/utils/assets_widget.py index 9ebb62456f..041bb1ef1c 100644 --- a/openpype/tools/utils/assets_widget.py +++ b/openpype/tools/utils/assets_widget.py @@ -564,6 +564,8 @@ class AssetsWidget(QtWidgets.QWidget): refreshed = QtCore.Signal() # on view selection change selection_changed = QtCore.Signal() + # It was double clicked on view + double_clicked = QtCore.Signal() def __init__(self, dbcon, parent=None): super(AssetsWidget, self).__init__(parent=parent) @@ -618,6 +620,7 @@ class AssetsWidget(QtWidgets.QWidget): refresh_btn.clicked.connect(self.refresh) current_asset_btn.clicked.connect(self.set_current_session_asset) model.refreshed.connect(self._on_model_refresh) + view.doubleClicked.connect(self.double_clicked) self._current_asset_btn = current_asset_btn self._model = model From 85ae741d3d790cd826e7d0381caca01e62ba215b Mon Sep 17 00:00:00 2001 
From: iLLiCiTiT Date: Wed, 24 Nov 2021 21:38:44 +0100 Subject: [PATCH 180/211] houdini usd has it's own dialog using new assets widget --- openpype/hosts/houdini/api/usd.py | 93 ++++++++++++++++++++----------- 1 file changed, 60 insertions(+), 33 deletions(-) diff --git a/openpype/hosts/houdini/api/usd.py b/openpype/hosts/houdini/api/usd.py index 6f808779ea..a992f1d082 100644 --- a/openpype/hosts/houdini/api/usd.py +++ b/openpype/hosts/houdini/api/usd.py @@ -3,9 +3,10 @@ import contextlib import logging -from Qt import QtCore, QtGui -from openpype.tools.utils.widgets import AssetWidget -from avalon import style, io +from Qt import QtWidgets, QtCore, QtGui +from avalon import io +from openpype import style +from openpype.tools.utils.assets_widget import SingleSelectAssetsWidget from pxr import Sdf @@ -13,6 +14,60 @@ from pxr import Sdf log = logging.getLogger(__name__) +class SelectAssetDialog(QtWidgets.QWidget): + """Frameless assets dialog to select asset with double click. + + Args: + parm: Parameter where selected asset name is set. 
+ """ + def __init__(self, parm): + self.setWindowTitle("Pick Asset") + self.setWindowFlags(QtCore.Qt.FramelessWindowHint | QtCore.Qt.Popup) + + assets_widget = SingleSelectAssetsWidget(io, parent=self) + + layout = QtWidgets.QHBoxLayout(self) + layout.addWidget(assets_widget) + + assets_widget.double_clicked.connect(self._set_parameter) + self._assets_widget = assets_widget + self._parm = parm + + def _set_parameter(self): + name = self._assets_widget.get_selected_asset_name() + self._parm.set(name) + self.close() + + def _on_show(self): + pos = QtGui.QCursor.pos() + # Select the current asset if there is any + select_id = None + name = self._parm.eval() + if name: + db_asset = io.find_one( + {"name": name, "type": "asset"}, + {"_id": True} + ) + if db_asset: + select_id = db_asset["_id"] + + # Set stylesheet + self.setStyleSheet(style.load_stylesheet()) + # Refresh assets (is threaded) + self._assets_widget.refresh() + # Select asset - must be done after refresh + if select_id is not None: + self._assets_widget.select_asset(select_id) + + # Show cursor (top right of window) near cursor + self.resize(250, 400) + self.move(self.mapFromGlobal(pos) - QtCore.QPoint(self.width(), 0)) + + def showEvent(self, event): + super(SelectAssetDialog, self).showEvent(event) + self._on_show() + + def pick_asset(node): """Show a user interface to select an Asset in the project @@ -21,43 +76,15 @@ def pick_asset(node): """ - pos = QtGui.QCursor.pos() - parm = node.parm("asset_name") if not parm: log.error("Node has no 'asset' parameter: %s", node) return - # Construct the AssetWidget as a frameless popup so it automatically + # Construct a frameless popup so it automatically # closes when clicked outside of it. 
global tool - tool = AssetWidget(io) - tool.setContentsMargins(5, 5, 5, 5) - tool.setWindowTitle("Pick Asset") - tool.setStyleSheet(style.load_stylesheet()) - tool.setWindowFlags(QtCore.Qt.FramelessWindowHint | QtCore.Qt.Popup) - tool.refresh() - - # Select the current asset if there is any - name = parm.eval() - if name: - db_asset = io.find_one({"name": name, "type": "asset"}) - if db_asset: - silo = db_asset.get("silo") - if silo: - tool.set_silo(silo) - tool.select_assets([name], expand=True) - - # Show cursor (top right of window) near cursor - tool.resize(250, 400) - tool.move(tool.mapFromGlobal(pos) - QtCore.QPoint(tool.width(), 0)) - - def set_parameter_callback(index): - name = index.data(tool.model.DocumentRole)["name"] - parm.set(name) - tool.close() - - tool.view.doubleClicked.connect(set_parameter_callback) + tool = SelectAssetDialog(parm) tool.show() From ba5fe7674656a4a70dc24d679ef5eb469e2311c0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 24 Nov 2021 21:40:05 +0100 Subject: [PATCH 181/211] removed unused AssetWidget --- openpype/tools/utils/widgets.py | 287 -------------------------------- 1 file changed, 287 deletions(-) diff --git a/openpype/tools/utils/widgets.py b/openpype/tools/utils/widgets.py index 493255071d..8346db211b 100644 --- a/openpype/tools/utils/widgets.py +++ b/openpype/tools/utils/widgets.py @@ -38,293 +38,6 @@ class PlaceholderLineEdit(QtWidgets.QLineEdit): self.setPalette(filter_palette) -class AssetWidget(QtWidgets.QWidget): - """A Widget to display a tree of assets with filter - - To list the assets of the active project: - >>> # widget = AssetWidget() - >>> # widget.refresh() - >>> # widget.show() - - """ - - refresh_triggered = QtCore.Signal() # on model refresh - refreshed = QtCore.Signal() - selection_changed = QtCore.Signal() # on view selection change - current_changed = QtCore.Signal() # on view current index change - - def __init__(self, dbcon, multiselection=False, parent=None): - super(AssetWidget, 
self).__init__(parent=parent) - - self.dbcon = dbcon - - # Tree View - model = AssetModel(dbcon=self.dbcon, parent=self) - proxy = RecursiveSortFilterProxyModel() - proxy.setSourceModel(model) - proxy.setFilterCaseSensitivity(QtCore.Qt.CaseInsensitive) - - view = AssetsView(self) - view.setModel(proxy) - if multiselection: - asset_delegate = AssetDelegate() - view.setSelectionMode(view.ExtendedSelection) - view.setItemDelegate(asset_delegate) - - icon = qtawesome.icon("fa.arrow-down", color=style.colors.light) - set_current_asset_btn = QtWidgets.QPushButton(icon, "") - set_current_asset_btn.setToolTip("Go to Asset from current Session") - # Hide by default - set_current_asset_btn.setVisible(False) - - icon = qtawesome.icon("fa.refresh", color=style.colors.light) - refresh = QtWidgets.QPushButton(icon, "", parent=self) - refresh.setToolTip("Refresh items") - - filter_input = QtWidgets.QLineEdit(self) - filter_input.setPlaceholderText("Filter assets..") - - # Header - header_layout = QtWidgets.QHBoxLayout() - header_layout.addWidget(filter_input) - header_layout.addWidget(set_current_asset_btn) - header_layout.addWidget(refresh) - - # Layout - layout = QtWidgets.QVBoxLayout(self) - layout.setContentsMargins(0, 0, 0, 0) - layout.setSpacing(4) - layout.addLayout(header_layout) - layout.addWidget(view) - - # Signals/Slots - filter_input.textChanged.connect(proxy.setFilterFixedString) - - selection = view.selectionModel() - selection.selectionChanged.connect(self.selection_changed) - selection.currentChanged.connect(self.current_changed) - refresh.clicked.connect(self.refresh) - set_current_asset_btn.clicked.connect(self.set_current_session_asset) - - self.set_current_asset_btn = set_current_asset_btn - self.model = model - self.proxy = proxy - self.view = view - - self.model_selection = {} - - def set_current_asset_btn_visibility(self, visible=None): - """Hide set current asset button. - - Not all tools support using of current context asset. 
- """ - if visible is None: - visible = not self.set_current_asset_btn.isVisible() - self.set_current_asset_btn.setVisible(visible) - - def _refresh_model(self): - # Store selection - self._store_model_selection() - time_start = time.time() - - self.set_loading_state( - loading=True, - empty=True - ) - - def on_refreshed(has_item): - self.set_loading_state(loading=False, empty=not has_item) - self._restore_model_selection() - self.model.refreshed.disconnect() - self.refreshed.emit() - print("Duration: %.3fs" % (time.time() - time_start)) - - # Connect to signal - self.model.refreshed.connect(on_refreshed) - # Trigger signal before refresh is called - self.refresh_triggered.emit() - # Refresh model - self.model.refresh() - - def refresh(self): - self._refresh_model() - - def get_active_asset(self): - """Return the asset item of the current selection.""" - current = self.view.currentIndex() - return current.data(self.model.ItemRole) - - def get_active_asset_document(self): - """Return the asset document of the current selection.""" - current = self.view.currentIndex() - return current.data(self.model.DocumentRole) - - def get_active_index(self): - return self.view.currentIndex() - - def get_selected_assets(self): - """Return the documents of selected assets.""" - selection = self.view.selectionModel() - rows = selection.selectedRows() - assets = [row.data(self.model.DocumentRole) for row in rows] - - # NOTE: skip None object assumed they are silo (backwards comp.) - return [asset for asset in assets if asset] - - def select_assets(self, assets, expand=True, key="name"): - """Select assets by item key. - - Args: - assets (list): List of asset values that can be found under - specified `key` - expand (bool): Whether to also expand to the asset in the view - key (string): Key that specifies where to look for `assets` values - - Returns: - None - - Default `key` is "name" in that case `assets` should contain single - asset name or list of asset names. 
(It is good idea to use "_id" key - instead of name in that case `assets` must contain `ObjectId` object/s) - It is expected that each value in `assets` will be found only once. - If the filters according to the `key` and `assets` correspond to - the more asset, only the first found will be selected. - - """ - - if not isinstance(assets, (tuple, list)): - assets = [assets] - - # convert to list - tuple cant be modified - assets = set(assets) - - # Clear selection - selection_model = self.view.selectionModel() - selection_model.clearSelection() - - # Select - mode = selection_model.Select | selection_model.Rows - for index in lib.iter_model_rows( - self.proxy, column=0, include_root=False - ): - # stop iteration if there are no assets to process - if not assets: - break - - value = index.data(self.model.ItemRole).get(key) - if value not in assets: - continue - - # Remove processed asset - assets.discard(value) - - selection_model.select(index, mode) - if expand: - # Expand parent index - self.view.expand(self.proxy.parent(index)) - - # Set the currently active index - self.view.setCurrentIndex(index) - - def set_loading_state(self, loading, empty): - if self.view.is_loading != loading: - if loading: - self.view.spinner.repaintNeeded.connect( - self.view.viewport().update - ) - else: - self.view.spinner.repaintNeeded.disconnect() - - self.view.is_loading = loading - self.view.is_empty = empty - - def _store_model_selection(self): - index = self.view.currentIndex() - current = None - if index and index.isValid(): - current = index.data(self.model.ObjectIdRole) - - expanded = set() - model = self.view.model() - for index in lib.iter_model_rows( - model, column=0, include_root=False - ): - if self.view.isExpanded(index): - value = index.data(self.model.ObjectIdRole) - expanded.add(value) - - selection_model = self.view.selectionModel() - - selected = None - selected_rows = selection_model.selectedRows() - if selected_rows: - selected = set( - 
row.data(self.model.ObjectIdRole) - for row in selected_rows - ) - - self.model_selection = { - "expanded": expanded, - "selected": selected, - "current": current - } - - def _restore_model_selection(self): - model = self.view.model() - not_set = object() - expanded = self.model_selection.pop("expanded", not_set) - selected = self.model_selection.pop("selected", not_set) - current = self.model_selection.pop("current", not_set) - - if ( - expanded is not_set - or selected is not_set - or current is not_set - ): - return - - if expanded: - for index in lib.iter_model_rows( - model, column=0, include_root=False - ): - is_expanded = index.data(self.model.ObjectIdRole) in expanded - self.view.setExpanded(index, is_expanded) - - if not selected and not current: - self.set_current_session_asset() - return - - current_index = None - selected_indexes = [] - # Go through all indices, select the ones with similar data - for index in lib.iter_model_rows( - model, column=0, include_root=False - ): - object_id = index.data(self.model.ObjectIdRole) - if object_id in selected: - selected_indexes.append(index) - - if not current_index and object_id == current: - current_index = index - - if current_index: - self.view.setCurrentIndex(current_index) - - if not selected_indexes: - return - selection_model = self.view.selectionModel() - flags = selection_model.Select | selection_model.Rows - for index in selected_indexes: - # Ensure item is visible - self.view.scrollTo(index) - selection_model.select(index, flags) - - def set_current_session_asset(self): - asset_name = self.dbcon.Session.get("AVALON_ASSET") - if asset_name: - self.select_assets([asset_name]) - - class OptionalMenu(QtWidgets.QMenu): """A subclass of `QtWidgets.QMenu` to work with `OptionalAction` From e6a396ba3f5d4f61b4f98aadefeed32bae051508 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 24 Nov 2021 21:46:14 +0100 Subject: [PATCH 182/211] removed unused asset related classes --- openpype/tools/utils/delegates.py | 
172 +------------------ openpype/tools/utils/models.py | 277 ------------------------------ openpype/tools/utils/views.py | 23 --- openpype/tools/utils/widgets.py | 4 +- 4 files changed, 2 insertions(+), 474 deletions(-) diff --git a/openpype/tools/utils/delegates.py b/openpype/tools/utils/delegates.py index 96353c44c6..1caed732d8 100644 --- a/openpype/tools/utils/delegates.py +++ b/openpype/tools/utils/delegates.py @@ -8,10 +8,7 @@ from Qt import QtWidgets, QtGui, QtCore from avalon.lib import HeroVersionType from openpype.style import get_objected_colors -from .models import ( - AssetModel, - TreeModel -) +from .models import TreeModel from . import lib if Qt.__binding__ == "PySide": @@ -22,173 +19,6 @@ elif Qt.__binding__ == "PyQt4": log = logging.getLogger(__name__) -class AssetDelegate(QtWidgets.QItemDelegate): - bar_height = 3 - - def __init__(self, *args, **kwargs): - super(AssetDelegate, self).__init__(*args, **kwargs) - asset_view_colors = get_objected_colors()["loader"]["asset-view"] - self._selected_color = ( - asset_view_colors["selected"].get_qcolor() - ) - self._hover_color = ( - asset_view_colors["hover"].get_qcolor() - ) - self._selected_hover_color = ( - asset_view_colors["selected-hover"].get_qcolor() - ) - - def sizeHint(self, option, index): - result = super(AssetDelegate, self).sizeHint(option, index) - height = result.height() - result.setHeight(height + self.bar_height) - - return result - - def paint(self, painter, option, index): - # Qt4 compat - if Qt.__binding__ in ("PySide", "PyQt4"): - option = QStyleOptionViewItemV4(option) - - painter.save() - - item_rect = QtCore.QRect(option.rect) - item_rect.setHeight(option.rect.height() - self.bar_height) - - subset_colors = index.data(AssetModel.subsetColorsRole) - subset_colors_width = 0 - if subset_colors: - subset_colors_width = option.rect.width() / len(subset_colors) - - subset_rects = [] - counter = 0 - for subset_c in subset_colors: - new_color = None - new_rect = None - if subset_c: - 
new_color = QtGui.QColor(*subset_c) - - new_rect = QtCore.QRect( - option.rect.left() + (counter * subset_colors_width), - option.rect.top() + ( - option.rect.height() - self.bar_height - ), - subset_colors_width, - self.bar_height - ) - subset_rects.append((new_color, new_rect)) - counter += 1 - - # Background - if option.state & QtWidgets.QStyle.State_Selected: - if len(subset_colors) == 0: - item_rect.setTop(item_rect.top() + (self.bar_height / 2)) - - if option.state & QtWidgets.QStyle.State_MouseOver: - bg_color = self._selected_hover_color - else: - bg_color = self._selected_color - else: - item_rect.setTop(item_rect.top() + (self.bar_height / 2)) - if option.state & QtWidgets.QStyle.State_MouseOver: - bg_color = self._hover_color - else: - bg_color = QtGui.QColor() - bg_color.setAlpha(0) - - # When not needed to do a rounded corners (easier and without - # painter restore): - # painter.fillRect( - # item_rect, - # QtGui.QBrush(bg_color) - # ) - pen = painter.pen() - pen.setStyle(QtCore.Qt.NoPen) - pen.setWidth(0) - painter.setPen(pen) - painter.setBrush(QtGui.QBrush(bg_color)) - painter.drawRoundedRect(option.rect, 3, 3) - - if option.state & QtWidgets.QStyle.State_Selected: - for color, subset_rect in subset_rects: - if not color or not subset_rect: - continue - painter.fillRect(subset_rect, QtGui.QBrush(color)) - - painter.restore() - painter.save() - - # Icon - icon_index = index.model().index( - index.row(), index.column(), index.parent() - ) - # - Default icon_rect if not icon - icon_rect = QtCore.QRect( - item_rect.left(), - item_rect.top(), - # To make sure it's same size all the time - option.rect.height() - self.bar_height, - option.rect.height() - self.bar_height - ) - icon = index.model().data(icon_index, QtCore.Qt.DecorationRole) - - if icon: - mode = QtGui.QIcon.Normal - if not (option.state & QtWidgets.QStyle.State_Enabled): - mode = QtGui.QIcon.Disabled - elif option.state & QtWidgets.QStyle.State_Selected: - mode = QtGui.QIcon.Selected - - if 
isinstance(icon, QtGui.QPixmap): - icon = QtGui.QIcon(icon) - option.decorationSize = icon.size() / icon.devicePixelRatio() - - elif isinstance(icon, QtGui.QColor): - pixmap = QtGui.QPixmap(option.decorationSize) - pixmap.fill(icon) - icon = QtGui.QIcon(pixmap) - - elif isinstance(icon, QtGui.QImage): - icon = QtGui.QIcon(QtGui.QPixmap.fromImage(icon)) - option.decorationSize = icon.size() / icon.devicePixelRatio() - - elif isinstance(icon, QtGui.QIcon): - state = QtGui.QIcon.Off - if option.state & QtWidgets.QStyle.State_Open: - state = QtGui.QIcon.On - actualSize = option.icon.actualSize( - option.decorationSize, mode, state - ) - option.decorationSize = QtCore.QSize( - min(option.decorationSize.width(), actualSize.width()), - min(option.decorationSize.height(), actualSize.height()) - ) - - state = QtGui.QIcon.Off - if option.state & QtWidgets.QStyle.State_Open: - state = QtGui.QIcon.On - - icon.paint( - painter, icon_rect, - QtCore.Qt.AlignLeft, mode, state - ) - - # Text - text_rect = QtCore.QRect( - icon_rect.left() + icon_rect.width() + 2, - item_rect.top(), - item_rect.width(), - item_rect.height() - ) - - painter.drawText( - text_rect, QtCore.Qt.AlignVCenter, - index.data(QtCore.Qt.DisplayRole) - ) - - painter.restore() - - class VersionDelegate(QtWidgets.QStyledItemDelegate): """A delegate that display version integer formatted as version string.""" diff --git a/openpype/tools/utils/models.py b/openpype/tools/utils/models.py index ffcdc7a820..94694483ab 100644 --- a/openpype/tools/utils/models.py +++ b/openpype/tools/utils/models.py @@ -200,283 +200,6 @@ class Item(dict): self._children.append(child) -class AssetModel(TreeModel): - """A model listing assets in the silo in the active project. - - The assets are displayed in a treeview, they are visually parented by - a `visualParent` field in the database containing an `_id` to a parent - asset. 
- - """ - - Columns = ["label"] - Name = 0 - Deprecated = 2 - ObjectId = 3 - - DocumentRole = QtCore.Qt.UserRole + 2 - ObjectIdRole = QtCore.Qt.UserRole + 3 - subsetColorsRole = QtCore.Qt.UserRole + 4 - - doc_fetched = QtCore.Signal(bool) - refreshed = QtCore.Signal(bool) - - # Asset document projection - asset_projection = { - "type": 1, - "schema": 1, - "name": 1, - "silo": 1, - "data.visualParent": 1, - "data.label": 1, - "data.tags": 1, - "data.icon": 1, - "data.color": 1, - "data.deprecated": 1 - } - - def __init__(self, dbcon=None, parent=None, asset_projection=None): - super(AssetModel, self).__init__(parent=parent) - if dbcon is None: - dbcon = io - self.dbcon = dbcon - self.asset_colors = {} - - # Projections for Mongo queries - # - let ability to modify them if used in tools that require more than - # defaults - if asset_projection: - self.asset_projection = asset_projection - - self.asset_projection = asset_projection - - self._doc_fetching_thread = None - self._doc_fetching_stop = False - self._doc_payload = {} - - self.doc_fetched.connect(self.on_doc_fetched) - - self.refresh() - - def _add_hierarchy(self, assets, parent=None, silos=None): - """Add the assets that are related to the parent as children items. - - This method does *not* query the database. These instead are queried - in a single batch upfront as an optimization to reduce database - queries. Resulting in up to 10x speed increase. 
- - Args: - assets (dict): All assets in the currently active silo stored - by key/value - - Returns: - None - - """ - # Reset colors - self.asset_colors = {} - - if silos: - # WARNING: Silo item "_id" is set to silo value - # mainly because GUI issue with perserve selection and expanded row - # and because of easier hierarchy parenting (in "assets") - for silo in silos: - item = Item({ - "_id": silo, - "name": silo, - "label": silo, - "type": "silo" - }) - self.add_child(item, parent=parent) - self._add_hierarchy(assets, parent=item) - - parent_id = parent["_id"] if parent else None - current_assets = assets.get(parent_id, list()) - - for asset in current_assets: - # get label from data, otherwise use name - data = asset.get("data", {}) - label = data.get("label", asset["name"]) - tags = data.get("tags", []) - - # store for the asset for optimization - deprecated = "deprecated" in tags - - item = Item({ - "_id": asset["_id"], - "name": asset["name"], - "label": label, - "type": asset["type"], - "tags": ", ".join(tags), - "deprecated": deprecated, - "_document": asset - }) - self.add_child(item, parent=parent) - - # Add asset's children recursively if it has children - if asset["_id"] in assets: - self._add_hierarchy(assets, parent=item) - - self.asset_colors[asset["_id"]] = [] - - def on_doc_fetched(self, was_stopped): - if was_stopped: - self.stop_fetch_thread() - return - - self.beginResetModel() - - assets_by_parent = self._doc_payload.get("assets_by_parent") - silos = self._doc_payload.get("silos") - if assets_by_parent is not None: - # Build the hierarchical tree items recursively - self._add_hierarchy( - assets_by_parent, - parent=None, - silos=silos - ) - - self.endResetModel() - - has_content = bool(assets_by_parent) or bool(silos) - self.refreshed.emit(has_content) - - self.stop_fetch_thread() - - def fetch(self): - self._doc_payload = self._fetch() or {} - # Emit doc fetched only if was not stopped - self.doc_fetched.emit(self._doc_fetching_stop) - - def 
_fetch(self): - if not self.dbcon.Session.get("AVALON_PROJECT"): - return - - project_doc = self.dbcon.find_one( - {"type": "project"}, - {"_id": True} - ) - if not project_doc: - return - - # Get all assets sorted by name - db_assets = self.dbcon.find( - {"type": "asset"}, - self.asset_projection - ).sort("name", 1) - - # Group the assets by their visual parent's id - assets_by_parent = collections.defaultdict(list) - for asset in db_assets: - if self._doc_fetching_stop: - return - parent_id = asset.get("data", {}).get("visualParent") - assets_by_parent[parent_id].append(asset) - - return { - "assets_by_parent": assets_by_parent, - "silos": None - } - - def stop_fetch_thread(self): - if self._doc_fetching_thread is not None: - self._doc_fetching_stop = True - while self._doc_fetching_thread.isRunning(): - time.sleep(0.001) - self._doc_fetching_thread = None - - def refresh(self, force=False): - """Refresh the data for the model.""" - # Skip fetch if there is already other thread fetching documents - if self._doc_fetching_thread is not None: - if not force: - return - self.stop_fetch_thread() - - # Clear model items - self.clear() - - # Fetch documents from mongo - # Restart payload - self._doc_payload = {} - self._doc_fetching_stop = False - self._doc_fetching_thread = lib.create_qthread(self.fetch) - self._doc_fetching_thread.start() - - def flags(self, index): - return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable - - def setData(self, index, value, role=QtCore.Qt.EditRole): - if not index.isValid(): - return False - - if role == self.subsetColorsRole: - asset_id = index.data(self.ObjectIdRole) - self.asset_colors[asset_id] = value - - if Qt.__binding__ in ("PyQt4", "PySide"): - self.dataChanged.emit(index, index) - else: - self.dataChanged.emit(index, index, [role]) - - return True - - return super(AssetModel, self).setData(index, value, role) - - def data(self, index, role): - if not index.isValid(): - return - - item = index.internalPointer() - if role 
== QtCore.Qt.DecorationRole: - column = index.column() - if column == self.Name: - # Allow a custom icon and custom icon color to be defined - data = item.get("_document", {}).get("data", {}) - icon = data.get("icon", None) - if icon is None and item.get("type") == "silo": - icon = "database" - color = data.get("color", style.colors.default) - - if icon is None: - # Use default icons if no custom one is specified. - # If it has children show a full folder, otherwise - # show an open folder - has_children = self.rowCount(index) > 0 - icon = "folder" if has_children else "folder-o" - - # Make the color darker when the asset is deprecated - if item.get("deprecated", False): - color = QtGui.QColor(color).darker(250) - - try: - key = "fa.{0}".format(icon) # font-awesome key - icon = qtawesome.icon(key, color=color) - return icon - except Exception as exception: - # Log an error message instead of erroring out completely - # when the icon couldn't be created (e.g. invalid name) - log.error(exception) - - return - - if role == QtCore.Qt.ForegroundRole: # font color - if "deprecated" in item.get("tags", []): - return QtGui.QColor(style.colors.light).darker(250) - - if role == self.ObjectIdRole: - return item.get("_id", None) - - if role == self.DocumentRole: - return item.get("_document", None) - - if role == self.subsetColorsRole: - asset_id = item.get("_id", None) - return self.asset_colors.get(asset_id) or [] - - return super(AssetModel, self).data(index, role) - - class RecursiveSortFilterProxyModel(QtCore.QSortFilterProxyModel): """Filters to the regex if any of the children matches allow parent""" def filterAcceptsRow(self, row, parent): diff --git a/openpype/tools/utils/views.py b/openpype/tools/utils/views.py index 89e49fe142..97aaf622a4 100644 --- a/openpype/tools/utils/views.py +++ b/openpype/tools/utils/views.py @@ -61,26 +61,3 @@ class TreeViewSpinner(QtWidgets.QTreeView): self.paint_empty(event) else: super(TreeViewSpinner, self).paintEvent(event) - - -class 
AssetsView(TreeViewSpinner, DeselectableTreeView): - """Item view. - This implements a context menu. - """ - - def __init__(self, parent=None): - super(AssetsView, self).__init__(parent) - self.setIndentation(15) - self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) - self.setHeaderHidden(True) - - def mousePressEvent(self, event): - index = self.indexAt(event.pos()) - if not index.isValid(): - modifiers = QtWidgets.QApplication.keyboardModifiers() - if modifiers == QtCore.Qt.ShiftModifier: - return - elif modifiers == QtCore.Qt.ControlModifier: - return - - super(AssetsView, self).mousePressEvent(event) diff --git a/openpype/tools/utils/widgets.py b/openpype/tools/utils/widgets.py index 8346db211b..ea0d23470c 100644 --- a/openpype/tools/utils/widgets.py +++ b/openpype/tools/utils/widgets.py @@ -9,9 +9,7 @@ from avalon.vendor import qtawesome, qargparse from avalon import style from openpype.style import get_objected_colors -from .models import AssetModel, RecursiveSortFilterProxyModel -from .views import AssetsView -from .delegates import AssetDelegate +from .models import RecursiveSortFilterProxyModel log = logging.getLogger(__name__) From e4dff2f17192c461795df397665a2bbb1f100a35 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 24 Nov 2021 21:52:02 +0100 Subject: [PATCH 183/211] removed unused imports --- openpype/tools/utils/models.py | 2 -- openpype/tools/utils/widgets.py | 2 -- 2 files changed, 4 deletions(-) diff --git a/openpype/tools/utils/models.py b/openpype/tools/utils/models.py index 94694483ab..df3eee41a2 100644 --- a/openpype/tools/utils/models.py +++ b/openpype/tools/utils/models.py @@ -1,7 +1,5 @@ import re -import time import logging -import collections import Qt from Qt import QtCore, QtGui diff --git a/openpype/tools/utils/widgets.py b/openpype/tools/utils/widgets.py index ea0d23470c..cea9f4ea3e 100644 --- a/openpype/tools/utils/widgets.py +++ b/openpype/tools/utils/widgets.py @@ -1,12 +1,10 @@ import logging -import time from . 
import lib from Qt import QtWidgets, QtCore, QtGui from avalon.vendor import qtawesome, qargparse -from avalon import style from openpype.style import get_objected_colors from .models import RecursiveSortFilterProxyModel From 7e7a9c42cb5adbf6020811c8d4117642056a4bf7 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 24 Nov 2021 21:52:26 +0100 Subject: [PATCH 184/211] use right import of qt_app_context function --- openpype/tools/assetcreator/app.py | 14 +++++--------- openpype/tools/libraryloader/app.py | 2 +- openpype/tools/loader/app.py | 2 +- 3 files changed, 7 insertions(+), 11 deletions(-) diff --git a/openpype/tools/assetcreator/app.py b/openpype/tools/assetcreator/app.py index 5c2553e81e..58697e8aa3 100644 --- a/openpype/tools/assetcreator/app.py +++ b/openpype/tools/assetcreator/app.py @@ -1,16 +1,12 @@ import os import sys -import json from subprocess import Popen -try: - import ftrack_api_old as ftrack_api -except Exception: - import ftrack_api + +import ftrack_api +from Qt import QtWidgets, QtCore from openpype.api import get_current_project_settings -from openpype import lib as pypelib -from avalon.vendor.Qt import QtWidgets, QtCore +from openpype.tools.utils.lib import qt_app_context from avalon import io, api, style, schema -from avalon.tools import lib as parentlib from . 
import widget, model module = sys.modules[__name__] @@ -630,7 +626,7 @@ def show(parent=None, debug=False, context=None): if debug is True: io.install() - with parentlib.application(): + with qt_app_context(): window = Window(parent, context) window.setStyleSheet(style.load_stylesheet()) window.show() diff --git a/openpype/tools/libraryloader/app.py b/openpype/tools/libraryloader/app.py index d0d07a316c..6ad5bfa16a 100644 --- a/openpype/tools/libraryloader/app.py +++ b/openpype/tools/libraryloader/app.py @@ -555,7 +555,7 @@ def show( import traceback sys.excepthook = lambda typ, val, tb: traceback.print_last() - with tools_lib.application(): + with tools_lib.qt_app_context(): window = LibraryLoaderWindow( parent, icon, show_projects, show_libraries ) diff --git a/openpype/tools/loader/app.py b/openpype/tools/loader/app.py index 0c7844c4fb..b6becc3e9f 100644 --- a/openpype/tools/loader/app.py +++ b/openpype/tools/loader/app.py @@ -631,7 +631,7 @@ def show(debug=False, parent=None, use_context=False): api.Session["AVALON_PROJECT"] = any_project["name"] module.project = any_project["name"] - with lib.application(): + with lib.qt_app_context(): window = LoaderWindow(parent) window.show() From 93225171f7a84907b733862e83b98975292c7d91 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 24 Nov 2021 22:27:02 +0100 Subject: [PATCH 185/211] removed icon argument from library loader --- openpype/tools/libraryloader/app.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/openpype/tools/libraryloader/app.py b/openpype/tools/libraryloader/app.py index 6ad5bfa16a..d030aa903d 100644 --- a/openpype/tools/libraryloader/app.py +++ b/openpype/tools/libraryloader/app.py @@ -31,7 +31,7 @@ class LibraryLoaderWindow(QtWidgets.QDialog): message_timeout = 5000 def __init__( - self, parent=None, icon=None, show_projects=False, show_libraries=True + self, parent=None, show_projects=False, show_libraries=True ): super(LibraryLoaderWindow, self).__init__(parent) @@ 
-517,10 +517,7 @@ class LibraryLoaderWindow(QtWidgets.QDialog): return super(LibraryLoaderWindow, self).closeEvent(event) -def show( - debug=False, parent=None, icon=None, - show_projects=False, show_libraries=True -): +def show(debug=False, parent=None, show_projects=False, show_libraries=True): """Display Loader GUI Arguments: @@ -557,7 +554,7 @@ def show( with tools_lib.qt_app_context(): window = LibraryLoaderWindow( - parent, icon, show_projects, show_libraries + parent, show_projects, show_libraries ) window.show() From 975df8b4bf638f65b25cce361cef67fa5c47c308 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 24 Nov 2021 22:31:32 +0100 Subject: [PATCH 186/211] use PlaceholderLineEdit in subsets widget --- openpype/tools/loader/widgets.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index c7138d8f72..db86d1cd4e 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -16,7 +16,10 @@ from openpype.tools.utils.delegates import ( VersionDelegate, PrettyTimeDelegate ) -from openpype.tools.utils.widgets import OptionalMenu +from openpype.tools.utils.widgets import ( + OptionalMenu, + PlaceholderLineEdit +) from openpype.tools.utils.views import ( TreeViewSpinner, DeselectableTreeView @@ -175,7 +178,7 @@ class SubsetWidget(QtWidgets.QWidget): family_proxy = FamiliesFilterProxyModel() family_proxy.setSourceModel(proxy) - subset_filter = QtWidgets.QLineEdit(self) + subset_filter = PlaceholderLineEdit(self) subset_filter.setPlaceholderText("Filter subsets..") group_checkbox = QtWidgets.QCheckBox("Enable Grouping", self) From ff3e6a8a1a687714c0212764e43f8c5645f5d98d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 24 Nov 2021 22:39:17 +0100 Subject: [PATCH 187/211] removed unused imports --- openpype/tools/utils/widgets.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/openpype/tools/utils/widgets.py 
b/openpype/tools/utils/widgets.py index cea9f4ea3e..ea80636d1a 100644 --- a/openpype/tools/utils/widgets.py +++ b/openpype/tools/utils/widgets.py @@ -1,14 +1,10 @@ import logging -from . import lib - from Qt import QtWidgets, QtCore, QtGui + from avalon.vendor import qtawesome, qargparse - from openpype.style import get_objected_colors -from .models import RecursiveSortFilterProxyModel - log = logging.getLogger(__name__) From 37b32c328d365563948918a52a0036240ce9a21c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 25 Nov 2021 01:01:48 +0100 Subject: [PATCH 188/211] NameWindow replaces task formatting key with task[name] --- openpype/tools/workfiles/app.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py index a4b1717a1c..4f5e179d9b 100644 --- a/openpype/tools/workfiles/app.py +++ b/openpype/tools/workfiles/app.py @@ -100,7 +100,9 @@ class NameWindow(QtWidgets.QDialog): # Store project anatomy self.anatomy = anatomy - self.template = anatomy.templates[template_key]["file"] + self.template = anatomy.templates[template_key]["file"].replace( + "{task}", "{task[name]}" + ) self.template_key = template_key # Btns widget From f47d74d7b43b5f77c4ebb9710ff59c7b200e05e8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 25 Nov 2021 11:28:26 +0100 Subject: [PATCH 189/211] securing backward compatibility of project locked anatomy settings --- openpype/hosts/nuke/api/plugin.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 7b8af96df0..da78883447 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -197,7 +197,15 @@ class ExporterReview(object): def get_imageio_baking_profile(self): from . 
import lib as opnlib nuke_imageio = opnlib.get_nuke_imageio_settings() - return nuke_imageio["baking"]["viewerProcess"] + + # TODO: this is only securing backward compatibility lets remove + # this once all projects's anotomy are upated to newer config + if "baking" in nuke_imageio.keys(): + return nuke_imageio["baking"]["viewerProcess"] + else: + return nuke_imageio["viewer"]["viewerProcess"] + + class ExporterReviewLut(ExporterReview): From 6a4f56da4ac5d37243e2a292167eca05b57d364a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 25 Nov 2021 11:48:49 +0100 Subject: [PATCH 190/211] removing developer code --- openpype/hosts/nuke/plugins/load/load_backdrop.py | 1 - openpype/hosts/nuke/plugins/publish/extract_backdrop.py | 1 - .../hosts/nuke/plugins/publish/extract_review_data_lut.py | 7 ------- .../hosts/nuke/plugins/publish/extract_review_data_mov.py | 7 ------- 4 files changed, 16 deletions(-) diff --git a/openpype/hosts/nuke/plugins/load/load_backdrop.py b/openpype/hosts/nuke/plugins/load/load_backdrop.py index e615af51ff..9148260e9e 100644 --- a/openpype/hosts/nuke/plugins/load/load_backdrop.py +++ b/openpype/hosts/nuke/plugins/load/load_backdrop.py @@ -4,7 +4,6 @@ import nukescripts from openpype.hosts.nuke.api import lib as pnlib from avalon.nuke import lib as anlib from avalon.nuke import containerise, update_container -reload(pnlib) class LoadBackdropNodes(api.Loader): """Loading Published Backdrop nodes (workfile, nukenodes)""" diff --git a/openpype/hosts/nuke/plugins/publish/extract_backdrop.py b/openpype/hosts/nuke/plugins/publish/extract_backdrop.py index 13f8656005..0747c15ea7 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/extract_backdrop.py @@ -4,7 +4,6 @@ from openpype.hosts.nuke.api import lib as pnlib import nuke import os import openpype -reload(pnlib) class ExtractBackdropNode(openpype.api.Extractor): """Extracting content of backdrop nodes diff --git 
a/openpype/hosts/nuke/plugins/publish/extract_review_data_lut.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_lut.py index d21cb0eef9..8ba746a3c4 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_lut.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_lut.py @@ -4,13 +4,6 @@ from avalon.nuke import lib as anlib from openpype.hosts.nuke.api import plugin import openpype -try: - from __builtin__ import reload -except ImportError: - from importlib import reload - -reload(plugin) - class ExtractReviewDataLut(openpype.api.Extractor): """Extracts movie and thumbnail with baked in luts diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py index f7d0102b42..b5890b5c51 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py @@ -4,13 +4,6 @@ from avalon.nuke import lib as anlib from openpype.hosts.nuke.api import plugin import openpype -try: - from __builtin__ import reload -except ImportError: - from importlib import reload - -reload(plugin) - class ExtractReviewDataMov(openpype.api.Extractor): """Extracts movie and thumbnail with baked in luts From 96e30dd989453b6ba729c85ebfd049db90d17c33 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 25 Nov 2021 12:27:26 +0100 Subject: [PATCH 191/211] fix check of thumbnail id --- openpype/tools/loader/widgets.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index db86d1cd4e..ea45fd4364 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -813,8 +813,9 @@ class ThumbnailWidget(QtWidgets.QLabel): {"_id": doc_id}, {"data.thumbnail_id"} ) - - thumbnail_id = doc.get("data", {}).get("thumbnail_id") + thumbnail_id = None + if doc: + thumbnail_id = doc.get("data", {}).get("thumbnail_id") if 
thumbnail_id == self.current_thumb_id: if self.current_thumbnail is None: self.set_pixmap() From 0544147c6fbb2a6428533f2e64c18f365e096624 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 25 Nov 2021 13:13:41 +0100 Subject: [PATCH 192/211] log stderr as info --- openpype/lib/execute.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/execute.py b/openpype/lib/execute.py index a1111fba29..ad77b2f899 100644 --- a/openpype/lib/execute.py +++ b/openpype/lib/execute.py @@ -124,7 +124,7 @@ def run_subprocess(*args, **kwargs): if full_output: full_output += "\n" full_output += _stderr - logger.warning(_stderr) + logger.info(_stderr) if proc.returncode != 0: exc_msg = "Executing arguments was not successful: \"{}\"".format(args) From df6a3faef6bac1966fe4bba88cd694c3f91979a5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 25 Nov 2021 15:04:54 +0100 Subject: [PATCH 193/211] Added more logging --- .../custom/plugins/GlobalJobPreLoad.py | 42 ++++++++++++++----- 1 file changed, 31 insertions(+), 11 deletions(-) diff --git a/vendor/deadline/custom/plugins/GlobalJobPreLoad.py b/vendor/deadline/custom/plugins/GlobalJobPreLoad.py index 8631b035cf..2e7a00bc48 100644 --- a/vendor/deadline/custom/plugins/GlobalJobPreLoad.py +++ b/vendor/deadline/custom/plugins/GlobalJobPreLoad.py @@ -16,8 +16,9 @@ def inject_openpype_environment(deadlinePlugin): job = deadlinePlugin.GetJob() job = RepositoryUtils.GetJob(job.JobId, True) # invalidates cache - print("inject_openpype_environment start") + print(">>> Injecting OpenPype environments ...") try: + print(">>> Getting OpenPype executable ...") exe_list = job.GetJobExtraInfoKeyValue("openpype_executables") openpype_app = FileUtils.SearchFileList(exe_list) if openpype_app == "": @@ -27,11 +28,13 @@ def inject_openpype_environment(deadlinePlugin): "The path to the render executable can be configured " + "from the Plugin Configuration in the Deadline Monitor.") + print("--- penPype executable: 
{}".format(openpype_app)) + # tempfile.TemporaryFile cannot be used because of locking export_url = os.path.join(tempfile.gettempdir(), time.strftime('%Y%m%d%H%M%S'), 'env.json') # add HHMMSS + delete later - print("export_url {}".format(export_url)) + print(">>> Temporary path: {}".format(export_url)) args = [ openpype_app, @@ -55,21 +58,31 @@ def inject_openpype_environment(deadlinePlugin): "AVALON_TASK, AVALON_APP_NAME" raise RuntimeError(msg) - print("args:::{}".format(args)) + if not os.environ.get("OPENPYPE_MONGO"): + print(">>> Missing OPENPYPE_MONGO env var, process won't work") - exit_code = subprocess.call(args, cwd=os.path.dirname(openpype_app)) - if exit_code != 0: - raise RuntimeError("Publishing failed, check worker's log") + env = os.environ + env["OPENPYPE_HEADLESS_MODE"] = "1" + print(">>> Executing: {}".format(args)) + std_output = subprocess.check_output(args, + cwd=os.path.dirname(openpype_app), + env=env) + print(">>> Process result {}".format(std_output)) + + print(">>> Loading file ...") with open(export_url) as fp: contents = json.load(fp) for key, value in contents.items(): deadlinePlugin.SetProcessEnvironmentVariable(key, value) + print(">>> Removing temporary file") os.remove(export_url) - print("inject_openpype_environment end") - except Exception: + print(">> Injection end.") + except Exception as e: + if hasattr(e, "output"): + print(">>> Exception {}".format(e.output)) import traceback print(traceback.format_exc()) print("inject_openpype_environment failed") @@ -79,17 +92,17 @@ def inject_openpype_environment(deadlinePlugin): def inject_render_job_id(deadlinePlugin): """Inject dependency ids to publish process as env var for validation.""" - print("inject_render_job_id start") + print(">>> Injecting render job id ...") job = deadlinePlugin.GetJob() job = RepositoryUtils.GetJob(job.JobId, True) # invalidates cache dependency_ids = job.JobDependencyIDs - print("dependency_ids {}".format(dependency_ids)) + print(">>> Dependency IDs: 
{}".format(dependency_ids)) render_job_ids = ",".join(dependency_ids) deadlinePlugin.SetProcessEnvironmentVariable("RENDER_JOB_IDS", render_job_ids) - print("inject_render_job_id end") + print(">>> Injection end.") def pype_command_line(executable, arguments, workingDirectory): @@ -133,10 +146,13 @@ def pype(deadlinePlugin): deadlinePlugin: Deadline job plugin passed by Deadline """ + print(">>> Getting job ...") job = deadlinePlugin.GetJob() # PYPE should be here, not OPENPYPE - backward compatibility!! pype_metadata = job.GetJobEnvironmentKeyValue("PYPE_METADATA_FILE") pype_python = job.GetJobEnvironmentKeyValue("PYPE_PYTHON_EXE") + print(">>> Having backward compatible env vars {}/{}".format(pype_metadata, + pype_python)) # test if it is pype publish job. if pype_metadata: pype_metadata = RepositoryUtils.CheckPathMapping(pype_metadata) @@ -162,6 +178,8 @@ def pype(deadlinePlugin): def __main__(deadlinePlugin): + print("*** GlobalJobPreload start ...") + print(">>> Getting job ...") job = deadlinePlugin.GetJob() job = RepositoryUtils.GetJob(job.JobId, True) # invalidates cache @@ -170,6 +188,8 @@ def __main__(deadlinePlugin): openpype_publish_job = \ job.GetJobEnvironmentKeyValue('OPENPYPE_PUBLISH_JOB') or '0' + print("--- Job type - render {}".format(openpype_render_job)) + print("--- Job type - publish {}".format(openpype_publish_job)) if openpype_publish_job == '1' and openpype_render_job == '1': raise RuntimeError("Misconfiguration. 
Job couldn't be both " + "render and publish.") From 22f8c13c7fff94afaa46d7a7500f48810de32996 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 25 Nov 2021 15:05:34 +0100 Subject: [PATCH 194/211] Added possibility for timeout as in other places --- igniter/tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/igniter/tools.py b/igniter/tools.py index 04d7451335..3e862f5803 100644 --- a/igniter/tools.py +++ b/igniter/tools.py @@ -59,7 +59,7 @@ def validate_mongo_connection(cnx: str) -> (bool, str): return False, "Not mongodb schema" kwargs = { - "serverSelectionTimeoutMS": 2000 + "serverSelectionTimeoutMS": os.environ.get("AVALON_TIMEOUT", 2000) } # Add certificate path if should be required if should_add_certificate_path_to_mongo_url(cnx): From 6dcbb53ca386f69f18ee14ab77599fff1ab62993 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 25 Nov 2021 15:23:32 +0100 Subject: [PATCH 195/211] nuke: do not multiply representation on class method --- openpype/hosts/nuke/api/plugin.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index cc01201f9b..e53b97e297 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -112,9 +112,7 @@ class ExporterReview(object): instance (pyblish.instance): instance of pyblish context """ - data = dict({ - "representations": list() - }) + data = None def __init__(self, klass, @@ -126,6 +124,9 @@ class ExporterReview(object): self.path_in = self.instance.data.get("path", None) self.staging_dir = self.instance.data["stagingDir"] self.collection = self.instance.data.get("collection", None) + self.data = dict({ + "representations": list() + }) def get_file_info(self): if self.collection: From 883c1336aea5dc8a323ce69267d1cf77ecedfc49 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 25 Nov 2021 16:24:45 +0100 Subject: [PATCH 196/211] Add explicit higher timeout for cloud nodes --- 
vendor/deadline/custom/plugins/GlobalJobPreLoad.py | 1 + 1 file changed, 1 insertion(+) diff --git a/vendor/deadline/custom/plugins/GlobalJobPreLoad.py b/vendor/deadline/custom/plugins/GlobalJobPreLoad.py index 2e7a00bc48..8d6784c74e 100644 --- a/vendor/deadline/custom/plugins/GlobalJobPreLoad.py +++ b/vendor/deadline/custom/plugins/GlobalJobPreLoad.py @@ -63,6 +63,7 @@ def inject_openpype_environment(deadlinePlugin): env = os.environ env["OPENPYPE_HEADLESS_MODE"] = "1" + env["AVALON_TIMEOUT"] = "5000" print(">>> Executing: {}".format(args)) std_output = subprocess.check_output(args, From 3d4be3b63df314ec3373b0c8055d4cdc7a76aacb Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 25 Nov 2021 18:13:13 +0100 Subject: [PATCH 197/211] fix typo of "inputLinks" key --- openpype/lib/avalon_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 3e0e0c6ea6..85beecab51 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -271,7 +271,7 @@ def get_linked_asset_ids(asset_doc): if not asset_doc: return output - input_links = asset_doc["data"].get("inputsLinks") or [] + input_links = asset_doc["data"].get("inputLinks") or [] if input_links: output = [item["_id"] for item in input_links] return output From 8a8f39d4986ab0e85e3553d7edb5018d9a64b317 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 25 Nov 2021 18:29:03 +0100 Subject: [PATCH 198/211] Fixing typo MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ondřej Samohel <33513211+antirotor@users.noreply.github.com> --- vendor/deadline/custom/plugins/GlobalJobPreLoad.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/deadline/custom/plugins/GlobalJobPreLoad.py b/vendor/deadline/custom/plugins/GlobalJobPreLoad.py index 8d6784c74e..4683828445 100644 --- a/vendor/deadline/custom/plugins/GlobalJobPreLoad.py +++ 
b/vendor/deadline/custom/plugins/GlobalJobPreLoad.py @@ -28,7 +28,7 @@ def inject_openpype_environment(deadlinePlugin): "The path to the render executable can be configured " + "from the Plugin Configuration in the Deadline Monitor.") - print("--- penPype executable: {}".format(openpype_app)) + print("--- OpenPype executable: {}".format(openpype_app)) # tempfile.TemporaryFile cannot be used because of locking export_url = os.path.join(tempfile.gettempdir(), From d76c5720ef585bef3048617bed071f0b6085c5a4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 25 Nov 2021 18:30:31 +0100 Subject: [PATCH 199/211] Fixed label MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ondřej Samohel <33513211+antirotor@users.noreply.github.com> --- vendor/deadline/custom/plugins/GlobalJobPreLoad.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/deadline/custom/plugins/GlobalJobPreLoad.py b/vendor/deadline/custom/plugins/GlobalJobPreLoad.py index 4683828445..0aa5adaa20 100644 --- a/vendor/deadline/custom/plugins/GlobalJobPreLoad.py +++ b/vendor/deadline/custom/plugins/GlobalJobPreLoad.py @@ -86,7 +86,7 @@ def inject_openpype_environment(deadlinePlugin): print(">>> Exception {}".format(e.output)) import traceback print(traceback.format_exc()) - print("inject_openpype_environment failed") + print("!!! 
Injection failed.") RepositoryUtils.FailJob(job) raise From c728eec8ab22e3e260bdf2b9cc0fcad9e3b52c18 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 26 Nov 2021 12:55:26 +0100 Subject: [PATCH 200/211] use "id" key where id of input is stored instead of "input" --- openpype/plugins/publish/integrate_inputlinks.py | 2 +- openpype/tools/assetlinks/widgets.py | 7 ++++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/integrate_inputlinks.py b/openpype/plugins/publish/integrate_inputlinks.py index e8a8b2296c..3b134345e4 100644 --- a/openpype/plugins/publish/integrate_inputlinks.py +++ b/openpype/plugins/publish/integrate_inputlinks.py @@ -103,7 +103,7 @@ class IntegrateInputLinks(pyblish.api.ContextPlugin): # future. link = OrderedDict() link["type"] = link_type - link["input"] = io.ObjectId(input_id) + link["id"] = io.ObjectId(input_id) link["linkedBy"] = "publish" if "inputLinks" not in version_doc["data"]: diff --git a/openpype/tools/assetlinks/widgets.py b/openpype/tools/assetlinks/widgets.py index 9a136462b0..22e8848a60 100644 --- a/openpype/tools/assetlinks/widgets.py +++ b/openpype/tools/assetlinks/widgets.py @@ -37,8 +37,13 @@ class SimpleLinkView(QtWidgets.QWidget): # inputs # for link in version_doc["data"].get("inputLinks", []): + # Backwards compatibility for "input" key used as "id" + if "id" not in link: + link_id = link["input"] + else: + link_id = link["id"] version = self.dbcon.find_one( - {"_id": link["input"], "type": "version"}, + {"_id": link_id, "type": "version"}, projection={"name": 1, "parent": 1} ) if not version: From 46cb7d7a71bebd51f794c4d3a893dc2b780e3bff Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 26 Nov 2021 12:56:20 +0100 Subject: [PATCH 201/211] skip processing if workfile instance is not available --- openpype/plugins/publish/integrate_inputlinks.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/plugins/publish/integrate_inputlinks.py 
b/openpype/plugins/publish/integrate_inputlinks.py index 3b134345e4..539ad984b1 100644 --- a/openpype/plugins/publish/integrate_inputlinks.py +++ b/openpype/plugins/publish/integrate_inputlinks.py @@ -55,6 +55,7 @@ class IntegrateInputLinks(pyblish.api.ContextPlugin): if workfile is None: self.log.warn("No workfile in this publish session.") + return else: workfile_version_doc = workfile.data["versionEntity"] # link all loaded versions in scene into workfile From a732ce3ee95306a55ab87bc987cb8e4f75e716e1 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 26 Nov 2021 12:56:37 +0100 Subject: [PATCH 202/211] skip else statement --- .../plugins/publish/integrate_inputlinks.py | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/openpype/plugins/publish/integrate_inputlinks.py b/openpype/plugins/publish/integrate_inputlinks.py index 539ad984b1..beefd8c05a 100644 --- a/openpype/plugins/publish/integrate_inputlinks.py +++ b/openpype/plugins/publish/integrate_inputlinks.py @@ -56,22 +56,22 @@ class IntegrateInputLinks(pyblish.api.ContextPlugin): if workfile is None: self.log.warn("No workfile in this publish session.") return - else: - workfile_version_doc = workfile.data["versionEntity"] - # link all loaded versions in scene into workfile - for version in context.data.get("loadedVersions", []): - self.add_link( - link_type="reference", - input_id=version["version"], - version_doc=workfile_version_doc, - ) - # link workfile to all publishing versions - for instance in publishing: - self.add_link( - link_type="generative", - input_id=workfile_version_doc["_id"], - version_doc=instance.data["versionEntity"], - ) + + workfile_version_doc = workfile.data["versionEntity"] + # link all loaded versions in scene into workfile + for version in context.data.get("loadedVersions", []): + self.add_link( + link_type="reference", + input_id=version["version"], + version_doc=workfile_version_doc, + ) + # link workfile to all publishing versions + for 
instance in publishing: + self.add_link( + link_type="generative", + input_id=workfile_version_doc["_id"], + version_doc=instance.data["versionEntity"], + ) # link versions as dependencies to the instance for instance in publishing: From 8a71f5ecacaf93ba2e1c0ec8b0b785d52519b896 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 26 Nov 2021 13:35:14 +0100 Subject: [PATCH 203/211] changed "_id" key to "id" in ftrack sync --- .../ftrack/event_handlers_server/event_sync_links.py | 2 +- openpype/modules/default_modules/ftrack/lib/avalon_sync.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/default_modules/ftrack/event_handlers_server/event_sync_links.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_sync_links.py index 8c3d858a96..83132acd85 100644 --- a/openpype/modules/default_modules/ftrack/event_handlers_server/event_sync_links.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_sync_links.py @@ -113,7 +113,7 @@ class SyncLinksToAvalon(BaseEvent): continue links.append({ - "_id": ObjectId(link_mongo_id), + "id": ObjectId(link_mongo_id), "linkedBy": "ftrack", "type": "breakdown" }) diff --git a/openpype/modules/default_modules/ftrack/lib/avalon_sync.py b/openpype/modules/default_modules/ftrack/lib/avalon_sync.py index 9e22f80b1c..3ba874281a 100644 --- a/openpype/modules/default_modules/ftrack/lib/avalon_sync.py +++ b/openpype/modules/default_modules/ftrack/lib/avalon_sync.py @@ -1479,7 +1479,7 @@ class SyncEntitiesFactory: mongo_id = self.ftrack_avalon_mapper.get(ftrack_link_id) if mongo_id is not None: input_links.append({ - "_id": ObjectId(mongo_id), + "id": ObjectId(mongo_id), "linkedBy": "ftrack", "type": "breakdown" }) From 9772282a5c4a6319f470d68896a5ba7e5968a26f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 26 Nov 2021 13:35:34 +0100 Subject: [PATCH 204/211] check for "id" key instead of "_id" in get_linked_asset_ids --- openpype/lib/avalon_context.py | 10 +++++++++- 
1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 3e0e0c6ea6..c8e88610a5 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -273,7 +273,15 @@ def get_linked_asset_ids(asset_doc): input_links = asset_doc["data"].get("inputsLinks") or [] if input_links: - output = [item["_id"] for item in input_links] + for item in input_links: + # Backwards compatibility for "_id" key which was replaced with + # "id" + if "_id" in item: + link_id = item["_id"] + else: + link_id = item["id"] + output.append(link_id) + return output From ea269ef8c004e87868d1db5cf00c23364b6d65f3 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 26 Nov 2021 13:53:28 +0100 Subject: [PATCH 205/211] added experimental tools action to hiero --- openpype/hosts/hiero/api/menu.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/hiero/api/menu.py b/openpype/hosts/hiero/api/menu.py index 61b515d719..5aaab7a2e5 100644 --- a/openpype/hosts/hiero/api/menu.py +++ b/openpype/hosts/hiero/api/menu.py @@ -105,9 +105,9 @@ def menu_install(): sceneinventory_action.triggered.connect( lambda: host_tools.show_scene_inventory(parent=main_window) ) - menu.addSeparator() if os.getenv("OPENPYPE_DEVELOP"): + menu.addSeparator() reload_action = menu.addAction("Reload pipeline") reload_action.setIcon(QtGui.QIcon("icons:ColorAdd.png")) reload_action.triggered.connect(reload_config) @@ -120,3 +120,10 @@ def menu_install(): apply_colorspace_c_action = menu.addAction("Apply Colorspace Clips") apply_colorspace_c_action.setIcon(QtGui.QIcon("icons:ColorAdd.png")) apply_colorspace_c_action.triggered.connect(apply_colorspace_clips) + + menu.addSeparator() + + exeprimental_action = menu.addAction("Experimental tools...") + exeprimental_action.triggered.connect( + lambda: host_tools.show_experimental_tools_dialog(parent=main_window) + ) From 15d0920bc994a2684b716c644b42dfc539ced144 Mon Sep 
17 00:00:00 2001 From: Julien Martin Date: Fri, 26 Nov 2021 14:03:54 +0100 Subject: [PATCH 206/211] docs[website]: Add Ellipse Studio (logo) as an OpenPype contributor --- website/src/pages/index.js | 6 +++++- website/static/img/ellipse-studio.png | Bin 0 -> 14742 bytes 2 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 website/static/img/ellipse-studio.png diff --git a/website/src/pages/index.js b/website/src/pages/index.js index 00cf002aec..29b81e973f 100644 --- a/website/src/pages/index.js +++ b/website/src/pages/index.js @@ -64,6 +64,10 @@ const collab = [ title: 'Clothcat Animation', image: '/img/clothcat.png', infoLink: 'https://www.clothcatanimation.com/' + }, { + title: 'Ellipse Studio', + image: '/img/ellipse-studio.png', + infoLink: 'http://www.dargaudmedia.com' } ]; @@ -125,7 +129,7 @@ const studios = [ title: "Moonrock Animation Studio", image: "/img/moonrock_logo.png", infoLink: "https://www.moonrock.eu/", - } + } ]; function Service({imageUrl, title, description}) { diff --git a/website/static/img/ellipse-studio.png b/website/static/img/ellipse-studio.png new file mode 100644 index 0000000000000000000000000000000000000000..c6fd62a6d5bc64742aceb65709fc774c7143f503 GIT binary patch literal 14742 zcmd73WmHw|7d?7F6p#)_v$!{VM2plCvSxp4ux;+AcLWO|} z?+}$+@xdPfRx&aVm1JaS9i1F3tZdB@2-X|df!S>(~$D|xc?iPFdcMK%O_NZ*QK_vwfFTY{g6{WGtzucu<+%D z#N4dsOw+Xhi-bap0mf|W#2B^q4nrTE4bKjQG)8Mwa{B|Zlt;Z^UwX~9eH(9J9oQBB zw{lziCi68f`W?z)oG`aHZmz>aLVtf2G*2v(E5Fam{!7AMX7*r;X5C` zO2_L)5EEHVMR^`6Q2K@%^3>fiUeuSwxx@Gk?*hH0fiG-W%U+WUcfWb!@Z0UdTC6Ja znsaa4=~ANBOKd|cD&JDfc5x*K=c+CO{HYrpWg@AK=!RQ}HuZWl#mL11HsY|>Q!;NY z>f80#QEluUHoeou5s%iak8cPHqIIgoH*M`~U2M%=hz)Q3MRz}ODyqc3^#L9WrlaDc zmk0!&3GyGx2Z;hJcoW@ONmUMg2@CHg8s!8rl?nnui%^o4(srBLoPD8qYjV1E$MZ1M zlvNMA<6&$&EB3q3xx}HS#PS-%fnl0ewuaoH^lag#?^wf0-m`|^mD6BtsA80hAr8$Y z4kDFP3TIS`mt`ZC(};J!OrAffuQm|=H0j?&dWz6Won^luB`oIu zkFSd7{`d%l@y%~hH2>U0+1S|TF9(*Fmy3#u`m-b?czC1-5v@g{)Ob>@zJ*0apLTz- 
zwfe>tyn9;j93to^eof}=aM}T7^;Q(Mm?@**UmF{nXOB_2QvCUdNlDiSugM5v(&gmj z(7BT@I16`U{NWuOKK=JME-voc(QD7cjTBDaj?T_ck5T5G8fKrCkFOu}U#4wsZJnR~ zd-duS5#{iEryN@Euz{7mW3MMqo}6F3sHmu9siVJ@8q~5fwdU8@PD;PK&(Bw+F(#8BKvfK$mqbi_kDw4Bmiv(t$;Bxkpr=7xU4mfmEY>R@ zaj9HdT9TDnQ99jQ86f3&l9`!#=+Sgzj3ALwRCH!x$E|6uySv*V$z-6v-=k@6W5bpZ z=boccchw zY!oe~E*W0kcN2|>h?v(UgM(}BXO(>siECXk4Dp1Pv1}rI{N)Y%;yZq zW5yHGGEmQ7H+FyV;>D*X`)97Mhfxw2w0wh-8P{kee9|AXk8V!aj*>Z9TC&_q>2PeA z{r7kNvZ&c{p)&$*!~fc!a81;_;n7h!e1yKfmDOs2dKROtzK_qv;vyraKmT~y6D{JU zUu|y#e!smqTEKI`JzFntA;h_)67z6&b8|C!kyBC!BdqMI_GXSa^^qFFY6Hxh=`}FtM0wK_&CY^AD?~OKdZ;5$94BQ-VLNhR4~SE9o-@&6}W$2_5>dvu0K0F zyFON0d1%6O{km~H7Dnu+PoKudbg?Kg@c0nXulp&O1X-CRscqvC z2*U5Zy^I(bvHSbZFPj(-3`PhL6C|+t(o+$Hh8(1mDBd>^{&e*8rcO>nc@YJ0`hMh2 z`CX0-4N2qUYr_JDhllfR_+jC@f-imtTo@^{p=cVvLqkMs$;n-plB@^}3c|(4PX6%4 z&d$yZ$4|7(sHN;ONmJR6c!CuLv3Q@4PYyr(&C$^j#A|A9A;6V=ZCX4^g^)64w0{)^ z6I=}YxD>&aK_%h}5B&SQ8n1XH3Ic^x8UumAC?aA}qm=NO3BPCb(e(Q2o6NQK^+0@{ z+KQ5r-mg3Vj=jXQ)X&im=EK9o_y7GHxb&sMeLK85Q(^Mv#DU*-;-|ClsIRV`b^Et( z-#R)zTDR>2@?!XlTHDyzz3L5par5TQKQHz3zJFJ~c6w*0U|`U)B&WJshno&73eEN+ zR;6}YOkCW4I?{4TZM~qThCk{X!+=Q8jD795g&mJ32-1C-;RAX5b;pW3kW^G=W@gFu z8%)PhZQE}LR>H!=wJ+VBowaN9Ce+l`-v$OM-4-Yxw+TJt=i|!|d8M{HUfx^ocf?s& zS9g4TOda)>h$NcG6ZStFU8q3*GO(lgkl@xWrhsVcDKc{M)YQ~FCM9N>VMz&O8KvZpQ+3&VYckikcxv*wrWPA!# ztf{8Jef!MTmYtDNHp6(j;OyUGH&Jy1hqkshr0lm4Y$S@d`g1&ObX)1CI-VvY{JIy# z&lE86`NccN2(sI^qwfd;c*YiRQwZ1-OUug1_4W1raG=6f(9zM+)ExTy+y9Yrt)IIF zoJLaOEgp^d%jNK@zO1U)qT z?D(bCp}e@bxUeus=0;mrSJ&ET@#v?ooGoY4l71cUAnE)};86wOLmA<*#=aL|%#GUo zrioXZg5Q>Fd%D(PUhSr{4>ii0ybB|nXi**q9_Jy0-`vj_=YhK`|b_zQ@+mPU*;q_OyJa4kltz78#iciLsFpS$u4| zZVGM`V>)F_c`CIJ@H|p0(9(#H zk04PbXgJzu|Id%FzolQk+)L%N3C1p2D{r}gv`3~0g0{h+ z)?R5SR={CS)O|M~yo-&Ox3ZwXviXzazpW>xsCR|jwzEYeAflekHoC{r!n2U?+!CA^ z8DUgsb^SXxJ3g+E6g>}Aak=NF!+lu*1;1z}$s>4s3n3^n49j@+bCQUL0dMyZuC2h~kX=%fHI z4L?;@;*>{!zqAR!H{DzA+b7YYxPAK}aYw5_Ch9Ugx|5idh(Xwe7;*{<%&(@6KIe}y zzC8c7kzas(g{e4Oopj}?pt_d~vPH1dKhAJy9Ue*AM 
zLEP0-`)Q=OTlSE*`6V|Civos*rlvF}PkCo&=g!WH-w36%v%P^&d?A{s&bu3vy#5eg zj}$c(xDEzRZ{4~jjX~EwGear%J)EANo>7PZV({r?DrBj!$$%UJ(JP&~s->nDdHpvT zAt8N$ev=2^ldxfw3T(LJ?uiL_I0#86jwF@Lkm13>=mgX0>FHt)O$24|J90!J!uLJg8a3Q$xt9#nKn~IY7zrA3Ez$`I38A^# zy1L)TD#F9DDe|BB6v*p9MFWb`q)iR9j~>G>R1hni+3k3b|VY4vW@ z>K^%55CGt@))8os;2X4@0{NOLOy-k4sO>^)%u^CdRTx4Hu{rp|#@wcLWh#`}%mf zsy7DcKkyw$N#j(>oSmKV|L+6vodH^PJw7#k`lP85zr1Y0V^e}d!H@i$E&zFLDf7|I z9UflZTXHW1DV3CzVAmNR`yw$7!;els0gmu9W@BfsYU4kp5%+pXyaY7I%+Qh}spms^ zW=aa(MFl_=9EBlzz=s3ZIuu%jvVRAH^E(u!RfKSGWF-Igba7FUBFg`E0M*LT@gauE z(D;3{_Cl?KkyL3bE4FhwH*wz!+n=>Yjczf-|JzntuKN1=0DMeL%m8T}UETaKVi33? z^2TDm7c&z#fHg;7|KCO*tvA14a+)p__ZdyE)PM@d7YE_oK8m6es0q(F(d zhU6UxKY3nWUL=HoZVBl$l~qu{!^Zw#9tahU2p{MB_wPvk`D}V{eqNp4_~q3$SD1lP zB9aaW3?#VOht!Ho4v@2GX57pyEanrh>AAUM3|o;=ge=-YK|xS=N6j%nV4*Skg@%Q# z^xOnd3E|$pcozzn1~HSUX!=7059lCR&Lyb48uK2KYOe~@j!@t}ri8Y@soUDx_7bi? zy%uw22)t8>Vze3vA!7OS9!kegbdWz%2=5zy-p(#AoAWJ3XZP;hDay%tU2*&Q1s{IV z?yl1*rRTF}?{Zm7OG`DY*G>-BomFT9piUzuDxPu)3hEmetb=hPPJ_Z76UU;SDeCTY z;j8xGLAuDRe$%*VV7^<^wd(yf8A30E)F3i3{Hs8G>>tyc{FJ&ADxJT>nW2eMI5j*u znUb2S{FG6EkFVNxlIH~b!IWiq6AQW9DBt#n%1@sjKYlDoU_7c}BO}uWPbW*%-87dU z9D}o|YMXHFlm}nfpTwh6(o#`l1l+LwiUzf-x2Lbq`|RlBZUop)o}!shLE8*ITaUs` z?FQQM%2ClV2Jp?cUV>=RJXA>`Vcy2#oedI~$@+LG76Wwc-I#l`@9xUGbT!M9d5}o# z`040))Rdxb(e{|#qa!24BqW;3hy~C5K(6_AKlolwJV=jt!d66rJ*Le+9co9v@Z=z= zH>8|T@zDk?fgWb0p)f-ceV8PJ8L+yRhy^BN) z7nYTc{W5@PsE7pR>W_eXqJ8**0dX0qiOP7ezpCZ$_x|^fS8!^sELlllg0ZiXlW^5v zfa=V1-I$QVuh8a53Zn%BjrMi`zI5=HFH+Jb=upd_KqY(O=MFp%?ewm6Fb`a?;x;4) z?pp?7;gf}LQ3M18Xzj83;Rr1KIw$jr7JnVgXuTINxTRV(cbGNcf@sr8oJPp#bh+s; z*GNa3`0>Sp{2E3py@s-1l9$sn}5H+NZ}*XW!f$i9CqLNeck)U@*1yj;~AMBBZx^- z5)yu+M*!hJ;chX)#7E3Aq=PRmd~#7lp4;2!L)L|X6QY1`TqaILPOeOX@buXJ`DJ&h zXXP#yF!u7IB6{}Md`42nWT^O(aHWQ|HEXE5h>f4{shpgg>FZl}b!X~b!n)XXe0_bj zwTW+`wXQnfSdC+2l5_==4Gl#FisA!&$t)CO6rf)Uk(30NRpvd3kZAbn4$rFH_bm8i zLqbBHJ$u%U_T0e%Z-_+;Y#}Lt2{aVg0o~ET!3xiRTjguW1xdJWK;f)Zr<9kM$E6W( zbiDozK!26GRr<@cL8>O|17j%WaLrFnOUhT_W^fEQ1GQs#XsBIaXlO_+OWf}8&YOXi 
z=@#G1uM43O5lt^wa0}6~>9Ei2cz61F5_O`bKhH51;h2p6RLHrn*z+rZ13 z&;QkU{6to~|F5r~1--q!uV14qDTw==C1+-;eZD$lMIjg;a>bzI@g=X`2&Z?xXkJ}i z9UOdkDPd`0!OYGceum6SM0RDC^JzYadDc+EXkq`hUl=$zI>bxhB8u))%1*v(F5ZE+ILs)qP;q#YK+n)?(kdq-*J*$*KNuJhK4j{B3DKVwHR+*efcNm zn8Q&JyVB7--AAsgHTz{Bhy1rVzYZ*AHk09e)f?2Y)0@oY|D9M#kt9P3?XL$lThp*K&?+Zf*wO5VD!?{(V4u zr_ESZaEUA|mi3Z`1Cvy<2LAp0@&*FVG?2+~JUgW~i@a7hln(ne-^O0oo&E{e@tW;6!lVyO(e_ zM81()(%bUQkKz_@T=voSuWOh?>|kB=#y-k0x3r9-?E~dlm(ybfRm|e=lc(hZ%*;Uq zso2koK7Ha*#?0ShR4W*cX4lr!JDuDG30TNc3a-f|4IKn_UE#e>^+O?TI<$e*lb4>+ zdHpA)d3lT}wJy;!t*xybs@-eHM|a-<@FK3c%KLkJwaZ@vD_L>cZ#u4?#T5h|$D(##_fbw>UU^zHXx!%s)OD?{(=LPy8mw zKQWSF){}S-JW&fuHnPzSGnH2k4u3&s)~$Q5voug`kBr3Q`}!;F&cm_P&qYsoUI%(g2F>!?_BI5^ZcGNKvj zaoH9(yfkS^$I}H&xEsnNEUavK;Sp><%`ni{S6W_Pfc{)dPp{Vd)HMa&eX%RcT_lC6 zQW#u;X!n=s-As&(*toca=-%N`QR^)uW4n5{b(y@@TVG#;wvWL0d$7$Upm7!tMr3_X zjCjU(c0IkU0cZqt@);e(*+n%1RW)k64-1bwPUQ%2gYKKvke9l$x;jgxneIxrBM3&J zAq!*U?1F-~f@o>=48ksGf1cUfN7Cx6w+b{^IXH}4p5c!S=#fsQEUm4%Hr$fy?(ff+ z5ZP$FV=_0}@CuJgSeCL1luJorVN9hRFr!a)(l+!wf5W68#~-0%sh>5z)1tvk1=*RO zmxuqB)_s$0zXl^QUiS`qQI5^dD}M_9e0~}ww&!^olEi_ zB=8QYr3B!EpvY62c78JD&D~X^!&^L;prWj-7`Kcv34Two3>T^j70Su{Du#ccs+wB; zti6PU>_DIL)YKH}MG7PYuVj;@pI1ucG`d*oA#F$?WKU7`B-wFq{k7iBC9(Xx#-fkB`=4pN zs{#5kTj(c;d2!_8;$vdK{FjTYTHV-Ktk&^?7B0|b;-?1oL|GEPHNRTlEab);n@b13 zTHDa~y#z%>R4lqnGQkjUqx^I=sqAJcYI}X?!;R z`R;90Crj=k6T^RJ<3mG)+cQ++Uiz7AA664Hg);iH`)h5it!wP24>R^bt2ept1zDrM z56EutI_BXe>hn3>6zEQ3QEVuE!?uO;x zZO1fQ{1AjX`GqxD%k@-8u=!bLBxfmTKYVCx zya4=CH7PioU%!oIT^Ap_>$=o*TEYWobWB@Mc;-Z{*d(N+FWW|gASF4Db}Gs&poCCS zQzus1ZG`=b0tmO_j5ixAN^udSlnw?DbtupMcCFAyuE5Fdh@MK#p{8|ga$0H{N;#KFX* zQE@+xK1_mML3DaC~pJpXFlg#R?~}$ ziYf|VUHa-BjrRmz^ywD?Pkl%I7=-~l*-7x=aMOU0v0KIl;)^5bMDRw{uLHFD**~RJ z#o)v!LsOlViGA_FgYRFxY8rnxuMB3_gNeqZ!NoCH(=WHs0;hEs1{ z6y)WRXLd2dhKzP$4)_e&V>Kk&FXFOB{6e1EyL4<)^8 zO_h{FE+ZzPi|M|O@!GmAl1hY?>8H^Lk>K4iBfAkjN;z5CuE|DVpZTAfp8jU%8Pq0f_Cc7Efq7MW9TYpbfv%+1M?zd!@EcG_+r zO#bQ9Ihg9W_j2-qs_G`d8^`uNT_!YRo?cd|N)Y_rq-x`%qcO~VGNC`4(lUBuCw;43 
zH>W@{Ml;xndmV9caY<65HqSXaImLcJmL{rCc3xhvmJPm9!>3Z6~F|`nkgPki76n^49(2)f?9Y^b-rfN{4&pc zU;|;2MNa~0KwhBcGSxRA=}grC=uqlqO>^^HDzA~+ zu5oZw)Jlj}AIOl{*w{Qf>SEvX`rj3#3@M|~N@>#Z#Tk@RQ&R&fh=VtiM~ZeZ8)tMr z4+Naks5!d*z3H@EA-Qd*9jR)n!ZmfV$AJ4^dAx&aWXM= z=nkv(S!5>&{6i(!ZdBAX;C^^UtbAeOLWj|L$W1xAdNlU z-E<16SlqhaBYQ2t8m5F6U;qj0)ysW&-LuJ&L1N5%_wK!qM?~-vd5YVR ze@IdC@TgC>Q+euCEm+`1T8x$v~u1q8c@c{3o|neI_SqMo$FaxS{fJ_gj~aCB_bk1 z1KV-CDWw}m%Yxr5r-W3ANSK?MA-@VOPN+agUJ~NXefAMJR>Nk;XJk)J_VHu#Nh4M< zG3fnXd;OK!a+4Autw?9G|Q-9>h1%?v@pTqRYEjjS0Vk+ICaRhpr zMRf{FJXp8M$N)LNIQ<*{iciGzutcmN2GaDmL#h#>JReF$5zNBiqum8Q&WsUHxJXDr z(X%y}ii3yO;(e;yUo$#7%FX-379eN{SE(E5^17p706xqI+_%b!)+v8Aes1N z4gx6*XJLDFn5F-^B-3VExLI6!x^1W{)HdtWHt;Z%F)>J7UOoSv4T?H+kvC4_@+=Nc zld>^ciA-Rz_wV0Bxr<@26BZG9KT9fO3G00H=+SQq_z##HP$yln?a!YDt!T_q_i5e{ z26TKVFRyi#E;p<*Z7Sh@mC!#~X}NjWZiUE@@V)rm5$06Wx9Y6iQ1|8xKo1xoQh849 z@89R8?}NYCj~T-IW8>nUa3r1J!cZ0!5m6<0a#ltmyHRxcKO~}$jwxiWx^NMzQx3)x zZ>EfKXy%57ViOX~k3zD2sn1uBVT=(r{esc~mS|;FsKiMwg~QoGCNpk-6NyjT+gzAK zy-Q9pNNWJ7OEhgCwU|dc&a5MlwI+`PYXgI{*{u$rn)z)~xfG5KDZ7dwb$fez{zf7T z3yZZZ1>gE}fEOImgTJMJsU4IP>8G| zqoRT$$A|^c z)56K77EVx8kVsJlS##wTfV+uoR==rQ@Ttz1Wc5RQ+7ws5&}K2Gt_K3Hy*JN)|NcW| zKW=!ChkxhQVJ~7Kp_Xi?kM~x{>aAQ?hnTGX3LqDok(Ndz4U9jA83#rWnC3~Z8GiOd zJl$cujCk@GCS{^4bS@g*p6>DBK!Ap~T%3eaG+KZ!9LuTh_HT>IYDj+hKcz@<>sB?4 z(KzMx{rU6U#-?~NqG}H{ltCeKPzB{0Cj-p-kO-c^V5QB)^x>iFBrXe-h^!=!8>@6+ zn&|KLE<$*=OYQYCa&khiru0d}*jK$2Li^-P!Da{~@rBGs-Fz7r_g5C78eZ=%`l^Z_Fd|Y6}|+3sk5mk`B(N zjwtAK$M8dEd#L{bQsrKytzMZS9Td}0@blv90%728x21&G&lcv~z-Ae7%Mp5=w?*D2IGru1o(Fdf}%N~^tJZRl42I*!J`U>@bpt>WZ3SQ-2ivHRJ5FMnC(edZ=M9-c3B zxN2I&pv}w|yW}ixXCja=jT%l$tZ zR=gsYc^vnn-FxmMD~+(en`sD*W8%~X+^WIPAAg`xc!NI;<fCoj_JlSNeys7Dl*tZNdOG)urnWOKdBz9f&g}79n;(|Na~W#-cGF<7B?3aL!3`Sj zSH{VIy^*^av7m=w3z~h->lq0y&W>L~Xhw^B!Dw>OU#ta7YAh@)@FdtB-`pl63js_s zj^nqRB93k?JvnL z20Ad%LY5_%0Hcy}AnbbI(>s}UTXQ3&Cpd@qD(+=zB;GTCnNagu-XH&Jp$`bIH%?ZC z%ler2>7m_hz0Tz;J7p)H$Io=6TSXRO(+4s{YDb#)p_;b3{;jIIzZE4_->lSd=gY1P 
z6-AjscD(V6-UlTfa<^~a2D;CpFvWP~HeQJcXuenpyo#O&#ILoR?kpW9KHQpxXRE?S z=l8X(?ajaCRA^I!jevuTd*UVjwk@YDrh5SS!keSkB=6Kq6um28KQr;;2c6Eqcuu_W zsO{8V-3?{TvGJK1Xf`NOR&{rExq)a*w=<}=9z~kVG5k$UO<mSl;ONuNDORaI3;Na(O}dG7}LI(WUI z;o*$9Q&lxJh4FuDZ1@618Ys-+9Q z_`2~#8so?KxaA6?rgB9L^e?aGZJzUriJe1$3r*{4XnfgxR0+*2E8lkJO5MH^;fv`1 zj6g!5wBN|r zFU_}8t7~hcA|tQD3kJ#L6H`kcRmx}WZo$Mq3~oKv1tQu5V`E=loi*8hUmozJFtxCd z2j=|Eaip?ynT>*+-1J>K&Ep!|ltqDdV9|8#fS*b*sNMhrk7j2@lQh0(+#(`(EvHAk zxq*Z!#@&GFGYw)G-vdDFm$9Sf8GoP=4|7AShb}-+!Bw<=M90H3lu5J-wZAeY?AORQ zM>Bg9dHEYMss}e)OgnD=t~hd3ny?!=uXvZ6s|Yi=XA90$WMp)>wiCeQIku}dIjpIkNu z2M4>ml``TcW@dV^35+ei_O|bA|B`{e7Z~2i7)U|7XYlKtMBio>-!Fgu{CJ0L`#?`Q zxz=S}SIn~q+~eE%cR~p_B8=?}l=c6>PeG(qO=0pu>noGVD9*(^@T3E;-y^2|uqVg6 z=0elU&cg3!XQX^yzI;hXM~4(zwlE?+)kn2RN=V2fAYfU)IuE}PffJEZQI+oL>499( z)^7Ct*Ka!8Iv@@DKIi)32k;s=?%oA$WQ33a%Wwfkj)ngkbFK{VLp4AG>h5BQb}1;o z;j>y08y4=70N!WtTb>JFzU1044$dwC!7tZ_(aTjw!4coRP2Y{ZW0DJ_TMK01(O>jH zJ^cOq_l*6&ug&{917wuQS4nB<`R)?dJ1stDNC=pXNTj1Z(ghA+VUa&o3;#o44! uZ_tRk87J1N{U3ePtM=;uPyX{8J)>#N6&`HMOFBg^Nl8vkwoKaC|NjDQ>o8CN literal 0 HcmV?d00001 From f18f207f03b74a822ab1ab0d79f18e8a940ea7eb Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 26 Nov 2021 16:27:53 +0100 Subject: [PATCH 207/211] Fix - provider icons are pulled from a folder --- openpype/tools/utils/lib.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/openpype/tools/utils/lib.py b/openpype/tools/utils/lib.py index 4626e35a93..d38cb33029 100644 --- a/openpype/tools/utils/lib.py +++ b/openpype/tools/utils/lib.py @@ -477,6 +477,7 @@ def create_qthread(func, *args, **kwargs): def get_repre_icons(): + """Returns a dict {'provider_name': QIcon}""" try: from openpype_modules import sync_server except Exception: @@ -488,9 +489,17 @@ def get_repre_icons(): "providers", "resources" ) icons = {} - # TODO get from sync module - for provider in ['studio', 'local_drive', 'gdrive']: - pix_url = "{}/{}.png".format(resource_path, provider) + if not 
os.path.exists(resource_path): + print("No icons for Site Sync found") + return {} + + for file_name in os.listdir(resource_path): + if file_name and not file_name.endswith("png"): + continue + + provider, _ = os.path.splitext(file_name) + + pix_url = "{}{}{}".format(resource_path, os.path.sep, file_name) icons[provider] = QtGui.QIcon(pix_url) return icons From a1c6e1b031ddd01c96e9b2c0621e4b622a8d6808 Mon Sep 17 00:00:00 2001 From: Julien Martin Date: Fri, 26 Nov 2021 16:48:03 +0100 Subject: [PATCH 208/211] docs[website]: Homogenize collab and client logo display sizes --- website/src/css/custom.css | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/website/src/css/custom.css b/website/src/css/custom.css index 4f7f8396f6..0a72dc0f23 100644 --- a/website/src/css/custom.css +++ b/website/src/css/custom.css @@ -197,7 +197,7 @@ h5, h6 { font-weight: var(--ifm-font-weight-semibold); } } .showcase .client img { - max-height: 80px; + max-height: 70px; padding: 20px; max-width: 120px; align-self: center; @@ -215,10 +215,10 @@ h5, h6 { font-weight: var(--ifm-font-weight-semibold); } } .showcase .collab img { - max-height: 60px; + max-height: 70px; padding: 20px; align-self: center; - max-width: 200px; + max-width: 160px; } .showcase .pype_logo img{ From 216601a015ceda9def3d64eaefc803ea8280fa4b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 26 Nov 2021 17:52:59 +0100 Subject: [PATCH 209/211] Added basic documentation for new custom provider --- openpype/modules/default_modules/sync_server/README.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/modules/default_modules/sync_server/README.md b/openpype/modules/default_modules/sync_server/README.md index d7d7f3718b..e283b3bb66 100644 --- a/openpype/modules/default_modules/sync_server/README.md +++ b/openpype/modules/default_modules/sync_server/README.md @@ -56,6 +56,13 @@ representation.files.sites: `db.getCollection('MY_PROJECT').update({type:"representation"}, 
{$set:{"files.$[].sites.MY_CONFIGURED_REMOTE_SITE" : {}}}, true, true)` +I want to create new custom provider: +----------------------------------- +- take `providers\abstract_provider.py` as a base class +- create provider class in `providers` with a name according to a provider (eg. 'gdrive.py' for gdrive provider etc.) +- upload provider icon in png format, 24x24, into `providers\resources`, its name must follow name of provider (eg. 'gdrive.png' for gdrive provider) +- register new provider into `providers.lib.py`, test how many files could be manipulated at same time, check provider's API for limits + Needed configuration: -------------------- `pype/settings/defaults/project_settings/global.json`.`sync_server`: From 06f23af879d6272123494cb040d57be469a4c786 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 26 Nov 2021 17:54:45 +0100 Subject: [PATCH 210/211] Better path creation --- openpype/tools/utils/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/utils/lib.py b/openpype/tools/utils/lib.py index d38cb33029..8246d606b7 100644 --- a/openpype/tools/utils/lib.py +++ b/openpype/tools/utils/lib.py @@ -499,7 +499,7 @@ def get_repre_icons(): provider, _ = os.path.splitext(file_name) - pix_url = "{}{}{}".format(resource_path, os.path.sep, file_name) + pix_url = os.path.join(resource_path, file_name) icons[provider] = QtGui.QIcon(pix_url) return icons From b306dfd8e947533b64a8cea54d6763981fafdba8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 26 Nov 2021 19:35:22 +0100 Subject: [PATCH 211/211] skip workfile instance linking if workfile instance is not available --- .../plugins/publish/integrate_inputlinks.py | 36 +++++++++---------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/openpype/plugins/publish/integrate_inputlinks.py b/openpype/plugins/publish/integrate_inputlinks.py index beefd8c05a..f973dfc963 100644 --- a/openpype/plugins/publish/integrate_inputlinks.py +++ 
b/openpype/plugins/publish/integrate_inputlinks.py @@ -55,23 +55,22 @@ class IntegrateInputLinks(pyblish.api.ContextPlugin): if workfile is None: self.log.warn("No workfile in this publish session.") - return - - workfile_version_doc = workfile.data["versionEntity"] - # link all loaded versions in scene into workfile - for version in context.data.get("loadedVersions", []): - self.add_link( - link_type="reference", - input_id=version["version"], - version_doc=workfile_version_doc, - ) - # link workfile to all publishing versions - for instance in publishing: - self.add_link( - link_type="generative", - input_id=workfile_version_doc["_id"], - version_doc=instance.data["versionEntity"], - ) + else: + workfile_version_doc = workfile.data["versionEntity"] + # link all loaded versions in scene into workfile + for version in context.data.get("loadedVersions", []): + self.add_link( + link_type="reference", + input_id=version["version"], + version_doc=workfile_version_doc, + ) + # link workfile to all publishing versions + for instance in publishing: + self.add_link( + link_type="generative", + input_id=workfile_version_doc["_id"], + version_doc=instance.data["versionEntity"], + ) # link versions as dependencies to the instance for instance in publishing: @@ -82,7 +81,8 @@ class IntegrateInputLinks(pyblish.api.ContextPlugin): version_doc=instance.data["versionEntity"], ) - publishing.append(workfile) + if workfile is not None: + publishing.append(workfile) self.write_links_to_database(publishing) def add_link(self, link_type, input_id, version_doc):