From 9a6dc109254ab4d1aca99f4abd3464d65b4e46c2 Mon Sep 17 00:00:00 2001
From: Roy Nieterau
Date: Fri, 6 Jan 2023 03:02:31 +0100
Subject: [PATCH 001/187] Initial draft for Substance Painter integration

---
 openpype/hosts/substancepainter/__init__.py | 10 +
 openpype/hosts/substancepainter/addon.py | 34 +++
 .../hosts/substancepainter/api/__init__.py | 8 +
 .../hosts/substancepainter/api/pipeline.py | 234 ++++++++++++++++++
 .../deploy/plugins/openpype_plugin.py | 15 ++
 .../resources/app_icons/substancepainter.png | Bin 0 -> 107059 bytes
 .../system_settings/applications.json | 27 ++
 openpype/settings/entities/enum_entity.py | 1 +
 .../schema_substancepainter.json | 40 +++
 .../system_schema/schema_applications.json | 4 +
 10 files changed, 373 insertions(+)
 create mode 100644 openpype/hosts/substancepainter/__init__.py
 create mode 100644 openpype/hosts/substancepainter/addon.py
 create mode 100644 openpype/hosts/substancepainter/api/__init__.py
 create mode 100644 openpype/hosts/substancepainter/api/pipeline.py
 create mode 100644 openpype/hosts/substancepainter/deploy/plugins/openpype_plugin.py
 create mode 100644 openpype/resources/app_icons/substancepainter.png
 create mode 100644 openpype/settings/entities/schemas/system_schema/host_settings/schema_substancepainter.json

diff --git a/openpype/hosts/substancepainter/__init__.py b/openpype/hosts/substancepainter/__init__.py
new file mode 100644
index 0000000000..4c33b9f507
--- /dev/null
+++ b/openpype/hosts/substancepainter/__init__.py
@@ -0,0 +1,10 @@
+from .addon import (
+    SubstanceAddon,
+    SUBSTANCE_HOST_DIR,
+)
+
+
+__all__ = (
+    "SubstanceAddon",
+    "SUBSTANCE_HOST_DIR"
+)
diff --git a/openpype/hosts/substancepainter/addon.py b/openpype/hosts/substancepainter/addon.py
new file mode 100644
index 0000000000..bb55f20189
--- /dev/null
+++ b/openpype/hosts/substancepainter/addon.py
@@ -0,0 +1,34 @@
+import os
+from openpype.modules import OpenPypeModule, IHostAddon
+
+SUBSTANCE_HOST_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class SubstanceAddon(OpenPypeModule, IHostAddon):
+    name = "substancepainter"
+    host_name = "substancepainter"
+
+    def initialize(self, module_settings):
+        self.enabled = True
+
+    def add_implementation_envs(self, env, _app):
+        # Add requirements to SUBSTANCE_PAINTER_PLUGINS_PATH
+        plugin_path = os.path.join(SUBSTANCE_HOST_DIR, "deploy")
+        plugin_path = plugin_path.replace("\\", "/")
+        if env.get("SUBSTANCE_PAINTER_PLUGINS_PATH"):
+            plugin_path += os.pathsep + env["SUBSTANCE_PAINTER_PLUGINS_PATH"]
+
+        env["SUBSTANCE_PAINTER_PLUGINS_PATH"] = plugin_path
+
+        # Fix UI scale issue
+        env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None)
+
+    def get_launch_hook_paths(self, app):
+        if app.host_name != self.host_name:
+            return []
+        return [
+            os.path.join(SUBSTANCE_HOST_DIR, "hooks")
+        ]
+
+    def get_workfile_extensions(self):
+        return [".spp", ".toc"]
diff --git a/openpype/hosts/substancepainter/api/__init__.py b/openpype/hosts/substancepainter/api/__init__.py
new file mode 100644
index 0000000000..937d0c429e
--- /dev/null
+++ b/openpype/hosts/substancepainter/api/__init__.py
@@ -0,0 +1,8 @@
+from .pipeline import (
+    SubstanceHost,
+
+)
+
+__all__ = [
+    "SubstanceHost",
+]
diff --git a/openpype/hosts/substancepainter/api/pipeline.py b/openpype/hosts/substancepainter/api/pipeline.py
new file mode 100644
index 0000000000..3fd081ca1c
--- /dev/null
+++ b/openpype/hosts/substancepainter/api/pipeline.py
@@ -0,0 +1,234 @@
+# -*- coding: utf-8 -*-
+"""Pipeline tools for OpenPype Substance Painter integration."""
+import os
+import sys
+import logging
+from functools import partial
+
+# Substance 3D Painter modules
+import substance_painter.ui
+import substance_painter.event
+import substance_painter.export
+import substance_painter.project
+import substance_painter.textureset
+
+from openpype.host import HostBase, IWorkfileHost, ILoadHost, IPublishHost
+
+import pyblish.api
+
+from openpype.pipeline import (
+    register_creator_plugin_path,
+    register_loader_plugin_path,
+    AVALON_CONTAINER_ID
+)
+from openpype.lib import (
+    register_event_callback,
+    emit_event,
+)
+from openpype.pipeline.load import any_outdated_containers
+from openpype.hosts.substancepainter import SUBSTANCE_HOST_DIR
+
+log = logging.getLogger("openpype.hosts.substance")
+
+PLUGINS_DIR = os.path.join(SUBSTANCE_HOST_DIR, "plugins")
+PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
+LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
+CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
+INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
+
+self = sys.modules[__name__]
+self.menu = None
+self.callbacks = []
+
+
+class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
+    name = "substancepainter"
+
+    def __init__(self):
+        super(SubstanceHost, self).__init__()
+        self._has_been_setup = False
+
+    def install(self):
+        pyblish.api.register_host("substancepainter")
+
+        pyblish.api.register_plugin_path(PUBLISH_PATH)
+        register_loader_plugin_path(LOAD_PATH)
+        register_creator_plugin_path(CREATE_PATH)
+
+        log.info("Installing callbacks ... ")
+        # register_event_callback("init", on_init)
+        _register_callbacks()
+        # register_event_callback("before.save", before_save)
+        # register_event_callback("save", on_save)
+        register_event_callback("open", on_open)
+        # register_event_callback("new", on_new)
+
+        log.info("Installing menu ... ")
+        _install_menu()
+
+        self._has_been_setup = True
+
+    def uninstall(self):
+        _uninstall_menu()
+        _deregister_callbacks()
+
+    def has_unsaved_changes(self):
+
+        if not substance_painter.project.is_open():
+            return False
+
+        return substance_painter.project.needs_saving()
+
+    def get_workfile_extensions(self):
+        return [".spp", ".toc"]
+
+    def save_workfile(self, dst_path=None):
+
+        if not substance_painter.project.is_open():
+            return False
+
+        if not dst_path:
+            dst_path = self.get_current_workfile()
+
+        full_save_mode = substance_painter.project.ProjectSaveMode.Full
+        substance_painter.project.save_as(dst_path, full_save_mode)
+
+        return dst_path
+
+    def open_workfile(self, filepath):
+
+        if not os.path.exists(filepath):
+            raise RuntimeError("File does not exist: {}".format(filepath))
+
+        # We must first explicitly close current project before opening another
+        if substance_painter.project.is_open():
+            substance_painter.project.close()
+
+        substance_painter.project.open(filepath)
+        return filepath
+
+    def get_current_workfile(self):
+        if not substance_painter.project.is_open():
+            return None
+
+        filepath = substance_painter.project.file_path()
+        if filepath.endswith(".spt"):
+            # When currently in a Substance Painter template assume our
+            # scene isn't saved. This can be the case directly after doing
+            # "New project", the path will then be the template used. This
+            # avoids Workfiles tool trying to save as .spt extension if the
+            # file hasn't been saved before.
+            return
+
+        return filepath
+
+    def get_containers(self):
+        return []
+
+    @staticmethod
+    def create_context_node():
+        pass
+
+    def update_context_data(self, data, changes):
+        pass
+
+    def get_context_data(self):
+        pass
+
+
+def _install_menu():
+    from PySide2 import QtWidgets
+    from openpype.tools.utils import host_tools
+
+    parent = substance_painter.ui.get_main_window()
+
+    menu = QtWidgets.QMenu("OpenPype")
+
+    action = menu.addAction("Load...")
+    action.triggered.connect(
+        lambda: host_tools.show_loader(parent=parent, use_context=True)
+    )
+
+    action = menu.addAction("Publish...")
+    action.triggered.connect(
+        lambda: host_tools.show_publisher(parent=parent)
+    )
+
+    action = menu.addAction("Manage...")
+    action.triggered.connect(
+        lambda: host_tools.show_scene_inventory(parent=parent)
+    )
+
+    action = menu.addAction("Library...")
+    action.triggered.connect(
+        lambda: host_tools.show_library_loader(parent=parent)
+    )
+
+    menu.addSeparator()
+    action = menu.addAction("Work Files...")
+    action.triggered.connect(
+        lambda: host_tools.show_workfiles(parent=parent)
+    )
+
+    substance_painter.ui.add_menu(menu)
+
+    def on_menu_destroyed():
+        self.menu = None
+
+    menu.destroyed.connect(on_menu_destroyed)
+
+    self.menu = menu
+
+
+def _uninstall_menu():
+    if self.menu:
+        self.menu.destroy()
+        self.menu = None
+
+
+def _register_callbacks():
+    # Prepare emit event callbacks
+    open_callback = partial(emit_event, "open")
+
+    # Connect to the Substance Painter events
+    dispatcher = substance_painter.event.DISPATCHER
+    for event, callback in [
+        (substance_painter.event.ProjectOpened, open_callback)
+    ]:
+        dispatcher.connect(event, callback)
+        # Keep a reference so we can deregister if needed
+        self.callbacks.append((event, callback))
+
+
+def _deregister_callbacks():
+    for event, callback in self.callbacks:
+        substance_painter.event.DISPATCHER.disconnect(event, callback)
+
+
+def on_open():
+    log.info("Running callback on open..")
+    print("Run")
+
+    if any_outdated_containers():
+        from openpype.widgets import popup
+
+        log.warning("Scene has outdated content.")
+
+        # Get main window
+        parent = substance_painter.ui.get_main_window()
+        if parent is None:
+            log.info("Skipping outdated content pop-up "
+                     "because Substance window can't be found.")
+        else:
+
+            # Show outdated pop-up
+            def _on_show_inventory():
+                from openpype.tools.utils import host_tools
+                host_tools.show_scene_inventory(parent=parent)
+
+            dialog = popup.Popup(parent=parent)
+            dialog.setWindowTitle("Substance scene has outdated content")
+            dialog.setMessage("There are outdated containers in "
+                              "your Substance scene.")
+            dialog.on_clicked.connect(_on_show_inventory)
+            dialog.show()
\ No newline at end of file
diff --git a/openpype/hosts/substancepainter/deploy/plugins/openpype_plugin.py b/openpype/hosts/substancepainter/deploy/plugins/openpype_plugin.py
new file mode 100644
index 0000000000..01779156f1
--- /dev/null
+++ b/openpype/hosts/substancepainter/deploy/plugins/openpype_plugin.py
@@ -0,0 +1,15 @@
+
+def start_plugin():
+    from openpype.pipeline import install_host
+    from openpype.hosts.substancepainter.api import SubstanceHost
+
+    install_host(SubstanceHost())
+
+
+def close_plugin():
+    from openpype.pipeline import uninstall_host
+    uninstall_host()
+
+
+if __name__ == "__main__":
+    start_plugin()
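Editor's note on the event wiring in pipeline.py: the patch only connects substance_painter.event.ProjectOpened to OpenPype's "open" event, while the before.save, save and new callbacks stay commented out in install(). Below is a minimal sketch of how further events could be routed through the same dispatcher. The ProjectSaved and ProjectCreated event classes are assumptions about the Substance Painter Python API and are not referenced anywhere in this patch, so verify them against the API documentation of the installed release before relying on this.

# Sketch only: a hypothetical extension of _register_callbacks() from pipeline.py.
# ProjectSaved and ProjectCreated are assumed event classes, not confirmed by
# this patch - check substance_painter.event in the target Painter release.
from functools import partial

import substance_painter.event

from openpype.lib import emit_event


def _register_callbacks_sketch(callbacks):
    dispatcher = substance_painter.event.DISPATCHER
    event_map = [
        (substance_painter.event.ProjectOpened, partial(emit_event, "open")),
        (substance_painter.event.ProjectSaved, partial(emit_event, "save")),   # assumed
        (substance_painter.event.ProjectCreated, partial(emit_event, "new")),  # assumed
    ]
    for event, callback in event_map:
        dispatcher.connect(event, callback)
        # Keep references so _deregister_callbacks() can disconnect them later
        callbacks.append((event, callback))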
diff --git a/openpype/resources/app_icons/substancepainter.png b/openpype/resources/app_icons/substancepainter.png
new file mode 100644
index 0000000000000000000000000000000000000000..dc46f25d747f6626fd085818bbc593636898eecf
GIT binary patch
literal 107059
[107059 bytes of binary PNG icon data omitted]
z&z9P{=sj4XuRQv_wV1EtzwMioMxvuEPU2sQF}s<~iyCY>0gg$qL<_3>meohvKMu!J z=kMG*_DEKyO0BMBI?HV30TY8f9v_Y$yX9k?nXkE>j8o(Yx~>A|vyxXocM^$C^(ZGA z@`!8j*k3@0{7ce%aM zN84WQvtZS1LiUV5_?qYa^_^o5&0|Hl!=pg)ahjRCz48Au5kMu_M#>>ia8d+k&O7Y%QFfkr+=BH zY{46sZ%$~ccIA_wX;G55ox%jPx)yglhH9R+cS~ucu;w#}`ZJ|tuj5-ja}>hgM|Aya zwkh<5iqwm}o&s-&6SQ~H=A=g1__OUsh6kEVYahev=Q@E`qrY{Z-CM>SlC5FR@gs(Z zCKiZ<2eHOl40+!sjNul2;KeE_WjGw`OR`jP(`Dkyx`dg)+K$Nd*bEj$PkarovZsM)U+(0-WWz%L zWX-PX7uIe`!#jK$J$p8F;ZL;ThC*3Jln?nCws*i(sD`j4rg$?H2~aszjIQZ8xwcvF zi6u|pK8)d0pDkT+(!ss8UhYPUucE%0nO8k}?Vn}VQLCnDylldVjlXWm0XqQnrs~BK zZAzcuzRHxJJ)ZhGCUn*7nhzUt>~S_%N=mBcL3{7&>kWLeHHVYDkf$Fcg9oB2l777n zBlyw`YEOlP#})^*5?WhjYdG4Wy8hNBm%#M7ThU%GwqB$wClE3^eI1Vv1jc`}@xdU+ zUYvBCNFDRh6o~!Hg_=0w%2jo8>Fwv6u>cLa;>R4y?Ru&wR&*=htED|EyV9F=+#`%1 z*)w_FO)Gb$RBL1d;Q^)A|8NWSc%#^G#u}3Ieewr<-WWxktW3qbNX2>NSh~KODyh21 zK;L?9M4>o|`)}}L!I81$+>c_^T>`r^K~1&X3~>S+<$e4`oz}6!>msQO0fh&Vr|-U! z0{tB~v`}#1vjK;7+6n|%R1u5m&n;|S+2mQP(8s#VwQsTt={iYD|}!|ZKhwv^~I zdqp_#ns)zSl*frOCIcADt@}TuRrK(@z*7PBO?R?eZZuVRyVp)|h=-k~7XQ&~7IkyM z(Xj; zpWiBmchfRmBe+ksv#VG!wYYuTyYfWVx8{SyT}~uf@AKWTGh*A+QwR;N4jqYB-=5$U zW$Kzu$%G%TAFx&Fr28|lFCLYDm8;=UTtkBYG^hGoQ>5!xGVL|Ba+`yy#fp=`Y3-9a z?X|bas;pTFY=LAn6i;o?fb9n*R@#kzW$ycsr!@lGVHzJPhwEpXRaT7%1nkW7ydnMJ zeCn^C@!#wv|43au^P$-N$u}`oElnMOFJTRL?wv-EJq#cAq1yVv!^DVK6BDJ`&mliO z4O20ee2x*?(@(GE4s9nq63LCH_7`#@Qabsa59|l>oSyuSV3oiA%t5jQGWy$2df2~p zBpG$8Pm97Ba$a#lJj+H=*l+VH?W8TOOzob_z*HLzpl?Y+B6Y0<838BH4a&!5I~wBJ;e0qcS> zKmD?IqgX|BgLlwYWx9?VipuAc4h{NR;isvh8Q!|7h5k!jIHLVooW>pSvJCyo) z;!FOXZe54O%lymmO^PFnQtDHCJ^?+#@{OP@&!pvC5Tl-MW(cy zKpep|hOqzj;|*2si*upb4-~-z%OuDwTA{5Yty_mmkU88hXHTyM-+B@$B8ai7wYOoE$93~ZbY?SVnx?4o2XJ+#~kMI$>cyl zun9~u`jf-i+e{1Ys&1O;>D61?W|bzAYo6W}LGWe3vTgYGZ`h1fD{Xcs>$|5(av_%3 z-gfbH+ZcU{kEyMrQKjGtGrU$=@{TyeZ!~7BdO&S8`v9cm& z{g`0uBOD)xt*yb3u0g;Lw^yUOQq5DA{slw9#(3PEEF z9PLi_MxqF>07HJy*KO?!ME0j)gxa_eYV3FJ$UJXHNNSi+@ADQ7ivR*%imBN5M6nqd z4Iq|-2b!!|uSQk8pT(7oN12GjW(y0GPY#(hj@~+03$i)E6c1K_vtGEVWDVCakEbzw zv>T*HKd1Y2R}@lsh~@ISdLtvcPOizaNbr3jctM;cg}ZlQmjUD(yyBVPj}59WuyY1H_(hOjlq`{I{HY^&kl>7zw3|tWsVEM-B3Ln!q6*e?>Su zK(_D(H2YY6N{bV?RiWDzZ}M}t#U#jDs`NvtV|kD}gE^tVe843zp)RrgM&pKn>5>-KAK}4nE=-*j-|@}R z%G9XSpO@C$9^Cs7eU|xhNkG*g?TEKEt2ho$@4AtsyAZY>0QV4;9q$LnEMO1ayKv{=fNbuJv0c%z{Q8jiXMADi$-_C{9y39EkD7o?Pvs^jJ z9yU!_lN?BQ`*DF}bgNJVsZuKwPWA3ycl$vFf9++NoCqhZkQ{a(!K2~k^-CQZI68}e zPfdOD$g}nxNbMD)l_)|FQWPIXFr7Gb^1)=Ef&R-A`b)!&lFs)b<4`q~uV@4QCDxQu zGZD*VB)d}A4CZXlV=QMXW}k?ZuMomA4kN+Sa8#z^OOBlWEQ_zm$y#l{#fw%MvlHH8 zr1nw-vHCi-8N%MIpu|lyw1+BiOpmwoA-v;~XIJw-sLB<(7_H>EZwxr$Vc8dlrpB{6 zD@69t6CNXd?!BVC)-QefKJhAr5%%XQ!Z`r)kVlr=_%ZFcgA0(o3#}i8Wot$)FWcUQ$1Uk~TJ>TqO?^*CIoC{TjT|Ec z{;QeuqC0tJN<-nWD7m z3H4K7pS47Xzu;MGKO^2-L@HYX+Tl33jNtGwg_7L|!2Vb$LumX)Zhx|SycCO?bNl5wvndNQp@*1&1mwEdYB>+xZS$iL zRhK+a_DQKGHDf38DL&5JTg=?5K5S2lxr0r<`=)@+3!m|~d>>NBSdc}s2aCkSQEfvC z=_@@fItn%&i}aDI&n0L)L*WT`h)9V;hm)@_z=v1_32`Yls11#CqiibY?0IZpVuIKO zbZd=y+o2SgWy_g~JK?$d-T{Tc7adAncmStI)L#9abt@<1$5CcXmeKt&7x{F?b40hY zr$9*J<_b^cEkDe&t-yQ8O-Ch(DSoqTIalPvQ$Ab8Q9o8vfrHV_y^c@mh_P>*g_#nJ zVm?K46d%Oyi>4$vy+@af7hD3l46P8mKh41-akIYpw{B(+tR*2jD+E=O(YtXlMAyK$ zOg=>F^?%IfzSA?;SK8bFwLF283`b88W$n61AMII~u^!)!G!{<_ueLstrdhZ=v&-S) zz4em2Lz^)VzSQUxVwrK^eOd*Jvf7u4+x%ago?F};VAEICtIZ73P@sQv0}N5rgnZ(G zA$tC~&w|=4e+BpX>-oSsKIfO=#mki$NwI_;yIv&Vgh8$7QrePvYe;QjZ$gt1M8#N5 z%(E89sVK%aO+~uk>SyPRg)PQx=kBq3R~)_XxmXP$Ev#*841J+2H?ru(p^xD%f%`HJ zAfL84!Du^TZ{zf3tk#_hO|cUB^nG8UQzHWR?Z#VUw)n*PrtO!^<}t|MeL*aZ%>$P2!C?FlY!DnhzcSHLUfi> zQx3MMGu9w9&&X~Hs9mrVAR}ZhWfwOyAzvY`5Fdhx~7RjMVx%9F;7CC)M(A0Gr09{u8_8jZ^kzBw5!& 
z8z*HxUn$Z&Pp3a^Son*{9w0JWG;Yx2S?$o_Km0}N{5nsZoaOe1s9I|)*LdmzxHE)* zW4@mH{9}FTFT}D{EB9jb0pGn6 zPcZ(yHk~ry>Or*?0Qd0B+qUGIIN@>lC=-fKh}2b(h`HvT;ijor;iw^Hx}mJ!xe(OLS}nsia=v3tc)QZc_3{SFCkK4t9D>l^}1Mz zUm@QomUg!bA^hJ{rpl*|TU@>)A8SYyHM3w+GaK5%VI20gNB5J`<4=JZ#NIb_-iVR)F zZ*;JW)cQ93|LZjz%VbBrE;J*jysqERrBB1rtpW>$BN3azM;`YoC=6km5XntltA zN&~`fw@`LLl}u#>6VfsH%oDI!JcoD;4&H1$TfdR=;z*#mxQz?7e<~k?a8IZy)ig&3 zQ?8axzg#jK=SDPVGX+p~$%Or{t#i`QkX_{rt&yJ%;r^Ol5-jfFDQ6q=r%3(^fe;GP z3@n{FFF4P{!xPjJ8#%ISHYu>kFd4BMym!3*&Twwo*#WNoJ46hd--}CV{gxT_WuAgF zRk9{uE-j`!`#>}(^07Ido+(??L0iLEe(UXXs0-3mqyN2nr6rsXV@J(f`s(E9Q3j1H zw#2zQj6dY}d8s1zJT`W6PRG zAu}vT5B6sPo-=Pn0zn8qg71FInehCKN_ikUrO*5UUz@s(G; znPg2_c^xLeigVWZgwS}G&#ddKd)@wk0#kB^FZ};xj2{258X>>vV^Yl^EXjq-6 z*%6V+-{hM+NL$s)xZz(tT}#iwgT;UIm~ueAL@V55#uyax*XK+p3t-jNSW4Bn0PO?k0Z;3>HXX-hUQp{7=pWz*0lAM6|nD> z8Jg2yQM3{B6p$&36EaK?+~+0;O?%k?eAMxHjW`?m^m|Y^BtNoxl@1e_fJZ=lLkJ73 zO+N21%(udmG*ht&nPjGSRq_IDltskzheGaM*nS*|^`+&0FHG-{-GN4+uJ7U!quCD( z26dhCSCOiBr9x4;R@&p=*yDm#^M8{Vr&7;+TloX&8OKXj4mBHIw(=NIk4?-YVLq8M zd;-k_Ej$n1&fAl8a_O?VMRu@2Gh{@Zn5uVjAYd$l7l;j3O-3{817+tIrKhx!FXoTo zWv5Qy2Rfeh7#dY0c3Dd+!04@c96GdF)q?0X1~CcMAs!-@FSND5#I;ZHNmu~Rb^f7! zc)nZ!6EL8Vn{Ci8dFnLY%9}%)q5b+X=94asM?lOaut=VFN59n4<%Wfzdpz2sFN>Ru zHzMJy+rAas_dkdcjeY#|_1V+Cjbe&}3R{q#&^UH$rYRvDS9{9eNQq&*`P@Cfyzeg; z{$|6RI135OKL3~2EXH|(yL#qO+@zrvWa4yjJ{Rb7*_vxp3dk4I z{_lm?YX`9*i$5&%oAVMepDfF^S)6=(m1J~O!hiXA*FrRKu_ob2K>T%oU@&1s*Df1> zUYAAlx*$@2_uGt|{j_(BPltPwr8l{@5>*poH=lE5bnwX56mo8Sm($T#@Mb|=!XI_v zKbn7Hkp8(A6nBXcuhpZD1bDw}B!HdthXpf~9Vmzt(KJ40!`S_HX@oz+Vvwgpe#ee# zKcYx*CEX4OR5nT@6>de&3S{v`9evH2P~ z&)|&knPl`ChEz>$!8Fq^0utzud{n6Jcb}sBj=Z6UH0>>_dOn z)>C5-mYbIIz4>j(?OmMn1Xi%vW&5!Z$?1W1S%m+`Yr)hrKhQ>~iMLphs?6ail%dQv zN<{*Eyt0*yyvu{S7@*lS-|_oMwO0b zNEY?M%y{l82{t*fg+7RcdU!KPhcuAyp6SlMk56zmbI~3aJhBo?OEk!$$gYuJI_>CeOM>29Wh89BuImKT??BtdhWsiY9ctzRseI&eI8IEaLRabf1lzMWjz@sNLqGkSw%LMd1O6lz zAVV*$72G|&9Xa!OOkstCQCn+Xti=p>HCjpT;6tK|PI>?#`7&LdKG+7y;f)n&g>#Rr zO8?ye-bwLvI(+r7TjP{E*^!<~P_MmY=6_~zYJK@ z9K43^@N;v%W;bDc?8l|d1=9E-1Pb$~YhD=W`%hV(+uzBUNF+jQvq8^5hf3?SmS97) zzHFo6R?2gi-EGoKajBn204i#=B$nV5hW;Sq-Y6Z6r`l%TM{xS`iYBv!@|rgR3#Bav z{8L#V6~0efQXl6-ZeM5kcu#2VQ}wGDMd|IO6?2eBalSct|GBI5v_zASH(wvz@X3^! zC74=FzZDK;+(dof?!lG6eM&*f)-CD$@hI1 z>mUchd3$Yc9i>~&Sw%9G_Rw_K0`Fm_uO?@=J+qLCU;y7EpLq+Usj=XR9k1&c;d;gVN1=9c5-j)ATnZEz0RHi9>nu<`&){xyyQ%xMHV>xyel6^~-V{1Wwgdp+c9)Mao?7R)%`F8piY8Vw)CfnVCFqExF+Rd)ita!79TCS%`$|;hyZ6 z{5LDL`s6tjPp2gq!QgZo4oo!ojX5pX;t=Kb{61km`>;Jv-wZRiecGH{`aWArPC_Ij z4Da?a0?H6y%E;bk(@2-V?~G0r`hR>r_;=y!KnI-yWvCr3?87)~RF?s$oYr`pI+`4m ztz56T|GbAmwb{X)AS|yBMG$q=3EqITaak~EZZn~R8z(SH{+{uxRw|+)Q3vFxfS6^9 zLybwH3WLhBJE&2#$axCi%Y+fmX+eyQxgFq9r1aHVvdvU_*mfo=&PvrlqS|zKXFcw`?+1gd)(b~UHfD6$rGcv z3Pizh51H*gk9|JE=AJ2zqL+;(?thZt8NCs#=7Ljwyw3POb7HwS$Yp`&kzj%fTY*f` zx33mS`W+Ume>QC1yR_*mkcWSB+TV6ls`~9dOy}Es#agv-(=P9zX`@KB);!g(pP+^^ zlMBIg?ie)Ta}U{e7Tf#uA*AGI;4P7ZiPs;Pd@A{HAGl+ULhj2*;zXerN^9xlHK)XTe-?i9=x$T@t4HyBx+=H=%Sp?~^`JfBeX)#of&1<#*%? 
z3JSuzT2-v zK-BGd_e=?hfbw-vmzu_ftSX;_UFNs>ijJZLli8<|ZgN!+%#&%9 zM@HZ;k`hLOi+_S)#uwzqyfy=Jo77Fe?gM|5U4Y^L7JfKiK0lZ=yGFVD4p)W>l)T=8 zgH}7m#}S)uO*GF1iN2oX7LsqGh!ll01tt0rnoNsfVR6sGSGg?Lg##PkT>Hbn<)z5v zrCD%8Ew(fHc4(rZU&GmfcaZyD-L%O)=Dncx~uJ)qHY&5 z>FZE=73&UPe-|kns-JUb*3e-=C};|q}$)M^G*cKVrO?b4|O;u-fD`% z4yG zq9rnc#YbUaX%KT`gVE$@bOk3i4`O<`+Xiq7a>}B`~QJ$|gu3lbIZHDdF zWz-i+6Gkp}qn3Fw4#_jXPV-KKu4030?YYB_Vf&tPXWA#P)lsFACeOFwH>Co5jYUWW zN-G~e&MMR%a*}{yS_z3edT|dP*DWzh&FR4KZxBF}yrxNmDrNtIAS-#F!iW(kC3*!{ z=nK?78Q5(Y_@~SA6=t6Kp(l7cL(1}c<-Vu8*jwzEXm59={JK@AE<(ycAy^q%*Z3|p zaC+M2wJLX&*JA$@{1g*P@y12FVqVT~u_C@kh}m&`+*5=Fq(;3uzQa4q7y{toH6JvC zI6pdr0(si*lceK3oMtVsK1E(Ek}d1PThkS?HITe}eRc@W0xQrC&s2b&rod9laKjXN zzSIGlddTP;E^OG7?n{+99=u>!?Qa=xm6t6}7oPL_d~7X+cK_YhJmAh6@q+vr2@Im!5G7L@I5dmPzmA>PF zrgfMNLtWga9_TdJIb?ZeU5)d;rXfyUxGt* zF7SdCu2C;j2J9O$y>j1QGE8~|Mopd0VK(dUsoSo;@z%60Y1Sua-K~+Qsxu;vw(lh8 zTMg^zRRz9G0cc@f@VN5>ua*AV5n$(;?|$#sf6LRE8Tg;(9+}n+&N%?4NbcIDZE9y+i+}cU`Ngk+Dj4La~#Mz zzNkH|=F|(6d_?UhhE;FHOr17c+I370az~RGrSm%!J@fgeQhmC98P%viVy3MV`;m=eyn`3JBnpxS@c=c_t$Rkpp#5ASHjZ89l$ls|&#vx$^39pLng>mzQRsO#VbUwD1>13St*e zd7eC?&!={V^b;Ma!FB6dH0s-QS}D*Vxys)D+`utA^@u0Fq`lo43 zdiPulL<2UNsYKFFaXmyNXB#FgnHO(4)FokWNGtrASRZ(!2MA02*f9=f7U!*tGbJM% zfW@&(HQuBxTy4LU6cf#iPRPvr1WyIJ8`S3jK+46bw^!y#)bY+72J!B0@)lbhe7ZGu zsL{85{UP#jb^64JK3D^=EM9)d+7B41);?h6oq8?u1LgMcIsj*tyMxcVIsjdX%Ts>< z{K_EkGg;#qYiV^Li&AkKaXYYo`uZXSgc~5y*n7$JQfVl9<$rR&FTzT>{rw-KOPl4! z$gkHqs5fkya=wloDN*<=y1@J0pzaE>ygm@%7se6(LRIwIU%XaLyG|MXh?{&5T%=D` zyJ{R%YYTiQ+i#I=nGx)YnqPIpQebvF@fWwz@lD(KO_+P|Lj)BsqB2JAf7II>wqkPE zFK@770yA?6m^1F9UFcsRuqFw*GI!{4k2Kmh<^?KPCWV<+Z;lE{_mTfPfUF4Lh>qr5j*4-eC0Z2Xa&H^OHa+TeiHgN2YMq!w-T*%- zEP-JAFJvnU=V$OZft^*mYknvXgiWJ*M$Dv7rGdwh*muCAL$jjot1$6^>re3`B$S|+ zp!I&ieuo~46T68Zk3dP*nh&J9Lv#4VvxOht{sR6b)mN7v`#34dnB+;0hC57k=V-McJzzr+tX{sbfcYX^vrdy}TJ=b!`_+0Wd?gYK7JQwKn3U2UMAM{|CPXAPe7}nfnI58Knc{7?o z)&Lr?`?)GD5g=HKjILG!e{C*Mp{P$?m?+@&ygx)oXF+e{2&`M^nDnOmII*Qhkc6Kq z9#!p)$BYTybHL$$|LJDJPCa~K+ERXf_Kp=$%VXTGPzau_L+*q;i%M~-TUNjUc!j<0 zemV;WQ0+L;2flI8iMY1m@EUU`)R{{4OD)iNKFcP&aQTAH3)Mu}wc{zNxUfK=xK5|v z39UqT=+LjOMp_j68W(1#PcK4@`vs1#>G%UZ z1mF`((9@c80W#_9?mc@1Nn=n`SzQsuqTigb*`{6}@AFL5pIN%D^PWwjWCdbvf*O%$ zcqMmQ1@{kZFocE!9ZuM52lnSK#)Z8LljxX!lry*@iAA^f^AtQmte?+l|NfBeOZv5} zKuT8Qiu6=*rnZ6se@Qh2Q?BN-Ygcrnsq%NDQ{8Ca1XZm0S*(SbeO(+fb^<3L1>E5s zT$6C_xH^AqL+vn;GDq`nMPy18q%>H9eXcp{J$|Hr70zNPc|@lcR4g#8`Whi|>5*F; zc!rK<2($bC&imyNoXhZr9U)zfOdXenaMQdleiq68;VsSP!u1UV#b}vO>eft$frkOK zqT;+=`8fJiHQq}fG32j0_qZ@GgcnOJgEmDGq8x$U!MIP-DBcD)Z`#?d)r%p z3k!lGsI3c&HPJcA#ij4#S8Y$Q9qV9i0g$WwjAE)$HWaE#^Juh>dG4^UP;I^cFf7Qa z&!*NPiO6`g{3V#7zy`IMSvO3y_OQTdJCE6@uFHT@_6 z|Kn&fZZdUczR=fDXFwq=127e?`Jqt6$-WC$VB*efY>qX<-uqGBv4SB&<0D~rOS+%y@!dujlij1rz#I| zIb@WJLad-&_!qrY28s*_{~NV{h6RJZyyfA5u#O*5n<;{Q6_~sQ1Orq1d~l_Q)(@90_oFFcxZ2vOl zWHKKvbqb*pNSs>*>8}T!g+LDEZ$0q%SC`}kiAyB3%`oYAmOs`A_mSi*#&&ouwv}l{ zb@LNPSb0|Vl`EuANo+tJFM!uB>1$Us@O3xRPcrp4b?*KGf|-#yTrJW3r?eKaJ)_&D zo_97KOF*U0nN-bBU|pMvr^|bCEvfoUD(qTV&v0#s*#?;YYO>W_R96w&J0Al=C!!dlMt7A^amIzn{O*Ndr2JT-dST0b#!b(iMYkcw|cOLt5@C z3+L_VkdzOo9IRn+^QeNOoupTQa4&sL{)s%zMg=>`GDy+CSq(+18t$h!k^&ut;16aS zuz}&^YsF@8Z?W@t0a5;^Dtqx1hJD14DDJ4+8jjcq#oy#l@w?=c;7D7Kjxdq(>ako6 zUbP(^qWef*-_rTRk zS%}1{*v{*mok?3eKudv9KgsMc<)H?A*8Uju6C`|KLRlW2f~b#;hfaJ`#nZzDK`f8; z2f7fvFO6Q;ZHLGGkKk$S!h&sYdq94PinmJo5x6k@oz*a~Dxk7xy#}D;wFnwygN_P~ zN#z0%f`1B|rTO&{93?{Ng_qqaB2SOINp*J;yJTCH@6Vq6BsF+@THfP`qeE?~B7Dh3)ht>sy_haD_bJd`pu2clP6fF1Qt73QAEBkJ z=u=YG7GHtlh+4A8blji>k)P>WK z?k@z1j*h5i9>^hSg79Ibr?6hf@G)^oDqbw)Ur~G4&)kI%b9fDP?~-XE9OU-_7x@GQcU`oDWgwI4w_ibr9H3{I{F-tc{YB09|oIi@tsGUPhME5PiH 
zJA}n$Bxh3lX5o)KrVyr$T@4uc3?ZlUohP`>;gva+y4+>)nKg_4jp#w`!d}rmHb2$- z&Ci>!t|v_locP&u)x?nDa1h)iLsn(o2$+P;WQwUvB@NL_3bvDCWP~^}@gDKceu0ISpF__qnjrIQtvS z@K|0PNt?O$ZgqA|(OVu1_AD;tpJ@@=?w8!T^mj!@>&-j6g0sAz_t0!f?_Mfgpmbk& zC!{(yw-3wLu|4R;k>cNJjjq*Rx=-!im&J?S@!?crnn}_3O5ek2Z@!?0uOrwzhx}$v zFEacXMGy>9##epW3ao~C=agaK8}na?`h!Ki;>zC8&QRQHW}e5B&d(E#t}`hAlIA!L zD2}$F1m-%+mocp*MJGw$$7wq?SI_{p5fw>!Svy7PKkdFR%0<5J9}E@{H`auqh{QZ}D`D^^V>pxv9rF|YWNazHd) zyEti^fdqv}y(GEc(4~!iwPg1j&n&iYi5$X8&+6QY_t7SpNIp=9Zk3~RmKauX^1z^- zlOf|xHeG+86D-GM`N~a#0)A$odXUb7Iaf3^)N?iApuLgRUO#`kaXW8u49jgOTlqXh zwo^&*Z+R>RgDmuQZy4~M>&PuJCSf&n9z&*XoHmYRusRm&yx&_CM#L{w4kem&ZAjBU z)a;B1cqcR=9rvx)a^4fZJeaRYwzroHd+z@}`y?@|czk0p2Lc<;dV4$PmY1t`k%#xW zpt&`ADQ5X@_clxC6;iO^uG1;iVkzgG?Vh9>7#ak=v-lCLm7H_2>TJ5rZv=zQEN+GZOF0@r?pV5WQFV3kbDP<-N|{e3TZlbk0ywg&8?aC@f_*5G2ntZ}s_ zMarv_%7w=mPJ<6&W`i8;&;R=_6)>|G3;hpFWIRT@2M8U?ST@7nxH&Jz4>x&#<@?Ky z?Ni=ALyN<@k$}~bqB9(Jk*C>~L1Ii9%JF*a5`=Grh;vwWguRiO*HDt)*8AficQ17| z-_1cQW39A^*^&y_iXZ|v&m0K+EBd+?!KX&AAInz}8}bfPO7S1k0_p3QjI;9Sfp2q6 zg_-5B_2#bL=u~4{M^E{p@}zCXY@l$-D?$;=cIg!!j@1*7g9cl@>zx9g0S}Y_Rdt&F z{X&UqgdV$XE7*+nOMHB4QKAQFzR>L49E*`cMB$`I^S^X8*lL((|k#Ty~<9CrD zovf$gwCiVBW`^&_I6d~H+$s7VcFeq?w5#v2-ga&ok&^2~y{}i*Zvlfmzs!us8nI<9 zCXpA9_RO=)^2YaUn7?3X<YPR z=$(jMQd23glxxu2(bXdPvH`#G5ydoYR!%7d`PaF>zmn4>FCh@&yRT>1;{BzW~;7=+`GA2sN)sSbZeQH+Cn&@WZjm1z z!D0;Gd2wrv^DHH%GEEFrTtU6NPC)!?_`92vvrV0~H$mZ?g5B`*%OFhaUT%}`D}Coj zq7zHv&+iH@w*GW$XsWo6(oL#I-edrF*!n0vaC?qyXGQpSIFt((!^!CwWBUevDZPYa z=tlbLnZvC@Tut{V-HQ*C4pa{DnlFI*_wip}-te~X^pl=S;buJF8x9s= zBJo4!@xT8*j6Ln*@r?8C{eF@Zwn9yD%Cp^#_M> zt`fNuhoPrwsI5U&&%-yAZsQ=80T8|1zhR^6*pyaXS{=rV^tbZvfqI$V6|*zoYZ9Vq z1;QuBz9{J7=bMDABrj^1vRL`At;IU8@)>!7*|aaT@a1OwPIhm3izLx4+%EDWC>Yty zgIC)Je6>q~@*OpT&w!RN$=7MCzG218ayA6))AXkugW_cyd6%1U_5j#H{9ffmy+=2? zni9VO@@(TrXI;R1HE3A=w2UpV|^q}yJuE{yrK!eVE zplpQ7Rb+R@_F=A@2m5U$$?iiRk(lr z>FRk+N-EXBAZdNXjtW{X9&r?_Ipa5en9yOd%vPHW% zAzFvT%@?0?oSqr$mzp~-`Rgt$d)!_*CtfPfM>;z-DrYN8uhYfb0CHI8<-1SVsJH%5 zuyH#@Y+nylj|B;*4S7H~Sev>BleCbE^WX;*neCkvMUTGUW!s$rk~QeG+}r=uds zt+A!w!Uybc*LF>6kDrBMvpEXk_=;af;7D;*0Y*tEK8XuJx^V->uOtqs-02XaC&xVy z6w{|7KUk#&hD0k384^No@v`>9cb)0M`OcM zr`s;}@_iRGG6VzE> zoKHaRFZU%#VEI`vvIqO`Ccbmn8XZe)r;4%*63fT8&M7g{)%2k%LJrkktUGKJWB`t73?K7kFC@EGW!EB?G}FuR8z{<6dAJ+= z%h{yFL0CvhC(ZJ88PcX$ZW2WFMvvQuWqsz3H06gteqQ!#$N-!Axzth&8Gj3)YWI`?G?(aM~YE; z>~>vE%2y4%V*yGlJp@+eor4OAkGf0|w=>vodT2S%X`}(>*XXNOiC?R2-euGLPtee^ zfbZMLbSP(ve~uEA`9v6?dAP_)SY;{R))r?TNDbJt7U!kttd-%Wlvh7) zTp)bAOSTIHqgKBV8yi0eU#>HrZK-RTmYy0u3sh8BHnXk`3;1{|X8560rsOPo)2Ypf z!`D>o3(gSB-zdkzVmXXU@1t>nN;jY20f!Q$HxJ)SAyr-t`mR;-rwBhh{fv}z^Q|VY zNsAT7)K=3P2!%YttN)&QE=1E=_AM!80YQ!yy;L1<@Xe;E`js6m(!l4VFZg(CjT_4) z+Oltt(}gY`FeA1%&Pa1J)D0Quy*8vr4V~`NL_k^xVKJli(!XCK__V~GH`Q0hhpf-$ zy2I}1-y{iWRmvfWJT2-aNR*faWlAup3@U76obuKM$;oP4@vRr4t7RWf&6xj2UJUM8 zLVo*K#wQT^_cN1bR_DDYSRl@l&fFLO zKrgVr{6ZWhbMY#~fUwYQM?q|A?j|;)I2peLQ3vE?oUIi6(R%r0W8Y$nO1b{DiQ>y3 zoU?_F@!iM z7#o3*fG0ML)fMY?F=rP=&7!K}@K;*OPuiMx zV(nTGcS?m1a&nvtOxySb(@FVIQ86@#s{`O%m}~``e80ua)VmREXT#0bG)?y^Z#>sG z&$JWoPm==;xy^(egh>LELORk6S=fV|*i@&Ct-*Ub9i;$^!FS5R)q4|qqrf)%T3iXZ z4SB_b-IVBO_PG^r(u&`p&ySW;4p$C|L1IC6?~c>(H928+lVj-18IbCla?dojv)l}6 zJ|6Y*5WYr-NBy7S&*l>La;+7PK++WGHgBiS2C+eMYk|l}Y`p-) z1d{O4Gq9#u&*yvEXEoLSqbjK|`{fS7=Pk<*fNqGVB*Z1G-ImO9|2fM}98{)OOv%N& z3&hZn4jb8XgQdyJj7Caa&B%@`bIbCK5P^STqA8$5EzUuwU`$GE^-cXI8@}JvMf#*A zh6b+8TlLj+56kW>D!im&QPJ6@b+B}>V$8&D6aE&j%6b5hz)kPfL6MGqSA|&0w=ME| zJ-E03$Jar_NBn;yNSAyx8X8+p+cywAm-v`x6A=)L5{0=>v#y3P`G+uuH!wOILX#d_j~ ze&aqPxK*$S@9SwmUw&!bgRc!Zl@hDkF*`JR7?BRlz1+1RW|+-um^^FXBbjo%MX3JN 
z=@0%xcaMtcw`$22CB*n|{QdDj`m0M|ftX2LI*zf4gJb#t9=gt+YTX_9OUn0R&x{wT zx(6YNj+kA^QF#Nmtr2lYin>s>(f`hQsap>pZ)!GZ`4zNSrp)gU^zlBRTBcKmqq=_K6(aA zuVLzqX4z9Nt(mcnnX$Ztr55Ve0)*2+lUDZ6I`Br{{U8bSn^f&bu1Xi2NHDK$=a8Ef zVM(9c_D$VEv5O#ttPc#9ICjE<({RPk&zMs)@2vBh0^3kx#c|lm9!~ zx{Ie|T-Gkq;$7xe19|)oq3p(Jefcy}h9-x=&swyh5&i$(ODoM|_Dv;h4}`^#@aunw zo9%3WsCswsv~Q)^_i zwv|wr8#a9Izt`JJLQqiz`DEsCW5uXcaQ$32t-pmRC$$}K6?zb@)5Sfb(!)0}EeFB; z&d++Um~*es#!DXmO~@o0GPh#M?(!-AqAC7fiMO16oEPJsbA&zr-Yzu7f5L$24KAWN zQ1joh*o&rbmh&wL|CsgF^_t>u0ju;*s5((kvM^E1a8JV?&drQW{@`2 zu6#a$Vk|ru=;gzpT01}E(+DQFw;5-Go}pkBf(+@YNN>nYszL3@Fo7E>O%I|ik9HnU zuCSc!Ol`G|RFkGhSY0oo*EaA6!R~y2n`{)N+T!rY-e6k~xhRLgTY&y>H-DF;bD%gr ztdC*#U{*>V`K$0w=7~Vl|GtN476PUb&b(p;KI4DC|E<9PR^Wdt@V^!K|FQz5lx^1B Wwf31l8gz< Date: Fri, 6 Jan 2023 04:04:32 +0100 Subject: [PATCH 002/187] Add substance mesh loader --- .../plugins/load/load_mesh.py | 98 +++++++++++++++++++ 1 file changed, 98 insertions(+) create mode 100644 openpype/hosts/substancepainter/plugins/load/load_mesh.py diff --git a/openpype/hosts/substancepainter/plugins/load/load_mesh.py b/openpype/hosts/substancepainter/plugins/load/load_mesh.py new file mode 100644 index 0000000000..7cc5e35912 --- /dev/null +++ b/openpype/hosts/substancepainter/plugins/load/load_mesh.py @@ -0,0 +1,98 @@ +from openpype.pipeline import ( + load, + get_representation_path, +) +from openpype.pipeline import legacy_io + +import substance_painter.project +import qargparse + + +class SubstanceLoadProjectMesh(load.LoaderPlugin): + """Load mesh for project""" + + families = ["*"] + representations = ["abc", "fbx", "obj", "gltf"] + + label = "Load mesh" + order = -10 + icon = "code-fork" + color = "orange" + + options = [ + qargparse.Boolean( + "preserve_strokes", + default=True, + help="Preserve strokes positions on mesh.\n" + "(only relevant when loading into existing project)" + ), + qargparse.Boolean( + "import_cameras", + default=True, + help="Import cameras from the mesh file." + ) + ] + + def load(self, context, name, namespace, data): + + if not substance_painter.project.is_open(): + # Allow to 'initialize' a new project + # TODO: preferably these settings would come from the actual + # new project prompt of Substance (or something that is + # visually similar to still allow artist decisions) + settings = substance_painter.project.Settings( + default_texture_resolution=4096, + import_cameras=data.get("import_cameras", True), + ) + + substance_painter.project.create( + mesh_file_path=self.fname, + settings=settings + ) + return + + # Reload the mesh + settings = substance_painter.project.MeshReloadingSettings( + import_cameras=data.get("import_cameras", True), + preserve_strokes=data.get("preserve_strokes", True) + ) + + def on_mesh_reload(status: substance_painter.project.ReloadMeshStatus): + if status == substance_painter.project.ReloadMeshStatus.SUCCESS: + print("Reload succeeded") + else: + raise RuntimeError("Reload of mesh failed") + + path = self.fname + substance_painter.project.reload_mesh(path, settings, on_mesh_reload) + + # TODO: Register with the project so host.get_containers() can return + # the loaded content in manager + + def switch(self, container, representation): + self.update(container, representation) + + def update(self, container, representation): + + path = get_representation_path(representation) + + # Reload the mesh + # TODO: Re-use settings from first load? 
+ settings = substance_painter.project.MeshReloadingSettings( + import_cameras=True, + preserve_strokes=True + ) + + def on_mesh_reload(status: substance_painter.project.ReloadMeshStatus): + if status == substance_painter.project.ReloadMeshStatus.SUCCESS: + print("Reload succeeded") + else: + raise RuntimeError("Reload of mesh failed") + + substance_painter.project.reload_mesh(path, settings, on_mesh_reload) + + def remove(self, container): + + # Remove OpenPype related settings about what model was loaded + # or close the project? + pass From 3cb797b10a04726183ca740a5f10b593be45aea1 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 6 Jan 2023 04:05:13 +0100 Subject: [PATCH 003/187] Add some fixes to stylesheet to avoid very odd looking OpenPype UIs in Substance Painter --- openpype/style/style.css | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/openpype/style/style.css b/openpype/style/style.css index a7a48cdb9d..ae1b9d2991 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -127,6 +127,7 @@ QPushButton { border-radius: 0.2em; padding: 3px 5px 3px 5px; background: {color:bg-buttons}; + min-width: 0px; /* Substance Painter fix */ } QPushButton:hover { @@ -328,7 +329,15 @@ QTabWidget::tab-bar { alignment: left; } +/* avoid QTabBar overrides in Substance Painter */ +QTabBar { + text-transform: none; + font-weight: normal; +} + QTabBar::tab { + text-transform: none; + font-weight: normal; border-top: 1px solid {color:border}; border-left: 1px solid {color:border}; border-right: 1px solid {color:border}; @@ -368,6 +377,7 @@ QHeaderView { QHeaderView::section { background: {color:bg-view-header}; padding: 4px; + border-top: 0px; /* Substance Painter fix */ border-right: 1px solid {color:bg-view}; border-radius: 0px; text-align: center; From e710a8dc70496e042e000da50c5ad2181376c84a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 6 Jan 2023 05:03:19 +0100 Subject: [PATCH 004/187] Fix bug if file wasn't saved yet, file_path() would return None --- openpype/hosts/substancepainter/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/substancepainter/api/pipeline.py b/openpype/hosts/substancepainter/api/pipeline.py index 3fd081ca1c..31c87f079d 100644 --- a/openpype/hosts/substancepainter/api/pipeline.py +++ b/openpype/hosts/substancepainter/api/pipeline.py @@ -112,7 +112,7 @@ class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): return None filepath = substance_painter.project.file_path() - if filepath.endswith(".spt"): + if filepath and filepath.endswith(".spt"): # When currently in a Substance Painter template assume our # scene isn't saved. This can be the case directly after doing # "New project", the path will then be the template used. 
This From 8468dbce679cc5dfee58e99e4015bb812f47080d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 6 Jan 2023 05:04:53 +0100 Subject: [PATCH 005/187] Implement managing for Load Mesh (draft implementation) --- .../hosts/substancepainter/api/pipeline.py | 47 +++++++++++- .../plugins/load/load_mesh.py | 71 ++++++++++++++----- 2 files changed, 97 insertions(+), 21 deletions(-) diff --git a/openpype/hosts/substancepainter/api/pipeline.py b/openpype/hosts/substancepainter/api/pipeline.py index 31c87f079d..4d49fa83d7 100644 --- a/openpype/hosts/substancepainter/api/pipeline.py +++ b/openpype/hosts/substancepainter/api/pipeline.py @@ -123,7 +123,16 @@ class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): return filepath def get_containers(self): - return [] + + if not substance_painter.project.is_open(): + return + + metadata = substance_painter.project.Metadata("OpenPype") + containers = metadata.get("containers") + if containers: + for key, container in containers.items(): + container["objectName"] = key + yield container @staticmethod def create_context_node(): @@ -231,4 +240,38 @@ def on_open(): dialog.setMessage("There are outdated containers in " "your Substance scene.") dialog.on_clicked.connect(_on_show_inventory) - dialog.show() \ No newline at end of file + dialog.show() + + +def imprint_container(container, + name, + namespace, + context, + loader): + """Imprint a loaded container with metadata. + + Containerisation enables a tracking of version, author and origin + for loaded assets. + + Arguments: + container (dict): The (substance metadata) dictionary to imprint into. + name (str): Name of resulting assembly + namespace (str): Namespace under which to host container + context (dict): Asset information + loader (load.LoaderPlugin): loader instance used to produce container. 
+ + Returns: + None + + """ + + data = [ + ("schema", "openpype:container-2.0"), + ("id", AVALON_CONTAINER_ID), + ("name", str(name)), + ("namespace", str(namespace) if namespace else None), + ("loader", str(loader.__class__.__name__)), + ("representation", str(context["representation"]["_id"])), + ] + for key, value in data: + container[key] = value diff --git a/openpype/hosts/substancepainter/plugins/load/load_mesh.py b/openpype/hosts/substancepainter/plugins/load/load_mesh.py index 7cc5e35912..519ed3ad4e 100644 --- a/openpype/hosts/substancepainter/plugins/load/load_mesh.py +++ b/openpype/hosts/substancepainter/plugins/load/load_mesh.py @@ -2,12 +2,27 @@ from openpype.pipeline import ( load, get_representation_path, ) -from openpype.pipeline import legacy_io +from openpype.hosts.substancepainter.api.pipeline import imprint_container import substance_painter.project import qargparse +def set_container(key, container): + metadata = substance_painter.project.Metadata("OpenPype") + containers = metadata.get("containers") or {} + containers[key] = container + metadata.set("containers", containers) + + +def remove_container(key): + metadata = substance_painter.project.Metadata("OpenPype") + containers = metadata.get("containers") + if containers: + containers.pop(key, None) + metadata.set("containers", containers) + + class SubstanceLoadProjectMesh(load.LoaderPlugin): """Load mesh for project""" @@ -33,6 +48,8 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): ) ] + container_key = "ProjectMesh" + def load(self, context, name, namespace, data): if not substance_painter.project.is_open(): @@ -49,25 +66,34 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): mesh_file_path=self.fname, settings=settings ) - return - # Reload the mesh - settings = substance_painter.project.MeshReloadingSettings( - import_cameras=data.get("import_cameras", True), - preserve_strokes=data.get("preserve_strokes", True) - ) + else: + # Reload the mesh + settings = substance_painter.project.MeshReloadingSettings( + import_cameras=data.get("import_cameras", True), + preserve_strokes=data.get("preserve_strokes", True) + ) - def on_mesh_reload(status: substance_painter.project.ReloadMeshStatus): - if status == substance_painter.project.ReloadMeshStatus.SUCCESS: - print("Reload succeeded") - else: - raise RuntimeError("Reload of mesh failed") + def on_mesh_reload(status: substance_painter.project.ReloadMeshStatus): # noqa + if status == substance_painter.project.ReloadMeshStatus.SUCCESS: # noqa + print("Reload succeeded") + else: + raise RuntimeError("Reload of mesh failed") - path = self.fname - substance_painter.project.reload_mesh(path, settings, on_mesh_reload) + path = self.fname + substance_painter.project.reload_mesh(path, + settings, + on_mesh_reload) - # TODO: Register with the project so host.get_containers() can return - # the loaded content in manager + # Store container + container = {} + imprint_container(container, + name=self.container_key, + namespace=self.container_key, + context=context, + loader=self) + container["options"] = data + set_container(self.container_key, container) def switch(self, container, representation): self.update(container, representation) @@ -78,9 +104,10 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): # Reload the mesh # TODO: Re-use settings from first load? 
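# A short illustrative aside: the container dict handled in update() is the one
# SubstanceHost.get_containers() reads back from project metadata, so the
# options stored at load time are available again here. A minimal sketch of
# that round-trip, assuming an open project (the helper name is hypothetical;
# the "OpenPype"/"containers" keys mirror set_container() above):
def _store_and_read_container(key, container):
    import substance_painter.project

    metadata = substance_painter.project.Metadata("OpenPype")
    containers = metadata.get("containers") or {}
    containers[key] = container
    metadata.set("containers", containers)
    # get_containers() later iterates this dict and injects each key as
    # "objectName" so the scene inventory can list and manage the entries.
    return metadata.get("containers")[key]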
+ container_options = container.get("options", {}) settings = substance_painter.project.MeshReloadingSettings( - import_cameras=True, - preserve_strokes=True + import_cameras=container_options.get("import_cameras", True), + preserve_strokes=container_options.get("preserve_strokes", True) ) def on_mesh_reload(status: substance_painter.project.ReloadMeshStatus): @@ -91,8 +118,14 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): substance_painter.project.reload_mesh(path, settings, on_mesh_reload) + # Update container representation + container["representation"] = str(representation["_id"]) + set_container(self.container_key, container) + def remove(self, container): # Remove OpenPype related settings about what model was loaded # or close the project? - pass + # TODO: This is likely best 'hidden' away to the user because + # this will leave the project's mesh unmanaged. + remove_container(self.container_key) From 30764456afa4f92053b61d6a3e39576874c235a0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 6 Jan 2023 05:22:59 +0100 Subject: [PATCH 006/187] Add launch with last workfile support for Substance Painter --- openpype/hooks/pre_add_last_workfile_arg.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hooks/pre_add_last_workfile_arg.py b/openpype/hooks/pre_add_last_workfile_arg.py index 3609620917..d5a9a41e5a 100644 --- a/openpype/hooks/pre_add_last_workfile_arg.py +++ b/openpype/hooks/pre_add_last_workfile_arg.py @@ -23,6 +23,7 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook): "blender", "photoshop", "tvpaint", + "substance", "aftereffects" ] From bcac4d1fafde2a3a2b7ce6f426d603d586b4df05 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 6 Jan 2023 12:17:49 +0100 Subject: [PATCH 007/187] Add draft for workfile Creator --- .../plugins/create/create_workfile.py | 97 +++++++++++++++++++ 1 file changed, 97 insertions(+) create mode 100644 openpype/hosts/substancepainter/plugins/create/create_workfile.py diff --git a/openpype/hosts/substancepainter/plugins/create/create_workfile.py b/openpype/hosts/substancepainter/plugins/create/create_workfile.py new file mode 100644 index 0000000000..cec760040b --- /dev/null +++ b/openpype/hosts/substancepainter/plugins/create/create_workfile.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating workfiles.""" + +from openpype.pipeline import CreatedInstance, AutoCreator +from openpype.pipeline import legacy_io +from openpype.client import get_asset_by_name + +import substance_painter.project + + +def set_workfile_data(data, update=False): + if update: + data = get_workfile_data().update(data) + metadata = substance_painter.project.Metadata("OpenPype") + metadata.set("workfile", data) + + +def get_workfile_data(): + metadata = substance_painter.project.Metadata("OpenPype") + return metadata.get("workfile") or {} + + +class CreateWorkfile(AutoCreator): + """Workfile auto-creator.""" + identifier = "io.openpype.creators.substancepainter.workfile" + label = "Workfile" + family = "workfile" + icon = "document" + + default_variant = "Main" + + def create(self): + + variant = self.default_variant + project_name = self.project_name + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] + host_name = legacy_io.Session["AVALON_APP"] + + # Workfile instance should always exist and must only exist once. + # As such we'll first check if it already exists and is collected. 
+ current_instance = next( + ( + instance for instance in self.create_context.instances + if instance.creator_identifier == self.identifier + ), None) + + if current_instance is None: + self.log.info("Auto-creating workfile instance...") + asset_doc = get_asset_by_name(project_name, asset_name) + subset_name = self.get_subset_name( + variant, task_name, asset_doc, project_name, host_name + ) + data = { + "asset": asset_name, + "task": task_name, + "variant": variant + } + current_instance = self.create_instance_in_context(subset_name, + data) + elif ( + current_instance["asset"] != asset_name + or current_instance["task"] != task_name + ): + # Update instance context if is not the same + asset_doc = get_asset_by_name(project_name, asset_name) + subset_name = self.get_subset_name( + variant, task_name, asset_doc, project_name, host_name + ) + current_instance["asset"] = asset_name + current_instance["task"] = task_name + current_instance["subset"] = subset_name + + set_workfile_data(current_instance.data_to_store()) + + def collect_instances(self): + workfile = get_workfile_data() + if not workfile: + return + self.create_instance_in_context_from_existing(workfile) + + def update_instances(self, update_list): + for instance, _changes in update_list: + set_workfile_data(instance.data_to_store(), update=True) + + # Helper methods (this might get moved into Creator class) + def create_instance_in_context(self, subset_name, data): + instance = CreatedInstance( + self.family, subset_name, data, self + ) + self.create_context.creator_adds_instance(instance) + return instance + + def create_instance_in_context_from_existing(self, data): + instance = CreatedInstance.from_existing(data, self) + self.create_context.creator_adds_instance(instance) + return instance From 1c4ff746adaee6e2ac34f765d57f64bda967765e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 6 Jan 2023 16:10:26 +0100 Subject: [PATCH 008/187] Remove 'fix' which didn't originally fix the UI issue - it was a styleSheet issue --- openpype/hosts/substancepainter/addon.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/hosts/substancepainter/addon.py b/openpype/hosts/substancepainter/addon.py index bb55f20189..6288ef1559 100644 --- a/openpype/hosts/substancepainter/addon.py +++ b/openpype/hosts/substancepainter/addon.py @@ -20,9 +20,6 @@ class SubstanceAddon(OpenPypeModule, IHostAddon): env["SUBSTANCE_PAINTER_PLUGINS_PATH"] = plugin_path - # Fix UI scale issue - env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) - def get_launch_hook_paths(self, app): if app.host_name != self.host_name: return [] From 82639e8634587b7f63c703903c947c13f5e6f327 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 7 Jan 2023 16:18:07 +0100 Subject: [PATCH 009/187] Avoid trying to import blessed terminal coloring in Substance Painter --- openpype/hosts/substancepainter/addon.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/substancepainter/addon.py b/openpype/hosts/substancepainter/addon.py index 6288ef1559..2fbea139c5 100644 --- a/openpype/hosts/substancepainter/addon.py +++ b/openpype/hosts/substancepainter/addon.py @@ -20,6 +20,9 @@ class SubstanceAddon(OpenPypeModule, IHostAddon): env["SUBSTANCE_PAINTER_PLUGINS_PATH"] = plugin_path + # Log in Substance Painter doesn't support custom terminal colors + env["OPENPYPE_LOG_NO_COLORS"] = "Yes" + def get_launch_hook_paths(self, app): if app.host_name != self.host_name: return [] From c101f6a2cbce65bdf97d8ccc7812f85895f38bdc Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 
7 Jan 2023 16:19:47 +0100 Subject: [PATCH 010/187] Cleanup OpenPype Qt widgets on Substance Painter shutdown --- .../deploy/plugins/openpype_plugin.py | 23 ++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/substancepainter/deploy/plugins/openpype_plugin.py b/openpype/hosts/substancepainter/deploy/plugins/openpype_plugin.py index 01779156f1..e7e1849546 100644 --- a/openpype/hosts/substancepainter/deploy/plugins/openpype_plugin.py +++ b/openpype/hosts/substancepainter/deploy/plugins/openpype_plugin.py @@ -1,13 +1,34 @@ + +def cleanup_openpype_qt_widgets(): + """ + Workaround for Substance failing to shut down correctly + when a Qt window was still open at the time of shutting down. + + This seems to work sometimes, but not all the time. + + """ + # TODO: Create a more reliable method to close down all OpenPype Qt widgets + from PySide2 import QtWidgets + import substance_painter.ui + + # Kill OpenPype Qt widgets + print("Killing OpenPype Qt widgets..") + for widget in QtWidgets.QApplication.topLevelWidgets(): + if widget.__module__.startswith("openpype."): + print(f"Deleting widget: {widget.__class__.__name__}") + substance_painter.ui.delete_ui_element(widget) + + def start_plugin(): from openpype.pipeline import install_host from openpype.hosts.substancepainter.api import SubstanceHost - install_host(SubstanceHost()) def close_plugin(): from openpype.pipeline import uninstall_host + cleanup_openpype_qt_widgets() uninstall_host() From ccb4371641b79275702bc5557fefdf3c8d39c0a6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 7 Jan 2023 17:42:43 +0100 Subject: [PATCH 011/187] Refactor metadata code to allow more structure for future Substance Painter plugins --- .../hosts/substancepainter/api/pipeline.py | 54 ++++++++++++++++- .../plugins/create/create_workfile.py | 27 ++++----- .../plugins/load/load_mesh.py | 58 +++++++++---------- 3 files changed, 91 insertions(+), 48 deletions(-) diff --git a/openpype/hosts/substancepainter/api/pipeline.py b/openpype/hosts/substancepainter/api/pipeline.py index 4d49fa83d7..e7dbe5e5eb 100644 --- a/openpype/hosts/substancepainter/api/pipeline.py +++ b/openpype/hosts/substancepainter/api/pipeline.py @@ -36,6 +36,10 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") + +OPENPYPE_METADATA_KEY = "OpenPype" +OPENPYPE_METADATA_CONTAINERS_KEY = "containers" # child key + self = sys.modules[__name__] self.menu = None self.callbacks = [] @@ -127,8 +131,8 @@ class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): if not substance_painter.project.is_open(): return - metadata = substance_painter.project.Metadata("OpenPype") - containers = metadata.get("containers") + metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY) + containers = metadata.get(OPENPYPE_METADATA_CONTAINERS_KEY) if containers: for key, container in containers.items(): container["objectName"] = key @@ -275,3 +279,49 @@ def imprint_container(container, ] for key, value in data: container[key] = value + + +def set_project_metadata(key, data): + """Set a key in project's OpenPype metadata.""" + metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY) + metadata.set(key, data) + + +def get_project_metadata(key): + """Get a key from project's OpenPype metadata.""" + metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY) + return metadata.get(key) + + +def set_container_metadata(object_name, 
container_data, update=False): + """Helper method to directly set the data for a specific container + + Args: + object_name (str): The unique object name identifier for the container + container_data (dict): The data for the container. + Note 'objectName' data is derived from `object_name` and key in + `container_data` will be ignored. + update (bool): Whether to only update the dict data. + + """ + # The objectName is derived from the key in the metadata so won't be stored + # in the metadata in the container's data. + container_data.pop("objectName", None) + + metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY) + containers = metadata.get(OPENPYPE_METADATA_CONTAINERS_KEY) or {} + if update: + existing_data = containers.setdefault(object_name, {}) + existing_data.update(container_data) # mutable dict, in-place update + else: + containers[object_name] = container_data + metadata.set("containers", containers) + + +def remove_container_metadata(object_name): + """Helper method to remove the data for a specific container""" + metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY) + containers = metadata.get(OPENPYPE_METADATA_CONTAINERS_KEY) + if containers: + containers.pop(object_name, None) + metadata.set("containers", containers) diff --git a/openpype/hosts/substancepainter/plugins/create/create_workfile.py b/openpype/hosts/substancepainter/plugins/create/create_workfile.py index cec760040b..8b010ebe2c 100644 --- a/openpype/hosts/substancepainter/plugins/create/create_workfile.py +++ b/openpype/hosts/substancepainter/plugins/create/create_workfile.py @@ -5,20 +5,10 @@ from openpype.pipeline import CreatedInstance, AutoCreator from openpype.pipeline import legacy_io from openpype.client import get_asset_by_name -import substance_painter.project - - -def set_workfile_data(data, update=False): - if update: - data = get_workfile_data().update(data) - metadata = substance_painter.project.Metadata("OpenPype") - metadata.set("workfile", data) - - -def get_workfile_data(): - metadata = substance_painter.project.Metadata("OpenPype") - return metadata.get("workfile") or {} - +from openpype.hosts.substancepainter.api.pipeline import ( + set_project_metadata, + get_project_metadata +) class CreateWorkfile(AutoCreator): """Workfile auto-creator.""" @@ -71,17 +61,20 @@ class CreateWorkfile(AutoCreator): current_instance["task"] = task_name current_instance["subset"] = subset_name - set_workfile_data(current_instance.data_to_store()) + set_project_metadata("workfile", current_instance.data_to_store()) def collect_instances(self): - workfile = get_workfile_data() + workfile = get_project_metadata("workfile") if not workfile: return self.create_instance_in_context_from_existing(workfile) def update_instances(self, update_list): for instance, _changes in update_list: - set_workfile_data(instance.data_to_store(), update=True) + # Update project's workfile metadata + data = get_project_metadata("workfile") or {} + data.update(instance.data_to_store()) + set_project_metadata("workfile", data) # Helper methods (this might get moved into Creator class) def create_instance_in_context(self, subset_name, data): diff --git a/openpype/hosts/substancepainter/plugins/load/load_mesh.py b/openpype/hosts/substancepainter/plugins/load/load_mesh.py index 519ed3ad4e..3e62b90988 100644 --- a/openpype/hosts/substancepainter/plugins/load/load_mesh.py +++ b/openpype/hosts/substancepainter/plugins/load/load_mesh.py @@ -2,27 +2,16 @@ from openpype.pipeline import ( load, get_representation_path, ) 
-from openpype.hosts.substancepainter.api.pipeline import imprint_container +from openpype.hosts.substancepainter.api.pipeline import ( + imprint_container, + set_container_metadata, + remove_container_metadata +) import substance_painter.project import qargparse -def set_container(key, container): - metadata = substance_painter.project.Metadata("OpenPype") - containers = metadata.get("containers") or {} - containers[key] = container - metadata.set("containers", containers) - - -def remove_container(key): - metadata = substance_painter.project.Metadata("OpenPype") - containers = metadata.get("containers") - if containers: - containers.pop(key, None) - metadata.set("containers", containers) - - class SubstanceLoadProjectMesh(load.LoaderPlugin): """Load mesh for project""" @@ -48,10 +37,12 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): ) ] - container_key = "ProjectMesh" - def load(self, context, name, namespace, data): + # Get user inputs + import_cameras = data.get("import_cameras", True) + preserve_strokes = data.get("preserve_strokes", True) + if not substance_painter.project.is_open(): # Allow to 'initialize' a new project # TODO: preferably these settings would come from the actual @@ -59,7 +50,7 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): # visually similar to still allow artist decisions) settings = substance_painter.project.Settings( default_texture_resolution=4096, - import_cameras=data.get("import_cameras", True), + import_cameras=import_cameras, ) substance_painter.project.create( @@ -70,8 +61,8 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): else: # Reload the mesh settings = substance_painter.project.MeshReloadingSettings( - import_cameras=data.get("import_cameras", True), - preserve_strokes=data.get("preserve_strokes", True) + import_cameras=import_cameras, + preserve_strokes=preserve_strokes ) def on_mesh_reload(status: substance_painter.project.ReloadMeshStatus): # noqa @@ -87,13 +78,21 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): # Store container container = {} + project_mesh_object_name = "_ProjectMesh_" imprint_container(container, - name=self.container_key, - namespace=self.container_key, + name=project_mesh_object_name, + namespace=project_mesh_object_name, context=context, loader=self) - container["options"] = data - set_container(self.container_key, container) + + # We want store some options for updating to keep consistent behavior + # from the user's original choice. We don't store 'preserve_strokes' + # as we always preserve strokes on updates. 
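# For illustration (hypothetical values): together with imprint_container()
# and the "options" key set just below, the entry stored under the
# "containers" key of the project's OpenPype metadata ends up looking
# roughly like:
#     "_ProjectMesh_": {
#         "schema": "openpype:container-2.0",
#         "id": AVALON_CONTAINER_ID,
#         "name": "_ProjectMesh_",
#         "namespace": "_ProjectMesh_",
#         "loader": "SubstanceLoadProjectMesh",
#         "representation": "<representation id>",
#         "options": {"import_cameras": True},
#     }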
+ container["options"] = { + "import_cameras": import_cameras, + } + + set_container_metadata(project_mesh_object_name, container) def switch(self, container, representation): self.update(container, representation) @@ -107,7 +106,7 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): container_options = container.get("options", {}) settings = substance_painter.project.MeshReloadingSettings( import_cameras=container_options.get("import_cameras", True), - preserve_strokes=container_options.get("preserve_strokes", True) + preserve_strokes=True ) def on_mesh_reload(status: substance_painter.project.ReloadMeshStatus): @@ -119,8 +118,9 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): substance_painter.project.reload_mesh(path, settings, on_mesh_reload) # Update container representation - container["representation"] = str(representation["_id"]) - set_container(self.container_key, container) + object_name = container["objectName"] + update_data = {"representation": str(representation["_id"])} + set_container_metadata(object_name, update_data, update=True) def remove(self, container): @@ -128,4 +128,4 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): # or close the project? # TODO: This is likely best 'hidden' away to the user because # this will leave the project's mesh unmanaged. - remove_container(self.container_key) + remove_container_metadata(container["objectName"]) From cf92213dd1fde6efb5ab117a1d4e4b7a96b188d5 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 7 Jan 2023 17:42:55 +0100 Subject: [PATCH 012/187] Cosmetics --- .../hosts/substancepainter/plugins/create/create_workfile.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/substancepainter/plugins/create/create_workfile.py b/openpype/hosts/substancepainter/plugins/create/create_workfile.py index 8b010ebe2c..4b34f4cc8c 100644 --- a/openpype/hosts/substancepainter/plugins/create/create_workfile.py +++ b/openpype/hosts/substancepainter/plugins/create/create_workfile.py @@ -10,6 +10,7 @@ from openpype.hosts.substancepainter.api.pipeline import ( get_project_metadata ) + class CreateWorkfile(AutoCreator): """Workfile auto-creator.""" identifier = "io.openpype.creators.substancepainter.workfile" From c34f8fed24a7c84ce22a615b5f438798b2f461c4 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 9 Jan 2023 10:29:44 +0100 Subject: [PATCH 013/187] Bypass silently if a project was not open when querying metadata --- openpype/hosts/substancepainter/api/pipeline.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/substancepainter/api/pipeline.py b/openpype/hosts/substancepainter/api/pipeline.py index e7dbe5e5eb..70353039f5 100644 --- a/openpype/hosts/substancepainter/api/pipeline.py +++ b/openpype/hosts/substancepainter/api/pipeline.py @@ -289,6 +289,9 @@ def set_project_metadata(key, data): def get_project_metadata(key): """Get a key from project's OpenPype metadata.""" + if not substance_painter.project.is_open(): + return + metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY) return metadata.get(key) From 2c544246fd855de080387e1f86a053e5fd31e12f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 9 Jan 2023 10:30:18 +0100 Subject: [PATCH 014/187] Do not auto create workfile instance if project isn't open. 
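Taken together with the guards added in patches 013 and 014, the workfile instance only exists while a Substance Painter project is open and is persisted in the project's OpenPype metadata. A minimal sketch of that collection round-trip, assuming an open project (the standalone helper below is purely illustrative; the real logic lives in CreateWorkfile):

import substance_painter.project
from openpype.pipeline import CreatedInstance

def _collect_workfile_instance(creator):
    # Without an open project there is no metadata to read, so nothing is
    # collected (and, after this patch, nothing is auto-created either).
    if not substance_painter.project.is_open():
        return None
    data = substance_painter.project.Metadata("OpenPype").get("workfile")
    return CreatedInstance.from_existing(data, creator) if data else None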
--- .../hosts/substancepainter/plugins/create/create_workfile.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/hosts/substancepainter/plugins/create/create_workfile.py b/openpype/hosts/substancepainter/plugins/create/create_workfile.py index 4b34f4cc8c..22e12b4079 100644 --- a/openpype/hosts/substancepainter/plugins/create/create_workfile.py +++ b/openpype/hosts/substancepainter/plugins/create/create_workfile.py @@ -10,6 +10,8 @@ from openpype.hosts.substancepainter.api.pipeline import ( get_project_metadata ) +import substance_painter.project + class CreateWorkfile(AutoCreator): """Workfile auto-creator.""" @@ -22,6 +24,9 @@ class CreateWorkfile(AutoCreator): def create(self): + if not substance_painter.project.is_open(): + return + variant = self.default_variant project_name = self.project_name asset_name = legacy_io.Session["AVALON_ASSET"] From ec2f10caf383a769fd90a3777ee47568054b6d41 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 9 Jan 2023 10:30:32 +0100 Subject: [PATCH 015/187] Simplify logic --- .../hosts/substancepainter/plugins/create/create_workfile.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/substancepainter/plugins/create/create_workfile.py b/openpype/hosts/substancepainter/plugins/create/create_workfile.py index 22e12b4079..729cc8f718 100644 --- a/openpype/hosts/substancepainter/plugins/create/create_workfile.py +++ b/openpype/hosts/substancepainter/plugins/create/create_workfile.py @@ -71,9 +71,8 @@ class CreateWorkfile(AutoCreator): def collect_instances(self): workfile = get_project_metadata("workfile") - if not workfile: - return - self.create_instance_in_context_from_existing(workfile) + if workfile: + self.create_instance_in_context_from_existing(workfile) def update_instances(self, update_list): for instance, _changes in update_list: From c3fca896d48f82026aea0f81055a996c366ea920 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 9 Jan 2023 11:16:23 +0100 Subject: [PATCH 016/187] Implement plug-ins to support workfile publishing --- .../plugins/publish/collect_current_file.py | 17 ++++++++++++ .../collect_workfile_representation.py | 26 +++++++++++++++++++ .../plugins/publish/increment_workfile.py | 23 ++++++++++++++++ .../plugins/publish/save_workfile.py | 23 ++++++++++++++++ 4 files changed, 89 insertions(+) create mode 100644 openpype/hosts/substancepainter/plugins/publish/collect_current_file.py create mode 100644 openpype/hosts/substancepainter/plugins/publish/collect_workfile_representation.py create mode 100644 openpype/hosts/substancepainter/plugins/publish/increment_workfile.py create mode 100644 openpype/hosts/substancepainter/plugins/publish/save_workfile.py diff --git a/openpype/hosts/substancepainter/plugins/publish/collect_current_file.py b/openpype/hosts/substancepainter/plugins/publish/collect_current_file.py new file mode 100644 index 0000000000..dac493bbf1 --- /dev/null +++ b/openpype/hosts/substancepainter/plugins/publish/collect_current_file.py @@ -0,0 +1,17 @@ +import pyblish.api + +from openpype.pipeline import registered_host + + +class CollectCurrentFile(pyblish.api.ContextPlugin): + """Inject the current working file into context""" + + order = pyblish.api.CollectorOrder - 0.49 + label = "Current Workfile" + hosts = ["substancepainter"] + + def process(self, context): + host = registered_host() + path = host.get_current_workfile() + context.data["currentFile"] = path + self.log.debug(f"Current workfile: {path}") \ No newline at end of file diff --git 
a/openpype/hosts/substancepainter/plugins/publish/collect_workfile_representation.py b/openpype/hosts/substancepainter/plugins/publish/collect_workfile_representation.py new file mode 100644 index 0000000000..563c2d4c07 --- /dev/null +++ b/openpype/hosts/substancepainter/plugins/publish/collect_workfile_representation.py @@ -0,0 +1,26 @@ +import os +import pyblish.api + + +class CollectWorkfileRepresentation(pyblish.api.InstancePlugin): + """Create a publish representation for the current workfile instance.""" + + order = pyblish.api.CollectorOrder + label = "Workfile representation" + hosts = ['substancepainter'] + families = ["workfile"] + + def process(self, instance): + + context = instance.context + current_file = context.data["currentFile"] + + folder, file = os.path.split(current_file) + filename, ext = os.path.splitext(file) + + instance.data['representations'] = [{ + 'name': ext.lstrip("."), + 'ext': ext.lstrip("."), + 'files': file, + "stagingDir": folder, + }] diff --git a/openpype/hosts/substancepainter/plugins/publish/increment_workfile.py b/openpype/hosts/substancepainter/plugins/publish/increment_workfile.py new file mode 100644 index 0000000000..b45d66fbb1 --- /dev/null +++ b/openpype/hosts/substancepainter/plugins/publish/increment_workfile.py @@ -0,0 +1,23 @@ +import pyblish.api + +from openpype.lib import version_up +from openpype.pipeline import registered_host + + +class IncrementWorkfileVersion(pyblish.api.ContextPlugin): + """Increment current workfile version.""" + + order = pyblish.api.IntegratorOrder + 1 + label = "Increment Workfile Version" + optional = True + hosts = ["substancepainter"] + + def process(self, context): + + assert all(result["success"] for result in context.data["results"]), ( + "Publishing not successful so version is not increased.") + + host = registered_host() + path = context.data["currentFile"] + self.log.info(f"Incrementing current workfile to: {path}") + host.save_workfile(version_up(path)) diff --git a/openpype/hosts/substancepainter/plugins/publish/save_workfile.py b/openpype/hosts/substancepainter/plugins/publish/save_workfile.py new file mode 100644 index 0000000000..5e86785e0d --- /dev/null +++ b/openpype/hosts/substancepainter/plugins/publish/save_workfile.py @@ -0,0 +1,23 @@ +import pyblish.api + +from openpype.pipeline import registered_host + + +class SaveCurrentWorkfile(pyblish.api.ContextPlugin): + """Save current workfile""" + + label = "Save current workfile" + order = pyblish.api.ExtractorOrder - 0.49 + hosts = ["substancepainter"] + + def process(self, context): + + host = registered_host() + assert context.data['currentFile'] == host.get_current_workfile() + + if host.has_unsaved_changes(): + self.log.info("Saving current file..") + host.save_workfile() + else: + self.log.debug("Skipping workfile save because there are no " + "unsaved changes.") From 564e8f4d40febfb08b65fc31e10b710d38cbddc7 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 9 Jan 2023 11:17:25 +0100 Subject: [PATCH 017/187] Cosmetics --- .../substancepainter/plugins/publish/collect_current_file.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/substancepainter/plugins/publish/collect_current_file.py b/openpype/hosts/substancepainter/plugins/publish/collect_current_file.py index dac493bbf1..9a37eb0d1c 100644 --- a/openpype/hosts/substancepainter/plugins/publish/collect_current_file.py +++ b/openpype/hosts/substancepainter/plugins/publish/collect_current_file.py @@ -14,4 +14,4 @@ class 
CollectCurrentFile(pyblish.api.ContextPlugin): host = registered_host() path = host.get_current_workfile() context.data["currentFile"] = path - self.log.debug(f"Current workfile: {path}") \ No newline at end of file + self.log.debug(f"Current workfile: {path}") From f9d3c9f77227fef2ddcf43649e69d0fb88d4e2bd Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 9 Jan 2023 18:13:49 +0100 Subject: [PATCH 018/187] Early prototype for Texture publishing in Substance Painter (WIP - not functional; doesn't integrate yet) --- .../plugins/create/create_textures.py | 149 ++++++++++++++++++ .../plugins/publish/extract_textures.py | 71 +++++++++ 2 files changed, 220 insertions(+) create mode 100644 openpype/hosts/substancepainter/plugins/create/create_textures.py create mode 100644 openpype/hosts/substancepainter/plugins/publish/extract_textures.py diff --git a/openpype/hosts/substancepainter/plugins/create/create_textures.py b/openpype/hosts/substancepainter/plugins/create/create_textures.py new file mode 100644 index 0000000000..af2e23b3bf --- /dev/null +++ b/openpype/hosts/substancepainter/plugins/create/create_textures.py @@ -0,0 +1,149 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating textures.""" +import os + +from openpype.pipeline import CreatedInstance, Creator + +from openpype.hosts.substancepainter.api.pipeline import ( + set_project_metadata, + get_project_metadata +) + +from openpype.lib import ( + EnumDef, + UILabelDef, + NumberDef +) + +import substance_painter.project +import substance_painter.resource + + +def get_export_presets(): + import substance_painter.resource + + preset_resources = {} + + # TODO: Find more optimal way to find all export templates + for shelf in substance_painter.resource.Shelves.all(): + shelf_path = os.path.normpath(shelf.path()) + + presets_path = os.path.join(shelf_path, "export-presets") + if not os.path.exists(presets_path): + continue + + for fname in os.listdir(presets_path): + if fname.endswith(".spexp"): + template_name = os.path.splitext(fname)[0] + + resource = substance_painter.resource.ResourceID( + context=shelf.name(), + name=template_name + ) + resource_url = resource.url() + + preset_resources[resource_url] = template_name + + # Sort by template name + export_templates = dict(sorted(preset_resources.items(), + key=lambda x: x[1])) + + return export_templates + + +class CreateTextures(Creator): + """Create a texture set.""" + identifier = "io.openpype.creators.substancepainter.textures" + label = "Textures" + family = "textures" + icon = "picture-o" + + default_variant = "Main" + + def create(self, subset_name, instance_data, pre_create_data): + + if not substance_painter.project.is_open(): + return + + instance = self.create_instance_in_context(subset_name, instance_data) + set_project_metadata("textures", instance.data_to_store()) + + def collect_instances(self): + workfile = get_project_metadata("textures") + if workfile: + self.create_instance_in_context_from_existing(workfile) + + def update_instances(self, update_list): + for instance, _changes in update_list: + # Update project's metadata + data = get_project_metadata("textures") or {} + data.update(instance.data_to_store()) + set_project_metadata("textures", data) + + def remove_instances(self, instances): + for instance in instances: + # TODO: Implement removal + # api.remove_instance(instance) + self._remove_instance_from_context(instance) + + # Helper methods (this might get moved into Creator class) + def create_instance_in_context(self, subset_name, data): + instance = 
CreatedInstance( + self.family, subset_name, data, self + ) + self.create_context.creator_adds_instance(instance) + return instance + + def create_instance_in_context_from_existing(self, data): + instance = CreatedInstance.from_existing(data, self) + self.create_context.creator_adds_instance(instance) + return instance + + def get_instance_attr_defs(self): + + return [ + EnumDef("exportPresetUrl", + items=get_export_presets(), + label="Output Template"), + EnumDef("exportFileFormat", + items={ + None: "Based on output template", + # TODO: implement extensions + }, + label="File type"), + EnumDef("exportSize", + items={ + None: "Based on each Texture Set's size", + # The key is size of the texture file in log2. + # (i.e. 10 means 2^10 = 1024) + 7: "128", + 8: "256", + 9: "512", + 10: "1024", + 11: "2048", + 12: "4096" + }, + label="Size"), + + EnumDef("exportPadding", + items={ + "passthrough": "No padding (passthrough)", + "infinite": "Dilation infinite", + "transparent": "Dilation + transparent", + "color": "Dilation + default background color", + "diffusion": "Dilation + diffusion" + }, + label="Padding"), + NumberDef("exportDilationDistance", + minimum=0, + maximum=256, + decimals=0, + default=16, + label="Dilation Distance"), + UILabelDef("Note: Dilation Distance is only used with " + "'Dilation + ' padding options"), + ] + + def get_pre_create_attr_defs(self): + # Use same attributes as for instance attributes + return self.get_instance_attr_defs() diff --git a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py new file mode 100644 index 0000000000..93e0c8cb31 --- /dev/null +++ b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py @@ -0,0 +1,71 @@ +from openpype.pipeline import KnownPublishError, publish + +import substance_painter.export + + +class ExtractTextures(publish.Extractor): + """Extract Textures using an output template config""" + + label = "Extract Texture Sets" + hosts = ['substancepainter'] + families = ["textures"] + + def process(self, instance): + + staging_dir = self.staging_dir(instance) + + # See: https://substance3d.adobe.com/documentation/ptpy/api/substance_painter/export # noqa + creator_attrs = instance.data["creator_attributes"] + config = { + "exportShaderParams": True, + "exportPath": staging_dir, + "defaultExportPreset": creator_attrs["exportPresetUrl"], + + # Custom overrides to the exporter + "exportParameters": [ + { + "parameters": { + "fileFormat": creator_attrs["exportFileFormat"], + "sizeLog2": creator_attrs["exportSize"], + "paddingAlgorithm": creator_attrs["exportPadding"], + "dilationDistance": creator_attrs["exportDilationDistance"] # noqa + } + } + ] + } + + # Create the list of Texture Sets to export. 
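# Each "exportList" entry below selects what to export through its "rootPath":
# the texture set name, optionally followed by "/<stack name>" to target a
# single stack (the colorspace query later in this series builds exactly such
# a path with "/".join()). A hypothetical project with texture sets "Body" and
# "Head" would therefore end up with:
#     config["exportList"] = [{"rootPath": "Body"}, {"rootPath": "Head"}]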
+ config["exportList"] = [] + for texture_set in substance_painter.textureset.all_texture_sets(): + # stack = texture_set.get_stack() + config["exportList"].append({"rootPath": texture_set.name()}) + + # Consider None values optionals + for override in config["exportParameters"]: + parameters = override.get("parameters") + for key, value in dict(parameters).items(): + if value is None: + parameters.pop(key) + + result = substance_painter.export.export_project_textures(config) + + if result.status != substance_painter.export.ExportStatus.Success: + raise KnownPublishError( + "Failed to export texture set: {}".format(result.message) + ) + + files = [] + for stack, maps in result.textures.items(): + for texture_map in maps: + self.log.info(f"Exported texture: {texture_map}") + files.append(texture_map) + + # TODO: add the representations so they integrate the way we'd want + """ + instance.data['representations'] = [{ + 'name': ext.lstrip("."), + 'ext': ext.lstrip("."), + 'files': file, + "stagingDir": folder, + }] + """ From 0741c9850861779974e95cf764c3a7d2f0b097cc Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 9 Jan 2023 18:15:06 +0100 Subject: [PATCH 019/187] Cosmetics --- .../hosts/substancepainter/plugins/publish/extract_textures.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py index 93e0c8cb31..d72d9920fd 100644 --- a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py +++ b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py @@ -55,7 +55,7 @@ class ExtractTextures(publish.Extractor): ) files = [] - for stack, maps in result.textures.items(): + for _stack, maps in result.textures.items(): for texture_map in maps: self.log.info(f"Exported texture: {texture_map}") files.append(texture_map) From 87f23c978d44d587e74adfb2d517da798dfecafe Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 10 Jan 2023 00:52:07 +0100 Subject: [PATCH 020/187] Add the built-in `export-preset-generator` template entries --- .../plugins/create/create_textures.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/substancepainter/plugins/create/create_textures.py b/openpype/hosts/substancepainter/plugins/create/create_textures.py index af2e23b3bf..41de2ad946 100644 --- a/openpype/hosts/substancepainter/plugins/create/create_textures.py +++ b/openpype/hosts/substancepainter/plugins/create/create_textures.py @@ -48,7 +48,20 @@ def get_export_presets(): export_templates = dict(sorted(preset_resources.items(), key=lambda x: x[1])) - return export_templates + # Add default built-ins at the start + # TODO: find the built-ins automatically; scraped with https://gist.github.com/BigRoy/97150c7c6f0a0c916418207b9a2bc8f1 # noqa + result = { + "export-preset-generator://viewport2d": "2D View", # noqa + "export-preset-generator://doc-channel-normal-no-alpha": "Document channels + Normal + AO (No Alpha)", # noqa + "export-preset-generator://doc-channel-normal-with-alpha": "Document channels + Normal + AO (With Alpha)", # noqa + "export-preset-generator://sketchfab": "Sketchfab", # noqa + "export-preset-generator://adobe-standard-material": "Substance 3D Stager", # noqa + "export-preset-generator://usd": "USD PBR Metal Roughness", # noqa + "export-preset-generator://gltf": "glTF PBR Metal Roughness", # noqa + "export-preset-generator://gltf-displacement": "glTF PBR Metal Roughness + 
Displacement texture (experimental)" # noqa + } + result.update(export_templates) + return result From 9a4f5650199000658e93e189810cca7b1482e9ed Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 10 Jan 2023 01:21:08 +0100 Subject: [PATCH 021/187] Shorten label --- .../hosts/substancepainter/plugins/create/create_textures.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/substancepainter/plugins/create/create_textures.py b/openpype/hosts/substancepainter/plugins/create/create_textures.py index 41de2ad946..c1d907a974 100644 --- a/openpype/hosts/substancepainter/plugins/create/create_textures.py +++ b/openpype/hosts/substancepainter/plugins/create/create_textures.py @@ -153,8 +153,8 @@ class CreateTextures(Creator): decimals=0, default=16, label="Dilation Distance"), - UILabelDef("Note: Dilation Distance is only used with " - "'Dilation + ' padding options"), + UILabelDef("*only used with " + "'Dilation + ' padding"), ] def get_pre_create_attr_defs(self): From 139eafb5c7e951dcc08fa1c1a8e7e5bf2a4928d1 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 10 Jan 2023 01:21:31 +0100 Subject: [PATCH 022/187] Debug log used Substance Painter export preset --- .../substancepainter/plugins/publish/extract_textures.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py index d72d9920fd..8ebad3193f 100644 --- a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py +++ b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py @@ -14,12 +14,15 @@ class ExtractTextures(publish.Extractor): staging_dir = self.staging_dir(instance) - # See: https://substance3d.adobe.com/documentation/ptpy/api/substance_painter/export # noqa creator_attrs = instance.data["creator_attributes"] + preset_url = creator_attrs["exportPresetUrl"] + self.log.debug(f"Exporting using preset: {preset_url}") + + # See: https://substance3d.adobe.com/documentation/ptpy/api/substance_painter/export # noqa config = { "exportShaderParams": True, "exportPath": staging_dir, - "defaultExportPreset": creator_attrs["exportPresetUrl"], + "defaultExportPreset": preset_url, # Custom overrides to the exporter "exportParameters": [ From 391ba1ada24ffb275443a47f008b6afce2feba52 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 10 Jan 2023 11:21:55 +0100 Subject: [PATCH 023/187] Remove unused imports --- openpype/hosts/substancepainter/api/pipeline.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/substancepainter/api/pipeline.py b/openpype/hosts/substancepainter/api/pipeline.py index 70353039f5..aae1f39a3e 100644 --- a/openpype/hosts/substancepainter/api/pipeline.py +++ b/openpype/hosts/substancepainter/api/pipeline.py @@ -8,9 +8,7 @@ from functools import partial # Substance 3D Painter modules import substance_painter.ui import substance_painter.event -import substance_painter.export import substance_painter.project -import substance_painter.textureset from openpype.host import HostBase, IWorkfileHost, ILoadHost, IPublishHost From c1abd00bba43cb98501efd649462c990414f720c Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 10 Jan 2023 16:33:17 +0100 Subject: [PATCH 024/187] Store menu and callbacks on the SubstanceHost instance --- .../hosts/substancepainter/api/pipeline.py | 120 +++++++++--------- 1 file changed, 57 insertions(+), 63 deletions(-) diff --git
a/openpype/hosts/substancepainter/api/pipeline.py b/openpype/hosts/substancepainter/api/pipeline.py index aae1f39a3e..db4bb47401 100644 --- a/openpype/hosts/substancepainter/api/pipeline.py +++ b/openpype/hosts/substancepainter/api/pipeline.py @@ -34,14 +34,9 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") - OPENPYPE_METADATA_KEY = "OpenPype" OPENPYPE_METADATA_CONTAINERS_KEY = "containers" # child key -self = sys.modules[__name__] -self.menu = None -self.callbacks = [] - class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): name = "substancepainter" @@ -49,6 +44,8 @@ class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): def __init__(self): super(SubstanceHost, self).__init__() self._has_been_setup = False + self.menu = None + self.callbacks = [] def install(self): pyblish.api.register_host("substancepainter") @@ -59,20 +56,20 @@ class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): log.info("Installing callbacks ... ") # register_event_callback("init", on_init) - _register_callbacks() + self._register_callbacks() # register_event_callback("before.save", before_save) # register_event_callback("save", on_save) register_event_callback("open", on_open) # register_event_callback("new", on_new) log.info("Installing menu ... ") - _install_menu() + self._install_menu() self._has_been_setup = True def uninstall(self): - _uninstall_menu() - _deregister_callbacks() + self._uninstall_menu() + self._deregister_callbacks() def has_unsaved_changes(self): @@ -146,74 +143,71 @@ class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): def get_context_data(self): pass + def _install_menu(self): + from PySide2 import QtWidgets + from openpype.tools.utils import host_tools -def _install_menu(): - from PySide2 import QtWidgets - from openpype.tools.utils import host_tools + parent = substance_painter.ui.get_main_window() - parent = substance_painter.ui.get_main_window() + menu = QtWidgets.QMenu("OpenPype") - menu = QtWidgets.QMenu("OpenPype") + action = menu.addAction("Load...") + action.triggered.connect( + lambda: host_tools.show_loader(parent=parent, use_context=True) + ) - action = menu.addAction("Load...") - action.triggered.connect( - lambda: host_tools.show_loader(parent=parent, use_context=True) - ) + action = menu.addAction("Publish...") + action.triggered.connect( + lambda: host_tools.show_publisher(parent=parent) + ) - action = menu.addAction("Publish...") - action.triggered.connect( - lambda: host_tools.show_publisher(parent=parent) - ) + action = menu.addAction("Manage...") + action.triggered.connect( + lambda: host_tools.show_scene_inventory(parent=parent) + ) - action = menu.addAction("Manage...") - action.triggered.connect( - lambda: host_tools.show_scene_inventory(parent=parent) - ) + action = menu.addAction("Library...") + action.triggered.connect( + lambda: host_tools.show_library_loader(parent=parent) + ) - action = menu.addAction("Library...") - action.triggered.connect( - lambda: host_tools.show_library_loader(parent=parent) - ) + menu.addSeparator() + action = menu.addAction("Work Files...") + action.triggered.connect( + lambda: host_tools.show_workfiles(parent=parent) + ) - menu.addSeparator() - action = menu.addAction("Work Files...") - action.triggered.connect( - lambda: host_tools.show_workfiles(parent=parent) - ) + substance_painter.ui.add_menu(menu) - substance_painter.ui.add_menu(menu) + def on_menu_destroyed(): 
+ self.menu = None - def on_menu_destroyed(): - self.menu = None + menu.destroyed.connect(on_menu_destroyed) - menu.destroyed.connect(on_menu_destroyed) + self.menu = menu - self.menu = menu + def _uninstall_menu(self): + if self.menu: + self.menu.destroy() + self.menu = None + + def _register_callbacks(self): + # Prepare emit event callbacks + open_callback = partial(emit_event, "open") + + # Connect to the Substance Painter events + dispatcher = substance_painter.event.DISPATCHER + for event, callback in [ + (substance_painter.event.ProjectOpened, open_callback) + ]: + dispatcher.connect(event, callback) + # Keep a reference so we can deregister if needed + self.callbacks.append((event, callback)) -def _uninstall_menu(): - if self.menu: - self.menu.destroy() - self.menu = None - - -def _register_callbacks(): - # Prepare emit event callbacks - open_callback = partial(emit_event, "open") - - # Connect to the Substance Painter events - dispatcher = substance_painter.event.DISPATCHER - for event, callback in [ - (substance_painter.event.ProjectOpened, open_callback) - ]: - dispatcher.connect(event, callback) - # Keep a reference so we can deregister if needed - self.callbacks.append((event, callback)) - - -def _deregister_callbacks(): - for event, callback in self.callbacks: - substance_painter.event.DISPATCHER.disconnect(event, callback) + def _deregister_callbacks(self): + for event, callback in self.callbacks: + substance_painter.event.DISPATCHER.disconnect(event, callback) def on_open(): From df5300ed32a0a4cff5af52a930c535773238deda Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 10 Jan 2023 16:33:33 +0100 Subject: [PATCH 025/187] Cosmetics --- openpype/hosts/substancepainter/api/pipeline.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/substancepainter/api/pipeline.py b/openpype/hosts/substancepainter/api/pipeline.py index db4bb47401..48adc107e2 100644 --- a/openpype/hosts/substancepainter/api/pipeline.py +++ b/openpype/hosts/substancepainter/api/pipeline.py @@ -204,7 +204,6 @@ class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): # Keep a reference so we can deregister if needed self.callbacks.append((event, callback)) - def _deregister_callbacks(self): for event, callback in self.callbacks: substance_painter.event.DISPATCHER.disconnect(event, callback) From 3b4f9feaadfaaee4ae763a78744a274cd467e744 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 10 Jan 2023 16:34:20 +0100 Subject: [PATCH 026/187] Remove unused import --- openpype/hosts/substancepainter/api/pipeline.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/substancepainter/api/pipeline.py b/openpype/hosts/substancepainter/api/pipeline.py index 48adc107e2..df705bb010 100644 --- a/openpype/hosts/substancepainter/api/pipeline.py +++ b/openpype/hosts/substancepainter/api/pipeline.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Pipeline tools for OpenPype Gaffer integration.""" import os -import sys import logging from functools import partial From 5a7c5762847ed22f89a26d09f062a0948c34397b Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 10 Jan 2023 16:44:09 +0100 Subject: [PATCH 027/187] Remove debug print message --- openpype/hosts/substancepainter/api/pipeline.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/substancepainter/api/pipeline.py b/openpype/hosts/substancepainter/api/pipeline.py index df705bb010..3a68a7fa86 100644 --- a/openpype/hosts/substancepainter/api/pipeline.py +++ b/openpype/hosts/substancepainter/api/pipeline.py @@ -210,7 
+210,6 @@ class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): def on_open(): log.info("Running callback on open..") - print("Run") if any_outdated_containers(): from openpype.widgets import popup From 24b6583c63ea14920bc6a56649c7db6ed1e3176c Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 10 Jan 2023 17:58:47 +0100 Subject: [PATCH 028/187] Set explicit defaults for creator --- .../hosts/substancepainter/plugins/create/create_textures.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/substancepainter/plugins/create/create_textures.py b/openpype/hosts/substancepainter/plugins/create/create_textures.py index c1d907a974..6d4f816961 100644 --- a/openpype/hosts/substancepainter/plugins/create/create_textures.py +++ b/openpype/hosts/substancepainter/plugins/create/create_textures.py @@ -123,6 +123,7 @@ class CreateTextures(Creator): None: "Based on output template", # TODO: implement extensions }, + default=None, label="File type"), EnumDef("exportSize", items={ @@ -136,6 +137,7 @@ class CreateTextures(Creator): 11: "2048", 12: "4096" }, + default=None, label="Size"), EnumDef("exportPadding", @@ -146,6 +148,7 @@ class CreateTextures(Creator): "color": "Dilation + default background color", "diffusion": "Dilation + diffusion" }, + default="infinite", label="Padding"), NumberDef("exportDilationDistance", minimum=0, From 61710d614d5753b2287c9c5be5110147bd4612b0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 12 Jan 2023 13:23:51 +0100 Subject: [PATCH 029/187] TODO was already resolved --- openpype/hosts/substancepainter/plugins/load/load_mesh.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/substancepainter/plugins/load/load_mesh.py b/openpype/hosts/substancepainter/plugins/load/load_mesh.py index 3e62b90988..00f808199f 100644 --- a/openpype/hosts/substancepainter/plugins/load/load_mesh.py +++ b/openpype/hosts/substancepainter/plugins/load/load_mesh.py @@ -102,7 +102,6 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): path = get_representation_path(representation) # Reload the mesh - # TODO: Re-use settings from first load? 
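# The container already stores the options that were used on the first load
# (fetched via container.get("options") just below), so reloading reuses the
# same import settings; that is what resolved the TODO removed above.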
container_options = container.get("options", {}) settings = substance_painter.project.MeshReloadingSettings( import_cameras=container_options.get("import_cameras", True), From 2177877713f538f70217a944014212fc183c7412 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 12 Jan 2023 14:47:38 +0100 Subject: [PATCH 030/187] Load OpenPype plug-in on first run of Substance Painter through OpenPype --- .../startup/openpype_load_on_first_run.py | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 openpype/hosts/substancepainter/deploy/startup/openpype_load_on_first_run.py diff --git a/openpype/hosts/substancepainter/deploy/startup/openpype_load_on_first_run.py b/openpype/hosts/substancepainter/deploy/startup/openpype_load_on_first_run.py new file mode 100644 index 0000000000..90b1ec6bbd --- /dev/null +++ b/openpype/hosts/substancepainter/deploy/startup/openpype_load_on_first_run.py @@ -0,0 +1,43 @@ +"""Ease the OpenPype on-boarding process by loading the plug-in on first run""" + +OPENPYPE_PLUGIN_NAME = "openpype_plugin" + + +def start_plugin(): + try: + # This isn't exposed in the official API so we keep it in a try-except + from painter_plugins_ui import ( + get_settings, + LAUNCH_AT_START_KEY, + ON_STATE, + PLUGINS_MENU, + plugin_manager + ) + + # The `painter_plugins_ui` plug-in itself is also a startup plug-in + # we need to take into account that it could run either earlier or + # later than this startup script, we check whether its menu initialized + is_before_plugins_menu = PLUGINS_MENU is None + + settings = get_settings(OPENPYPE_PLUGIN_NAME) + if settings.value(LAUNCH_AT_START_KEY, None) is not None: + print("Initializing OpenPype plug-in on first run...") + if is_before_plugins_menu: + print("- running before 'painter_plugins_ui'") + # Delay the launch to the painter_plugins_ui initialization + settings.setValue(LAUNCH_AT_START_KEY, ON_STATE) + else: + # Launch now + print("- running after 'painter_plugins_ui'") + plugin_manager(OPENPYPE_PLUGIN_NAME)(True) + + # Set the checked state in the menu to avoid confusion + action = next(action for action in PLUGINS_MENU._menu.actions() + if action.text() == OPENPYPE_PLUGIN_NAME) + if action is not None: + action.blockSignals(True) + action.setChecked(True) + action.blockSignals(False) + + except Exception as exc: + print(exc) From d1d15683983db8d3d9ca9e1a121b794b9b0acf3e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 12 Jan 2023 14:54:07 +0100 Subject: [PATCH 031/187] Fix logic --- .../deploy/startup/openpype_load_on_first_run.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/substancepainter/deploy/startup/openpype_load_on_first_run.py b/openpype/hosts/substancepainter/deploy/startup/openpype_load_on_first_run.py index 90b1ec6bbd..04b610b4df 100644 --- a/openpype/hosts/substancepainter/deploy/startup/openpype_load_on_first_run.py +++ b/openpype/hosts/substancepainter/deploy/startup/openpype_load_on_first_run.py @@ -20,7 +20,7 @@ def start_plugin(): is_before_plugins_menu = PLUGINS_MENU is None settings = get_settings(OPENPYPE_PLUGIN_NAME) - if settings.value(LAUNCH_AT_START_KEY, None) is not None: + if settings.value(LAUNCH_AT_START_KEY, None) is None: print("Initializing OpenPype plug-in on first run...") if is_before_plugins_menu: print("- running before 'painter_plugins_ui'") From d2baa5ec4d9f92c143172f95719bb7b319ae79a2 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 12 Jan 2023 15:38:22 +0100 Subject: [PATCH 032/187] Allow to configure custom shelves for Substance 
Painter in project settings --- openpype/hosts/substancepainter/api/lib.py | 57 +++++++++++++++++++ .../hosts/substancepainter/api/pipeline.py | 28 +++++++++ .../project_settings/substancepainter.json | 3 + .../schemas/projects_schema/schema_main.json | 4 ++ .../schema_project_substancepainter.json | 18 ++++++ 5 files changed, 110 insertions(+) create mode 100644 openpype/hosts/substancepainter/api/lib.py create mode 100644 openpype/settings/defaults/project_settings/substancepainter.json create mode 100644 openpype/settings/entities/schemas/projects_schema/schema_project_substancepainter.json diff --git a/openpype/hosts/substancepainter/api/lib.py b/openpype/hosts/substancepainter/api/lib.py new file mode 100644 index 0000000000..d468f6cc45 --- /dev/null +++ b/openpype/hosts/substancepainter/api/lib.py @@ -0,0 +1,57 @@ +import os +import re +import substance_painter.resource + + +def load_shelf(path, name=None): + """Add shelf to substance painter (for current application session) + + This will dynamically add a Shelf for the current session. It's good + to note however that these will *not* persist on restart of the host. + + Note: + Consider the loaded shelf a static library of resources. + + The shelf will *not* be visible in application preferences in + Edit > Settings > Libraries. + + The shelf will *not* show in the Assets browser if it has no existing + assets + + The shelf will *not* be a selectable option for selecting it as a + destination to import resources too. + + """ + + # Ensure expanded path with forward slashes + path = os.path.expandvars(path) + path = os.path.abspath(path) + path = path.replace("\\", "/") + + # Path must exist + if not os.path.isdir(path): + raise ValueError(f"Path is not an existing folder: {path}") + + # This name must be unique and must only contain lowercase letters, + # numbers, underscores or hyphens. + if name is None: + name = os.path.basename(path) + + name = name.lower() + name = re.sub(r"[^a-z0-9_\-]", "_", name) # sanitize to underscores + + if substance_painter.resource.Shelves.exists(name): + shelf = next( + shelf for shelf in substance_painter.resource.Shelves.all() + if shelf.name() == name + ) + if os.path.normpath(shelf.path()) != os.path.normpath(path): + raise ValueError(f"Shelf with name '{name}' already exists " + f"for a different path: '{shelf.path()}") + + return + + print(f"Adding Shelf '{name}' to path: {path}") + substance_painter.resource.Shelves.add(name, path) + + return name diff --git a/openpype/hosts/substancepainter/api/pipeline.py b/openpype/hosts/substancepainter/api/pipeline.py index 3a68a7fa86..f4d4c5b00c 100644 --- a/openpype/hosts/substancepainter/api/pipeline.py +++ b/openpype/hosts/substancepainter/api/pipeline.py @@ -10,6 +10,7 @@ import substance_painter.event import substance_painter.project from openpype.host import HostBase, IWorkfileHost, ILoadHost, IPublishHost +from openpype.settings import get_current_project_settings import pyblish.api @@ -25,6 +26,8 @@ from openpype.lib import ( from openpype.pipeline.load import any_outdated_containers from openpype.hosts.substancepainter import SUBSTANCE_HOST_DIR +from . 
import lib + log = logging.getLogger("openpype.hosts.substance") PLUGINS_DIR = os.path.join(SUBSTANCE_HOST_DIR, "plugins") @@ -45,6 +48,7 @@ class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): self._has_been_setup = False self.menu = None self.callbacks = [] + self.shelves = [] def install(self): pyblish.api.register_host("substancepainter") @@ -64,9 +68,13 @@ class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): log.info("Installing menu ... ") self._install_menu() + project_settings = get_current_project_settings() + self._install_shelves(project_settings) + self._has_been_setup = True def uninstall(self): + self._uninstall_shelves() self._uninstall_menu() self._deregister_callbacks() @@ -206,6 +214,26 @@ class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): def _deregister_callbacks(self): for event, callback in self.callbacks: substance_painter.event.DISPATCHER.disconnect(event, callback) + self.callbacks.clear() + + def _install_shelves(self, project_settings): + + shelves = project_settings["substancepainter"].get("shelves", {}) + for name, path in shelves.items(): + # TODO: Allow formatting with anatomy for the paths + shelf_name = None + try: + shelf_name = lib.load_shelf(path, name=name) + except ValueError as exc: + print(f"Failed to load shelf -> {exc}") + + if shelf_name: + self.shelves.append(shelf_name) + + def _uninstall_shelves(self): + for shelf_name in self.shelves: + substance_painter.resource.Shelves.remove(shelf_name) + self.shelves.clear() def on_open(): diff --git a/openpype/settings/defaults/project_settings/substancepainter.json b/openpype/settings/defaults/project_settings/substancepainter.json new file mode 100644 index 0000000000..a424a923da --- /dev/null +++ b/openpype/settings/defaults/project_settings/substancepainter.json @@ -0,0 +1,3 @@ +{ + "shelves": {} +} \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_main.json b/openpype/settings/entities/schemas/projects_schema/schema_main.json index 0b9fbf7470..b3c5c62a89 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_main.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_main.json @@ -114,6 +114,10 @@ "type": "schema", "name": "schema_project_photoshop" }, + { + "type": "schema", + "name": "schema_project_substancepainter" + }, { "type": "schema", "name": "schema_project_harmony" diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_substancepainter.json b/openpype/settings/entities/schemas/projects_schema/schema_project_substancepainter.json new file mode 100644 index 0000000000..4a02a9d8ca --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_substancepainter.json @@ -0,0 +1,18 @@ +{ + "type": "dict", + "collapsible": true, + "key": "substancepainter", + "label": "Substance Painter", + "is_file": true, + "children": [ + { + "type": "dict-modifiable", + "key": "shelves", + "label": "Shelves", + "use_label_wrap": true, + "object_type": { + "type": "text" + } + } + ] +} From 42b207445ed49dab7d5ce23556d7cbd0e7316ba3 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 13 Jan 2023 12:32:38 +0100 Subject: [PATCH 033/187] Implement working WIP draft for Texture Publishing --- .../hosts/substancepainter/api/colorspace.py | 157 +++++++++++++ openpype/hosts/substancepainter/api/lib.py | 139 ++++++++++++ .../plugins/create/create_textures.py | 71 +----- .../publish/collect_textureset_images.py | 207 ++++++++++++++++++ 
.../plugins/publish/extract_textures.py | 87 +++----- 5 files changed, 548 insertions(+), 113 deletions(-) create mode 100644 openpype/hosts/substancepainter/api/colorspace.py create mode 100644 openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py diff --git a/openpype/hosts/substancepainter/api/colorspace.py b/openpype/hosts/substancepainter/api/colorspace.py new file mode 100644 index 0000000000..f7b9f7694a --- /dev/null +++ b/openpype/hosts/substancepainter/api/colorspace.py @@ -0,0 +1,157 @@ +"""Substance Painter OCIO management + +Adobe Substance 3D Painter supports OCIO color management using a per project +configuration. Output color spaces are defined at the project level + +More information see: + - https://substance3d.adobe.com/documentation/spdoc/color-management-223053233.html # noqa + - https://substance3d.adobe.com/documentation/spdoc/color-management-with-opencolorio-225969419.html # noqa + +""" +import substance_painter.export +import substance_painter.js +import json + +from .lib import ( + get_document_structure, + get_channel_format +) + + +def _iter_document_stack_channels(): + """Yield all stack paths and channels project""" + + for material in get_document_structure()["materials"]: + material_name = material["name"] + for stack in material["stacks"]: + stack_name = stack["name"] + for channel in stack["channels"]: + if stack_name: + stack_path = [material_name, stack_name] + else: + stack_path = material_name + yield stack_path, channel + + +def _get_first_color_and_data_stack_and_channel(): + """Return first found color channel and data channel.""" + color_channel = None + data_channel = None + for stack_path, channel in _iter_document_stack_channels(): + channel_format = get_channel_format(stack_path, channel) + if channel_format["color"]: + color_channel = (stack_path, channel) + else: + data_channel = (stack_path, channel) + + if color_channel and data_channel: + return color_channel, data_channel + + return color_channel, data_channel + + +def get_project_channel_data(): + """Return colorSpace settings for the current substance painter project. + + In Substance Painter only color channels have Color Management enabled + whereas data channels have no color management applied. This can't be + changed. The artist can only customize the export color space for color + channels per bit-depth for 8 bpc, 16 bpc and 32 bpc. + + As such this returns the color space for 'data' and for per bit-depth + for color channels. + + Example output: + { + "data": {'colorSpace': 'Utility - Raw'}, + "8": {"colorSpace": "ACES - AcesCG"}, + "16": {"colorSpace": "ACES - AcesCG"}, + "16f": {"colorSpace": "ACES - AcesCG"}, + "32f": {"colorSpace": "ACES - AcesCG"} + } + + """ + + keys = ["colorSpace"] + query = {key: f"${key}" for key in keys} + + config = { + "exportPath": "/", + "exportShaderParams": False, + "defaultExportPreset": "query_preset", + + "exportPresets": [{ + "name": "query_preset", + + # List of maps making up this export preset. + "maps": [{ + "fileName": json.dumps(query), + # List of source/destination defining which channels will + # make up the texture file. 
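# The "channels" list below is left empty in this template preset;
# get_project_channel_data() fills it in per queried stack/channel further
# down. The "fileName" above is the actual query: it embeds the "$colorSpace"
# token as JSON so that, once the exporter expands it, _get_query_output()
# can parse the produced path straight back into a dict.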
+ "channels": [], + "parameters": { + "fileFormat": "exr", + "bitDepth": "32f", + "dithering": False, + "sizeLog2": 4, + "paddingAlgorithm": "passthrough", + "dilationDistance": 16 + } + }] + }], + } + + def _get_query_output(config): + # Return the basename of the single output path we defined + result = substance_painter.export.list_project_textures(config) + path = next(iter(result.values()))[0] + # strip extension and slash since we know relevant json data starts + # and ends with { and } characters + path = path.strip("/\\.exr") + return json.loads(path) + + # Query for each type of channel (color and data) + color_channel, data_channel = _get_first_color_and_data_stack_and_channel() + colorspaces = {} + for key, channel_data in { + "data": data_channel, + "color": color_channel + }.items(): + if channel_data is None: + # No channel of that datatype anywhere in the Stack. We're + # unable to identify the output color space of the project + colorspaces[key] = None + continue + + stack, channel = channel_data + + # Stack must be a string + if not isinstance(stack, str): + # Assume iterable + stack = "/".join(stack) + + # Define the temp output config + config["exportList"] = [{"rootPath": stack}] + config_map = config["exportPresets"][0]["maps"][0] + config_map["channels"] = [ + { + "destChannel": x, + "srcChannel": x, + "srcMapType": "documentMap", + "srcMapName": channel + } for x in "RGB" + ] + + if key == "color": + # Query for each bit depth + # Color space definition can have a different OCIO config set + # for 8-bit, 16-bit and 32-bit outputs so we need to check each + # bit depth + for depth in ["8", "16", "16f", "32f"]: + config_map["parameters"]["bitDepth"] = depth # noqa + colorspaces[key + depth] = _get_query_output(config) + else: + # Data channel (not color managed) + colorspaces[key] = _get_query_output(config) + + return colorspaces diff --git a/openpype/hosts/substancepainter/api/lib.py b/openpype/hosts/substancepainter/api/lib.py index d468f6cc45..b929f881a8 100644 --- a/openpype/hosts/substancepainter/api/lib.py +++ b/openpype/hosts/substancepainter/api/lib.py @@ -1,6 +1,145 @@ import os import re +import json + import substance_painter.resource +import substance_painter.js + + +def get_export_presets(): + """Return Export Preset resource URLs for all available Export Presets. 
+ + Returns: + dict: {Resource url: GUI Label} + + """ + # TODO: Find more optimal way to find all export templates + + preset_resources = {} + for shelf in substance_painter.resource.Shelves.all(): + shelf_path = os.path.normpath(shelf.path()) + + presets_path = os.path.join(shelf_path, "export-presets") + if not os.path.exists(presets_path): + continue + + for filename in os.listdir(presets_path): + if filename.endswith(".spexp"): + template_name = os.path.splitext(filename)[0] + + resource = substance_painter.resource.ResourceID( + context=shelf.name(), + name=template_name + ) + resource_url = resource.url() + + preset_resources[resource_url] = template_name + + # Sort by template name + export_templates = dict(sorted(preset_resources.items(), + key=lambda x: x[1])) + + # Add default built-ins at the start + # TODO: find the built-ins automatically; scraped with https://gist.github.com/BigRoy/97150c7c6f0a0c916418207b9a2bc8f1 # noqa + result = { + "export-preset-generator://viewport2d": "2D View", # noqa + "export-preset-generator://doc-channel-normal-no-alpha": "Document channels + Normal + AO (No Alpha)", # noqa + "export-preset-generator://doc-channel-normal-with-alpha": "Document channels + Normal + AO (With Alpha)", # noqa + "export-preset-generator://sketchfab": "Sketchfab", # noqa + "export-preset-generator://adobe-standard-material": "Substance 3D Stager", # noqa + "export-preset-generator://usd": "USD PBR Metal Roughness", # noqa + "export-preset-generator://gltf": "glTF PBR Metal Roughness", # noqa + "export-preset-generator://gltf-displacement": "glTF PBR Metal Roughness + Displacement texture (experimental)" # noqa + } + result.update(export_templates) + return result + + +def _convert_stack_path_to_cmd_str(stack_path): + """Convert stack path `str` or `[str, str]` for javascript query + + Example usage: + >>> stack_path = _convert_stack_path_to_cmd_str(stack_path) + >>> cmd = f"alg.mapexport.channelIdentifiers({stack_path})" + >>> substance_painter.js.evaluate(cmd) + + Args: + stack_path (list or str): Path to the stack, could be + "Texture set name" or ["Texture set name", "Stack name"] + + Returns: + str: Stack path usable as argument in javascript query. + + """ + return json.dumps(stack_path) + + +def get_channel_identifiers(stack_path=None): + """Return the list of channel identifiers. + + If a context is passed (texture set/stack), + return only used channels with resolved user channels. + + Channel identifiers are: + basecolor, height, specular, opacity, emissive, displacement, + glossiness, roughness, anisotropylevel, anisotropyangle, transmissive, + scattering, reflection, ior, metallic, normal, ambientOcclusion, + diffuse, specularlevel, blendingmask, [custom user names]. + + Args: + stack_path (list or str, Optional): Path to the stack, could be + "Texture set name" or ["Texture set name", "Stack name"] + + Returns: + list: List of channel identifiers. + + """ + if stack_path is None: + stack_path = "" + else: + stack_path = _convert_stack_path_to_cmd_str(stack_path) + cmd = f"alg.mapexport.channelIdentifiers({stack_path})" + return substance_painter.js.evaluate(cmd) + + +def get_channel_format(stack_path, channel): + """Retrieve the channel format of a specific stack channel. + + See `alg.mapexport.channelFormat` (javascript API) for more details. 
+ + The channel format data is: + "label" (str): The channel format label: could be one of + [sRGB8, L8, RGB8, L16, RGB16, L16F, RGB16F, L32F, RGB32F] + "color" (bool): True if the format is in color, False is grayscale + "floating" (bool): True if the format uses floating point + representation, false otherwise + "bitDepth" (int): Bit per color channel (could be 8, 16 or 32 bpc) + + Args: + stack_path (list or str): Path to the stack, could be + "Texture set name" or ["Texture set name", "Stack name"] + channel (str): Identifier of the channel to export + (see `get_channel_identifiers`) + + Returns: + dict: The channel format data. + + """ + stack_path = _convert_stack_path_to_cmd_str(stack_path) + cmd = f"alg.mapexport.channelFormat({stack_path}, '{channel}')" + return substance_painter.js.evaluate(cmd) + + +def get_document_structure(): + """Dump the document structure. + + See `alg.mapexport.documentStructure` (javascript API) for more details. + + Returns: + dict: Document structure or None when no project is open + + """ + return substance_painter.js.evaluate("alg.mapexport.documentStructure()") def load_shelf(path, name=None): diff --git a/openpype/hosts/substancepainter/plugins/create/create_textures.py b/openpype/hosts/substancepainter/plugins/create/create_textures.py index 6d4f816961..9d641215dc 100644 --- a/openpype/hosts/substancepainter/plugins/create/create_textures.py +++ b/openpype/hosts/substancepainter/plugins/create/create_textures.py @@ -1,74 +1,27 @@ # -*- coding: utf-8 -*- """Creator plugin for creating textures.""" -import os from openpype.pipeline import CreatedInstance, Creator - -from openpype.hosts.substancepainter.api.pipeline import ( - set_project_metadata, - get_project_metadata -) - from openpype.lib import ( EnumDef, UILabelDef, NumberDef ) +from openpype.hosts.substancepainter.api.pipeline import ( + set_project_metadata, + get_project_metadata +) +from openpype.hosts.substancepainter.api.lib import get_export_presets + import substance_painter.project -import substance_painter.resource - - -def get_export_presets(): - import substance_painter.resource - - preset_resources = {} - - # TODO: Find more optimal way to find all export templates - for shelf in substance_painter.resource.Shelves.all(): - shelf_path = os.path.normpath(shelf.path()) - - presets_path = os.path.join(shelf_path, "export-presets") - if not os.path.exists(presets_path): - continue - - for fname in os.listdir(presets_path): - if fname.endswith(".spexp"): - template_name = os.path.splitext(fname)[0] - - resource = substance_painter.resource.ResourceID( - context=shelf.name(), - name=template_name - ) - resource_url = resource.url() - - preset_resources[resource_url] = template_name - - # Sort by template name - export_templates = dict(sorted(preset_resources.items(), - key=lambda x: x[1])) - - # Add default built-ins at the start - # TODO: find the built-ins automatically; scraped with https://gist.github.com/BigRoy/97150c7c6f0a0c916418207b9a2bc8f1 # noqa - result = { - "export-preset-generator://viewport2d": "2D View", # noqa - "export-preset-generator://doc-channel-normal-no-alpha": "Document channels + Normal + AO (No Alpha)", # noqa - "export-preset-generator://doc-channel-normal-with-alpha": "Document channels + Normal + AO (With Alpha)", # noqa - "export-preset-generator://sketchfab": "Sketchfab", # noqa - "export-preset-generator://adobe-standard-material": "Substance 3D Stager", # noqa - "export-preset-generator://usd": "USD PBR Metal Roughness", # noqa - 
"export-preset-generator://gltf": "glTF PBR Metal Roughness", # noqa - "export-preset-generator://gltf-displacement": "glTF PBR Metal Roughness + Displacement texture (experimental)" # noqa - } - result.update(export_templates) - return result class CreateTextures(Creator): """Create a texture set.""" - identifier = "io.openpype.creators.substancepainter.textures" + identifier = "io.openpype.creators.substancepainter.textureset" label = "Textures" - family = "textures" + family = "textureSet" icon = "picture-o" default_variant = "Main" @@ -79,19 +32,19 @@ class CreateTextures(Creator): return instance = self.create_instance_in_context(subset_name, instance_data) - set_project_metadata("textures", instance.data_to_store()) + set_project_metadata("textureSet", instance.data_to_store()) def collect_instances(self): - workfile = get_project_metadata("textures") + workfile = get_project_metadata("textureSet") if workfile: self.create_instance_in_context_from_existing(workfile) def update_instances(self, update_list): for instance, _changes in update_list: # Update project's metadata - data = get_project_metadata("textures") or {} + data = get_project_metadata("textureSet") or {} data.update(instance.data_to_store()) - set_project_metadata("textures", data) + set_project_metadata("textureSet", data) def remove_instances(self, instances): for instance in instances: diff --git a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py new file mode 100644 index 0000000000..96f2daa525 --- /dev/null +++ b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py @@ -0,0 +1,207 @@ +import os +import copy +import clique +import pyblish.api + +from openpype.pipeline import publish + +import substance_painter.export +from openpype.hosts.substancepainter.api.colorspace import ( + get_project_channel_data, +) + + +def get_project_color_spaces(): + """Return unique color space names used for exports. + + This is based on the Color Management preferences of the project. + + See also: + func:`get_project_channel_data` + + """ + return set( + data["colorSpace"] for data in get_project_channel_data().values() + ) + + +def _get_channel_name(path, + texture_set_name, + project_colorspaces): + """Return expected 'name' for the output image. + + This will be used as a suffix to the separate image publish subsets. + + """ + # TODO: This will require improvement before being production ready. + # TODO(Question): Should we preserve the texture set name in the suffix + # TODO so that exports with multiple texture sets can work within a single + # TODO parent textureSet, like `texture{Variant}.{TextureSet}{Channel}` + name = os.path.basename(path) # filename + name = os.path.splitext(name)[0] # no extension + # Usually the channel identifier comes after $textureSet in + # the export preset. 
Unfortunately getting the export maps + # and channels explicitly is not trivial so for now we just + # assume this will generate a nice identifier for the end user + name = name.split(f"{texture_set_name}_", 1)[-1] + + # TODO: We need more explicit ways to detect the color space part + for colorspace in project_colorspaces: + if name.endswith(f"_{colorspace}"): + name = name[:-len(f"_{colorspace}")] + break + + return name + + +class CollectTextureSet(pyblish.api.InstancePlugin): + """Extract Textures using an output template config""" + # TODO: More explicitly detect UDIM tiles + # TODO: Get color spaces + # TODO: Detect what source data channels end up in each file + + label = "Collect Texture Set images" + hosts = ['substancepainter'] + families = ["textureSet"] + order = pyblish.api.CollectorOrder + + def process(self, instance): + + config = self.get_export_config(instance) + textures = substance_painter.export.list_project_textures(config) + + instance.data["exportConfig"] = config + + colorspaces = get_project_color_spaces() + + outputs = {} + for (texture_set_name, stack_name), maps in textures.items(): + + # Log our texture outputs + self.log.debug(f"Processing stack: {stack_name}") + for texture_map in maps: + self.log.debug(f"Expecting texture: {texture_map}") + + # For now assume the UDIM textures end with .. and + # when no trailing number is present before the extension then it's + # considered to *not* be a UDIM export. + collections, remainder = clique.assemble( + maps, + patterns=[clique.PATTERNS["frames"]], + minimum_items=True + ) + + outputs = {} + if collections: + # UDIM tile sequence + for collection in collections: + name = _get_channel_name(collection.head, + texture_set_name=texture_set_name, + project_colorspaces=colorspaces) + outputs[name] = collection + self.log.info(f"UDIM Collection: {collection}") + else: + # Single file per channel without UDIM number + for path in remainder: + name = _get_channel_name(path, + texture_set_name=texture_set_name, + project_colorspaces=colorspaces) + outputs[name] = path + self.log.info(f"Single file: {path}") + + # Let's break the instance into multiple instances to integrate + # a subset per generated texture or texture UDIM sequence + context = instance.context + for map_name, map_output in outputs.items(): + + is_udim = isinstance(map_output, clique.Collection) + if is_udim: + first_file = list(map_output)[0] + map_fnames = [os.path.basename(path) for path in map_output] + else: + first_file = map_output + map_fnames = map_output + + ext = os.path.splitext(first_file)[1] + assert ext.lstrip('.'), f"No extension: {ext}" + + # Define the suffix we want to give this particular texture + # set and set up a remapped subset naming for it. 
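# For example, a hypothetical subset "textureSetMain" with a map named
# "BaseColor" becomes image subset "textureMain.BaseColor": the "textureSet"
# prefix is swapped for "texture" and the map name is appended as suffix.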
+ suffix = f".{map_name}" + image_subset = instance.data["subset"][len("textureSet"):] + image_subset = "texture" + image_subset + suffix + + # TODO: Retrieve and store color space with the representation + + # Clone the instance + image_instance = context.create_instance(instance.name) + image_instance[:] = instance[:] + image_instance.data.update(copy.deepcopy(instance.data)) + image_instance.data["name"] = image_subset + image_instance.data["label"] = image_subset + image_instance.data["subset"] = image_subset + image_instance.data["family"] = "image" + image_instance.data["families"] = ["image", "textures"] + image_instance.data['representations'] = [{ + 'name': ext.lstrip("."), + 'ext': ext.lstrip("."), + 'files': map_fnames, + }] + + instance.append(image_instance) + + def get_export_config(self, instance): + """Return an export configuration dict for texture exports. + + This config can be supplied to: + - `substance_painter.export.export_project_textures` + - `substance_painter.export.list_project_textures` + + See documentation on substance_painter.export module about the + formatting of the configuration dictionary. + + Args: + instance (pyblish.api.Instance): Texture Set instance to be + published. + + Returns: + dict: Export config + + """ + + creator_attrs = instance.data["creator_attributes"] + preset_url = creator_attrs["exportPresetUrl"] + self.log.debug(f"Exporting using preset: {preset_url}") + + # See: https://substance3d.adobe.com/documentation/ptpy/api/substance_painter/export # noqa + config = { # noqa + "exportShaderParams": True, + "exportPath": publish.get_instance_staging_dir(instance), + "defaultExportPreset": preset_url, + + # Custom overrides to the exporter + "exportParameters": [ + { + "parameters": { + "fileFormat": creator_attrs["exportFileFormat"], + "sizeLog2": creator_attrs["exportSize"], + "paddingAlgorithm": creator_attrs["exportPadding"], + "dilationDistance": creator_attrs["exportDilationDistance"] # noqa + } + } + ] + } + + # Create the list of Texture Sets to export. + config["exportList"] = [] + for texture_set in substance_painter.textureset.all_texture_sets(): + config["exportList"].append({"rootPath": texture_set.name()}) + + # Consider None values from the creator attributes optionals + for override in config["exportParameters"]: + parameters = override.get("parameters") + for key, value in dict(parameters).items(): + if value is None: + parameters.pop(key) + + return config diff --git a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py index 8ebad3193f..e99b93cac9 100644 --- a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py +++ b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py @@ -1,55 +1,28 @@ from openpype.pipeline import KnownPublishError, publish - import substance_painter.export class ExtractTextures(publish.Extractor): - """Extract Textures using an output template config""" + """Extract Textures using an output template config. - label = "Extract Texture Sets" + Note: + This Extractor assumes that `collect_textureset_images` has prepared + the relevant export config and has also collected the individual image + instances for publishing including its representation. That is why this + particular Extractor doesn't specify representations to integrate. 
+ + """ + # TODO: More explicitly detect UDIM tiles + # TODO: Get color spaces + # TODO: Detect what source data channels end up in each file + + label = "Extract Texture Set" hosts = ['substancepainter'] - families = ["textures"] + families = ["textureSet"] def process(self, instance): - staging_dir = self.staging_dir(instance) - - creator_attrs = instance.data["creator_attributes"] - preset_url = creator_attrs["exportPresetUrl"] - self.log.debug(f"Exporting using preset: {preset_url}") - - # See: https://substance3d.adobe.com/documentation/ptpy/api/substance_painter/export # noqa - config = { - "exportShaderParams": True, - "exportPath": staging_dir, - "defaultExportPreset": preset_url, - - # Custom overrides to the exporter - "exportParameters": [ - { - "parameters": { - "fileFormat": creator_attrs["exportFileFormat"], - "sizeLog2": creator_attrs["exportSize"], - "paddingAlgorithm": creator_attrs["exportPadding"], - "dilationDistance": creator_attrs["exportDilationDistance"] # noqa - } - } - ] - } - - # Create the list of Texture Sets to export. - config["exportList"] = [] - for texture_set in substance_painter.textureset.all_texture_sets(): - # stack = texture_set.get_stack() - config["exportList"].append({"rootPath": texture_set.name()}) - - # Consider None values optionals - for override in config["exportParameters"]: - parameters = override.get("parameters") - for key, value in dict(parameters).items(): - if value is None: - parameters.pop(key) - + config = instance.data["exportConfig"] result = substance_painter.export.export_project_textures(config) if result.status != substance_painter.export.ExportStatus.Success: @@ -57,18 +30,24 @@ class ExtractTextures(publish.Extractor): "Failed to export texture set: {}".format(result.message) ) - files = [] - for _stack, maps in result.textures.items(): + for (texture_set_name, stack_name), maps in result.textures.items(): + # Log our texture outputs + self.log.info(f"Processing stack: {stack_name}") for texture_map in maps: self.log.info(f"Exported texture: {texture_map}") - files.append(texture_map) - # TODO: add the representations so they integrate the way we'd want - """ - instance.data['representations'] = [{ - 'name': ext.lstrip("."), - 'ext': ext.lstrip("."), - 'files': file, - "stagingDir": folder, - }] - """ + # TODO: Confirm outputs match what we collected + # TODO: Confirm the files indeed exist + # TODO: make sure representations are registered + + # Add a fake representation which won't be integrated so the + # Integrator leaves us alone - otherwise it would error + # TODO: Add `instance.data["integrate"] = False` support in Integrator? 
+ instance.data["representations"] = [ + { + "name": "_fake", + "ext": "_fake", + "delete": True, + "files": [] + } + ] From bd73709463440b520deafb6e9ac82995b6e6e430 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 13 Jan 2023 12:33:43 +0100 Subject: [PATCH 034/187] Fix indentation --- openpype/hosts/substancepainter/api/colorspace.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/substancepainter/api/colorspace.py b/openpype/hosts/substancepainter/api/colorspace.py index f7b9f7694a..a9df3eb066 100644 --- a/openpype/hosts/substancepainter/api/colorspace.py +++ b/openpype/hosts/substancepainter/api/colorspace.py @@ -135,10 +135,10 @@ def get_project_channel_data(): config_map = config["exportPresets"][0]["maps"][0] config_map["channels"] = [ { - "destChannel": x, - "srcChannel": x, - "srcMapType": "documentMap", - "srcMapName": channel + "destChannel": x, + "srcChannel": x, + "srcMapType": "documentMap", + "srcMapName": channel } for x in "RGB" ] From fbcb88b457faa1e468b71104a158da03558a4c23 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 13 Jan 2023 12:35:00 +0100 Subject: [PATCH 035/187] Include texture set name in the logging --- .../hosts/substancepainter/plugins/publish/extract_textures.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py index e99b93cac9..a32a81db48 100644 --- a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py +++ b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py @@ -32,7 +32,7 @@ class ExtractTextures(publish.Extractor): for (texture_set_name, stack_name), maps in result.textures.items(): # Log our texture outputs - self.log.info(f"Processing stack: {stack_name}") + self.log.info(f"Processing stack: {texture_set_name} {stack_name}") for texture_map in maps: self.log.info(f"Exported texture: {texture_map}") From 78c4875dcb26488cae3e8ccb27b6bc7f6f8c4350 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 13 Jan 2023 18:03:34 +0100 Subject: [PATCH 036/187] Add support for thumbnail generation of extracted textures from Substance Painter --- .../plugins/publish/collect_textureset_images.py | 6 ++++++ .../substancepainter/plugins/publish/extract_textures.py | 3 +++ openpype/plugins/publish/extract_thumbnail.py | 4 ++-- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py index 96f2daa525..5a179f7526 100644 --- a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py +++ b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py @@ -148,6 +148,12 @@ class CollectTextureSet(pyblish.api.InstancePlugin): 'files': map_fnames, }] + # Set up the representation for thumbnail generation + # TODO: Simplify this once thumbnail extraction is refactored + staging_dir = os.path.dirname(first_file) + image_instance.data["representations"][0]["tags"] = ["review"] + image_instance.data["representations"][0]["stagingDir"] = staging_dir # noqa + instance.append(image_instance) def get_export_config(self, instance): diff --git a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py index a32a81db48..22acf07284 100644 --- 
a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py +++ b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py @@ -20,6 +20,9 @@ class ExtractTextures(publish.Extractor): hosts = ['substancepainter'] families = ["textureSet"] + # Run before thumbnail extractors + order = publish.Extractor.order - 0.1 + def process(self, instance): config = instance.data["exportConfig"] diff --git a/openpype/plugins/publish/extract_thumbnail.py b/openpype/plugins/publish/extract_thumbnail.py index 14b43beae8..dcdb8341ba 100644 --- a/openpype/plugins/publish/extract_thumbnail.py +++ b/openpype/plugins/publish/extract_thumbnail.py @@ -19,9 +19,9 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): order = pyblish.api.ExtractorOrder families = [ "imagesequence", "render", "render2d", "prerender", - "source", "clip", "take" + "source", "clip", "take", "image" ] - hosts = ["shell", "fusion", "resolve", "traypublisher"] + hosts = ["shell", "fusion", "resolve", "traypublisher", "substancepainter"] enabled = False # presetable attribute From 5c0a7e30ed59b63bd177ff64c07c5f55417556f3 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 13 Jan 2023 18:14:18 +0100 Subject: [PATCH 037/187] Group textures together to look like a package/textureSet --- .../plugins/publish/collect_textureset_images.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py index 5a179f7526..3832f724d4 100644 --- a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py +++ b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py @@ -148,6 +148,9 @@ class CollectTextureSet(pyblish.api.InstancePlugin): 'files': map_fnames, }] + # Group the textures together in the loader + image_instance.data["subsetGroup"] = instance.data["subset"] + # Set up the representation for thumbnail generation # TODO: Simplify this once thumbnail extraction is refactored staging_dir = os.path.dirname(first_file) From cba71b9e0d22da265429fe2fcbcba1d77dd63a3e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 13 Jan 2023 18:29:59 +0100 Subject: [PATCH 038/187] Fix full path in representation for single images (non-UDIM) --- .../plugins/publish/collect_textureset_images.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py index 3832f724d4..851a22c1ee 100644 --- a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py +++ b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py @@ -120,7 +120,7 @@ class CollectTextureSet(pyblish.api.InstancePlugin): map_fnames = [os.path.basename(path) for path in map_output] else: first_file = map_output - map_fnames = map_output + map_fnames = os.path.basename(map_output) ext = os.path.splitext(first_file)[1] assert ext.lstrip('.'), f"No extension: {ext}" From b17ca1efeac834d9038555f522c8602bc4701035 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 14 Jan 2023 15:38:22 +0100 Subject: [PATCH 039/187] More explicit parsing of extracted textures, prepare for color space data --- openpype/hosts/substancepainter/api/lib.py | 328 +++++++++++++++++- .../publish/collect_textureset_images.py | 177 +++------- .../plugins/publish/extract_textures.py | 3 - 3 files changed, 379 insertions(+), 129 
deletions(-)

diff --git a/openpype/hosts/substancepainter/api/lib.py b/openpype/hosts/substancepainter/api/lib.py
index b929f881a8..2406680a68 100644
--- a/openpype/hosts/substancepainter/api/lib.py
+++ b/openpype/hosts/substancepainter/api/lib.py
@@ -1,7 +1,9 @@
 import os
 import re
 import json
+from collections import defaultdict
+
 import substance_painter.project
 import substance_painter.resource
 import substance_painter.js
 
@@ -115,7 +117,7 @@ def get_channel_format(stack_path, channel):
             representation, false otherwise
         "bitDepth" (int): Bit per color channel (could be 8, 16 or 32 bpc)
 
-    Args:
+    Arguments:
         stack_path (list or str): Path to the stack, could be
             "Texture set name" or ["Texture set name", "Stack name"]
         channel (str): Identifier of the channel to export
@@ -142,6 +144,330 @@ def get_document_structure():
     return substance_painter.js.evaluate("alg.mapexport.documentStructure()")
 
 
+def get_export_templates(config, format="png", strip_folder=True):
+    """Return export config outputs.
+
+    This uses the Javascript API `alg.mapexport.getPathsExportDocumentMaps`
+    which returns a different output than using the Python equivalent
+    `substance_painter.export.list_project_textures(config)`.
+
+    The nice thing about the Javascript API version is that it returns the
+    output textures grouped by filename template.
+
+    A downside is that it doesn't return all the UDIM tiles but per template
+    always returns a single file.
+
+    Note:
+        The file format needs to be explicitly passed to the Javascript API
+        but upon exporting through the Python API the file format can be based
+        on the output preset. So it's likely the file extension will mismatch.
+
+    Warning:
+        Even though the function appears to solely get the expected outputs
+        the Javascript API will actually create the config's texture output
+        folder if it does not exist yet. As such, a valid path must be set.
+
+    Example output:
+        {
+            "DefaultMaterial": {
+                "$textureSet_BaseColor(_$colorSpace)(.$udim)": "DefaultMaterial_BaseColor_ACES - ACEScg.1002.png", # noqa
+                "$textureSet_Emissive(_$colorSpace)(.$udim)": "DefaultMaterial_Emissive_ACES - ACEScg.1002.png", # noqa
+                "$textureSet_Height(_$colorSpace)(.$udim)": "DefaultMaterial_Height_Utility - Raw.1002.png", # noqa
+                "$textureSet_Metallic(_$colorSpace)(.$udim)": "DefaultMaterial_Metallic_Utility - Raw.1002.png", # noqa
+                "$textureSet_Normal(_$colorSpace)(.$udim)": "DefaultMaterial_Normal_Utility - Raw.1002.png", # noqa
+                "$textureSet_Roughness(_$colorSpace)(.$udim)": "DefaultMaterial_Roughness_Utility - Raw.1002.png" # noqa
+            }
+        }
+
+    Arguments:
+        config (dict): Export config
+        format (str, Optional): Output format to write to, defaults to 'png'
+        strip_folder (bool, Optional): Whether to strip the output folder
+            from the output filenames.
+
+    Returns:
+        dict: The expected output maps.
+
+    """
+    folder = config["exportPath"]
+    preset = config["defaultExportPreset"]
+    cmd = f'alg.mapexport.getPathsExportDocumentMaps("{preset}", "{folder}", "{format}")'  # noqa
+    result = substance_painter.js.evaluate(cmd)
+
+    if strip_folder:
+        for stack, maps in result.items():
+            for map_template, map_filepath in maps.items():
+                map_filename = map_filepath[len(folder):].lstrip("/")
+                maps[map_template] = map_filename
+
+    return result
+
+
+def _templates_to_regex(templates,
+                        texture_set,
+                        colorspaces,
+                        project,
+                        mesh):
+    """Return regex based on a Substance Painter export filename template.
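+
+    For example (an illustrative, hypothetical case): for a texture set
+    named "DefaultMaterial" the template `$textureSet_BaseColor(.$udim)`
+    roughly becomes::
+
+        ^(?P<textureSet>DefaultMaterial)_BaseColor(\.(?P<udim>([0-9]{4})))?(?P<ext>\.[A-Za-z][A-Za-z0-9-]*)$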
+
+    This converts Substance Painter export filename templates like
+    `$mesh_$textureSet_BaseColor(_$colorSpace)(.$udim)` into a regex
+    which can be used to query an output filename to help retrieve:
+
+    - Which template filename the file belongs to.
+    - Which color space the file is written with.
+    - Which udim tile it is exactly.
+
+    This is used by `get_parsed_export_maps` which tries to match the
+    filename pattern as explicitly as possible against the known possible
+    outputs. That's why texture set name, color spaces, project path and
+    mesh path must be provided. By doing so we get the best shot at
+    correctly matching the right template because otherwise $textureSet
+    could basically be any string and thus match even that of a color
+    space or mesh.
+
+    Arguments:
+        templates (list): List of templates to convert to regex.
+        texture_set (str): The texture set to match against.
+        colorspaces (list): The colorspaces defined in the current project.
+        project (str): Filepath of current substance project.
+        mesh (str): Path to mesh file used in current project.
+
+    Returns:
+        dict: Mapping of filename template to its regex pattern.
+
+    """
+    def _filename_no_ext(path):
+        return os.path.splitext(os.path.basename(path))[0]
+
+    if colorspaces and any(colorspaces):
+        colorspace_match = (
+            "(" + "|".join(re.escape(c) for c in colorspaces) + ")"
+        )
+    else:
+        # No colorspace support enabled
+        colorspace_match = ""
+
+    # Map each template key to a regex for its valid search values
+    key_matches = {
+        "$project": re.escape(_filename_no_ext(project)),
+        "$mesh": re.escape(_filename_no_ext(mesh)),
+        "$textureSet": re.escape(texture_set),
+        "$colorSpace": colorspace_match,
+        "$udim": "([0-9]{4})"
+    }
+
+    # Turn the templates into regexes
+    regexes = {}
+    for template in templates:
+
+        # We need to tweak the template to turn it into a valid search regex
+        search_regex = re.escape(template)
+
+        # Let's assume that any ( and ) character in the file template was
+        # intended as an optional template key and do a simple `str.replace`
+        # Note: we are matching against re.escape(template) so will need to
+        # search for the escaped brackets.
+        search_regex = search_regex.replace(re.escape("("), "(")
+        search_regex = search_regex.replace(re.escape(")"), ")?")
+
+        # Substitute each key into a named group
+        for key, key_expected_regex in key_matches.items():
+
+            # We want to use the template as a regex basis in the end so will
+            # escape the whole thing first. Note that thus we'll need to
+            # search for the escaped versions of the keys too.
+            escaped_key = re.escape(key)
+            key_label = key[1:]  # key without $ prefix
+
+            key_expected_grp_regex = f"(?P<{key_label}>{key_expected_regex})"
+            search_regex = search_regex.replace(escaped_key,
+                                                key_expected_grp_regex)
+
+        # The filename templates don't include the extension so we add it
+        # to be able to match the output filename beginning to end
+        ext_regex = "(?P<ext>\.[A-Za-z][A-Za-z0-9-]*)"
+        search_regex = rf"^{search_regex}{ext_regex}$"
+
+        regexes[template] = search_regex
+
+    return regexes
+
+
+def strip_template(template, strip="._ "):
+    """Return static characters in a substance painter filename template.
+
+    >>> strip_template("$textureSet_HELLO(.$udim)")
+    # HELLO
+    >>> strip_template("$mesh_$textureSet_HELLO_WORLD_$colorSpace(.$udim)")
+    # HELLO_WORLD
+    >>> strip_template("$textureSet_HELLO(.$udim)", strip=None)
+    # _HELLO
+    >>> strip_template("$mesh_$textureSet_$colorSpace(.$udim)", strip=None)
+    # __
+
+    Arguments:
+        template (str): Filename template to strip.
+ strip (str, optional): Characters to strip from beginning and end + of the static string in template. Defaults to: `._ `. + + Returns: + str: The static string in filename template. + + """ + # Return only characters that were part of the template that were static. + # Remove all keys + keys = ["$project", "$mesh", "$textureSet", "$udim", "$colorSpace"] + stripped_template = template + for key in keys: + stripped_template = stripped_template.replace(key, "") + + # Everything inside an optional bracket space is excluded since it's not + # static. We keep a counter to track whether we are currently iterating + # over parts of the template that are inside an 'optional' group or not. + counter = 0 + result = "" + for char in stripped_template: + if char == "(": + counter += 1 + elif char == ")": + counter -= 1 + if counter < 0: + counter = 0 + else: + if counter == 0: + result += char + + if strip: + # Strip of any trailing start/end characters. Technically these are + # static but usually start and end separators like space or underscore + # aren't wanted. + result = result.strip(strip) + + return result + + +def get_parsed_export_maps(config): + """ + + This tries to parse the texture outputs using a Python API export config. + + Parses template keys: $project, $mesh, $textureSet, $colorSpace, $udim + + Example: + {("DefaultMaterial", ""): { + "$mesh_$textureSet_BaseColor(_$colorSpace)(.$udim)": [ + { + // OUTPUT DATA FOR FILE #1 OF THE TEMPLATE + }, + { + // OUTPUT DATA FOR FILE #2 OF THE TEMPLATE + }, + ] + }, + }} + + File output data (all outputs are `str`). + 1) Parsed tokens: These are parsed tokens from the template, they will + only exist if found in the filename template and output filename. + + project: Workfile filename without extension + mesh: Filename of the loaded mesh without extension + textureSet: The texture set, e.g. "DefaultMaterial", + colorSpace: The color space, e.g. "ACES - ACEScg", + udim: The udim tile, e.g. "1001" + + 2) Template and file outputs + + filepath: Full path to the resulting texture map, e.g. + "/path/to/mesh_DefaultMaterial_BaseColor_ACES - ACEScg.1002.png", + output: "mesh_DefaultMaterial_BaseColor_ACES - ACEScg.1002.png" + Note: if template had slashes (folders) then `output` will too. + So `output` might include a folder. + + channel: The stripped static characters of the filename template which + usually look like an identifier for that map, e.g. "BaseColor". + See `_stripped_template` + + Returns: + dict: [texture_set, stack]: {template: [file1_data, file2_data]} + + """ + import substance_painter.export + from .colorspace import get_project_channel_data + + outputs = substance_painter.export.list_project_textures(config) + templates = get_export_templates(config) + + # Get all color spaces set for the current project + project_colorspaces = set( + data["colorSpace"] for data in get_project_channel_data().values() + ) + + # Get current project mesh path and project path to explicitly match + # the $mesh and $project tokens + project_mesh_path = substance_painter.project.last_imported_mesh_path() + project_path = substance_painter.project.file_path() + + # Get the current export path to strip this of the beginning of filepath + # results, since filename templates don't have these we'll match without + # that part of the filename. 
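+    # (Illustrative, hypothetical values: with an export path of
+    # "C:/publish/" the file "C:/publish/DefaultMaterial_BaseColor.1001.png"
+    # is matched against the templates as
+    # "DefaultMaterial_BaseColor.1001.png".)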
+ export_path = config["exportPath"] + export_path = export_path.replace("\\", "/") + if not export_path.endswith("/"): + export_path += "/" + + # Parse the outputs + result = {} + for key, filepaths in outputs.items(): + texture_set, stack = key + + if stack: + stack_path = f"{texture_set}/{stack}" + else: + stack_path = texture_set + + stack_templates = list(templates[stack_path].keys()) + + template_regex = _templates_to_regex(stack_templates, + texture_set=texture_set, + colorspaces=project_colorspaces, + mesh=project_mesh_path, + project=project_path) + + # Let's precompile the regexes + for template, regex in template_regex.items(): + template_regex[template] = re.compile(regex) + + stack_results = defaultdict(list) + for filepath in sorted(filepaths): + # We strip explicitly using the full parent export path instead of + # using `os.path.basename` because export template is allowed to + # have subfolders in its template which we want to match against + assert filepath.startswith(export_path) + filename = filepath[len(export_path):] + + for template, regex in template_regex.items(): + match = regex.match(filename) + if match: + parsed = match.groupdict(default={}) + + # Include some special outputs for convenience + parsed["filepath"] = filepath + parsed["output"] = filename + + stack_results[template].append(parsed) + break + else: + raise ValueError(f"Unable to match {filename} against any " + f"template in: {list(template_regex.keys())}") + + result[key] = dict(stack_results) + + return result + + def load_shelf(path, name=None): """Add shelf to substance painter (for current application session) diff --git a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py index 851a22c1ee..6928bdb36c 100644 --- a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py +++ b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py @@ -1,63 +1,19 @@ import os import copy -import clique import pyblish.api from openpype.pipeline import publish -import substance_painter.export -from openpype.hosts.substancepainter.api.colorspace import ( - get_project_channel_data, +import substance_painter.textureset +from openpype.hosts.substancepainter.api.lib import ( + get_parsed_export_maps, + strip_template ) -def get_project_color_spaces(): - """Return unique color space names used for exports. - - This is based on the Color Management preferences of the project. - - See also: - func:`get_project_channel_data` - - """ - return set( - data["colorSpace"] for data in get_project_channel_data().values() - ) - - -def _get_channel_name(path, - texture_set_name, - project_colorspaces): - """Return expected 'name' for the output image. - - This will be used as a suffix to the separate image publish subsets. - - """ - # TODO: This will require improvement before being production ready. - # TODO(Question): Should we preserve the texture set name in the suffix - # TODO so that exports with multiple texture sets can work within a single - # TODO parent textureSet, like `texture{Variant}.{TextureSet}{Channel}` - name = os.path.basename(path) # filename - name = os.path.splitext(name)[0] # no extension - # Usually the channel identifier comes after $textureSet in - # the export preset. 
Unfortunately getting the export maps - # and channels explicitly is not trivial so for now we just - # assume this will generate a nice identifier for the end user - name = name.split(f"{texture_set_name}_", 1)[-1] - - # TODO: We need more explicit ways to detect the color space part - for colorspace in project_colorspaces: - if name.endswith(f"_{colorspace}"): - name = name[:-len(f"_{colorspace}")] - break - - return name - - class CollectTextureSet(pyblish.api.InstancePlugin): """Extract Textures using an output template config""" - # TODO: More explicitly detect UDIM tiles - # TODO: Get color spaces + # TODO: Production-test usage of color spaces # TODO: Detect what source data channels end up in each file label = "Collect Texture Set images" @@ -68,96 +24,67 @@ class CollectTextureSet(pyblish.api.InstancePlugin): def process(self, instance): config = self.get_export_config(instance) - textures = substance_painter.export.list_project_textures(config) instance.data["exportConfig"] = config - - colorspaces = get_project_color_spaces() - - outputs = {} - for (texture_set_name, stack_name), maps in textures.items(): - - # Log our texture outputs - self.log.debug(f"Processing stack: {stack_name}") - for texture_map in maps: - self.log.debug(f"Expecting texture: {texture_map}") - - # For now assume the UDIM textures end with .. and - # when no trailing number is present before the extension then it's - # considered to *not* be a UDIM export. - collections, remainder = clique.assemble( - maps, - patterns=[clique.PATTERNS["frames"]], - minimum_items=True - ) - - outputs = {} - if collections: - # UDIM tile sequence - for collection in collections: - name = _get_channel_name(collection.head, - texture_set_name=texture_set_name, - project_colorspaces=colorspaces) - outputs[name] = collection - self.log.info(f"UDIM Collection: {collection}") - else: - # Single file per channel without UDIM number - for path in remainder: - name = _get_channel_name(path, - texture_set_name=texture_set_name, - project_colorspaces=colorspaces) - outputs[name] = path - self.log.info(f"Single file: {path}") + maps = get_parsed_export_maps(config) # Let's break the instance into multiple instances to integrate # a subset per generated texture or texture UDIM sequence + for (texture_set_name, stack_name), template_maps in maps.items(): + self.log.info(f"Processing {texture_set_name}/{stack_name}") + for template, outputs in template_maps.items(): + self.log.info(f"Processing {template}") + self.create_image_instance(instance, template, outputs) + + def create_image_instance(self, instance, template, outputs): + context = instance.context - for map_name, map_output in outputs.items(): + first_filepath = outputs[0]["filepath"] + fnames = [os.path.basename(output["filepath"]) for output in outputs] + ext = os.path.splitext(first_filepath)[1] + assert ext.lstrip('.'), f"No extension: {ext}" - is_udim = isinstance(map_output, clique.Collection) - if is_udim: - first_file = list(map_output)[0] - map_fnames = [os.path.basename(path) for path in map_output] - else: - first_file = map_output - map_fnames = os.path.basename(map_output) + map_identifier = strip_template(template) - ext = os.path.splitext(first_file)[1] - assert ext.lstrip('.'), f"No extension: {ext}" + # Define the suffix we want to give this particular texture + # set and set up a remapped subset naming for it. 
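+        # Hypothetical example: for a source subset "textureSetMain" and a
+        # template identifier "BaseColor" this yields the image subset
+        # "textureMain.BaseColor".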
+ suffix = f".{map_identifier}" + image_subset = instance.data["subset"][len("textureSet"):] + image_subset = "texture" + image_subset + suffix + # Prepare representation + representation = { + 'name': ext.lstrip("."), + 'ext': ext.lstrip("."), + 'files': fnames, + } - # Define the suffix we want to give this particular texture - # set and set up a remapped subset naming for it. - suffix = f".{map_name}" - image_subset = instance.data["subset"][len("textureSet"):] - image_subset = "texture" + image_subset + suffix + # Mark as UDIM explicitly if it has UDIM tiles. + if bool(outputs[0].get("udim")): + representation["udim"] = True - # TODO: Retrieve and store color space with the representation + # TODO: Store color space with the representation - # Clone the instance - image_instance = context.create_instance(instance.name) - image_instance[:] = instance[:] - image_instance.data.update(copy.deepcopy(instance.data)) - image_instance.data["name"] = image_subset - image_instance.data["label"] = image_subset - image_instance.data["subset"] = image_subset - image_instance.data["family"] = "image" - image_instance.data["families"] = ["image", "textures"] - image_instance.data['representations'] = [{ - 'name': ext.lstrip("."), - 'ext': ext.lstrip("."), - 'files': map_fnames, - }] + # Clone the instance + image_instance = context.create_instance(instance.name) + image_instance[:] = instance[:] + image_instance.data.update(copy.deepcopy(instance.data)) + image_instance.data["name"] = image_subset + image_instance.data["label"] = image_subset + image_instance.data["subset"] = image_subset + image_instance.data["family"] = "image" + image_instance.data["families"] = ["image", "textures"] + image_instance.data['representations'] = [representation] - # Group the textures together in the loader - image_instance.data["subsetGroup"] = instance.data["subset"] + # Group the textures together in the loader + image_instance.data["subsetGroup"] = instance.data["subset"] - # Set up the representation for thumbnail generation - # TODO: Simplify this once thumbnail extraction is refactored - staging_dir = os.path.dirname(first_file) - image_instance.data["representations"][0]["tags"] = ["review"] - image_instance.data["representations"][0]["stagingDir"] = staging_dir # noqa + # Set up the representation for thumbnail generation + # TODO: Simplify this once thumbnail extraction is refactored + staging_dir = os.path.dirname(first_filepath) + image_instance.data["representations"][0]["tags"] = ["review"] + image_instance.data["representations"][0]["stagingDir"] = staging_dir - instance.append(image_instance) + instance.append(image_instance) def get_export_config(self, instance): """Return an export configuration dict for texture exports. diff --git a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py index 22acf07284..a5bb274b78 100644 --- a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py +++ b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py @@ -12,9 +12,6 @@ class ExtractTextures(publish.Extractor): particular Extractor doesn't specify representations to integrate. 
""" - # TODO: More explicitly detect UDIM tiles - # TODO: Get color spaces - # TODO: Detect what source data channels end up in each file label = "Extract Texture Set" hosts = ['substancepainter'] From 04b32350202e17877ddce8832767668e34e95715 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 14 Jan 2023 20:32:05 +0100 Subject: [PATCH 040/187] Cosmetics --- .../plugins/publish/collect_textureset_images.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py index 6928bdb36c..f85861d0eb 100644 --- a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py +++ b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py @@ -37,13 +37,17 @@ class CollectTextureSet(pyblish.api.InstancePlugin): self.create_image_instance(instance, template, outputs) def create_image_instance(self, instance, template, outputs): + f"""Create a new instance per image or UDIM sequence. + + The new instances will be of family `image`. + + """ context = instance.context first_filepath = outputs[0]["filepath"] fnames = [os.path.basename(output["filepath"]) for output in outputs] ext = os.path.splitext(first_filepath)[1] assert ext.lstrip('.'), f"No extension: {ext}" - map_identifier = strip_template(template) # Define the suffix we want to give this particular texture @@ -51,6 +55,7 @@ class CollectTextureSet(pyblish.api.InstancePlugin): suffix = f".{map_identifier}" image_subset = instance.data["subset"][len("textureSet"):] image_subset = "texture" + image_subset + suffix + # Prepare representation representation = { 'name': ext.lstrip("."), @@ -84,6 +89,7 @@ class CollectTextureSet(pyblish.api.InstancePlugin): image_instance.data["representations"][0]["tags"] = ["review"] image_instance.data["representations"][0]["stagingDir"] = staging_dir + # Store the instance in the original instance as a member instance.append(image_instance) def get_export_config(self, instance): From d80e20482b96b388ab91edece375f067f2b9e6b4 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 14 Jan 2023 20:33:19 +0100 Subject: [PATCH 041/187] Cosmetics + add assertion --- openpype/hosts/substancepainter/api/lib.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/substancepainter/api/lib.py b/openpype/hosts/substancepainter/api/lib.py index 2406680a68..bf4415af8a 100644 --- a/openpype/hosts/substancepainter/api/lib.py +++ b/openpype/hosts/substancepainter/api/lib.py @@ -195,8 +195,9 @@ def get_export_templates(config, format="png", strip_folder=True): result = substance_painter.js.evaluate(cmd) if strip_folder: - for stack, maps in result.items(): + for _stack, maps in result.items(): for map_template, map_filepath in maps.items(): + assert map_filepath.startswith(folder) map_filename = map_filepath[len(folder):].lstrip("/") maps[map_template] = map_filename From 196b91896bf9f55414ef766eb2e72631ef066e51 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 14 Jan 2023 20:35:43 +0100 Subject: [PATCH 042/187] Shush hound --- openpype/hosts/substancepainter/api/lib.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/substancepainter/api/lib.py b/openpype/hosts/substancepainter/api/lib.py index bf4415af8a..5b32e3a9aa 100644 --- a/openpype/hosts/substancepainter/api/lib.py +++ b/openpype/hosts/substancepainter/api/lib.py @@ -241,9 +241,8 @@ def 
_templates_to_regex(templates,
         return os.path.splitext(os.path.basename(path))[0]
 
     if colorspaces and any(colorspaces):
-        colorspace_match = (
-            "(" + "|".join(re.escape(c) for c in colorspaces) + ")"
-        )
+        colorspace_match = "|".join(re.escape(c) for c in set(colorspaces))
+        colorspace_match = f"({colorspace_match})"
     else:
         # No colorspace support enabled
        colorspace_match = ""

From 5bfb010fbfc0211c7266993fb1b9ddbc2d21162d Mon Sep 17 00:00:00 2001
From: Roy Nieterau
Date: Sat, 14 Jan 2023 20:36:23 +0100
Subject: [PATCH 043/187] Shush hound - fix invalid escape sequence

---
 openpype/hosts/substancepainter/api/lib.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/openpype/hosts/substancepainter/api/lib.py b/openpype/hosts/substancepainter/api/lib.py
index 5b32e3a9aa..278a23ce01 100644
--- a/openpype/hosts/substancepainter/api/lib.py
+++ b/openpype/hosts/substancepainter/api/lib.py
@@ -285,7 +285,7 @@ def _templates_to_regex(templates,
 
         # The filename templates don't include the extension so we add it
        # to be able to match the output filename beginning to end
-        ext_regex = "(?P<ext>\.[A-Za-z][A-Za-z0-9-]*)"
+        ext_regex = r"(?P<ext>\.[A-Za-z][A-Za-z0-9-]*)"
         search_regex = rf"^{search_regex}{ext_regex}$"
 
         regexes[template] = search_regex

From 2335facfff9d800b32bd3b09f71cbb4daf57035e Mon Sep 17 00:00:00 2001
From: Roy Nieterau
Date: Sat, 14 Jan 2023 20:37:35 +0100
Subject: [PATCH 044/187] Fix docstring

---
 openpype/hosts/substancepainter/api/lib.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/openpype/hosts/substancepainter/api/lib.py b/openpype/hosts/substancepainter/api/lib.py
index 278a23ce01..7a10ae1eb6 100644
--- a/openpype/hosts/substancepainter/api/lib.py
+++ b/openpype/hosts/substancepainter/api/lib.py
@@ -349,7 +349,7 @@ def strip_template(template, strip="._ "):
 
 def get_parsed_export_maps(config):
-    """
+    """Return Export Config's expected output textures with parsed data.
 
     This tries to parse the texture outputs using a Python API export config.
 
     Parses template keys: $project, $mesh, $textureSet, $colorSpace, $udim

From aa0c62b4d7e73d10e63f7384a9d534a12c8fd16e Mon Sep 17 00:00:00 2001
From: Roy Nieterau
Date: Sat, 14 Jan 2023 20:38:56 +0100
Subject: [PATCH 045/187] Cleanup

---
 .../plugins/publish/collect_textureset_images.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py
index f85861d0eb..53319ba96d 100644
--- a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py
+++ b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py
@@ -37,10 +37,10 @@ class CollectTextureSet(pyblish.api.InstancePlugin):
             self.create_image_instance(instance, template, outputs)
 
     def create_image_instance(self, instance, template, outputs):
-        f"""Create a new instance per image or UDIM sequence.
-
+        """Create a new instance per image or UDIM sequence.
+
         The new instances will be of family `image`.
- + """ context = instance.context From cb04f6bb8b07b776544ed0666fe8440ff52a2ce1 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 14 Jan 2023 20:56:29 +0100 Subject: [PATCH 046/187] Fix/Cleanup docstring --- openpype/hosts/substancepainter/api/lib.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/openpype/hosts/substancepainter/api/lib.py b/openpype/hosts/substancepainter/api/lib.py index 7a10ae1eb6..22dc3059fc 100644 --- a/openpype/hosts/substancepainter/api/lib.py +++ b/openpype/hosts/substancepainter/api/lib.py @@ -378,7 +378,7 @@ def get_parsed_export_maps(config): colorSpace: The color space, e.g. "ACES - ACEScg", udim: The udim tile, e.g. "1001" - 2) Template and file outputs + 2) Template output and filepath filepath: Full path to the resulting texture map, e.g. "/path/to/mesh_DefaultMaterial_BaseColor_ACES - ACEScg.1002.png", @@ -386,10 +386,6 @@ def get_parsed_export_maps(config): Note: if template had slashes (folders) then `output` will too. So `output` might include a folder. - channel: The stripped static characters of the filename template which - usually look like an identifier for that map, e.g. "BaseColor". - See `_stripped_template` - Returns: dict: [texture_set, stack]: {template: [file1_data, file2_data]} From 33aafc3ff6f7e1b4f213345e7baa80f50d4e1f51 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 15 Jan 2023 01:30:43 +0100 Subject: [PATCH 047/187] Implement OCIO support for Substance Painter + publish color space with textures --- openpype/hooks/pre_host_set_ocio.py | 37 +++++++++++++++++++ .../publish/collect_textureset_images.py | 9 ++++- .../plugins/publish/extract_textures.py | 19 +++++++++- .../project_settings/substancepainter.json | 10 +++++ .../schema_project_substancepainter.json | 17 +++++++++ 5 files changed, 89 insertions(+), 3 deletions(-) create mode 100644 openpype/hooks/pre_host_set_ocio.py diff --git a/openpype/hooks/pre_host_set_ocio.py b/openpype/hooks/pre_host_set_ocio.py new file mode 100644 index 0000000000..b9e2b79bf4 --- /dev/null +++ b/openpype/hooks/pre_host_set_ocio.py @@ -0,0 +1,37 @@ +from openpype.lib import PreLaunchHook + +from openpype.pipeline.colorspace import get_imageio_config +from openpype.pipeline.template_data import get_template_data_with_names + + +class PreLaunchHostSetOCIO(PreLaunchHook): + """Set OCIO environment for the host""" + + order = 0 + app_groups = ["substancepainter"] + + def execute(self): + """Hook entry method.""" + + anatomy_data = get_template_data_with_names( + project_name=self.data["project_doc"]["name"], + asset_name=self.data["asset_doc"]["name"], + task_name=self.data["task_name"], + host_name=self.host_name, + system_settings=self.data["system_settings"] + ) + + ocio_config = get_imageio_config( + project_name=self.data["project_doc"]["name"], + host_name=self.host_name, + project_settings=self.data["project_settings"], + anatomy_data=anatomy_data, + anatomy=self.data["anatomy"] + ) + + if ocio_config: + ocio_path = ocio_config["path"] + self.log.info(f"Setting OCIO config path: {ocio_path}") + self.launch_context.env["OCIO"] = ocio_path + else: + self.log.debug("OCIO not set or enabled") diff --git a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py index 53319ba96d..0e445c9c1c 100644 --- a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py +++ b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py @@ -67,8 
+67,6 @@ class CollectTextureSet(pyblish.api.InstancePlugin): if bool(outputs[0].get("udim")): representation["udim"] = True - # TODO: Store color space with the representation - # Clone the instance image_instance = context.create_instance(instance.name) image_instance[:] = instance[:] @@ -83,6 +81,13 @@ class CollectTextureSet(pyblish.api.InstancePlugin): # Group the textures together in the loader image_instance.data["subsetGroup"] = instance.data["subset"] + # Store color space with the instance + # Note: The extractor will assign it to the representation + colorspace = outputs[0].get("colorSpace") + if colorspace: + self.log.debug(f"{image_subset} colorspace: {colorspace}") + image_instance.data["colorspace"] = colorspace + # Set up the representation for thumbnail generation # TODO: Simplify this once thumbnail extraction is refactored staging_dir = os.path.dirname(first_filepath) diff --git a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py index a5bb274b78..e66ce6dbf6 100644 --- a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py +++ b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py @@ -2,7 +2,7 @@ from openpype.pipeline import KnownPublishError, publish import substance_painter.export -class ExtractTextures(publish.Extractor): +class ExtractTextures(publish.ExtractorColormanaged): """Extract Textures using an output template config. Note: @@ -40,6 +40,23 @@ class ExtractTextures(publish.Extractor): # TODO: Confirm the files indeed exist # TODO: make sure representations are registered + # We'll insert the color space data for each image instance that we + # added into this texture set. The collector couldn't do so because + # some anatomy and other instance data needs to be collected prior + context = instance.context + for image_instance in instance: + + colorspace = image_instance.data.get("colorspace") + if not colorspace: + self.log.debug("No color space data present for instance: " + f"{image_instance}") + continue + + for representation in image_instance.data["representations"]: + self.set_representation_colorspace(representation, + context=context, + colorspace=colorspace) + # Add a fake representation which won't be integrated so the # Integrator leaves us alone - otherwise it would error # TODO: Add `instance.data["integrate"] = False` support in Integrator? 
diff --git a/openpype/settings/defaults/project_settings/substancepainter.json b/openpype/settings/defaults/project_settings/substancepainter.json index a424a923da..0f9f1af71e 100644 --- a/openpype/settings/defaults/project_settings/substancepainter.json +++ b/openpype/settings/defaults/project_settings/substancepainter.json @@ -1,3 +1,13 @@ { + "imageio": { + "ocio_config": { + "enabled": true, + "filepath": [] + }, + "file_rules": { + "enabled": true, + "rules": {} + } + }, "shelves": {} } \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_substancepainter.json b/openpype/settings/entities/schemas/projects_schema/schema_project_substancepainter.json index 4a02a9d8ca..79a39b8e6e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_substancepainter.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_substancepainter.json @@ -5,6 +5,23 @@ "label": "Substance Painter", "is_file": true, "children": [ + { + "key": "imageio", + "type": "dict", + "label": "Color Management (ImageIO)", + "is_group": true, + "children": [ + { + "type": "schema", + "name": "schema_imageio_config" + }, + { + "type": "schema", + "name": "schema_imageio_file_rules" + } + + ] + }, { "type": "dict-modifiable", "key": "shelves", From eecf109cab26ab34940ece267e7b26ecd6dc6177 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 15 Jan 2023 01:32:42 +0100 Subject: [PATCH 048/187] Support single image (otherwise integrator will fail) --- .../plugins/publish/collect_textureset_images.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py index 53319ba96d..18d1e59c4c 100644 --- a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py +++ b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py @@ -60,7 +60,7 @@ class CollectTextureSet(pyblish.api.InstancePlugin): representation = { 'name': ext.lstrip("."), 'ext': ext.lstrip("."), - 'files': fnames, + 'files': fnames if len(fnames) > 1 else fnames[0], } # Mark as UDIM explicitly if it has UDIM tiles. 
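A short illustrative aside on the representation convention the single-image fix above relies on (an assumption about OpenPype's integrator, not an excerpt from it): the `files` value is expected to be a plain string for a single texture and a list only for an actual sequence such as UDIM tiles, e.g.:

    # Hypothetical filenames: single map vs. UDIM sequence
    representation["files"] = "DefaultMaterial_BaseColor.png"
    representation["files"] = ["DefaultMaterial_BaseColor.1001.png",
                               "DefaultMaterial_BaseColor.1002.png"]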
From 30ae52770d551bca7d35c0b1cdd9893140cf6db7 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 15 Jan 2023 01:33:21 +0100 Subject: [PATCH 049/187] Rename application group to substancepainter for consistency and clarity --- openpype/hooks/pre_add_last_workfile_arg.py | 2 +- openpype/settings/defaults/system_settings/applications.json | 2 +- .../system_schema/host_settings/schema_substancepainter.json | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hooks/pre_add_last_workfile_arg.py b/openpype/hooks/pre_add_last_workfile_arg.py index d5a9a41e5a..49fb54d263 100644 --- a/openpype/hooks/pre_add_last_workfile_arg.py +++ b/openpype/hooks/pre_add_last_workfile_arg.py @@ -23,7 +23,7 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook): "blender", "photoshop", "tvpaint", - "substance", + "substancepainter", "aftereffects" ] diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index 30c692d0e6..d78b54fa05 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -1315,7 +1315,7 @@ } } }, - "substance": { + "substancepainter": { "enabled": true, "label": "Substance Painter", "icon": "app_icons/substancepainter.png", diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_substancepainter.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_substancepainter.json index 513f98c610..fb3b21e63f 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_substancepainter.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_substancepainter.json @@ -1,6 +1,6 @@ { "type": "dict", - "key": "substance", + "key": "substancepainter", "label": "Substance Painter", "collapsible": true, "checkbox_key": "enabled", From 313cb0d550174bacb0a9377829a62283f3520523 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 15 Jan 2023 01:34:00 +0100 Subject: [PATCH 050/187] Ensure safeguarding against forward/backslashes differences --- openpype/hosts/substancepainter/api/lib.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/substancepainter/api/lib.py b/openpype/hosts/substancepainter/api/lib.py index 22dc3059fc..9bd408f0f2 100644 --- a/openpype/hosts/substancepainter/api/lib.py +++ b/openpype/hosts/substancepainter/api/lib.py @@ -189,7 +189,7 @@ def get_export_templates(config, format="png", strip_folder=True): dict: The expected output maps. 
""" - folder = config["exportPath"] + folder = config["exportPath"].replace("\\", "/") preset = config["defaultExportPreset"] cmd = f'alg.mapexport.getPathsExportDocumentMaps("{preset}", "{folder}", "{format}")' # noqa result = substance_painter.js.evaluate(cmd) @@ -197,6 +197,7 @@ def get_export_templates(config, format="png", strip_folder=True): if strip_folder: for _stack, maps in result.items(): for map_template, map_filepath in maps.items(): + map_filepath = map_filepath.replace("\\", "/") assert map_filepath.startswith(folder) map_filename = map_filepath[len(folder):].lstrip("/") maps[map_template] = map_filename @@ -441,7 +442,10 @@ def get_parsed_export_maps(config): # We strip explicitly using the full parent export path instead of # using `os.path.basename` because export template is allowed to # have subfolders in its template which we want to match against - assert filepath.startswith(export_path) + filepath = filepath.replace("\\", "/") + assert filepath.startswith(export_path), ( + f"Filepath {filepath} must start with folder {export_path}" + ) filename = filepath[len(export_path):] for template, regex in template_regex.items(): From ece0e7ded2d721dfe92849a8d246bfb4ef0464cd Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 15 Jan 2023 01:36:04 +0100 Subject: [PATCH 051/187] No need to strip folder for the templates, we're not using the filename values of the result. --- openpype/hosts/substancepainter/api/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/substancepainter/api/lib.py b/openpype/hosts/substancepainter/api/lib.py index 9bd408f0f2..754f8a2bd6 100644 --- a/openpype/hosts/substancepainter/api/lib.py +++ b/openpype/hosts/substancepainter/api/lib.py @@ -395,7 +395,7 @@ def get_parsed_export_maps(config): from .colorspace import get_project_channel_data outputs = substance_painter.export.list_project_textures(config) - templates = get_export_templates(config) + templates = get_export_templates(config, strip_folder=False) # Get all color spaces set for the current project project_colorspaces = set( From 31e37e5a33298718c541bb1969e464ff7ae930e9 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 15 Jan 2023 02:07:00 +0100 Subject: [PATCH 052/187] Use project doc and asset doc directly for `get_template_data` --- openpype/hooks/pre_host_set_ocio.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hooks/pre_host_set_ocio.py b/openpype/hooks/pre_host_set_ocio.py index b9e2b79bf4..3620d88db6 100644 --- a/openpype/hooks/pre_host_set_ocio.py +++ b/openpype/hooks/pre_host_set_ocio.py @@ -1,7 +1,7 @@ from openpype.lib import PreLaunchHook from openpype.pipeline.colorspace import get_imageio_config -from openpype.pipeline.template_data import get_template_data_with_names +from openpype.pipeline.template_data import get_template_data class PreLaunchHostSetOCIO(PreLaunchHook): @@ -13,9 +13,9 @@ class PreLaunchHostSetOCIO(PreLaunchHook): def execute(self): """Hook entry method.""" - anatomy_data = get_template_data_with_names( - project_name=self.data["project_doc"]["name"], - asset_name=self.data["asset_doc"]["name"], + anatomy_data = get_template_data( + project_doc=self.data["project_doc"], + asset_doc=self.data["asset_doc"], task_name=self.data["task_name"], host_name=self.host_name, system_settings=self.data["system_settings"] From 9329ff28d57f75d54dec1ba5aa25f390e02f7f3d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 17 Jan 2023 15:39:59 +0100 Subject: [PATCH 053/187] Show new project 
prompt with mesh preloaded --- openpype/hosts/substancepainter/api/lib.py | 126 ++++++++++++++++++ .../plugins/load/load_mesh.py | 17 +-- 2 files changed, 131 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/substancepainter/api/lib.py b/openpype/hosts/substancepainter/api/lib.py index 754f8a2bd6..e552caee6d 100644 --- a/openpype/hosts/substancepainter/api/lib.py +++ b/openpype/hosts/substancepainter/api/lib.py @@ -520,3 +520,129 @@ def load_shelf(path, name=None): substance_painter.resource.Shelves.add(name, path) return name + + +def _get_new_project_action(): + """Return QAction which triggers Substance Painter's new project dialog""" + from PySide2 import QtGui + + main_window = substance_painter.ui.get_main_window() + + # Find the file menu's New file action + menubar = main_window.menuBar() + new_action = None + for action in menubar.actions(): + menu = action.menu() + if not menu: + continue + + if menu.objectName() != "file": + continue + + # Find the action with the CTRL+N key sequence + new_action = next(action for action in menu.actions() + if action.shortcut() == QtGui.QKeySequence.New) + break + + return new_action + + +def prompt_new_file_with_mesh(mesh_filepath): + """Prompts the user for a new file using Substance Painter's own dialog. + + This will set the mesh path to load to the given mesh and disables the + dialog box to disallow the user to change the path. This way we can allow + user configuration of a project but set the mesh path ourselves. + + Warning: + This is very hacky and experimental. + + Note: + If a project is currently open using the same mesh filepath it can't + accurately detect whether the user had actually accepted the new project + dialog or whether the project afterwards is still the original project, + for example when the user might have cancelled the operation. 
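+
+    Illustrative usage (with a hypothetical mesh path), mirroring how the
+    SubstanceLoadProjectMesh loader calls this function::
+
+        mesh = prompt_new_file_with_mesh("/assets/hero/hero_modelMain.fbx")
+        if not mesh:
+            print("User cancelled the new project prompt.")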
+ + """ + from PySide2 import QtWidgets, QtCore + + app = QtWidgets.QApplication.instance() + assert os.path.isfile(mesh_filepath), \ + f"Mesh filepath does not exist: {mesh_filepath}" + + def _setup_file_dialog(): + """Set filepath in QFileDialog and trigger accept result""" + file_dialog = app.activeModalWidget() + assert isinstance(file_dialog, QtWidgets.QFileDialog) + + # Quickly hide the dialog + file_dialog.hide() + app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents, 1000) + + file_dialog.setDirectory(os.path.dirname(mesh_filepath)) + url = QtCore.QUrl.fromLocalFile(os.path.basename(mesh_filepath)) + file_dialog.selectUrl(url) + + # Give the explorer window time to refresh to the folder and select + # the file + while not file_dialog.selectedFiles(): + app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents, 1000) + print(f"Selected: {file_dialog.selectedFiles()}") + + # Set it again now we know the path is refreshed - without this + # accepting the dialog will often not trigger the correct filepath + file_dialog.setDirectory(os.path.dirname(mesh_filepath)) + url = QtCore.QUrl.fromLocalFile(os.path.basename(mesh_filepath)) + file_dialog.selectUrl(url) + + file_dialog.done(file_dialog.Accepted) + app.processEvents(QtCore.QEventLoop.AllEvents) + + def _setup_prompt(): + app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents) + dialog = app.activeModalWidget() + assert dialog.objectName() == "NewProjectDialog" + + # Set the window title + mesh = os.path.basename(mesh_filepath) + dialog.setWindowTitle(f"New Project with mesh: {mesh}") + + # Get the select mesh file button + mesh_select = dialog.findChild(QtWidgets.QPushButton, "meshSelect") + + # Hide the select mesh button to the user to block changing of mesh + mesh_select.setVisible(False) + + # Ensure UI is visually up-to-date + app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents) + + # Trigger the 'select file' dialog to set the path and have the + # new file dialog to use the path. 
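+        # (Presumably queued via a timer because clicking the button opens a
+        # modal file dialog; _setup_file_dialog then runs while that dialog
+        # is active instead of after it closes.)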
+ QtCore.QTimer.singleShot(10, _setup_file_dialog) + mesh_select.click() + + app.processEvents(QtCore.QEventLoop.AllEvents, 5000) + + mesh_filename = dialog.findChild(QtWidgets.QFrame, "meshFileName") + mesh_filename_label = mesh_filename.findChild(QtWidgets.QLabel) + if not mesh_filename_label.text(): + dialog.close() + raise RuntimeError(f"Failed to set mesh path: {mesh_filepath}") + + new_action = _get_new_project_action() + if not new_action: + raise RuntimeError("Unable to detect new file action..") + + QtCore.QTimer.singleShot(0, _setup_prompt) + new_action.trigger() + app.processEvents(QtCore.QEventLoop.AllEvents, 5000) + + if not substance_painter.project.is_open(): + return + + # Confirm mesh was set as expected + project_mesh = substance_painter.project.last_imported_mesh_path() + if os.path.normpath(project_mesh) != os.path.normpath(mesh_filepath): + return + + return project_mesh diff --git a/openpype/hosts/substancepainter/plugins/load/load_mesh.py b/openpype/hosts/substancepainter/plugins/load/load_mesh.py index 00f808199f..4e800bd623 100644 --- a/openpype/hosts/substancepainter/plugins/load/load_mesh.py +++ b/openpype/hosts/substancepainter/plugins/load/load_mesh.py @@ -7,6 +7,7 @@ from openpype.hosts.substancepainter.api.pipeline import ( set_container_metadata, remove_container_metadata ) +from openpype.hosts.substancepainter.api.lib import prompt_new_file_with_mesh import substance_painter.project import qargparse @@ -45,18 +46,10 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): if not substance_painter.project.is_open(): # Allow to 'initialize' a new project - # TODO: preferably these settings would come from the actual - # new project prompt of Substance (or something that is - # visually similar to still allow artist decisions) - settings = substance_painter.project.Settings( - default_texture_resolution=4096, - import_cameras=import_cameras, - ) - - substance_painter.project.create( - mesh_file_path=self.fname, - settings=settings - ) + result = prompt_new_file_with_mesh(mesh_filepath=self.fname) + if not result: + self.log.info("User cancelled new project prompt.") + return else: # Reload the mesh From 033d37ca283e6fed6d9a9337e4001e5978b12271 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 17 Jan 2023 17:01:59 +0100 Subject: [PATCH 054/187] Early draft for Substance Painter documentation --- website/docs/artist_hosts_substancepainter.md | 80 +++++++++++++++++++ 1 file changed, 80 insertions(+) create mode 100644 website/docs/artist_hosts_substancepainter.md diff --git a/website/docs/artist_hosts_substancepainter.md b/website/docs/artist_hosts_substancepainter.md new file mode 100644 index 0000000000..9ed83421af --- /dev/null +++ b/website/docs/artist_hosts_substancepainter.md @@ -0,0 +1,80 @@ +--- +id: artist_hosts_substancepainter +title: Substance Painter +sidebar_label: Substance Painter +--- + +## OpenPype global tools + +- [Work Files](artist_tools.md#workfiles) +- [Load](artist_tools.md#loader) +- [Manage (Inventory)](artist_tools.md#inventory) +- [Publish](artist_tools.md#publisher) +- [Library Loader](artist_tools.md#library-loader) + +## Working with OpenPype in Substance Painter + +The Substance Painter OpenPype integration allows you to: + +- Set the project mesh and easily keep it in sync with updates of the model +- Easily export your textures as versioned publishes for others to load and update. + +## Setting the project mesh + +Substance Painter requires a project file to have a mesh path configured. 
+As such, you can't start a workfile without choosing a mesh path. + +To start a new project using a published model you can _without an open project_ +use OpenPype > Load.. > Load Mesh on a supported publish. This will prompt you +with a New Project prompt preset to that particular mesh file. + +If you already have a project open, you can also replace (reload) your mesh +using the same Load Mesh functionality. + +After having the project mesh loaded or reloaded through the loader +tool the mesh will be _managed_ by OpenPype. For example, you'll be notified +on workfile open whether the mesh in your workfile is outdated. You can also +set it to specific version using OpenPype > Manage.. where you can right click +on the project mesh to perform _Set Version_ + +:::info +A Substance Painter project will always have only one mesh set. Whenever you +trigger _Load Mesh_ from the loader this will **replace** your currently loaded +mesh for your open project. +::: + +## Publishing textures + +To publish your textures we must first create a `textureSet` +publish instance. + +To create a **TextureSet instance** we will use OpenPype's publisher tool. Go +to **OpenPype → Publish... → TextureSet** + +The texture set instance will define what Substance Painter export template `.spexp` to +use and thus defines what texture maps will be exported from your workfile. + +:::info +The TextureSet instance gets saved with your Substance Painter project. As such, +you will only need to configure this once for your workfile. Next time you can +just click **OpenPype → Publish...** and start publishing directly with the +same settings. +::: + + +### Known issues + +#### Can't see the OpenPype menu? + +If you're unable to see the OpenPype top level menu in Substance Painter make +sure you have launched Substance Painter through OpenPype and that the OpenPype +Integration plug-in is loaded inside Substance Painter: **Python > openpype_plugin** + +#### Substance Painter + Steam + +Running the steam version of Substance Painter within OpenPype will require you +to close the Steam executable before launching Substance Painter through OpenPype. +Otherwise the Substance Painter process is launched using Steam's existing +environment and thus will not be able to pick up the pipeline integration. + +This appears to be a limitation of how Steam works. \ No newline at end of file From 1c77d2b002527a450c8be21d93040bccd588413e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 18 Jan 2023 10:18:01 +0100 Subject: [PATCH 055/187] Fix UDIM integration --- .../plugins/publish/collect_textureset_images.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py index 18d1e59c4c..5f06880663 100644 --- a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py +++ b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py @@ -65,7 +65,10 @@ class CollectTextureSet(pyblish.api.InstancePlugin): # Mark as UDIM explicitly if it has UDIM tiles. if bool(outputs[0].get("udim")): - representation["udim"] = True + # The representation for a UDIM sequence should have a `udim` key + # that is a list of all udim tiles (str) like: ["1001", "1002"] + # strings. See CollectTextures plug-in and Integrators. 
+ representation["udim"] = [output["udim"] for output in outputs] # TODO: Store color space with the representation From f9f95b84e68da86ce53f9881ee59b98acb6d9aef Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 25 Jan 2023 11:09:19 +0000 Subject: [PATCH 056/187] Basic implementation of the new Creator --- openpype/hosts/unreal/api/__init__.py | 6 +- openpype/hosts/unreal/api/pipeline.py | 53 ++++++- openpype/hosts/unreal/api/plugin.py | 209 +++++++++++++++++++++++++- 3 files changed, 262 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/unreal/api/__init__.py b/openpype/hosts/unreal/api/__init__.py index ca9db259e6..2618a7677c 100644 --- a/openpype/hosts/unreal/api/__init__.py +++ b/openpype/hosts/unreal/api/__init__.py @@ -1,7 +1,11 @@ # -*- coding: utf-8 -*- """Unreal Editor OpenPype host API.""" -from .plugin import Loader +from .plugin import ( + UnrealActorCreator, + UnrealAssetCreator, + Loader +) from .pipeline import ( install, diff --git a/openpype/hosts/unreal/api/pipeline.py b/openpype/hosts/unreal/api/pipeline.py index 2081c8fd13..7a21effcbc 100644 --- a/openpype/hosts/unreal/api/pipeline.py +++ b/openpype/hosts/unreal/api/pipeline.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- import os +import json import logging from typing import List from contextlib import contextmanager @@ -16,13 +17,14 @@ from openpype.pipeline import ( ) from openpype.tools.utils import host_tools import openpype.hosts.unreal -from openpype.host import HostBase, ILoadHost +from openpype.host import HostBase, ILoadHost, IPublishHost import unreal # noqa - logger = logging.getLogger("openpype.hosts.unreal") + OPENPYPE_CONTAINERS = "OpenPypeContainers" +CONTEXT_CONTAINER = "OpenPype/context.json" UNREAL_VERSION = semver.VersionInfo( *os.getenv("OPENPYPE_UNREAL_VERSION").split(".") ) @@ -35,7 +37,7 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") -class UnrealHost(HostBase, ILoadHost): +class UnrealHost(HostBase, ILoadHost, IPublishHost): """Unreal host implementation. For some time this class will re-use functions from module based @@ -60,6 +62,26 @@ class UnrealHost(HostBase, ILoadHost): show_tools_dialog() + def update_context_data(self, data, changes): + unreal.log_warning("update_context_data") + unreal.log_warning(data) + content_path = unreal.Paths.project_content_dir() + op_ctx = content_path + CONTEXT_CONTAINER + with open(op_ctx, "w+") as f: + json.dump(data, f) + with open(op_ctx, "r") as fp: + test = eval(json.load(fp)) + unreal.log_warning(test) + + def get_context_data(self): + content_path = unreal.Paths.project_content_dir() + op_ctx = content_path + CONTEXT_CONTAINER + if not os.path.isfile(op_ctx): + return {} + with open(op_ctx, "r") as fp: + data = eval(json.load(fp)) + return data + def install(): """Install Unreal configuration for OpenPype.""" @@ -133,6 +155,31 @@ def ls(): yield data +def lsinst(): + ar = unreal.AssetRegistryHelpers.get_asset_registry() + # UE 5.1 changed how class name is specified + class_name = [ + "/Script/OpenPype", + "OpenPypePublishInstance" + ] if ( + UNREAL_VERSION.major == 5 + and UNREAL_VERSION.minor > 0 + ) else "OpenPypePublishInstance" # noqa + instances = ar.get_assets_by_class(class_name, True) + + # get_asset_by_class returns AssetData. To get all metadata we need to + # load asset. get_tag_values() work only on metadata registered in + # Asset Registry Project settings (and there is no way to set it with + # python short of editing ini configuration file). 
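+    # Load each found instance asset so its metadata tags can be read, then
+    # yield the tags (plus the asset name) as a plain string dictionary.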
+ for asset_data in instances: + asset = asset_data.get_asset() + data = unreal.EditorAssetLibrary.get_metadata_tag_values(asset) + data["objectName"] = asset_data.asset_name + data = cast_map_to_str_dict(data) + + yield data + + def parse_container(container): """To get data from container, AssetContainer must be loaded. diff --git a/openpype/hosts/unreal/api/plugin.py b/openpype/hosts/unreal/api/plugin.py index 6fc00cb71c..f89ff153b1 100644 --- a/openpype/hosts/unreal/api/plugin.py +++ b/openpype/hosts/unreal/api/plugin.py @@ -1,7 +1,212 @@ # -*- coding: utf-8 -*- -from abc import ABC +import sys +import six +from abc import ( + ABC, + ABCMeta, + abstractmethod +) -from openpype.pipeline import LoaderPlugin +import unreal + +from .pipeline import ( + create_publish_instance, + imprint, + lsinst +) +from openpype.lib import BoolDef +from openpype.pipeline import ( + Creator, + LoaderPlugin, + CreatorError, + CreatedInstance +) + + +class OpenPypeCreatorError(CreatorError): + pass + + +@six.add_metaclass(ABCMeta) +class UnrealBaseCreator(Creator): + """Base class for Unreal creator plugins.""" + root = "/Game/OpenPype/PublishInstances" + suffix = "_INS" + + @staticmethod + def cache_subsets(shared_data): + """Cache instances for Creators to shared data. + + Create `unreal_cached_subsets` key when needed in shared data and + fill it with all collected instances from the scene under its + respective creator identifiers. + + If legacy instances are detected in the scene, create + `unreal_cached_legacy_subsets` there and fill it with + all legacy subsets under family as a key. + + Args: + Dict[str, Any]: Shared data. + + Return: + Dict[str, Any]: Shared data dictionary. + + """ + if shared_data.get("unreal_cached_subsets") is None: + shared_data["unreal_cached_subsets"] = {} + if shared_data.get("unreal_cached_legacy_subsets") is None: + shared_data["unreal_cached_legacy_subsets"] = {} + cached_instances = lsinst() + for i in cached_instances: + if not i.get("creator_identifier"): + # we have legacy instance + family = i.get("family") + if (family not in + shared_data["unreal_cached_legacy_subsets"]): + shared_data[ + "unreal_cached_legacy_subsets"][family] = [i] + else: + shared_data[ + "unreal_cached_legacy_subsets"][family].append(i) + continue + + creator_id = i.get("creator_identifier") + if creator_id not in shared_data["unreal_cached_subsets"]: + shared_data["unreal_cached_subsets"][creator_id] = [i] + else: + shared_data["unreal_cached_subsets"][creator_id].append(i) + return shared_data + + @abstractmethod + def create(self, subset_name, instance_data, pre_create_data): + pass + + def collect_instances(self): + # cache instances if missing + self.cache_subsets(self.collection_shared_data) + for instance in self.collection_shared_data[ + "unreal_cached_subsets"].get(self.identifier, []): + created_instance = CreatedInstance.from_existing(instance, self) + self._add_instance_to_context(created_instance) + + def update_instances(self, update_list): + unreal.log_warning(f"Update instances: {update_list}") + for created_inst, _changes in update_list: + instance_node = created_inst.get("instance_path", "") + + if not instance_node: + unreal.log_warning( + f"Instance node not found for {created_inst}") + + new_values = { + key: new_value + for key, (_old_value, new_value) in _changes.items() + } + imprint( + instance_node, + new_values + ) + + def remove_instances(self, instances): + for instance in instances: + instance_node = instance.data.get("instance_path", "") + if instance_node: + 
unreal.EditorAssetLibrary.delete_asset(instance_node) + + self._remove_instance_from_context(instance) + + def get_pre_create_attr_defs(self): + return [ + BoolDef("use_selection", label="Use selection") + ] + + +@six.add_metaclass(ABCMeta) +class UnrealAssetCreator(UnrealBaseCreator): + """Base class for Unreal creator plugins based on assets.""" + + def create(self, subset_name, instance_data, pre_create_data): + """Create instance of the asset. + + Args: + subset_name (str): Name of the subset. + instance_data (dict): Data for the instance. + pre_create_data (dict): Data for the instance. + + Returns: + CreatedInstance: Created instance. + """ + try: + selection = [] + + if pre_create_data.get("use_selection"): + sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() + selection = [a.get_path_name() for a in sel_objects] + + instance_name = f"{subset_name}{self.suffix}" + create_publish_instance(instance_name, self.root) + instance_data["members"] = selection + instance_data["subset"] = subset_name + instance_data["instance_path"] = f"{self.root}/{instance_name}" + instance = CreatedInstance( + self.family, + subset_name, + instance_data, + self) + self._add_instance_to_context(instance) + + imprint(f"{self.root}/{instance_name}", instance_data) + + except Exception as er: + six.reraise( + OpenPypeCreatorError, + OpenPypeCreatorError(f"Creator error: {er}"), + sys.exc_info()[2]) + + +@six.add_metaclass(ABCMeta) +class UnrealActorCreator(UnrealBaseCreator): + """Base class for Unreal creator plugins based on actors.""" + + def create(self, subset_name, instance_data, pre_create_data): + """Create instance of the asset. + + Args: + subset_name (str): Name of the subset. + instance_data (dict): Data for the instance. + pre_create_data (dict): Data for the instance. + + Returns: + CreatedInstance: Created instance. 
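+
+        Raises:
+            OpenPypeCreatorError: If the instance could not be created.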
+ """ + try: + selection = [] + + if pre_create_data.get("use_selection"): + sel_objects = unreal.EditorUtilityLibrary.get_selected_actors() + selection = [a.get_path_name() for a in sel_objects] + + instance_name = f"{subset_name}{self.suffix}" + create_publish_instance(instance_name, self.root) + instance_data["members"] = selection + instance_data[ + "level"] = unreal.EditorLevelLibrary.get_editor_world() + instance_data["subset"] = subset_name + instance_data["instance_path"] = f"{self.root}/{instance_name}" + instance = CreatedInstance( + self.family, + subset_name, + instance_data, + self) + self._add_instance_to_context(instance) + + imprint(f"{self.root}/{instance_name}", instance_data) + + except Exception as er: + six.reraise( + OpenPypeCreatorError, + OpenPypeCreatorError(f"Creator error: {er}"), + sys.exc_info()[2]) class Loader(LoaderPlugin, ABC): From fc09f0b532cf3a1ee496a9f74ae22d55753e7841 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 26 Jan 2023 17:35:11 +0000 Subject: [PATCH 057/187] Improved basic creator --- openpype/hosts/unreal/api/plugin.py | 95 ++++++++++++++++++----------- 1 file changed, 58 insertions(+), 37 deletions(-) diff --git a/openpype/hosts/unreal/api/plugin.py b/openpype/hosts/unreal/api/plugin.py index f89ff153b1..6a561420fa 100644 --- a/openpype/hosts/unreal/api/plugin.py +++ b/openpype/hosts/unreal/api/plugin.py @@ -4,7 +4,6 @@ import six from abc import ( ABC, ABCMeta, - abstractmethod ) import unreal @@ -12,7 +11,8 @@ import unreal from .pipeline import ( create_publish_instance, imprint, - lsinst + lsinst, + UNREAL_VERSION ) from openpype.lib import BoolDef from openpype.pipeline import ( @@ -77,9 +77,28 @@ class UnrealBaseCreator(Creator): shared_data["unreal_cached_subsets"][creator_id].append(i) return shared_data - @abstractmethod def create(self, subset_name, instance_data, pre_create_data): - pass + try: + instance_name = f"{subset_name}{self.suffix}" + create_publish_instance(instance_name, self.root) + + instance_data["subset"] = subset_name + instance_data["instance_path"] = f"{self.root}/{instance_name}" + + instance = CreatedInstance( + self.family, + subset_name, + instance_data, + self) + self._add_instance_to_context(instance) + + imprint(f"{self.root}/{instance_name}", instance_data) + + except Exception as er: + six.reraise( + OpenPypeCreatorError, + OpenPypeCreatorError(f"Creator error: {er}"), + sys.exc_info()[2]) def collect_instances(self): # cache instances if missing @@ -117,7 +136,7 @@ class UnrealBaseCreator(Creator): def get_pre_create_attr_defs(self): return [ - BoolDef("use_selection", label="Use selection") + BoolDef("use_selection", label="Use selection", default=True) ] @@ -137,25 +156,21 @@ class UnrealAssetCreator(UnrealBaseCreator): CreatedInstance: Created instance. """ try: - selection = [] + # Check if instance data has members, filled by the plugin. + # If not, use selection. 
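+            # (Subclasses may have pre-filled "members" with their own
+            # asset paths already.)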
+ if not instance_data.get("members"): + selection = [] - if pre_create_data.get("use_selection"): - sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() - selection = [a.get_path_name() for a in sel_objects] + if pre_create_data.get("use_selection"): + sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() + selection = [a.get_path_name() for a in sel_objects] - instance_name = f"{subset_name}{self.suffix}" - create_publish_instance(instance_name, self.root) - instance_data["members"] = selection - instance_data["subset"] = subset_name - instance_data["instance_path"] = f"{self.root}/{instance_name}" - instance = CreatedInstance( - self.family, + instance_data["members"] = selection + + super(UnrealAssetCreator, self).create( subset_name, instance_data, - self) - self._add_instance_to_context(instance) - - imprint(f"{self.root}/{instance_name}", instance_data) + pre_create_data) except Exception as er: six.reraise( @@ -180,27 +195,33 @@ class UnrealActorCreator(UnrealBaseCreator): CreatedInstance: Created instance. """ try: - selection = [] + if UNREAL_VERSION.major == 5: + world = unreal.UnrealEditorSubsystem().get_editor_world() + else: + world = unreal.EditorLevelLibrary.get_editor_world() - if pre_create_data.get("use_selection"): - sel_objects = unreal.EditorUtilityLibrary.get_selected_actors() - selection = [a.get_path_name() for a in sel_objects] + # Check if the level is saved + if world.get_path_name().startswith("/Temp/"): + raise OpenPypeCreatorError( + "Level must be saved before creating instances.") - instance_name = f"{subset_name}{self.suffix}" - create_publish_instance(instance_name, self.root) - instance_data["members"] = selection - instance_data[ - "level"] = unreal.EditorLevelLibrary.get_editor_world() - instance_data["subset"] = subset_name - instance_data["instance_path"] = f"{self.root}/{instance_name}" - instance = CreatedInstance( - self.family, + # Check if instance data has members, filled by the plugin. + # If not, use selection. 
+ if not instance_data.get("members"): + selection = [] + + if pre_create_data.get("use_selection"): + sel_objects = unreal.EditorUtilityLibrary.get_selected_actors() + selection = [a.get_path_name() for a in sel_objects] + + instance_data["members"] = selection + + instance_data["level"] = world.get_path_name() + + super(UnrealActorCreator, self).create( subset_name, instance_data, - self) - self._add_instance_to_context(instance) - - imprint(f"{self.root}/{instance_name}", instance_data) + pre_create_data) except Exception as er: six.reraise( From f57a6775cc0c0a88ec85002432fbbcaa394cf8ca Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 26 Jan 2023 17:35:42 +0000 Subject: [PATCH 058/187] Updated creators to be compatible with new publisher --- .../unreal/plugins/create/create_camera.py | 44 +++---------- .../unreal/plugins/create/create_layout.py | 39 ++--------- .../unreal/plugins/create/create_look.py | 64 +++++++++---------- .../plugins/create/create_staticmeshfbx.py | 34 ++-------- .../unreal/plugins/create/create_uasset.py | 44 ++++--------- 5 files changed, 65 insertions(+), 160 deletions(-) diff --git a/openpype/hosts/unreal/plugins/create/create_camera.py b/openpype/hosts/unreal/plugins/create/create_camera.py index bf1489d688..239dc87db5 100644 --- a/openpype/hosts/unreal/plugins/create/create_camera.py +++ b/openpype/hosts/unreal/plugins/create/create_camera.py @@ -1,41 +1,13 @@ -import unreal -from unreal import EditorAssetLibrary as eal -from unreal import EditorLevelLibrary as ell - -from openpype.hosts.unreal.api.pipeline import instantiate -from openpype.pipeline import LegacyCreator +# -*- coding: utf-8 -*- +from openpype.hosts.unreal.api.plugin import ( + UnrealActorCreator, +) -class CreateCamera(LegacyCreator): - """Layout output for character rigs""" +class CreateCamera(UnrealActorCreator): + """Create Camera.""" - name = "layoutMain" + identifier = "io.openpype.creators.unreal.camera" label = "Camera" family = "camera" - icon = "cubes" - - root = "/Game/OpenPype/Instances" - suffix = "_INS" - - def __init__(self, *args, **kwargs): - super(CreateCamera, self).__init__(*args, **kwargs) - - def process(self): - data = self.data - - name = data["subset"] - - data["level"] = ell.get_editor_world().get_path_name() - - if not eal.does_directory_exist(self.root): - eal.make_directory(self.root) - - factory = unreal.LevelSequenceFactoryNew() - tools = unreal.AssetToolsHelpers().get_asset_tools() - tools.create_asset(name, f"{self.root}/{name}", None, factory) - - asset_name = f"{self.root}/{name}/{name}.{name}" - - data["members"] = [asset_name] - - instantiate(f"{self.root}", name, data, None, self.suffix) + icon = "camera" diff --git a/openpype/hosts/unreal/plugins/create/create_layout.py b/openpype/hosts/unreal/plugins/create/create_layout.py index c1067b00d9..1d2e800a13 100644 --- a/openpype/hosts/unreal/plugins/create/create_layout.py +++ b/openpype/hosts/unreal/plugins/create/create_layout.py @@ -1,42 +1,13 @@ # -*- coding: utf-8 -*- -from unreal import EditorLevelLibrary - -from openpype.pipeline import LegacyCreator -from openpype.hosts.unreal.api.pipeline import instantiate +from openpype.hosts.unreal.api.plugin import ( + UnrealActorCreator, +) -class CreateLayout(LegacyCreator): +class CreateLayout(UnrealActorCreator): """Layout output for character rigs.""" - name = "layoutMain" + identifier = "io.openpype.creators.unreal.layout" label = "Layout" family = "layout" icon = "cubes" - - root = "/Game" - suffix = "_INS" - - def __init__(self, *args, **kwargs): 
- super(CreateLayout, self).__init__(*args, **kwargs) - - def process(self): - data = self.data - - name = data["subset"] - - selection = [] - # if (self.options or {}).get("useSelection"): - # sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() - # selection = [a.get_path_name() for a in sel_objects] - - data["level"] = EditorLevelLibrary.get_editor_world().get_path_name() - - data["members"] = [] - - if (self.options or {}).get("useSelection"): - # Set as members the selected actors - for actor in EditorLevelLibrary.get_selected_level_actors(): - data["members"].append("{}.{}".format( - actor.get_outer().get_name(), actor.get_name())) - - instantiate(self.root, name, data, selection, self.suffix) diff --git a/openpype/hosts/unreal/plugins/create/create_look.py b/openpype/hosts/unreal/plugins/create/create_look.py index 4abf3f6095..08d61ab9f8 100644 --- a/openpype/hosts/unreal/plugins/create/create_look.py +++ b/openpype/hosts/unreal/plugins/create/create_look.py @@ -1,56 +1,53 @@ # -*- coding: utf-8 -*- -"""Create look in Unreal.""" -import unreal # noqa -from openpype.hosts.unreal.api import pipeline, plugin -from openpype.pipeline import LegacyCreator +import unreal + +from openpype.hosts.unreal.api.pipeline import ( + create_folder +) +from openpype.hosts.unreal.api.plugin import ( + UnrealAssetCreator +) -class CreateLook(LegacyCreator): +class CreateLook(UnrealAssetCreator): """Shader connections defining shape look.""" - name = "unrealLook" - label = "Unreal - Look" + identifier = "io.openpype.creators.unreal.look" + label = "Look" family = "look" icon = "paint-brush" - root = "/Game/Avalon/Assets" - suffix = "_INS" - - def __init__(self, *args, **kwargs): - super(CreateLook, self).__init__(*args, **kwargs) - - def process(self): - name = self.data["subset"] - + def create(self, subset_name, instance_data, pre_create_data): selection = [] - if (self.options or {}).get("useSelection"): + if pre_create_data.get("use_selection"): sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() selection = [a.get_path_name() for a in sel_objects] + if len(selection) != 1: + raise RuntimeError("Please select only one asset.") + + selected_asset = selection[0] + + look_directory = "/Game/OpenPype/Looks" + # Create the folder - path = f"{self.root}/{self.data['asset']}" - new_name = pipeline.create_folder(path, name) - full_path = f"{path}/{new_name}" + folder_name = create_folder(look_directory, subset_name) + path = f"{look_directory}/{folder_name}" # Create a new cube static mesh ar = unreal.AssetRegistryHelpers.get_asset_registry() cube = ar.get_asset_by_object_path("/Engine/BasicShapes/Cube.Cube") - # Create the avalon publish instance object - container_name = f"{name}{self.suffix}" - pipeline.create_publish_instance( - instance=container_name, path=full_path) - # Get the mesh of the selected object - original_mesh = ar.get_asset_by_object_path(selection[0]).get_asset() - materials = original_mesh.get_editor_property('materials') + original_mesh = ar.get_asset_by_object_path(selected_asset).get_asset() + materials = original_mesh.get_editor_property('static_materials') - self.data["members"] = [] + instance_data["members"] = [] # Add the materials to the cube for material in materials: - name = material.get_editor_property('material_slot_name') - object_path = f"{full_path}/{name}.{name}" + mat_name = material.get_editor_property('material_slot_name') + object_path = f"{path}/{mat_name}.{mat_name}" unreal_object = unreal.EditorAssetLibrary.duplicate_loaded_asset( 
cube.get_asset(), object_path ) @@ -61,8 +58,11 @@ class CreateLook(LegacyCreator): unreal_object.add_material( material.get_editor_property('material_interface')) - self.data["members"].append(object_path) + instance_data["members"].append(object_path) unreal.EditorAssetLibrary.save_asset(object_path) - pipeline.imprint(f"{full_path}/{container_name}", self.data) + super(CreateLook, self).create( + subset_name, + instance_data, + pre_create_data) diff --git a/openpype/hosts/unreal/plugins/create/create_staticmeshfbx.py b/openpype/hosts/unreal/plugins/create/create_staticmeshfbx.py index 45d517d27d..1acf7084d1 100644 --- a/openpype/hosts/unreal/plugins/create/create_staticmeshfbx.py +++ b/openpype/hosts/unreal/plugins/create/create_staticmeshfbx.py @@ -1,35 +1,13 @@ # -*- coding: utf-8 -*- -"""Create Static Meshes as FBX geometry.""" -import unreal # noqa -from openpype.hosts.unreal.api.pipeline import ( - instantiate, +from openpype.hosts.unreal.api.plugin import ( + UnrealAssetCreator, ) -from openpype.pipeline import LegacyCreator -class CreateStaticMeshFBX(LegacyCreator): - """Static FBX geometry.""" +class CreateStaticMeshFBX(UnrealAssetCreator): + """Create Static Meshes as FBX geometry.""" - name = "unrealStaticMeshMain" - label = "Unreal - Static Mesh" + identifier = "io.openpype.creators.unreal.staticmeshfbx" + label = "Static Mesh (FBX)" family = "unrealStaticMesh" icon = "cube" - asset_types = ["StaticMesh"] - - root = "/Game" - suffix = "_INS" - - def __init__(self, *args, **kwargs): - super(CreateStaticMeshFBX, self).__init__(*args, **kwargs) - - def process(self): - - name = self.data["subset"] - - selection = [] - if (self.options or {}).get("useSelection"): - sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() - selection = [a.get_path_name() for a in sel_objects] - - unreal.log("selection: {}".format(selection)) - instantiate(self.root, name, self.data, selection, self.suffix) diff --git a/openpype/hosts/unreal/plugins/create/create_uasset.py b/openpype/hosts/unreal/plugins/create/create_uasset.py index ee584ac00c..2d6fcc1d59 100644 --- a/openpype/hosts/unreal/plugins/create/create_uasset.py +++ b/openpype/hosts/unreal/plugins/create/create_uasset.py @@ -1,36 +1,25 @@ -"""Create UAsset.""" +# -*- coding: utf-8 -*- from pathlib import Path import unreal -from openpype.hosts.unreal.api import pipeline -from openpype.pipeline import LegacyCreator +from openpype.hosts.unreal.api.plugin import ( + UnrealAssetCreator, +) -class CreateUAsset(LegacyCreator): - """UAsset.""" +class CreateUAsset(UnrealAssetCreator): + """Create UAsset.""" - name = "UAsset" + identifier = "io.openpype.creators.unreal.uasset" label = "UAsset" family = "uasset" icon = "cube" - root = "/Game/OpenPype" - suffix = "_INS" + def create(self, subset_name, instance_data, pre_create_data): + if pre_create_data.get("use_selection"): + ar = unreal.AssetRegistryHelpers.get_asset_registry() - def __init__(self, *args, **kwargs): - super(CreateUAsset, self).__init__(*args, **kwargs) - - def process(self): - ar = unreal.AssetRegistryHelpers.get_asset_registry() - - subset = self.data["subset"] - path = f"{self.root}/PublishInstances/" - - unreal.EditorAssetLibrary.make_directory(path) - - selection = [] - if (self.options or {}).get("useSelection"): sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() selection = [a.get_path_name() for a in sel_objects] @@ -50,12 +39,7 @@ class CreateUAsset(LegacyCreator): if Path(sys_path).suffix != ".uasset": raise RuntimeError(f"{Path(sys_path).name} is not 
a UAsset.") - unreal.log("selection: {}".format(selection)) - container_name = f"{subset}{self.suffix}" - pipeline.create_publish_instance( - instance=container_name, path=path) - - data = self.data.copy() - data["members"] = selection - - pipeline.imprint(f"{path}/{container_name}", data) + super(CreateUAsset, self).create( + subset_name, + instance_data, + pre_create_data) From e411e197379e487a5dd5342e867bba2501ad8442 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 27 Jan 2023 16:53:39 +0000 Subject: [PATCH 059/187] Updated render creator --- .../unreal/plugins/create/create_render.py | 174 ++++++++++-------- 1 file changed, 94 insertions(+), 80 deletions(-) diff --git a/openpype/hosts/unreal/plugins/create/create_render.py b/openpype/hosts/unreal/plugins/create/create_render.py index a85d17421b..de3efdad74 100644 --- a/openpype/hosts/unreal/plugins/create/create_render.py +++ b/openpype/hosts/unreal/plugins/create/create_render.py @@ -1,117 +1,131 @@ +# -*- coding: utf-8 -*- import unreal -from openpype.hosts.unreal.api import pipeline -from openpype.pipeline import LegacyCreator +from openpype.hosts.unreal.api.pipeline import ( + get_subsequences +) +from openpype.hosts.unreal.api.plugin import ( + UnrealAssetCreator, +) -class CreateRender(LegacyCreator): +class CreateRender(UnrealAssetCreator): """Create instance for sequence for rendering""" - name = "unrealRender" - label = "Unreal - Render" + identifier = "io.openpype.creators.unreal.render" + label = "Render" family = "render" - icon = "cube" - asset_types = ["LevelSequence"] - - root = "/Game/OpenPype/PublishInstances" - suffix = "_INS" - - def process(self): - subset = self.data["subset"] + icon = "eye" + def create(self, subset_name, instance_data, pre_create_data): ar = unreal.AssetRegistryHelpers.get_asset_registry() - # The asset name is the the third element of the path which contains - # the map. - # The index of the split path is 3 because the first element is an - # empty string, as the path begins with "/Content". - a = unreal.EditorUtilityLibrary.get_selected_assets()[0] - asset_name = a.get_path_name().split("/")[3] - - # Get the master sequence and the master level. - # There should be only one sequence and one level in the directory. - filter = unreal.ARFilter( - class_names=["LevelSequence"], - package_paths=[f"/Game/OpenPype/{asset_name}"], - recursive_paths=False) - sequences = ar.get_assets(filter) - ms = sequences[0].get_editor_property('object_path') - filter = unreal.ARFilter( - class_names=["World"], - package_paths=[f"/Game/OpenPype/{asset_name}"], - recursive_paths=False) - levels = ar.get_assets(filter) - ml = levels[0].get_editor_property('object_path') - - selection = [] - if (self.options or {}).get("useSelection"): + if pre_create_data.get("use_selection"): sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() selection = [ a.get_path_name() for a in sel_objects - if a.get_class().get_name() in self.asset_types] + if a.get_class().get_name() == "LevelSequence"] else: - selection.append(self.data['sequence']) + selection = [instance_data['sequence']] - unreal.log(f"selection: {selection}") + seq_data = None - path = f"{self.root}" - unreal.EditorAssetLibrary.make_directory(path) + for sel in selection: + selected_asset = ar.get_asset_by_object_path(sel).get_asset() + selected_asset_path = selected_asset.get_path_name() - ar = unreal.AssetRegistryHelpers.get_asset_registry() + # Check if the selected asset is a level sequence asset. 
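+            # When "use_selection" is off the path comes straight from the
+            # instance data, so it may not point to a Level Sequence at all.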
+ if selected_asset.get_class().get_name() != "LevelSequence": + unreal.log_warning( + f"Skipping {selected_asset.get_name()}. It isn't a Level " + "Sequence.") - for a in selection: - ms_obj = ar.get_asset_by_object_path(ms).get_asset() + # The asset name is the the third element of the path which + # contains the map. + # To take the asset name, we remove from the path the prefix + # "/Game/OpenPype/" and then we split the path by "/". + sel_path = selected_asset_path + asset_name = sel_path.replace("/Game/OpenPype/", "").split("/")[0] - seq_data = None + # Get the master sequence and the master level. + # There should be only one sequence and one level in the directory. + ar_filter = unreal.ARFilter( + class_names=["LevelSequence"], + package_paths=[f"/Game/OpenPype/{asset_name}"], + recursive_paths=False) + sequences = ar.get_assets(ar_filter) + master_seq = sequences[0].get_asset().get_path_name() + master_seq_obj = sequences[0].get_asset() + ar_filter = unreal.ARFilter( + class_names=["World"], + package_paths=[f"/Game/OpenPype/{asset_name}"], + recursive_paths=False) + levels = ar.get_assets(ar_filter) + master_lvl = levels[0].get_asset().get_path_name() - if a == ms: - seq_data = { - "sequence": ms_obj, - "output": f"{ms_obj.get_name()}", - "frame_range": ( - ms_obj.get_playback_start(), ms_obj.get_playback_end()) - } + # If the selected asset is the master sequence, we get its data + # and then we create the instance for the master sequence. + # Otherwise, we cycle from the master sequence to find the selected + # sequence and we get its data. This data will be used to create + # the instance for the selected sequence. In particular, + # we get the frame range of the selected sequence and its final + # output path. + master_seq_data = { + "sequence": master_seq_obj, + "output": f"{master_seq_obj.get_name()}", + "frame_range": ( + master_seq_obj.get_playback_start(), + master_seq_obj.get_playback_end())} + + if selected_asset_path == master_seq: + seq_data = master_seq_data else: - seq_data_list = [{ - "sequence": ms_obj, - "output": f"{ms_obj.get_name()}", - "frame_range": ( - ms_obj.get_playback_start(), ms_obj.get_playback_end()) - }] + seq_data_list = [master_seq_data] - for s in seq_data_list: - subscenes = pipeline.get_subsequences(s.get('sequence')) + for seq in seq_data_list: + subscenes = get_subsequences(seq.get('sequence')) - for ss in subscenes: + for sub_seq in subscenes: + sub_seq_obj = sub_seq.get_sequence() curr_data = { - "sequence": ss.get_sequence(), - "output": (f"{s.get('output')}/" - f"{ss.get_sequence().get_name()}"), + "sequence": sub_seq_obj, + "output": (f"{seq.get('output')}/" + f"{sub_seq_obj.get_name()}"), "frame_range": ( - ss.get_start_frame(), ss.get_end_frame() - 1) - } + sub_seq.get_start_frame(), + sub_seq.get_end_frame() - 1)} - if ss.get_sequence().get_path_name() == a: + # If the selected asset is the current sub-sequence, + # we get its data and we break the loop. + # Otherwise, we add the current sub-sequence data to + # the list of sequences to check. + if sub_seq_obj.get_path_name() == selected_asset_path: seq_data = curr_data break + seq_data_list.append(curr_data) + # If we found the selected asset, we break the loop. if seq_data is not None: break + # If we didn't find the selected asset, we don't create the + # instance. if not seq_data: + unreal.log_warning( + f"Skipping {selected_asset.get_name()}. 
It isn't a " + "sub-sequence of the master sequence.") continue - d = self.data.copy() - d["members"] = [a] - d["sequence"] = a - d["master_sequence"] = ms - d["master_level"] = ml - d["output"] = seq_data.get('output') - d["frameStart"] = seq_data.get('frame_range')[0] - d["frameEnd"] = seq_data.get('frame_range')[1] + instance_data["members"] = [selected_asset_path] + instance_data["sequence"] = selected_asset_path + instance_data["master_sequence"] = master_seq + instance_data["master_level"] = master_lvl + instance_data["output"] = seq_data.get('output') + instance_data["frameStart"] = seq_data.get('frame_range')[0] + instance_data["frameEnd"] = seq_data.get('frame_range')[1] - container_name = f"{subset}{self.suffix}" - pipeline.create_publish_instance( - instance=container_name, path=path) - pipeline.imprint(f"{path}/{container_name}", d) + super(CreateRender, self).create( + subset_name, + instance_data, + pre_create_data) From 575eb50c03e02227e2c9dedf8fc7c2a32f558c85 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 30 Jan 2023 11:17:21 +0000 Subject: [PATCH 060/187] Hound fixes --- openpype/hosts/unreal/api/plugin.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/unreal/api/plugin.py b/openpype/hosts/unreal/api/plugin.py index 6a561420fa..71ce0c18a7 100644 --- a/openpype/hosts/unreal/api/plugin.py +++ b/openpype/hosts/unreal/api/plugin.py @@ -104,7 +104,7 @@ class UnrealBaseCreator(Creator): # cache instances if missing self.cache_subsets(self.collection_shared_data) for instance in self.collection_shared_data[ - "unreal_cached_subsets"].get(self.identifier, []): + "unreal_cached_subsets"].get(self.identifier, []): created_instance = CreatedInstance.from_existing(instance, self) self._add_instance_to_context(created_instance) @@ -162,7 +162,8 @@ class UnrealAssetCreator(UnrealBaseCreator): selection = [] if pre_create_data.get("use_selection"): - sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() + utility_lib = unreal.EditorUtilityLibrary + sel_objects = utility_lib.get_selected_assets() selection = [a.get_path_name() for a in sel_objects] instance_data["members"] = selection @@ -211,7 +212,8 @@ class UnrealActorCreator(UnrealBaseCreator): selection = [] if pre_create_data.get("use_selection"): - sel_objects = unreal.EditorUtilityLibrary.get_selected_actors() + utility_lib = unreal.EditorUtilityLibrary + sel_objects = utility_lib.get_selected_assets() selection = [a.get_path_name() for a in sel_objects] instance_data["members"] = selection From af2737a99f608ef6598d54ae8d098a3509a6223b Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 31 Jan 2023 16:05:01 +0000 Subject: [PATCH 061/187] Collect instances is no longer needed with the new publisher --- .../plugins/publish/collect_instances.py | 67 ------------------- 1 file changed, 67 deletions(-) delete mode 100644 openpype/hosts/unreal/plugins/publish/collect_instances.py diff --git a/openpype/hosts/unreal/plugins/publish/collect_instances.py b/openpype/hosts/unreal/plugins/publish/collect_instances.py deleted file mode 100644 index 27b711cad6..0000000000 --- a/openpype/hosts/unreal/plugins/publish/collect_instances.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- -"""Collect publishable instances in Unreal.""" -import ast -import unreal # noqa -import pyblish.api -from openpype.hosts.unreal.api.pipeline import UNREAL_VERSION -from openpype.pipeline.publish import KnownPublishError - - -class CollectInstances(pyblish.api.ContextPlugin): - """Gather 
instances by OpenPypePublishInstance class
-
-    This collector finds all paths containing `OpenPypePublishInstance` class
-    asset
-
-    Identifier:
-        id (str): "pyblish.avalon.instance"
-
-    """
-
-    label = "Collect Instances"
-    order = pyblish.api.CollectorOrder - 0.1
-    hosts = ["unreal"]
-
-    def process(self, context):
-
-        ar = unreal.AssetRegistryHelpers.get_asset_registry()
-        class_name = [
-            "/Script/OpenPype",
-            "OpenPypePublishInstance"
-        ] if (
-            UNREAL_VERSION.major == 5
-            and UNREAL_VERSION.minor > 0
-        ) else "OpenPypePublishInstance"  # noqa
-        instance_containers = ar.get_assets_by_class(class_name, True)
-
-        for container_data in instance_containers:
-            asset = container_data.get_asset()
-            data = unreal.EditorAssetLibrary.get_metadata_tag_values(asset)
-            data["objectName"] = container_data.asset_name
-            # convert to strings
-            data = {str(key): str(value) for (key, value) in data.items()}
-            if not data.get("family"):
-                raise KnownPublishError("instance has no family")
-
-            # content of container
-            members = ast.literal_eval(data.get("members"))
-            self.log.debug(members)
-            self.log.debug(asset.get_path_name())
-            # remove instance container
-            self.log.info("Creating instance for {}".format(asset.get_name()))
-
-            instance = context.create_instance(asset.get_name())
-            instance[:] = members
-
-            # Store the exact members of the object set
-            instance.data["setMembers"] = members
-            instance.data["families"] = [data.get("family")]
-            instance.data["level"] = data.get("level")
-            instance.data["parent"] = data.get("parent")
-
-            label = "{0} ({1})".format(asset.get_name()[:-4],
-                                       data["asset"])
-
-            instance.data["label"] = label
-
-            instance.data.update(data)

From c93fc9aad0743d4252d6bb58c33ac21365b7eac7 Mon Sep 17 00:00:00 2001
From: Simone Barbieri
Date: Thu, 2 Feb 2023 11:22:36 +0000
Subject: [PATCH 062/187] Use External Data in the Unreal Publish Instance to store members

This is not possible for all families: some families need to store actors in
a scene, and those cannot be stored in the External Data.
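
For illustration only (not part of this patch): members stored this way can
later be read back from the publish instance roughly like the sketch below,
using only calls that appear in this change set. The instance path is a
hypothetical example.

    import unreal

    ar = unreal.AssetRegistryHelpers.get_asset_registry()
    pub_instance = ar.get_asset_by_object_path(
        "/Game/OpenPype/PublishInstances/lookMain_INS.lookMain_INS"
    ).get_asset()

    members = []
    if pub_instance.get_editor_property("add_external_assets"):
        for asset in pub_instance.get_editor_property("asset_data_external"):
            members.append(asset.get_path_name())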
--- openpype/hosts/unreal/api/plugin.py | 24 ++++++--- .../unreal/plugins/create/create_look.py | 6 ++- .../publish/collect_instance_members.py | 49 +++++++++++++++++++ .../unreal/plugins/publish/extract_look.py | 4 +- .../unreal/plugins/publish/extract_uasset.py | 8 ++- 5 files changed, 78 insertions(+), 13 deletions(-) create mode 100644 openpype/hosts/unreal/plugins/publish/collect_instance_members.py diff --git a/openpype/hosts/unreal/api/plugin.py b/openpype/hosts/unreal/api/plugin.py index 71ce0c18a7..da571af9be 100644 --- a/openpype/hosts/unreal/api/plugin.py +++ b/openpype/hosts/unreal/api/plugin.py @@ -80,7 +80,7 @@ class UnrealBaseCreator(Creator): def create(self, subset_name, instance_data, pre_create_data): try: instance_name = f"{subset_name}{self.suffix}" - create_publish_instance(instance_name, self.root) + pub_instance = create_publish_instance(instance_name, self.root) instance_data["subset"] = subset_name instance_data["instance_path"] = f"{self.root}/{instance_name}" @@ -92,6 +92,15 @@ class UnrealBaseCreator(Creator): self) self._add_instance_to_context(instance) + pub_instance.set_editor_property('add_external_assets', True) + assets = pub_instance.get_editor_property('asset_data_external') + + ar = unreal.AssetRegistryHelpers.get_asset_registry() + + for member in pre_create_data.get("members", []): + obj = ar.get_asset_by_object_path(member).get_asset() + assets.add(obj) + imprint(f"{self.root}/{instance_name}", instance_data) except Exception as er: @@ -158,15 +167,14 @@ class UnrealAssetCreator(UnrealBaseCreator): try: # Check if instance data has members, filled by the plugin. # If not, use selection. - if not instance_data.get("members"): - selection = [] + if not pre_create_data.get("members"): + pre_create_data["members"] = [] if pre_create_data.get("use_selection"): - utility_lib = unreal.EditorUtilityLibrary - sel_objects = utility_lib.get_selected_assets() - selection = [a.get_path_name() for a in sel_objects] - - instance_data["members"] = selection + utilib = unreal.EditorUtilityLibrary + sel_objects = utilib.get_selected_assets() + pre_create_data["members"] = [ + a.get_path_name() for a in sel_objects] super(UnrealAssetCreator, self).create( subset_name, diff --git a/openpype/hosts/unreal/plugins/create/create_look.py b/openpype/hosts/unreal/plugins/create/create_look.py index 08d61ab9f8..047764ef2a 100644 --- a/openpype/hosts/unreal/plugins/create/create_look.py +++ b/openpype/hosts/unreal/plugins/create/create_look.py @@ -34,6 +34,8 @@ class CreateLook(UnrealAssetCreator): folder_name = create_folder(look_directory, subset_name) path = f"{look_directory}/{folder_name}" + instance_data["look"] = path + # Create a new cube static mesh ar = unreal.AssetRegistryHelpers.get_asset_registry() cube = ar.get_asset_by_object_path("/Engine/BasicShapes/Cube.Cube") @@ -42,7 +44,7 @@ class CreateLook(UnrealAssetCreator): original_mesh = ar.get_asset_by_object_path(selected_asset).get_asset() materials = original_mesh.get_editor_property('static_materials') - instance_data["members"] = [] + pre_create_data["members"] = [] # Add the materials to the cube for material in materials: @@ -58,7 +60,7 @@ class CreateLook(UnrealAssetCreator): unreal_object.add_material( material.get_editor_property('material_interface')) - instance_data["members"].append(object_path) + pre_create_data["members"].append(object_path) unreal.EditorAssetLibrary.save_asset(object_path) diff --git a/openpype/hosts/unreal/plugins/publish/collect_instance_members.py 
b/openpype/hosts/unreal/plugins/publish/collect_instance_members.py new file mode 100644 index 0000000000..74969f5033 --- /dev/null +++ b/openpype/hosts/unreal/plugins/publish/collect_instance_members.py @@ -0,0 +1,49 @@ +import unreal + +import pyblish.api + + +class CollectInstanceMembers(pyblish.api.InstancePlugin): + """ + Collect members of instance. + + This collector will collect the assets for the families that support to + have them included as External Data, and will add them to the instance + as members. + """ + + order = pyblish.api.CollectorOrder + 0.1 + hosts = ["unreal"] + families = ["look", "unrealStaticMesh", "uasset"] + label = "Collect Instance Members" + + def process(self, instance): + """Collect members of instance.""" + self.log.info("Collecting instance members") + + ar = unreal.AssetRegistryHelpers.get_asset_registry() + + inst_path = instance.data.get('instance_path') + inst_name = instance.data.get('objectName') + + pub_instance = ar.get_asset_by_object_path( + f"{inst_path}.{inst_name}").get_asset() + + if not pub_instance: + self.log.error(f"{inst_path}.{inst_name}") + raise RuntimeError(f"Instance {instance} not found.") + + if not pub_instance.get_editor_property("add_external_assets"): + # No external assets in the instance + return + + assets = pub_instance.get_editor_property('asset_data_external') + + members = [] + + for asset in assets: + members.append(asset.get_path_name()) + + self.log.debug(f"Members: {members}") + + instance.data["members"] = members diff --git a/openpype/hosts/unreal/plugins/publish/extract_look.py b/openpype/hosts/unreal/plugins/publish/extract_look.py index f999ad8651..4b32b4eb95 100644 --- a/openpype/hosts/unreal/plugins/publish/extract_look.py +++ b/openpype/hosts/unreal/plugins/publish/extract_look.py @@ -29,13 +29,13 @@ class ExtractLook(publish.Extractor): for member in instance: asset = ar.get_asset_by_object_path(member) - object = asset.get_asset() + obj = asset.get_asset() name = asset.get_editor_property('asset_name') json_element = {'material': str(name)} - material_obj = object.get_editor_property('static_materials')[0] + material_obj = obj.get_editor_property('static_materials')[0] material = material_obj.material_interface base_color = mat_lib.get_material_property_input_node( diff --git a/openpype/hosts/unreal/plugins/publish/extract_uasset.py b/openpype/hosts/unreal/plugins/publish/extract_uasset.py index 89d779d368..f719df2a82 100644 --- a/openpype/hosts/unreal/plugins/publish/extract_uasset.py +++ b/openpype/hosts/unreal/plugins/publish/extract_uasset.py @@ -22,7 +22,13 @@ class ExtractUAsset(publish.Extractor): staging_dir = self.staging_dir(instance) filename = "{}.uasset".format(instance.name) - obj = instance[0] + members = instance.data.get("members", []) + + if not members: + raise RuntimeError("No members found in instance.") + + # UAsset publishing supports only one member + obj = members[0] asset = ar.get_asset_by_object_path(obj).get_asset() sys_path = unreal.SystemLibrary.get_system_path(asset) From 20227c686d5339968b3f1e3c4fc8119b0dd8a8df Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 2 Feb 2023 12:18:03 +0000 Subject: [PATCH 063/187] Improved attributes for the creators --- openpype/hosts/unreal/api/plugin.py | 20 +++++++++++++------ .../unreal/plugins/create/create_render.py | 6 ++++++ 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/unreal/api/plugin.py b/openpype/hosts/unreal/api/plugin.py index da571af9be..7121aea20b 100644 --- 
a/openpype/hosts/unreal/api/plugin.py +++ b/openpype/hosts/unreal/api/plugin.py @@ -14,7 +14,10 @@ from .pipeline import ( lsinst, UNREAL_VERSION ) -from openpype.lib import BoolDef +from openpype.lib import ( + BoolDef, + UILabelDef +) from openpype.pipeline import ( Creator, LoaderPlugin, @@ -143,11 +146,6 @@ class UnrealBaseCreator(Creator): self._remove_instance_from_context(instance) - def get_pre_create_attr_defs(self): - return [ - BoolDef("use_selection", label="Use selection", default=True) - ] - @six.add_metaclass(ABCMeta) class UnrealAssetCreator(UnrealBaseCreator): @@ -187,6 +185,11 @@ class UnrealAssetCreator(UnrealBaseCreator): OpenPypeCreatorError(f"Creator error: {er}"), sys.exc_info()[2]) + def get_pre_create_attr_defs(self): + return [ + BoolDef("use_selection", label="Use selection", default=True) + ] + @six.add_metaclass(ABCMeta) class UnrealActorCreator(UnrealBaseCreator): @@ -239,6 +242,11 @@ class UnrealActorCreator(UnrealBaseCreator): OpenPypeCreatorError(f"Creator error: {er}"), sys.exc_info()[2]) + def get_pre_create_attr_defs(self): + return [ + UILabelDef("Select actors to create instance from them.") + ] + class Loader(LoaderPlugin, ABC): """This serves as skeleton for future OpenPype specific functionality""" diff --git a/openpype/hosts/unreal/plugins/create/create_render.py b/openpype/hosts/unreal/plugins/create/create_render.py index de3efdad74..8100a5016c 100644 --- a/openpype/hosts/unreal/plugins/create/create_render.py +++ b/openpype/hosts/unreal/plugins/create/create_render.py @@ -7,6 +7,7 @@ from openpype.hosts.unreal.api.pipeline import ( from openpype.hosts.unreal.api.plugin import ( UnrealAssetCreator, ) +from openpype.lib import UILabelDef class CreateRender(UnrealAssetCreator): @@ -129,3 +130,8 @@ class CreateRender(UnrealAssetCreator): subset_name, instance_data, pre_create_data) + + def get_pre_create_attr_defs(self): + return [ + UILabelDef("Select the sequence to render.") + ] From 65e08973fe423c5f456a5a9654fc59d711e06adb Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 2 Feb 2023 16:15:03 +0000 Subject: [PATCH 064/187] Fix render creator problem with selection --- .../hosts/unreal/plugins/create/create_render.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/unreal/plugins/create/create_render.py b/openpype/hosts/unreal/plugins/create/create_render.py index 8100a5016c..a1e3e43a78 100644 --- a/openpype/hosts/unreal/plugins/create/create_render.py +++ b/openpype/hosts/unreal/plugins/create/create_render.py @@ -6,6 +6,7 @@ from openpype.hosts.unreal.api.pipeline import ( ) from openpype.hosts.unreal.api.plugin import ( UnrealAssetCreator, + OpenPypeCreatorError ) from openpype.lib import UILabelDef @@ -21,13 +22,13 @@ class CreateRender(UnrealAssetCreator): def create(self, subset_name, instance_data, pre_create_data): ar = unreal.AssetRegistryHelpers.get_asset_registry() - if pre_create_data.get("use_selection"): - sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() - selection = [ - a.get_path_name() for a in sel_objects - if a.get_class().get_name() == "LevelSequence"] - else: - selection = [instance_data['sequence']] + sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() + selection = [ + a.get_path_name() for a in sel_objects + if a.get_class().get_name() == "LevelSequence"] + + if len(selection) == 0: + raise RuntimeError("Please select at least one Level Sequence.") seq_data = None From 106f9ca2bb750ebed02016264e3f46b199aa494f Mon Sep 17 00:00:00 2001 From: 
Simone Barbieri Date: Thu, 2 Feb 2023 16:17:23 +0000 Subject: [PATCH 065/187] Hound fixes --- openpype/hosts/unreal/plugins/create/create_render.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/unreal/plugins/create/create_render.py b/openpype/hosts/unreal/plugins/create/create_render.py index a1e3e43a78..c957e50e29 100644 --- a/openpype/hosts/unreal/plugins/create/create_render.py +++ b/openpype/hosts/unreal/plugins/create/create_render.py @@ -5,8 +5,7 @@ from openpype.hosts.unreal.api.pipeline import ( get_subsequences ) from openpype.hosts.unreal.api.plugin import ( - UnrealAssetCreator, - OpenPypeCreatorError + UnrealAssetCreator ) from openpype.lib import UILabelDef From 8e30e565fdefb3a567567cd9651182eb0da2f68d Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 15 Feb 2023 11:41:57 +0000 Subject: [PATCH 066/187] Implemented suggestions from review --- openpype/hosts/unreal/api/pipeline.py | 3 - openpype/hosts/unreal/api/plugin.py | 69 +++++++------------ .../unreal/plugins/create/create_camera.py | 2 +- .../unreal/plugins/create/create_look.py | 14 ++-- 4 files changed, 37 insertions(+), 51 deletions(-) diff --git a/openpype/hosts/unreal/api/pipeline.py b/openpype/hosts/unreal/api/pipeline.py index 7a21effcbc..0fe8c02ec5 100644 --- a/openpype/hosts/unreal/api/pipeline.py +++ b/openpype/hosts/unreal/api/pipeline.py @@ -69,9 +69,6 @@ class UnrealHost(HostBase, ILoadHost, IPublishHost): op_ctx = content_path + CONTEXT_CONTAINER with open(op_ctx, "w+") as f: json.dump(data, f) - with open(op_ctx, "r") as fp: - test = eval(json.load(fp)) - unreal.log_warning(test) def get_context_data(self): content_path = unreal.Paths.project_content_dir() diff --git a/openpype/hosts/unreal/api/plugin.py b/openpype/hosts/unreal/api/plugin.py index 7121aea20b..fc724105b6 100644 --- a/openpype/hosts/unreal/api/plugin.py +++ b/openpype/hosts/unreal/api/plugin.py @@ -1,10 +1,8 @@ # -*- coding: utf-8 -*- +import collections import sys import six -from abc import ( - ABC, - ABCMeta, -) +from abc import ABC import unreal @@ -26,11 +24,6 @@ from openpype.pipeline import ( ) -class OpenPypeCreatorError(CreatorError): - pass - - -@six.add_metaclass(ABCMeta) class UnrealBaseCreator(Creator): """Base class for Unreal creator plugins.""" root = "/Game/OpenPype/PublishInstances" @@ -56,28 +49,20 @@ class UnrealBaseCreator(Creator): """ if shared_data.get("unreal_cached_subsets") is None: - shared_data["unreal_cached_subsets"] = {} - if shared_data.get("unreal_cached_legacy_subsets") is None: - shared_data["unreal_cached_legacy_subsets"] = {} - cached_instances = lsinst() - for i in cached_instances: - if not i.get("creator_identifier"): - # we have legacy instance - family = i.get("family") - if (family not in - shared_data["unreal_cached_legacy_subsets"]): - shared_data[ - "unreal_cached_legacy_subsets"][family] = [i] - else: - shared_data[ - "unreal_cached_legacy_subsets"][family].append(i) - continue - - creator_id = i.get("creator_identifier") - if creator_id not in shared_data["unreal_cached_subsets"]: - shared_data["unreal_cached_subsets"][creator_id] = [i] + unreal_cached_subsets = collections.defaultdict(list) + unreal_cached_legacy_subsets = collections.defaultdict(list) + for instance in lsinst(): + creator_id = instance.get("creator_identifier") + if creator_id: + unreal_cached_subsets[creator_id].append(instance) else: - shared_data["unreal_cached_subsets"][creator_id].append(i) + family = instance.get("family") + 
unreal_cached_legacy_subsets[family].append(instance) + + shared_data["unreal_cached_subsets"] = unreal_cached_subsets + shared_data["unreal_cached_legacy_subsets"] = ( + unreal_cached_legacy_subsets + ) return shared_data def create(self, subset_name, instance_data, pre_create_data): @@ -108,8 +93,8 @@ class UnrealBaseCreator(Creator): except Exception as er: six.reraise( - OpenPypeCreatorError, - OpenPypeCreatorError(f"Creator error: {er}"), + CreatorError, + CreatorError(f"Creator error: {er}"), sys.exc_info()[2]) def collect_instances(self): @@ -121,17 +106,17 @@ class UnrealBaseCreator(Creator): self._add_instance_to_context(created_instance) def update_instances(self, update_list): - unreal.log_warning(f"Update instances: {update_list}") - for created_inst, _changes in update_list: + for created_inst, changes in update_list: instance_node = created_inst.get("instance_path", "") if not instance_node: unreal.log_warning( f"Instance node not found for {created_inst}") + continue new_values = { - key: new_value - for key, (_old_value, new_value) in _changes.items() + key: changes[key].new_value + for key in changes.changed_keys } imprint( instance_node, @@ -147,7 +132,6 @@ class UnrealBaseCreator(Creator): self._remove_instance_from_context(instance) -@six.add_metaclass(ABCMeta) class UnrealAssetCreator(UnrealBaseCreator): """Base class for Unreal creator plugins based on assets.""" @@ -181,8 +165,8 @@ class UnrealAssetCreator(UnrealBaseCreator): except Exception as er: six.reraise( - OpenPypeCreatorError, - OpenPypeCreatorError(f"Creator error: {er}"), + CreatorError, + CreatorError(f"Creator error: {er}"), sys.exc_info()[2]) def get_pre_create_attr_defs(self): @@ -191,7 +175,6 @@ class UnrealAssetCreator(UnrealBaseCreator): ] -@six.add_metaclass(ABCMeta) class UnrealActorCreator(UnrealBaseCreator): """Base class for Unreal creator plugins based on actors.""" @@ -214,7 +197,7 @@ class UnrealActorCreator(UnrealBaseCreator): # Check if the level is saved if world.get_path_name().startswith("/Temp/"): - raise OpenPypeCreatorError( + raise CreatorError( "Level must be saved before creating instances.") # Check if instance data has members, filled by the plugin. 
@@ -238,8 +221,8 @@ class UnrealActorCreator(UnrealBaseCreator): except Exception as er: six.reraise( - OpenPypeCreatorError, - OpenPypeCreatorError(f"Creator error: {er}"), + CreatorError, + CreatorError(f"Creator error: {er}"), sys.exc_info()[2]) def get_pre_create_attr_defs(self): diff --git a/openpype/hosts/unreal/plugins/create/create_camera.py b/openpype/hosts/unreal/plugins/create/create_camera.py index 239dc87db5..00815e1ed4 100644 --- a/openpype/hosts/unreal/plugins/create/create_camera.py +++ b/openpype/hosts/unreal/plugins/create/create_camera.py @@ -10,4 +10,4 @@ class CreateCamera(UnrealActorCreator): identifier = "io.openpype.creators.unreal.camera" label = "Camera" family = "camera" - icon = "camera" + icon = "fa.camera" diff --git a/openpype/hosts/unreal/plugins/create/create_look.py b/openpype/hosts/unreal/plugins/create/create_look.py index 047764ef2a..cecb88bca3 100644 --- a/openpype/hosts/unreal/plugins/create/create_look.py +++ b/openpype/hosts/unreal/plugins/create/create_look.py @@ -7,6 +7,7 @@ from openpype.hosts.unreal.api.pipeline import ( from openpype.hosts.unreal.api.plugin import ( UnrealAssetCreator ) +from openpype.lib import UILabelDef class CreateLook(UnrealAssetCreator): @@ -18,10 +19,10 @@ class CreateLook(UnrealAssetCreator): icon = "paint-brush" def create(self, subset_name, instance_data, pre_create_data): - selection = [] - if pre_create_data.get("use_selection"): - sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() - selection = [a.get_path_name() for a in sel_objects] + # We need to set this to True for the parent class to work + pre_create_data["use_selection"] = True + sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() + selection = [a.get_path_name() for a in sel_objects] if len(selection) != 1: raise RuntimeError("Please select only one asset.") @@ -68,3 +69,8 @@ class CreateLook(UnrealAssetCreator): subset_name, instance_data, pre_create_data) + + def get_pre_create_attr_defs(self): + return [ + UILabelDef("Select the asset from which to create the look.") + ] From fa3a7419409598ba0b3b2c9cb42d1c42be20822b Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 15 Feb 2023 11:45:30 +0000 Subject: [PATCH 067/187] Fixed problem with the instance metadata --- openpype/hosts/unreal/api/pipeline.py | 2 +- openpype/hosts/unreal/api/plugin.py | 10 +++++++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/unreal/api/pipeline.py b/openpype/hosts/unreal/api/pipeline.py index 0fe8c02ec5..0810ec7c07 100644 --- a/openpype/hosts/unreal/api/pipeline.py +++ b/openpype/hosts/unreal/api/pipeline.py @@ -76,7 +76,7 @@ class UnrealHost(HostBase, ILoadHost, IPublishHost): if not os.path.isfile(op_ctx): return {} with open(op_ctx, "r") as fp: - data = eval(json.load(fp)) + data = json.load(fp) return data diff --git a/openpype/hosts/unreal/api/plugin.py b/openpype/hosts/unreal/api/plugin.py index fc724105b6..a852ed9bb1 100644 --- a/openpype/hosts/unreal/api/plugin.py +++ b/openpype/hosts/unreal/api/plugin.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +import ast import collections import sys import six @@ -89,7 +90,9 @@ class UnrealBaseCreator(Creator): obj = ar.get_asset_by_object_path(member).get_asset() assets.add(obj) - imprint(f"{self.root}/{instance_name}", instance_data) + imprint(f"{self.root}/{instance_name}", instance.data_to_store()) + + return instance except Exception as er: six.reraise( @@ -102,6 +105,11 @@ class UnrealBaseCreator(Creator): self.cache_subsets(self.collection_shared_data) for 
instance in self.collection_shared_data[ "unreal_cached_subsets"].get(self.identifier, []): + # Unreal saves metadata as string, so we need to convert it back + instance['creator_attributes'] = ast.literal_eval( + instance.get('creator_attributes', '{}')) + instance['publish_attributes'] = ast.literal_eval( + instance.get('publish_attributes', '{}')) created_instance = CreatedInstance.from_existing(instance, self) self._add_instance_to_context(created_instance) From 614bcb320c3a6bde5e717000065b5c17088ccdc6 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 15 Feb 2023 16:56:21 +0000 Subject: [PATCH 068/187] Creator allows to create a new level sequence with render instance --- .../unreal/plugins/create/create_render.py | 126 +++++++++++++++--- 1 file changed, 111 insertions(+), 15 deletions(-) diff --git a/openpype/hosts/unreal/plugins/create/create_render.py b/openpype/hosts/unreal/plugins/create/create_render.py index c957e50e29..bc39b43802 100644 --- a/openpype/hosts/unreal/plugins/create/create_render.py +++ b/openpype/hosts/unreal/plugins/create/create_render.py @@ -2,12 +2,17 @@ import unreal from openpype.hosts.unreal.api.pipeline import ( - get_subsequences + UNREAL_VERSION, + create_folder, + get_subsequences, ) from openpype.hosts.unreal.api.plugin import ( UnrealAssetCreator ) -from openpype.lib import UILabelDef +from openpype.lib import ( + BoolDef, + UILabelDef +) class CreateRender(UnrealAssetCreator): @@ -18,7 +23,88 @@ class CreateRender(UnrealAssetCreator): family = "render" icon = "eye" - def create(self, subset_name, instance_data, pre_create_data): + def create_instance( + self, instance_data, subset_name, pre_create_data, + selected_asset_path, master_seq, master_lvl, seq_data + ): + instance_data["members"] = [selected_asset_path] + instance_data["sequence"] = selected_asset_path + instance_data["master_sequence"] = master_seq + instance_data["master_level"] = master_lvl + instance_data["output"] = seq_data.get('output') + instance_data["frameStart"] = seq_data.get('frame_range')[0] + instance_data["frameEnd"] = seq_data.get('frame_range')[1] + + super(CreateRender, self).create( + subset_name, + instance_data, + pre_create_data) + + def create_with_new_sequence( + self, subset_name, instance_data, pre_create_data + ): + # If the option to create a new level sequence is selected, + # create a new level sequence and a master level. + + root = f"/Game/OpenPype/Sequences" + + # Create a new folder for the sequence in root + sequence_dir_name = create_folder(root, subset_name) + sequence_dir = f"{root}/{sequence_dir_name}" + + unreal.log_warning(f"sequence_dir: {sequence_dir}") + + # Create the level sequence + asset_tools = unreal.AssetToolsHelpers.get_asset_tools() + seq = asset_tools.create_asset( + asset_name=subset_name, + package_path=sequence_dir, + asset_class=unreal.LevelSequence, + factory=unreal.LevelSequenceFactoryNew()) + unreal.EditorAssetLibrary.save_asset(seq.get_path_name()) + + # Create the master level + prev_level = None + if UNREAL_VERSION.major >= 5: + curr_level = unreal.LevelEditorSubsystem().get_current_level() + else: + world = unreal.EditorLevelLibrary.get_editor_world() + levels = unreal.EditorLevelUtils.get_levels(world) + curr_level = levels[0] if len(levels) else None + if not curr_level: + raise RuntimeError("No level loaded.") + curr_level_path = curr_level.get_outer().get_path_name() + + # If the level path does not start with "/Game/", the current + # level is a temporary, unsaved level. 
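Both this creator and the render submission later in the series branch on UNREAL_VERSION.major, because level operations moved from unreal.EditorLevelLibrary to the LevelEditorSubsystem in Unreal 5. A small wrapper sketch of the same branch; the two engine calls are copied from this hunk, only the wrapper function itself is hypothetical:

    import unreal

    from openpype.hosts.unreal.api.pipeline import UNREAL_VERSION


    def save_current_level():
        """Save the loaded level with whichever editor API this engine exposes."""
        if UNREAL_VERSION.major >= 5:
            # Unreal 5: level operations live on an editor subsystem.
            unreal.LevelEditorSubsystem().save_current_level()
        else:
            # Unreal 4 fallback kept for older engine versions.
            unreal.EditorLevelLibrary.save_current_level()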
+ if curr_level_path.startswith("/Game/"): + prev_level = curr_level_path + if UNREAL_VERSION.major >= 5: + unreal.LevelEditorSubsystem().save_current_level() + else: + unreal.EditorLevelLibrary.save_current_level() + + ml_path = f"{sequence_dir}/{subset_name}_MasterLevel" + + if UNREAL_VERSION.major >= 5: + unreal.LevelEditorSubsystem().new_level(ml_path) + else: + unreal.EditorLevelLibrary.new_level(ml_path) + + seq_data = { + "sequence": seq, + "output": f"{seq.get_name()}", + "frame_range": ( + seq.get_playback_start(), + seq.get_playback_end())} + + self.create_instance( + instance_data, subset_name, pre_create_data, + seq.get_path_name(), seq.get_path_name(), ml_path, seq_data) + + def create_from_existing_sequence( + self, subset_name, instance_data, pre_create_data + ): ar = unreal.AssetRegistryHelpers.get_asset_registry() sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() @@ -118,20 +204,30 @@ class CreateRender(UnrealAssetCreator): "sub-sequence of the master sequence.") continue - instance_data["members"] = [selected_asset_path] - instance_data["sequence"] = selected_asset_path - instance_data["master_sequence"] = master_seq - instance_data["master_level"] = master_lvl - instance_data["output"] = seq_data.get('output') - instance_data["frameStart"] = seq_data.get('frame_range')[0] - instance_data["frameEnd"] = seq_data.get('frame_range')[1] + self.create_instance( + instance_data, subset_name, pre_create_data, + selected_asset_path, master_seq, master_lvl, seq_data) - super(CreateRender, self).create( - subset_name, - instance_data, - pre_create_data) + def create(self, subset_name, instance_data, pre_create_data): + if pre_create_data.get("create_seq"): + self.create_with_new_sequence( + subset_name, instance_data, pre_create_data) + else: + self.create_from_existing_sequence( + subset_name, instance_data, pre_create_data) def get_pre_create_attr_defs(self): return [ - UILabelDef("Select the sequence to render.") + UILabelDef( + "Select a Level Sequence to render or create a new one." + ), + BoolDef( + "create_seq", + label="Create a new Level Sequence", + default=False + ), + UILabelDef( + "WARNING: If you create a new Level Sequence, the current " + "level will be saved and a new Master Level will be created." 
+ ) ] From f94cae429e0cb7b056153211adec7fa7813b28f8 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 16 Feb 2023 11:46:10 +0000 Subject: [PATCH 069/187] Allow the user to set frame range of new sequence --- .../unreal/plugins/create/create_render.py | 22 +++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/unreal/plugins/create/create_render.py b/openpype/hosts/unreal/plugins/create/create_render.py index bc39b43802..b999f9ae20 100644 --- a/openpype/hosts/unreal/plugins/create/create_render.py +++ b/openpype/hosts/unreal/plugins/create/create_render.py @@ -11,7 +11,7 @@ from openpype.hosts.unreal.api.plugin import ( ) from openpype.lib import ( BoolDef, - UILabelDef + NumberDef ) @@ -61,6 +61,10 @@ class CreateRender(UnrealAssetCreator): package_path=sequence_dir, asset_class=unreal.LevelSequence, factory=unreal.LevelSequenceFactoryNew()) + + seq.set_playback_start(pre_create_data.get("start_frame")) + seq.set_playback_end(pre_create_data.get("end_frame")) + unreal.EditorAssetLibrary.save_asset(seq.get_path_name()) # Create the master level @@ -229,5 +233,19 @@ class CreateRender(UnrealAssetCreator): UILabelDef( "WARNING: If you create a new Level Sequence, the current " "level will be saved and a new Master Level will be created." - ) + ), + NumberDef( + "start_frame", + label="Start Frame", + default=0, + minimum=-999999, + maximum=999999 + ), + NumberDef( + "end_frame", + label="Start Frame", + default=150, + minimum=-999999, + maximum=999999 + ), ] From e2ea7fad1a7f4d0aaec178dd18e76ffa18e3f3af Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 16 Feb 2023 11:47:26 +0000 Subject: [PATCH 070/187] Added option to not include hierarchy when creating a render instance --- .../unreal/plugins/create/create_render.py | 43 +++++++++++++------ 1 file changed, 31 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/unreal/plugins/create/create_render.py b/openpype/hosts/unreal/plugins/create/create_render.py index b999f9ae20..6f2049693f 100644 --- a/openpype/hosts/unreal/plugins/create/create_render.py +++ b/openpype/hosts/unreal/plugins/create/create_render.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- +from pathlib import Path + import unreal from openpype.hosts.unreal.api.pipeline import ( @@ -10,6 +12,8 @@ from openpype.hosts.unreal.api.plugin import ( UnrealAssetCreator ) from openpype.lib import ( + UILabelDef, + UISeparatorDef, BoolDef, NumberDef ) @@ -68,7 +72,6 @@ class CreateRender(UnrealAssetCreator): unreal.EditorAssetLibrary.save_asset(seq.get_path_name()) # Create the master level - prev_level = None if UNREAL_VERSION.major >= 5: curr_level = unreal.LevelEditorSubsystem().get_current_level() else: @@ -82,7 +85,6 @@ class CreateRender(UnrealAssetCreator): # If the level path does not start with "/Game/", the current # level is a temporary, unsaved level. if curr_level_path.startswith("/Game/"): - prev_level = curr_level_path if UNREAL_VERSION.major >= 5: unreal.LevelEditorSubsystem().save_current_level() else: @@ -131,25 +133,31 @@ class CreateRender(UnrealAssetCreator): f"Skipping {selected_asset.get_name()}. It isn't a Level " "Sequence.") - # The asset name is the the third element of the path which - # contains the map. - # To take the asset name, we remove from the path the prefix - # "/Game/OpenPype/" and then we split the path by "/". 
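The path handling in this hunk derives a search folder from the selected sequence path: either the asset folder directly under /Game/OpenPype/ (when "Use Hierarchy" is on) or just the folder next to the selection. A standalone, pure-Python illustration of both branches; the example path is invented:

    from pathlib import Path

    selected = "/Game/OpenPype/sh010/sh010_layout/sh010_layout_camera"

    # "Use Hierarchy": keep only the asset folder directly under /Game/OpenPype/.
    asset_name = selected.replace("/Game/OpenPype/", "").split("/")[0]
    hierarchy_search_path = f"/Game/OpenPype/{asset_name}"
    # -> "/Game/OpenPype/sh010"

    # Otherwise: search only next to the selected sequence.
    local_search_path = Path(selected).parent.as_posix()
    # -> "/Game/OpenPype/sh010/sh010_layout"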
- sel_path = selected_asset_path - asset_name = sel_path.replace("/Game/OpenPype/", "").split("/")[0] + if pre_create_data.get("use_hierarchy"): + # The asset name is the the third element of the path which + # contains the map. + # To take the asset name, we remove from the path the prefix + # "/Game/OpenPype/" and then we split the path by "/". + sel_path = selected_asset_path + asset_name = sel_path.replace( + "/Game/OpenPype/", "").split("/")[0] + + search_path = f"/Game/OpenPype/{asset_name}" + else: + search_path = Path(selected_asset_path).parent.as_posix() # Get the master sequence and the master level. # There should be only one sequence and one level in the directory. ar_filter = unreal.ARFilter( class_names=["LevelSequence"], - package_paths=[f"/Game/OpenPype/{asset_name}"], + package_paths=[search_path], recursive_paths=False) sequences = ar.get_assets(ar_filter) master_seq = sequences[0].get_asset().get_path_name() master_seq_obj = sequences[0].get_asset() ar_filter = unreal.ARFilter( class_names=["World"], - package_paths=[f"/Game/OpenPype/{asset_name}"], + package_paths=[search_path], recursive_paths=False) levels = ar.get_assets(ar_filter) master_lvl = levels[0].get_asset().get_path_name() @@ -168,7 +176,8 @@ class CreateRender(UnrealAssetCreator): master_seq_obj.get_playback_start(), master_seq_obj.get_playback_end())} - if selected_asset_path == master_seq: + if (selected_asset_path == master_seq or + pre_create_data.get("use_hierarchy")): seq_data = master_seq_data else: seq_data_list = [master_seq_data] @@ -231,7 +240,7 @@ class CreateRender(UnrealAssetCreator): default=False ), UILabelDef( - "WARNING: If you create a new Level Sequence, the current " + "WARNING: If you create a new Level Sequence, the current\n" "level will be saved and a new Master Level will be created." ), NumberDef( @@ -248,4 +257,14 @@ class CreateRender(UnrealAssetCreator): minimum=-999999, maximum=999999 ), + UISeparatorDef(), + UILabelDef( + "The following settings are valid only if you are not\n" + "creating a new sequence." + ), + BoolDef( + "use_hierarchy", + label="Use Hierarchy", + default=False + ), ] From d2403bcbdace79d8e645b6dbd68e439dbb144e03 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 16 Feb 2023 12:00:48 +0000 Subject: [PATCH 071/187] Hanldes IndexError when looking for hierarchy for selected sequence --- .../unreal/plugins/create/create_render.py | 30 +++++++++++-------- 1 file changed, 17 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/unreal/plugins/create/create_render.py b/openpype/hosts/unreal/plugins/create/create_render.py index 6f2049693f..b2a246d3a8 100644 --- a/openpype/hosts/unreal/plugins/create/create_render.py +++ b/openpype/hosts/unreal/plugins/create/create_render.py @@ -148,19 +148,23 @@ class CreateRender(UnrealAssetCreator): # Get the master sequence and the master level. # There should be only one sequence and one level in the directory. 
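Because the asset-registry queries here can legitimately come back empty (nothing published yet, or a sequence stored outside the expected folder), indexing the first result is what raises the IndexError this patch now handles. A hedged alternative that guards explicitly instead of catching, assuming the container returned by get_assets() is list-like, as the existing code already treats it:

    import unreal


    def first_asset(asset_registry, class_name, package_path):
        """Return the first asset of class_name under package_path, or None."""
        ar_filter = unreal.ARFilter(
            class_names=[class_name],
            package_paths=[package_path],
            recursive_paths=False)
        assets = asset_registry.get_assets(ar_filter)
        if not assets:
            # Empty query result: let the caller decide how to report it.
            return None
        return assets[0].get_asset()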
- ar_filter = unreal.ARFilter( - class_names=["LevelSequence"], - package_paths=[search_path], - recursive_paths=False) - sequences = ar.get_assets(ar_filter) - master_seq = sequences[0].get_asset().get_path_name() - master_seq_obj = sequences[0].get_asset() - ar_filter = unreal.ARFilter( - class_names=["World"], - package_paths=[search_path], - recursive_paths=False) - levels = ar.get_assets(ar_filter) - master_lvl = levels[0].get_asset().get_path_name() + try: + ar_filter = unreal.ARFilter( + class_names=["LevelSequence"], + package_paths=[search_path], + recursive_paths=False) + sequences = ar.get_assets(ar_filter) + master_seq = sequences[0].get_asset().get_path_name() + master_seq_obj = sequences[0].get_asset() + ar_filter = unreal.ARFilter( + class_names=["World"], + package_paths=[search_path], + recursive_paths=False) + levels = ar.get_assets(ar_filter) + master_lvl = levels[0].get_asset().get_path_name() + except IndexError: + raise RuntimeError( + f"Could not find the hierarchy for the selected sequence.") # If the selected asset is the master sequence, we get its data # and then we create the instance for the master sequence. From a31b6035fe81ff0fe71b335fbd96e6c6f8e5ab9e Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 2 Mar 2023 17:18:05 +0800 Subject: [PATCH 072/187] add model creator, extractors and loaders --- .../hosts/max/plugins/create/create_model.py | 26 +++++ .../hosts/max/plugins/load/load_max_scene.py | 3 +- openpype/hosts/max/plugins/load/load_model.py | 98 +++++++++++++++++++ .../hosts/max/plugins/load/load_pointcache.py | 3 +- .../plugins/publish/extract_max_scene_raw.py | 3 +- .../max/plugins/publish/extract_model.py | 74 ++++++++++++++ 6 files changed, 203 insertions(+), 4 deletions(-) create mode 100644 openpype/hosts/max/plugins/create/create_model.py create mode 100644 openpype/hosts/max/plugins/load/load_model.py create mode 100644 openpype/hosts/max/plugins/publish/extract_model.py diff --git a/openpype/hosts/max/plugins/create/create_model.py b/openpype/hosts/max/plugins/create/create_model.py new file mode 100644 index 0000000000..a78a30e0c7 --- /dev/null +++ b/openpype/hosts/max/plugins/create/create_model.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for model.""" +from openpype.hosts.max.api import plugin +from openpype.pipeline import CreatedInstance + + +class CreateModel(plugin.MaxCreator): + identifier = "io.openpype.creators.max.model" + label = "Model" + family = "model" + icon = "gear" + + def create(self, subset_name, instance_data, pre_create_data): + from pymxs import runtime as rt + sel_obj = list(rt.selection) + instance = super(CreateModel, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance + container = rt.getNodeByName(instance.data.get("instance_node")) + # TODO: Disable "Add to Containers?" 
Panel + # parent the selected cameras into the container + for obj in sel_obj: + obj.parent = container + # for additional work on the node: + # instance_node = rt.getNodeByName(instance.get("instance_node")) diff --git a/openpype/hosts/max/plugins/load/load_max_scene.py b/openpype/hosts/max/plugins/load/load_max_scene.py index b863b9363f..d37d3439fb 100644 --- a/openpype/hosts/max/plugins/load/load_max_scene.py +++ b/openpype/hosts/max/plugins/load/load_max_scene.py @@ -9,7 +9,8 @@ from openpype.hosts.max.api import lib class MaxSceneLoader(load.LoaderPlugin): """Max Scene Loader""" - families = ["camera"] + families = ["camera", + "model"] representations = ["max"] order = -8 icon = "code-fork" diff --git a/openpype/hosts/max/plugins/load/load_model.py b/openpype/hosts/max/plugins/load/load_model.py new file mode 100644 index 0000000000..e6262b4f86 --- /dev/null +++ b/openpype/hosts/max/plugins/load/load_model.py @@ -0,0 +1,98 @@ + +import os +from openpype.pipeline import ( + load, get_representation_path +) +from openpype.hosts.max.api.pipeline import containerise +from openpype.hosts.max.api import lib + + +class ModelAbcLoader(load.LoaderPlugin): + """Loading model with the Alembic loader.""" + + families = ["model"] + label = "Load Model(Alembic)" + representations = ["abc"] + order = -10 + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, namespace=None, data=None): + from pymxs import runtime as rt + + file_path = os.path.normpath(self.fname) + + abc_before = { + c for c in rt.rootNode.Children + if rt.classOf(c) == rt.AlembicContainer + } + + abc_import_cmd = (f""" +AlembicImport.ImportToRoot = false +AlembicImport.CustomAttributes = true +AlembicImport.UVs = true +AlembicImport.VertexColors = true + +importFile @"{file_path}" #noPrompt + """) + + self.log.debug(f"Executing command: {abc_import_cmd}") + rt.execute(abc_import_cmd) + + abc_after = { + c for c in rt.rootNode.Children + if rt.classOf(c) == rt.AlembicContainer + } + + # This should yield new AlembicContainer node + abc_containers = abc_after.difference(abc_before) + + if len(abc_containers) != 1: + self.log.error("Something failed when loading.") + + abc_container = abc_containers.pop() + + return containerise( + name, [abc_container], context, loader=self.__class__.__name__) + + def update(self, container, representation): + from pymxs import runtime as rt + + path = get_representation_path(representation) + node = rt.getNodeByName(container["instance_node"]) + + alembic_objects = self.get_container_children(node, "AlembicObject") + for alembic_object in alembic_objects: + alembic_object.source = path + + lib.imprint(container["instance_node"], { + "representation": str(representation["_id"]) + }) + + def switch(self, container, representation): + self.update(container, representation) + + def remove(self, container): + from pymxs import runtime as rt + + node = rt.getNodeByName(container["instance_node"]) + rt.delete(node) + + @staticmethod + def get_container_children(parent, type_name): + from pymxs import runtime as rt + + def list_children(node): + children = [] + for c in node.Children: + children.append(c) + children += list_children(c) + return children + + filtered = [] + for child in list_children(parent): + class_type = str(rt.classOf(child.baseObject)) + if class_type == type_name: + filtered.append(child) + + return filtered diff --git a/openpype/hosts/max/plugins/load/load_pointcache.py b/openpype/hosts/max/plugins/load/load_pointcache.py index f7a72ece25..b3e12adc7b 100644 
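The Alembic loader above cannot ask the MaxScript importer which nodes it created, so it snapshots the scene-root children before and after the import and keeps the difference. The same trick, reduced to a reusable sketch; every pymxs call here is one the loader already uses, only the helper itself is hypothetical:

    from pymxs import runtime as rt


    def import_and_collect(import_cmd, node_class):
        """Run a MaxScript import command and return the nodes it added."""
        before = {c for c in rt.rootNode.Children if rt.classOf(c) == node_class}
        rt.execute(import_cmd)
        after = {c for c in rt.rootNode.Children if rt.classOf(c) == node_class}
        # Whatever was not there before the import must have been created by it.
        return list(after - before)

With node_class=rt.AlembicContainer this mirrors what ModelAbcLoader.load() does by hand.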
--- a/openpype/hosts/max/plugins/load/load_pointcache.py +++ b/openpype/hosts/max/plugins/load/load_pointcache.py @@ -15,8 +15,7 @@ from openpype.hosts.max.api import lib class AbcLoader(load.LoaderPlugin): """Alembic loader.""" - families = ["model", - "camera", + families = ["camera", "animation", "pointcache"] label = "Load Alembic" diff --git a/openpype/hosts/max/plugins/publish/extract_max_scene_raw.py b/openpype/hosts/max/plugins/publish/extract_max_scene_raw.py index cacc84c591..aa01ad1a3a 100644 --- a/openpype/hosts/max/plugins/publish/extract_max_scene_raw.py +++ b/openpype/hosts/max/plugins/publish/extract_max_scene_raw.py @@ -20,7 +20,8 @@ class ExtractMaxSceneRaw(publish.Extractor, order = pyblish.api.ExtractorOrder - 0.2 label = "Extract Max Scene (Raw)" hosts = ["max"] - families = ["camera"] + families = ["camera", + "model"] optional = True def process(self, instance): diff --git a/openpype/hosts/max/plugins/publish/extract_model.py b/openpype/hosts/max/plugins/publish/extract_model.py new file mode 100644 index 0000000000..710ad5f97d --- /dev/null +++ b/openpype/hosts/max/plugins/publish/extract_model.py @@ -0,0 +1,74 @@ +import os +import pyblish.api +from openpype.pipeline import ( + publish, + OptionalPyblishPluginMixin +) +from pymxs import runtime as rt +from openpype.hosts.max.api import ( + maintained_selection, + get_all_children +) + + +class ExtractModel(publish.Extractor, + OptionalPyblishPluginMixin): + """ + Extract Geometry in Alembic Format + """ + + order = pyblish.api.ExtractorOrder - 0.1 + label = "Extract Geometry (Alembic)" + hosts = ["max"] + families = ["model"] + optional = True + + def process(self, instance): + if not self.is_active(instance.data): + return + + container = instance.data["instance_node"] + + self.log.info("Extracting Geometry ...") + + stagingdir = self.staging_dir(instance) + filename = "{name}.abc".format(**instance.data) + filepath = os.path.join(stagingdir, filename) + + # We run the render + self.log.info("Writing alembic '%s' to '%s'" % (filename, + stagingdir)) + + export_cmd = ( + f""" +AlembicExport.ArchiveType = #ogawa +AlembicExport.CoordinateSystem = #maya +AlembicExport.CustomAttributes = true +AlembicExport.UVs = true +AlembicExport.VertexColors = true +AlembicExport.PreserveInstances = true + +exportFile @"{filepath}" #noPrompt selectedOnly:on using:AlembicExport + + """) + + self.log.debug(f"Executing command: {export_cmd}") + + with maintained_selection(): + # select and export + rt.select(get_all_children(rt.getNodeByName(container))) + rt.execute(export_cmd) + + self.log.info("Performing Extraction ...") + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'abc', + 'ext': 'abc', + 'files': filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(representation) + self.log.info("Extracted instance '%s' to: %s" % (instance.name, + filepath)) From f18455717c95b67846558e59a785143961d5fc58 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 2 Mar 2023 17:25:45 +0800 Subject: [PATCH 073/187] OP-4245 - Data Exchange: geometry --- .../max/plugins/publish/extract_model_usd.py | 112 ++++++++++++++++++ 1 file changed, 112 insertions(+) create mode 100644 openpype/hosts/max/plugins/publish/extract_model_usd.py diff --git a/openpype/hosts/max/plugins/publish/extract_model_usd.py b/openpype/hosts/max/plugins/publish/extract_model_usd.py new file mode 100644 index 0000000000..1c8bf073da --- /dev/null +++ 
b/openpype/hosts/max/plugins/publish/extract_model_usd.py @@ -0,0 +1,112 @@ +import os +import pyblish.api +from openpype.pipeline import ( + publish, + OptionalPyblishPluginMixin +) +from pymxs import runtime as rt +from openpype.hosts.max.api import ( + maintained_selection, + get_all_children +) + + +class ExtractModelUSD(publish.Extractor, + OptionalPyblishPluginMixin): + """ + Extract Geometry in USDA Format + """ + + order = pyblish.api.ExtractorOrder - 0.05 + label = "Extract Geometry (USD)" + hosts = ["max"] + families = ["model"] + optional = True + + def process(self, instance): + if not self.is_active(instance.data): + return + + container = instance.data["instance_node"] + + self.log.info("Extracting Geometry ...") + + stagingdir = self.staging_dir(instance) + asset_filename = "{name}.usda".format(**instance.data) + asset_filepath = os.path.join(stagingdir, + asset_filename) + self.log.info("Writing USD '%s' to '%s'" % (asset_filepath, + stagingdir)) + + log_filename ="{name}.txt".format(**instance.data) + log_filepath = os.path.join(stagingdir, + log_filename) + self.log.info("Writing log '%s' to '%s'" % (log_filepath, + stagingdir)) + + # get the nodes which need to be exported + export_options = self.get_export_options(log_filepath) + with maintained_selection(): + # select and export + node_list = self.get_node_list(container) + rt.USDExporter.ExportFile(asset_filepath, + exportOptions=export_options, + nodeList=node_list) + + self.log.info("Performing Extraction ...") + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'usda', + 'ext': 'usda', + 'files': asset_filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(representation) + + log_representation = { + 'name': 'txt', + 'ext': 'txt', + 'files': log_filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(log_representation) + + self.log.info("Extracted instance '%s' to: %s" % (instance.name, + asset_filepath)) + + def get_node_list(self, container): + """ + Get the target nodes which are + the children of the container + """ + node_list = [] + + container_node = rt.getNodeByName(container) + target_node = container_node.Children + rt.select(target_node) + for sel in rt.selection: + node_list.append(sel) + + return node_list + + def get_export_options(self, log_path): + """Set Export Options for USD Exporter""" + + export_options = rt.USDExporter.createOptions() + + export_options.Meshes = True + export_options.Lights = False + export_options.Cameras = False + export_options.Materials = False + export_options.FileFormat = rt.name('ascii') + export_options.UpAxis = rt.name('y') + export_options.LogLevel = rt.name('info') + export_options.LogPath = log_path + export_options.PreserveEdgeOrientation = True + export_options.TimeMode = rt.name('current') + + rt.USDexporter.UIOptions = export_options + + return export_options From a7c11f0aece3b0484d94b64e92955103fc5b93e2 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 2 Mar 2023 17:30:25 +0800 Subject: [PATCH 074/187] hound fix --- openpype/hosts/max/plugins/publish/extract_model_usd.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/max/plugins/publish/extract_model_usd.py b/openpype/hosts/max/plugins/publish/extract_model_usd.py index 1c8bf073da..0f8d283907 100644 --- a/openpype/hosts/max/plugins/publish/extract_model_usd.py +++ b/openpype/hosts/max/plugins/publish/extract_model_usd.py @@ -6,8 +6,7 @@ from 
openpype.pipeline import ( ) from pymxs import runtime as rt from openpype.hosts.max.api import ( - maintained_selection, - get_all_children + maintained_selection ) @@ -38,7 +37,7 @@ class ExtractModelUSD(publish.Extractor, self.log.info("Writing USD '%s' to '%s'" % (asset_filepath, stagingdir)) - log_filename ="{name}.txt".format(**instance.data) + log_filename = "{name}.txt".format(**instance.data) log_filepath = os.path.join(stagingdir, log_filename) self.log.info("Writing log '%s' to '%s'" % (log_filepath, From b5d748f466858557d09680923a30f1851cc8e6a2 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 3 Mar 2023 13:02:24 +0800 Subject: [PATCH 075/187] add export options to the usd extractors and add usd loader --- .../hosts/max/plugins/load/load_model_usd.py | 59 +++++++++++++++++++ .../max/plugins/publish/extract_model_usd.py | 2 + 2 files changed, 61 insertions(+) create mode 100644 openpype/hosts/max/plugins/load/load_model_usd.py diff --git a/openpype/hosts/max/plugins/load/load_model_usd.py b/openpype/hosts/max/plugins/load/load_model_usd.py new file mode 100644 index 0000000000..c6c414b91c --- /dev/null +++ b/openpype/hosts/max/plugins/load/load_model_usd.py @@ -0,0 +1,59 @@ +import os +from openpype.pipeline import ( + load, get_representation_path +) +from openpype.hosts.max.api.pipeline import containerise +from openpype.hosts.max.api import lib + + +class ModelUSDLoader(load.LoaderPlugin): + """Loading model with the USD loader.""" + + families = ["model"] + label = "Load Model(USD)" + representations = ["usda"] + order = -10 + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, namespace=None, data=None): + from pymxs import runtime as rt + # asset_filepath + filepath = os.path.normpath(self.fname) + import_options = rt.USDImporter.CreateOptions() + base_filename = os.path.basename(filepath) + filename, ext = os.path.splitext(base_filename) + log_filepath = filepath.replace(ext, "txt") + + rt.LogPath = log_filepath + rt.LogLevel = rt.name('info') + rt.USDImporter.importFile(filepath, + importOptions=import_options) + + asset = rt.getNodeByName(f"{name}") + + return containerise( + name, [asset], context, loader=self.__class__.__name__) + + def update(self, container, representation): + from pymxs import runtime as rt + + path = get_representation_path(representation) + node = rt.getNodeByName(container["instance_node"]) + + usd_objects = self.get_container_children(node) + for usd_object in usd_objects: + usd_object.source = path + + lib.imprint(container["instance_node"], { + "representation": str(representation["_id"]) + }) + + def switch(self, container, representation): + self.update(container, representation) + + def remove(self, container): + from pymxs import runtime as rt + + node = rt.getNodeByName(container["instance_node"]) + rt.delete(node) diff --git a/openpype/hosts/max/plugins/publish/extract_model_usd.py b/openpype/hosts/max/plugins/publish/extract_model_usd.py index 0f8d283907..2f89e4de16 100644 --- a/openpype/hosts/max/plugins/publish/extract_model_usd.py +++ b/openpype/hosts/max/plugins/publish/extract_model_usd.py @@ -96,9 +96,11 @@ class ExtractModelUSD(publish.Extractor, export_options = rt.USDExporter.createOptions() export_options.Meshes = True + export_options.Shapes = True export_options.Lights = False export_options.Cameras = False export_options.Materials = False + export_options.MeshFormat = rt.name('fromScene') export_options.FileFormat = rt.name('ascii') export_options.UpAxis = rt.name('y') export_options.LogLevel = 
rt.name('info') From 519cef018529e17fb94c7c8bb197885c762ede93 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 3 Mar 2023 13:34:35 +0800 Subject: [PATCH 076/187] add validator for model family --- .../max/plugins/publish/extract_model_usd.py | 2 +- .../publish/validate_model_contents.py | 44 +++++++++++++++++++ 2 files changed, 45 insertions(+), 1 deletion(-) create mode 100644 openpype/hosts/max/plugins/publish/validate_model_contents.py diff --git a/openpype/hosts/max/plugins/publish/extract_model_usd.py b/openpype/hosts/max/plugins/publish/extract_model_usd.py index 2f89e4de16..b20fd45eae 100644 --- a/openpype/hosts/max/plugins/publish/extract_model_usd.py +++ b/openpype/hosts/max/plugins/publish/extract_model_usd.py @@ -96,7 +96,7 @@ class ExtractModelUSD(publish.Extractor, export_options = rt.USDExporter.createOptions() export_options.Meshes = True - export_options.Shapes = True + export_options.Shapes = False export_options.Lights = False export_options.Cameras = False export_options.Materials = False diff --git a/openpype/hosts/max/plugins/publish/validate_model_contents.py b/openpype/hosts/max/plugins/publish/validate_model_contents.py new file mode 100644 index 0000000000..01ae869c30 --- /dev/null +++ b/openpype/hosts/max/plugins/publish/validate_model_contents.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +import pyblish.api +from openpype.pipeline import PublishValidationError +from pymxs import runtime as rt + + +class ValidateModelContent(pyblish.api.InstancePlugin): + """Validates Model instance contents. + + A model instance may only hold either geometry + or editable meshes. + """ + + order = pyblish.api.ValidatorOrder + families = ["model"] + hosts = ["max"] + label = "Model Contents" + + def process(self, instance): + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError("Model instance must only include" + "Geometry and Editable Mesh") + + def get_invalid(self, instance): + """ + Get invalid nodes if the instance is not camera + """ + invalid = list() + container = instance.data["instance_node"] + self.log.info("Validating look content for " + "{}".format(container)) + + con = rt.getNodeByName(container) + selection_list = list(con.Children) + for sel in selection_list: + if rt.classOf(sel) in rt.Camera.classes: + invalid.append(sel) + if rt.classOf(sel) in rt.Light.classes: + invalid.append(sel) + if rt.classOf(sel) in rt.Shape.classes: + invalid.append(sel) + + return invalid From 12211d70371354fafad96f980d05743542be6c5e Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 3 Mar 2023 13:35:52 +0800 Subject: [PATCH 077/187] add info in docstring for the validator --- openpype/hosts/max/plugins/publish/validate_model_contents.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/max/plugins/publish/validate_model_contents.py b/openpype/hosts/max/plugins/publish/validate_model_contents.py index 01ae869c30..dd9c8de2cf 100644 --- a/openpype/hosts/max/plugins/publish/validate_model_contents.py +++ b/openpype/hosts/max/plugins/publish/validate_model_contents.py @@ -7,8 +7,8 @@ from pymxs import runtime as rt class ValidateModelContent(pyblish.api.InstancePlugin): """Validates Model instance contents. - A model instance may only hold either geometry - or editable meshes. + A model instance may only hold either geometry-related + object(excluding Shapes) or editable meshes. 
""" order = pyblish.api.ValidatorOrder From c98160691b9e1273de8294ad1080792e8080c8a5 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 3 Mar 2023 16:01:22 +0800 Subject: [PATCH 078/187] add usdmodel as families --- .../max/plugins/create/create_model_usd.py | 22 +++++++++++++++++++ .../hosts/max/plugins/load/load_model_usd.py | 2 +- .../max/plugins/publish/extract_model_usd.py | 2 +- .../publish/validate_model_contents.py | 2 +- openpype/plugins/publish/integrate.py | 1 + openpype/plugins/publish/integrate_legacy.py | 1 + 6 files changed, 27 insertions(+), 3 deletions(-) create mode 100644 openpype/hosts/max/plugins/create/create_model_usd.py diff --git a/openpype/hosts/max/plugins/create/create_model_usd.py b/openpype/hosts/max/plugins/create/create_model_usd.py new file mode 100644 index 0000000000..237ae8f4ae --- /dev/null +++ b/openpype/hosts/max/plugins/create/create_model_usd.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for model exported in USD format.""" +from openpype.hosts.max.api import plugin +from openpype.pipeline import CreatedInstance + + +class CreateUSDModel(plugin.MaxCreator): + identifier = "io.openpype.creators.max.usdmodel" + label = "USD Model" + family = "usdmodel" + icon = "gear" + + def create(self, subset_name, instance_data, pre_create_data): + from pymxs import runtime as rt + _ = super(CreateUSDModel, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance + # TODO: Disable "Add to Containers?" Panel + # parent the selected cameras into the container + # for additional work on the node: + # instance_node = rt.getNodeByName(instance.get("instance_node")) diff --git a/openpype/hosts/max/plugins/load/load_model_usd.py b/openpype/hosts/max/plugins/load/load_model_usd.py index c6c414b91c..ac318fbb57 100644 --- a/openpype/hosts/max/plugins/load/load_model_usd.py +++ b/openpype/hosts/max/plugins/load/load_model_usd.py @@ -9,7 +9,7 @@ from openpype.hosts.max.api import lib class ModelUSDLoader(load.LoaderPlugin): """Loading model with the USD loader.""" - families = ["model"] + families = ["usdmodel"] label = "Load Model(USD)" representations = ["usda"] order = -10 diff --git a/openpype/hosts/max/plugins/publish/extract_model_usd.py b/openpype/hosts/max/plugins/publish/extract_model_usd.py index b20fd45eae..e0ad3bb23e 100644 --- a/openpype/hosts/max/plugins/publish/extract_model_usd.py +++ b/openpype/hosts/max/plugins/publish/extract_model_usd.py @@ -19,7 +19,7 @@ class ExtractModelUSD(publish.Extractor, order = pyblish.api.ExtractorOrder - 0.05 label = "Extract Geometry (USD)" hosts = ["max"] - families = ["model"] + families = ["usdmodel"] optional = True def process(self, instance): diff --git a/openpype/hosts/max/plugins/publish/validate_model_contents.py b/openpype/hosts/max/plugins/publish/validate_model_contents.py index dd9c8de2cf..34578e6920 100644 --- a/openpype/hosts/max/plugins/publish/validate_model_contents.py +++ b/openpype/hosts/max/plugins/publish/validate_model_contents.py @@ -12,7 +12,7 @@ class ValidateModelContent(pyblish.api.InstancePlugin): """ order = pyblish.api.ValidatorOrder - families = ["model"] + families = ["model", "usdmodel"] hosts = ["max"] label = "Model Contents" diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index b117006871..fc098b416a 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -124,6 +124,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "xgen", "hda", "usd", + "usdmodel", 
"staticMesh", "skeletalMesh", "mvLook", diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index b93abab1d8..ba32c376d8 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -120,6 +120,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "xgen", "hda", "usd", + "usdmodel", "staticMesh", "skeletalMesh", "mvLook", From fd6aa8302eee6cfcb44cb4d80f30466cd994485d Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 3 Mar 2023 16:02:24 +0800 Subject: [PATCH 079/187] add usdmodel as families --- openpype/hosts/max/plugins/create/create_model_usd.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/max/plugins/create/create_model_usd.py b/openpype/hosts/max/plugins/create/create_model_usd.py index 237ae8f4ae..21407ae1f3 100644 --- a/openpype/hosts/max/plugins/create/create_model_usd.py +++ b/openpype/hosts/max/plugins/create/create_model_usd.py @@ -11,7 +11,6 @@ class CreateUSDModel(plugin.MaxCreator): icon = "gear" def create(self, subset_name, instance_data, pre_create_data): - from pymxs import runtime as rt _ = super(CreateUSDModel, self).create( subset_name, instance_data, From 5b4eff51acd3fdb3b6700fa154986fc34cb022a6 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 3 Mar 2023 20:30:05 +0800 Subject: [PATCH 080/187] include only model family --- openpype/hosts/max/plugins/publish/validate_model_contents.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/max/plugins/publish/validate_model_contents.py b/openpype/hosts/max/plugins/publish/validate_model_contents.py index 34578e6920..dd9c8de2cf 100644 --- a/openpype/hosts/max/plugins/publish/validate_model_contents.py +++ b/openpype/hosts/max/plugins/publish/validate_model_contents.py @@ -12,7 +12,7 @@ class ValidateModelContent(pyblish.api.InstancePlugin): """ order = pyblish.api.ValidatorOrder - families = ["model", "usdmodel"] + families = ["model"] hosts = ["max"] label = "Model Contents" From 1511ddbccf7f89f5ce90d934536d8a5d1b0eeb71 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 6 Mar 2023 16:22:17 +0800 Subject: [PATCH 081/187] usdmodel extractor with selected node and remove usdmodel family --- .../max/plugins/create/create_model_usd.py | 21 ------------------- .../hosts/max/plugins/load/load_model_usd.py | 2 +- .../max/plugins/publish/extract_model_usd.py | 3 ++- openpype/plugins/publish/integrate.py | 1 - openpype/plugins/publish/integrate_legacy.py | 1 - 5 files changed, 3 insertions(+), 25 deletions(-) delete mode 100644 openpype/hosts/max/plugins/create/create_model_usd.py diff --git a/openpype/hosts/max/plugins/create/create_model_usd.py b/openpype/hosts/max/plugins/create/create_model_usd.py deleted file mode 100644 index 21407ae1f3..0000000000 --- a/openpype/hosts/max/plugins/create/create_model_usd.py +++ /dev/null @@ -1,21 +0,0 @@ -# -*- coding: utf-8 -*- -"""Creator plugin for model exported in USD format.""" -from openpype.hosts.max.api import plugin -from openpype.pipeline import CreatedInstance - - -class CreateUSDModel(plugin.MaxCreator): - identifier = "io.openpype.creators.max.usdmodel" - label = "USD Model" - family = "usdmodel" - icon = "gear" - - def create(self, subset_name, instance_data, pre_create_data): - _ = super(CreateUSDModel, self).create( - subset_name, - instance_data, - pre_create_data) # type: CreatedInstance - # TODO: Disable "Add to Containers?" 
Panel - # parent the selected cameras into the container - # for additional work on the node: - # instance_node = rt.getNodeByName(instance.get("instance_node")) diff --git a/openpype/hosts/max/plugins/load/load_model_usd.py b/openpype/hosts/max/plugins/load/load_model_usd.py index ac318fbb57..c6c414b91c 100644 --- a/openpype/hosts/max/plugins/load/load_model_usd.py +++ b/openpype/hosts/max/plugins/load/load_model_usd.py @@ -9,7 +9,7 @@ from openpype.hosts.max.api import lib class ModelUSDLoader(load.LoaderPlugin): """Loading model with the USD loader.""" - families = ["usdmodel"] + families = ["model"] label = "Load Model(USD)" representations = ["usda"] order = -10 diff --git a/openpype/hosts/max/plugins/publish/extract_model_usd.py b/openpype/hosts/max/plugins/publish/extract_model_usd.py index e0ad3bb23e..0bed2d855e 100644 --- a/openpype/hosts/max/plugins/publish/extract_model_usd.py +++ b/openpype/hosts/max/plugins/publish/extract_model_usd.py @@ -19,7 +19,7 @@ class ExtractModelUSD(publish.Extractor, order = pyblish.api.ExtractorOrder - 0.05 label = "Extract Geometry (USD)" hosts = ["max"] - families = ["usdmodel"] + families = ["model"] optional = True def process(self, instance): @@ -50,6 +50,7 @@ class ExtractModelUSD(publish.Extractor, node_list = self.get_node_list(container) rt.USDExporter.ExportFile(asset_filepath, exportOptions=export_options, + contentSource=rt.name("selected"), nodeList=node_list) self.log.info("Performing Extraction ...") diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index fc098b416a..b117006871 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -124,7 +124,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "xgen", "hda", "usd", - "usdmodel", "staticMesh", "skeletalMesh", "mvLook", diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index ba32c376d8..b93abab1d8 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -120,7 +120,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "xgen", "hda", "usd", - "usdmodel", "staticMesh", "skeletalMesh", "mvLook", From 6064fa2d45ca59269cf101b6f19edcf557996f24 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 6 Mar 2023 11:09:49 +0000 Subject: [PATCH 082/187] Added settings for rendering --- .../settings/defaults/project_settings/unreal.json | 2 ++ .../schemas/projects_schema/schema_project_unreal.json | 10 ++++++++++ 2 files changed, 12 insertions(+) diff --git a/openpype/settings/defaults/project_settings/unreal.json b/openpype/settings/defaults/project_settings/unreal.json index 75cee11bd9..ff290ef254 100644 --- a/openpype/settings/defaults/project_settings/unreal.json +++ b/openpype/settings/defaults/project_settings/unreal.json @@ -11,6 +11,8 @@ }, "level_sequences_for_layouts": false, "delete_unmatched_assets": false, + "render_config_path": "", + "preroll_frames": 0, "project_setup": { "dev_mode": true } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json b/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json index 8988dd2ff0..40bbb40ccc 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json @@ -32,6 +32,16 @@ "key": "delete_unmatched_assets", "label": "Delete assets that are not matched" }, + { + "type": "text", + 
"key": "render_config_path", + "label": "Render Config Path" + }, + { + "type": "number", + "key": "preroll_frames", + "label": "Pre-roll frames" + }, { "type": "dict", "collapsible": true, From 095c792ad229d23e0b0d2b5f4fa44eb0ae229862 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 6 Mar 2023 11:10:42 +0000 Subject: [PATCH 083/187] Uses settings for rendering --- openpype/hosts/unreal/api/rendering.py | 54 +++++++++++++++++--------- 1 file changed, 36 insertions(+), 18 deletions(-) diff --git a/openpype/hosts/unreal/api/rendering.py b/openpype/hosts/unreal/api/rendering.py index 29e4747f6e..5ef4792000 100644 --- a/openpype/hosts/unreal/api/rendering.py +++ b/openpype/hosts/unreal/api/rendering.py @@ -2,6 +2,7 @@ import os import unreal +from openpype.settings import get_project_settings from openpype.pipeline import Anatomy from openpype.hosts.unreal.api import pipeline @@ -66,6 +67,13 @@ def start_rendering(): ar = unreal.AssetRegistryHelpers.get_asset_registry() + data = get_project_settings(project) + config = None + config_path = str(data.get("unreal").get("render_config_path")) + if config_path and unreal.EditorAssetLibrary.does_asset_exist(config_path): + unreal.log("Found saved render configuration") + config = ar.get_asset_by_object_path(config_path).get_asset() + for i in inst_data: sequence = ar.get_asset_by_object_path(i["sequence"]).get_asset() @@ -81,47 +89,50 @@ def start_rendering(): # Get all the sequences to render. If there are subsequences, # add them and their frame ranges to the render list. We also # use the names for the output paths. - for s in sequences: - subscenes = pipeline.get_subsequences(s.get('sequence')) + for seq in sequences: + subscenes = pipeline.get_subsequences(seq.get('sequence')) if subscenes: - for ss in subscenes: + for sub_seq in subscenes: sequences.append({ - "sequence": ss.get_sequence(), - "output": (f"{s.get('output')}/" - f"{ss.get_sequence().get_name()}"), + "sequence": sub_seq.get_sequence(), + "output": (f"{seq.get('output')}/" + f"{sub_seq.get_sequence().get_name()}"), "frame_range": ( - ss.get_start_frame(), ss.get_end_frame()) + sub_seq.get_start_frame(), sub_seq.get_end_frame()) }) else: # Avoid rendering camera sequences - if "_camera" not in s.get('sequence').get_name(): - render_list.append(s) + if "_camera" not in seq.get('sequence').get_name(): + render_list.append(seq) # Create the rendering jobs and add them to the queue. - for r in render_list: + for render_setting in render_list: job = queue.allocate_new_job(unreal.MoviePipelineExecutorJob) job.sequence = unreal.SoftObjectPath(i["master_sequence"]) job.map = unreal.SoftObjectPath(i["master_level"]) job.author = "OpenPype" + # If we have a saved configuration, copy it to the job. + if config: + job.get_configuration().copy_from(config) + # User data could be used to pass data to the job, that can be # read in the job's OnJobFinished callback. We could, # for instance, pass the AvalonPublishInstance's path to the job. 
# job.user_data = "" + output_dir = render_setting.get('output') + shot_name = render_setting.get('sequence').get_name() + settings = job.get_configuration().find_or_add_setting_by_class( unreal.MoviePipelineOutputSetting) settings.output_resolution = unreal.IntPoint(1920, 1080) - settings.custom_start_frame = r.get("frame_range")[0] - settings.custom_end_frame = r.get("frame_range")[1] + settings.custom_start_frame = render_setting.get("frame_range")[0] + settings.custom_end_frame = render_setting.get("frame_range")[1] settings.use_custom_playback_range = True - settings.file_name_format = "{sequence_name}.{frame_number}" - settings.output_directory.path = f"{render_dir}/{r.get('output')}" - - renderPass = job.get_configuration().find_or_add_setting_by_class( - unreal.MoviePipelineDeferredPassBase) - renderPass.disable_multisample_effects = True + settings.file_name_format = f"{shot_name}" + ".{frame_number}" + settings.output_directory.path = f"{render_dir}/{output_dir}" job.get_configuration().find_or_add_setting_by_class( unreal.MoviePipelineImageSequenceOutput_PNG) @@ -130,6 +141,13 @@ def start_rendering(): if queue.get_jobs(): global executor executor = unreal.MoviePipelinePIEExecutor() + + preroll_frames = data.get("unreal").get("preroll_frames", 0) + + settings = unreal.MoviePipelinePIEExecutorSettings() + settings.set_editor_property( + "initial_delay_frame_count", preroll_frames) + executor.on_executor_finished_delegate.add_callable_unique( _queue_finish_callback) executor.on_individual_job_finished_delegate.add_callable_unique( From 839d5834ca611c20f042c3036bcf422ce5ee32ce Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 8 Mar 2023 11:12:24 +0100 Subject: [PATCH 084/187] Fix merge problem --- openpype/hosts/unreal/plugins/create/create_camera.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/unreal/plugins/create/create_camera.py b/openpype/hosts/unreal/plugins/create/create_camera.py index 33a0662d7d..642924e2d6 100644 --- a/openpype/hosts/unreal/plugins/create/create_camera.py +++ b/openpype/hosts/unreal/plugins/create/create_camera.py @@ -7,8 +7,6 @@ from openpype.hosts.unreal.api.plugin import ( UnrealAssetCreator, ) -class CreateCamera(UnrealActorCreator): - """Create Camera.""" class CreateCamera(UnrealAssetCreator): """Create Camera.""" From d8efd09797467cf1464d06c36b654f8ec3e02b17 Mon Sep 17 00:00:00 2001 From: moonyuet Date: Thu, 9 Mar 2023 07:03:53 +0100 Subject: [PATCH 085/187] update the mesh format to poly mesh --- openpype/hosts/max/plugins/publish/extract_model_usd.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/max/plugins/publish/extract_model_usd.py b/openpype/hosts/max/plugins/publish/extract_model_usd.py index 0bed2d855e..f70a14ba0b 100644 --- a/openpype/hosts/max/plugins/publish/extract_model_usd.py +++ b/openpype/hosts/max/plugins/publish/extract_model_usd.py @@ -101,7 +101,7 @@ class ExtractModelUSD(publish.Extractor, export_options.Lights = False export_options.Cameras = False export_options.Materials = False - export_options.MeshFormat = rt.name('fromScene') + export_options.MeshFormat = rt.name('polyMesh') export_options.FileFormat = rt.name('ascii') export_options.UpAxis = rt.name('y') export_options.LogLevel = rt.name('info') From 861d60ca0cd3144e75e8ddb135ce071d0b1b65ae Mon Sep 17 00:00:00 2001 From: moonyuet Date: Mon, 13 Mar 2023 11:31:42 +0100 Subject: [PATCH 086/187] fbx obj extractors and oaders --- .../hosts/max/plugins/load/load_camera_fbx.py | 2 - 
.../hosts/max/plugins/load/load_model_fbx.py | 62 ++++++++++++++++ .../hosts/max/plugins/load/load_model_obj.py | 56 ++++++++++++++ .../max/plugins/publish/extract_model_fbx.py | 74 +++++++++++++++++++ .../max/plugins/publish/extract_model_obj.py | 59 +++++++++++++++ 5 files changed, 251 insertions(+), 2 deletions(-) create mode 100644 openpype/hosts/max/plugins/load/load_model_fbx.py create mode 100644 openpype/hosts/max/plugins/load/load_model_obj.py create mode 100644 openpype/hosts/max/plugins/publish/extract_model_fbx.py create mode 100644 openpype/hosts/max/plugins/publish/extract_model_obj.py diff --git a/openpype/hosts/max/plugins/load/load_camera_fbx.py b/openpype/hosts/max/plugins/load/load_camera_fbx.py index 3a6947798e..205e815dc8 100644 --- a/openpype/hosts/max/plugins/load/load_camera_fbx.py +++ b/openpype/hosts/max/plugins/load/load_camera_fbx.py @@ -36,8 +36,6 @@ importFile @"{filepath}" #noPrompt using:FBXIMP self.log.debug(f"Executing command: {fbx_import_cmd}") rt.execute(fbx_import_cmd) - container_name = f"{name}_CON" - asset = rt.getNodeByName(f"{name}") return containerise( diff --git a/openpype/hosts/max/plugins/load/load_model_fbx.py b/openpype/hosts/max/plugins/load/load_model_fbx.py new file mode 100644 index 0000000000..38b8555d28 --- /dev/null +++ b/openpype/hosts/max/plugins/load/load_model_fbx.py @@ -0,0 +1,62 @@ +import os +from openpype.pipeline import ( + load, + get_representation_path +) +from openpype.hosts.max.api.pipeline import containerise +from openpype.hosts.max.api import lib + + +class FbxModelLoader(load.LoaderPlugin): + """Fbx Model Loader""" + + families = ["model"] + representations = ["fbx"] + order = -9 + icon = "code-fork" + color = "white" + + def load(self, context, name=None, namespace=None, data=None): + from pymxs import runtime as rt + + filepath = os.path.normpath(self.fname) + + fbx_import_cmd = ( + f""" + +FBXImporterSetParam "Animation" false +FBXImporterSetParam "Cameras" false +FBXImporterSetParam "AxisConversionMethod" true +FbxExporterSetParam "UpAxis" "Y" +FbxExporterSetParam "Preserveinstances" true + +importFile @"{filepath}" #noPrompt using:FBXIMP + """) + + self.log.debug(f"Executing command: {fbx_import_cmd}") + rt.execute(fbx_import_cmd) + + asset = rt.getNodeByName(f"{name}") + + return containerise( + name, [asset], context, loader=self.__class__.__name__) + + def update(self, container, representation): + from pymxs import runtime as rt + + path = get_representation_path(representation) + node = rt.getNodeByName(container["instance_node"]) + + fbx_objects = self.get_container_children(node) + for fbx_object in fbx_objects: + fbx_object.source = path + + lib.imprint(container["instance_node"], { + "representation": str(representation["_id"]) + }) + + def remove(self, container): + from pymxs import runtime as rt + + node = rt.getNodeByName(container["instance_node"]) + rt.delete(node) diff --git a/openpype/hosts/max/plugins/load/load_model_obj.py b/openpype/hosts/max/plugins/load/load_model_obj.py new file mode 100644 index 0000000000..06b411cb5c --- /dev/null +++ b/openpype/hosts/max/plugins/load/load_model_obj.py @@ -0,0 +1,56 @@ +import os +from openpype.pipeline import ( + load, + get_representation_path +) +from openpype.hosts.max.api.pipeline import containerise +from openpype.hosts.max.api import lib + + +class ObjLoader(load.LoaderPlugin): + """Obj Loader""" + + families = ["model"] + representations = ["obj"] + order = -9 + icon = "code-fork" + color = "white" + + def load(self, context, name=None, 
namespace=None, data=None): + from pymxs import runtime as rt + + filepath = os.path.normpath(self.fname) + self.log.debug(f"Executing command to import..") + + rt.execute(f'importFile @"{filepath}" #noPrompt using:ObjImp') + # get current selection + for selection in rt.getCurrentSelection(): + # create "missing" container for obj import + container = rt.container() + container.name = f"{name}" + selection.Parent = container + + asset = rt.getNodeByName(f"{name}") + + return containerise( + name, [asset], context, loader=self.__class__.__name__) + + def update(self, container, representation): + from pymxs import runtime as rt + + path = get_representation_path(representation) + node = rt.getNodeByName(container["instance_node"]) + + objects = self.get_container_children(node) + for obj in objects: + obj.source = path + + lib.imprint(container["instance_node"], { + "representation": str(representation["_id"]) + }) + + def remove(self, container): + from pymxs import runtime as rt + + node = rt.getNodeByName(container["instance_node"]) + rt.delete(node) diff --git a/openpype/hosts/max/plugins/publish/extract_model_fbx.py b/openpype/hosts/max/plugins/publish/extract_model_fbx.py new file mode 100644 index 0000000000..ce58e8cc17 --- /dev/null +++ b/openpype/hosts/max/plugins/publish/extract_model_fbx.py @@ -0,0 +1,74 @@ +import os +import pyblish.api +from openpype.pipeline import ( + publish, + OptionalPyblishPluginMixin +) +from pymxs import runtime as rt +from openpype.hosts.max.api import ( + maintained_selection, + get_all_children +) + + +class ExtractModelFbx(publish.Extractor, + OptionalPyblishPluginMixin): + """ + Extract Geometry in FBX Format + """ + + order = pyblish.api.ExtractorOrder - 0.05 + label = "Extract FBX" + hosts = ["max"] + families = ["model"] + optional = True + + def process(self, instance): + if not self.is_active(instance.data): + return + + container = instance.data["instance_node"] + + self.log.info("Extracting Geometry ...") + + stagingdir = self.staging_dir(instance) + filename = "{name}.fbx".format(**instance.data) + filepath = os.path.join(stagingdir, + filename) + self.log.info("Writing FBX '%s' to '%s'" % (filepath, + stagingdir)) + + export_fbx_cmd = ( + f""" +FBXExporterSetParam "Animation" false +FBXExporterSetParam "Cameras" false +FBXExporterSetParam "Lights" false +FBXExporterSetParam "PointCache" false +FBXExporterSetParam "AxisConversionMethod" "Animation" +FbxExporterSetParam "UpAxis" "Y" +FbxExporterSetParam "Preserveinstances" true + +exportFile @"{filepath}" #noPrompt selectedOnly:true using:FBXEXP + + """) + + self.log.debug(f"Executing command: {export_fbx_cmd}") + + with maintained_selection(): + # select and export + rt.select(get_all_children(rt.getNodeByName(container))) + rt.execute(export_fbx_cmd) + + self.log.info("Performing Extraction ...") + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'fbx', + 'ext': 'fbx', + 'files': filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(representation) + self.log.info("Extracted instance '%s' to: %s" % (instance.name, + filepath)) diff --git a/openpype/hosts/max/plugins/publish/extract_model_obj.py b/openpype/hosts/max/plugins/publish/extract_model_obj.py new file mode 100644 index 0000000000..298e19151d --- /dev/null +++ b/openpype/hosts/max/plugins/publish/extract_model_obj.py @@ -0,0 +1,59 @@ +import os +import pyblish.api +from openpype.pipeline import ( + publish, + OptionalPyblishPluginMixin 
+) +from pymxs import runtime as rt +from openpype.hosts.max.api import ( + maintained_selection, + get_all_children +) + + +class ExtractModelObj(publish.Extractor, + OptionalPyblishPluginMixin): + """ + Extract Geometry in OBJ Format + """ + + order = pyblish.api.ExtractorOrder - 0.05 + label = "Extract OBJ" + hosts = ["max"] + families = ["model"] + optional = True + + def process(self, instance): + if not self.is_active(instance.data): + return + + container = instance.data["instance_node"] + + self.log.info("Extracting Geometry ...") + + stagingdir = self.staging_dir(instance) + filename = "{name}.obj".format(**instance.data) + filepath = os.path.join(stagingdir, + filename) + self.log.info("Writing OBJ '%s' to '%s'" % (filepath, + stagingdir)) + + with maintained_selection(): + # select and export + rt.select(get_all_children(rt.getNodeByName(container))) + rt.execute(f'exportFile @"{filepath}" #noPrompt selectedOnly:true using:ObjExp') + + self.log.info("Performing Extraction ...") + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'obj', + 'ext': 'obj', + 'files': filename, + "stagingDir": stagingdir, + } + + instance.data["representations"].append(representation) + self.log.info("Extracted instance '%s' to: %s" % (instance.name, + filepath)) From 64e8ff68b54d420c142c9276674e6cac74646ce0 Mon Sep 17 00:00:00 2001 From: moonyuet Date: Mon, 13 Mar 2023 11:32:47 +0100 Subject: [PATCH 087/187] cosmetic issue fixed --- openpype/hosts/max/plugins/publish/extract_model_obj.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/max/plugins/publish/extract_model_obj.py b/openpype/hosts/max/plugins/publish/extract_model_obj.py index 298e19151d..7bda237880 100644 --- a/openpype/hosts/max/plugins/publish/extract_model_obj.py +++ b/openpype/hosts/max/plugins/publish/extract_model_obj.py @@ -41,7 +41,7 @@ class ExtractModelObj(publish.Extractor, with maintained_selection(): # select and export rt.select(get_all_children(rt.getNodeByName(container))) - rt.execute(f'exportFile @"{filepath}" #noPrompt selectedOnly:true using:ObjExp') + rt.execute(f'exportFile @"{filepath}" #noPrompt selectedOnly:true using:ObjExp') # noqa self.log.info("Performing Extraction ...") if "representations" not in instance.data: From 55a10a87932130828eeca112f7098e4a4cf5a24f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 13 Mar 2023 22:55:00 +0100 Subject: [PATCH 088/187] Use new style `ColormanagedPyblishPluginMixin` --- .../hosts/substancepainter/plugins/publish/extract_textures.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py index e66ce6dbf6..469f8501f7 100644 --- a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py +++ b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py @@ -2,7 +2,8 @@ from openpype.pipeline import KnownPublishError, publish import substance_painter.export -class ExtractTextures(publish.ExtractorColormanaged): +class ExtractTextures(publish.Extractor, + publish.ColormanagedPyblishPluginMixin): """Extract Textures using an output template config. Note: From d780974b1b1e0f2e49fdeffddc8ed8d44e673f0e Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 17 Mar 2023 17:23:44 +0800 Subject: [PATCH 089/187] usd mesh format to trimesh and adjustment on update function in loaders. 
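The loader adjustments in the patch below all converge on one update pattern: resolve the file of the new representation, point every child of the loaded container at it, then re-imprint the representation id so the inventory knows which version is loaded. A generic, hedged version of that pattern; each call shown is already used by the Max loaders in this series, only the free-standing function is an assumption:

    from pymxs import runtime as rt

    from openpype.pipeline import get_representation_path
    from openpype.hosts.max.api import lib


    def update_max_container(container, representation):
        """Repoint a loaded 3ds Max container to another representation."""
        path = get_representation_path(representation)
        node = rt.getNodeByName(container["instance_node"])

        # The imported objects are parented under the container node.
        for child in node.Children:
            child.source = path

        # Record which representation the container now points to.
        lib.imprint(container["instance_node"], {
            "representation": str(representation["_id"]),
        })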
--- openpype/hosts/max/plugins/load/load_model_fbx.py | 2 +- openpype/hosts/max/plugins/load/load_model_obj.py | 2 +- openpype/hosts/max/plugins/load/load_model_usd.py | 2 +- openpype/hosts/max/plugins/publish/extract_model_usd.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/max/plugins/load/load_model_fbx.py b/openpype/hosts/max/plugins/load/load_model_fbx.py index 38b8555d28..1729874a6b 100644 --- a/openpype/hosts/max/plugins/load/load_model_fbx.py +++ b/openpype/hosts/max/plugins/load/load_model_fbx.py @@ -47,7 +47,7 @@ importFile @"{filepath}" #noPrompt using:FBXIMP path = get_representation_path(representation) node = rt.getNodeByName(container["instance_node"]) - fbx_objects = self.get_container_children(node) + fbx_objects = node.Children for fbx_object in fbx_objects: fbx_object.source = path diff --git a/openpype/hosts/max/plugins/load/load_model_obj.py b/openpype/hosts/max/plugins/load/load_model_obj.py index 06b411cb5c..281a986934 100644 --- a/openpype/hosts/max/plugins/load/load_model_obj.py +++ b/openpype/hosts/max/plugins/load/load_model_obj.py @@ -41,7 +41,7 @@ class ObjLoader(load.LoaderPlugin): path = get_representation_path(representation) node = rt.getNodeByName(container["instance_node"]) - objects = self.get_container_children(node) + objects = node.Children for obj in objects: obj.source = path diff --git a/openpype/hosts/max/plugins/load/load_model_usd.py b/openpype/hosts/max/plugins/load/load_model_usd.py index c6c414b91c..b6a41f4e68 100644 --- a/openpype/hosts/max/plugins/load/load_model_usd.py +++ b/openpype/hosts/max/plugins/load/load_model_usd.py @@ -41,7 +41,7 @@ class ModelUSDLoader(load.LoaderPlugin): path = get_representation_path(representation) node = rt.getNodeByName(container["instance_node"]) - usd_objects = self.get_container_children(node) + usd_objects = node.Children for usd_object in usd_objects: usd_object.source = path diff --git a/openpype/hosts/max/plugins/publish/extract_model_usd.py b/openpype/hosts/max/plugins/publish/extract_model_usd.py index f70a14ba0b..60dddc8670 100644 --- a/openpype/hosts/max/plugins/publish/extract_model_usd.py +++ b/openpype/hosts/max/plugins/publish/extract_model_usd.py @@ -101,7 +101,7 @@ class ExtractModelUSD(publish.Extractor, export_options.Lights = False export_options.Cameras = False export_options.Materials = False - export_options.MeshFormat = rt.name('polyMesh') + export_options.MeshFormat = rt.name('triMesh') export_options.FileFormat = rt.name('ascii') export_options.UpAxis = rt.name('y') export_options.LogLevel = rt.name('info') From fd2d210522fbeddd27f707b0683e2f7411affd8e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 20 Mar 2023 11:26:48 +0100 Subject: [PATCH 090/187] Use create context environment Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../substancepainter/plugins/create/create_workfile.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/substancepainter/plugins/create/create_workfile.py b/openpype/hosts/substancepainter/plugins/create/create_workfile.py index 729cc8f718..29191a1714 100644 --- a/openpype/hosts/substancepainter/plugins/create/create_workfile.py +++ b/openpype/hosts/substancepainter/plugins/create/create_workfile.py @@ -29,9 +29,9 @@ class CreateWorkfile(AutoCreator): variant = self.default_variant project_name = self.project_name - asset_name = legacy_io.Session["AVALON_ASSET"] - task_name = legacy_io.Session["AVALON_TASK"] - host_name = 
legacy_io.Session["AVALON_APP"] + asset_name = self.create_context.get_current_asset_name() + task_name = self.create_context.get_current_task_name() + host_name = self.create_context.host_name # Workfile instance should always exist and must only exist once. # As such we'll first check if it already exists and is collected. From eeaa807588317b10e641e83566a07e278f3be6a7 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 20 Mar 2023 11:41:54 +0100 Subject: [PATCH 091/187] Remove unused import --- .../hosts/substancepainter/plugins/create/create_workfile.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/substancepainter/plugins/create/create_workfile.py b/openpype/hosts/substancepainter/plugins/create/create_workfile.py index 29191a1714..4e316f3b64 100644 --- a/openpype/hosts/substancepainter/plugins/create/create_workfile.py +++ b/openpype/hosts/substancepainter/plugins/create/create_workfile.py @@ -2,7 +2,6 @@ """Creator plugin for creating workfiles.""" from openpype.pipeline import CreatedInstance, AutoCreator -from openpype.pipeline import legacy_io from openpype.client import get_asset_by_name from openpype.hosts.substancepainter.api.pipeline import ( From 9020bf23d325b706485ed7374d22f6073aa71e79 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 20 Mar 2023 11:44:56 +0100 Subject: [PATCH 092/187] Implement `get_context_data` and `update_context_data` --- .../hosts/substancepainter/api/pipeline.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/substancepainter/api/pipeline.py b/openpype/hosts/substancepainter/api/pipeline.py index f4d4c5b00c..b377db1641 100644 --- a/openpype/hosts/substancepainter/api/pipeline.py +++ b/openpype/hosts/substancepainter/api/pipeline.py @@ -38,6 +38,7 @@ INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") OPENPYPE_METADATA_KEY = "OpenPype" OPENPYPE_METADATA_CONTAINERS_KEY = "containers" # child key +OPENPYPE_METADATA_CONTEXT_KEY = "context" # child key class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): @@ -140,15 +141,21 @@ class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): container["objectName"] = key yield container - @staticmethod - def create_context_node(): - pass - def update_context_data(self, data, changes): - pass + + if not substance_painter.project.is_open(): + return + + metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY) + metadata.set(OPENPYPE_METADATA_CONTEXT_KEY, data) def get_context_data(self): - pass + + if not substance_painter.project.is_open(): + return + + metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY) + return metadata.get(OPENPYPE_METADATA_CONTEXT_KEY) or {} def _install_menu(self): from PySide2 import QtWidgets From eeb2388475d664aa95dff4b09fdef9fc6ed17549 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 20 Mar 2023 14:13:21 +0100 Subject: [PATCH 093/187] Use `openpype.pipeline.create.get_subset_name` to define the subset name --- .../publish/collect_textureset_images.py | 25 ++++++++++++++++--- 1 file changed, 21 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py index 04187d4079..b368c86749 100644 --- a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py +++ b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py @@ -9,6 +9,8 @@ from 
openpype.hosts.substancepainter.api.lib import ( get_parsed_export_maps, strip_template ) +from openpype.pipeline.create import get_subset_name +from openpype.client import get_asset_by_name class CollectTextureSet(pyblish.api.InstancePlugin): @@ -24,6 +26,10 @@ class CollectTextureSet(pyblish.api.InstancePlugin): def process(self, instance): config = self.get_export_config(instance) + asset_doc = get_asset_by_name( + project_name=instance.context.data["projectName"], + asset_name=instance.data["asset"] + ) instance.data["exportConfig"] = config maps = get_parsed_export_maps(config) @@ -34,9 +40,11 @@ class CollectTextureSet(pyblish.api.InstancePlugin): self.log.info(f"Processing {texture_set_name}/{stack_name}") for template, outputs in template_maps.items(): self.log.info(f"Processing {template}") - self.create_image_instance(instance, template, outputs) + self.create_image_instance(instance, template, outputs, + asset_doc=asset_doc) - def create_image_instance(self, instance, template, outputs): + def create_image_instance(self, instance, template, outputs, + asset_doc): """Create a new instance per image or UDIM sequence. The new instances will be of family `image`. @@ -53,8 +61,17 @@ class CollectTextureSet(pyblish.api.InstancePlugin): # Define the suffix we want to give this particular texture # set and set up a remapped subset naming for it. suffix = f".{map_identifier}" - image_subset = instance.data["subset"][len("textureSet"):] - image_subset = "texture" + image_subset + suffix + image_subset = get_subset_name( + # TODO: The family actually isn't 'texture' currently but for now + # this is only done so the subset name starts with 'texture' + family="texture", + variant=instance.data["variant"] + suffix, + task_name=instance.data.get("task"), + asset_doc=asset_doc, + project_name=context.data["projectName"], + host_name=context.data["hostName"], + project_settings=context.data["project_settings"] + ) # Prepare representation representation = { From f8a3e24c606048883fa0942c01df1f7aff893436 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 20 Mar 2023 20:04:36 +0100 Subject: [PATCH 094/187] Explain how Texture Sets are split into separate publishes per output map in documentation --- website/docs/artist_hosts_substancepainter.md | 33 ++++++++++++++++-- ...ter_pbrmetallicroughness_export_preset.png | Bin 0 -> 45842 bytes ...painter_pbrmetallicroughness_published.png | Bin 0 -> 7497 bytes 3 files changed, 30 insertions(+), 3 deletions(-) create mode 100644 website/docs/assets/substancepainter_pbrmetallicroughness_export_preset.png create mode 100644 website/docs/assets/substancepainter_pbrmetallicroughness_published.png diff --git a/website/docs/artist_hosts_substancepainter.md b/website/docs/artist_hosts_substancepainter.md index 9ed83421af..86bcbba82e 100644 --- a/website/docs/artist_hosts_substancepainter.md +++ b/website/docs/artist_hosts_substancepainter.md @@ -51,8 +51,9 @@ publish instance. To create a **TextureSet instance** we will use OpenPype's publisher tool. Go to **OpenPype → Publish... → TextureSet** -The texture set instance will define what Substance Painter export template `.spexp` to -use and thus defines what texture maps will be exported from your workfile. +The texture set instance will define what Substance Painter export template (`.spexp`) to +use and thus defines what texture maps will be exported from your workfile. This +can be set with the **Output Template** attribute on the instance. 
 :::info
 The TextureSet instance gets saved with your Substance Painter project. As such,
 just click **OpenPype → Publish...** and start publishing directly with the
 same settings.
 :::
 
+#### Publish per output map of the Substance Painter preset
 
-### Known issues
+The Texture Set instance generates a publish per output map that is defined in
+the Substance Painter's export preset. For example, a publish from a default
+PBR Metallic Roughness texture set results in six separate published subsets
+(if all the channels exist in your file).
+
+![Substance Painter PBR Metallic Roughness Export Preset](assets/substancepainter_pbrmetallicroughness_export_preset.png)
+
+When publishing for example a texture set with variant **Main** six instances will
+be published with the variants:
+- Main.**BaseColor**
+- Main.**Emissive**
+- Main.**Height**
+- Main.**Metallic**
+- Main.**Normal**
+- Main.**Roughness**
+
+The bold output map name for the publish is based on the string that is pulled
+from what is considered to be the static part of the filename templates in
+the export preset. The tokens like `$mesh` and `(_$colorSpace)` are ignored.
+So `$mesh_$textureSet_BaseColor(_$colorSpace)(.$udim)` becomes `BaseColor`.
+
+An example output for PBR Metallic Roughness would be:
+
+![Substance Painter PBR Metallic Roughness Publish Example in Loader](assets/substancepainter_pbrmetallicroughness_published.png)
+
+## Known issues
 
 #### Can't see the OpenPype menu?
diff --git a/website/docs/assets/substancepainter_pbrmetallicroughness_export_preset.png b/website/docs/assets/substancepainter_pbrmetallicroughness_export_preset.png
new file mode 100644
index 0000000000000000000000000000000000000000..35a4545f83563332e983ada1698433955498a969
GIT binary patch
literal 45842
[... base85-encoded binary image data omitted ...]
zB=O*MKdV%K#tiu`5cM|qxBx>L4`rcg?I_ypmf|y2mAv<(+_MBLJj8=&vQ6!<9%X6`W=^3fmzPGoRdi5i>q~+?3xw)h|BJ8(V z3Kf{Iqi1K__hP1zBW z4AH{0uP3(Y#DtQG+@Xxo)=W0a%vN{6I5lioexkJ;^+p=(4Kl5oI=2nCc)HQKqqEbXuKjEZ~tT`mE(Ivvn@-UW_`SZH=X z=V&<{jHOfhdT{0MLMav}pp)AkBy*iDuDtoWmt1_hHOouSsdAHE*~QC zws*8|i~LeLi|8KsGGU2oVE*jY{(Ca(#8q4;)cR)A1}KiCbxPH9HRv9d^U}jSUkNdbZ99xQ}#V z<$j8kh2pKPei?a1-Cp1GR!Hm9{U*SJA9YjSX65Oq9D+W!i!yrA4$|`Ha}#ucU##rx zpuB+|B1W?ee97wy?};$xotCMQUmfFGVUzr^z_B%sw@sL|Zs+o2OB3LQh=`E;CGCVn zLC@)Vb)ja}baXst6+d7gI^EaTx7&RF{b$SdnsC76ggvh=h9sv8HlN>I7ETeeI1!`!-H<@=&`Ux_!LKNvS^W>YD6%`UR|Y(;!B zb-OTPhAH)3e>2|anD56hLF^cLE*6oJfgeFxTj0IH3ATgM$+IBU+4W@Z%y?<(S@VqT z2Tt82`ZK)tlb^dKddh4md>_YhUuEO&M_ivoM3z2a()9b-w|?2S2Is!##!l$*rRM=89w$rf{;1bG zEtfU?<_KrI{MT#OXvnV1oxz@mR#$^1IJQ5&gMHy+2skVk35e6kiXT_fABsm8VmM%m zzz*}tkVQ)AQgxcLFi(&ZfgD6>@I{p?$)chorD$2#hcpL2W@LJ6yDS$;DDYTKM%ZD` zrG)zRbZb+qE2iI9hu!BXnq-VZi`~SH07_2 zJSaM?jlcf#wFJ({aRZW}g%rd?KykD8DYkb~(=Lf-YdjBd*uQ)3;pD$^RW6{as--VJ zI!gU~B`rdZR`G<8_WSBKGO5n1_q?Yi*GDakc%)ZD@A=M;rnH3G#nszMu1`cvs{GwN zU-YgHMV7HdrkO*+dIK6Q1+L2wI~2pfwSqMebL-(HEKotm4RbQZPgTSi&c8U7Hv4{W zyu=9_+AS1)spiXtplT%H;}ATzGZT!++(8my?qtoy^%LH~s3_>DPtpb5GC1 zZ-a8V(t^9?O0J7Z6u5Pt@mUlRKyT3jyPC%@fgI)_71H9PN$Dd?N!MTIR-7-jZFY9Tjpy3}u7E=&eUW?p zBX!siZ)5MCqpO$p>Q5ckegz#(v1l$}LEte9Wzz;y9eh255B_afnRz;8ew4Ek=0;O7 zht6BRh?t~0@$vE3Tb`FCzzy9^H*-3qDO|oy#g1h3RT`0{n>tO1{CWJS5^O9OzH;Pb z`YjEYyVD45SwoC#E!RJ9_UqEfdDyD}N+e;mR z64|3~Izt~Y4W;rk34&Mf-YNYz?}_mEL5?&TL-ndTnVE&fhh14i!~NiE-^odBCl^>Q z@IS7+y3CL7&5sY$4ry9zYilHYZ|uK)#JAxB$Fw$rGKVBzeLAaeO!sa9iExK~j1bG- z0fQ^FXa-4X{1QO2nFb*-d|kG60CwC~?0tY`7HXxi)mT;gc%E@2I!(oGOS!7PJ9e=| z&x)bn-#CgLM0mcQ-f1vgWS~FtmG5(&fs^ykd2YwT`s&(RMX5ye>CBq?7{2mx_PA1; z)|H+JBO{}=Z{N}ks2)G=+~Sl@^s8DfPaif{3$Fe$VslKnx|}5y2>M~Zl~?u>bl*o{ zfc;9%4GDdu){o;=fsdgLdPY_?R3(Yx$Q9C#b5y8RTLpIB4t;$&8s)nu*Te{!7T4u! 
zO|E(G1I-q7SV4BLK5xpungh<;f?mhf)wQcwj`|*OBb)2%RrBQE^SQ>IEj5Dem;qmj z@{$eVO{CAjvSe|2(b#Xh!LF`cU+Wu*E(@Zqq7Z^lqfpS-h%i>pV8{ zD8g{)A5g`t)+`_op3v{THjZSgw%7|_bSY#aisM5hu`VC?c!ud)<|8ibZ>>J!JF$#d zS)wV4_f>gI4-H*bs)%mJUf_DPwA?{k*)D>G&fB9Zfzc;(SYQ^)J0`wEJY1dDwut%U zq%6YreqV1Oi4K>tDRByuZ@eg0q$iH~-Z4`{c!N<1$PIJ+?Mg?)UT$sq3{Fjb17yP@ z?X$XE+FV6{cNl3Qd%iQWSc#zCp!k*>3vaKurfybn-b=O5*0L3&8UZhBrDkoS|JX|Cb z9hu;PEfm!c+L!q+9Rxo2rdtU5wz4rTt+5Oe8C47 zRRPx~E@n@T6g#(r)ripiTexS)B`oxi=ccBp96MWEpH9VF8|Lp&RgB_sKFFqx#_W7e($HR3rfNB@k}%~B9Lz}f z66h)}X2_V%^q6x>z|WNRn$UPPkWYQ0cFdMb8ypad_EnTNg$Y)BWnxq4EOz0_kwKIA zhJR5e>Ypl21-_)93LX0ZjKHuc}(+N8?H1zWF5(z+$C%GB&jLV+J zp$78(v;2gjZ;+~Ni^eDONw=s&pD<>MwtuB`Ht*9%+hUWvuS!%Z|9SlNl7cWoPW6na zhAeY!KGsPc8bJW|v!3$)R8};95?`rfNra?P?JJx-AsvqshrA(Tpb`-F!39s63kR25 zAE>=tN!s`@jlC^iskf+am^~BU%tuZJ3!!u?5j+c+pmc+TW0$#!nOKT7_VQ(RGTXvMo z#m9U8+Y$!+TLhy2SaeOj~6V}K# zV0rX?#%52e>S~>Jnxke`v&)kR$Z^#)$aIcyjAW!D?HMOe)?!-B;4!bUK3JnYMCd(Ai1eDk?Vr?yZNVugp|7;~#zdkXkvv9XkL5NTc06YMNcUuOdKaIe z;BA;a$RkzG80G~6|4D6z=e67&e#phA9ogE zJmUF$b2UTxtv=W88^9&wy@ejtVXZcYjZ@%MKteztm%VC8BSwqvRpK-dIO_T03w68k ztZpiy$x`BrB*cByWSRr5R%f|9Jp>D*-F6@g=_x<0(PH{O-UzO+>IGq0jC~ZtH4z&& zC;Dfi2Cf_^GCLbFIx+lGw0j1WHr9JGvd5DR)lx8+ z`A;@kas=mwK!u&`X-!<@S0u@-wpiR^%&3f;uS&Bm#eH{V9(q+*H~}$#`2Mqj1!cK@VY1&N#BFp z2!tkJ*PV{3fnz7^;rg(h<~2>}V;E1Lsc^hp{&T&^GFbcOhHbaDdQD@Wf{t4=)~a-Q z5Rt#d{s#~;+YNFwH#fJo-aFY}5kjVtPR1){<>uZ5O|~P#Zo3rO4Qu~LC^Q&T8i+~4 zNV%K7X1zPl=O>2P!?nj;(rHb&mIi>mlCbb_vC}WY%rrT10DyD(1MzjlQf?iEj=9!07;+H|t1I$im4P1mzc; zBK+j1Tvigr2`5zaW~cqx%w*em(A1~1QB2N7rdDyHe3#kvA;y>(W%IHfmlO@8Lvge_ z`2LTaS9n=DQ7twRlqY>2rL7{@lm8}{g1&xFXrOaz+x-SJ{;H>vqD!52=PxxnmzQ-t zJw2QKQnoi}VneY^x>)oLcJ$wl7OB(X4s5aV#TdtjyhMYMH+vL7(r?P2qwq9CGn_vP z7iT@)aB{pB1v#`hI5PkPsT9GlIG%$cn8|; z5Gy-wPY)(>?25GZc@ZL`N^!{L({S+-W(UZ*1hRi3wIS+f+@J8lhsvepM`>O(-Q!Y5 z8WVw+&7BnN?-DCazFLgV$V?xxPEn!L(F*XcMTJ?;OKC#=-6V1##cd?@{`gW_5+JmK zhH0zE9h+H?XlLQd6-fLBQxhL|8qh~U*K2E$DkjWIJFw48RhKDx5<2p?HG+OW>*Y@X z2jPYfZPe`NFYYTW`BfdASd;y&{prwnfiX^OY3*D7r%UHoGPTE`FoCJrg43bL=d;xk z)9whsaylXbQ?GCqkvojHLP!t|b@hv9FhkAF&k#fd2L(ROkjO8W84HQ^^cEr_Q*#qE zyDwOqMpNn>C3q*tjE~2D{VBBrD(RmY1Du#y^bw>q2alo1il9SssJW+S%QSXVQxg#4 z7y7B*I_23iTOOh*;N~}>?7vg*JGUvm$a8>86Mvwg{akBv`RpzUgte+#-*#v^BSv#`&Nvhq2i#o*u@?Xl&IF;s$!OIUT-H)}oW83!IPL&!8`C`$psxMVb7!2p zY9&1_{jON~NkJct-{%in=9H#N=5sv;Dt%R=hG>-d?0uvmOz}ZL0lObb>9r0HQ5m8S(6c?U@R45imd! 
z+{A3a(Iy2gN`t1XuP#oNfM^LgaSC+$>8x3aU^Pp{u)q@UBtCHL0M|JF1ox|L8Yh|L z8l9)dMmC+fr`pT&$FsvFhq1Jm@zxu2u7~l9GeP5yd!wF=!$|jxCb#CZF`El6RzJn^ zYu5gdvZlF?J$n_i61l&>KM?QvIAE#i=X{JJ0>ET4symbGn3`_p@?UNMY_{b>^X1&a z)p5+LXHjW`cYYAt)+Pu?D!I)D6r8I^wt+1iv?pFrARgF=MB_ai#Gor;#go%A7&Igg zezQ8xywoBApZyx>&wL*GjC04*o694NgW8u*YZo!5W`vy_6Ouc1G0%39^llu!(05iF zy98P~uN8>ZVTSk*GlX~^52-uDbeVLm2Qmq^dmti*3d5t@JtBDFalYh_+2Y^&r8b|b zAxgl;u<45TdL#CJ9AY9hk4WdKaf!u8x2c^oZH(dZ2Z2vBuv+3ix$N z^Vw#5yAPa&@a6|^NL1a~+QQ+{)z*fBJ_X%>UMf$!FQ+i~SV6vPs5LX^=ul2GPjRUR zbpH{@{)F~4SVH++3|+;E_mLn?_w!XLw(iF;W&&#q&wlP&sR$JlfkF z{b9fO8X5J2u90Ced!81w{pOrii#z2K5OPEa9y#0E9sm~duaEt8F@UfvI-bN~H6P&> z1I-1Tcu;C=c5d)JzWl)9oRba*(ezFwR6{RuZs)v6pMuleEK>vzTGm2D4?3ME#r-9= zCOPck8}L3hP(S@K@}D}mPp4#YWhaRV`YF>StCKjdWB7Q_;jB@0gkhqiwqf}%wJcekncHX=gh zj%)VVa0^$Ug}?B0^yai%LcW*F_v-Rk2x-jpVnrROBti0gWXRs(=mQH8iz@cC9Vv$Kbz$N@Be&h%ufxuOZT-Wi>#-Lpx5|o zSO}5vSGClua|0J9mHm6hngbqEFC@Id1u8>XSLk*6=0glmQXLx2fj%ukCM@Rz*vfEk z7z2F!tZllXabmtSZAw}l>{L&x;Mi|(XgrVIjAGm^<|u(?3XoTIiN+B^ozbnucN_?F z_1Ff6)1Y>eZ#NZN>MU5dX;;kEA}ft(9LS*BNC;~uPpr80DZQZCAOIJ-8usuyBB*HgQD<5->lkk(wnGO zy$3?F@$uUY)f7jaL^@sk`Plzinf%iJ#|nHxW$t&!%dS8dIA{*KKzlT)<7H5 zU{Y+GbKH?2H188G(<|2*_yX49AY-~|)K_eWyIDgYGNJ9ggTp&3%tZWV99ijV_Ej{T zCGf4GZZXzhDW}PVUW?y;{j40%qKw(WI9-;@-dKEFojcWXVeI1a#fz$PYxg=@#}|iA z4cSb(PHm6qCN|Ua;LGfOPJ9I?&PelG#Q-;s423Nzkc5cmctq$)c`)mP_X0MKmj)al z>Y?J6WpwWIu*a0<4%w~b%J0WRoCxmV0U`oIC%8Ah6fya$7sv}`9j5%u4iK2^wm+A`SRE)VvA zAM~_IO0G_)dnhu(RA;MPr+Fe&hX{AvKrr9)E;?{Iwi6;H;o^&K*>+A9BZn*3}1Z_RaV5t z)%-=CmciX7DPep_S&v&=SA^BE?{U?H2!O&m?cR`erl*860xv*GKVmv8M2#0#zQk0{XP{c<19B ztH$Rc6aBVT^)h(}g$4EMp@=Fzr0>9+-WtRF?_gd}Y_fbhwMHIjr+DajT@ZqDF9cTu z7G9V4p`rBn2!fzwDKT<>hYBY-hflyf99Wx=cP=!yRUF@Bgjs`kP5fw!xGXFgr*qK~ zc7Akrd6@6?I#7{nHL_0>rg-xw+M@ctMPH&S>~v@ipP5!0*uJg0h#dBEs54b%VOn6# zg7_xQwd zJvlYz#j|u8T*h&fr&3I23{n4kEeW*aGn5wSzX%M++9y)1ni7KZ1lR=Bv*d)!RJOuAcZ`@)(KZl$`%;|qBT z*DPJ_+%zaF>Da<`bdi0Tw#8_;J50W*5zbj{Id6q0g57tdcUGyCe2zntm#ieIj! 
z%-1DJqyi27b()DlBV(~(d-E#jeu=X$xPtsPZg@m4e(j54q6!NOOG8&wQcC4@VR`n9 z(FVLS{?CKPrrv#PaF1>rP7oGFV1i)Lgz!qSDPv!dr&}6vO!}T=TNHkYPj@#Wt8C#1 zDhD>GCKyZ(f}Uv|*k$}WG$Od@AW;8&oHmMayYq$i*I6FQot571G>4MgI#(>prIx`V-#89P(vnq+t@jH)4=!Jyt86yB) z8k;r}zQPbX-+u{`E89r_gTm^t8U2heZBNz=wKWq{~o2T1YT$OlMd- zqxL~I=#fcIcLmWvyfd56=@a*SmVpgprVXSwQ26TMR;X>1A-rWc=312v)H;F}UD+)= zVzDRewoa&?s1x-Y)o=^^j;5$_A+Z@zEhXbbT{tQcXAY(f2q`r#Ko-M-Pedo7_(0Un zSR70j{vY;YP6(m9W@cvU>bQ(&f`gIwxxp1jp!-!s6Fie}g4s2@pFk(M_e~#DK;46p z4@H5Y!+`PGAf;RM67@mXVutssxbK7{{#rk^+YDAZ3`BU;l}X9El3yvu0l!@ez1_86 zmZL4K;oHP8t-}ld=;$-{o(n8`hr;J1XaC8PpK${+tWX-mO1qAa=;z!b1*oI~UNj(F z9zWLqa^I~&U&>-h*?8Jr=l;3BFw+<8W)0t6XnM^Sh4m#}d$IZl7R=Fy>=BTvL|{x= zt&s2eFaz@W`B<);R(es%a9ljmm}O`G;DQq}k71)=&kXiW7ABCGo}bq&+q#G1zxFA$ zq_|j-HWp~f5^>qSVB{R~hI9mfPn(tq{^|>(Rl1Z_*e!a}IICJ6oH^C7+d#mPHpV8R zc<=KZG5@7OYIi2AZ+yI2{~Rq9R+upEOB_veYOszL5kI~mECC^X&Ns11tj}?2@zUvZ zh>e&JyQ6e$nN&RNp_K*QatfyNd{v+<%_kRCJi@%LeddgN^m*HT=5%}D98Fv2+M`Za ztIwZjZOYGu@}M*$GGjC=6y~P;RsFShFP{lay3GqgADNKJ-?uLHI*mxLCyZmAlgH-< zPw{{h@d*iEK(%+yhh6Eqjm8{%%MOkSFX$dX7XcJkeb|YUQq_-wlSAJmbsa*4SJs(} zor4*DPZ6J-H7LPex3mXfUX)bLs@&2CTWRddon~HgrdAJmCqa5!Gv79jIb@8^_+j(R zkG?1}P&+ngr_k;T4mH;|WNOdbH5(1!B4N`?u4A?7zL`{%YqMyR2$-Ft> z!sO-AJOOF9(+KFpeBw$MH{)#OJnB^Rfe7a+ zoips5Nt5%qkvX^4*sDZ$%?WV!i)qsf$xjp|Ec8au?<4|Id|WK^m3%e(mPCqk&v{wof_%l&AaaMnHa#fsWQeZbRigq-E0N`-Y*~$s!JqJd_WFMGYfU))3M1w z9Hp8bOpWr)RS|_`f+G10Pry_QHw8|*dBciob*&cAXm)QYU_{qL(Fxi49MLAXp=<5b_efGLXA_=*nK0vyur3ygm8|ewfsYT_7iQd;9}aFa z;+F>;O&=+0O0R+f2vX7St{Gv6wQaLO?pRmA4e`UVu z-PC7%O-!H}!9iu(8gUe&3g&ASpH4m**O5yZL_};7Fy&=}-g<4(FQFo<5NpYsC^4M^ zG(Z1}YXLwL00R?K;uI)(U^P8X4JhuhZhfw^0|>40v=lEv=6gNzkpqz0K;pVifLtb7 z!!!G8?CbXH{K)tIu_kPjaVT^O`s} zE9GIzj$G+BUh2x`_T($wh>Dco3sCE`9~)Deu2Qt6E1WwpmuF{NC8^vF;;dWCU7;C5 zbMR-ldb-r$JkogBF2H-P?7W2`{v&>w+cy)coDy>VBi(0FHqOsLKQ1ymq^kh^GK9a1 z?7?W{FJ=&+Qo6wFJ3E8%rDq=XSBBID6CkJgr1PZqwC%+80)dN-nDVxX@jBTQbGrwu zBN2A7WjSY)KALSP94;S)gQTH36A@sa*_i2h5s%G^A@dGiCo<=5-EM$=5Rl#if-mtP zfKIeN-ky(_ zs|CMY#dn!#+I?58!NS7oihPxrCJN-2qZC#O_FwrmC% zd6KD0t7B7DFY9%^YeVJ&zAwqNB%5o#z%=oYfPjFIkk{ja6Or3y8SaWHE({F*ZiuWV z=^+@4r(rN9uFU6pe{RO{w!#1gd-MQh!I0wb7+-vz?$bXZK7>Ap2-2!yZYml_itRs@ zJU2}IEhSxa>-qb>Z~$xV#-*dL4`)$YUw;Nb%l&_&VRK@ zCc0HM0W12~P7^({TJkncTsl43Q;sst#PntQK~im@ihQ&|l*P52ZmI_3*hvXMGtL7K znD?*d+HEJvRH6O3;igE1DbK`=q}Ma!g0(832?BUFM50NUJ9pCyIXUbOC(sgZO<>X~ znv_?S{cs9>9Hv;pJv4M<0L*O2_}E-`rLx+WSugNVK~=^Z1LhAF}Za%so`3 zC9}>YFIWM8#JG3&|Jz!O7N{%jS7vO_kY9!1x>V3y1@qkk8t-_x2*q zYCAX_0iyAhl@-t$r!>(&S}~Z!yzeAxJ+H3-5oT9X=&wxf`eXfd*_J@d(5FxQZ+@(Q zOkiEMC8WBuX7Xhy;O-_t{gY1fE(tkQtPcrhBLi8o8RDb0eXJVD>ny+D%m_<*W>NpC~`pX ziNX5|+eLHNdOM>70N!&oRtW=G0X|kDpqqcFPsfUies4%G~d8S5r)P{pmiJ#3WMh&M*oBR*J6_-FuOsn~V^$FT;)6e&nEM9(1)=TXK$Z~RW zK-1+_iF%#w`SA|Wbg4;oI|#mOG!P*dpkZJ8$AOK8x*`^~8 z*HnU?FLpS20Id~-VoZ5aYe@=eCU`@d2@r}P@Qw*@wtU{M*wSQ;T<*}}vI{JYHg>b- za6y60g1<6eR%<^CJjO0DCPZguHc^G!%)l(K>}`bnK9=%`xlmOj)5xTVn~e>9jlk;S zWMhm_&gj8W~l(E{4j7{E5Ku) zXQ-lsHI@n$=>VR0lG{%6-SfwG$zX4Ge=nj$&F8M!%y5(qY#Lsw*|z?OK2^+Y8b!q) z;{02){ot*dPan%~1ET~cLqn54=#tqg0OLFE%rfCTyE<8cxZ{JClP)DK_oo~jPHRI% z$g{WL^iQPyDXKIkGHX?rR4h+!AbWIe_2}qejn~wFlp$`)=*?qTN;^p>F zhqJn69~o&q2^1Z8=K6d{dnNTBC>e31m;IWr?6&QxfO$7#sFNiW#0ubPh*E$nWU!`&G&^bqXg#aSr3STyI9VL{= z(t|w#dL3v5ddusl#_j|Bg>LI zHc-kojYq~j3p1smE9Nx+eg(cTm3GS?{i$S<_vucuRr4@=zcnt45#?HtiXE+0D&{Nn zO3=wxp*sxVrBRiq9DZ-uJ@|?Xpx(8LPtRQU7QNNy?%d1K#%BKwJ`rRU@jOOuk`ZOwPvV>_9|5Xy6=*yK8*?SMF4?RpKn$ zW0F)TH$k6ffJzrxIv4&cO3y7UFgd=kva)LSgO3CUL7zDnh)bqDyl0_aZuL{hCTH_? 
zWxCkZ)QDA+e7n}uTuqD5*;n?EUjILg!4bZ?6^Nmsbw88gehvnMJZ&I`RB(#K+SrbR zYP|-7H+Dg+|JHy)8F^yMsaj2MfOb@@>Cy1G&vf0nsjbMMIn{d()SDY%c(|9T*YoTR zwJ`fl#|pxiL#&zjwsayECPLdqmTunSCf@;VS0X$-ES?N}$30Ulh@^g+bDvohg==N*yA(+FaVu#u(xsn#Y*TcSQBaghrA6!vu#G+cBd=@ zmbFjM-iAQka~yX+p?QpFO?1#p&5jLyq}RJ@(3`l0g+l13A?q*COeB-%?d?PJfu9*Q z;|W=8cvEKpcNv9-(5CS_JaPCC_@P2P9&@jzEY}xCqEDnlaJ8)oy4kI7^b+t6ObXWnXjOL2RdBdSgJT+p1D~hPpzFad z*Hy*iMQRA~4K{3f>YIE`K z5MnmpFkwjk2j!YoYN!+pUj&@4rVrs*VysW!?7dZuJxYUc{)zr1SdY>ta1@ zthbKm0rcLUrs;+m--JK{IUit3O-V`LVQ|A>^b4N&&_E?uaRyan>gEEHe7n%PJeuy5&s=&p^3}sJu#zje3t770y&oz z#!VwvO3$L~7)h{(Vtcp2kBIgCxXm%&9bEtKOV+{CieX%pcA9YoQdT2cCOEXJ(x!KO95%1zy^`G>oJ86hRep7oEiZ*( z-f58G$pJXma(ENbe&?<)WUi%^I*knwF_3|VeDPB?Z994M7*O@GN*Qa_yKj^wE9O+c z(zSBJU)*%>L$n0K4vSVfZh)J{g>cgg2n0QmN4|XwK7m2kGHNG7pD&6`4Q72y_X3;L_w0a7SNi0jnly1&&JMaL%o<5}%hLPm#3c3xye~w&8 z0W;7qk`OB|%we9k>6b}16+CW0B+XH0+*|}#NB}}-K=m?M|ZR-kY*_rQZJ7tQ^JJBmJ zgX?edNfLPn7!j&6G06!`#Ug;i;gqnRyuug(>+%eJhcF?(Cy8GY1{p6$=^Gub|=ly6xi-yrb&8Up@zh+Y^4 zRe-K3ieIt>o)r*bIMCI1k&0%QOaz&@yR*#yoXw>>N=5B45yc&?C}j!no|V8#!Yp+n&+XtT~FPc1S|u74rx;92nT=cFuVV z!Jyj+zl=oD?f|?0P&)P5C7_cOgLfp%)kxEVh}MCbTN6W#M(5Y|#ti~3x{_#Jx1vc1 zExXokb5O>Od%Yn9-*FXU^@<@PNupZ=&x^hpA_V=5|F%hR)8;y1I`>Y5?Bn(@cetLW|Gu1O?C~ktB=Z zk{*+B$G}w_&XHEYH+3+TYATkW$c!?UWSTfbI8gF-ws_T&lX%C3+zk}~w9#L*(zX`B ze>U9j9WK<$f62cXVVdXHHL)oUebv(A|CLLXxyer=rQE$m-t12W$4QGa%gum+%}vWn zk^~Ud-i%Vv-CWsU0|l|{@8LvO|Dl%n-=b322P_1DxCVQRX7rH%GkFB?e#w7PaVuVc z0{YX?(9qzZ;DEHGWDxN9rrQzWF@-;vo&VT*{e$28d-g7=jHI?oGtuNgPDg^34-aQg z4s)CIl#OJA6EcB!tB(fgy`sBP^7!Z|nCJ;JvzJ!@NRgkpnygFAo<@wj_+g|G07Bl@Wh{@Dje(7>(+qzZ zSGR(*4_1|pns21_-FTJP?As^nD6!(+NyaOxyj51OK2hWz$|)Ne0oBve4W|rDMFXQd zAw=F1r=U2h%>5A>Z)pbU=U#DCu8;GKEtKgctG*0x{DZFr$c4KpBXml7HiMl~w&)}@ zC|)qb&^B$>p!2P$95AD*;9W@vDJmanwX=)a(yPhU02x6u;1>s+d^wP9A^5wa z^02eBb8Oc7On#-JVXn#iaGLesbY03J$40_%P{dVd8p$>nv^ z0{A92-4TcxFHX=RKpG2h?PQYNf2jaQY;M`n8I&9Uor@}E(rg5tiqQzi1=hRGk^i`d z-I$T<5eDwN~F zFOAnRAhg$=^7cqcP4)b-K9ay{)Vazj{YM7}JcbopwA*+HaJ5dwfK2%6eD?v0|I(7q z-vlb>BJf<4ii!%E;o))|9GqpK&*uud-xv1vd7Uj`E)MVr2G_G6CPiS4zCydTp_wPR z>Y(fdDFX;sn|&JMoP_ZIimDAA{RnmD&@AhZ^^)Lv(39tr<6+j7-4f^Ar$2E8tmriA z1IETE=BH1O#(-zp^vBWxqYGC%R-Wg5RH=95CU9W=i;JW%SdWhleInQwMR3`&*v5RH zKzq)_5mBTT_S)J+!=8x{bIw8iXQDP}RB}Z~bb7OfNyEqdi}uy1#363&T1(OM*+*H> z|5rKMVVU;lu>gk_8nZ52%~C0ZTtLFl6I`&W%L??LsBRkwlY&I$K~ukUx(LWmxw-p4 zg5YPLaOc28P!Fm;A;_1Z#l;&BNB@cPy@hB<^TZ4^<_; zih>CT=ydBk0qjaYt$fiXQL`jiuxNEaP(I?Jx&bjYZ%s)b z6ga0szOJ#I&jj9n`6SUD;jt|rW_|ulzrWf>r+{xBR zJ~Nt-{rWQEJk0yc!S8ZtTTmzmA_RuEm^|F+6rJ^n$r}^XmZi&^I{_$fwr7*=?Lx45 zBzopF&)v-<4){i&5VqDI0Q1{jwuRNgUa|?2gvxyVkKcYcoBze+R*r@x4Zo~AFQW2% zj;w#ka5)_%fhH3lftkdwkbO#nDKq9S@!qll#z{G!ZA+a^K?*#xrHGGX7Dn(vrl#jT zQ*45%ER@a8F4Y_`d`<_xqESl|pU^iGYFqr4ru;1fQ%(@Gq{z-EiHbr?ZoQtaL{wmv zWTpN&PI-d8`~6dU_WLdX@b&vX{O1|@g+@~ZHpYhhFbfA2diwisT9$8!fL+UXK|fr} zOsHc|kf+qu)xQvLb`Ozs7Lo=Ps!;X6y?N~x{TbYiSK*Y4B`ZT(SVhB!Pr7-t8iigJ z%v|$+1n=fUZ)J(5&;GIM%EVR+-OJRY8z!Z81v1zO526sM7VS7>1Wt6<|8m1~<%Tn| z)SCpe-$Oe}Y`PrXTq=oB!mNYOsQKd!%nuaW3%6h=6QT>^IhG^<+RwE7-oue&ee`*&=Z%QH6kT_Y|GFZ@crKF<+} z7!$=63cje7Lm=2VTj}xU2A6`9^CPh-+Dr3{7~nrTuN!4d-#F#=H55 z_E}qBB8}*M>lD}UjxFf3rNg)|TcFp}!sC7Ot>|OK?Ycer*~|7MO6NX`0A}TOqV-O> z6xS)I-$%96I*>1H>*So)hcO9^!nr=M>~`0}mI&C0RIXnp7$V8H!cv~~eJtu3X` z7m)#`wcHQV=bzw_6qzOR2%Zi{2*(o=b?9*e{^J_0*fPo{A;^eU*{V-hF}FrPU!9To z;7Dp(K_Gf3PcOH|YVb*Y#QDUz@{=E2b^CZHw|zF28Jj>RS8W&|kWK9UjXP&xR9gK$ z6z+jKADnMXQ@HeOnr2?2JPIZ@=B)yfqCh019KSCSw+7VH7kisaT1w7$R#WiBkzk{X7&jgVnTR znL2)+ZbGlgS6WS6d}F(7Q|wr{R+~6NPf*UsvO%o`1fgT=(5|a!9o`T>vNIWENLgN^ z?&hDag}u9{BN`|je07RGH8sWToFba>{qMr37hAZGZ__`n@HeXtXYs7^_khH>GO)SA 
zj4b(E(0!K{{f&o2<{0yWC~dcOEPf1U@zjCgtYPTY;p|2eC`JgKq@{#YRi6*0Cd{M2 zFtoh2Azb7!w_h%C#rYt1uRDV1!*LS*gmi<0)ut*ig(dqjh4p1;>uQxPPu)jAK;wTn z1~JPM%aBul}iut4hXh&Q8J>s@ItSRjPN$Dx zEKdHgcmFUnl-?O_n|6 zmTX&r*$QZFb#)a4dQOPU@d=k2B2513->RiC`1Gl}aS|hv6)*HV)QSGMgFUwC_vo^y zGGMk)ClO>Qf!_Jrx5mANg^wNx*Pm{n$kqP+F!thyKa3eXSY1uaG!_fvZ=n)k5g`Zo z7Xh14Jo}lAE3)e?z zaA_7;O5`is5_iOfvRTE`ExO-X8o4kTg;5B3>X#k{ec;o9_=kRqKEKXpFLzEF8)leUvPaYu8OO)mm7;u&$_;Xn*4UJXt+K;pxI8&c>wm(~=39 ze!P`dPjIzGNx!S)L@(qNSH~0RCvOYTVb3+7&G&J3@&gOJ_0%~Yf&bT28QAm*9Mu^C*tug% zKYMeSzo99xfwi@^4kNC@;Xn$UOF(hZc>X0W`tV)AJXaRVl7@PsxRFhMnIhi$7cmW~ zk?bW=AnWLr;ib~Rq+HI)sirkjs{ev!b#88M56cdidvUWHFy7hmQH!F!zGDEJ6sL?_ zC|9+U2+jYJ&xReC+$>%Cdays$0Ujy0ovO93eCVZKr>S0$Tn0|Q?b zFu=V*_v=M{HgaM*9R_Ub(i+%t(Hf$0O!%cd+svn+K<)ct7o(fC^>qUN%-Y&pdTq-w zuA=k@DR_Dwh}l{nsqDNNdljZ52NwTZVTdqcNuLclc0Piv0}O6{IS>qS1#=E zrEJX^;Zr8e(zT^AWzNf3__;p;7$xxyKtqm@7o#1sF&Ltx9qH;t#A5|k0A!)5!`O1R z_bN(20yQVzE?=o;J>D`rJRA)jom|K>88(I29_EZd`aE~X7Xh|Z`z!%4e8H}t)P;-9 z;xhr{sLuZ-@84kjlsB*nA|m{=8Pe4;>0eV^atklt!2n&uaEuWbx6>lNeEy@e_7}sfUYlq|e!)C_lWntq zfZYP(`7`@DJ~(mLYh76m=BMWATS-^8o`FxUzg;s(&~AI4*4Az z-thAGIt>$_g>GU^L&H*XO#sFp;P83vIfWBmvcQmX%#HG_Vc5>&ot7#iOA<;!T^oUy zJiZy0oV0MzuEvF}OgVsMKwXEYH0~**6>4%#eF;h}F`agAr!3v7ArW)OXbS#0KM1%A zFzON@vGpLeF?<;Zcwld{qV7RyFE}Lf$vCmX z!alJism{Pz-9K3ACFUd&M-s0z;2{^&*pRY@at;c4eQjDrTeaGC7ohs&X zjS2;VM)dWFB%gKdkUMe)R=B|Vh2MR>3_Fq~DV0~Cb-`nlEMmWF-GqLG07m0qHldAP!AvP$CByp~d>yq4pe+OsHrTQ%wZ$zf&(|fkV2)Pkucyg%U{K$tj zhiR?dz_0Zlu_f@|xU}~wVgEF7B}JFvigmuHBSTimTX$S}Z5o1@g>)Ym@AGp(QTZ6v zoo;nT=m|Q$BA(Bp4va1z?7OfyKqe)Tpw~&!5z8 z?B0|wdG)C!b<7sNbr@c53;YB)Jb)&SZehKqZ9pbGbU#oTP|9S&38c(qrD`^C<+ySBnE-neDgcmC; z4>}+;Y^<)b;Br;1c69i=0KVQ-|9dy7U(1&O!2y3TuU61Qv(QNGCOB8)ECU}8(|Ox# z$w%lrnXjXl@GK3ua{)Qlq__>bKDN3jr9Q5ml&|!K=A)!RoaL!(f39a zHegHL3_wVLC0R(+$Dbpm^mrjVUeM%difK|W6YYdgFaNz4a(vT?=_$4VN%SPTZvL4z z@4zmnprGf$=DQA#hP{t&@B5&zcL9p^LmC-b*^}*Dp+<*=KEQqMbu=~Ox!Fs9IRd`o z%pf`~IG%kR3V2+#Hc&G-)Q+`IVhv9?O?S3n>Y4&UPe?M;xS_V^5he+P(jV3ehP8O* zt4>{P+$H^25WrSsym80~Rm{=68)8%{COr8LY@vlj+cFhI+LL<_qGvq!TZ3jqXKl4d z&h5Hy|M$Q%aeZOrNH2FB?2l+!geIX}%Nuk*Ue}1@dnl;>yzzEcxP1-MnYkZ8NB)D$ z;u^N4qvP{LTHr+GC<70$3J}_!)0gH471^2IanRTMU-wqWm&qF~D27lhprk*2+If?f zc!8x376FceqcBIssi1K7njl*2jdC~;>^&I`U;pAxlxu8wCPajqC(hqnjB*ESc;-Ad zu)q182(aql$OlnG{2n*FDB(c zGW+r|bBxd<0+wu7;HR`eqPvNRoV?a8^}Y@B^J?Gw8P@zDwfLM|*hndWl>*M9NuHpj zE6sQwtuZ67H=hYkVz$P_!~g;X6R7yo$zSe&FY^b7Grnvvqrkr}I}wPSN^|*gG@ZN^ z&s4CY3t)knWs2_y85CF%<%vc;nh>{Hgd?M{k*X3dSY|2s?AyAeFkuGVuM@V5qh7*2 zCmJ8nLM58vSo!>-X*p@19vNrIbb_d?jtu9~)B4;9@nH+>^Ebv%H{d@2#B_p%jDOS3 zoezw^5i%Wo&0jFa7IXNLO#7tXR5VnqUCLnee`ufM?8-mEnLo*Vn;Dm*1f2x6RG6ir zlt~n=hT0gdggR?i(X6aAoxi+ruEcK^q=DI}%T|zp{^q#J9PX%(0?l;!c6rQ?K;*BJ z{GWly$t}=BA%BkIE};QDX?2g!P;0YbORS9%Eh7a<@H=f(vEis1_i;*GHSe&(Hg&*m@;%#}npFbU?W zmAiO3pL^7xDgc{@s4{O&*1??H)NrZCx*JWt-)QW*T6rjux>7)XDpp_i&=3?V#2+C~ zk^lMk?x&+%wQUZ5q2S%?FUN|kDs>yo|E(xNge-kSnZ-siEz0#!u9jRl&P6}e`>ukB z+!8e$w}pyVQ5-R-iMzBNaKO;SuO7_<@y?w?t>a!7^>@YsF0(`5%%?iPt}?4*4-pNG z5|WS#-Irg9J2vUePRjQ)f6-rabRA9N<;-7w$*6$k1>%&~@jCxf8K)GOLbGbA`}j}W zUhWxAPd&@2UESPXM3w^&IW5VqY7$Uoy&8gfFkxpaVrcFWo!-qDAgDI?^|cDN?Di9C95m{VvW) zXZPVv3Dm6S;cLG=fWLOJ3jw1xuaUmCodqWLPA)H4t;0G;0+tHJV)7(SO#a7uvfuI& z*E+Ur;r~Pq0ippHH_QdpTQS3vw{B*sHcSOyxLYBCuqOZMJKwF6!Z%uOK&S4%024p&R#%?5}DMM zWa3NGF$1jZjY(iDd(I!M>{|X1J8+)h@%>%Z;xDKZmjH>MBj!um>BEX&X{YDpydT3$ z7CniKApbl@f&^uYk-nRKHftS472bimb0R5C(pnk`1+fd3P8o^|t?Ev%@i8huVjN`I z2=(b+9PpV1BPQf1bJ%eKl3b^!YIrJ~$~-@f>{36B>?Q|wq_polU;s0_TD$_k^P5*` zu6w-I5piG-yKqZ=N5-s-j?C%x+r~Gac^B4MN7d=OROF08?armV0i#WH%4}A8gqtV6 
zn~gTY^R44UvIYOXeZ38)LIxi0E;JYHG4Aziuh1=2)NbSAb3^-_!&@XB$*%i>;Vnq0 zIj~GhV0cS4s;LMUT^3VC{J1=QT$a^+GBg~q1>v=pL_DJ)w6b}9{G?Z$QTEBW%q_Fu zS8fi|{aIbT~PF zP`ed+?79_nlXVHl_n3AE7F9lQGsqMX41nM(OB+$<9)|s9SZ~C;NLy5Q*1oEbBv z6}e)^{3##++6P`e`u584@$pNE$bsbB)NI}M*uTvR9}*9;RdW_DtQ>xGr$mKm%uJ^U zur9DQk_nRw67dU}(*>ynXC)H9b1rh48$$Zhy`6uPE%#-ke=k0MoB=`%52L}5bmZ-k zcBq=!cT}gYL#35v(|}ifBgF?1+xgSO49#JJQr#@%SO;xJupY6Z0A;WQFdWD;ki85GO6o;Rh<|Poq=imJG>GUua$jJ z$rdH%rB|IG>=&zM#lMSjr&1`K0jTV z66{B!h&QF_(*)O5awl~^D10~Rvq_EDAU;lJ^}VnXGsi$8ki+9BlKxaN z)D37ZZ9&pn#VvKB1BbQ5t?*E>(kB(6s11-u=Jq(;}#VZC1-X( zF7cjiKiNXeO|B;M8`^HKTMgU(bH@`vCIskIWH^AHc#gLN_G*e6T1kPLI+}%wexl7e zo&whYraS_SPTtV?({5i^HhJ)x3pR!chfZiuMF1QAejSBwasd%x3ru=L6pTPlpQM{ymI|GNYniTud{_f;$`DU{}Of z*zqR9XN(Bd@-Pv)kr>XTf&0Vud{>30f79wN*(o(1%VcaN>#BLj(rEkrdq=9MGNb?) zIS+K7^PDcAELg|}EqR&pr#SrG?rM9w{F60d#h{7zNcDzwk{n@OB zh9SD_(4HKST9_@V+&`VUY3B(^S$R>ZP5mWx1Smu#1w+kYhrw;ZVF0S&!i;+{RqW+o zzx|>9F^0kWpKyQ+i|qxExZ1aWjc#Q@S3?AuUy59cIV)2_T`cEKh zfCU+hvZ#A{ar*cNu7Iom>DqmD6FWOhXe1mr&f{6A6OcE$S8=T8HRv*mSNAYx5#Jz? zFKHuUHDsmP4zf;8rFg!AZK=BlNa{n?koX#;2O_{L-Z`8D zToCZw|0mCu7*4b7an1wkS zfdY@>e2cXz_=ezy&UIRxX>D$jtOk=PVjLO^|E_9j({ISMw1Wvb$5-HPZyW#Nj!^Opv|LRUHSYGP(@L$6({5C&pS#WJ~Wec7I)ELCd4~j@pSWHn&<~)>lT(6qL_>v*~$QBXUvHJk(QZ zk|ME*dgFMk7aL(UWIsO0bL4YsB z>-BO9kur4jG*UuWqS96zYV-M>LZJ1~{gC2}SAg#dg7Y%9K)gBI>i63JXm1_pOx0?5 zeJS|@t#s$7rZ>%6uMWKK5Rvm=IX01XU$h3~S$lj>Sn>?lpK+pz#A$3@9;4yO|KP;A1%EQ zLR2FGFzdia?-rwXvaDuJ87X*nyao0uY%cVpqN3K?PG<^Tiv4#E#{k9pfZ*(?`M~S+ z>xiDmmeA?xr^Dv6!_H6C5}$qNNnI{BKQvBuKw=FjlRE)v%kkAl$2I%z>zQ&Uy_1bmlJ2qOhq~p}(Cxfc3i4tjH9G>jR;~i| zppe~wPRCo=KnJDg;c%MnYlj6uhiX1qdnk198RwD$0st9rj|zdiPZ|@OQd6VfHBlU_ zdVO-5t2`TdbSR{bp?l|{JB3@;Bbd@%kNxHjIbQn%dQw?y*trMpyB^E5>1sCfyu^3l zLxN{vD&oe-4?-P(U|S&e4m&Au4i5~r#w44(ah`&fJ37R~t~s2RP7|%M<^3wu$Z-}g zq;}@AKE;%^ChNwyb3UG}OWc04K8s!^yG?SH&7`g^MWnI z1r2)e&Ox{XqgoNIFQK4m4s)2voojd>=1-{_`j-WuQsxHRlQ^*|xu z%?FwQ#|myB?GVM}q$TXhtx|p@sB<$ES-6 zXOb9>#%Bv>7WLnsSFMXJ&WGbDbttVC^y)i@k+jNJCt~|T>7*hQCZJ!RH*lJ0a6NYD zn}o?-D=Fjyz5^U8GKe{q^B20<MKEQ^g)z7BBcp{-nRrvYPKKe)%|8%~0pX z7C6;kQ~s=q1MYTx#r9+As$ z=fct^6enhp7nk-ix1)&0y;7mZ)4emb@4iYfNl|6;nkUWvef}S@VS z)Q;rNf==~B`A3iG1zB1^Y}6F^t{?ZwEq6ND&-@C+bQNc_wn2sVum*%oE{4$KQ1t?E zN+zQ;PIFb!tz}_%A0KhSe|;Y_tqe9YR51y0q)cDrb;v-!iVm1S%H`rKjzUh8o(Yh; zRw@4}LVw5u`Do0eyCb3{+a37@wfc?-On9$Cl(Iu>YcUxGJKXpNbQL9gzB4F71I<}^ zHZtDKPf21ilXILhGR}$CXX!@_HvLy94}kKHfPlvl*pKG5!7-0+DMdjZ%(U0|fjLw* z2W29_un4nMc3DlH0asA=RjfP<{ti9$PCOnQWI?t^aa)xbt_)~C**hzpg*SUhja*m} zrXJZ-daEfnh)$W9I;2afA@$WUPNC9PcjP;X7Z!G;_fDLj@bEhM7u#vtQLM(m0vC7* zM!n>p{6SuuSG!1EJUN#Igr6qgpJrQ-;t<*kK1;Xh##Pnf4E~MGF4GRYM>ZHNAg=u=fyJ zCg7i2)Z43JGT2GkP=5z~)WBG#Mg`6Ir3k0O;dA9#$k1y0OAf63M`UN1k4EOIjXunl zh1IV2PeG;k59@#PRVx>DSq-zZE#lBbC;`ZKuk+n1L{j2IugRF*`aL}|tls3gIJjeS zW75!Ic~CUlVR7*J_-jYOY2B+MWkT+{Vw_|PKd!k`@dj)7?aZv&(e zHX5{oAg<^q`$OeM6nAi`UtAfy5Ay(_{bea1i86tFymg-I=Qz_af3ow8Bww6x3jfjh zw(SZ^=7&)6e)(=<#=~lqXeA>{Xuqr|PER{V<_cP`*&-I1g+;Z&V>Ko#B4{Klmj{zj zE82Pb)Zt)={!b+R<%NN{boD5gKC}Iv@$%DFBiL>)E?$4Nvke?rv38do3^WR_Cm>pD zLHQEuFCdME0dNKTkF@%^lO9Z>Z}qMTu#WLXhP%`%7{OXarnIsWSgXKbb23>NwNOit zlGe*NjT`%X$KqG+ybb?)93)M0tpG#l8JTL~fhf-fG8Q~pxfr-Xj)`aQ`1M4xzBHUh z>di=8DOCb6t``f)^qWnm=Wa+LNuEB)8D-k}%FO)?wbhR*`(TJ&L%N*~b!|)x3NG>+ z-9{q|ngTuVF8U6W%g-zlm$}MdMg}J0h*XPU z;uJ~vF5BU&C|Dx+b$CX(hWajTKEU+TLUi#OCzDH&tEFIRKYS7oWH#u5%m$SAqa}hQ z690?TM*Ef22KgMo*-=d2vSoxYK;}P5$QNUld!hgj2I>64AYj)hGZjlT{MsdVHOXKb zxM>K$cKVR+!!jW5GfEKJoeVy=B4g zP)SLQqewC#ZJmcB{hm83A}}PwDgDXpo2TXN2(!=n7@s+reWyRheB!FM6!|bZg-y>5 zz80N~a(bJ`&@}~pUO$p_f=%qC?i%I?93Zs;b&=W-2oB4Qct|~YO`IAG)VH2-+1UR6 
zaqb;x{k{H2R*2vFCux4v-WlV3+6CAwC4>&K;8p+{qMjTf5s?Td06$s0ak)glC0fw-PpJQ?R09(b zoC4@f7*Pps{}CMMR!8~kcEW0{f$Rw|EgGTxjw#(f-_09%W;+;Km-GcRZ*rDM@jEHC z0C@)8y3!&M(8IB74_JBz#l>>l1yy#s;A+{8?>IDufA7M@@xkGl~S?0Qs1IF11J^sy&!R}48ki) zh5NZu5l-p2O=kHsxuFFp6>YyM6`*SfLniuf9br~50^cu{-2!w9XZ(+RE3G<;F}I|0 zdp3>WjtM^C@SEDTTcv&$oVwwYLj0Y_!Ps+c^%A1v%g)d>glp)>%zE>@JN}1O0xV=Z}(ear0A>`XYfuavBbN%(%GMtZT4QDWXxhSbyG!JT^lK7 zY@#os-^YfVxi_9vSBxEALFYsv+zjQVt%O+ELN z9lB>i?eSyWH}y{cCYr}TU7AwN)YTLO8?K_su7zRr%oAoI)JasVC_Z%hS(yVB2?-H< z>7AssMx1o*123e5N7)G0?@)YFf@^DeL)L>TQYO2;7dBTi@w_g5f_mZOYVXTBPhEfH zsu3kL==n_f5H@w|(|gsGvXZFEQ@RP>h9U|x9)B>>=Y_MGtqvqy!u%s#a`}$k&?{J2 z#XynIvtnj?poSuLjA-eKoTL&=TjW|JHx2MLl)Za`9czg1WQW0Ez+%mNTB zGQW~9aN>B@46n-?FXb8>Oa;g)Yi-ngc_=op9ZKKw{_3Qm$&^#XYB%|p5UDgWAeX->Y8WvJqvFU$ghN5!;{G(<9;LOKZS}M+Ph}X zL&t-hoXqyBmoEk3vsdHzV5u*8-IUFBpq)&w2J`pT`I3uz*$@WN9wbd{EQl?gEQ+Pj zP)?E#H_C4FaqG23`~qyP0Ls+j`wI+VAzuI8QU}jMC|VZmoiA@+>`~7e3v&Y9Ur>gI zIf#nsIW*oVzLZ^0VRZv=m4@Wm-U4S40jn%SCFC|~l;r2BuH3ei*UtUa-a5J!c}aam zlFN1X-}mUNi)Zrh;FK8dC8JL72zJOhWAK=(c_4?{TLu=mCkh z1X6fODm>B;lN-<_6C2Rhx{4YvA?|Jp_IH5mdhniEnFtE6j-)=*A*JdwZN&{*3uO5Nv?*NJ#=7*TBdPr_an4s{VEA-lgmefV7voU_tqw+4BQ_= zP9C=jnD`JIjyL=7Gnj%Y$X#7sc_RGKe)>Of^ykdsn5VI2%ql&jN!wMfpsvO2^F$98 z3ViY*DU8bMn>Qn>JIsr|{wKW;Jg-L}#r`Y}8>{ip#!n3KY+(+;=1Q&0P@kmf&vRIH z8+LX#r$|lC82XOOL;iFedTn#=R*s6pP%qJ1*~ZP;Fg3#zjY8jq6De9 zkMCV{oDa400p>IigmiL_5{ky=cj6;c4Z4n-0)23csAC0sV=T0>uk2PSfPA~7@c;^f zrRm5aA}3ExNVvaU3^Wcr`YUWdFr5r9Bd)4boMpeBE+=~@;7TTGvJ%nfScOt|YPdB2 zqGAK!dIQkq^Oms)TKTV9Xj$$BlT$N}E-{P-D)TwdK8(p%XkBVF=Mz_Fj6+y;{k$@m zonDG}n$n)8(!SUL|Kn>b#Yjaaej5RpHg)*2YL~#FT6WAcf3$g@;7kJ8+j7?V zY-mU!zU)9)`4c|0+i5KtZcJ`a=Qxm!pu6?F%(a2lnHW5Y|D8v5QV6p83{Jv-mK`De z6<_)#1585ySx@`Lqe>+1k9AkS>u#rxC4Zm32G6)rw-eY>S z#PO%Ohf5C5qeqV}K{(w&R11iSIO1I@tLIjFpnec=$^)g@u~G{FCJdN=N2C5|Cc7G@ zxN6|oe%uCoT0^}8eW%xq9UhR|B)rbt$GjL-IGf?Px=z1&6p^%7-9i_cIi87GU%h?# zEUzjJzvyiv&hnXGplmgZUmr<{zhSY@x07D{uxGC z1-5gJ@)6-UWf#ibKJ66)_k8t^2H^)bpK8-!h+W2|%?z|iVuD%vy>~gS`xEHblr}Ax zT4$3%sYF`@>xH}3IlCredz~gwWb%8loW!un=e;54a42we>etq4K#P8k|9MshV_Q`= z%15}}LgxEY`g3z1@wmBO2!${j`vWD1FwuE=K7hD52d{jkoYGJU%0=zEz) zbmb&Uc|Y|tez0WyTSQ4zIKuxeR*`@~fbcM(N-{L51YSl&x(z>;_ZJ`ob#pOW&QV{_ z!*tAAgZM z=)1|M2+RdCUGdV@BD|uQ`Cp^?;`@dt8SF&p9;${D6X{8HeS}l(v4`r+MjOp=v{@e7 zK{OS3X@dwjx!+EpLG~h76=ncr*3#lVj&*?T%v=WOV9r#R^4x#ck`T}4UiQb9gy#(= z|9_VxJUZ<|99h;YKn6J6*~~MG*?o-zQ`;88(~8-rHk57oO|YWDSaEukDi8%-r3NHy6@E3|xRBg{)X;n$-YQ zf{*tTl9CRW9(cJf^u+?8v$NgW=9AAc4`|Fe&&N1d51NOed(d7Z%{x#hRjlPWk&Y!V zDJ1oUdeWJz_Kse!=CbNf6XwLsYy~vYZWo_So0JV0zAmD`F3;xjLR6$4ljeMtxDgap zjhNG-Mz>!SH0B?iWM~Peh-3dzIfB z7*bC#@Kk3te9cTG?_4WX9b44UEJR0>Z9jkzMv*~gH=o(9O|D`H3E)xt>75;p0m|f| zY#Y$fyRnsaM(PK&A~pcJ5g=J!Mx(m0e|ZVuA^v;`0H;#JzLf>GK2&KEpwpFVu?}Gw zMc#hn48s~2BKz=GtUShm6>S3w6NGZdfcF}y@$V`aCuWYo6WKe)0h}w*u#kM&|Fi=3 zc>UvvPRHDwp_iA}kF)3#zVoxF@)M|^FC`V?V}Z9L^S4$E*i!;*Y~<%?=4}q&jNkln zBh5Lqxo_b*VmX}OBSq$f{lqG!f+E1(G3c=rops^wXkp_f6p_`T5@-cAP&L%&{x~ny!omU-ssFzzu@PhfesJH(W8Sv>4)?bjk3^ zs?i+hEi2r>W|6DT^*;(}ZOUn+^2lyWZHOa6UhUGSls=Uw|M#F`07Qy`^?n54Jz-V2 zwvS3>A0{O(8iIdcr2zjv4!SQ723fJAU$+!Pq5gP{91I3G7jXhD=8dSj{D*I+>^T_S zG#1R9o>(s%CQYb)xPy7pak%CImCqcO+hvn0F+U%>$SOMZv6u20c?uUVTD8YoLd$N2 zbO4SXDudRWXFEc-7d2Ss(Lqh>CLf@;^WR}V>|jf6Npv-Oan*Zw1-JGCz^l~BKnxrN zzf(dR6@hD?3H2ermwzg`U9?t%-|hjkdA%c_*?Eh!)AzlT1_=wRSV$%Ag{cTwPlZ6i zJw>a$@Wm+V!%v6Amf?#`F#S@|T+5QOXN9lT_$Q|4#sNuqUj@CQI>%<3j& zLuBZavf|H@?dP23Uh=5=x-zJgW4Vn-il5d>eNPcKlm7GP{#&y(oxcV5seOrh+cGtx zIpTSR1Id5cZE<@O#wJ>CkRT+8js~6@cDsN~Wa>|BIrnuJb2=J(FO4}kj%)_@?Nqv< zoYjwK=k0|pOVed1>ysu2EI?gS@)(`Wwh<>ay&)3(r*ir-e{pYUKgJ81$e{$!3kAO> 
zZAn)vFLgog>%QX2?c!KVyv_Gdm2JePZz3#)SbuFe`c^OS1Rn5u9IRHp1IU$>V?R3R zb+omQ1z`r7nwo&Rg{*tu=e4(K6ael=$=HllAYaDptO}nbIIv_CJiCocPHx;4|Lh~l zVwkyA7+S>4QHhiE>HEhc#ff~uHLtDf*`IL}%X(w&3%Ur1F4u2ufWlZt(ipC+@{_EA?94146m3k(-Y~vuao~2!Z~5RkkP6cp z`#CY_yl{0q@gaI=KGoW^DWR12ag#t_ErHL8^45H;IgR&+4%&}V`&Q+j@@GC}DTNnJ zSeUYM+m@l0ky}%p5_k3wcec-l)ycsrm35&x%KWaI<&?Xh3}&qK8&-9;fieQ576vmI z%O5Sd*TEuvv;J9!U7gLY`e$|6K^IffKh$BL`#t^^5eMu5H9LBtE5dRIes(rqTAPO# zwUugE4YXpFQhM08Wj&ka!}H;b8S);ZP zt!y9wh39(ZS1=yVo*z|7-RNVE?-TyJ5W>zYpi)66;4+x?Cu~ZgunV^_!_3*Iw_%3OxI23m=M~THQ-@IpbJsjL~?K!3NI6}uANI+q| zn7AJUT+G!nj{~RjEoTUxiW8iGs!{kC38;yGk${p7(##9NO^}D}kCm#f2y@WlCxvub z)8*zy0zolNjS^J?CY4klm3XTSN@)hTVFi^k)I&ri$^uQDF9#HC1{w)>=hwYd6h4V! z{p7+a5C7o86)F4D?>JjzHQw{-9hrOJI9~v8_zUb9!A!V9DA^Bu1D!i|!i}8Tx&>*s zqQvN^kzW>lSD^RuwJ8xOxizA*nj7%mVDhSP-Jq1c4- zv_uEMaB$hQzJbJTmZb|Od1)(yIN&ai9aW&qit7w!bX|e_8r}yL7P9xU>l_HaIawk09PmM%V_dr{XmG7- zZ7R0-HgE2a^h1yCTU4w*8?@F%!P^6ce<4Lj*I!k=lz_+QqXf!0OgdLQ#T+Y9K$p#SmD94 zITRlPxh+owpqX~WgK)TBVfV455T9j#yfLDJSCxZ58>!ZbWu;`Qzg`gW=0ai2`#S5K zug8z=R=>@}0~J72q*`5{!naeHF7|tkH9CaMb*?W`|M&VLrIO`MNPjhhw{Q{zQ-9H+ z1p}@l-!)Flfhuh-({n@s{@QO4E6#vRtaz$9VPZ_t~HISH+V`9zT8We ze&1;B7oA-D2$IR`nlxO)^0k#5pGgx;IYhDX2&idTP54C>c5`#>O%?KLs;kq? zR|YW7&Yyd*UjJE%5D~76N8#Ah1W=$ye*#P}ov}}#y^IawxCjSX${+*ZeF1oQOi&oP zv84bXhs%@KISs{`Fs1Lp{LM>b*%c*jUMzPb?X$nqaegyvrLaY#2Xh{n`W8UF)o-~MaK90` zhTt)-d+`nQCR#{$^~DN1{Ab;QE_B5imU!QwnoCl32(5F`rwzE1$HwTLJ6c+xU>sSMCTcJEva7<*hajn3& zEz!7FlTwu^yKv1AzOgbFGVT7!V!bG62{~Pbxl(kL`#(gxD?hwuh}Aa0M+92|A_p*8 zez0mJIJZ_ujG?Gh+McCVNg05myz_~edy!nH*P*%yhI_3> z$j+1=g$GaOId^cQF~u9FT{g(iPL@+ZK;XuW=tV6+r~A4(QJK4Z6{E)pMj;rWB)+jv z3S54aSLVy(RM011sARS5lwCj`e%Ht~fAm&VGfIb*+adSJ4`L+Zg<@5YBKV1^>zf#c zc}nAzB4z-o{U6s$;{o;3sdjiq@fkrKhWu_s;7=b?uzN*Tf|NB_Hm?dr{x5Ykq_fvT z1D_K5wM%IB&`XKV64(WgUak&J z)1Gbiv5JSdEdHi)x?r2-t#JE@V9nEb&9;b2o2oqpBF38x%8LJtErN85PrR1)m0?z zJ^%`2h6JUo?}MZMq`Q!5M=^zUr&fJJ53r`yy~I;3 fdi3se;*8=eQ;T9bv|T6^@K5rd+}#pUec%5B#G+Ut literal 0 HcmV?d00001 diff --git a/website/docs/assets/substancepainter_pbrmetallicroughness_published.png b/website/docs/assets/substancepainter_pbrmetallicroughness_published.png new file mode 100644 index 0000000000000000000000000000000000000000..15b0e5b87687a8675c481a8265e5f0c5639de8fe GIT binary patch literal 7497 zcmd5>cQ~8<-lz7~Qk15s+IwrG_NXd4jVd)8YL^NzYsDy9ZLNx}>Y-MYR>U4v8e+!? 
zsB-2*m<+Dy+$19-FCcv$ zp*=x*Zw+uE?I0ufzNxQ8R*E{iK-!=LX&PygkyRuz9N1Ekw&^^tSa_3>F}EDQ$U8jV zI*^f_#Tw{n-uAOzfi-yC!#wU79R1?%!o?6yR>tzCSyyBr?D6VYrQ` zE`PIDB&<;*>=}6*6?F@i+BuG1L-EA&19~HErJUz>WVT{N9^>ivn+5f7nvOZ_&aBt0 zm%=92jG*zs8~bq%`*CL+2lOB;VY3bvvcd=7Gx0|_DcK_}7fKlTb>Ppjbo`@XTgJ3# zj(o6IbTuE6O{ZS=1j&Ndlyf^`Z;cLvKC#{i7R-|i}ql)!(mH8vy zrljLb!T1e0O6q$Fr3herN`f??n`sct&}0lcbnvO~!!Y>r>%{=Pb%nio7>)L@g zTn~xUx-zD=iwUB%if64}5a%I|aGtYIW#-phrSX%KI#qEnxQoXY=@wEc{E_=`9Pv9# zFd@%|a!h@1A4hDvPnFlPCH2XI^}{1%xWhGJ%UfVZjm*8gxKr_)5t<%b?yLopF6>&} zX?57coeB)=V+zyhORp}kv|LTuiRkN z?S&k9?o+Pp46>GKd1>j7?7vN|ACkB-LU*AA-f{I|2pfuXpMnx&Jo8$9pSuWj$`?V?^vx;OxNM8tmqy_0 z&I-K?Hv}(q^IW|1x}2t`ncck%%+>Y=YCXiE)qQ1p4t%uL6M&;y1&1I%9!}AO zgi~9q`EN@%NnS@_{T*{S%K7YgSYoW1|5yTq`vFJQ)HD?C+n)B{)7(AfOM6C`+GM~b z`q?1xS`_aX6|&3NgjQC-@qQVhtVMdAY=3N$$V<#w&!T}g@wa^|nO{0xGYC9-Oz~AZ zvI7|lnOhND|MDb{-Twg|rPuUfpW@!cHUwHt)j0(21M;ti$ss8kRQEkZ0K~(uDP_$C z5~dxF_tU?d;sN;P(o?jQpQ~QLz4!T8Ud{ss#6%cYbk~{aS(`aXmP7$G3T$h_!gfeaZ8d)c>leKr$ou{9 z?;w>(#$L7_^9#U#4rZ#HBMjqAhquETSha40rngoJi+6ezxf8Pj{R0I%*>caR1s@y+ zOk_7CSC`_F63{vkeq zswo>6dzIQii0?{#<|5q|nKmQjfC0n@aCxX=5O=SVuMbfmG>CY)k!u1mqtJ!SsSk)t zuV*XvcMc5zZn^(Na0c_@#G+K?%Bp3+K zD;y_Jw|h_bimKz03dD%Pq|%JWI8|S5OcH2rB3n>4&Q#EnRJe*GLsbLr zhC~^`d)xbNf$QKNW7Zo5!$8+?7&ezc8)>vbela(GN%M~GA)YRxT!|h+h0G4`$-jMF zLV_E!YCCGJa2b1M#N4*pf~LXLF6XToPQnUP4fIJ5P>_=kn$6eL3ckB@#<>!^+xGSR zO@VfV93ihZuNIru%idOq#XOifRq%x27(%LFbUut7>F3`P>gUO)~ zmY@|jA>gRkboNHcC66ggzKvMqj4p16lDOcm5U@gP^2Cfb%qJ~Fh zpka}Z=4PSUX^NbT?dkLQx7CHO|8FIokG zn78G4lGSGQoDB=z%#bG|&~8aXsq-2LbCKB+Fs7R^je@rrSK09g2lrcW^|&r*B(To= zhJH01Vs2{Q^-C$nJ=}A6q_OWt{5wIdy zE?xZwq!P~}5H+>EPiVaP#CID4@$rxKDar<)W$>Ruu&JMw^q;KNY-4;|8{y?AEM{dl1G_!)crLB zNCTv%55q(xkr^zqAAue0%63uMuJ=PDlD^U1VBx*IhefXT5DLZW(LtM?d7Mo-&n4oq zO(Xku#Z5^y{TAwHU}ZkrVMsl9#n!X40&47&&7HXVa`i71YK!$23##<)f)dUUi5J)$ zrq2EKFX1c6;I#}@%Z3oYlfSscwM@O%8nX~}pJ^^MWW_JwThodFg8n}5s$KDeQ4 z2og`Q{+|KaqNW?yEj&576Hq}X2h_*aH1`jKV$Arxa|D)y#u4>Wz1P!WPx&GP>Izpp zns@C&Lixs=14Etmp>xYx48Uy`#2avGRQ#s|C6R9>>hr7W+Em~t#m+^kgNR-RoL6DE zKM_5Cu)pDvW)>ca+EGSubxqRAY$okUC5K_~r6N}F7@EEa5G&+-=Ska;P=&E=U)(*0 z_$whQz{ZDYzC3n%-9A|Mu1<0ie^w*;tuQV7`o8+SB$E8vHPS(`^i-bfEcGV`+p zN>>|+$VP~=LuKL~n@C!Kc+%sykZ}ds*2veXe)R%RulZ$ZIX?qleJ8X8yDuSiUeT^j zJlrc@Z*CC+(z?Z-1p&Q|H~c_;{;nyG^+i^A1m))ht@%a-?;6G)d0-w)JbQv;JpN0X zT9X}$Xk{s)UMy2(9{6w=vC%qX7?yC2;5mrug1s^QpIGBH<()oM53ie=909x{i9Lgh zMwi^EthQTEYrq!Jd7J>DRE5ClcbUp?%WUKWIB-2;Hhf&jQbbSS3Sg#Tuw?`9n8QNu!klI9x`=xph={%B|Id~l=5#}5RJ!TGR zghTKkUh4H4qn&8Ptp|=169uOIB%faKl)$~VYOaD5-wPs-3XV06j%qs5OW%U9uQ+z` z=b~0E$LFq(lmBkv|1Cqv3?Y66f`5DX_b^~vv9-EB+CElnHq9BBG;_bf(rS$1PjZ+k zHq#t-A~}ig>#r`DL_-LK^&Q;f5S03z^)IWhajI(c%<&G+Tg>e9 zrI%T@@1}AcG>x`<_F7Ivn$%rL@2K(GKOhA(bSw;Rj|S(IQ9Xn`2E#C z`j;OL0R4=?%ID5nQQMz>vzjWW8i3`sDs#uzt^dt+SPHU6(ZW}FFJ}n)#b5_N$k}OL z!Alj?&A-47YCOm8vfw`(*4?HRlY89pk_XA7|2%9)8T9knXo&wsN&%4B+dSc*2-ID z*hq=W*Mg6LMplM|joXZLt`ngAPRJZ*T->LCHa|1!8SZ#gz5PclhN)0QO)JOtUueP3 zh8BCM%9sgzMa6UBNmYj~eDf_x{D-9;J1Td8m32R`_WD{S6 z)o_xmiGPf3&gw04x20F_aFu=b`P>doaCqfZ@||Tz?-$tjvAauF>1(=0<~}74bEfgI zpz~8rra9Mumjk{$F0A<7EFdpndXK#VXHsJr2;|tLtT#*b^sQG1=l|wj(nV+@ADm9f=ys-7@M?D>{iVSZ0w_Db)I7#L8rsU z?z9%=f~)0n&vf6%^+iaVv~8IW{yK;7h+$q+LuTqRu(pR*Ws z0Abu&k3=h9O$j_@th~>5^S}P(1s*pcO8-z=Z%VTilS;%P>b(8H#+$HP znET(N{12dWElpenS<&*7q~X6L{~xEDOCa}4;Kc1kM*>w={!TGi95yi}9l<(ip>kXY zFxKkXI@0>7Frc5O$6@cCN{v}%QQT~b+ ztxl|ug{V7N)*bF-ln0ggtgN|SBLnae#DA$7rTg`u>z}a-#Za9 z)F$#FeaI{OGwZ^5A6_&~Z&3VH)AnHa)t)_l1OV`FPFwm<#$5hvae( z$HtJ}KSD%86-;}%)wB1zVZTIG#g^&{$qgLR(OBcknMj>L+*bj1rB0e}(MqK>EU5$c zvovU3^0HIj$H!>D?EX^g*XNwi#CuCZ5x)p45Fhh}>fJ0Ta!H%*7iCI%<1VRSfjhAC 
zI_Y1=h5KxCFAt3YS0TT3WrKZNL}N{4E5Ec0j+32~ECxq+_GoIIaMYI-V!k0)Q({xl ze}xB=7VVhti-h~&4Kj_mx~e<}lRk<(cFF04Ku<5*NX z-CJ#q*+XrsyjEa?iln_p8eNx*ud+SkJkgdU7lJgDm&||h-SfuckXFkQa9^h}{GC>s z?;~H-pO|Mj4hXVZ#{mJ!b&UDPK~Oz3tDrZ$#zHN=>NzaNT<DcvyJnGq8d$VPR%YmWq9oz1dN?g{~51?BX&#CV4&rDsMdF-z<+TGM!nO7RN#C6r?G}#p9aaPFfj!7~ zCZ&FJy+9^EutWy-^kF@jwz{skP77_KD7eG!Sko^vPAE6_U* zy*T){X`tC|VPfWee2rOIvozsCM?)~Fi{LyUg@;QFh2RdevanLL-@&)8nV(2spTSKo z5i>|ge?6f*=^x1c3+WvuYHsD~0T|X&|JOQ!efl?HS7~EKo!OM8vli$>P(NF=y6TS>MixIgkKE*4e3jzI z8n)ozLaG;)AL9>FZj&GGFZ?^0_ zV0l}HzVuDO`9@X~u-3yKUe-bQduT9wNoT*}y;xjh@R#zFJD0=~d<%rC#Ol+2=Z>vux@egw; zlGztMC3^4u=GPT|!ZL|L%EKgLm_kHVg-r*59>)-s1Nngdq3^DVa07b?)cUq;Kd_4j za+fqM*7>fS$)z|)hhryw^U7_YknVAUx1PiZpE&ohbphj23XB(oK$$+B!{-u&^u`0? zT#IbT(R$q$i@-*wLd1-g^pD#TbH1zgV-O0GVwC^v?OMg2Q~>EIpFlueUtm`}m}h5T zahED#gNP);c<}PLr;jsm1&>;Fm5G|-e-e0i>*Vg?yb@)T;W+gf)o%I}6;t00w*D&8 zZ^Y5{=>RWfl%K1^v<-1i$KJ48Ub8p5{<%VImZC&by!4Ud!kJ5>hBo(IjS?Z;yn1W` zyu9XOy(nvwL@AT!zp(F0+ia-xEYZV5>6+TpB%}Tf|2_)|(LV`bk#x@&s4Hf||111$ z$ekjmCo0U3D?t|RogkvO$ZF{SB;Efnl)mQ>wddJue+Y1&H`B3Lwlj`F4@qedfDK^*Y*iv1=2uj(*j{BzN!Xi}VWd4pONdxro@- z>v?yIR(nLc*|;P93OiSgs`u_k9vQo%V0G`pjx9w*#HW{(mMkLd$?kA9;Nu~c!TkmUx z`(^7t8HN0*_L7sNjv2GJ>lxKVJ~FmWCTG@?{>JQ-e#jQ3)CBf6eG%6rmQN?$ z?j_y@_xx3jkz^!2a-qb#b76g z;V0!OCYXWqv)viT!CH60EYn!NPXh`6E_KQ5Z$u%a;THynZ9_m{B`)o?XBujfd(j8M zK8&_AcjY2?8#|WF_Iy7d7B5*vYp;@)7rZ_{sxFFjlMR_I13NdT6ixyP^nY}QKSIJZ z1^d)*guw)%=_AP4(YiMzvPhhE0+qp9;@I*HsgN_~-4tb_1!K4<^4mI^L)r3izg Date: Mon, 20 Mar 2023 20:07:23 +0100 Subject: [PATCH 095/187] Simplify setting review tag and stagingDir for thumbnail on representation --- .../plugins/publish/collect_textureset_images.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py index b368c86749..f7187b638f 100644 --- a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py +++ b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py @@ -87,6 +87,12 @@ class CollectTextureSet(pyblish.api.InstancePlugin): # strings. See CollectTextures plug-in and Integrators. representation["udim"] = [output["udim"] for output in outputs] + # Set up the representation for thumbnail generation + # TODO: Simplify this once thumbnail extraction is refactored + staging_dir = os.path.dirname(first_filepath) + representation["tags"] = ["review"] + representation["stagingDir"] = staging_dir + # Clone the instance image_instance = context.create_instance(instance.name) image_instance[:] = instance[:] @@ -108,12 +114,6 @@ class CollectTextureSet(pyblish.api.InstancePlugin): self.log.debug(f"{image_subset} colorspace: {colorspace}") image_instance.data["colorspace"] = colorspace - # Set up the representation for thumbnail generation - # TODO: Simplify this once thumbnail extraction is refactored - staging_dir = os.path.dirname(first_filepath) - image_instance.data["representations"][0]["tags"] = ["review"] - image_instance.data["representations"][0]["stagingDir"] = staging_dir - # Store the instance in the original instance as a member instance.append(image_instance) From 0b3cb6942dc03e231743fd1713f3e919fdc785f7 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 20 Mar 2023 20:27:34 +0100 Subject: [PATCH 096/187] Add todo about a potentially critical issue to still be solved. 
--- .../plugins/publish/collect_textureset_images.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py index f7187b638f..14168138b6 100644 --- a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py +++ b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py @@ -60,6 +60,9 @@ class CollectTextureSet(pyblish.api.InstancePlugin): # Define the suffix we want to give this particular texture # set and set up a remapped subset naming for it. + # TODO (Critical) Support needs to be added to have multiple materials + # with each their own maps. So we might need to include the + # material or alike in the variant suffix too? suffix = f".{map_identifier}" image_subset = get_subset_name( # TODO: The family actually isn't 'texture' currently but for now From 700927c1645fc9183a739abfd4529f4a94e027d2 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 21 Mar 2023 15:21:35 +0000 Subject: [PATCH 097/187] Restored lost changes --- .../unreal/plugins/create/create_render.py | 212 ++++++++++++++---- 1 file changed, 174 insertions(+), 38 deletions(-) diff --git a/openpype/hosts/unreal/plugins/create/create_render.py b/openpype/hosts/unreal/plugins/create/create_render.py index 5834d2e7a7..b2a246d3a8 100644 --- a/openpype/hosts/unreal/plugins/create/create_render.py +++ b/openpype/hosts/unreal/plugins/create/create_render.py @@ -1,14 +1,22 @@ # -*- coding: utf-8 -*- +from pathlib import Path + import unreal -from openpype.pipeline import CreatorError from openpype.hosts.unreal.api.pipeline import ( - get_subsequences + UNREAL_VERSION, + create_folder, + get_subsequences, ) from openpype.hosts.unreal.api.plugin import ( UnrealAssetCreator ) -from openpype.lib import UILabelDef +from openpype.lib import ( + UILabelDef, + UISeparatorDef, + BoolDef, + NumberDef +) class CreateRender(UnrealAssetCreator): @@ -19,7 +27,90 @@ class CreateRender(UnrealAssetCreator): family = "render" icon = "eye" - def create(self, subset_name, instance_data, pre_create_data): + def create_instance( + self, instance_data, subset_name, pre_create_data, + selected_asset_path, master_seq, master_lvl, seq_data + ): + instance_data["members"] = [selected_asset_path] + instance_data["sequence"] = selected_asset_path + instance_data["master_sequence"] = master_seq + instance_data["master_level"] = master_lvl + instance_data["output"] = seq_data.get('output') + instance_data["frameStart"] = seq_data.get('frame_range')[0] + instance_data["frameEnd"] = seq_data.get('frame_range')[1] + + super(CreateRender, self).create( + subset_name, + instance_data, + pre_create_data) + + def create_with_new_sequence( + self, subset_name, instance_data, pre_create_data + ): + # If the option to create a new level sequence is selected, + # create a new level sequence and a master level. 
+ + root = f"/Game/OpenPype/Sequences" + + # Create a new folder for the sequence in root + sequence_dir_name = create_folder(root, subset_name) + sequence_dir = f"{root}/{sequence_dir_name}" + + unreal.log_warning(f"sequence_dir: {sequence_dir}") + + # Create the level sequence + asset_tools = unreal.AssetToolsHelpers.get_asset_tools() + seq = asset_tools.create_asset( + asset_name=subset_name, + package_path=sequence_dir, + asset_class=unreal.LevelSequence, + factory=unreal.LevelSequenceFactoryNew()) + + seq.set_playback_start(pre_create_data.get("start_frame")) + seq.set_playback_end(pre_create_data.get("end_frame")) + + unreal.EditorAssetLibrary.save_asset(seq.get_path_name()) + + # Create the master level + if UNREAL_VERSION.major >= 5: + curr_level = unreal.LevelEditorSubsystem().get_current_level() + else: + world = unreal.EditorLevelLibrary.get_editor_world() + levels = unreal.EditorLevelUtils.get_levels(world) + curr_level = levels[0] if len(levels) else None + if not curr_level: + raise RuntimeError("No level loaded.") + curr_level_path = curr_level.get_outer().get_path_name() + + # If the level path does not start with "/Game/", the current + # level is a temporary, unsaved level. + if curr_level_path.startswith("/Game/"): + if UNREAL_VERSION.major >= 5: + unreal.LevelEditorSubsystem().save_current_level() + else: + unreal.EditorLevelLibrary.save_current_level() + + ml_path = f"{sequence_dir}/{subset_name}_MasterLevel" + + if UNREAL_VERSION.major >= 5: + unreal.LevelEditorSubsystem().new_level(ml_path) + else: + unreal.EditorLevelLibrary.new_level(ml_path) + + seq_data = { + "sequence": seq, + "output": f"{seq.get_name()}", + "frame_range": ( + seq.get_playback_start(), + seq.get_playback_end())} + + self.create_instance( + instance_data, subset_name, pre_create_data, + seq.get_path_name(), seq.get_path_name(), ml_path, seq_data) + + def create_from_existing_sequence( + self, subset_name, instance_data, pre_create_data + ): ar = unreal.AssetRegistryHelpers.get_asset_registry() sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() @@ -27,8 +118,8 @@ class CreateRender(UnrealAssetCreator): a.get_path_name() for a in sel_objects if a.get_class().get_name() == "LevelSequence"] - if not selection: - raise CreatorError("Please select at least one Level Sequence.") + if len(selection) == 0: + raise RuntimeError("Please select at least one Level Sequence.") seq_data = None @@ -42,28 +133,38 @@ class CreateRender(UnrealAssetCreator): f"Skipping {selected_asset.get_name()}. It isn't a Level " "Sequence.") - # The asset name is the third element of the path which - # contains the map. - # To take the asset name, we remove from the path the prefix - # "/Game/OpenPype/" and then we split the path by "/". - sel_path = selected_asset_path - asset_name = sel_path.replace("/Game/OpenPype/", "").split("/")[0] + if pre_create_data.get("use_hierarchy"): + # The asset name is the the third element of the path which + # contains the map. + # To take the asset name, we remove from the path the prefix + # "/Game/OpenPype/" and then we split the path by "/". + sel_path = selected_asset_path + asset_name = sel_path.replace( + "/Game/OpenPype/", "").split("/")[0] + + search_path = f"/Game/OpenPype/{asset_name}" + else: + search_path = Path(selected_asset_path).parent.as_posix() # Get the master sequence and the master level. # There should be only one sequence and one level in the directory. 
-            ar_filter = unreal.ARFilter(
-                class_names=["LevelSequence"],
-                package_paths=[f"/Game/OpenPype/{asset_name}"],
-                recursive_paths=False)
-            sequences = ar.get_assets(ar_filter)
-            master_seq = sequences[0].get_asset().get_path_name()
-            master_seq_obj = sequences[0].get_asset()
-            ar_filter = unreal.ARFilter(
-                class_names=["World"],
-                package_paths=[f"/Game/OpenPype/{asset_name}"],
-                recursive_paths=False)
-            levels = ar.get_assets(ar_filter)
-            master_lvl = levels[0].get_asset().get_path_name()
+            try:
+                ar_filter = unreal.ARFilter(
+                    class_names=["LevelSequence"],
+                    package_paths=[search_path],
+                    recursive_paths=False)
+                sequences = ar.get_assets(ar_filter)
+                master_seq = sequences[0].get_asset().get_path_name()
+                master_seq_obj = sequences[0].get_asset()
+                ar_filter = unreal.ARFilter(
+                    class_names=["World"],
+                    package_paths=[search_path],
+                    recursive_paths=False)
+                levels = ar.get_assets(ar_filter)
+                master_lvl = levels[0].get_asset().get_path_name()
+            except IndexError:
+                raise RuntimeError(
+                    "Could not find the hierarchy for the selected sequence.")
 
             # If the selected asset is the master sequence, we get its data
             # and then we create the instance for the master sequence.
@@ -79,7 +180,8 @@ class CreateRender(UnrealAssetCreator):
                     master_seq_obj.get_playback_start(),
                     master_seq_obj.get_playback_end())}
 
-            if selected_asset_path == master_seq:
+            if (selected_asset_path == master_seq or
+                    pre_create_data.get("use_hierarchy")):
                 seq_data = master_seq_data
             else:
                 seq_data_list = [master_seq_data]
@@ -119,20 +221,54 @@ class CreateRender(UnrealAssetCreator):
                         "sub-sequence of the master sequence.")
                     continue
 
-            instance_data["members"] = [selected_asset_path]
-            instance_data["sequence"] = selected_asset_path
-            instance_data["master_sequence"] = master_seq
-            instance_data["master_level"] = master_lvl
-            instance_data["output"] = seq_data.get('output')
-            instance_data["frameStart"] = seq_data.get('frame_range')[0]
-            instance_data["frameEnd"] = seq_data.get('frame_range')[1]
+            self.create_instance(
+                instance_data, subset_name, pre_create_data,
+                selected_asset_path, master_seq, master_lvl, seq_data)
 
-            super(CreateRender, self).create(
-                subset_name,
-                instance_data,
-                pre_create_data)
+    def create(self, subset_name, instance_data, pre_create_data):
+        if pre_create_data.get("create_seq"):
+            self.create_with_new_sequence(
+                subset_name, instance_data, pre_create_data)
+        else:
+            self.create_from_existing_sequence(
+                subset_name, instance_data, pre_create_data)
 
     def get_pre_create_attr_defs(self):
         return [
-            UILabelDef("Select the sequence to render.")
+            UILabelDef(
+                "Select a Level Sequence to render or create a new one."
+            ),
+            BoolDef(
+                "create_seq",
+                label="Create a new Level Sequence",
+                default=False
+            ),
+            UILabelDef(
+                "WARNING: If you create a new Level Sequence, the current\n"
+                "level will be saved and a new Master Level will be created."
+            ),
+            NumberDef(
+                "start_frame",
+                label="Start Frame",
+                default=0,
+                minimum=-999999,
+                maximum=999999
+            ),
+            NumberDef(
+                "end_frame",
+                label="End Frame",
+                default=150,
+                minimum=-999999,
+                maximum=999999
+            ),
+            UISeparatorDef(),
+            UILabelDef(
+                "The following settings are valid only if you are not\n"
+                "creating a new sequence."
+ ), + BoolDef( + "use_hierarchy", + label="Use Hierarchy", + default=False + ), ] From 423cbf9e5465ee146523460376efde0595e44374 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 21 Mar 2023 17:06:36 +0000 Subject: [PATCH 098/187] Fix level sequence not being added to instance --- openpype/hosts/unreal/plugins/create/create_render.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/unreal/plugins/create/create_render.py b/openpype/hosts/unreal/plugins/create/create_render.py index b2a246d3a8..b9c443c456 100644 --- a/openpype/hosts/unreal/plugins/create/create_render.py +++ b/openpype/hosts/unreal/plugins/create/create_render.py @@ -69,6 +69,8 @@ class CreateRender(UnrealAssetCreator): seq.set_playback_start(pre_create_data.get("start_frame")) seq.set_playback_end(pre_create_data.get("end_frame")) + pre_create_data["members"] = [seq.get_path_name()] + unreal.EditorAssetLibrary.save_asset(seq.get_path_name()) # Create the master level From 7d1e376761f8c4532af04f649355f9aead58e61f Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 22 Mar 2023 11:35:20 +0000 Subject: [PATCH 099/187] Added warning if no assets selected when starting rendering --- openpype/hosts/unreal/api/rendering.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/openpype/hosts/unreal/api/rendering.py b/openpype/hosts/unreal/api/rendering.py index 29e4747f6e..25faa2ac2c 100644 --- a/openpype/hosts/unreal/api/rendering.py +++ b/openpype/hosts/unreal/api/rendering.py @@ -4,6 +4,7 @@ import unreal from openpype.pipeline import Anatomy from openpype.hosts.unreal.api import pipeline +from openpype.widgets.message_window import Window queue = None @@ -37,6 +38,15 @@ def start_rendering(): # Get selected sequences assets = unreal.EditorUtilityLibrary.get_selected_assets() + if not assets: + Window( + parent=None, + title="No assets selected", + message="No assets selected. Select a render instance.", + level="warning") + raise RuntimeError( + "No assets selected. You need to select a render instance.") + # instances = pipeline.ls_inst() instances = [ a for a in assets From 217b9dd70822ecccfaf6e2d45b4caac0d479835b Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 23 Mar 2023 10:54:18 +0100 Subject: [PATCH 100/187] Move and refactor PySide2 imports to `qtpy` and top of file --- openpype/hosts/substancepainter/api/lib.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/substancepainter/api/lib.py b/openpype/hosts/substancepainter/api/lib.py index e552caee6d..e299ab03de 100644 --- a/openpype/hosts/substancepainter/api/lib.py +++ b/openpype/hosts/substancepainter/api/lib.py @@ -7,6 +7,8 @@ import substance_painter.project import substance_painter.resource import substance_painter.js +from qtpy import QtGui, QtWidgets, QtCore + def get_export_presets(): """Return Export Preset resource URLs for all available Export Presets. 
@@ -391,8 +393,6 @@ def get_parsed_export_maps(config): dict: [texture_set, stack]: {template: [file1_data, file2_data]} """ - import substance_painter.export - from .colorspace import get_project_channel_data outputs = substance_painter.export.list_project_textures(config) templates = get_export_templates(config, strip_folder=False) @@ -524,7 +524,6 @@ def load_shelf(path, name=None): def _get_new_project_action(): """Return QAction which triggers Substance Painter's new project dialog""" - from PySide2 import QtGui main_window = substance_painter.ui.get_main_window() @@ -564,7 +563,6 @@ def prompt_new_file_with_mesh(mesh_filepath): for example when the user might have cancelled the operation. """ - from PySide2 import QtWidgets, QtCore app = QtWidgets.QApplication.instance() assert os.path.isfile(mesh_filepath), \ From 1cc2db14bbd0be5a380fadc7108f0ed646f95abc Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 23 Mar 2023 10:56:14 +0100 Subject: [PATCH 101/187] Add back in imports that accidentally got removed --- openpype/hosts/substancepainter/api/lib.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/substancepainter/api/lib.py b/openpype/hosts/substancepainter/api/lib.py index e299ab03de..2cd08f862e 100644 --- a/openpype/hosts/substancepainter/api/lib.py +++ b/openpype/hosts/substancepainter/api/lib.py @@ -6,6 +6,7 @@ from collections import defaultdict import substance_painter.project import substance_painter.resource import substance_painter.js +import substance_painter.export from qtpy import QtGui, QtWidgets, QtCore @@ -393,6 +394,8 @@ def get_parsed_export_maps(config): dict: [texture_set, stack]: {template: [file1_data, file2_data]} """ + # Import is here to avoid recursive lib <-> colorspace imports + from .colorspace import get_project_channel_data outputs = substance_painter.export.list_project_textures(config) templates = get_export_templates(config, strip_folder=False) From 8b3ce3044a9368663d91ba45279c7a63fcb3876e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 23 Mar 2023 10:56:58 +0100 Subject: [PATCH 102/187] Raise KnownPublishError instead of assert Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../hosts/substancepainter/plugins/publish/save_workfile.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/substancepainter/plugins/publish/save_workfile.py b/openpype/hosts/substancepainter/plugins/publish/save_workfile.py index 5e86785e0d..2bd342cda1 100644 --- a/openpype/hosts/substancepainter/plugins/publish/save_workfile.py +++ b/openpype/hosts/substancepainter/plugins/publish/save_workfile.py @@ -13,7 +13,8 @@ class SaveCurrentWorkfile(pyblish.api.ContextPlugin): def process(self, context): host = registered_host() - assert context.data['currentFile'] == host.get_current_workfile() + if context.data['currentFile'] != host.get_current_workfile(): + raise KnownPublishError("Workfile has changed during publishing!") if host.has_unsaved_changes(): self.log.info("Saving current file..") From 17fc4ed9251551c37f5405101f12af8e1bc8e890 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 23 Mar 2023 10:58:04 +0100 Subject: [PATCH 103/187] Fix import --- .../hosts/substancepainter/plugins/publish/save_workfile.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/substancepainter/plugins/publish/save_workfile.py b/openpype/hosts/substancepainter/plugins/publish/save_workfile.py index 2bd342cda1..f19deccb0e 100644 --- 
a/openpype/hosts/substancepainter/plugins/publish/save_workfile.py +++ b/openpype/hosts/substancepainter/plugins/publish/save_workfile.py @@ -1,6 +1,9 @@ import pyblish.api -from openpype.pipeline import registered_host +from openpype.pipeline import ( + registered_host, + KnownPublishError +) class SaveCurrentWorkfile(pyblish.api.ContextPlugin): From 4fdb31611dc9810346a45a10c50ea9a209d7a99f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 23 Mar 2023 11:03:54 +0100 Subject: [PATCH 104/187] Allow to mark an instance to skip integration explicitly Use `instance.data["integrate"] = False` --- .../plugins/publish/extract_textures.py | 15 ++++----------- openpype/plugins/publish/integrate.py | 5 +++++ 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py index 469f8501f7..bd933610f4 100644 --- a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py +++ b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py @@ -58,14 +58,7 @@ class ExtractTextures(publish.Extractor, context=context, colorspace=colorspace) - # Add a fake representation which won't be integrated so the - # Integrator leaves us alone - otherwise it would error - # TODO: Add `instance.data["integrate"] = False` support in Integrator? - instance.data["representations"] = [ - { - "name": "_fake", - "ext": "_fake", - "delete": True, - "files": [] - } - ] + # The TextureSet instance should not be integrated. It generates no + # output data. Instead the separated texture instances are generated + # from it which themselves integrate into the database. + instance.data["integrate"] = False diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 6a0327ec84..c24758ba0f 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -160,6 +160,11 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "Instance is marked to be processed on farm. Skipping") return + # Instance is marked to not get integrated + if instance.data.get("integrate", True): + self.log.info("Instance is marked to skip integrating. Skipping") + return + filtered_repres = self.filter_representations(instance) # Skip instance if there are not representations to integrate # all representations should not be integrated From 5b3af11f0f6bbd53dcc590de49f51660dbdeb556 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 23 Mar 2023 11:04:25 +0100 Subject: [PATCH 105/187] Fix the if statement --- openpype/plugins/publish/integrate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index c24758ba0f..fa29d2a58b 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -161,7 +161,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): return # Instance is marked to not get integrated - if instance.data.get("integrate", True): + if not instance.data.get("integrate", True): self.log.info("Instance is marked to skip integrating. 
Skipping") return From ddc0117aeda6fd1542d96ee54fb374a1339d8aae Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 23 Mar 2023 11:14:39 +0100 Subject: [PATCH 106/187] Update openpype/settings/defaults/project_settings/substancepainter.json Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../settings/defaults/project_settings/substancepainter.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/substancepainter.json b/openpype/settings/defaults/project_settings/substancepainter.json index 0f9f1af71e..60929e85fd 100644 --- a/openpype/settings/defaults/project_settings/substancepainter.json +++ b/openpype/settings/defaults/project_settings/substancepainter.json @@ -10,4 +10,4 @@ } }, "shelves": {} -} \ No newline at end of file +} From 57b84f18bc343b4892382d642927847496f3e43e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 23 Mar 2023 11:18:37 +0100 Subject: [PATCH 107/187] Fix docstring --- openpype/hosts/substancepainter/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/substancepainter/api/pipeline.py b/openpype/hosts/substancepainter/api/pipeline.py index b377db1641..652ec9ec7d 100644 --- a/openpype/hosts/substancepainter/api/pipeline.py +++ b/openpype/hosts/substancepainter/api/pipeline.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -"""Pipeline tools for OpenPype Gaffer integration.""" +"""Pipeline tools for OpenPype Substance Painter integration.""" import os import logging from functools import partial From f4d423dc4f7b1a42310540c74230ba3a1dcd20ab Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 23 Mar 2023 14:39:48 +0100 Subject: [PATCH 108/187] Add Create... menu entry to match other hosts --- openpype/hosts/substancepainter/api/pipeline.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/substancepainter/api/pipeline.py b/openpype/hosts/substancepainter/api/pipeline.py index 652ec9ec7d..df41d9bb70 100644 --- a/openpype/hosts/substancepainter/api/pipeline.py +++ b/openpype/hosts/substancepainter/api/pipeline.py @@ -165,6 +165,12 @@ class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): menu = QtWidgets.QMenu("OpenPype") + action = menu.addAction("Create...") + action.triggered.connect( + lambda: host_tools.show_publisher(parent=parent, + tab="create") + ) + action = menu.addAction("Load...") action.triggered.connect( lambda: host_tools.show_loader(parent=parent, use_context=True) @@ -172,7 +178,8 @@ class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): action = menu.addAction("Publish...") action.triggered.connect( - lambda: host_tools.show_publisher(parent=parent) + lambda: host_tools.show_publisher(parent=parent, + tab="publish") ) action = menu.addAction("Manage...") From d4a0c6634cd0d9c31ea8f1cf12b92fee5e7ba797 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 23 Mar 2023 15:45:13 +0100 Subject: [PATCH 109/187] Optimize logic --- openpype/hosts/substancepainter/api/colorspace.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/substancepainter/api/colorspace.py b/openpype/hosts/substancepainter/api/colorspace.py index a9df3eb066..375b61b39b 100644 --- a/openpype/hosts/substancepainter/api/colorspace.py +++ b/openpype/hosts/substancepainter/api/colorspace.py @@ -25,11 +25,11 @@ def _iter_document_stack_channels(): material_name = material["name"] for stack in material["stacks"]: stack_name = stack["name"] + if 
stack_name: + stack_path = [material_name, stack_name] + else: + stack_path = material_name for channel in stack["channels"]: - if stack_name: - stack_path = [material_name, stack_name] - else: - stack_path = material_name yield stack_path, channel From 22d628d054809a9e8f1d816994a7426197d864f8 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 23 Mar 2023 18:09:13 +0100 Subject: [PATCH 110/187] Store instances in single project metadata key by id + fix adding/removing instances --- .../hosts/substancepainter/api/pipeline.py | 67 ++++++++++++++----- .../plugins/create/create_textures.py | 39 ++++++----- .../plugins/create/create_workfile.py | 27 +++++--- 3 files changed, 93 insertions(+), 40 deletions(-) diff --git a/openpype/hosts/substancepainter/api/pipeline.py b/openpype/hosts/substancepainter/api/pipeline.py index df41d9bb70..b995c9030d 100644 --- a/openpype/hosts/substancepainter/api/pipeline.py +++ b/openpype/hosts/substancepainter/api/pipeline.py @@ -39,6 +39,7 @@ INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") OPENPYPE_METADATA_KEY = "OpenPype" OPENPYPE_METADATA_CONTAINERS_KEY = "containers" # child key OPENPYPE_METADATA_CONTEXT_KEY = "context" # child key +OPENPYPE_METADATA_INSTANCES_KEY = "instances" # child key class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): @@ -312,21 +313,6 @@ def imprint_container(container, container[key] = value -def set_project_metadata(key, data): - """Set a key in project's OpenPype metadata.""" - metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY) - metadata.set(key, data) - - -def get_project_metadata(key): - """Get a key from project's OpenPype metadata.""" - if not substance_painter.project.is_open(): - return - - metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY) - return metadata.get(key) - - def set_container_metadata(object_name, container_data, update=False): """Helper method to directly set the data for a specific container @@ -359,3 +345,54 @@ def remove_container_metadata(object_name): if containers: containers.pop(object_name, None) metadata.set("containers", containers) + + +def set_instance(instance_id, instance_data, update=False): + """Helper method to directly set the data for a specific container + + Args: + instance_id (str): Unique identifier for the instance + instance_data (dict): The instance data to store in the metaadata. + """ + set_instances({instance_id: instance_data}, update=update) + + +def set_instances(instance_data_by_id, update=False): + """Store data for multiple instances at the same time. + + This is more optimal than querying and setting them in the metadata one + by one. 
+ """ + metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY) + instances = metadata.get(OPENPYPE_METADATA_INSTANCES_KEY) or {} + + for instance_id, instance_data in instance_data_by_id.items(): + if update: + existing_data = instances.get(instance_id, {}) + existing_data.update(instance_data) + else: + instances[instance_id] = instance_data + + metadata.set("instances", instances) + + +def remove_instance(instance_id): + """Helper method to remove the data for a specific container""" + metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY) + instances = metadata.get(OPENPYPE_METADATA_INSTANCES_KEY) or {} + instances.pop(instance_id, None) + metadata.set("instances", instances) + + +def get_instances_by_id(): + """Return all instances stored in the project instances metadata""" + if not substance_painter.project.is_open(): + return {} + + metadata = substance_painter.project.Metadata(OPENPYPE_METADATA_KEY) + return metadata.get(OPENPYPE_METADATA_INSTANCES_KEY) or {} + + +def get_instances(): + """Return all instances stored in the project instances as a list""" + return list(get_instances_by_id().values()) diff --git a/openpype/hosts/substancepainter/plugins/create/create_textures.py b/openpype/hosts/substancepainter/plugins/create/create_textures.py index 9d641215dc..19133768a5 100644 --- a/openpype/hosts/substancepainter/plugins/create/create_textures.py +++ b/openpype/hosts/substancepainter/plugins/create/create_textures.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """Creator plugin for creating textures.""" -from openpype.pipeline import CreatedInstance, Creator +from openpype.pipeline import CreatedInstance, Creator, CreatorError from openpype.lib import ( EnumDef, UILabelDef, @@ -9,8 +9,10 @@ from openpype.lib import ( ) from openpype.hosts.substancepainter.api.pipeline import ( - set_project_metadata, - get_project_metadata + get_instances, + set_instance, + set_instances, + remove_instance ) from openpype.hosts.substancepainter.api.lib import get_export_presets @@ -29,27 +31,34 @@ class CreateTextures(Creator): def create(self, subset_name, instance_data, pre_create_data): if not substance_painter.project.is_open(): - return + raise CreatorError("Can't create a Texture Set instance without " + "an open project.") - instance = self.create_instance_in_context(subset_name, instance_data) - set_project_metadata("textureSet", instance.data_to_store()) + instance = self.create_instance_in_context(subset_name, + instance_data) + set_instance( + instance_id=instance["instance_id"], + instance_data=instance.data_to_store() + ) def collect_instances(self): - workfile = get_project_metadata("textureSet") - if workfile: - self.create_instance_in_context_from_existing(workfile) + for instance in get_instances(): + if (instance.get("creator_identifier") == self.identifier or + instance.get("family") == self.family): + self.create_instance_in_context_from_existing(instance) def update_instances(self, update_list): + instance_data_by_id = {} for instance, _changes in update_list: - # Update project's metadata - data = get_project_metadata("textureSet") or {} - data.update(instance.data_to_store()) - set_project_metadata("textureSet", data) + # Persist the data + instance_id = instance.get("instance_id") + instance_data = instance.data_to_store() + instance_data_by_id[instance_id] = instance_data + set_instances(instance_data_by_id, update=True) def remove_instances(self, instances): for instance in instances: - # TODO: Implement removal - # api.remove_instance(instance) 
+ remove_instance(instance["instance_id"]) self._remove_instance_from_context(instance) # Helper methods (this might get moved into Creator class) diff --git a/openpype/hosts/substancepainter/plugins/create/create_workfile.py b/openpype/hosts/substancepainter/plugins/create/create_workfile.py index 4e316f3b64..d7f31f9dcf 100644 --- a/openpype/hosts/substancepainter/plugins/create/create_workfile.py +++ b/openpype/hosts/substancepainter/plugins/create/create_workfile.py @@ -5,8 +5,9 @@ from openpype.pipeline import CreatedInstance, AutoCreator from openpype.client import get_asset_by_name from openpype.hosts.substancepainter.api.pipeline import ( - set_project_metadata, - get_project_metadata + set_instances, + set_instance, + get_instances ) import substance_painter.project @@ -66,19 +67,25 @@ class CreateWorkfile(AutoCreator): current_instance["task"] = task_name current_instance["subset"] = subset_name - set_project_metadata("workfile", current_instance.data_to_store()) + set_instance( + instance_id=current_instance.get("instance_id"), + instance_data=current_instance.data_to_store() + ) def collect_instances(self): - workfile = get_project_metadata("workfile") - if workfile: - self.create_instance_in_context_from_existing(workfile) + for instance in get_instances(): + if (instance.get("creator_identifier") == self.identifier or + instance.get("family") == self.family): + self.create_instance_in_context_from_existing(instance) def update_instances(self, update_list): + instance_data_by_id = {} for instance, _changes in update_list: - # Update project's workfile metadata - data = get_project_metadata("workfile") or {} - data.update(instance.data_to_store()) - set_project_metadata("workfile", data) + # Persist the data + instance_id = instance.get("instance_id") + instance_data = instance.data_to_store() + instance_data_by_id[instance_id] = instance_data + set_instances(instance_data_by_id, update=True) # Helper methods (this might get moved into Creator class) def create_instance_in_context(self, subset_name, data): From 4dd58e15d89383a870890562e9f084ee3fb189bf Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 27 Mar 2023 11:13:04 +0100 Subject: [PATCH 111/187] Fixed error on rendering --- openpype/hosts/unreal/api/rendering.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/unreal/api/rendering.py b/openpype/hosts/unreal/api/rendering.py index 5ef4792000..e197f9075d 100644 --- a/openpype/hosts/unreal/api/rendering.py +++ b/openpype/hosts/unreal/api/rendering.py @@ -134,6 +134,9 @@ def start_rendering(): settings.file_name_format = f"{shot_name}" + ".{frame_number}" settings.output_directory.path = f"{render_dir}/{output_dir}" + job.get_configuration().find_or_add_setting_by_class( + unreal.MoviePipelineDeferredPassBase) + job.get_configuration().find_or_add_setting_by_class( unreal.MoviePipelineImageSequenceOutput_PNG) From 45ea981efb5af84deaae232a8737e0aae6abab21 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 27 Mar 2023 11:15:20 +0100 Subject: [PATCH 112/187] Added setting for rendering format --- openpype/hosts/unreal/api/rendering.py | 18 +++++++++++++++--- .../defaults/project_settings/unreal.json | 1 + .../projects_schema/schema_project_unreal.json | 12 ++++++++++++ 3 files changed, 28 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/unreal/api/rendering.py b/openpype/hosts/unreal/api/rendering.py index e197f9075d..a2be041c18 100644 --- a/openpype/hosts/unreal/api/rendering.py +++ b/openpype/hosts/unreal/api/rendering.py @@ -33,7 
+33,7 @@ def start_rendering(): """ Start the rendering process. """ - print("Starting rendering...") + unreal.log("Starting rendering...") # Get selected sequences assets = unreal.EditorUtilityLibrary.get_selected_assets() @@ -137,8 +137,20 @@ def start_rendering(): job.get_configuration().find_or_add_setting_by_class( unreal.MoviePipelineDeferredPassBase) - job.get_configuration().find_or_add_setting_by_class( - unreal.MoviePipelineImageSequenceOutput_PNG) + render_format = data.get("unreal").get("render_format", "png") + + if render_format == "png": + job.get_configuration().find_or_add_setting_by_class( + unreal.MoviePipelineImageSequenceOutput_PNG) + elif render_format == "exr": + job.get_configuration().find_or_add_setting_by_class( + unreal.MoviePipelineImageSequenceOutput_EXR) + elif render_format == "jpg": + job.get_configuration().find_or_add_setting_by_class( + unreal.MoviePipelineImageSequenceOutput_JPG) + elif render_format == "bmp": + job.get_configuration().find_or_add_setting_by_class( + unreal.MoviePipelineImageSequenceOutput_BMP) # If there are jobs in the queue, start the rendering process. if queue.get_jobs(): diff --git a/openpype/settings/defaults/project_settings/unreal.json b/openpype/settings/defaults/project_settings/unreal.json index ff290ef254..737a17d289 100644 --- a/openpype/settings/defaults/project_settings/unreal.json +++ b/openpype/settings/defaults/project_settings/unreal.json @@ -13,6 +13,7 @@ "delete_unmatched_assets": false, "render_config_path": "", "preroll_frames": 0, + "render_format": "png", "project_setup": { "dev_mode": true } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json b/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json index 40bbb40ccc..35eb0b24f1 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_unreal.json @@ -42,6 +42,18 @@ "key": "preroll_frames", "label": "Pre-roll frames" }, + { + "key": "render_format", + "label": "Render format", + "type": "enum", + "multiselection": false, + "enum_items": [ + {"png": "PNG"}, + {"exr": "EXR"}, + {"jpg": "JPG"}, + {"bmp": "BMP"} + ] + }, { "type": "dict", "collapsible": true, From a579dfc860b7e22d344c617afefef37899dae994 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 27 Mar 2023 12:31:02 +0100 Subject: [PATCH 113/187] Get the correct frame range data --- .../hosts/unreal/plugins/publish/validate_sequence_frames.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py b/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py index 87f1338ee8..e6584e130f 100644 --- a/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py +++ b/openpype/hosts/unreal/plugins/publish/validate_sequence_frames.py @@ -20,6 +20,7 @@ class ValidateSequenceFrames(pyblish.api.InstancePlugin): def process(self, instance): representations = instance.data.get("representations") for repr in representations: + data = instance.data.get("assetEntity", {}).get("data", {}) patterns = [clique.PATTERNS["frames"]] collections, remainder = clique.assemble( repr["files"], minimum_items=1, patterns=patterns) @@ -30,8 +31,8 @@ class ValidateSequenceFrames(pyblish.api.InstancePlugin): frames = list(collection.indexes) current_range = (frames[0], frames[-1]) - required_range = (instance.data["frameStart"], - instance.data["frameEnd"]) + required_range 
= (data["frameStart"], + data["frameEnd"]) if current_range != required_range: raise ValueError(f"Invalid frame range: {current_range} - " From c20f45e88136371dd2a8a35eca66cf28f7ac3ee8 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 27 Mar 2023 23:48:27 +0800 Subject: [PATCH 114/187] skip unrelated script --- openpype/hosts/max/plugins/load/load_camera_fbx.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/max/plugins/load/load_camera_fbx.py b/openpype/hosts/max/plugins/load/load_camera_fbx.py index 205e815dc8..3a6947798e 100644 --- a/openpype/hosts/max/plugins/load/load_camera_fbx.py +++ b/openpype/hosts/max/plugins/load/load_camera_fbx.py @@ -36,6 +36,8 @@ importFile @"{filepath}" #noPrompt using:FBXIMP self.log.debug(f"Executing command: {fbx_import_cmd}") rt.execute(fbx_import_cmd) + container_name = f"{name}_CON" + asset = rt.getNodeByName(f"{name}") return containerise( From 32bb42e37922dd2de79f01c6e133b17ee8e7c6fa Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 28 Mar 2023 17:26:04 +0800 Subject: [PATCH 115/187] update the obj loader and add maintained_selection for loaders --- openpype/hosts/max/plugins/load/load_model.py | 12 ++++++++---- openpype/hosts/max/plugins/load/load_model_fbx.py | 4 ++++ openpype/hosts/max/plugins/load/load_model_obj.py | 12 ++++++++---- openpype/hosts/max/plugins/load/load_model_usd.py | 4 ++++ 4 files changed, 24 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/max/plugins/load/load_model.py b/openpype/hosts/max/plugins/load/load_model.py index c248d75718..95ee014e07 100644 --- a/openpype/hosts/max/plugins/load/load_model.py +++ b/openpype/hosts/max/plugins/load/load_model.py @@ -5,6 +5,7 @@ from openpype.pipeline import ( ) from openpype.hosts.max.api.pipeline import containerise from openpype.hosts.max.api import lib +from openpype.hosts.max.api.lib import maintained_selection class ModelAbcLoader(load.LoaderPlugin): @@ -57,12 +58,8 @@ importFile @"{file_path}" #noPrompt def update(self, container, representation): from pymxs import runtime as rt - path = get_representation_path(representation) node = rt.getNodeByName(container["instance_node"]) - lib.imprint(container["instance_node"], { - "representation": str(representation["_id"]) - }) rt.select(node.Children) for alembic in rt.selection: @@ -76,6 +73,13 @@ importFile @"{file_path}" #noPrompt alembic_obj = rt.getNodeByName(abc_obj.name) alembic_obj.source = path + with maintained_selection(): + rt.select(node) + + lib.imprint(container["instance_node"], { + "representation": str(representation["_id"]) + }) + def switch(self, container, representation): self.update(container, representation) diff --git a/openpype/hosts/max/plugins/load/load_model_fbx.py b/openpype/hosts/max/plugins/load/load_model_fbx.py index d8f4011277..88b8f1ed89 100644 --- a/openpype/hosts/max/plugins/load/load_model_fbx.py +++ b/openpype/hosts/max/plugins/load/load_model_fbx.py @@ -5,6 +5,7 @@ from openpype.pipeline import ( ) from openpype.hosts.max.api.pipeline import containerise from openpype.hosts.max.api import lib +from openpype.hosts.max.api.lib import maintained_selection class FbxModelLoader(load.LoaderPlugin): @@ -59,6 +60,9 @@ importFile @"{path}" #noPrompt using:FBXIMP """) rt.execute(fbx_reimport_cmd) + with maintained_selection(): + rt.select(node) + lib.imprint(container["instance_node"], { "representation": str(representation["_id"]) }) diff --git a/openpype/hosts/max/plugins/load/load_model_obj.py b/openpype/hosts/max/plugins/load/load_model_obj.py index 
63ae058ae0..c55e462111 100644 --- a/openpype/hosts/max/plugins/load/load_model_obj.py +++ b/openpype/hosts/max/plugins/load/load_model_obj.py @@ -5,6 +5,7 @@ from openpype.pipeline import ( ) from openpype.hosts.max.api.pipeline import containerise from openpype.hosts.max.api import lib +from openpype.hosts.max.api.lib import maintained_selection class ObjLoader(load.LoaderPlugin): @@ -42,16 +43,19 @@ class ObjLoader(load.LoaderPlugin): path = get_representation_path(representation) node_name = container["instance_node"] node = rt.getNodeByName(node_name) + instance_name, _ = node_name.split("_") + container = rt.getNodeByName(instance_name) + for n in container.Children: + rt.delete(n) rt.execute(f'importFile @"{path}" #noPrompt using:ObjImp') - # create "missing" container for obj import - container = rt.container() - container.name = f"{instance_name}" # get current selection for selection in rt.getCurrentSelection(): selection.Parent = container - container.Parent = node + + with maintained_selection(): + rt.select(node) lib.imprint(node_name, { "representation": str(representation["_id"]) diff --git a/openpype/hosts/max/plugins/load/load_model_usd.py b/openpype/hosts/max/plugins/load/load_model_usd.py index 2237426187..143f91f40b 100644 --- a/openpype/hosts/max/plugins/load/load_model_usd.py +++ b/openpype/hosts/max/plugins/load/load_model_usd.py @@ -4,6 +4,7 @@ from openpype.pipeline import ( ) from openpype.hosts.max.api.pipeline import containerise from openpype.hosts.max.api import lib +from openpype.hosts.max.api.lib import maintained_selection class ModelUSDLoader(load.LoaderPlugin): @@ -60,6 +61,9 @@ class ModelUSDLoader(load.LoaderPlugin): asset = rt.getNodeByName(f"{instance_name}") asset.Parent = node + with maintained_selection(): + rt.select(node) + lib.imprint(node_name, { "representation": str(representation["_id"]) }) From 76c0a0266f9ea976d992718dc0c3a4a3ca0c62c3 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 3 Apr 2023 11:59:23 +0200 Subject: [PATCH 116/187] Apply suggestions from code review MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ondřej Samohel <33513211+antirotor@users.noreply.github.com> --- .../hosts/substancepainter/plugins/load/load_mesh.py | 4 ++-- .../plugins/publish/collect_textureset_images.py | 12 ++++++------ .../publish/collect_workfile_representation.py | 10 +++++----- .../plugins/publish/extract_textures.py | 2 +- .../plugins/publish/save_workfile.py | 2 +- 5 files changed, 15 insertions(+), 15 deletions(-) diff --git a/openpype/hosts/substancepainter/plugins/load/load_mesh.py b/openpype/hosts/substancepainter/plugins/load/load_mesh.py index 4e800bd623..a93b830de0 100644 --- a/openpype/hosts/substancepainter/plugins/load/load_mesh.py +++ b/openpype/hosts/substancepainter/plugins/load/load_mesh.py @@ -62,7 +62,7 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): if status == substance_painter.project.ReloadMeshStatus.SUCCESS: # noqa print("Reload succeeded") else: - raise RuntimeError("Reload of mesh failed") + raise LoadError("Reload of mesh failed") path = self.fname substance_painter.project.reload_mesh(path, @@ -105,7 +105,7 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): if status == substance_painter.project.ReloadMeshStatus.SUCCESS: print("Reload succeeded") else: - raise RuntimeError("Reload of mesh failed") + raise LoaderError("Reload of mesh failed") substance_painter.project.reload_mesh(path, settings, on_mesh_reload) diff --git 
a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py index 14168138b6..56694614eb 100644 --- a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py +++ b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py @@ -19,7 +19,7 @@ class CollectTextureSet(pyblish.api.InstancePlugin): # TODO: Detect what source data channels end up in each file label = "Collect Texture Set images" - hosts = ['substancepainter'] + hosts = ["substancepainter"] families = ["textureSet"] order = pyblish.api.CollectorOrder @@ -55,7 +55,7 @@ class CollectTextureSet(pyblish.api.InstancePlugin): first_filepath = outputs[0]["filepath"] fnames = [os.path.basename(output["filepath"]) for output in outputs] ext = os.path.splitext(first_filepath)[1] - assert ext.lstrip('.'), f"No extension: {ext}" + assert ext.lstrip("."), f"No extension: {ext}" map_identifier = strip_template(template) # Define the suffix we want to give this particular texture @@ -78,9 +78,9 @@ class CollectTextureSet(pyblish.api.InstancePlugin): # Prepare representation representation = { - 'name': ext.lstrip("."), - 'ext': ext.lstrip("."), - 'files': fnames if len(fnames) > 1 else fnames[0], + "name": ext.lstrip("."), + "ext": ext.lstrip("."), + "files": fnames if len(fnames) > 1 else fnames[0], } # Mark as UDIM explicitly if it has UDIM tiles. @@ -105,7 +105,7 @@ class CollectTextureSet(pyblish.api.InstancePlugin): image_instance.data["subset"] = image_subset image_instance.data["family"] = "image" image_instance.data["families"] = ["image", "textures"] - image_instance.data['representations'] = [representation] + image_instance.data["representations"] = [representation] # Group the textures together in the loader image_instance.data["subsetGroup"] = instance.data["subset"] diff --git a/openpype/hosts/substancepainter/plugins/publish/collect_workfile_representation.py b/openpype/hosts/substancepainter/plugins/publish/collect_workfile_representation.py index 563c2d4c07..8d98d0b014 100644 --- a/openpype/hosts/substancepainter/plugins/publish/collect_workfile_representation.py +++ b/openpype/hosts/substancepainter/plugins/publish/collect_workfile_representation.py @@ -7,7 +7,7 @@ class CollectWorkfileRepresentation(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder label = "Workfile representation" - hosts = ['substancepainter'] + hosts = ["substancepainter"] families = ["workfile"] def process(self, instance): @@ -18,9 +18,9 @@ class CollectWorkfileRepresentation(pyblish.api.InstancePlugin): folder, file = os.path.split(current_file) filename, ext = os.path.splitext(file) - instance.data['representations'] = [{ - 'name': ext.lstrip("."), - 'ext': ext.lstrip("."), - 'files': file, + instance.data["representations"] = [{ + "name": ext.lstrip("."), + "ext": ext.lstrip("."), + "files": file, "stagingDir": folder, }] diff --git a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py index bd933610f4..b9654947db 100644 --- a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py +++ b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py @@ -15,7 +15,7 @@ class ExtractTextures(publish.Extractor, """ label = "Extract Texture Set" - hosts = ['substancepainter'] + hosts = ["substancepainter"] families = ["textureSet"] # Run before thumbnail extractors diff --git 
a/openpype/hosts/substancepainter/plugins/publish/save_workfile.py b/openpype/hosts/substancepainter/plugins/publish/save_workfile.py index f19deccb0e..4874b5e5c7 100644 --- a/openpype/hosts/substancepainter/plugins/publish/save_workfile.py +++ b/openpype/hosts/substancepainter/plugins/publish/save_workfile.py @@ -16,7 +16,7 @@ class SaveCurrentWorkfile(pyblish.api.ContextPlugin): def process(self, context): host = registered_host() - if context.data['currentFile'] != host.get_current_workfile(): + if context.data["currentFile"] != host.get_current_workfile(): raise KnownPublishError("Workfile has changed during publishing!") if host.has_unsaved_changes(): From 35428df6b0942e779a0bbaa50578e0c0fbfa2921 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 3 Apr 2023 12:00:51 +0200 Subject: [PATCH 117/187] Fix LoadError --- openpype/hosts/substancepainter/plugins/load/load_mesh.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/substancepainter/plugins/load/load_mesh.py b/openpype/hosts/substancepainter/plugins/load/load_mesh.py index a93b830de0..2450a9316e 100644 --- a/openpype/hosts/substancepainter/plugins/load/load_mesh.py +++ b/openpype/hosts/substancepainter/plugins/load/load_mesh.py @@ -2,6 +2,7 @@ from openpype.pipeline import ( load, get_representation_path, ) +from openpype.pipeline.load import LoadError from openpype.hosts.substancepainter.api.pipeline import ( imprint_container, set_container_metadata, @@ -105,7 +106,7 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): if status == substance_painter.project.ReloadMeshStatus.SUCCESS: print("Reload succeeded") else: - raise LoaderError("Reload of mesh failed") + raise LoadError("Reload of mesh failed") substance_painter.project.reload_mesh(path, settings, on_mesh_reload) From 5c0dee53188e12b7ddb8eec364495596b36de29c Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 3 Apr 2023 12:01:24 +0200 Subject: [PATCH 118/187] Log instead of print --- openpype/hosts/substancepainter/plugins/load/load_mesh.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/substancepainter/plugins/load/load_mesh.py b/openpype/hosts/substancepainter/plugins/load/load_mesh.py index 2450a9316e..822095641d 100644 --- a/openpype/hosts/substancepainter/plugins/load/load_mesh.py +++ b/openpype/hosts/substancepainter/plugins/load/load_mesh.py @@ -61,7 +61,7 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): def on_mesh_reload(status: substance_painter.project.ReloadMeshStatus): # noqa if status == substance_painter.project.ReloadMeshStatus.SUCCESS: # noqa - print("Reload succeeded") + self.log.info("Reload succeeded") else: raise LoadError("Reload of mesh failed") @@ -104,7 +104,7 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): def on_mesh_reload(status: substance_painter.project.ReloadMeshStatus): if status == substance_painter.project.ReloadMeshStatus.SUCCESS: - print("Reload succeeded") + self.log.info("Reload succeeded") else: raise LoadError("Reload of mesh failed") From 4300939199f9cfcd4626c0bcbdafdf5a05926649 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 3 Apr 2023 12:17:48 +0200 Subject: [PATCH 119/187] Allow formatting shelf path using anatomy data --- .../hosts/substancepainter/api/pipeline.py | 39 ++++++++++++++++--- 1 file changed, 34 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/substancepainter/api/pipeline.py b/openpype/hosts/substancepainter/api/pipeline.py index b995c9030d..9406fb8edb 100644 --- 
a/openpype/hosts/substancepainter/api/pipeline.py
+++ b/openpype/hosts/substancepainter/api/pipeline.py
@@ -9,17 +9,23 @@ import substance_painter.ui
 import substance_painter.event
 import substance_painter.project

-from openpype.host import HostBase, IWorkfileHost, ILoadHost, IPublishHost
-from openpype.settings import get_current_project_settings
-
 import pyblish.api

+from openpype.host import HostBase, IWorkfileHost, ILoadHost, IPublishHost
+from openpype.settings import (
+    get_current_project_settings,
+    get_system_settings
+)
+
+from openpype.pipeline.template_data import get_template_data_with_names
 from openpype.pipeline import (
     register_creator_plugin_path,
     register_loader_plugin_path,
-    AVALON_CONTAINER_ID
+    AVALON_CONTAINER_ID,
+    Anatomy
 )
 from openpype.lib import (
+    StringTemplate,
     register_event_callback,
     emit_event,
 )
@@ -234,9 +240,32 @@ class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):

     def _install_shelves(self, project_settings):
         shelves = project_settings["substancepainter"].get("shelves", {})
+        if not shelves:
+            return
+
+        # Prepare formatting data if we detect any path which might have
+        # template tokens like {asset} in there.
+        formatting_data = {}
+        has_formatting_entries = any("{" in path for path in shelves.values())
+        if has_formatting_entries:
+            project_name = self.get_current_project_name()
+            asset_name = self.get_current_asset_name()
+            task_name = self.get_current_task_name()
+            system_settings = get_system_settings()
+            formatting_data = get_template_data_with_names(project_name,
+                                                           asset_name,
+                                                           task_name,
+                                                           system_settings)
+            anatomy = Anatomy(project_name)
+            formatting_data["root"] = anatomy.roots
+
         for name, path in shelves.items():
-            # TODO: Allow formatting with anatomy for the paths
             shelf_name = None
+
+            # Allow formatting with anatomy for the paths
+            if "{" in path:
+                path = StringTemplate.format_template(path, formatting_data)
+
             try:
                 shelf_name = lib.load_shelf(path, name=name)
             except ValueError as exc:

From 9d68db0e16bc91a87f0b4fd4f7935426c70a8ffb Mon Sep 17 00:00:00 2001
From: Roy Nieterau
Date: Mon, 3 Apr 2023 16:03:57 +0200
Subject: [PATCH 120/187] Validate the generated output maps for missing channels

---
 .../plugins/create/create_textures.py | 10 +-
 .../publish/collect_textureset_images.py | 2 +-
 .../plugins/publish/extract_textures.py | 18 ++-
 .../plugins/publish/validate_ouput_maps.py | 108 ++++++++++++++++++
 4 files changed, 126 insertions(+), 12 deletions(-)
 create mode 100644 openpype/hosts/substancepainter/plugins/publish/validate_ouput_maps.py

diff --git a/openpype/hosts/substancepainter/plugins/create/create_textures.py b/openpype/hosts/substancepainter/plugins/create/create_textures.py
index 19133768a5..6070a06367 100644
--- a/openpype/hosts/substancepainter/plugins/create/create_textures.py
+++ b/openpype/hosts/substancepainter/plugins/create/create_textures.py
@@ -5,7 +5,8 @@ from openpype.pipeline import CreatedInstance, Creator, CreatorError
 from openpype.lib import (
     EnumDef,
     UILabelDef,
-    NumberDef
+    NumberDef,
+    BoolDef
 )

 from openpype.hosts.substancepainter.api.pipeline import (
@@ -80,6 +81,13 @@ class CreateTextures(Creator):
             EnumDef("exportPresetUrl",
                     items=get_export_presets(),
                     label="Output Template"),
+            BoolDef("allowSkippedMaps",
+                    label="Allow Skipped Output Maps",
+                    tooltip="When enabled this allows the publish to ignore "
+                            "output maps in the used output template if one "
+                            "or more maps are skipped due to the required "
+                            "channels not being present in the current file.",
+                    default=True),
EnumDef("exportFileFormat", items={ None: "Based on output template", diff --git a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py index 56694614eb..50a96b94ae 100644 --- a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py +++ b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py @@ -97,7 +97,7 @@ class CollectTextureSet(pyblish.api.InstancePlugin): representation["stagingDir"] = staging_dir # Clone the instance - image_instance = context.create_instance(instance.name) + image_instance = context.create_instance(image_subset) image_instance[:] = instance[:] image_instance.data.update(copy.deepcopy(instance.data)) image_instance.data["name"] = image_subset diff --git a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py index b9654947db..bb6f15ead9 100644 --- a/openpype/hosts/substancepainter/plugins/publish/extract_textures.py +++ b/openpype/hosts/substancepainter/plugins/publish/extract_textures.py @@ -1,6 +1,7 @@ -from openpype.pipeline import KnownPublishError, publish import substance_painter.export +from openpype.pipeline import KnownPublishError, publish + class ExtractTextures(publish.Extractor, publish.ColormanagedPyblishPluginMixin): @@ -31,21 +32,19 @@ class ExtractTextures(publish.Extractor, "Failed to export texture set: {}".format(result.message) ) + # Log what files we generated for (texture_set_name, stack_name), maps in result.textures.items(): # Log our texture outputs - self.log.info(f"Processing stack: {texture_set_name} {stack_name}") + self.log.info(f"Exported stack: {texture_set_name} {stack_name}") for texture_map in maps: self.log.info(f"Exported texture: {texture_map}") - # TODO: Confirm outputs match what we collected - # TODO: Confirm the files indeed exist - # TODO: make sure representations are registered - # We'll insert the color space data for each image instance that we # added into this texture set. The collector couldn't do so because # some anatomy and other instance data needs to be collected prior context = instance.context for image_instance in instance: + representation = next(iter(image_instance.data["representations"])) colorspace = image_instance.data.get("colorspace") if not colorspace: @@ -53,10 +52,9 @@ class ExtractTextures(publish.Extractor, f"{image_instance}") continue - for representation in image_instance.data["representations"]: - self.set_representation_colorspace(representation, - context=context, - colorspace=colorspace) + self.set_representation_colorspace(representation, + context=context, + colorspace=colorspace) # The TextureSet instance should not be integrated. It generates no # output data. Instead the separated texture instances are generated diff --git a/openpype/hosts/substancepainter/plugins/publish/validate_ouput_maps.py b/openpype/hosts/substancepainter/plugins/publish/validate_ouput_maps.py new file mode 100644 index 0000000000..203cf7c5fe --- /dev/null +++ b/openpype/hosts/substancepainter/plugins/publish/validate_ouput_maps.py @@ -0,0 +1,108 @@ +import copy +import os + +import pyblish.api + +import substance_painter.export + +from openpype.pipeline import PublishValidationError + + +class ValidateOutputMaps(pyblish.api.InstancePlugin): + """Validate all output maps for Output Template are generated. 
+ + Output maps will be skipped by Substance Painter if it is an output + map in the Substance Output Template which uses channels that the current + substance painter project has not painted or generated. + + """ + + order = pyblish.api.ValidatorOrder + label = "Validate output maps" + hosts = ["substancepainter"] + families = ["textureSet"] + + def process(self, instance): + + config = instance.data["exportConfig"] + + # Substance Painter API does not allow to query the actual output maps + # it will generate without actually exporting the files. So we try to + # generate the smallest size / fastest export as possible + config = copy.deepcopy(config) + parameters = config["exportParameters"][0]["parameters"] + parameters["sizeLog2"] = [1, 1] # output 2x2 images (smallest) + parameters["paddingAlgorithm"] = "passthrough" # no dilation (faster) + parameters["dithering"] = False # no dithering (faster) + config["exportParameters"][0]["parameters"]["sizeLog2"] = [1, 1] + + result = substance_painter.export.export_project_textures(config) + if result.status != substance_painter.export.ExportStatus.Success: + raise PublishValidationError( + "Failed to export texture set: {}".format(result.message) + ) + + generated_files = set() + for texture_maps in result.textures.values(): + for texture_map in texture_maps: + generated_files.add(os.path.normpath(texture_map)) + # Directly clean up our temporary export + os.remove(texture_map) + + creator_attributes = instance.data.get("creator_attributes", {}) + allow_skipped_maps = creator_attributes.get("allowSkippedMaps", True) + error_report_missing = [] + for image_instance in instance: + + # Confirm whether the instance has its expected files generated. + # We assume there's just one representation and that it is + # the actual texture representation from the collector. + representation = next(iter(image_instance.data["representations"])) + staging_dir = representation["stagingDir"] + filenames = representation["files"] + if not isinstance(filenames, (list, tuple)): + # Convert single file to list + filenames = [filenames] + + missing = [] + for filename in filenames: + filepath = os.path.join(staging_dir, filename) + filepath = os.path.normpath(filepath) + if filepath not in generated_files: + self.log.warning(f"Missing texture: {filepath}") + missing.append(filepath) + + if allow_skipped_maps: + # TODO: This is changing state on the instance's which + # usually should not be done during validation. + self.log.warning(f"Disabling texture instance: " + f"{image_instance}") + image_instance.data["active"] = False + image_instance.data["integrate"] = False + representation.setdefault("tags", []).append("delete") + continue + + if missing: + error_report_missing.append((image_instance, missing)) + + if error_report_missing: + + message = ( + "The Texture Set skipped exporting some output maps which are " + "defined in the Output Template. 
This happens if the Output " + "Templates exports maps from channels which you do not " + "have in your current Substance Painter project.\n\n" + "To allow this enable the *Allow Skipped Output Maps* setting " + "on the instance.\n\n" + f"Instance {instance} skipped exporting output maps:\n" + "" + ) + + for image_instance, missing in error_report_missing: + missing_str = ", ".join(missing) + message += f"- **{image_instance}** skipped: {missing_str}\n" + + raise PublishValidationError( + message=message, + title="Missing output maps" + ) From 23568e5b060caff2a56d65ba3229cc74f588b62c Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 4 Apr 2023 00:11:49 +0200 Subject: [PATCH 121/187] Fix allow skipped maps logic --- .../plugins/publish/validate_ouput_maps.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/substancepainter/plugins/publish/validate_ouput_maps.py b/openpype/hosts/substancepainter/plugins/publish/validate_ouput_maps.py index 203cf7c5fe..e3d4c733e1 100644 --- a/openpype/hosts/substancepainter/plugins/publish/validate_ouput_maps.py +++ b/openpype/hosts/substancepainter/plugins/publish/validate_ouput_maps.py @@ -72,17 +72,19 @@ class ValidateOutputMaps(pyblish.api.InstancePlugin): self.log.warning(f"Missing texture: {filepath}") missing.append(filepath) + if not missing: + continue + if allow_skipped_maps: # TODO: This is changing state on the instance's which - # usually should not be done during validation. + # should not be done during validation. self.log.warning(f"Disabling texture instance: " f"{image_instance}") image_instance.data["active"] = False image_instance.data["integrate"] = False representation.setdefault("tags", []).append("delete") continue - - if missing: + else: error_report_missing.append((image_instance, missing)) if error_report_missing: From 5059cf74b5bddfa85b4b9157fd2ffe7f346cc203 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 4 Apr 2023 00:13:50 +0200 Subject: [PATCH 122/187] Support multiple texture sets + stacks --- .../publish/collect_textureset_images.py | 33 +++++++++++++++---- 1 file changed, 26 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py index 50a96b94ae..d11abd1019 100644 --- a/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py +++ b/openpype/hosts/substancepainter/plugins/publish/collect_textureset_images.py @@ -41,10 +41,12 @@ class CollectTextureSet(pyblish.api.InstancePlugin): for template, outputs in template_maps.items(): self.log.info(f"Processing {template}") self.create_image_instance(instance, template, outputs, - asset_doc=asset_doc) + asset_doc=asset_doc, + texture_set_name=texture_set_name, + stack_name=stack_name) def create_image_instance(self, instance, template, outputs, - asset_doc): + asset_doc, texture_set_name, stack_name): """Create a new instance per image or UDIM sequence. The new instances will be of family `image`. 
@@ -56,14 +58,27 @@ class CollectTextureSet(pyblish.api.InstancePlugin): fnames = [os.path.basename(output["filepath"]) for output in outputs] ext = os.path.splitext(first_filepath)[1] assert ext.lstrip("."), f"No extension: {ext}" - map_identifier = strip_template(template) + + always_include_texture_set_name = False # todo: make this configurable + all_texture_sets = substance_painter.textureset.all_texture_sets() + texture_set = substance_painter.textureset.TextureSet.from_name( + texture_set_name + ) # Define the suffix we want to give this particular texture # set and set up a remapped subset naming for it. - # TODO (Critical) Support needs to be added to have multiple materials - # with each their own maps. So we might need to include the - # material or alike in the variant suffix too? - suffix = f".{map_identifier}" + suffix = "" + if always_include_texture_set_name or len(all_texture_sets) > 1: + # More than one texture set, include texture set name + suffix += f".{texture_set_name}" + if texture_set.is_layered_material() and stack_name: + # More than one stack, include stack name + suffix += f".{stack_name}" + + # Always include the map identifier + map_identifier = strip_template(template) + suffix += f".{map_identifier}" + image_subset = get_subset_name( # TODO: The family actually isn't 'texture' currently but for now # this is only done so the subset name starts with 'texture' @@ -110,6 +125,10 @@ class CollectTextureSet(pyblish.api.InstancePlugin): # Group the textures together in the loader image_instance.data["subsetGroup"] = instance.data["subset"] + # Store the texture set name and stack name on the instance + image_instance.data["textureSetName"] = texture_set_name + image_instance.data["textureStackName"] = stack_name + # Store color space with the instance # Note: The extractor will assign it to the representation colorspace = outputs[0].get("colorSpace") From d9c67a0bd50fb5c8625632d942c6bf4bf85eb908 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 7 Apr 2023 16:43:53 +0200 Subject: [PATCH 123/187] Improve speed of logging for when its validating a node with many prims. 
--- .../publish/validate_vdb_output_node.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index f9f88b3bf9..e7908ab119 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -2,6 +2,7 @@ import pyblish.api import hou from openpype.pipeline import PublishValidationError +import clique class ValidateVDBOutputNode(pyblish.api.InstancePlugin): @@ -56,12 +57,21 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): nr_of_prims = len(prims) # All primitives must be hou.VDB - invalid_prim = False + invalid_prims = [] for prim in prims: if not isinstance(prim, hou.VDB): - cls.log.error("Found non-VDB primitive: %s" % prim) - invalid_prim = True - if invalid_prim: + invalid_prims.append(prim) + if invalid_prims: + # Log all invalid primitives in a short readable way, like 0-5 + collections, remainder = clique.assemble( + str(prim.number()) for prim in invalid_prims + ) + collection = collections[0] + cls.log.error("Found non-VDB primitives for '{}', " + "primitive indices: {}".format( + node.path(), + collection.format("{ranges}") + )) return [instance] nr_of_points = len(geometry.points()) From e2e03346fa5592c39fdd4cf3904a479f8f029f75 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 7 Apr 2023 17:35:29 +0200 Subject: [PATCH 124/187] Fix VDB validation --- ..._node.xml => validate_vdb_output_node.xml} | 0 .../publish/validate_vdb_input_node.py | 52 ------------------- .../publish/validate_vdb_output_node.py | 27 +++++----- 3 files changed, 13 insertions(+), 66 deletions(-) rename openpype/hosts/houdini/plugins/publish/help/{validate_vdb_input_node.xml => validate_vdb_output_node.xml} (100%) delete mode 100644 openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py diff --git a/openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml b/openpype/hosts/houdini/plugins/publish/help/validate_vdb_output_node.xml similarity index 100% rename from openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml rename to openpype/hosts/houdini/plugins/publish/help/validate_vdb_output_node.xml diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py deleted file mode 100644 index 1f9ccc9c42..0000000000 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -import pyblish.api -from openpype.pipeline import ( - PublishValidationError -) - - -class ValidateVDBInputNode(pyblish.api.InstancePlugin): - """Validate that the node connected to the output node is of type VDB. 
- - Regardless of the amount of VDBs create the output will need to have an - equal amount of VDBs, points, primitives and vertices - - A VDB is an inherited type of Prim, holds the following data: - - Primitives: 1 - - Points: 1 - - Vertices: 1 - - VDBs: 1 - - """ - - order = pyblish.api.ValidatorOrder + 0.1 - families = ["vdbcache"] - hosts = ["houdini"] - label = "Validate Input Node (VDB)" - - def process(self, instance): - invalid = self.get_invalid(instance) - if invalid: - raise PublishValidationError( - self, - "Node connected to the output node is not of type VDB", - title=self.label - ) - - @classmethod - def get_invalid(cls, instance): - - node = instance.data["output_node"] - - prims = node.geometry().prims() - nr_of_prims = len(prims) - - nr_of_points = len(node.geometry().points()) - if nr_of_points != nr_of_prims: - cls.log.error("The number of primitives and points do not match") - return [instance] - - for prim in prims: - if prim.numVertices() != 1: - cls.log.error("Found primitive with more than 1 vertex!") - return [instance] diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index e7908ab119..ee3b9a0a6a 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -1,8 +1,7 @@ # -*- coding: utf-8 -*- import pyblish.api import hou -from openpype.pipeline import PublishValidationError -import clique +from openpype.pipeline import PublishXmlValidationError class ValidateVDBOutputNode(pyblish.api.InstancePlugin): @@ -27,9 +26,9 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise PublishValidationError( - "Node connected to the output node is not" " of type VDB!", - title=self.label + raise PublishXmlValidationError( + self, + "Node connected to the output node is not" " of type VDB!" 
) @classmethod @@ -62,16 +61,16 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): if not isinstance(prim, hou.VDB): invalid_prims.append(prim) if invalid_prims: - # Log all invalid primitives in a short readable way, like 0-5 - collections, remainder = clique.assemble( - str(prim.number()) for prim in invalid_prims + # TODO Log all invalid primitives in a short readable way, like 0-5 + # This logging can be really slow for many primitives, say 20000+ + # which might be fixed by logging only consecutive ranges + cls.log.error( + "Found non-VDB primitives for '{}', " + "primitive indices: {}".format( + node.path(), + ", ".join(prim.number() for prim in invalid_prims) + ) ) - collection = collections[0] - cls.log.error("Found non-VDB primitives for '{}', " - "primitive indices: {}".format( - node.path(), - collection.format("{ranges}") - )) return [instance] nr_of_points = len(geometry.points()) From 3e71ace6b762806d3b4ee097d4bd523d13dbe627 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 7 Apr 2023 17:37:35 +0200 Subject: [PATCH 125/187] Fix logic --- .../hosts/houdini/plugins/publish/validate_vdb_output_node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index ee3b9a0a6a..a8fb5007cf 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -68,7 +68,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): "Found non-VDB primitives for '{}', " "primitive indices: {}".format( node.path(), - ", ".join(prim.number() for prim in invalid_prims) + ", ".join(str(prim.number()) for prim in invalid_prims) ) ) return [instance] From 3f404002e5abc8eee6778fda6a7363a29273329f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 7 Apr 2023 17:41:41 +0200 Subject: [PATCH 126/187] Cosmetics + less aggresive message (no exclamation point) --- .../hosts/houdini/plugins/publish/validate_vdb_output_node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index a8fb5007cf..dd9ffc2a12 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -28,7 +28,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): if invalid: raise PublishXmlValidationError( self, - "Node connected to the output node is not" " of type VDB!" + "Node connected to the output node is not of type VDB." 
) @classmethod From 13b72fa57ccdb1353d515eac1da797e024175774 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 7 Apr 2023 17:59:36 +0200 Subject: [PATCH 127/187] Improve logging speed + readability for large number of primitives --- .../publish/validate_vdb_output_node.py | 42 +++++++++++++++++-- 1 file changed, 38 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index dd9ffc2a12..98a0796fec 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -4,6 +4,39 @@ import hou from openpype.pipeline import PublishXmlValidationError +def group_consecutive_numbers(nums): + """ + Args: + nums (list): List of sorted integer numbers. + + Yields: + str: Group ranges as {start}-{end} if more than one number in the range + else it yields {end} + + """ + start = None + end = None + + def _result(a, b): + if a == b: + return "{}".format(a) + else: + return "{}-{}".format(a, b) + + for num in nums: + if start is None: + start = num + end = num + elif num == end + 1: + end = num + else: + yield _result(start, end) + start = num + end = num + if start is not None: + yield _result(start, end) + + class ValidateVDBOutputNode(pyblish.api.InstancePlugin): """Validate that the node connected to the output node is of type VDB. @@ -61,14 +94,15 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): if not isinstance(prim, hou.VDB): invalid_prims.append(prim) if invalid_prims: - # TODO Log all invalid primitives in a short readable way, like 0-5 - # This logging can be really slow for many primitives, say 20000+ - # which might be fixed by logging only consecutive ranges + # Log prim numbers as consecutive ranges so logging isn't very + # slow for large number of primitives cls.log.error( "Found non-VDB primitives for '{}', " "primitive indices: {}".format( node.path(), - ", ".join(str(prim.number()) for prim in invalid_prims) + ", ".join(group_consecutive_numbers( + prim.number() for prim in invalid_prims + )) ) ) return [instance] From 97f13a169b421ec8341f6f3c1b02a1cd5d1b4206 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 7 Apr 2023 18:11:35 +0200 Subject: [PATCH 128/187] Allow output node to be not collected, then correctly show error --- .../hosts/houdini/plugins/publish/validate_vdb_output_node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index 98a0796fec..b2b5c63799 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -67,7 +67,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - node = instance.data["output_node"] + node = instance.data.get("output_node") if node is None: cls.log.error( "SOP path is not correctly set on " From a15d8fde0145dc9e7d5fb41a248f7b25af5d3592 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 12 Apr 2023 15:01:49 +0200 Subject: [PATCH 129/187] Specify per Creator where it is listed in Tab search + Add a null node in COP2 or SOP network when generated there --- .../hosts/houdini/api/creator_node_shelves.py | 57 +++++++++++++++---- .../plugins/create/create_alembic_camera.py | 8 +++ .../plugins/create/create_composite.py | 16 
+++++- .../plugins/create/create_pointcache.py | 9 +++ .../plugins/create/create_vbd_cache.py | 8 +++ 5 files changed, 87 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/houdini/api/creator_node_shelves.py b/openpype/hosts/houdini/api/creator_node_shelves.py index 3638e14296..bc02b258b7 100644 --- a/openpype/hosts/houdini/api/creator_node_shelves.py +++ b/openpype/hosts/houdini/api/creator_node_shelves.py @@ -12,26 +12,35 @@ import tempfile import logging import os +from openpype.client import get_asset_by_name from openpype.pipeline import registered_host from openpype.pipeline.create import CreateContext from openpype.resources import get_openpype_icon_filepath import hou +import stateutils +import soptoolutils +import cop2toolutils + log = logging.getLogger(__name__) CREATE_SCRIPT = """ from openpype.hosts.houdini.api.creator_node_shelves import create_interactive -create_interactive("{identifier}") +create_interactive("{identifier}", **kwargs) """ -def create_interactive(creator_identifier): +def create_interactive(creator_identifier, **kwargs): """Create a Creator using its identifier interactively. This is used by the generated shelf tools as callback when a user selects the creator from the node tab search menu. + The `kwargs` should be what Houdini passes to the tool create scripts + context. For more information see: + https://www.sidefx.com/docs/houdini/hom/tool_script.html#arguments + Args: creator_identifier (str): The creator identifier of the Creator plugin to create. @@ -58,6 +67,33 @@ def create_interactive(creator_identifier): host = registered_host() context = CreateContext(host) + creator = context.manual_creators.get(creator_identifier) + if not creator: + raise RuntimeError("Invalid creator identifier: " + "{}".format(creator_identifier)) + + pane = stateutils.activePane(kwargs) + if isinstance(pane, hou.NetworkEditor): + pwd = pane.pwd() + subset_name = creator.get_subset_name( + variant=variant, + task_name=context.get_current_task_name(), + asset_doc=get_asset_by_name( + project_name=context.get_current_project_name(), + asset_name=context.get_current_asset_name() + ), + project_name=context.get_current_project_name(), + host_name=context.host_name + ) + + tool_fn = { + hou.sopNodeTypeCategory(): soptoolutils.genericTool, + hou.cop2NodeTypeCategory(): cop2toolutils.genericTool + }.get(pwd.childTypeCategory()) + + if tool_fn != None: + out_null = tool_fn(kwargs, "null") + out_null.setName("OUT_{}".format(subset_name), unique_name=True) before = context.instances_by_id.copy() @@ -135,12 +171,17 @@ def install(): log.debug("Writing OpenPype Creator nodes to shelf: {}".format(filepath)) tools = [] + + default_network_categories = [hou.ropNodeTypeCategory()] with shelves_change_block(): for identifier, creator in create_context.manual_creators.items(): - # TODO: Allow the creator plug-in itself to override the categories - # for where they are shown, by e.g. 
defining - # `Creator.get_network_categories()` + # Allow the creator plug-in itself to override the categories + # for where they are shown with `Creator.get_network_categories()` + if hasattr(creator, "get_network_categories"): + network_categories = creator.get_network_categories() + else: + network_categories = default_network_categories key = "openpype_create.{}".format(identifier) log.debug(f"Registering {key}") @@ -153,17 +194,13 @@ def install(): creator.label ), "help_url": None, - "network_categories": [ - hou.ropNodeTypeCategory(), - hou.sopNodeTypeCategory() - ], + "network_categories": network_categories, "viewer_categories": [], "cop_viewer_categories": [], "network_op_type": None, "viewer_op_type": None, "locations": ["OpenPype"] } - label = "Create {}".format(creator.label) tool = hou.shelves.tool(key) if tool: diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index fec64eb4a1..8c8a5e9eed 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -3,6 +3,8 @@ from openpype.hosts.houdini.api import plugin from openpype.pipeline import CreatedInstance, CreatorError +import hou + class CreateAlembicCamera(plugin.HoudiniCreator): """Single baked camera from Alembic ROP.""" @@ -47,3 +49,9 @@ class CreateAlembicCamera(plugin.HoudiniCreator): self.lock_parameters(instance_node, to_lock) instance_node.parm("trange").set(1) + + def get_network_categories(self): + return [ + hou.ropNodeTypeCategory(), + hou.objNodeTypeCategory() + ] diff --git a/openpype/hosts/houdini/plugins/create/create_composite.py b/openpype/hosts/houdini/plugins/create/create_composite.py index 45af2b0630..9d4f7969bb 100644 --- a/openpype/hosts/houdini/plugins/create/create_composite.py +++ b/openpype/hosts/houdini/plugins/create/create_composite.py @@ -1,7 +1,9 @@ # -*- coding: utf-8 -*- """Creator plugin for creating composite sequences.""" from openpype.hosts.houdini.api import plugin -from openpype.pipeline import CreatedInstance +from openpype.pipeline import CreatedInstance, CreatorError + +import hou class CreateCompositeSequence(plugin.HoudiniCreator): @@ -35,8 +37,20 @@ class CreateCompositeSequence(plugin.HoudiniCreator): "copoutput": filepath } + if self.selected_nodes: + if len(self.selected_nodes) > 1: + raise CreatorError("More than one item selected.") + path = self.selected_nodes[0].path() + parms["coppath"] = path + instance_node.setParms(parms) # Lock any parameters in this list to_lock = ["prim_to_detail_pattern"] self.lock_parameters(instance_node, to_lock) + + def get_network_categories(self): + return [ + hou.ropNodeTypeCategory(), + hou.cop2NodeTypeCategory() + ] diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 6b6b277422..6efa96a42b 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -3,6 +3,8 @@ from openpype.hosts.houdini.api import plugin from openpype.pipeline import CreatedInstance +import hou + class CreatePointCache(plugin.HoudiniCreator): """Alembic ROP to pointcache""" @@ -49,3 +51,10 @@ class CreatePointCache(plugin.HoudiniCreator): # Lock any parameters in this list to_lock = ["prim_to_detail_pattern"] self.lock_parameters(instance_node, to_lock) + + def get_network_categories(self): + return [ + hou.ropNodeTypeCategory(), + 
hou.sopNodeTypeCategory() + ] + diff --git a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py index 1a5011745f..c015cebd49 100644 --- a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py +++ b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py @@ -3,6 +3,8 @@ from openpype.hosts.houdini.api import plugin from openpype.pipeline import CreatedInstance +import hou + class CreateVDBCache(plugin.HoudiniCreator): """OpenVDB from Geometry ROP""" @@ -34,3 +36,9 @@ class CreateVDBCache(plugin.HoudiniCreator): parms["soppath"] = self.selected_nodes[0].path() instance_node.setParms(parms) + + def get_network_categories(self): + return [ + hou.ropNodeTypeCategory(), + hou.sopNodeTypeCategory() + ] From c6a0b7ff4546bddd687a617cdb05edd4e88f5447 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 12 Apr 2023 15:23:37 +0200 Subject: [PATCH 130/187] Shush hound --- openpype/hosts/houdini/api/creator_node_shelves.py | 2 +- openpype/hosts/houdini/plugins/create/create_pointcache.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/api/creator_node_shelves.py b/openpype/hosts/houdini/api/creator_node_shelves.py index bc02b258b7..cd14090104 100644 --- a/openpype/hosts/houdini/api/creator_node_shelves.py +++ b/openpype/hosts/houdini/api/creator_node_shelves.py @@ -91,7 +91,7 @@ def create_interactive(creator_identifier, **kwargs): hou.cop2NodeTypeCategory(): cop2toolutils.genericTool }.get(pwd.childTypeCategory()) - if tool_fn != None: + if tool_fn is not None: out_null = tool_fn(kwargs, "null") out_null.setName("OUT_{}".format(subset_name), unique_name=True) diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 6efa96a42b..df74070fee 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -57,4 +57,3 @@ class CreatePointCache(plugin.HoudiniCreator): hou.ropNodeTypeCategory(), hou.sopNodeTypeCategory() ] - From b3044398fc9181db2d2230f9f0f5cc1de7e9d297 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 19 Apr 2023 23:43:47 +0200 Subject: [PATCH 131/187] Improve validation report + allow to select the invalid node --- openpype/hosts/houdini/api/action.py | 46 +++++++ .../publish/help/validate_vdb_output_node.xml | 25 ++-- .../publish/validate_vdb_output_node.py | 112 ++++++++++++------ 3 files changed, 135 insertions(+), 48 deletions(-) create mode 100644 openpype/hosts/houdini/api/action.py diff --git a/openpype/hosts/houdini/api/action.py b/openpype/hosts/houdini/api/action.py new file mode 100644 index 0000000000..27e8ce55bb --- /dev/null +++ b/openpype/hosts/houdini/api/action.py @@ -0,0 +1,46 @@ +import pyblish.api +import hou + +from openpype.pipeline.publish import get_errored_instances_from_context + + +class SelectInvalidAction(pyblish.api.Action): + """Select invalid nodes in Maya when plug-in failed. + + To retrieve the invalid nodes this assumes a static `get_invalid()` + method is available on the plugin. 
+ + """ + label = "Select invalid" + on = "failed" # This action is only available on a failed plug-in + icon = "search" # Icon from Awesome Icon + + def process(self, context, plugin): + + errored_instances = get_errored_instances_from_context(context) + + # Apply pyblish.logic to get the instances for the plug-in + instances = pyblish.api.instances_by_plugin(errored_instances, plugin) + + # Get the invalid nodes for the plug-ins + self.log.info("Finding invalid nodes..") + invalid = list() + for instance in instances: + invalid_nodes = plugin.get_invalid(instance) + if invalid_nodes: + if isinstance(invalid_nodes, (list, tuple)): + invalid.extend(invalid_nodes) + else: + self.log.warning("Plug-in returned to be invalid, " + "but has no selectable nodes.") + + hou.clearAllSelected() + if invalid: + self.log.info("Selecting invalid nodes: {}".format( + ", ".join(node.path() for node in invalid) + )) + for node in invalid: + node.setSelected(True) + node.setCurrent(True) + else: + self.log.info("No invalid nodes found.") diff --git a/openpype/hosts/houdini/plugins/publish/help/validate_vdb_output_node.xml b/openpype/hosts/houdini/plugins/publish/help/validate_vdb_output_node.xml index 0f92560bf7..eb83bfffe3 100644 --- a/openpype/hosts/houdini/plugins/publish/help/validate_vdb_output_node.xml +++ b/openpype/hosts/houdini/plugins/publish/help/validate_vdb_output_node.xml @@ -1,21 +1,28 @@ -Scene setting +Invalid VDB -## Invalid input node +## Invalid VDB output + +All primitives of the output geometry must be VDBs, no other primitive +types are allowed. That means that regardless of the amount of VDBs in the +geometry it will have an equal amount of VDBs, points, primitives and +vertices since each VDB primitive is one point, one vertex and one VDB. + +This validation only checks the geometry on the first frame of the export +frame range. + -VDB input must have the same number of VDBs, points, primitives and vertices as output. -### __Detailed Info__ (optional) +### Detailed Info + +ROP node `{rop_path}` is set to export SOP path `{sop_path}`. + +{message} -A VDB is an inherited type of Prim, holds the following data: - - Primitives: 1 - - Points: 1 - - Vertices: 1 - - VDBs: 1 \ No newline at end of file diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index b2b5c63799..3fa75e5822 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -1,7 +1,9 @@ # -*- coding: utf-8 -*- import pyblish.api import hou + from openpype.pipeline import PublishXmlValidationError +from openpype.hosts.houdini.api.action import SelectInvalidAction def group_consecutive_numbers(nums): @@ -40,8 +42,13 @@ def group_consecutive_numbers(nums): class ValidateVDBOutputNode(pyblish.api.InstancePlugin): """Validate that the node connected to the output node is of type VDB. - Regardless of the amount of VDBs create the output will need to have an - equal amount of VDBs, points, primitives and vertices + All primitives of the output geometry must be VDBs, no other primitive + types are allowed. That means that regardless of the amount of VDBs in the + geometry it will have an equal amount of VDBs, points, primitives and + vertices since each VDB primitive is one point, one vertex and one VDB. + + This validation only checks the geometry on the first frame of the export + frame range for optimization purposes. 
A VDB is an inherited type of Prim, holds the following data: - Primitives: 1 @@ -55,64 +62,91 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): families = ["vdbcache"] hosts = ["houdini"] label = "Validate Output Node (VDB)" + actions = [SelectInvalidAction] def process(self, instance): - invalid = self.get_invalid(instance) - if invalid: + invalid_nodes, message = self.get_invalid_with_message(instance) + if invalid_nodes: raise PublishXmlValidationError( self, - "Node connected to the output node is not of type VDB." + "Node connected to the output node is not of type VDB.", + formatting_data={ + "message": message, + "rop_path": instance.data.get("instance_node"), + "sop_path": instance.data.get("output_node") + } ) @classmethod - def get_invalid(cls, instance): + def get_invalid_with_message(cls, instance): node = instance.data.get("output_node") if node is None: - cls.log.error( + instance_node = instance.data.get("instance_node") + error = ( "SOP path is not correctly set on " - "ROP node '%s'." % instance.data.get("instance_node") + "ROP node `%s`." % instance_node ) - return [instance] + return [instance_node, error] frame = instance.data.get("frameStart", 0) + node.cook(force=True, frame_range=(frame, frame)) geometry = node.geometryAtFrame(frame) if geometry is None: # No geometry data on this node, maybe the node hasn't cooked? - cls.log.error( + error = ( "SOP node has no geometry data. " "Is it cooked? %s" % node.path() ) - return [node] + return [node, error] - prims = geometry.prims() - nr_of_prims = len(prims) - - # All primitives must be hou.VDB - invalid_prims = [] - for prim in prims: - if not isinstance(prim, hou.VDB): - invalid_prims.append(prim) - if invalid_prims: - # Log prim numbers as consecutive ranges so logging isn't very - # slow for large number of primitives - cls.log.error( - "Found non-VDB primitives for '{}', " - "primitive indices: {}".format( - node.path(), - ", ".join(group_consecutive_numbers( - prim.number() for prim in invalid_prims - )) - ) + num_prims = geometry.intrinsicValue("primitivecount") + num_points = geometry.intrinsicValue("pointcount") + if num_prims == 0 and num_points == 0: + # Since we are only checking the first frame it doesn't mean there + # won't be VDB prims in a few frames. As such we'll assume for now + # the user knows what he or she is doing + cls.log.warning( + "SOP node `{}` has no primitives on start frame {}. " + "Validation is skipped and it is assumed elsewhere in the " + "frame range VDB prims and only VDB prims will exist." + "".format(node.path(), int(frame)) ) - return [instance] + return [None, None] - nr_of_points = len(geometry.points()) - if nr_of_points != nr_of_prims: - cls.log.error("The number of primitives and points do not match") - return [instance] + num_vdb_prims = geometry.countPrimType(hou.primType.VDB) + cls.log.debug("Detected {} VDB primitives".format(num_vdb_prims)) + if num_prims != num_vdb_prims: + # There's at least one primitive that is not a VDB. + # Search them and report them to the artist. + prims = geometry.prims() + invalid_prims = [prim for prim in prims + if not isinstance(prim, hou.VDB)] + if invalid_prims: + # Log prim numbers as consecutive ranges so logging isn't very + # slow for large number of primitives + error = ( + "Found non-VDB primitives for `{}`. 
" + "Primitive indices {} are not VDB primitives.".format( + node.path(), + ", ".join(group_consecutive_numbers( + prim.number() for prim in invalid_prims + )) + ) + ) + return [node, error] - for prim in prims: - if prim.numVertices() != 1: - cls.log.error("Found primitive with more than 1 vertex!") - return [instance] + if num_points != num_vdb_prims: + # We have points unrelated to the VDB primitives. + error = ( + "The number of primitives and points do not match in '{}'. " + "This likely means you have unconnected points, which we do " + "not allow in the VDB output.".format(node.path())) + return [node, error] + + return [None, None] + + @classmethod + def get_invalid(cls, instance): + nodes, _ = cls.get_invalid_with_message(instance) + return nodes From bb24b823649c3cf124fafb9c465a9fd5709d193a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 20 Apr 2023 00:05:07 +0200 Subject: [PATCH 132/187] Fix type bug --- .../houdini/plugins/publish/validate_vdb_output_node.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index 3fa75e5822..def9595e9a 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -67,13 +67,18 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): def process(self, instance): invalid_nodes, message = self.get_invalid_with_message(instance) if invalid_nodes: + + # instance_node is str, but output_node is hou.Node so we convert + output = instance.data.get("output_node") + output_path = output.path() if output else None + raise PublishXmlValidationError( self, "Node connected to the output node is not of type VDB.", formatting_data={ "message": message, "rop_path": instance.data.get("instance_node"), - "sop_path": instance.data.get("output_node") + "sop_path": output_path } ) From 9484bd4a51c465957b49c83c915b0995f1a4de98 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 20 Apr 2023 00:05:47 +0200 Subject: [PATCH 133/187] Force geometry update, otherwise manual update mode will fail to get the geometry correctly --- .../publish/validate_vdb_output_node.py | 22 +++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index def9595e9a..43da4b0528 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- +import contextlib + import pyblish.api import hou @@ -39,6 +41,23 @@ def group_consecutive_numbers(nums): yield _result(start, end) +@contextlib.contextmanager +def update_mode_context(mode): + original = hou.updateModeSetting() + try: + hou.setUpdateMode(mode) + yield + finally: + hou.setUpdateMode(original) + + +def get_geometry_at_frame(sop_node, frame, force=True): + """Return geometry at frame but force a cooked value.""" + with update_mode_context(hou.updateMode.AutoUpdate): + sop_node.cook(force=force, frame_range=(frame, frame)) + return sop_node.geometryAtFrame(frame) + + class ValidateVDBOutputNode(pyblish.api.InstancePlugin): """Validate that the node connected to the output node is of type VDB. 
@@ -95,8 +114,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): return [instance_node, error] frame = instance.data.get("frameStart", 0) - node.cook(force=True, frame_range=(frame, frame)) - geometry = node.geometryAtFrame(frame) + geometry = get_geometry_at_frame(node, frame) if geometry is None: # No geometry data on this node, maybe the node hasn't cooked? error = ( From cbd88a616c0420448c3cb3b9028d6e15482a314c Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 20 Apr 2023 00:06:10 +0200 Subject: [PATCH 134/187] Tweak formatting, fix type bug for instance node --- .../houdini/plugins/publish/validate_vdb_output_node.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index 43da4b0528..bd1fb0b887 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -109,17 +109,17 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): instance_node = instance.data.get("instance_node") error = ( "SOP path is not correctly set on " - "ROP node `%s`." % instance_node + "ROP node `{}`.".format(instance_node) ) - return [instance_node, error] + return [hou.node(instance_node), error] frame = instance.data.get("frameStart", 0) geometry = get_geometry_at_frame(node, frame) if geometry is None: # No geometry data on this node, maybe the node hasn't cooked? error = ( - "SOP node has no geometry data. " - "Is it cooked? %s" % node.path() + "SOP node `{}` has no geometry data. " + "Was it unable to cook?".format(node.path()) ) return [node, error] From 175db5407403dcb8e0b3a3f7a49b39463b2ceb56 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 20 Apr 2023 00:09:50 +0200 Subject: [PATCH 135/187] Tweak logged message for non-UI report --- .../hosts/houdini/plugins/publish/validate_vdb_output_node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index bd1fb0b887..674782179c 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -93,7 +93,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): raise PublishXmlValidationError( self, - "Node connected to the output node is not of type VDB.", + "Invalid VDB content: {}".format(message), formatting_data={ "message": message, "rop_path": instance.data.get("instance_node"), From 16b169205ef8816099d1d94ff263069298d406cc Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 20 Apr 2023 01:29:51 +0200 Subject: [PATCH 136/187] Allow camera path to not be set correctly in review instance until validation --- .../plugins/publish/collect_review_data.py | 10 +++--- .../plugins/publish/validate_scene_review.py | 33 ++++++++++++++----- 2 files changed, 30 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/collect_review_data.py b/openpype/hosts/houdini/plugins/publish/collect_review_data.py index e321dcb2fa..3ab93dc491 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_review_data.py +++ b/openpype/hosts/houdini/plugins/publish/collect_review_data.py @@ -18,6 +18,9 @@ class CollectHoudiniReviewData(pyblish.api.InstancePlugin): instance.data["handleStart"] = 0 instance.data["handleEnd"] = 0 + # Enable 
ftrack functionality + instance.data.setdefault("families", []).append('ftrack') + # Get the camera from the rop node to collect the focal length ropnode_path = instance.data["instance_node"] ropnode = hou.node(ropnode_path) @@ -25,8 +28,9 @@ class CollectHoudiniReviewData(pyblish.api.InstancePlugin): camera_path = ropnode.parm("camera").eval() camera_node = hou.node(camera_path) if not camera_node: - raise RuntimeError("No valid camera node found on review node: " - "{}".format(camera_path)) + self.log.warning("No valid camera node found on review node: " + "{}".format(camera_path)) + return # Collect focal length. focal_length_parm = camera_node.parm("focal") @@ -48,5 +52,3 @@ class CollectHoudiniReviewData(pyblish.api.InstancePlugin): # Store focal length in `burninDataMembers` burnin_members = instance.data.setdefault("burninDataMembers", {}) burnin_members["focalLength"] = focal_length - - instance.data.setdefault("families", []).append('ftrack') diff --git a/openpype/hosts/houdini/plugins/publish/validate_scene_review.py b/openpype/hosts/houdini/plugins/publish/validate_scene_review.py index ade01d4b90..58d8a37240 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_scene_review.py +++ b/openpype/hosts/houdini/plugins/publish/validate_scene_review.py @@ -16,13 +16,17 @@ class ValidateSceneReview(pyblish.api.InstancePlugin): label = "Scene Setting for review" def process(self, instance): - invalid = self.get_invalid_scene_path(instance) report = [] + instance_node = hou.node(instance.data.get("instance_node")) + + invalid = self.get_invalid_scene_path(instance_node) if invalid: - report.append( - "Scene path does not exist: '%s'" % invalid[0], - ) + report.append(invalid) + + invalid = self.get_invalid_camera_path(instance_node) + if invalid: + report.append(invalid) invalid = self.get_invalid_resolution(instance) if invalid: @@ -33,13 +37,24 @@ class ValidateSceneReview(pyblish.api.InstancePlugin): "\n\n".join(report), title=self.label) - def get_invalid_scene_path(self, instance): - - node = hou.node(instance.data.get("instance_node")) - scene_path_parm = node.parm("scenepath") + def get_invalid_scene_path(self, rop_node): + scene_path_parm = rop_node.parm("scenepath") scene_path_node = scene_path_parm.evalAsNode() if not scene_path_node: - return [scene_path_parm.evalAsString()] + path = scene_path_parm.evalAsString() + return "Scene path does not exist: '{}'".format(path) + + def get_invalid_camera_path(self, rop_node): + camera_path_parm = rop_node.parm("camera") + camera_node = camera_path_parm.evalAsNode() + path = camera_path_parm.evalAsString() + if not camera_node: + return "Camera path does not exist: '{}'".format(path) + type_name = camera_node.type().name() + if type_name != "cam": + return "Camera path is not a camera: '{}' (type: {})".format( + path, type_name + ) def get_invalid_resolution(self, instance): node = hou.node(instance.data.get("instance_node")) From 0424f66164717b5127f89612f0d83b7865bece63 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 20 Apr 2023 01:34:01 +0200 Subject: [PATCH 137/187] Re-use instance node --- .../houdini/plugins/publish/validate_scene_review.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_scene_review.py b/openpype/hosts/houdini/plugins/publish/validate_scene_review.py index 58d8a37240..a44b7e1597 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_scene_review.py +++ b/openpype/hosts/houdini/plugins/publish/validate_scene_review.py 
@@ -28,7 +28,7 @@ class ValidateSceneReview(pyblish.api.InstancePlugin): if invalid: report.append(invalid) - invalid = self.get_invalid_resolution(instance) + invalid = self.get_invalid_resolution(instance_node) if invalid: report.extend(invalid) @@ -56,18 +56,17 @@ class ValidateSceneReview(pyblish.api.InstancePlugin): path, type_name ) - def get_invalid_resolution(self, instance): - node = hou.node(instance.data.get("instance_node")) + def get_invalid_resolution(self, rop_node): # The resolution setting is only used when Override Camera Resolution # is enabled. So we skip validation if it is disabled. - override = node.parm("tres").eval() + override = rop_node.parm("tres").eval() if not override: return invalid = [] - res_width = node.parm("res1").eval() - res_height = node.parm("res2").eval() + res_width = rop_node.parm("res1").eval() + res_height = rop_node.parm("res2").eval() if res_width == 0: invalid.append("Override Resolution width is set to zero.") if res_height == 0: From a8e5a0c5fc37a1236f70dc5c890e3160f8b5f6f3 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 20 Apr 2023 12:10:55 +0200 Subject: [PATCH 138/187] :art: calculate hash for tx texture --- openpype/hosts/maya/plugins/publish/extract_look.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 520951a5e6..3cc95a0b2e 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -280,7 +280,7 @@ class MakeTX(TextureProcessor): # Do nothing if the source file is already a .tx file. return TextureResult( path=source, - file_hash=None, # todo: unknown texture hash? + file_hash=source_hash(source), colorspace=colorspace, transfer_mode=COPY ) From ef192d3edd1da53736ed54f176e662923c718e7b Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 20 Apr 2023 12:16:40 +0200 Subject: [PATCH 139/187] Add `get_network_categories` to `CreateUSD` --- openpype/hosts/houdini/plugins/create/create_usd.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/create/create_usd.py b/openpype/hosts/houdini/plugins/create/create_usd.py index 51ed8237c5..e05d254863 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd.py +++ b/openpype/hosts/houdini/plugins/create/create_usd.py @@ -3,6 +3,8 @@ from openpype.hosts.houdini.api import plugin from openpype.pipeline import CreatedInstance +import hou + class CreateUSD(plugin.HoudiniCreator): """Universal Scene Description""" @@ -13,7 +15,6 @@ class CreateUSD(plugin.HoudiniCreator): enabled = False def create(self, subset_name, instance_data, pre_create_data): - import hou # noqa instance_data.pop("active", None) instance_data.update({"node_type": "usd"}) @@ -43,3 +44,9 @@ class CreateUSD(plugin.HoudiniCreator): "id", ] self.lock_parameters(instance_node, to_lock) + + def get_network_categories(self): + return [ + hou.ropNodeTypeCategory(), + hou.lopNodeTypeCategory() + ] From 96b1b3e19d6a3e7dd7387b4477224c208eeaba90 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 20 Apr 2023 12:28:44 +0200 Subject: [PATCH 140/187] Implement `get_network_categories` on Houdini base creator plugin --- .../hosts/houdini/api/creator_node_shelves.py | 13 ++++++++----- openpype/hosts/houdini/api/plugin.py | 16 ++++++++++++++++ 2 files changed, 24 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/houdini/api/creator_node_shelves.py 
b/openpype/hosts/houdini/api/creator_node_shelves.py index cd14090104..8a15d902b5 100644 --- a/openpype/hosts/houdini/api/creator_node_shelves.py +++ b/openpype/hosts/houdini/api/creator_node_shelves.py @@ -172,16 +172,19 @@ def install(): log.debug("Writing OpenPype Creator nodes to shelf: {}".format(filepath)) tools = [] - default_network_categories = [hou.ropNodeTypeCategory()] with shelves_change_block(): for identifier, creator in create_context.manual_creators.items(): # Allow the creator plug-in itself to override the categories # for where they are shown with `Creator.get_network_categories()` - if hasattr(creator, "get_network_categories"): - network_categories = creator.get_network_categories() - else: - network_categories = default_network_categories + if not hasattr(creator, "get_network_categories"): + log.debug("Creator {} has no `get_network_categories` method " + "and will not be added to TAB search.") + continue + + network_categories = creator.get_network_categories() + if not network_categories: + continue key = "openpype_create.{}".format(identifier) log.debug(f"Registering {key}") diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 340a7f0770..1e7eaa7e22 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -276,3 +276,19 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): color = hou.Color((0.616, 0.871, 0.769)) node.setUserData('nodeshape', shape) node.setColor(color) + + def get_network_categories(self): + """Return in which network view type this creator should show. + + The node type categories returned here will be used to define where + the creator will show up in the TAB search for nodes in Houdini's + Network View. + + This can be overridden in inherited classes to define where that + particular Creator should be visible in the TAB search. 
+ + Returns: + list: List of houdini node type categories + + """ + return [hou.ropNodeTypeCategory()] From 3cbeda17a8cfefb31fdf2b35314b53779334867c Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 20 Apr 2023 12:29:08 +0200 Subject: [PATCH 141/187] Support auto `null` node in LOPs --- openpype/hosts/houdini/api/creator_node_shelves.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/creator_node_shelves.py b/openpype/hosts/houdini/api/creator_node_shelves.py index 8a15d902b5..96e843b3a9 100644 --- a/openpype/hosts/houdini/api/creator_node_shelves.py +++ b/openpype/hosts/houdini/api/creator_node_shelves.py @@ -20,6 +20,7 @@ from openpype.resources import get_openpype_icon_filepath import hou import stateutils import soptoolutils +import loptoolutils import cop2toolutils @@ -88,7 +89,8 @@ def create_interactive(creator_identifier, **kwargs): tool_fn = { hou.sopNodeTypeCategory(): soptoolutils.genericTool, - hou.cop2NodeTypeCategory(): cop2toolutils.genericTool + hou.cop2NodeTypeCategory(): cop2toolutils.genericTool, + hou.lopNodeTypeCategory(): loptoolutils.genericTool }.get(pwd.childTypeCategory()) if tool_fn is not None: From 0941469c248c5d0503c8c40fadb0b1a280b55d94 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 20 Apr 2023 12:31:37 +0200 Subject: [PATCH 142/187] Move variable to module level --- openpype/hosts/houdini/api/creator_node_shelves.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/houdini/api/creator_node_shelves.py b/openpype/hosts/houdini/api/creator_node_shelves.py index 96e843b3a9..1cc28add86 100644 --- a/openpype/hosts/houdini/api/creator_node_shelves.py +++ b/openpype/hosts/houdini/api/creator_node_shelves.py @@ -26,6 +26,13 @@ import cop2toolutils log = logging.getLogger(__name__) +CATEGORY_GENERIC_TOOL = { + hou.sopNodeTypeCategory(): soptoolutils.genericTool, + hou.cop2NodeTypeCategory(): cop2toolutils.genericTool, + hou.lopNodeTypeCategory(): loptoolutils.genericTool +} + + CREATE_SCRIPT = """ from openpype.hosts.houdini.api.creator_node_shelves import create_interactive create_interactive("{identifier}", **kwargs) @@ -87,12 +94,7 @@ def create_interactive(creator_identifier, **kwargs): host_name=context.host_name ) - tool_fn = { - hou.sopNodeTypeCategory(): soptoolutils.genericTool, - hou.cop2NodeTypeCategory(): cop2toolutils.genericTool, - hou.lopNodeTypeCategory(): loptoolutils.genericTool - }.get(pwd.childTypeCategory()) - + tool_fn = CATEGORY_GENERIC_TOOL.get(pwd.childTypeCategory()) if tool_fn is not None: out_null = tool_fn(kwargs, "null") out_null.setName("OUT_{}".format(subset_name), unique_name=True) From 9012b9f18f45562c03ecbf7c9d1ac807a0019f93 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 20 Apr 2023 12:34:14 +0200 Subject: [PATCH 143/187] Add todo for later --- openpype/hosts/houdini/api/creator_node_shelves.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/hosts/houdini/api/creator_node_shelves.py b/openpype/hosts/houdini/api/creator_node_shelves.py index 1cc28add86..7c6122cffe 100644 --- a/openpype/hosts/houdini/api/creator_node_shelves.py +++ b/openpype/hosts/houdini/api/creator_node_shelves.py @@ -80,6 +80,10 @@ def create_interactive(creator_identifier, **kwargs): raise RuntimeError("Invalid creator identifier: " "{}".format(creator_identifier)) + # TODO: Once more elaborate unique create behavior should exist per Creator + # instead of per network editor area then we should move this from here + # to a method on the 
Creators for which this could be the default + # implementation. pane = stateutils.activePane(kwargs) if isinstance(pane, hou.NetworkEditor): pwd = pane.pwd() From 95c802047cff3dc211c7f0ad037497befbff0c14 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 20 Apr 2023 16:40:49 +0200 Subject: [PATCH 144/187] Don't make ExtractOpenGL optional --- .../hosts/houdini/plugins/publish/extract_opengl.py | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/extract_opengl.py b/openpype/hosts/houdini/plugins/publish/extract_opengl.py index c26d0813a6..6c36dec5f5 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_opengl.py +++ b/openpype/hosts/houdini/plugins/publish/extract_opengl.py @@ -2,27 +2,20 @@ import os import pyblish.api -from openpype.pipeline import ( - publish, - OptionalPyblishPluginMixin -) +from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop import hou -class ExtractOpenGL(publish.Extractor, - OptionalPyblishPluginMixin): +class ExtractOpenGL(publish.Extractor): order = pyblish.api.ExtractorOrder - 0.01 label = "Extract OpenGL" families = ["review"] hosts = ["houdini"] - optional = True def process(self, instance): - if not self.is_active(instance.data): - return ropnode = hou.node(instance.data.get("instance_node")) output = ropnode.evalParm("picture") From f05f7510b4256964de741b6fd982327da9e4e1aa Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 20 Apr 2023 21:39:49 +0200 Subject: [PATCH 145/187] adding slate condition to plugin --- openpype/plugins/publish/validate_sequence_frames.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/plugins/publish/validate_sequence_frames.py b/openpype/plugins/publish/validate_sequence_frames.py index 0dba99b07c..239008ee21 100644 --- a/openpype/plugins/publish/validate_sequence_frames.py +++ b/openpype/plugins/publish/validate_sequence_frames.py @@ -49,7 +49,12 @@ class ValidateSequenceFrames(pyblish.api.InstancePlugin): collection = collections[0] frames = list(collection.indexes) + if instance.data.get("slate"): + # Slate is not part of the frame range + frames = frames[1:] + current_range = (frames[0], frames[-1]) + required_range = (instance.data["frameStart"], instance.data["frameEnd"]) From aa2d683dd9402268d355d74df452ce72e8c09e6a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 20 Apr 2023 21:49:58 +0200 Subject: [PATCH 146/187] adding test routine for the slate condition --- .../publish/test_validate_sequence_frames.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/tests/unit/openpype/plugins/publish/test_validate_sequence_frames.py b/tests/unit/openpype/plugins/publish/test_validate_sequence_frames.py index 58d9de011d..17e47c9f64 100644 --- a/tests/unit/openpype/plugins/publish/test_validate_sequence_frames.py +++ b/tests/unit/openpype/plugins/publish/test_validate_sequence_frames.py @@ -180,5 +180,23 @@ class TestValidateSequenceFrames(BaseTest): plugin.process(instance) assert ("Missing frames: [1002]" in str(excinfo.value)) + def test_validate_sequence_frames_slate(self, instance, plugin): + representations = [ + { + "ext": "exr", + "files": [ + "Main_beauty.1000.exr", + "Main_beauty.1001.exr", + "Main_beauty.1002.exr", + "Main_beauty.1003.exr" + ] + } + ] + instance.data["slate"] = True + instance.data["representations"] = representations + instance.data["frameEnd"] = 1003 + + plugin.process(instance) + test_case = TestValidateSequenceFrames() From 
a2f79419bcb51731546e5422292e51cbd66bd52f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 21 Apr 2023 11:59:41 +0200 Subject: [PATCH 147/187] Clear publisher comment on successful publish or on window close (#4885) --- openpype/tools/publisher/window.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 8826e0f849..0615157e1b 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -284,6 +284,9 @@ class PublisherWindow(QtWidgets.QDialog): controller.event_system.add_callback( "publish.has_validated.changed", self._on_publish_validated_change ) + controller.event_system.add_callback( + "publish.finished.changed", self._on_publish_finished_change + ) controller.event_system.add_callback( "publish.process.stopped", self._on_publish_stop ) @@ -400,6 +403,7 @@ class PublisherWindow(QtWidgets.QDialog): # TODO capture changes and ask user if wants to save changes on close if not self._controller.host_context_has_changed: self._save_changes(False) + self._comment_input.setText("") # clear comment self._reset_on_show = True self._controller.clear_thumbnail_temp_dir_path() super(PublisherWindow, self).closeEvent(event) @@ -777,6 +781,11 @@ class PublisherWindow(QtWidgets.QDialog): if event["value"]: self._validate_btn.setEnabled(False) + def _on_publish_finished_change(self, event): + if event["value"]: + # Successful publish, remove comment from UI + self._comment_input.setText("") + def _on_publish_stop(self): self._set_publish_overlay_visibility(False) self._reset_btn.setEnabled(True) From 5b1854e9022ed7e6fc994b08ed160543572851c2 Mon Sep 17 00:00:00 2001 From: Kayla Man <64118225+moonyuet@users.noreply.github.com> Date: Fri, 21 Apr 2023 18:17:01 +0800 Subject: [PATCH 148/187] Add fps as instance.data in collect review in Houdini. (#4888) * add fps as instance data in collect review data * Trllo's feedback --- openpype/hosts/houdini/plugins/publish/collect_review_data.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/houdini/plugins/publish/collect_review_data.py b/openpype/hosts/houdini/plugins/publish/collect_review_data.py index e321dcb2fa..8118e6d558 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_review_data.py +++ b/openpype/hosts/houdini/plugins/publish/collect_review_data.py @@ -17,6 +17,7 @@ class CollectHoudiniReviewData(pyblish.api.InstancePlugin): # which isn't the actual frame range that this instance renders. instance.data["handleStart"] = 0 instance.data["handleEnd"] = 0 + instance.data["fps"] = instance.context.data["fps"] # Get the camera from the rop node to collect the focal length ropnode_path = instance.data["instance_node"] From cac990cd3cb707fa3528b2f302fb5791a783b678 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 21 Apr 2023 12:20:10 +0200 Subject: [PATCH 149/187] Code: Tweak docstrings and return type hints (#4875) * Tweak docstrings and return type hints * Remove test import of `typing` * Fix indentations * Fix indentations * Fix typos * Update openpype/client/entities.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> * `fields` as `Optional` iterable of strings. 
--------- Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/client/entities.py | 229 +++++++++++++++++++++--------------- 1 file changed, 135 insertions(+), 94 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 376157d210..8004dc3019 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -69,6 +69,19 @@ def convert_ids(in_ids): def get_projects(active=True, inactive=False, fields=None): + """Yield all project entity documents. + + Args: + active (Optional[bool]): Include active projects. Defaults to True. + inactive (Optional[bool]): Include inactive projects. + Defaults to False. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Yields: + dict: Project entity data which can be reduced to specified 'fields'. + None is returned if project with specified filters was not found. + """ mongodb = get_project_database() for project_name in mongodb.collection_names(): if project_name in ("system.indexes",): @@ -81,6 +94,20 @@ def get_projects(active=True, inactive=False, fields=None): def get_project(project_name, active=True, inactive=True, fields=None): + """Return project entity document by project name. + + Args: + project_name (str): Name of project. + active (Optional[bool]): Allow active project. Defaults to True. + inactive (Optional[bool]): Allow inactive project. Defaults to True. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. + + Returns: + Union[Dict, None]: Project entity data which can be reduced to + specified 'fields'. None is returned if project with specified + filters was not found. + """ # Skip if both are disabled if not active and not inactive: return None @@ -124,17 +151,18 @@ def get_whole_project(project_name): def get_asset_by_id(project_name, asset_id, fields=None): - """Receive asset data by it's id. + """Receive asset data by its id. Args: project_name (str): Name of project where to look for queried entities. asset_id (Union[str, ObjectId]): Asset's id. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. Returns: - dict: Asset entity data. - None: Asset was not found by id. + Union[Dict, None]: Asset entity data which can be reduced to + specified 'fields'. None is returned if asset with specified + filters was not found. """ asset_id = convert_id(asset_id) @@ -147,17 +175,18 @@ def get_asset_by_id(project_name, asset_id, fields=None): def get_asset_by_name(project_name, asset_name, fields=None): - """Receive asset data by it's name. + """Receive asset data by its name. Args: project_name (str): Name of project where to look for queried entities. asset_name (str): Asset's name. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. Returns: - dict: Asset entity data. - None: Asset was not found by name. + Union[Dict, None]: Asset entity data which can be reduced to + specified 'fields'. None is returned if asset with specified + filters was not found. """ if not asset_name: @@ -195,8 +224,8 @@ def _get_assets( parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids. 
standard (bool): Query standard assets (type 'asset'). archived (bool): Query archived assets (type 'archived_asset'). - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. Returns: Cursor: Query cursor as iterable which returns asset documents matching @@ -261,8 +290,8 @@ def get_assets( asset_names (Iterable[str]): Name assets that should be found. parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids. archived (bool): Add also archived assets. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. Returns: Cursor: Query cursor as iterable which returns asset documents matching @@ -300,8 +329,8 @@ def get_archived_assets( be found. asset_names (Iterable[str]): Name assets that should be found. parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. Returns: Cursor: Query cursor as iterable which returns asset documents matching @@ -356,17 +385,18 @@ def get_asset_ids_with_subsets(project_name, asset_ids=None): def get_subset_by_id(project_name, subset_id, fields=None): - """Single subset entity data by it's id. + """Single subset entity data by its id. Args: project_name (str): Name of project where to look for queried entities. subset_id (Union[str, ObjectId]): Id of subset which should be found. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. Returns: - None: If subset with specified filters was not found. - Dict: Subset document which can be reduced to specified 'fields'. + Union[Dict, None]: Subset entity data which can be reduced to + specified 'fields'. None is returned if subset with specified + filters was not found. """ subset_id = convert_id(subset_id) @@ -379,20 +409,19 @@ def get_subset_by_id(project_name, subset_id, fields=None): def get_subset_by_name(project_name, subset_name, asset_id, fields=None): - """Single subset entity data by it's name and it's version id. + """Single subset entity data by its name and its version id. Args: project_name (str): Name of project where to look for queried entities. subset_name (str): Name of subset. asset_id (Union[str, ObjectId]): Id of parent asset. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. Returns: - Union[None, Dict[str, Any]]: None if subset with specified filters was - not found or dict subset document which can be reduced to - specified 'fields'. - + Union[Dict, None]: Subset entity data which can be reduced to + specified 'fields'. None is returned if subset with specified + filters was not found. """ if not subset_name: return None @@ -434,8 +463,8 @@ def get_subsets( names_by_asset_ids (dict[ObjectId, List[str]]): Complex filtering using asset ids and list of subset names under the asset. 
archived (bool): Look for archived subsets too. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. Returns: Cursor: Iterable cursor yielding all matching subsets. @@ -520,17 +549,18 @@ def get_subset_families(project_name, subset_ids=None): def get_version_by_id(project_name, version_id, fields=None): - """Single version entity data by it's id. + """Single version entity data by its id. Args: project_name (str): Name of project where to look for queried entities. version_id (Union[str, ObjectId]): Id of version which should be found. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. Returns: - None: If version with specified filters was not found. - Dict: Version document which can be reduced to specified 'fields'. + Union[Dict, None]: Version entity data which can be reduced to + specified 'fields'. None is returned if version with specified + filters was not found. """ version_id = convert_id(version_id) @@ -546,18 +576,19 @@ def get_version_by_id(project_name, version_id, fields=None): def get_version_by_name(project_name, version, subset_id, fields=None): - """Single version entity data by it's name and subset id. + """Single version entity data by its name and subset id. Args: project_name (str): Name of project where to look for queried entities. - version (int): name of version entity (it's version). + version (int): name of version entity (its version). subset_id (Union[str, ObjectId]): Id of version which should be found. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. Returns: - None: If version with specified filters was not found. - Dict: Version document which can be reduced to specified 'fields'. + Union[Dict, None]: Version entity data which can be reduced to + specified 'fields'. None is returned if version with specified + filters was not found. """ subset_id = convert_id(subset_id) @@ -574,7 +605,7 @@ def get_version_by_name(project_name, version, subset_id, fields=None): def version_is_latest(project_name, version_id): - """Is version the latest from it's subset. + """Is version the latest from its subset. Note: Hero versions are considered as latest. @@ -680,8 +711,8 @@ def get_versions( versions (Iterable[int]): Version names (as integers). Filter ignored if 'None' is passed. hero (bool): Look also for hero versions. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. Returns: Cursor: Iterable cursor yielding all matching versions. @@ -705,12 +736,13 @@ def get_hero_version_by_subset_id(project_name, subset_id, fields=None): project_name (str): Name of project where to look for queried entities. subset_id (Union[str, ObjectId]): Subset id under which is hero version. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. 
Returns: - None: If hero version for passed subset id does not exists. - Dict: Hero version entity data. + Union[Dict, None]: Hero version entity data which can be reduced to + specified 'fields'. None is returned if hero version with specified + filters was not found. """ subset_id = convert_id(subset_id) @@ -730,17 +762,18 @@ def get_hero_version_by_subset_id(project_name, subset_id, fields=None): def get_hero_version_by_id(project_name, version_id, fields=None): - """Hero version by it's id. + """Hero version by its id. Args: project_name (str): Name of project where to look for queried entities. version_id (Union[str, ObjectId]): Hero version id. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. Returns: - None: If hero version with passed id was not found. - Dict: Hero version entity data. + Union[Dict, None]: Hero version entity data which can be reduced to + specified 'fields'. None is returned if hero version with specified + filters was not found. """ version_id = convert_id(version_id) @@ -773,8 +806,8 @@ def get_hero_versions( should look for hero versions. Filter ignored if 'None' is passed. version_ids (Iterable[Union[str, ObjectId]]): Hero version ids. Filter ignored if 'None' is passed. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. Returns: Cursor|list: Iterable yielding hero versions matching passed filters. @@ -801,8 +834,8 @@ def get_output_link_versions(project_name, version_id, fields=None): project_name (str): Name of project where to look for queried entities. version_id (Union[str, ObjectId]): Version id which can be used as input link for other versions. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. Returns: Iterable: Iterable cursor yielding versions that are used as input @@ -828,8 +861,8 @@ def get_last_versions(project_name, subset_ids, fields=None): Args: project_name (str): Name of project where to look for queried entities. subset_ids (Iterable[Union[str, ObjectId]]): List of subset ids. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. Returns: dict[ObjectId, int]: Key is subset id and value is last version name. @@ -913,12 +946,13 @@ def get_last_version_by_subset_id(project_name, subset_id, fields=None): Args: project_name (str): Name of project where to look for queried entities. subset_id (Union[str, ObjectId]): Id of version which should be found. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. Returns: - None: If version with specified filters was not found. - Dict: Version document which can be reduced to specified 'fields'. + Union[Dict, None]: Version entity data which can be reduced to + specified 'fields'. None is returned if version with specified + filters was not found. 
""" subset_id = convert_id(subset_id) @@ -945,12 +979,13 @@ def get_last_version_by_subset_name( asset_id (Union[str, ObjectId]): Asset id which is parent of passed subset name. asset_name (str): Asset name which is parent of passed subset name. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. Returns: - None: If version with specified filters was not found. - Dict: Version document which can be reduced to specified 'fields'. + Union[Dict, None]: Version entity data which can be reduced to + specified 'fields'. None is returned if version with specified + filters was not found. """ if not asset_id and not asset_name: @@ -972,18 +1007,18 @@ def get_last_version_by_subset_name( def get_representation_by_id(project_name, representation_id, fields=None): - """Representation entity data by it's id. + """Representation entity data by its id. Args: project_name (str): Name of project where to look for queried entities. representation_id (Union[str, ObjectId]): Representation id. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. Returns: - None: If representation with specified filters was not found. - Dict: Representation entity data which can be reduced - to specified 'fields'. + Union[Dict, None]: Representation entity data which can be reduced to + specified 'fields'. None is returned if representation with + specified filters was not found. """ if not representation_id: @@ -1004,19 +1039,19 @@ def get_representation_by_id(project_name, representation_id, fields=None): def get_representation_by_name( project_name, representation_name, version_id, fields=None ): - """Representation entity data by it's name and it's version id. + """Representation entity data by its name and its version id. Args: project_name (str): Name of project where to look for queried entities. representation_name (str): Representation name. version_id (Union[str, ObjectId]): Id of parent version entity. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. Returns: - None: If representation with specified filters was not found. - Dict: Representation entity data which can be reduced - to specified 'fields'. + Union[dict[str, Any], None]: Representation entity data which can be + reduced to specified 'fields'. None is returned if representation + with specified filters was not found. """ version_id = convert_id(version_id) @@ -1202,8 +1237,8 @@ def get_representations( names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering using version ids and list of names under the version. archived (bool): Output will also contain archived representations. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. + fields (Optional[Iterable[str]]): Fields that should be returned. All + fields are returned if 'None' is passed. Returns: Cursor: Iterable cursor yielding all matching representations. @@ -1247,8 +1282,8 @@ def get_archived_representations( representation context fields. 
         names_by_version_ids (dict[ObjectId, List[str]]): Complex filtering
             using version ids and list of names under the version.
-        fields (Iterable[str]): Fields that should be returned. All fields are
-            returned if 'None' is passed.
+        fields (Optional[Iterable[str]]): Fields that should be returned. All
+            fields are returned if 'None' is passed.
 
     Returns:
         Cursor: Iterable cursor yielding all matching representations.
@@ -1377,8 +1412,8 @@ def get_thumbnail_id_from_source(project_name, src_type, src_id):
         src_id (Union[str, ObjectId]): Id of source entity.
 
     Returns:
-        ObjectId: Thumbnail id assigned to entity.
-        None: If Source entity does not have any thumbnail id assigned.
+        Union[ObjectId, None]: Thumbnail id assigned to entity. None is
+            returned if the source entity has no thumbnail id assigned.
 
     """
 
     if not src_type or not src_id:
@@ -1397,14 +1432,14 @@ def get_thumbnails(project_name, thumbnail_ids, fields=None):
     """Receive thumbnails entity data.
 
     Thumbnail entity can be used to receive binary content of thumbnail based
-    on it's content and ThumbnailResolvers.
+    on its content and ThumbnailResolvers.
 
     Args:
         project_name (str): Name of project where to look for queried
             entities.
         thumbnail_ids (Iterable[Union[str, ObjectId]]): Ids of thumbnail
             entities.
-        fields (Iterable[str]): Fields that should be returned. All fields are
-            returned if 'None' is passed.
+        fields (Optional[Iterable[str]]): Fields that should be returned. All
+            fields are returned if 'None' is passed.
 
     Returns:
         cursor: Cursor of queried documents.
@@ -1429,12 +1464,13 @@ def get_thumbnail(project_name, thumbnail_id, fields=None):
 
     Args:
         project_name (str): Name of project where to look for queried
            entities.
        thumbnail_id (Union[str, ObjectId]): Id of thumbnail entity.
-        fields (Iterable[str]): Fields that should be returned. All fields are
-            returned if 'None' is passed.
+        fields (Optional[Iterable[str]]): Fields that should be returned. All
+            fields are returned if 'None' is passed.
 
     Returns:
-        None: If thumbnail with specified id was not found.
-        Dict: Thumbnail entity data which can be reduced to specified 'fields'.
+        Union[Dict, None]: Thumbnail entity data which can be reduced to
+            specified 'fields'. None is returned if thumbnail with specified
+            filters was not found.
 
     """
 
     if not thumbnail_id:
@@ -1458,8 +1494,13 @@ def get_workfile_info(
         project_name (str): Name of project where to look for queried
            entities.
        asset_id (Union[str, ObjectId]): Id of asset entity.
        task_name (str): Task name on asset.
-        fields (Iterable[str]): Fields that should be returned. All fields are
-            returned if 'None' is passed.
+        fields (Optional[Iterable[str]]): Fields that should be returned. All
+            fields are returned if 'None' is passed.
+
+    Returns:
+        Union[Dict, None]: Workfile entity data which can be reduced to
+            specified 'fields'. None is returned if workfile with specified
+            filters was not found.
""" if not asset_id or not task_name or not filename: From b751c539c3d3f0d2aa9ed6846bac01ce1ad91eb5 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 21 Apr 2023 12:22:11 +0200 Subject: [PATCH 150/187] Publisher: Make sure to reset asset widget when hidden and reshown (#4886) * Make sure to reset asset widget when hidden and reshown * change '_soft_reset_enabled' only on controller reset --------- Co-authored-by: Jakub Trllo --- openpype/tools/publisher/widgets/assets_widget.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/tools/publisher/widgets/assets_widget.py b/openpype/tools/publisher/widgets/assets_widget.py index 3c559af259..a750d8d540 100644 --- a/openpype/tools/publisher/widgets/assets_widget.py +++ b/openpype/tools/publisher/widgets/assets_widget.py @@ -211,6 +211,10 @@ class AssetsDialog(QtWidgets.QDialog): layout.addWidget(asset_view, 1) layout.addLayout(btns_layout, 0) + controller.event_system.add_callback( + "controller.reset.finished", self._on_controller_reset + ) + asset_view.double_clicked.connect(self._on_ok_clicked) filter_input.textChanged.connect(self._on_filter_change) ok_btn.clicked.connect(self._on_ok_clicked) @@ -245,6 +249,10 @@ class AssetsDialog(QtWidgets.QDialog): new_pos.setY(new_pos.y() - int(self.height() / 2)) self.move(new_pos) + def _on_controller_reset(self): + # Change reset enabled so model is reset on show event + self._soft_reset_enabled = True + def showEvent(self, event): """Refresh asset model on show.""" super(AssetsDialog, self).showEvent(event) From d5ccdcbaab3b7946ad62730d968498ab0e19f612 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 21 Apr 2023 13:21:46 +0200 Subject: [PATCH 151/187] fixing nightly workflow --- .github/workflows/nightly_merge.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/nightly_merge.yml b/.github/workflows/nightly_merge.yml index f1850762d9..3f8c75dce3 100644 --- a/.github/workflows/nightly_merge.yml +++ b/.github/workflows/nightly_merge.yml @@ -25,5 +25,5 @@ jobs: - name: Invoke pre-release workflow uses: benc-uk/workflow-dispatch@v1 with: - workflow: Nightly Prerelease + workflow: prerelease.yml token: ${{ secrets.YNPUT_BOT_TOKEN }} From edccc0f9e915d05843dbd0e1b1dc1513cc464aa3 Mon Sep 17 00:00:00 2001 From: Ynbot Date: Fri, 21 Apr 2023 11:23:24 +0000 Subject: [PATCH 152/187] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 1d41f1aa5d..b9090cd8a1 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.15.4" +__version__ = "3.15.5-nightly.1" From d03200238bbb1a0e57f14e88fe39902daed6c98f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 21 Apr 2023 13:27:10 +0200 Subject: [PATCH 153/187] prerelease step with workflow dispatch for update bug. 
--- .github/workflows/prerelease.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/prerelease.yml b/.github/workflows/prerelease.yml index e8c619c6eb..8c5c733c08 100644 --- a/.github/workflows/prerelease.yml +++ b/.github/workflows/prerelease.yml @@ -65,3 +65,9 @@ jobs: source_ref: 'main' target_branch: 'develop' commit_message_template: '[Automated] Merged {source_ref} into {target_branch}' + + - name: Invoke Update bug report workflow + uses: benc-uk/workflow-dispatch@v1 + with: + workflow: update_bug_report.yml + token: ${{ secrets.YNPUT_BOT_TOKEN }} \ No newline at end of file From 34b1ad105b76e7d69094741f668927b96d406f4d Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Fri, 21 Apr 2023 15:18:41 +0200 Subject: [PATCH 154/187] implemented collector for review instances to fix extract review issues (#4891) --- .../plugins/publish/collect_review_frames.py | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 openpype/hosts/traypublisher/plugins/publish/collect_review_frames.py diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_review_frames.py b/openpype/hosts/traypublisher/plugins/publish/collect_review_frames.py new file mode 100644 index 0000000000..6b41c0dd21 --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/publish/collect_review_frames.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +import pyblish.api + + +class CollectReviewInfo(pyblish.api.InstancePlugin): + """Collect data required for review instances. + + ExtractReview plugin requires frame start/end, fps on instance data which + are missing on instances from TrayPublishes. + + Warning: + This is temporary solution to "make it work". Contains removed changes + from https://github.com/ynput/OpenPype/pull/4383 reduced only for + review instances. 
+ """ + + label = "Collect Review Info" + order = pyblish.api.CollectorOrder + 0.491 + families = ["review"] + hosts = ["traypublisher"] + + def process(self, instance): + asset_entity = instance.data.get("assetEntity") + if instance.data.get("frameStart") is not None or not asset_entity: + self.log.debug("Missing required data on instance") + return + + asset_data = asset_entity["data"] + # Store collected data for logging + collected_data = {} + for key in ( + "fps", + "frameStart", + "frameEnd", + "handleStart", + "handleEnd", + ): + if key in instance.data or key not in asset_data: + continue + value = asset_data[key] + collected_data[key] = value + instance.data[key] = value + self.log.debug("Collected data: {}".format(str(collected_data))) From cf7e704964d1db85476e0d2eacc7e5c53485a6ef Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 21 Apr 2023 18:43:01 +0200 Subject: [PATCH 155/187] Collect `currentFile` context data separate from workfile instance (#4883) --- .../plugins/publish/collect_current_file.py | 32 +++-------------- .../plugins/publish/collect_workfile.py | 36 +++++++++++++++++++ 2 files changed, 41 insertions(+), 27 deletions(-) create mode 100644 openpype/hosts/houdini/plugins/publish/collect_workfile.py diff --git a/openpype/hosts/houdini/plugins/publish/collect_current_file.py b/openpype/hosts/houdini/plugins/publish/collect_current_file.py index caf679f98b..7b55778803 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/collect_current_file.py @@ -4,15 +4,14 @@ import hou import pyblish.api -class CollectHoudiniCurrentFile(pyblish.api.InstancePlugin): +class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin): """Inject the current working file into context""" - order = pyblish.api.CollectorOrder - 0.01 + order = pyblish.api.CollectorOrder - 0.1 label = "Houdini Current File" hosts = ["houdini"] - families = ["workfile"] - def process(self, instance): + def process(self, context): """Inject the current working file""" current_file = hou.hipFile.path() @@ -34,26 +33,5 @@ class CollectHoudiniCurrentFile(pyblish.api.InstancePlugin): "saved correctly." 
) - instance.context.data["currentFile"] = current_file - - folder, file = os.path.split(current_file) - filename, ext = os.path.splitext(file) - - instance.data.update({ - "setMembers": [current_file], - "frameStart": instance.context.data['frameStart'], - "frameEnd": instance.context.data['frameEnd'], - "handleStart": instance.context.data['handleStart'], - "handleEnd": instance.context.data['handleEnd'] - }) - - instance.data['representations'] = [{ - 'name': ext.lstrip("."), - 'ext': ext.lstrip("."), - 'files': file, - "stagingDir": folder, - }] - - self.log.info('Collected instance: {}'.format(file)) - self.log.info('Scene path: {}'.format(current_file)) - self.log.info('staging Dir: {}'.format(folder)) + context.data["currentFile"] = current_file + self.log.info('Current workfile path: {}'.format(current_file)) diff --git a/openpype/hosts/houdini/plugins/publish/collect_workfile.py b/openpype/hosts/houdini/plugins/publish/collect_workfile.py new file mode 100644 index 0000000000..a6e94ec29e --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/collect_workfile.py @@ -0,0 +1,36 @@ +import os + +import pyblish.api + + +class CollectWorkfile(pyblish.api.InstancePlugin): + """Inject workfile representation into instance""" + + order = pyblish.api.CollectorOrder - 0.01 + label = "Houdini Workfile Data" + hosts = ["houdini"] + families = ["workfile"] + + def process(self, instance): + + current_file = instance.context.data["currentFile"] + folder, file = os.path.split(current_file) + filename, ext = os.path.splitext(file) + + instance.data.update({ + "setMembers": [current_file], + "frameStart": instance.context.data['frameStart'], + "frameEnd": instance.context.data['frameEnd'], + "handleStart": instance.context.data['handleStart'], + "handleEnd": instance.context.data['handleEnd'] + }) + + instance.data['representations'] = [{ + 'name': ext.lstrip("."), + 'ext': ext.lstrip("."), + 'files': file, + "stagingDir": folder, + }] + + self.log.info('Collected instance: {}'.format(file)) + self.log.info('staging Dir: {}'.format(folder)) From f4ee2a7537ad393ed6991ea835e5cdb95d77f8c6 Mon Sep 17 00:00:00 2001 From: Ynbot Date: Sat, 22 Apr 2023 03:25:51 +0000 Subject: [PATCH 156/187] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index b9090cd8a1..b43cc436bb 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.15.5-nightly.1" +__version__ = "3.15.5-nightly.2" From 60d386b127badba113199c94111bd76de1dee041 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Mon, 24 Apr 2023 12:53:17 +0200 Subject: [PATCH 157/187] :bug: fix missing review flag on instance with pre-render --- openpype/hosts/nuke/plugins/publish/collect_writes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/publish/collect_writes.py b/openpype/hosts/nuke/plugins/publish/collect_writes.py index 536a0698f3..6697a1e59a 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/collect_writes.py @@ -190,7 +190,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin, # make sure rendered sequence on farm will # be used for extract review - if not instance.data["review"]: + if not instance.data.get("review"): instance.data["useSequenceForReview"] = False self.log.debug("instance.data: {}".format(pformat(instance.data))) From 
ed1fd82ff21877eb517c14865ce48da7149637e8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 24 Apr 2023 13:16:38 +0200 Subject: [PATCH 158/187] Scene inventory: Model refresh fix with cherry picking (#4895) * fix bug in model refresh * fix signal callbacks * rename '_refresh_callback' to '_on_refresh_request' --- openpype/tools/sceneinventory/model.py | 169 +++++++++++++----------- openpype/tools/sceneinventory/window.py | 9 +- 2 files changed, 98 insertions(+), 80 deletions(-) diff --git a/openpype/tools/sceneinventory/model.py b/openpype/tools/sceneinventory/model.py index 63d2945145..5cc849bb9e 100644 --- a/openpype/tools/sceneinventory/model.py +++ b/openpype/tools/sceneinventory/model.py @@ -199,90 +199,103 @@ class InventoryModel(TreeModel): """Refresh the model""" host = registered_host() - if not items: # for debugging or testing, injecting items from outside + # for debugging or testing, injecting items from outside + if items is None: if isinstance(host, ILoadHost): items = host.get_containers() - else: + elif hasattr(host, "ls"): items = host.ls() + else: + items = [] self.clear() - - if self._hierarchy_view and selected: - if not hasattr(host.pipeline, "update_hierarchy"): - # If host doesn't support hierarchical containers, then - # cherry-pick only. - self.add_items((item for item in items - if item["objectName"] in selected)) - return - - # Update hierarchy info for all containers - items_by_name = {item["objectName"]: item - for item in host.pipeline.update_hierarchy(items)} - - selected_items = set() - - def walk_children(names): - """Select containers and extend to chlid containers""" - for name in [n for n in names if n not in selected_items]: - selected_items.add(name) - item = items_by_name[name] - yield item - - for child in walk_children(item["children"]): - yield child - - items = list(walk_children(selected)) # Cherry-picked and extended - - # Cut unselected upstream containers - for item in items: - if not item.get("parent") in selected_items: - # Parent not in selection, this is root item. - item["parent"] = None - - parents = [self._root_item] - - # The length of `items` array is the maximum depth that a - # hierarchy could be. - # Take this as an easiest way to prevent looping forever. - maximum_loop = len(items) - count = 0 - while items: - if count > maximum_loop: - self.log.warning("Maximum loop count reached, possible " - "missing parent node.") - break - - _parents = list() - for parent in parents: - _unparented = list() - - def _children(): - """Child item provider""" - for item in items: - if item.get("parent") == parent.get("objectName"): - # (NOTE) - # Since `self._root_node` has no "objectName" - # entry, it will be paired with root item if - # the value of key "parent" is None, or not - # having the key. - yield item - else: - # Not current parent's child, try next - _unparented.append(item) - - self.add_items(_children(), parent) - - items[:] = _unparented - - # Parents of next level - for group_node in parent.children(): - _parents += group_node.children() - - parents[:] = _parents - count += 1 - - else: + if not selected or not self._hierarchy_view: self.add_items(items) + return + + if ( + not hasattr(host, "pipeline") + or not hasattr(host.pipeline, "update_hierarchy") + ): + # If host doesn't support hierarchical containers, then + # cherry-pick only. + self.add_items(( + item + for item in items + if item["objectName"] in selected + )) + return + + # TODO find out what this part does. 
Function 'update_hierarchy' is + # available only in 'blender' at this moment. + + # Update hierarchy info for all containers + items_by_name = { + item["objectName"]: item + for item in host.pipeline.update_hierarchy(items) + } + + selected_items = set() + + def walk_children(names): + """Select containers and extend to chlid containers""" + for name in [n for n in names if n not in selected_items]: + selected_items.add(name) + item = items_by_name[name] + yield item + + for child in walk_children(item["children"]): + yield child + + items = list(walk_children(selected)) # Cherry-picked and extended + + # Cut unselected upstream containers + for item in items: + if not item.get("parent") in selected_items: + # Parent not in selection, this is root item. + item["parent"] = None + + parents = [self._root_item] + + # The length of `items` array is the maximum depth that a + # hierarchy could be. + # Take this as an easiest way to prevent looping forever. + maximum_loop = len(items) + count = 0 + while items: + if count > maximum_loop: + self.log.warning("Maximum loop count reached, possible " + "missing parent node.") + break + + _parents = list() + for parent in parents: + _unparented = list() + + def _children(): + """Child item provider""" + for item in items: + if item.get("parent") == parent.get("objectName"): + # (NOTE) + # Since `self._root_node` has no "objectName" + # entry, it will be paired with root item if + # the value of key "parent" is None, or not + # having the key. + yield item + else: + # Not current parent's child, try next + _unparented.append(item) + + self.add_items(_children(), parent) + + items[:] = _unparented + + # Parents of next level + for group_node in parent.children(): + _parents += group_node.children() + + parents[:] = _parents + count += 1 def add_items(self, items, parent=None): """Add the items to the model. 
diff --git a/openpype/tools/sceneinventory/window.py b/openpype/tools/sceneinventory/window.py index 89424fd746..6ee1c0d38e 100644 --- a/openpype/tools/sceneinventory/window.py +++ b/openpype/tools/sceneinventory/window.py @@ -107,8 +107,8 @@ class SceneInventoryWindow(QtWidgets.QDialog): view.hierarchy_view_changed.connect( self._on_hierarchy_view_change ) - view.data_changed.connect(self.refresh) - refresh_button.clicked.connect(self.refresh) + view.data_changed.connect(self._on_refresh_request) + refresh_button.clicked.connect(self._on_refresh_request) update_all_button.clicked.connect(self._on_update_all) self._update_all_button = update_all_button @@ -139,6 +139,11 @@ class SceneInventoryWindow(QtWidgets.QDialog): """ + def _on_refresh_request(self): + """Signal callback to trigger 'refresh' without any arguments.""" + + self.refresh() + def refresh(self, items=None): with preserve_expanded_rows( tree_view=self._view, From ea83a40f8b5e25e0528775f2a5c6689391ac278e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 24 Apr 2023 15:24:04 +0200 Subject: [PATCH 159/187] Attribute is already set in `parameters` above --- .../substancepainter/plugins/publish/validate_ouput_maps.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/substancepainter/plugins/publish/validate_ouput_maps.py b/openpype/hosts/substancepainter/plugins/publish/validate_ouput_maps.py index e3d4c733e1..1f4dbaba13 100644 --- a/openpype/hosts/substancepainter/plugins/publish/validate_ouput_maps.py +++ b/openpype/hosts/substancepainter/plugins/publish/validate_ouput_maps.py @@ -34,7 +34,6 @@ class ValidateOutputMaps(pyblish.api.InstancePlugin): parameters["sizeLog2"] = [1, 1] # output 2x2 images (smallest) parameters["paddingAlgorithm"] = "passthrough" # no dilation (faster) parameters["dithering"] = False # no dithering (faster) - config["exportParameters"][0]["parameters"]["sizeLog2"] = [1, 1] result = substance_painter.export.export_project_textures(config) if result.status != substance_painter.export.ExportStatus.Success: From 2ff7d7ee1d8e24412bb50be1c2da12886d104e0a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 24 Apr 2023 15:24:30 +0200 Subject: [PATCH 160/187] Cosmetics --- .../substancepainter/plugins/publish/validate_ouput_maps.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/substancepainter/plugins/publish/validate_ouput_maps.py b/openpype/hosts/substancepainter/plugins/publish/validate_ouput_maps.py index 1f4dbaba13..b57cf4c5a2 100644 --- a/openpype/hosts/substancepainter/plugins/publish/validate_ouput_maps.py +++ b/openpype/hosts/substancepainter/plugins/publish/validate_ouput_maps.py @@ -31,7 +31,7 @@ class ValidateOutputMaps(pyblish.api.InstancePlugin): # generate the smallest size / fastest export as possible config = copy.deepcopy(config) parameters = config["exportParameters"][0]["parameters"] - parameters["sizeLog2"] = [1, 1] # output 2x2 images (smallest) + parameters["sizeLog2"] = [1, 1] # output 2x2 images (smallest) parameters["paddingAlgorithm"] = "passthrough" # no dilation (faster) parameters["dithering"] = False # no dithering (faster) From 042efaae33c495999ad5b0fdfedbff0feab77af3 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 24 Apr 2023 15:34:15 +0200 Subject: [PATCH 161/187] Implement output template extensions override --- .../plugins/create/create_textures.py | 29 ++++++++++++++++++- 1 file changed, 28 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/substancepainter/plugins/create/create_textures.py 
b/openpype/hosts/substancepainter/plugins/create/create_textures.py index 6070a06367..dece4b2cc1 100644 --- a/openpype/hosts/substancepainter/plugins/create/create_textures.py +++ b/openpype/hosts/substancepainter/plugins/create/create_textures.py @@ -91,7 +91,34 @@ class CreateTextures(Creator): EnumDef("exportFileFormat", items={ None: "Based on output template", - # TODO: implement extensions + # TODO: Get available extensions from substance API + "bmp": "bmp", + "ico": "ico", + "jpeg": "jpeg", + "jng": "jng", + "pbm": "pbm", + "pgm": "pgm", + "png": "png", + "ppm": "ppm", + "tga": "targa", + "tif": "tiff", + "wap": "wap", + "wbmp": "wbmp", + "xpm": "xpm", + "gif": "gif", + "hdr": "hdr", + "exr": "exr", + "j2k": "j2k", + "jp2": "jp2", + "pfm": "pfm", + "webp": "webp", + # TODO: Unsure why jxr format fails to export + # "jxr": "jpeg-xr", + # TODO: File formats that combine the exported textures + # like psd are not correctly supported due to + # publishing only a single file + # "psd": "psd", + # "sbsar": "sbsar", }, default=None, label="File type"), From a1b264de9b2b910f1c7a5b7aadd0b931103fcb5d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 24 Apr 2023 16:19:44 +0200 Subject: [PATCH 162/187] Fix houdini workfile icon --- openpype/hosts/houdini/plugins/create/create_workfile.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_workfile.py b/openpype/hosts/houdini/plugins/create/create_workfile.py index 0c6d840810..5f5aa306ee 100644 --- a/openpype/hosts/houdini/plugins/create/create_workfile.py +++ b/openpype/hosts/houdini/plugins/create/create_workfile.py @@ -14,7 +14,7 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): identifier = "io.openpype.creators.houdini.workfile" label = "Workfile" family = "workfile" - icon = "document" + icon = "file-o" default_variant = "Main" @@ -90,4 +90,4 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): for created_inst, _changes in update_list: if created_inst["creator_identifier"] == self.identifier: workfile_data = {"workfile": created_inst.data_to_store()} - imprint(op_ctx, workfile_data, update=True) + imprint(op_ctx, workfile_data, update=True) \ No newline at end of file From e2fc8564e6e2fe64b47d3d8561f0f288dec35b98 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 24 Apr 2023 16:23:41 +0200 Subject: [PATCH 163/187] Fix accidental newline at end of file removal --- openpype/hosts/houdini/plugins/create/create_workfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/create/create_workfile.py b/openpype/hosts/houdini/plugins/create/create_workfile.py index 5f5aa306ee..9884fca325 100644 --- a/openpype/hosts/houdini/plugins/create/create_workfile.py +++ b/openpype/hosts/houdini/plugins/create/create_workfile.py @@ -90,4 +90,4 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): for created_inst, _changes in update_list: if created_inst["creator_identifier"] == self.identifier: workfile_data = {"workfile": created_inst.data_to_store()} - imprint(op_ctx, workfile_data, update=True) \ No newline at end of file + imprint(op_ctx, workfile_data, update=True) From ebcd48d13875f472a4c5d1eddc9e4a834b37133d Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 24 Apr 2023 17:36:26 +0200 Subject: [PATCH 164/187] Publisher: Keep track about current context and fix context selection widget (#4892) * keep track about last context so it can be updated on 
context change * don't use '_asset_name' attribute for validation of selected asset * use current context after publisher window close --- .../tools/publisher/widgets/create_widget.py | 39 ++++++++++++++++++- openpype/tools/publisher/window.py | 3 ++ 2 files changed, 41 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py index ef9c5b98fe..db20b21ed7 100644 --- a/openpype/tools/publisher/widgets/create_widget.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -282,6 +282,9 @@ class CreateWidget(QtWidgets.QWidget): thumbnail_widget.thumbnail_created.connect(self._on_thumbnail_create) thumbnail_widget.thumbnail_cleared.connect(self._on_thumbnail_clear) + controller.event_system.add_callback( + "main.window.closed", self._on_main_window_close + ) controller.event_system.add_callback( "plugins.refresh.finished", self._on_plugins_refresh ) @@ -316,6 +319,10 @@ class CreateWidget(QtWidgets.QWidget): self._first_show = True self._last_thumbnail_path = None + self._last_current_context_asset = None + self._last_current_context_task = None + self._use_current_context = True + @property def current_asset_name(self): return self._controller.current_asset_name @@ -356,12 +363,39 @@ class CreateWidget(QtWidgets.QWidget): if check_prereq: self._invalidate_prereq() + def _on_main_window_close(self): + """Publisher window was closed.""" + + # Use current context on next refresh + self._use_current_context = True + def refresh(self): + current_asset_name = self._controller.current_asset_name + current_task_name = self._controller.current_task_name + # Get context before refresh to keep selection of asset and # task widgets asset_name = self._get_asset_name() task_name = self._get_task_name() + # Replace by current context if last loaded context was + # 'current context' before reset + if ( + self._use_current_context + or ( + self._last_current_context_asset + and asset_name == self._last_current_context_asset + and task_name == self._last_current_context_task + ) + ): + asset_name = current_asset_name + task_name = current_task_name + + # Store values for future refresh + self._last_current_context_asset = current_asset_name + self._last_current_context_task = current_task_name + self._use_current_context = False + self._prereq_available = False # Disable context widget so refresh of asset will use context asset @@ -398,7 +432,10 @@ class CreateWidget(QtWidgets.QWidget): prereq_available = False creator_btn_tooltips.append("Creator is not selected") - if self._context_change_is_enabled() and self._asset_name is None: + if ( + self._context_change_is_enabled() + and self._get_asset_name() is None + ): # QUESTION how to handle invalid asset? prereq_available = False creator_btn_tooltips.append("Context is not selected") diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 0615157e1b..e94979142a 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -406,6 +406,9 @@ class PublisherWindow(QtWidgets.QDialog): self._comment_input.setText("") # clear comment self._reset_on_show = True self._controller.clear_thumbnail_temp_dir_path() + # Trigger custom event that should be captured only in UI + # - backend (controller) must not be dependent on this event topic!!! 
+ self._controller.event_system.emit("main.window.closed", {}, "window") super(PublisherWindow, self).closeEvent(event) def leaveEvent(self, event): From afa3f563e43be117af30bd2896983b7bd7027d9f Mon Sep 17 00:00:00 2001 From: Ynbot Date: Mon, 24 Apr 2023 15:41:57 +0000 Subject: [PATCH 165/187] [Automated] Release --- CHANGELOG.md | 303 ++++++++++++++++++++++++++++++++++++++++++++ openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 305 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5aeb546c14..16deaaa4fd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,309 @@ # Changelog +## [3.15.5](https://github.com/ynput/OpenPype/tree/3.15.5) + + +[Full Changelog](https://github.com/ynput/OpenPype/compare/3.15.4...3.15.5) + +### **🚀 Enhancements** + + +
+Maya: Playblast profiles #4777 + +Support playblast profiles.This enables studios to customize what playblast settings should be on a per task and/or subset basis. For example `modeling` should have `Wireframe On Shaded` enabled, while all other tasks should have it disabled. + + +___ + +
+ + +
+Maya: Support .abc files directly for Arnold standin look assignment #4856 + +If `.abc` file is loaded into arnold standin support look assignment through the `cbId` attributes in the alembic file. + + +___ + +
+ + +
+Maya: Hide animation instance in creator #4872 + +- Hide animation instance in creator +- Add inventory action to recreate animation publish instance for loaded rigs + + +___ + +
+ + +
+Unreal: Render Creator enhancements #4477 + +Improvements to the creator for render family + +This PR introduces some enhancements to the creator for the render family in Unreal Engine: +- Added the option to create a new, empty sequence for the render. +- Added the option to not include the whole hierarchy for the selected sequence. +- Improvements of the error messages. + + +___ + +
+ + +
+Unreal: Added settings for rendering #4575 + +Added settings for rendering in Unreal Engine. + +Two settings has been added: +- Pre roll frames, to set how many frames are used to load the scene before starting the actual rendering. +- Configuration path, to allow to save a preset of settings from Unreal, and use it for rendering. + + +___ + +
+ + +
+Global: Optimize anatomy formatting by only formatting used templates instead #4784 + +Optimization to not format full anatomy when only a single template is used. Instead format only the single template instead. + + +___ + +
+ + +
+Patchelf version locked #4853 + +For Centos dockerfile it is necessary to lock the patchelf version to the older, otherwise the build process fails. + +___ + +
+ + +
+Houdini: Implement `switch` method on loaders #4866 + +Implement `switch` method on loaders + + +___ + +
+ + +
+Code: Tweak docstrings and return type hints #4875 + +Tweak docstrings and return type hints for functions in `openpype.client.entities`. + + +___ + +
+ + +
+Publisher: Clear comment on successful publish and on window close #4885 + +Clear comment text field on successful publish and on window close. + + +___ + +
+ + +
+Publisher: Make sure to reset asset widget when hidden and reshown #4886 + +Make sure to reset asset widget when hidden and reshown. Without this the asset list would never refresh in the set asset widget when changing context on an existing instance and thus would not show new assets from after the first time launching that widget. + + +___ + +
+ +### **🐛 Bug fixes** + + +
+Maya: Fix nested model instances. #4852 + +Fix nested model instance under review instance, where data collection was not including "Display Lights" and "Focal Length". + + +___ + +
+ + +
+Maya: Make default namespace naming backwards compatible #4873 + +Namespaces of loaded references are now _by default_ back to what they were before #4511 + + +___ + +
+ + +
+Nuke: Legacy convertor skips deprecation warnings #4846 + +Nuke legacy convertor was triggering deprecated function which is causing a lot of logs which slows down whole process. Changed the convertor to skip all nodes without `AVALON_TAB` to avoid the warnings. + + +___ + +
+ + +
+3dsmax: move startup script logic to hook #4849 + +Startup script for OpenPype was interfering with Open Last Workfile feature. Moving this loggic from simple command line argument in the Settings to pre-launch hook is solving the order of command line arguments and making both features work. + + +___ + +
+ + +
+Maya: Don't change time slider ranges in `get_frame_range` #4858 + +Don't change time slider ranges in `get_frame_range` + + +___ + +
+ + +
+Maya: Looks - calculate hash for tx texture #4878 + +Texture hash is calculated for textures used in published look and it is used as key in dictionary. In recent changes, this hash is not calculated for TX files, resulting in `None` value as key in dictionary, crashing publishing. This PR is adding texture hash for TX files to solve that issue. + + +___ + +
+ + +
+Houdini: Collect `currentFile` context data separate from workfile instance #4883 + +Fix publishing without an active workfile instance due to missing `currentFile` data.Now collect `currentFile` into context in houdini through context plugin no matter the active instances. + + +___ + +
+ + +
+Nuke: fixed broken slate workflow once published on deadline #4887 + +Slate workflow is now working as expected and Validate Sequence Frames is not raising the once slate frame is included. + + +___ + +
+ + +
+Add fps as instance.data in collect review in Houdini. #4888 + +fix the bug of failing to publish extract review in HoudiniOriginal error: +```python + File "OpenPype\build\exe.win-amd64-3.9\openpype\plugins\publish\extract_review.py", line 516, in prepare_temp_data + "fps": float(instance.data["fps"]), +KeyError: 'fps' +``` + + +___ + +
+ + +
+TrayPublisher: Fill missing data for instances with review #4891 + +Fill required data to instance in traypublisher if instance has review family. The data are required by ExtractReview and it would be complicated to do proper fix at this moment! The collector does for review instances what did https://github.com/ynput/OpenPype/pull/4383 + + +___ + +
+ + +
+Publisher: Keep track about current context and fix context selection widget #4892 + +Change selected context to current context on reset. Fix bug when context widget is re-enabled. + + +___ + +
+ + +
+Scene inventory: Model refresh fix with cherry picking #4895 + +Fix cherry pick issue in scene inventory. + + +___ + +
+ + +
+Nuke: Pre-render and missing review flag on instance causing crash #4897 + +If instance created in nuke was missing `review` flag, collector crashed. + + +___ + +
+ +### **Merged pull requests** + + +
+After Effects: fix handles KeyError #4727 + +Sometimes when publishing with AE (we only saw this error on AE 2023), we got a KeyError for the handles in the "Collect Workfile" step. So I did get the handles from the context if ther's no handles in the asset entity. + + +___ + +
+ + + + ## [3.15.4](https://github.com/ynput/OpenPype/tree/3.15.4) diff --git a/openpype/version.py b/openpype/version.py index b43cc436bb..02537af762 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.15.5-nightly.2" +__version__ = "3.15.5" diff --git a/pyproject.toml b/pyproject.toml index b97ad8923c..2f40d58f56 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.15.4" # OpenPype +version = "3.15.5" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From 55957621645e3ddb6e313916509cbcad275a76e8 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 24 Apr 2023 17:47:33 +0200 Subject: [PATCH 166/187] Fusion: Simplify creator icons code (#4899) * Simplify setting creator icons * Use font-awesome 5 explicitly --- openpype/hosts/fusion/plugins/create/create_saver.py | 6 +----- openpype/hosts/fusion/plugins/create/create_workfile.py | 6 +----- 2 files changed, 2 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/fusion/plugins/create/create_saver.py b/openpype/hosts/fusion/plugins/create/create_saver.py index 56085b0a06..cedc4029fa 100644 --- a/openpype/hosts/fusion/plugins/create/create_saver.py +++ b/openpype/hosts/fusion/plugins/create/create_saver.py @@ -1,7 +1,5 @@ import os -import qtawesome - from openpype.hosts.fusion.api import ( get_current_comp, comp_lock_and_undo_chunk, @@ -28,6 +26,7 @@ class CreateSaver(Creator): family = "render" default_variants = ["Main", "Mask"] description = "Fusion Saver to generate image sequence" + icon = "fa5.eye" instance_attributes = ["reviewable"] @@ -89,9 +88,6 @@ class CreateSaver(Creator): self._add_instance_to_context(created_instance) - def get_icon(self): - return qtawesome.icon("fa.eye", color="white") - def update_instances(self, update_list): for created_inst, _changes in update_list: new_data = created_inst.data_to_store() diff --git a/openpype/hosts/fusion/plugins/create/create_workfile.py b/openpype/hosts/fusion/plugins/create/create_workfile.py index 0bb3a0d3d4..40721ea88a 100644 --- a/openpype/hosts/fusion/plugins/create/create_workfile.py +++ b/openpype/hosts/fusion/plugins/create/create_workfile.py @@ -1,5 +1,3 @@ -import qtawesome - from openpype.hosts.fusion.api import ( get_current_comp ) @@ -15,6 +13,7 @@ class FusionWorkfileCreator(AutoCreator): identifier = "workfile" family = "workfile" label = "Workfile" + icon = "fa5.file" default_variant = "Main" @@ -104,6 +103,3 @@ class FusionWorkfileCreator(AutoCreator): existing_instance["asset"] = asset_name existing_instance["task"] = task_name existing_instance["subset"] = subset_name - - def get_icon(self): - return qtawesome.icon("fa.file-o", color="white") From 3a096bcf8bf4ff60ead25495a63ec2bcf6054d18 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 24 Apr 2023 17:51:40 +0200 Subject: [PATCH 167/187] Use explicit font awesome 5 name --- openpype/hosts/houdini/plugins/create/create_workfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/create/create_workfile.py b/openpype/hosts/houdini/plugins/create/create_workfile.py index 9884fca325..1a8537adcd 100644 --- a/openpype/hosts/houdini/plugins/create/create_workfile.py +++ b/openpype/hosts/houdini/plugins/create/create_workfile.py @@ -14,7 +14,7 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): identifier = 
"io.openpype.creators.houdini.workfile" label = "Workfile" family = "workfile" - icon = "file-o" + icon = "fa5.file" default_variant = "Main" From 0ef59fcb39a033a11b94e0d3884b1b48029e75eb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 25 Apr 2023 08:18:46 +0200 Subject: [PATCH 168/187] adding ci user and email --- .github/workflows/update_bug_report.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/update_bug_report.yml b/.github/workflows/update_bug_report.yml index 9f44d7c7a6..7a1bfb7bfd 100644 --- a/.github/workflows/update_bug_report.yml +++ b/.github/workflows/update_bug_report.yml @@ -18,6 +18,8 @@ jobs: uses: ynput/gha-populate-form-version@main with: github_token: ${{ secrets.YNPUT_BOT_TOKEN }} + github_user: ${{ secrets.CI_USER }} + github_email: ${{ secrets.CI_EMAIL }} registry: github dropdown: _version limit_to: 100 From 0567701ddb827f9644e9f9631f56d4b3c73d01c5 Mon Sep 17 00:00:00 2001 From: Ynbot Date: Tue, 25 Apr 2023 06:32:26 +0000 Subject: [PATCH 169/187] chore(): update bug report / version --- .github/ISSUE_TEMPLATE/bug_report.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index c4073ed1af..fe86a8400b 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -35,6 +35,10 @@ body: label: Version description: What version are you running? Look to OpenPype Tray options: + - 3.15.5 + - 3.15.5-nightly.2 + - 3.15.5-nightly.1 + - 3.15.4 - 3.15.4-nightly.3 - 3.15.4-nightly.2 - 3.15.4-nightly.1 @@ -131,10 +135,6 @@ body: - 3.13.1-nightly.2 - 3.13.1-nightly.1 - 3.13.0 - - 3.13.0-nightly.1 - - 3.12.3-nightly.3 - - 3.12.3-nightly.2 - - 3.12.3-nightly.1 validations: required: true - type: dropdown @@ -166,8 +166,8 @@ body: label: Are there any labels you wish to add? description: Please search labels and identify those related to your bug. options: - - label: I have added the relevant labels to the bug report. - required: true + - label: I have added the relevant labels to the bug report. 
+ required: true - type: textarea id: logs attributes: From 4ed1c1f65d6f99ece0f35c404e6ca40c3ee2c5fd Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 25 Apr 2023 10:29:12 +0200 Subject: [PATCH 170/187] Enhancement: Fix PySide 6.5 support for loader (#4900) * Reverse inheritance order to avoid PySide6.5 bug `PYSIDE-2294` & `PYSIDE-2304` * Fix PySide6 support --- openpype/tools/loader/model.py | 2 +- openpype/tools/publisher/widgets/list_view_widgets.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 14671e341f..e5d8400031 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -123,7 +123,7 @@ class BaseRepresentationModel(object): self.remote_provider = remote_provider -class SubsetsModel(TreeModel, BaseRepresentationModel): +class SubsetsModel(BaseRepresentationModel, TreeModel): doc_fetched = QtCore.Signal() refreshed = QtCore.Signal(bool) diff --git a/openpype/tools/publisher/widgets/list_view_widgets.py b/openpype/tools/publisher/widgets/list_view_widgets.py index 227ae7bda9..cb5a203130 100644 --- a/openpype/tools/publisher/widgets/list_view_widgets.py +++ b/openpype/tools/publisher/widgets/list_view_widgets.py @@ -1039,7 +1039,8 @@ class InstanceListView(AbstractInstanceView): proxy_index = proxy_model.mapFromSource(select_indexes[0]) selection_model.setCurrentIndex( proxy_index, - selection_model.ClearAndSelect | selection_model.Rows + QtCore.QItemSelectionModel.ClearAndSelect + | QtCore.QItemSelectionModel.Rows ) return From 38347ece5a7e60f23d643568e1268e3900f8fa21 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Tue, 25 Apr 2023 10:37:49 +0200 Subject: [PATCH 171/187] Publisher: Small style changes (#4894) * border hover has color without alpha * changed border radius to 0.2em * removed border from scroll area * variant hint button has 0.5em width * inputs in attribute definitions have smaller padding * label is shown only to value inputs and added tooltips * change spacing for attribute befinitions * align labels to right * implemented 'ComboBox' which ignores wheel events and has styled delegate * PixmalLabel has minimum sizeHint * cards are smaller * renamed 'Options' to 'Context' * implemented active state changes in card view * set object name of main window to "PublishWindow" * plugin don't have to pass 'title' to an error * fix PySide6 support for custom keysequences * check for exact match for all bindings * added validation of exact match for save shortcut --- openpype/pipeline/publish/publish_plugins.py | 2 +- openpype/style/data.json | 2 +- openpype/style/style.css | 29 ++++++-- openpype/tools/attribute_defs/widgets.py | 10 ++- openpype/tools/publisher/constants.py | 5 +- openpype/tools/publisher/control.py | 11 ++- .../publisher/widgets/card_view_widgets.py | 72 ++++++++++++++++--- .../tools/publisher/widgets/create_widget.py | 4 ++ .../publisher/widgets/list_view_widgets.py | 5 +- .../publisher/widgets/precreate_widget.py | 14 +++- openpype/tools/publisher/widgets/widgets.py | 33 +++++++-- openpype/tools/publisher/window.py | 19 +++-- openpype/tools/utils/__init__.py | 2 + openpype/tools/utils/widgets.py | 34 ++++++++- 14 files changed, 208 insertions(+), 34 deletions(-) diff --git a/openpype/pipeline/publish/publish_plugins.py b/openpype/pipeline/publish/publish_plugins.py index 331235fadc..a38896ec8e 100644 --- a/openpype/pipeline/publish/publish_plugins.py +++ 
b/openpype/pipeline/publish/publish_plugins.py @@ -45,7 +45,7 @@ class PublishValidationError(Exception): def __init__(self, message, title=None, description=None, detail=None): self.message = message - self.title = title or "< Missing title >" + self.title = title self.description = description or message self.detail = detail super(PublishValidationError, self).__init__(message) diff --git a/openpype/style/data.json b/openpype/style/data.json index 404ca6944c..bea2a3d407 100644 --- a/openpype/style/data.json +++ b/openpype/style/data.json @@ -48,7 +48,7 @@ "bg-view-selection-hover": "rgba(92, 173, 214, .8)", "border": "#373D48", - "border-hover": "rgba(168, 175, 189, .3)", + "border-hover": "rgb(92, 99, 111)", "border-focus": "rgb(92, 173, 214)", "restart-btn-bg": "#458056", diff --git a/openpype/style/style.css b/openpype/style/style.css index da477eeefa..29abb1d351 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -35,6 +35,11 @@ QWidget:disabled { color: {color:font-disabled}; } +/* Some DCCs have set borders to solid color */ +QScrollArea { + border: none; +} + QLabel { background: transparent; } @@ -42,7 +47,7 @@ QLabel { /* Inputs */ QAbstractSpinBox, QLineEdit, QPlainTextEdit, QTextEdit { border: 1px solid {color:border}; - border-radius: 0.3em; + border-radius: 0.2em; background: {color:bg-inputs}; padding: 0.1em; } @@ -226,7 +231,7 @@ QMenu::separator { /* Combobox */ QComboBox { border: 1px solid {color:border}; - border-radius: 3px; + border-radius: 0.2em; padding: 1px 3px 1px 3px; background: {color:bg-inputs}; } @@ -474,7 +479,6 @@ QAbstractItemView:disabled{ } QAbstractItemView::item:hover { - /* color: {color:bg-view-hover}; */ background: {color:bg-view-hover}; } @@ -743,7 +747,7 @@ OverlayMessageWidget QWidget { #TypeEditor, #ToolEditor, #NameEditor, #NumberEditor { background: transparent; - border-radius: 0.3em; + border-radius: 0.2em; } #TypeEditor:focus, #ToolEditor:focus, #NameEditor:focus, #NumberEditor:focus { @@ -860,7 +864,13 @@ OverlayMessageWidget QWidget { background: {color:bg-view-hover}; } -/* New Create/Publish UI */ +/* Publisher UI (Create/Publish) */ +#PublishWindow QAbstractSpinBox, QLineEdit, QPlainTextEdit, QTextEdit { + padding: 1px; +} +#PublishWindow QComboBox { + padding: 1px 1px 1px 0.2em; +} PublisherTabsWidget { background: {color:publisher:tab-bg}; } @@ -944,6 +954,7 @@ PixmapButton:disabled { border-top-left-radius: 0px; padding-top: 0.5em; padding-bottom: 0.5em; + width: 0.5em; } #VariantInput[state="new"], #VariantInput[state="new"]:focus, #VariantInput[state="new"]:hover { border-color: {color:publisher:success}; @@ -1072,7 +1083,7 @@ ValidationArtistMessage QLabel { #AssetNameInputWidget { background: {color:bg-inputs}; border: 1px solid {color:border}; - border-radius: 0.3em; + border-radius: 0.2em; } #AssetNameInputWidget QWidget { @@ -1465,6 +1476,12 @@ CreateNextPageOverlay { } /* Attribute Definition widgets */ +AttributeDefinitionsWidget QAbstractSpinBox, QLineEdit, QPlainTextEdit, QTextEdit { + padding: 1px; +} +AttributeDefinitionsWidget QComboBox { + padding: 1px 1px 1px 0.2em; +} InViewButton, InViewButton:disabled { background: transparent; } diff --git a/openpype/tools/attribute_defs/widgets.py b/openpype/tools/attribute_defs/widgets.py index 0d4e1e88a9..d46c238da1 100644 --- a/openpype/tools/attribute_defs/widgets.py +++ b/openpype/tools/attribute_defs/widgets.py @@ -1,4 +1,3 @@ -import uuid import copy from qtpy import QtWidgets, QtCore @@ -126,7 +125,7 @@ class 
AttributeDefinitionsWidget(QtWidgets.QWidget): row = 0 for attr_def in attr_defs: - if not isinstance(attr_def, UIDef): + if attr_def.is_value_def: if attr_def.key in self._current_keys: raise KeyError( "Duplicated key \"{}\"".format(attr_def.key)) @@ -144,11 +143,16 @@ class AttributeDefinitionsWidget(QtWidgets.QWidget): col_num = 2 - expand_cols - if attr_def.label: + if attr_def.is_value_def and attr_def.label: label_widget = QtWidgets.QLabel(attr_def.label, self) tooltip = attr_def.tooltip if tooltip: label_widget.setToolTip(tooltip) + if attr_def.is_label_horizontal: + label_widget.setAlignment( + QtCore.Qt.AlignRight + | QtCore.Qt.AlignVCenter + ) layout.addWidget( label_widget, row, 0, 1, expand_cols ) diff --git a/openpype/tools/publisher/constants.py b/openpype/tools/publisher/constants.py index 5d23886aa8..660fccecf1 100644 --- a/openpype/tools/publisher/constants.py +++ b/openpype/tools/publisher/constants.py @@ -2,7 +2,7 @@ from qtpy import QtCore, QtGui # ID of context item in instance view CONTEXT_ID = "context" -CONTEXT_LABEL = "Options" +CONTEXT_LABEL = "Context" # Not showed anywhere - used as identifier CONTEXT_GROUP = "__ContextGroup__" @@ -15,6 +15,9 @@ VARIANT_TOOLTIP = ( "\nnumerical characters (0-9) dot (\".\") or underscore (\"_\")." ) +INPUTS_LAYOUT_HSPACING = 4 +INPUTS_LAYOUT_VSPACING = 2 + # Roles for instance views INSTANCE_ID_ROLE = QtCore.Qt.UserRole + 1 SORT_VALUE_ROLE = QtCore.Qt.UserRole + 2 diff --git a/openpype/tools/publisher/control.py b/openpype/tools/publisher/control.py index 7754e4aa02..4b083d4bc8 100644 --- a/openpype/tools/publisher/control.py +++ b/openpype/tools/publisher/control.py @@ -163,7 +163,7 @@ class AssetDocsCache: return copy.deepcopy(self._full_asset_docs_by_name[asset_name]) -class PublishReport: +class PublishReportMaker: """Report for single publishing process. Report keeps current state of publishing and currently processed plugin. @@ -784,6 +784,13 @@ class PublishValidationErrors: # Make sure the cached report is cleared plugin_id = self._plugins_proxy.get_plugin_id(plugin) + if not error.title: + if hasattr(plugin, "label") and plugin.label: + plugin_label = plugin.label + else: + plugin_label = plugin.__name__ + error.title = plugin_label + self._error_items.append( ValidationErrorItem.from_result(plugin_id, error, instance) ) @@ -1674,7 +1681,7 @@ class PublisherController(BasePublisherController): # pyblish.api.Context self._publish_context = None # Pyblish report - self._publish_report = PublishReport(self) + self._publish_report = PublishReportMaker(self) # Store exceptions of validation error self._publish_validation_errors = PublishValidationErrors() diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index 0734e1bc27..13715bc73c 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -9,7 +9,7 @@ Only one item can be selected at a time. ``` : Icon. 
Can have Warning icon when context is not right ┌──────────────────────┐ -│ Options │ +│ Context │ │ ────────── │ │ [x]│ │ [x]│ @@ -202,7 +202,7 @@ class ConvertorItemsGroupWidget(BaseGroupWidget): class InstanceGroupWidget(BaseGroupWidget): """Widget wrapping instances under group.""" - active_changed = QtCore.Signal() + active_changed = QtCore.Signal(str, str, bool) def __init__(self, group_icons, *args, **kwargs): super(InstanceGroupWidget, self).__init__(*args, **kwargs) @@ -253,13 +253,16 @@ class InstanceGroupWidget(BaseGroupWidget): instance, group_icon, self ) widget.selected.connect(self._on_widget_selection) - widget.active_changed.connect(self.active_changed) + widget.active_changed.connect(self._on_active_changed) self._widgets_by_id[instance.id] = widget self._content_layout.insertWidget(widget_idx, widget) widget_idx += 1 self._update_ordered_item_ids() + def _on_active_changed(self, instance_id, value): + self.active_changed.emit(self.group_name, instance_id, value) + class CardWidget(BaseClickableFrame): """Clickable card used as bigger button.""" @@ -332,7 +335,7 @@ class ContextCardWidget(CardWidget): icon_layout.addWidget(icon_widget) layout = QtWidgets.QHBoxLayout(self) - layout.setContentsMargins(0, 5, 10, 5) + layout.setContentsMargins(0, 2, 10, 2) layout.addLayout(icon_layout, 0) layout.addWidget(label_widget, 1) @@ -363,7 +366,7 @@ class ConvertorItemCardWidget(CardWidget): icon_layout.addWidget(icon_widget) layout = QtWidgets.QHBoxLayout(self) - layout.setContentsMargins(0, 5, 10, 5) + layout.setContentsMargins(0, 2, 10, 2) layout.addLayout(icon_layout, 0) layout.addWidget(label_widget, 1) @@ -377,7 +380,7 @@ class ConvertorItemCardWidget(CardWidget): class InstanceCardWidget(CardWidget): """Card widget representing instance.""" - active_changed = QtCore.Signal() + active_changed = QtCore.Signal(str, bool) def __init__(self, instance, group_icon, parent): super(InstanceCardWidget, self).__init__(parent) @@ -424,7 +427,7 @@ class InstanceCardWidget(CardWidget): top_layout.addWidget(expand_btn, 0) layout = QtWidgets.QHBoxLayout(self) - layout.setContentsMargins(0, 5, 10, 5) + layout.setContentsMargins(0, 2, 10, 2) layout.addLayout(top_layout) layout.addWidget(detail_widget) @@ -445,6 +448,10 @@ class InstanceCardWidget(CardWidget): def set_active_toggle_enabled(self, enabled): self._active_checkbox.setEnabled(enabled) + @property + def is_active(self): + return self._active_checkbox.isChecked() + def set_active(self, new_value): """Set instance as active.""" checkbox_value = self._active_checkbox.isChecked() @@ -515,7 +522,7 @@ class InstanceCardWidget(CardWidget): return self.instance["active"] = new_value - self.active_changed.emit() + self.active_changed.emit(self._id, new_value) def _on_expend_clicked(self): self._set_expanded() @@ -584,6 +591,45 @@ class InstanceCardView(AbstractInstanceView): result.setWidth(width) return result + def _toggle_instances(self, value): + if not self._active_toggle_enabled: + return + + widgets = self._get_selected_widgets() + changed = False + for widget in widgets: + if not isinstance(widget, InstanceCardWidget): + continue + + is_active = widget.is_active + if value == -1: + widget.set_active(not is_active) + changed = True + continue + + _value = bool(value) + if is_active is not _value: + widget.set_active(_value) + changed = True + + if changed: + self.active_changed.emit() + + def keyPressEvent(self, event): + if event.key() == QtCore.Qt.Key_Space: + self._toggle_instances(-1) + return True + + elif event.key() == 
QtCore.Qt.Key_Backspace: + self._toggle_instances(0) + return True + + elif event.key() == QtCore.Qt.Key_Return: + self._toggle_instances(1) + return True + + return super(InstanceCardView, self).keyPressEvent(event) + def _get_selected_widgets(self): output = [] if ( @@ -742,7 +788,15 @@ class InstanceCardView(AbstractInstanceView): for widget in self._widgets_by_group.values(): widget.update_instance_values() - def _on_active_changed(self): + def _on_active_changed(self, group_name, instance_id, value): + group_widget = self._widgets_by_group[group_name] + instance_widget = group_widget.get_widget_by_item_id(instance_id) + if instance_widget.is_selected: + for widget in self._get_selected_widgets(): + if isinstance(widget, InstanceCardWidget): + widget.set_active(value) + else: + self._select_item_clear(instance_id, group_name, instance_widget) self.active_changed.emit() def _on_widget_selection(self, instance_id, group_name, selection_type): diff --git a/openpype/tools/publisher/widgets/create_widget.py b/openpype/tools/publisher/widgets/create_widget.py index db20b21ed7..30980af03d 100644 --- a/openpype/tools/publisher/widgets/create_widget.py +++ b/openpype/tools/publisher/widgets/create_widget.py @@ -22,6 +22,8 @@ from ..constants import ( CREATOR_IDENTIFIER_ROLE, CREATOR_THUMBNAIL_ENABLED_ROLE, CREATOR_SORT_ROLE, + INPUTS_LAYOUT_HSPACING, + INPUTS_LAYOUT_VSPACING, ) SEPARATORS = ("---separator---", "---") @@ -198,6 +200,8 @@ class CreateWidget(QtWidgets.QWidget): variant_subset_layout = QtWidgets.QFormLayout(variant_subset_widget) variant_subset_layout.setContentsMargins(0, 0, 0, 0) + variant_subset_layout.setHorizontalSpacing(INPUTS_LAYOUT_HSPACING) + variant_subset_layout.setVerticalSpacing(INPUTS_LAYOUT_VSPACING) variant_subset_layout.addRow("Variant", variant_widget) variant_subset_layout.addRow("Subset", subset_name_input) diff --git a/openpype/tools/publisher/widgets/list_view_widgets.py b/openpype/tools/publisher/widgets/list_view_widgets.py index cb5a203130..557e6559c8 100644 --- a/openpype/tools/publisher/widgets/list_view_widgets.py +++ b/openpype/tools/publisher/widgets/list_view_widgets.py @@ -11,7 +11,7 @@ selection can be enabled disabled using checkbox or keyboard key presses: - Backspace - disable selection ``` -|- Options +|- Context |- [x] | |- [x] | |- [x] @@ -486,6 +486,9 @@ class InstanceListView(AbstractInstanceView): group_widget.set_expanded(expanded) def _on_toggle_request(self, toggle): + if not self._active_toggle_enabled: + return + selected_instance_ids = self._instance_view.get_selected_instance_ids() if toggle == -1: active = None diff --git a/openpype/tools/publisher/widgets/precreate_widget.py b/openpype/tools/publisher/widgets/precreate_widget.py index 3037a0e12d..3bf0bc3657 100644 --- a/openpype/tools/publisher/widgets/precreate_widget.py +++ b/openpype/tools/publisher/widgets/precreate_widget.py @@ -2,6 +2,8 @@ from qtpy import QtWidgets, QtCore from openpype.tools.attribute_defs import create_widget_for_attr_def +from ..constants import INPUTS_LAYOUT_HSPACING, INPUTS_LAYOUT_VSPACING + class PreCreateWidget(QtWidgets.QWidget): def __init__(self, parent): @@ -81,6 +83,8 @@ class AttributesWidget(QtWidgets.QWidget): layout = QtWidgets.QGridLayout(self) layout.setContentsMargins(0, 0, 0, 0) + layout.setHorizontalSpacing(INPUTS_LAYOUT_HSPACING) + layout.setVerticalSpacing(INPUTS_LAYOUT_VSPACING) self._layout = layout @@ -117,8 +121,16 @@ class AttributesWidget(QtWidgets.QWidget): col_num = 2 - expand_cols - if attr_def.label: + if 
attr_def.is_value_def and attr_def.label: label_widget = QtWidgets.QLabel(attr_def.label, self) + tooltip = attr_def.tooltip + if tooltip: + label_widget.setToolTip(tooltip) + if attr_def.is_label_horizontal: + label_widget.setAlignment( + QtCore.Qt.AlignRight + | QtCore.Qt.AlignVCenter + ) self._layout.addWidget( label_widget, row, 0, 1, expand_cols ) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index d2ce1fbcb2..cd1f1f5a96 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -9,7 +9,7 @@ import collections from qtpy import QtWidgets, QtCore, QtGui import qtawesome -from openpype.lib.attribute_definitions import UnknownDef, UIDef +from openpype.lib.attribute_definitions import UnknownDef from openpype.tools.attribute_defs import create_widget_for_attr_def from openpype.tools import resources from openpype.tools.flickcharm import FlickCharm @@ -36,6 +36,8 @@ from .icons import ( from ..constants import ( VARIANT_TOOLTIP, ResetKeySequence, + INPUTS_LAYOUT_HSPACING, + INPUTS_LAYOUT_VSPACING, ) @@ -1098,6 +1100,8 @@ class GlobalAttrsWidget(QtWidgets.QWidget): btns_layout.addWidget(cancel_btn) main_layout = QtWidgets.QFormLayout(self) + main_layout.setHorizontalSpacing(INPUTS_LAYOUT_HSPACING) + main_layout.setVerticalSpacing(INPUTS_LAYOUT_VSPACING) main_layout.addRow("Variant", variant_input) main_layout.addRow("Asset", asset_value_widget) main_layout.addRow("Task", task_value_widget) @@ -1346,6 +1350,8 @@ class CreatorAttrsWidget(QtWidgets.QWidget): content_layout.setColumnStretch(0, 0) content_layout.setColumnStretch(1, 1) content_layout.setAlignment(QtCore.Qt.AlignTop) + content_layout.setHorizontalSpacing(INPUTS_LAYOUT_HSPACING) + content_layout.setVerticalSpacing(INPUTS_LAYOUT_VSPACING) row = 0 for attr_def, attr_instances, values in result: @@ -1371,9 +1377,19 @@ class CreatorAttrsWidget(QtWidgets.QWidget): col_num = 2 - expand_cols - label = attr_def.label or attr_def.key + label = None + if attr_def.is_value_def: + label = attr_def.label or attr_def.key if label: label_widget = QtWidgets.QLabel(label, self) + tooltip = attr_def.tooltip + if tooltip: + label_widget.setToolTip(tooltip) + if attr_def.is_label_horizontal: + label_widget.setAlignment( + QtCore.Qt.AlignRight + | QtCore.Qt.AlignVCenter + ) content_layout.addWidget( label_widget, row, 0, 1, expand_cols ) @@ -1474,6 +1490,8 @@ class PublishPluginAttrsWidget(QtWidgets.QWidget): attr_def_layout = QtWidgets.QGridLayout(attr_def_widget) attr_def_layout.setColumnStretch(0, 0) attr_def_layout.setColumnStretch(1, 1) + attr_def_layout.setHorizontalSpacing(INPUTS_LAYOUT_HSPACING) + attr_def_layout.setVerticalSpacing(INPUTS_LAYOUT_VSPACING) content_layout = QtWidgets.QVBoxLayout(content_widget) content_layout.addWidget(attr_def_widget, 0) @@ -1501,12 +1519,19 @@ class PublishPluginAttrsWidget(QtWidgets.QWidget): expand_cols = 1 col_num = 2 - expand_cols - label = attr_def.label or attr_def.key + label = None + if attr_def.is_value_def: + label = attr_def.label or attr_def.key if label: label_widget = QtWidgets.QLabel(label, content_widget) tooltip = attr_def.tooltip if tooltip: label_widget.setToolTip(tooltip) + if attr_def.is_label_horizontal: + label_widget.setAlignment( + QtCore.Qt.AlignRight + | QtCore.Qt.AlignVCenter + ) attr_def_layout.addWidget( label_widget, row, 0, 1, expand_cols ) @@ -1517,7 +1542,7 @@ class PublishPluginAttrsWidget(QtWidgets.QWidget): ) row += 1 - if isinstance(attr_def, UIDef): + if not 
attr_def.is_value_def: continue widget.value_changed.connect(self._input_value_changed) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index e94979142a..b3471163ae 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -46,6 +46,8 @@ class PublisherWindow(QtWidgets.QDialog): def __init__(self, parent=None, controller=None, reset_on_show=None): super(PublisherWindow, self).__init__(parent) + self.setObjectName("PublishWindow") + self.setWindowTitle("OpenPype publisher") icon = QtGui.QIcon(resources.get_openpype_icon_filepath()) @@ -440,15 +442,24 @@ class PublisherWindow(QtWidgets.QDialog): event.accept() return - if event.matches(QtGui.QKeySequence.Save): + save_match = event.matches(QtGui.QKeySequence.Save) + if save_match == QtGui.QKeySequence.ExactMatch: if not self._controller.publish_has_started: self._save_changes(True) event.accept() return - if ResetKeySequence.matches( - QtGui.QKeySequence(event.key() | event.modifiers()) - ): + # PySide6 Support + if hasattr(event, "keyCombination"): + reset_match_result = ResetKeySequence.matches( + QtGui.QKeySequence(event.keyCombination()) + ) + else: + reset_match_result = ResetKeySequence.matches( + QtGui.QKeySequence(event.modifiers() | event.key()) + ) + + if reset_match_result == QtGui.QKeySequence.ExactMatch: if not self.controller.publish_is_running: self.reset() event.accept() diff --git a/openpype/tools/utils/__init__.py b/openpype/tools/utils/__init__.py index 4292e2d726..4149763f80 100644 --- a/openpype/tools/utils/__init__.py +++ b/openpype/tools/utils/__init__.py @@ -1,6 +1,7 @@ from .widgets import ( FocusSpinBox, FocusDoubleSpinBox, + ComboBox, CustomTextComboBox, PlaceholderLineEdit, BaseClickableFrame, @@ -38,6 +39,7 @@ from .overlay_messages import ( __all__ = ( "FocusSpinBox", "FocusDoubleSpinBox", + "ComboBox", "CustomTextComboBox", "PlaceholderLineEdit", "BaseClickableFrame", diff --git a/openpype/tools/utils/widgets.py b/openpype/tools/utils/widgets.py index b416c56797..bae89aeb09 100644 --- a/openpype/tools/utils/widgets.py +++ b/openpype/tools/utils/widgets.py @@ -41,7 +41,28 @@ class FocusDoubleSpinBox(QtWidgets.QDoubleSpinBox): super(FocusDoubleSpinBox, self).wheelEvent(event) -class CustomTextComboBox(QtWidgets.QComboBox): +class ComboBox(QtWidgets.QComboBox): + """Base of combobox with pre-implement changes used in tools. + + Combobox is using styled delegate by default so stylesheets are propagated. + + Items are not changed on scroll until the combobox is in focus. 
+ """ + + def __init__(self, *args, **kwargs): + super(ComboBox, self).__init__(*args, **kwargs) + delegate = QtWidgets.QStyledItemDelegate() + self.setItemDelegate(delegate) + self.setFocusPolicy(QtCore.Qt.StrongFocus) + + self._delegate = delegate + + def wheelEvent(self, event): + if self.hasFocus(): + return super(ComboBox, self).wheelEvent(event) + + +class CustomTextComboBox(ComboBox): """Combobox which can have different text showed.""" def __init__(self, *args, **kwargs): @@ -253,6 +274,9 @@ class PixmapLabel(QtWidgets.QLabel): self._empty_pixmap = QtGui.QPixmap(0, 0) self._source_pixmap = pixmap + self._last_width = 0 + self._last_height = 0 + def set_source_pixmap(self, pixmap): """Change source image.""" self._source_pixmap = pixmap @@ -263,6 +287,12 @@ class PixmapLabel(QtWidgets.QLabel): size += size % 2 return size, size + def minimumSizeHint(self): + width, height = self._get_pix_size() + if width != self._last_width or height != self._last_height: + self._set_resized_pix() + return QtCore.QSize(width, height) + def _set_resized_pix(self): if self._source_pixmap is None: self.setPixmap(self._empty_pixmap) @@ -276,6 +306,8 @@ class PixmapLabel(QtWidgets.QLabel): QtCore.Qt.SmoothTransformation ) ) + self._last_width = width + self._last_height = height def resizeEvent(self, event): self._set_resized_pix() From a724bd1c77ca9ded191967648e96c2adda8619ea Mon Sep 17 00:00:00 2001 From: Ynbot Date: Wed, 26 Apr 2023 03:25:35 +0000 Subject: [PATCH 172/187] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 02537af762..080fd6eece 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.15.5" +__version__ = "3.15.6-nightly.1" From 5d14869180d0c04c744edcf5f88abcca22cbb579 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 26 Apr 2023 18:35:14 +0800 Subject: [PATCH 173/187] validator and selected nodes use for containers --- .../hosts/max/plugins/create/create_model.py | 8 ++-- .../plugins/publish/validate_usd_plugin.py | 38 +++++++++++++++++++ 2 files changed, 43 insertions(+), 3 deletions(-) create mode 100644 openpype/hosts/max/plugins/publish/validate_usd_plugin.py diff --git a/openpype/hosts/max/plugins/create/create_model.py b/openpype/hosts/max/plugins/create/create_model.py index a78a30e0c7..e7ae3af9db 100644 --- a/openpype/hosts/max/plugins/create/create_model.py +++ b/openpype/hosts/max/plugins/create/create_model.py @@ -12,7 +12,6 @@ class CreateModel(plugin.MaxCreator): def create(self, subset_name, instance_data, pre_create_data): from pymxs import runtime as rt - sel_obj = list(rt.selection) instance = super(CreateModel, self).create( subset_name, instance_data, @@ -20,7 +19,10 @@ class CreateModel(plugin.MaxCreator): container = rt.getNodeByName(instance.data.get("instance_node")) # TODO: Disable "Add to Containers?" 
Panel # parent the selected cameras into the container - for obj in sel_obj: - obj.parent = container + sel_obj = None + if self.selected_nodes: + sel_obj = list(self.selected_nodes) + for obj in sel_obj: + obj.parent = container # for additional work on the node: # instance_node = rt.getNodeByName(instance.get("instance_node")) diff --git a/openpype/hosts/max/plugins/publish/validate_usd_plugin.py b/openpype/hosts/max/plugins/publish/validate_usd_plugin.py new file mode 100644 index 0000000000..8a92263884 --- /dev/null +++ b/openpype/hosts/max/plugins/publish/validate_usd_plugin.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +import pyblish.api +from openpype.pipeline import PublishValidationError +from pymxs import runtime as rt + + +class ValidateUSDPlugin(pyblish.api.InstancePlugin): + """Validates if USD plugin is installed or loaded in Max + """ + + order = pyblish.api.ValidatorOrder - 0.01 + families = ["model"] + hosts = ["max"] + label = "USD Plugin" + + def process(self, instance): + #usdimport.dli + #usdexport.dle + plugin_mgr = rt.pluginManager + plugin_count = plugin_mgr.pluginDllCount + plugin_info = self.get_plugins(plugin_mgr, + plugin_count) + usd_import = "usdimport.dli" + if usd_import not in plugin_info: + raise PublishValidationError("USD Plugin {}" + " not found".format(usd_import)) + usd_export = "usdexport.dle" + if usd_export not in plugin_info: + raise PublishValidationError("USD Plugin {}" + " not found".format(usd_export)) + + def get_plugins(self, manager, count): + plugin_info_list = list() + for p in range(1, count + 1): + plugin_info = manager.pluginDllName(p) + plugin_info_list.append(plugin_info) + + return plugin_info_list From 12c9d10ba1faebae7c71bcb1f15fdd293d946e29 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 26 Apr 2023 18:36:27 +0800 Subject: [PATCH 174/187] hound fix --- openpype/hosts/max/plugins/publish/validate_usd_plugin.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/max/plugins/publish/validate_usd_plugin.py b/openpype/hosts/max/plugins/publish/validate_usd_plugin.py index 8a92263884..747147020a 100644 --- a/openpype/hosts/max/plugins/publish/validate_usd_plugin.py +++ b/openpype/hosts/max/plugins/publish/validate_usd_plugin.py @@ -14,8 +14,6 @@ class ValidateUSDPlugin(pyblish.api.InstancePlugin): label = "USD Plugin" def process(self, instance): - #usdimport.dli - #usdexport.dle plugin_mgr = rt.pluginManager plugin_count = plugin_mgr.pluginDllCount plugin_info = self.get_plugins(plugin_mgr, From fdbe5ac3a1b033bcf4ec7e28b916106914fda951 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 26 Apr 2023 18:45:39 +0800 Subject: [PATCH 175/187] adjustment --- openpype/hosts/max/plugins/publish/validate_model_contents.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/max/plugins/publish/validate_model_contents.py b/openpype/hosts/max/plugins/publish/validate_model_contents.py index dd9c8de2cf..dd782674ff 100644 --- a/openpype/hosts/max/plugins/publish/validate_model_contents.py +++ b/openpype/hosts/max/plugins/publish/validate_model_contents.py @@ -32,7 +32,7 @@ class ValidateModelContent(pyblish.api.InstancePlugin): "{}".format(container)) con = rt.getNodeByName(container) - selection_list = list(con.Children) + selection_list = list(con.Children) or rt.getCurrentSelection() for sel in selection_list: if rt.classOf(sel) in rt.Camera.classes: invalid.append(sel) From 61c37ebb2263af58666b314186652636186f3896 Mon Sep 17 00:00:00 2001 From: Seyedmohammadreza Hashemizadeh Date: Tue, 25 Apr 2023 
15:54:25 +0200 Subject: [PATCH 176/187] add display handle setting for maya load references --- openpype/hosts/maya/plugins/load/load_reference.py | 9 ++++++--- openpype/settings/defaults/project_settings/maya.json | 3 ++- .../projects_schema/schemas/schema_maya_load.json | 8 ++++++++ 3 files changed, 16 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_reference.py b/openpype/hosts/maya/plugins/load/load_reference.py index 0dbdb03bb7..3309d7c207 100644 --- a/openpype/hosts/maya/plugins/load/load_reference.py +++ b/openpype/hosts/maya/plugins/load/load_reference.py @@ -162,9 +162,12 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): with parent_nodes(roots, parent=None): cmds.xform(group_name, zeroTransformPivots=True) - cmds.setAttr("{}.displayHandle".format(group_name), 1) - settings = get_project_settings(os.environ['AVALON_PROJECT']) + + display_handle = settings['maya']['load'].get('reference_loader', {}).get( + 'display_handle', True) + cmds.setAttr("{}.displayHandle".format(group_name), display_handle) + colors = settings['maya']['load']['colors'] c = colors.get(family) if c is not None: @@ -174,7 +177,7 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): (float(c[1]) / 255), (float(c[2]) / 255)) - cmds.setAttr("{}.displayHandle".format(group_name), 1) + cmds.setAttr("{}.displayHandle".format(group_name), display_handle) # get bounding box bbox = cmds.exactWorldBoundingBox(group_name) # get pivot position on world space diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 12223216cd..72b330ce7a 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -1460,7 +1460,8 @@ }, "reference_loader": { "namespace": "{asset_name}_{subset}_##_", - "group_name": "_GRP" + "group_name": "_GRP", + "display_handle": true } }, "workfile_build": { diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_load.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_load.json index c1895c4824..4b6b97ab4e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_load.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_load.json @@ -111,6 +111,14 @@ { "type": "label", "label": "Here's a link to the doc where you can find explanations about customing the naming of referenced assets: https://openpype.io/docs/admin_hosts_maya#load-plugins" + }, + { + "type": "separator" + }, + { + "type": "boolean", + "key": "display_handle", + "label": "Display Handle On Load References" } ] } From 0d4fb1d8162f5647f53abc6d66419bd5f7cce5ba Mon Sep 17 00:00:00 2001 From: Seyedmohammadreza Hashemizadeh Date: Wed, 26 Apr 2023 10:06:00 +0200 Subject: [PATCH 177/187] linting clean up --- openpype/hosts/maya/plugins/load/load_reference.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_reference.py b/openpype/hosts/maya/plugins/load/load_reference.py index 3309d7c207..86c2a92a07 100644 --- a/openpype/hosts/maya/plugins/load/load_reference.py +++ b/openpype/hosts/maya/plugins/load/load_reference.py @@ -164,9 +164,10 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): settings = get_project_settings(os.environ['AVALON_PROJECT']) - display_handle = settings['maya']['load'].get('reference_loader', {}).get( - 
'display_handle', True) - cmds.setAttr("{}.displayHandle".format(group_name), display_handle) + display_handle = settings['maya']['load'].get( + 'reference_loader', {}).get('display_handle', True) + cmds.setAttr( + "{}.displayHandle".format(group_name), display_handle) colors = settings['maya']['load']['colors'] c = colors.get(family) @@ -177,7 +178,8 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): (float(c[1]) / 255), (float(c[2]) / 255)) - cmds.setAttr("{}.displayHandle".format(group_name), display_handle) + cmds.setAttr( + "{}.displayHandle".format(group_name), display_handle) # get bounding box bbox = cmds.exactWorldBoundingBox(group_name) # get pivot position on world space From 37ea36b811d427f1c31563967789837c26b96cd6 Mon Sep 17 00:00:00 2001 From: Seyedmohammadreza Hashemizadeh Date: Wed, 26 Apr 2023 10:38:00 +0200 Subject: [PATCH 178/187] cosmetiques --- openpype/hosts/maya/plugins/load/load_reference.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_reference.py b/openpype/hosts/maya/plugins/load/load_reference.py index 86c2a92a07..7d717dcd44 100644 --- a/openpype/hosts/maya/plugins/load/load_reference.py +++ b/openpype/hosts/maya/plugins/load/load_reference.py @@ -165,9 +165,11 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): settings = get_project_settings(os.environ['AVALON_PROJECT']) display_handle = settings['maya']['load'].get( - 'reference_loader', {}).get('display_handle', True) + 'reference_loader', {} + ).get('display_handle', True) cmds.setAttr( - "{}.displayHandle".format(group_name), display_handle) + "{}.displayHandle".format(group_name), display_handle + ) colors = settings['maya']['load']['colors'] c = colors.get(family) @@ -179,7 +181,8 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): (float(c[2]) / 255)) cmds.setAttr( - "{}.displayHandle".format(group_name), display_handle) + "{}.displayHandle".format(group_name), display_handle + ) # get bounding box bbox = cmds.exactWorldBoundingBox(group_name) # get pivot position on world space From 4107874eb999a6a5dfb7bf00c209b365b71bd796 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Thu, 27 Apr 2023 23:41:00 +0200 Subject: [PATCH 179/187] Project packager: Backup and restore can store only database (#4879) * added helper functions to client mongo api * pack and unpack project functions can work without project files * added flag argument to pack project command to zip only project files * unpack project has also only project argument * Fix extractions --- openpype/cli.py | 15 +- openpype/client/mongo.py | 223 +++++++++++++++++++++++++- openpype/lib/project_backpack.py | 267 +++++++++++++++++++------------ openpype/pype_commands.py | 8 +- 4 files changed, 394 insertions(+), 119 deletions(-) diff --git a/openpype/cli.py b/openpype/cli.py index a650a9fdcc..54af42920d 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -415,11 +415,12 @@ def repack_version(directory): @main.command() @click.option("--project", help="Project name") @click.option( - "--dirpath", help="Directory where package is stored", default=None -) -def pack_project(project, dirpath): + "--dirpath", help="Directory where package is stored", default=None) +@click.option( + "--dbonly", help="Store only Database data", default=False, is_flag=True) +def pack_project(project, dirpath, dbonly): """Create a package of project with all files and database dump.""" - 
PypeCommands().pack_project(project, dirpath) + PypeCommands().pack_project(project, dirpath, dbonly) @main.command() @@ -427,9 +428,11 @@ def pack_project(project, dirpath): @click.option( "--root", help="Replace root which was stored in project", default=None ) -def unpack_project(zipfile, root): +@click.option( + "--dbonly", help="Store only Database data", default=False, is_flag=True) +def unpack_project(zipfile, root, dbonly): """Create a package of project with all files and database dump.""" - PypeCommands().unpack_project(zipfile, root) + PypeCommands().unpack_project(zipfile, root, dbonly) @main.command() diff --git a/openpype/client/mongo.py b/openpype/client/mongo.py index 72acbc5476..251041c028 100644 --- a/openpype/client/mongo.py +++ b/openpype/client/mongo.py @@ -5,6 +5,12 @@ import logging import pymongo import certifi +from bson.json_util import ( + loads, + dumps, + CANONICAL_JSON_OPTIONS +) + if sys.version_info[0] == 2: from urlparse import urlparse, parse_qs else: @@ -15,6 +21,49 @@ class MongoEnvNotSet(Exception): pass +def documents_to_json(docs): + """Convert documents to json string. + + Args: + Union[list[dict[str, Any]], dict[str, Any]]: Document/s to convert to + json string. + + Returns: + str: Json string with mongo documents. + """ + + return dumps(docs, json_options=CANONICAL_JSON_OPTIONS) + + +def load_json_file(filepath): + """Load mongo documents from a json file. + + Args: + filepath (str): Path to a json file. + + Returns: + Union[dict[str, Any], list[dict[str, Any]]]: Loaded content from a + json file. + """ + + if not os.path.exists(filepath): + raise ValueError("Path {} was not found".format(filepath)) + + with open(filepath, "r") as stream: + content = stream.read() + return loads("".join(content)) + + +def get_project_database_name(): + """Name of database name where projects are available. + + Returns: + str: Name of database name where projects are. + """ + + return os.environ.get("AVALON_DB") or "avalon" + + def _decompose_url(url): """Decompose mongo url to basic components. @@ -210,12 +259,102 @@ class OpenPypeMongoConnection: return mongo_client -def get_project_database(): - db_name = os.environ.get("AVALON_DB") or "avalon" - return OpenPypeMongoConnection.get_mongo_client()[db_name] +# ------ Helper Mongo functions ------ +# Functions can be helpful with custom tools to backup/restore mongo state. +# Not meant as API functionality that should be used in production codebase! +def get_collection_documents(database_name, collection_name, as_json=False): + """Query all documents from a collection. + + Args: + database_name (str): Name of database where to look for collection. + collection_name (str): Name of collection where to look for collection. + as_json (Optional[bool]): Output should be a json string. + Default: 'False' + + Returns: + Union[list[dict[str, Any]], str]: Queried documents. + """ + + client = OpenPypeMongoConnection.get_mongo_client() + output = list(client[database_name][collection_name].find({})) + if as_json: + output = documents_to_json(output) + return output -def get_project_connection(project_name): +def store_collection(filepath, database_name, collection_name): + """Store collection documents to a json file. + + Args: + filepath (str): Path to a json file where documents will be stored. + database_name (str): Name of database where to look for collection. + collection_name (str): Name of collection to store. 
+ """ + + # Make sure directory for output file exists + dirpath = os.path.dirname(filepath) + if not os.path.isdir(dirpath): + os.makedirs(dirpath) + + content = get_collection_documents(database_name, collection_name, True) + with open(filepath, "w") as stream: + stream.write(content) + + +def replace_collection_documents(docs, database_name, collection_name): + """Replace all documents in a collection with passed documents. + + Warnings: + All existing documents in collection will be removed if there are any. + + Args: + docs (list[dict[str, Any]]): New documents. + database_name (str): Name of database where to look for collection. + collection_name (str): Name of collection where new documents are + uploaded. + """ + + client = OpenPypeMongoConnection.get_mongo_client() + database = client[database_name] + if collection_name in database.list_collection_names(): + database.drop_collection(collection_name) + col = database[collection_name] + col.insert_many(docs) + + +def restore_collection(filepath, database_name, collection_name): + """Restore/replace collection from a json filepath. + + Warnings: + All existing documents in collection will be removed if there are any. + + Args: + filepath (str): Path to a json with documents. + database_name (str): Name of database where to look for collection. + collection_name (str): Name of collection where new documents are + uploaded. + """ + + docs = load_json_file(filepath) + replace_collection_documents(docs, database_name, collection_name) + + +def get_project_database(database_name=None): + """Database object where project collections are. + + Args: + database_name (Optional[str]): Custom name of database. + + Returns: + pymongo.database.Database: Collection related to passed project. + """ + + if not database_name: + database_name = get_project_database_name() + return OpenPypeMongoConnection.get_mongo_client()[database_name] + + +def get_project_connection(project_name, database_name=None): """Direct access to mongo collection. We're trying to avoid using direct access to mongo. This should be used @@ -223,13 +362,83 @@ def get_project_connection(project_name): api calls for that. Args: - project_name(str): Project name for which collection should be + project_name (str): Project name for which collection should be returned. + database_name (Optional[str]): Custom name of database. Returns: - pymongo.Collection: Collection realated to passed project. + pymongo.collection.Collection: Collection related to passed project. """ if not project_name: raise ValueError("Invalid project name {}".format(str(project_name))) - return get_project_database()[project_name] + return get_project_database(database_name)[project_name] + + +def get_project_documents(project_name, database_name=None): + """Query all documents from project collection. + + Args: + project_name (str): Name of project. + database_name (Optional[str]): Name of mongo database where to look for + project. + + Returns: + list[dict[str, Any]]: Documents in project collection. + """ + + if not database_name: + database_name = get_project_database_name() + return get_collection_documents(database_name, project_name) + + +def store_project_documents(project_name, filepath, database_name=None): + """Store project documents to a file as json string. + + Args: + project_name (str): Name of project to store. + filepath (str): Path to a json file where output will be stored. + database_name (Optional[str]): Name of mongo database where to look for + project. 
+ """ + + if not database_name: + database_name = get_project_database_name() + + store_collection(filepath, database_name, project_name) + + +def replace_project_documents(project_name, docs, database_name=None): + """Replace documents in mongo with passed documents. + + Warnings: + Existing project collection is removed if exists in mongo. + + Args: + project_name (str): Name of project. + docs (list[dict[str, Any]]): Documents to restore. + database_name (Optional[str]): Name of mongo database where project + collection will be created. + """ + + if not database_name: + database_name = get_project_database_name() + replace_collection_documents(docs, database_name, project_name) + + +def restore_project_documents(project_name, filepath, database_name=None): + """Replace documents in mongo with passed documents. + + Warnings: + Existing project collection is removed if exists in mongo. + + Args: + project_name (str): Name of project. + filepath (str): File to json file with project documents. + database_name (Optional[str]): Name of mongo database where project + collection will be created. + """ + + if not database_name: + database_name = get_project_database_name() + restore_collection(filepath, database_name, project_name) diff --git a/openpype/lib/project_backpack.py b/openpype/lib/project_backpack.py index ff2f1d4b88..07107ec011 100644 --- a/openpype/lib/project_backpack.py +++ b/openpype/lib/project_backpack.py @@ -1,16 +1,19 @@ -"""These lib functions are primarily for development purposes. +"""These lib functions are for development purposes. -WARNING: This is not meant for production data. +WARNING: + This is not meant for production data. Please don't write code which is + dependent on functionality here. -Goal is to be able create package of current state of project with related -documents from mongo and files from disk to zip file and then be able recreate -the project based on the zip. +Goal is to be able to create package of current state of project with related +documents from mongo and files from disk to zip file and then be able +to recreate the project based on the zip. This gives ability to create project where a changes and tests can be done. -Keep in mind that to be able create a package of project has few requirements. -Possible requirement should be listed in 'pack_project' function. +Keep in mind that to be able to create a package of project has few +requirements. Possible requirement should be listed in 'pack_project' function. """ + import os import json import platform @@ -19,16 +22,12 @@ import shutil import datetime import zipfile -from bson.json_util import ( - loads, - dumps, - CANONICAL_JSON_OPTIONS +from openpype.client.mongo import ( + load_json_file, + get_project_connection, + replace_project_documents, + store_project_documents, ) -from openpype.client import ( - get_project, - get_whole_project, -) -from openpype.pipeline import AvalonMongoDB DOCUMENTS_FILE_NAME = "database" METADATA_FILE_NAME = "metadata" @@ -43,7 +42,52 @@ def add_timestamp(filepath): return new_base + ext -def pack_project(project_name, destination_dir=None): +def get_project_document(project_name, database_name=None): + """Query project document. + + Function 'get_project' from client api cannot be used as it does not allow + to change which 'database_name' is used. + + Args: + project_name (str): Name of project. + database_name (Optional[str]): Name of mongo database where to look for + project. + + Returns: + Union[dict[str, Any], None]: Project document or None. 
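Usage illustration for the mongo helpers added above in openpype/client/mongo.py (a minimal sketch, not part of the diff; it assumes a running OpenPype environment with a configured mongo connection, and the project name and file path are placeholders):

from openpype.client.mongo import (
    store_project_documents,
    restore_project_documents,
)

# Dump all documents of the "demo_project" collection to a json file.
# The dump uses bson's canonical json options, so ObjectId and datetime
# values survive a later restore.
store_project_documents("demo_project", "/tmp/demo_project_db.json")

# Recreate the collection from that dump. Any existing "demo_project"
# collection is dropped and replaced with the documents from the file.
restore_project_documents("demo_project", "/tmp/demo_project_db.json")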
+ """ + + col = get_project_connection(project_name, database_name) + return col.find_one({"type": "project"}) + + +def _pack_files_to_zip(zip_stream, source_path, root_path): + """Pack files to a zip stream. + + Args: + zip_stream (zipfile.ZipFile): Stream to a zipfile. + source_path (str): Path to a directory where files are. + root_path (str): Path to a directory which is used for calculation + of relative path. + """ + + for root, _, filenames in os.walk(source_path): + for filename in filenames: + filepath = os.path.join(root, filename) + # TODO add one more folder + archive_name = os.path.join( + PROJECT_FILES_DIR, + os.path.relpath(filepath, root_path) + ) + zip_stream.write(filepath, archive_name) + + +def pack_project( + project_name, + destination_dir=None, + only_documents=False, + database_name=None +): """Make a package of a project with mongo documents and files. This function has few restrictions: @@ -52,13 +96,18 @@ def pack_project(project_name, destination_dir=None): "{root[...]}/{project[name]}" Args: - project_name(str): Project that should be packaged. - destination_dir(str): Optional path where zip will be stored. Project's - root is used if not passed. + project_name (str): Project that should be packaged. + destination_dir (Optional[str]): Optional path where zip will be + stored. Project's root is used if not passed. + only_documents (Optional[bool]): Pack only Mongo documents and skip + files. + database_name (Optional[str]): Custom database name from which is + project queried. """ + print("Creating package of project \"{}\"".format(project_name)) # Validate existence of project - project_doc = get_project(project_name) + project_doc = get_project_document(project_name, database_name) if not project_doc: raise ValueError("Project \"{}\" was not found in database".format( project_name @@ -119,12 +168,7 @@ def pack_project(project_name, destination_dir=None): temp_docs_json = s.name # Query all project documents and store them to temp json - docs = list(get_whole_project(project_name)) - data = dumps( - docs, json_options=CANONICAL_JSON_OPTIONS - ) - with open(temp_docs_json, "w") as stream: - stream.write(data) + store_project_documents(project_name, temp_docs_json, database_name) print("Packing files into zip") # Write all to zip file @@ -133,16 +177,10 @@ def pack_project(project_name, destination_dir=None): zip_stream.write(temp_metadata_json, METADATA_FILE_NAME + ".json") # Add database documents zip_stream.write(temp_docs_json, DOCUMENTS_FILE_NAME + ".json") + # Add project files to zip - for root, _, filenames in os.walk(project_source_path): - for filename in filenames: - filepath = os.path.join(root, filename) - # TODO add one more folder - archive_name = os.path.join( - PROJECT_FILES_DIR, - os.path.relpath(filepath, root_path) - ) - zip_stream.write(filepath, archive_name) + if not only_documents: + _pack_files_to_zip(zip_stream, project_source_path, root_path) print("Cleaning up") # Cleanup @@ -152,80 +190,30 @@ def pack_project(project_name, destination_dir=None): print("*** Packing finished ***") -def unpack_project(path_to_zip, new_root=None): - """Unpack project zip file to recreate project. +def _unpack_project_files(unzip_dir, root_path, project_name): + """Move project files from unarchived temp folder to new root. + + Unpack is skipped if source files are not available in the zip. That can + happen if nothing was published yet or only documents were stored to + package. 
Args: - path_to_zip(str): Path to zip which was created using 'pack_project' - function. - new_root(str): Optional way how to set different root path for unpacked - project. + unzip_dir (str): Location where zip was unzipped. + root_path (str): Path to new root. + project_name (str): Name of project. """ - print("Unpacking project from zip {}".format(path_to_zip)) - if not os.path.exists(path_to_zip): - print("Zip file does not exists: {}".format(path_to_zip)) + + src_project_files_dir = os.path.join( + unzip_dir, PROJECT_FILES_DIR, project_name + ) + # Skip if files are not in the zip + if not os.path.exists(src_project_files_dir): return - tmp_dir = tempfile.mkdtemp(prefix="unpack_") - print("Zip is extracted to temp: {}".format(tmp_dir)) - with zipfile.ZipFile(path_to_zip, "r") as zip_stream: - zip_stream.extractall(tmp_dir) - - metadata_json_path = os.path.join(tmp_dir, METADATA_FILE_NAME + ".json") - with open(metadata_json_path, "r") as stream: - metadata = json.load(stream) - - docs_json_path = os.path.join(tmp_dir, DOCUMENTS_FILE_NAME + ".json") - with open(docs_json_path, "r") as stream: - content = stream.readlines() - docs = loads("".join(content)) - - low_platform = platform.system().lower() - project_name = metadata["project_name"] - source_root = metadata["root"] - root_path = source_root[low_platform] - - # Drop existing collection - dbcon = AvalonMongoDB() - database = dbcon.database - if project_name in database.list_collection_names(): - database.drop_collection(project_name) - print("Removed existing project collection") - - print("Creating project documents ({})".format(len(docs))) - # Create new collection with loaded docs - collection = database[project_name] - collection.insert_many(docs) - - # Skip change of root if is the same as the one stored in metadata - if ( - new_root - and (os.path.normpath(new_root) == os.path.normpath(root_path)) - ): - new_root = None - - if new_root: - print("Using different root path {}".format(new_root)) - root_path = new_root - - project_doc = get_project(project_name) - roots = project_doc["config"]["roots"] - key = tuple(roots.keys())[0] - update_key = "config.roots.{}.{}".format(key, low_platform) - collection.update_one( - {"_id": project_doc["_id"]}, - {"$set": { - update_key: new_root - }} - ) - # Make sure root path exists if not os.path.exists(root_path): os.makedirs(root_path) - src_project_files_dir = os.path.join( - tmp_dir, PROJECT_FILES_DIR, project_name - ) dst_project_files_dir = os.path.normpath( os.path.join(root_path, project_name) ) @@ -241,8 +229,83 @@ def unpack_project(path_to_zip, new_root=None): )) shutil.move(src_project_files_dir, dst_project_files_dir) + +def unpack_project( + path_to_zip, new_root=None, database_only=None, database_name=None +): + """Unpack project zip file to recreate project. + + Args: + path_to_zip (str): Path to zip which was created using 'pack_project' + function. + new_root (str): Optional way how to set different root path for + unpacked project. + database_only (Optional[bool]): Unpack only database from zip. + database_name (str): Name of database where project will be recreated. 
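A minimal usage sketch of the reworked packager functions (illustration only, not part of the diff; project name, paths and flag values are placeholders, and the same behaviour is exposed on the command line through the new --dbonly flag):

from openpype.lib.project_backpack import pack_project, unpack_project

# Package only the mongo documents of the project; with only_documents=True
# the project files on disk are skipped and not added to the zip.
pack_project("demo_project", destination_dir="/tmp", only_documents=True)

# Restore the project from the package. With database_only=True only the
# metadata and database json files are extracted and the project collection
# is recreated from them; new_root can optionally remap the stored root.
unpack_project("/tmp/demo_project.zip", new_root=None, database_only=True)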
+ """ + + if database_only is None: + database_only = False + + print("Unpacking project from zip {}".format(path_to_zip)) + if not os.path.exists(path_to_zip): + print("Zip file does not exists: {}".format(path_to_zip)) + return + + tmp_dir = tempfile.mkdtemp(prefix="unpack_") + print("Zip is extracted to temp: {}".format(tmp_dir)) + with zipfile.ZipFile(path_to_zip, "r") as zip_stream: + if database_only: + for filename in ( + "{}.json".format(METADATA_FILE_NAME), + "{}.json".format(DOCUMENTS_FILE_NAME), + ): + zip_stream.extract(filename, tmp_dir) + else: + zip_stream.extractall(tmp_dir) + + metadata_json_path = os.path.join(tmp_dir, METADATA_FILE_NAME + ".json") + with open(metadata_json_path, "r") as stream: + metadata = json.load(stream) + + docs_json_path = os.path.join(tmp_dir, DOCUMENTS_FILE_NAME + ".json") + docs = load_json_file(docs_json_path) + + low_platform = platform.system().lower() + project_name = metadata["project_name"] + source_root = metadata["root"] + root_path = source_root[low_platform] + + # Drop existing collection + replace_project_documents(project_name, docs, database_name) + print("Creating project documents ({})".format(len(docs))) + + # Skip change of root if is the same as the one stored in metadata + if ( + new_root + and (os.path.normpath(new_root) == os.path.normpath(root_path)) + ): + new_root = None + + if new_root: + print("Using different root path {}".format(new_root)) + root_path = new_root + + project_doc = get_project_document(project_name) + roots = project_doc["config"]["roots"] + key = tuple(roots.keys())[0] + update_key = "config.roots.{}.{}".format(key, low_platform) + collection = get_project_connection(project_name, database_name) + collection.update_one( + {"_id": project_doc["_id"]}, + {"$set": { + update_key: new_root + }} + ) + + _unpack_project_files(tmp_dir, root_path, project_name) + # CLeanup print("Cleaning up") shutil.rmtree(tmp_dir) - dbcon.uninstall() print("*** Unpack finished ***") diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index dc5b3d63c3..6a24cb0ebc 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -353,12 +353,12 @@ class PypeCommands: version_packer = VersionRepacker(directory) version_packer.process() - def pack_project(self, project_name, dirpath): + def pack_project(self, project_name, dirpath, database_only): from openpype.lib.project_backpack import pack_project - pack_project(project_name, dirpath) + pack_project(project_name, dirpath, database_only) - def unpack_project(self, zip_filepath, new_root): + def unpack_project(self, zip_filepath, new_root, database_only): from openpype.lib.project_backpack import unpack_project - unpack_project(zip_filepath, new_root) + unpack_project(zip_filepath, new_root, database_only) From 37d7a87fd116b2f3351df6ac42500ea696b427e6 Mon Sep 17 00:00:00 2001 From: Ynbot Date: Sat, 29 Apr 2023 03:25:06 +0000 Subject: [PATCH 180/187] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 080fd6eece..72297a4430 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.15.6-nightly.1" +__version__ = "3.15.6-nightly.2" From 3e2559c0c2797c8c3dba717ac9594cd22499b80b Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Sat, 29 Apr 2023 16:32:52 +0100 Subject: [PATCH 181/187] Fix repair and validation --- 
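For context on the change below (illustration only, not part of the diff): with this fix the validator reads its "attributes" setting as a mapping of node names to attribute/value pairs, roughly of this shape (node, attribute names and values here are hypothetical examples):

attributes = {
    "defaultRenderGlobals": {"currentRenderer": "arnold"},
    "defaultResolution": {"width": 1920, "height": 1080},
}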
openpype/hosts/maya/plugins/publish/validate_attributes.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_attributes.py b/openpype/hosts/maya/plugins/publish/validate_attributes.py index 6ca9afb9a4..7ebd9d7d03 100644 --- a/openpype/hosts/maya/plugins/publish/validate_attributes.py +++ b/openpype/hosts/maya/plugins/publish/validate_attributes.py @@ -6,7 +6,7 @@ import pyblish.api from openpype.hosts.maya.api.lib import set_attribute from openpype.pipeline.publish import ( - RepairContextAction, + RepairAction, ValidateContentsOrder, ) @@ -26,7 +26,7 @@ class ValidateAttributes(pyblish.api.InstancePlugin): order = ValidateContentsOrder label = "Attributes" hosts = ["maya"] - actions = [RepairContextAction] + actions = [RepairAction] optional = True attributes = None @@ -81,7 +81,7 @@ class ValidateAttributes(pyblish.api.InstancePlugin): if node_name not in attributes: continue - for attr_name, expected in attributes.items(): + for attr_name, expected in attributes[node_name].items(): # Skip if attribute does not exist if not cmds.attributeQuery(attr_name, node=node, exists=True): From b8ce6e9e9c10383c7e7e0c36fba7bb603a5d9ee7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 2 May 2023 11:19:50 +0200 Subject: [PATCH 182/187] Photoshop: add autocreators for review and flat image (#4871) * OP-5656 - added auto creator for review in PS Review instance should be togglable. Review instance needs to be created for non publisher based workflows. * OP-5656 - refactored names * OP-5656 - refactored names * OP-5656 - new auto creator for flat image In old version flat image was created if no instances were created. Explicit auto creator added for clarity. Standardization of state of plugins * OP-5656 - updated according to auto image creator Subset template should be used from autocreator and not be separate. * OP-5656 - fix proper creator name * OP-5656 - fix log message * OP-5656 - fix use enable state * OP-5656 - fix formatting * OP-5656 - add review toggle to image instance For special cases where each image should have separate review. * OP-5656 - fix description * OP-5656 - fix not present asset and task in instance context * OP-5656 - refactor - both auto creators should use same class Provided separate description. * OP-5656 - fix - propagate review to families Image and auto image could have now review flag. Bottom logic is only for Webpublisher. * OP-5656 - fix - rename review files to avaid collision Image family produces jpg and png, jpg review would clash with name. It should be replaced by 'jpg_jpg'. * OP-5656 - fix - limit additional auto created only on WP In artist based publishing auto image would be created by auto creator (if enabled). Artist might want to disable image creation. * OP-5656 - added mark_for_review flag to Publish tab * OP-5656 - fixes for auto creator * OP-5656 - fixe - outputDef not needed outputDef should contain dict of output definition. In PS it doesn't make sense as it has separate extract_review without output definitions. * OP-5656 - added persistency of changes to auto creators Changes as enabling/disabling, changing review flag should persist. * OP-5656 - added documentation for admins * OP-5656 - added link to new documentation for admins * OP-5656 - Hound * OP-5656 - Hound * OP-5656 - fix shared families list * OP-5656 - added default variant for review and workfile creator For workfile Main was default variant, "" was for review. 
* OP-5656 - fix - use values from Settings * OP-5656 - fix - use original name of review for main review family outputName cannot be in repre or file would have ..._jpg.jpg * OP-5656 - refactor - standardized settings Active by default denotes if created instance is active (eg. publishable) when created. * OP-5656 - fixes for skipping collecting auto_image data["ids"] are necessary for extracting. Members are physical layers in image, ids are "virtual" items, won't get grouped into real image instance. * OP-5656 - reworked auto collectors This allows to use automatic test for proper testing. * OP-5656 - added automatic tests * OP-5656 - fixes for auto collectors * OP-5656 - removed unnecessary collector Logic moved to auto collectors. * OP-5656 - Hound --- .../create/workfile_creator.py => lib.py} | 23 +-- .../plugins/create/create_flatten_image.py | 120 ++++++++++++++ .../photoshop/plugins/create/create_image.py | 47 +++++- .../photoshop/plugins/create/create_review.py | 28 ++++ .../plugins/create/create_workfile.py | 28 ++++ .../plugins/publish/collect_auto_image.py | 101 ++++++++++++ .../plugins/publish/collect_auto_review.py | 92 +++++++++++ .../plugins/publish/collect_auto_workfile.py | 99 ++++++++++++ .../plugins/publish/collect_instances.py | 116 -------------- .../plugins/publish/collect_review.py | 32 +--- .../plugins/publish/collect_workfile.py | 57 ++----- .../plugins/publish/extract_review.py | 34 ++-- .../defaults/project_settings/photoshop.json | 29 +++- .../schema_project_photoshop.json | 151 +++++++++++++++--- .../test_publish_in_photoshop_auto_image.py | 93 +++++++++++ .../test_publish_in_photoshop_review.py | 111 +++++++++++++ website/docs/admin_hosts_photoshop.md | 127 +++++++++++++++ .../assets/admin_hosts_photoshop_settings.png | Bin 0 -> 14364 bytes website/sidebars.js | 1 + 19 files changed, 1044 insertions(+), 245 deletions(-) rename openpype/hosts/photoshop/{plugins/create/workfile_creator.py => lib.py} (83%) create mode 100644 openpype/hosts/photoshop/plugins/create/create_flatten_image.py create mode 100644 openpype/hosts/photoshop/plugins/create/create_review.py create mode 100644 openpype/hosts/photoshop/plugins/create/create_workfile.py create mode 100644 openpype/hosts/photoshop/plugins/publish/collect_auto_image.py create mode 100644 openpype/hosts/photoshop/plugins/publish/collect_auto_review.py create mode 100644 openpype/hosts/photoshop/plugins/publish/collect_auto_workfile.py delete mode 100644 openpype/hosts/photoshop/plugins/publish/collect_instances.py create mode 100644 tests/integration/hosts/photoshop/test_publish_in_photoshop_auto_image.py create mode 100644 tests/integration/hosts/photoshop/test_publish_in_photoshop_review.py create mode 100644 website/docs/admin_hosts_photoshop.md create mode 100644 website/docs/assets/admin_hosts_photoshop_settings.png diff --git a/openpype/hosts/photoshop/plugins/create/workfile_creator.py b/openpype/hosts/photoshop/lib.py similarity index 83% rename from openpype/hosts/photoshop/plugins/create/workfile_creator.py rename to openpype/hosts/photoshop/lib.py index f5d56adcbc..ae7a33b7b6 100644 --- a/openpype/hosts/photoshop/plugins/create/workfile_creator.py +++ b/openpype/hosts/photoshop/lib.py @@ -7,28 +7,26 @@ from openpype.pipeline import ( from openpype.hosts.photoshop.api.pipeline import cache_and_get_instances -class PSWorkfileCreator(AutoCreator): - identifier = "workfile" - family = "workfile" - - default_variant = "Main" - +class PSAutoCreator(AutoCreator): + """Generic autocreator to extend.""" def 
get_instance_attr_defs(self): return [] def collect_instances(self): for instance_data in cache_and_get_instances(self): creator_id = instance_data.get("creator_identifier") + if creator_id == self.identifier: - subset_name = instance_data["subset"] - instance = CreatedInstance( - self.family, subset_name, instance_data, self + instance = CreatedInstance.from_existing( + instance_data, self ) self._add_instance_to_context(instance) def update_instances(self, update_list): - # nothing to change on workfiles - pass + self.log.debug("update_list:: {}".format(update_list)) + for created_inst, _changes in update_list: + api.stub().imprint(created_inst.get("instance_id"), + created_inst.data_to_store()) def create(self, options=None): existing_instance = None @@ -58,6 +56,9 @@ class PSWorkfileCreator(AutoCreator): project_name, host_name, None )) + if not self.active_on_create: + data["active"] = False + new_instance = CreatedInstance( self.family, subset_name, data, self ) diff --git a/openpype/hosts/photoshop/plugins/create/create_flatten_image.py b/openpype/hosts/photoshop/plugins/create/create_flatten_image.py new file mode 100644 index 0000000000..3bc61c8184 --- /dev/null +++ b/openpype/hosts/photoshop/plugins/create/create_flatten_image.py @@ -0,0 +1,120 @@ +from openpype.pipeline import CreatedInstance + +from openpype.lib import BoolDef +import openpype.hosts.photoshop.api as api +from openpype.hosts.photoshop.lib import PSAutoCreator +from openpype.pipeline.create import get_subset_name +from openpype.client import get_asset_by_name + + +class AutoImageCreator(PSAutoCreator): + """Creates flatten image from all visible layers. + + Used in simplified publishing as auto created instance. + Must be enabled in Setting and template for subset name provided + """ + identifier = "auto_image" + family = "image" + + # Settings + default_variant = "" + # - Mark by default instance for review + mark_for_review = True + active_on_create = True + + def create(self, options=None): + existing_instance = None + for instance in self.create_context.instances: + if instance.creator_identifier == self.identifier: + existing_instance = instance + break + + context = self.create_context + project_name = context.get_current_project_name() + asset_name = context.get_current_asset_name() + task_name = context.get_current_task_name() + host_name = context.host_name + asset_doc = get_asset_by_name(project_name, asset_name) + + if existing_instance is None: + subset_name = get_subset_name( + self.family, self.default_variant, task_name, asset_doc, + project_name, host_name + ) + + publishable_ids = [layer.id for layer in api.stub().get_layers() + if layer.visible] + data = { + "asset": asset_name, + "task": task_name, + # ids are "virtual" layers, won't get grouped as 'members' do + # same difference in color coded layers in WP + "ids": publishable_ids + } + + if not self.active_on_create: + data["active"] = False + + creator_attributes = {"mark_for_review": self.mark_for_review} + data.update({"creator_attributes": creator_attributes}) + + new_instance = CreatedInstance( + self.family, subset_name, data, self + ) + self._add_instance_to_context(new_instance) + api.stub().imprint(new_instance.get("instance_id"), + new_instance.data_to_store()) + + elif ( # existing instance from different context + existing_instance["asset"] != asset_name + or existing_instance["task"] != task_name + ): + subset_name = get_subset_name( + self.family, self.default_variant, task_name, asset_doc, + project_name, host_name + ) + + 
existing_instance["asset"] = asset_name + existing_instance["task"] = task_name + existing_instance["subset"] = subset_name + + api.stub().imprint(existing_instance.get("instance_id"), + existing_instance.data_to_store()) + + def get_pre_create_attr_defs(self): + return [ + BoolDef( + "mark_for_review", + label="Review", + default=self.mark_for_review + ) + ] + + def get_instance_attr_defs(self): + return [ + BoolDef( + "mark_for_review", + label="Review" + ) + ] + + def apply_settings(self, project_settings, system_settings): + plugin_settings = ( + project_settings["photoshop"]["create"]["AutoImageCreator"] + ) + + self.active_on_create = plugin_settings["active_on_create"] + self.default_variant = plugin_settings["default_variant"] + self.mark_for_review = plugin_settings["mark_for_review"] + self.enabled = plugin_settings["enabled"] + + def get_detail_description(self): + return """Creator for flatten image. + + Studio might configure simple publishing workflow. In that case + `image` instance is automatically created which will publish flat + image from all visible layers. + + Artist might disable this instance from publishing or from creating + review for it though. + """ diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index 3d82d6b6f0..f3165fca57 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -23,6 +23,11 @@ class ImageCreator(Creator): family = "image" description = "Image creator" + # Settings + default_variants = "" + mark_for_review = False + active_on_create = True + def create(self, subset_name_from_ui, data, pre_create_data): groups_to_create = [] top_layers_to_wrap = [] @@ -94,6 +99,12 @@ class ImageCreator(Creator): data.update({"layer_name": layer_name}) data.update({"long_name": "_".join(layer_names_in_hierarchy)}) + creator_attributes = {"mark_for_review": self.mark_for_review} + data.update({"creator_attributes": creator_attributes}) + + if not self.active_on_create: + data["active"] = False + new_instance = CreatedInstance(self.family, subset_name, data, self) @@ -134,11 +145,6 @@ class ImageCreator(Creator): self.host.remove_instance(instance) self._remove_instance_from_context(instance) - def get_default_variants(self): - return [ - "Main" - ] - def get_pre_create_attr_defs(self): output = [ BoolDef("use_selection", default=True, @@ -148,10 +154,34 @@ class ImageCreator(Creator): label="Create separate instance for each selected"), BoolDef("use_layer_name", default=False, - label="Use layer name in subset") + label="Use layer name in subset"), + BoolDef( + "mark_for_review", + label="Create separate review", + default=False + ) ] return output + def get_instance_attr_defs(self): + return [ + BoolDef( + "mark_for_review", + label="Review" + ) + ] + + def apply_settings(self, project_settings, system_settings): + plugin_settings = ( + project_settings["photoshop"]["create"]["ImageCreator"] + ) + + self.active_on_create = plugin_settings["active_on_create"] + self.default_variants = plugin_settings["default_variants"] + self.mark_for_review = plugin_settings["mark_for_review"] + self.enabled = plugin_settings["enabled"] + + def get_detail_description(self): return """Creator for Image instances @@ -180,6 +210,11 @@ class ImageCreator(Creator): but layer name should be used (set explicitly in UI or implicitly if multiple images should be created), it is added in capitalized form as a suffix to subset name. 
+ + Each image could have its separate review created if necessary via + `Create separate review` toggle. + But more use case is to use separate `review` instance to create review + from all published items. """ def _handle_legacy(self, instance_data): diff --git a/openpype/hosts/photoshop/plugins/create/create_review.py b/openpype/hosts/photoshop/plugins/create/create_review.py new file mode 100644 index 0000000000..064485d465 --- /dev/null +++ b/openpype/hosts/photoshop/plugins/create/create_review.py @@ -0,0 +1,28 @@ +from openpype.hosts.photoshop.lib import PSAutoCreator + + +class ReviewCreator(PSAutoCreator): + """Creates review instance which might be disabled from publishing.""" + identifier = "review" + family = "review" + + default_variant = "Main" + + def get_detail_description(self): + return """Auto creator for review. + + Photoshop review is created from all published images or from all + visible layers if no `image` instances got created. + + Review might be disabled by an artist (instance shouldn't be deleted as + it will get recreated in next publish either way). + """ + + def apply_settings(self, project_settings, system_settings): + plugin_settings = ( + project_settings["photoshop"]["create"]["ReviewCreator"] + ) + + self.default_variant = plugin_settings["default_variant"] + self.active_on_create = plugin_settings["active_on_create"] + self.enabled = plugin_settings["enabled"] diff --git a/openpype/hosts/photoshop/plugins/create/create_workfile.py b/openpype/hosts/photoshop/plugins/create/create_workfile.py new file mode 100644 index 0000000000..d498f0549c --- /dev/null +++ b/openpype/hosts/photoshop/plugins/create/create_workfile.py @@ -0,0 +1,28 @@ +from openpype.hosts.photoshop.lib import PSAutoCreator + + +class WorkfileCreator(PSAutoCreator): + identifier = "workfile" + family = "workfile" + + default_variant = "Main" + + def get_detail_description(self): + return """Auto creator for workfile. + + It is expected that each publish will also publish its source workfile + for safekeeping. This creator triggers automatically without need for + an artist to remember and trigger it explicitly. + + Workfile instance could be disabled if it is not required to publish + workfile. (Instance shouldn't be deleted though as it will be recreated + in next publish automatically). + """ + + def apply_settings(self, project_settings, system_settings): + plugin_settings = ( + project_settings["photoshop"]["create"]["WorkfileCreator"] + ) + + self.active_on_create = plugin_settings["active_on_create"] + self.enabled = plugin_settings["enabled"] diff --git a/openpype/hosts/photoshop/plugins/publish/collect_auto_image.py b/openpype/hosts/photoshop/plugins/publish/collect_auto_image.py new file mode 100644 index 0000000000..ce408f8d01 --- /dev/null +++ b/openpype/hosts/photoshop/plugins/publish/collect_auto_image.py @@ -0,0 +1,101 @@ +import pyblish.api + +from openpype.hosts.photoshop import api as photoshop +from openpype.pipeline.create import get_subset_name + + +class CollectAutoImage(pyblish.api.ContextPlugin): + """Creates auto image in non artist based publishes (Webpublisher). 
+ + 'remotepublish' should be renamed to 'autopublish' or similar in the future + """ + + label = "Collect Auto Image" + order = pyblish.api.CollectorOrder + hosts = ["photoshop"] + order = pyblish.api.CollectorOrder + 0.2 + + targets = ["remotepublish"] + + def process(self, context): + family = "image" + for instance in context: + creator_identifier = instance.data.get("creator_identifier") + if creator_identifier and creator_identifier == "auto_image": + self.log.debug("Auto image instance found, won't create new") + return + + project_name = context.data["anatomyData"]["project"]["name"] + proj_settings = context.data["project_settings"] + task_name = context.data["anatomyData"]["task"]["name"] + host_name = context.data["hostName"] + asset_doc = context.data["assetEntity"] + asset_name = asset_doc["name"] + + auto_creator = proj_settings.get( + "photoshop", {}).get( + "create", {}).get( + "AutoImageCreator", {}) + + if not auto_creator or not auto_creator["enabled"]: + self.log.debug("Auto image creator disabled, won't create new") + return + + stub = photoshop.stub() + stored_items = stub.get_layers_metadata() + for item in stored_items: + if item.get("creator_identifier") == "auto_image": + if not item.get("active"): + self.log.debug("Auto_image instance disabled") + return + + layer_items = stub.get_layers() + + publishable_ids = [layer.id for layer in layer_items + if layer.visible] + + # collect stored image instances + instance_names = [] + for layer_item in layer_items: + layer_meta_data = stub.read(layer_item, stored_items) + + # Skip layers without metadata. + if layer_meta_data is None: + continue + + # Skip containers. + if "container" in layer_meta_data["id"]: + continue + + # active might not be in legacy meta + if layer_meta_data.get("active", True) and layer_item.visible: + instance_names.append(layer_meta_data["subset"]) + + if len(instance_names) == 0: + variants = proj_settings.get( + "photoshop", {}).get( + "create", {}).get( + "CreateImage", {}).get( + "default_variants", ['']) + family = "image" + + variant = context.data.get("variant") or variants[0] + + subset_name = get_subset_name( + family, variant, task_name, asset_doc, + project_name, host_name + ) + + instance = context.create_instance(subset_name) + instance.data["family"] = family + instance.data["asset"] = asset_name + instance.data["subset"] = subset_name + instance.data["ids"] = publishable_ids + instance.data["publish"] = True + instance.data["creator_identifier"] = "auto_image" + + if auto_creator["mark_for_review"]: + instance.data["creator_attributes"] = {"mark_for_review": True} + instance.data["families"] = ["review"] + + self.log.info("auto image instance: {} ".format(instance.data)) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_auto_review.py b/openpype/hosts/photoshop/plugins/publish/collect_auto_review.py new file mode 100644 index 0000000000..7de4adcaf4 --- /dev/null +++ b/openpype/hosts/photoshop/plugins/publish/collect_auto_review.py @@ -0,0 +1,92 @@ +""" +Requires: + None + +Provides: + instance -> family ("review") +""" +import pyblish.api + +from openpype.hosts.photoshop import api as photoshop +from openpype.pipeline.create import get_subset_name + + +class CollectAutoReview(pyblish.api.ContextPlugin): + """Create review instance in non artist based workflow. + + Called only if PS is triggered in Webpublisher or in tests. 
+ """ + + label = "Collect Auto Review" + hosts = ["photoshop"] + order = pyblish.api.CollectorOrder + 0.2 + targets = ["remotepublish"] + + publish = True + + def process(self, context): + family = "review" + has_review = False + for instance in context: + if instance.data["family"] == family: + self.log.debug("Review instance found, won't create new") + has_review = True + + creator_attributes = instance.data.get("creator_attributes", {}) + if (creator_attributes.get("mark_for_review") and + "review" not in instance.data["families"]): + instance.data["families"].append("review") + + if has_review: + return + + stub = photoshop.stub() + stored_items = stub.get_layers_metadata() + for item in stored_items: + if item.get("creator_identifier") == family: + if not item.get("active"): + self.log.debug("Review instance disabled") + return + + auto_creator = context.data["project_settings"].get( + "photoshop", {}).get( + "create", {}).get( + "ReviewCreator", {}) + + if not auto_creator or not auto_creator["enabled"]: + self.log.debug("Review creator disabled, won't create new") + return + + variant = (context.data.get("variant") or + auto_creator["default_variant"]) + + project_name = context.data["anatomyData"]["project"]["name"] + proj_settings = context.data["project_settings"] + task_name = context.data["anatomyData"]["task"]["name"] + host_name = context.data["hostName"] + asset_doc = context.data["assetEntity"] + asset_name = asset_doc["name"] + + subset_name = get_subset_name( + family, + variant, + task_name, + asset_doc, + project_name, + host_name=host_name, + project_settings=proj_settings + ) + + instance = context.create_instance(subset_name) + instance.data.update({ + "subset": subset_name, + "label": subset_name, + "name": subset_name, + "family": family, + "families": [], + "representations": [], + "asset": asset_name, + "publish": self.publish + }) + + self.log.debug("auto review created::{}".format(instance.data)) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_auto_workfile.py b/openpype/hosts/photoshop/plugins/publish/collect_auto_workfile.py new file mode 100644 index 0000000000..d10cf62c67 --- /dev/null +++ b/openpype/hosts/photoshop/plugins/publish/collect_auto_workfile.py @@ -0,0 +1,99 @@ +import os +import pyblish.api + +from openpype.hosts.photoshop import api as photoshop +from openpype.pipeline.create import get_subset_name + + +class CollectAutoWorkfile(pyblish.api.ContextPlugin): + """Collect current script for publish.""" + + order = pyblish.api.CollectorOrder + 0.2 + label = "Collect Workfile" + hosts = ["photoshop"] + + targets = ["remotepublish"] + + def process(self, context): + family = "workfile" + file_path = context.data["currentFile"] + _, ext = os.path.splitext(file_path) + staging_dir = os.path.dirname(file_path) + base_name = os.path.basename(file_path) + workfile_representation = { + "name": ext[1:], + "ext": ext[1:], + "files": base_name, + "stagingDir": staging_dir, + } + + for instance in context: + if instance.data["family"] == family: + self.log.debug("Workfile instance found, won't create new") + instance.data.update({ + "label": base_name, + "name": base_name, + "representations": [], + }) + + # creating representation + _, ext = os.path.splitext(file_path) + instance.data["representations"].append( + workfile_representation) + + return + + stub = photoshop.stub() + stored_items = stub.get_layers_metadata() + for item in stored_items: + if item.get("creator_identifier") == family: + if not item.get("active"): + 
self.log.debug("Workfile instance disabled") + return + + project_name = context.data["anatomyData"]["project"]["name"] + proj_settings = context.data["project_settings"] + auto_creator = proj_settings.get( + "photoshop", {}).get( + "create", {}).get( + "WorkfileCreator", {}) + + if not auto_creator or not auto_creator["enabled"]: + self.log.debug("Workfile creator disabled, won't create new") + return + + # context.data["variant"] might come only from collect_batch_data + variant = (context.data.get("variant") or + auto_creator["default_variant"]) + + task_name = context.data["anatomyData"]["task"]["name"] + host_name = context.data["hostName"] + asset_doc = context.data["assetEntity"] + asset_name = asset_doc["name"] + + subset_name = get_subset_name( + family, + variant, + task_name, + asset_doc, + project_name, + host_name=host_name, + project_settings=proj_settings + ) + + # Create instance + instance = context.create_instance(subset_name) + instance.data.update({ + "subset": subset_name, + "label": base_name, + "name": base_name, + "family": family, + "families": [], + "representations": [], + "asset": asset_name + }) + + # creating representation + instance.data["representations"].append(workfile_representation) + + self.log.debug("auto workfile review created:{}".format(instance.data)) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_instances.py deleted file mode 100644 index 5bf12379b1..0000000000 --- a/openpype/hosts/photoshop/plugins/publish/collect_instances.py +++ /dev/null @@ -1,116 +0,0 @@ -import pprint - -import pyblish.api - -from openpype.settings import get_project_settings -from openpype.hosts.photoshop import api as photoshop -from openpype.lib import prepare_template_data -from openpype.pipeline import legacy_io - - -class CollectInstances(pyblish.api.ContextPlugin): - """Gather instances by LayerSet and file metadata - - Collects publishable instances from file metadata or enhance - already collected by creator (family == "image"). - - If no image instances are explicitly created, it looks if there is value - in `flatten_subset_template` (configurable in Settings), in that case it - produces flatten image with all visible layers. - - Identifier: - id (str): "pyblish.avalon.instance" - """ - - label = "Collect Instances" - order = pyblish.api.CollectorOrder - hosts = ["photoshop"] - families_mapping = { - "image": [] - } - # configurable in Settings - flatten_subset_template = "" - - def process(self, context): - instance_by_layer_id = {} - for instance in context: - if ( - instance.data["family"] == "image" and - instance.data.get("members")): - layer_id = str(instance.data["members"][0]) - instance_by_layer_id[layer_id] = instance - - stub = photoshop.stub() - layer_items = stub.get_layers() - layers_meta = stub.get_layers_metadata() - instance_names = [] - - all_layer_ids = [] - for layer_item in layer_items: - layer_meta_data = stub.read(layer_item, layers_meta) - all_layer_ids.append(layer_item.id) - - # Skip layers without metadata. - if layer_meta_data is None: - continue - - # Skip containers. 
- if "container" in layer_meta_data["id"]: - continue - - # active might not be in legacy meta - if not layer_meta_data.get("active", True): - continue - - instance = instance_by_layer_id.get(str(layer_item.id)) - if instance is None: - instance = context.create_instance(layer_meta_data["subset"]) - - instance.data["layer"] = layer_item - instance.data.update(layer_meta_data) - instance.data["families"] = self.families_mapping[ - layer_meta_data["family"] - ] - instance.data["publish"] = layer_item.visible - instance_names.append(layer_meta_data["subset"]) - - # Produce diagnostic message for any graphical - # user interface interested in visualising it. - self.log.info("Found: \"%s\" " % instance.data["name"]) - self.log.info("instance: {} ".format( - pprint.pformat(instance.data, indent=4))) - - if len(instance_names) != len(set(instance_names)): - self.log.warning("Duplicate instances found. " + - "Remove unwanted via Publisher") - - if len(instance_names) == 0 and self.flatten_subset_template: - project_name = context.data["projectEntity"]["name"] - variants = get_project_settings(project_name).get( - "photoshop", {}).get( - "create", {}).get( - "CreateImage", {}).get( - "defaults", ['']) - family = "image" - task_name = legacy_io.Session["AVALON_TASK"] - asset_name = context.data["assetEntity"]["name"] - - variant = context.data.get("variant") or variants[0] - fill_pairs = { - "variant": variant, - "family": family, - "task": task_name - } - - subset = self.flatten_subset_template.format( - **prepare_template_data(fill_pairs)) - - instance = context.create_instance(subset) - instance.data["family"] = family - instance.data["asset"] = asset_name - instance.data["subset"] = subset - instance.data["ids"] = all_layer_ids - instance.data["families"] = self.families_mapping[family] - instance.data["publish"] = True - - self.log.info("flatten instance: {} ".format(instance.data)) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_review.py b/openpype/hosts/photoshop/plugins/publish/collect_review.py index 7e598a8250..87ec4ee3f1 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_review.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_review.py @@ -14,10 +14,7 @@ from openpype.pipeline.create import get_subset_name class CollectReview(pyblish.api.ContextPlugin): - """Gather the active document as review instance. - - Triggers once even if no 'image' is published as by defaults it creates - flatten image from a workfile. + """Adds review to families for instances marked to be reviewable. 
""" label = "Collect Review" @@ -28,25 +25,8 @@ class CollectReview(pyblish.api.ContextPlugin): publish = True def process(self, context): - family = "review" - subset = get_subset_name( - family, - context.data.get("variant", ''), - context.data["anatomyData"]["task"]["name"], - context.data["assetEntity"], - context.data["anatomyData"]["project"]["name"], - host_name=context.data["hostName"], - project_settings=context.data["project_settings"] - ) - - instance = context.create_instance(subset) - instance.data.update({ - "subset": subset, - "label": subset, - "name": subset, - "family": family, - "families": [], - "representations": [], - "asset": os.environ["AVALON_ASSET"], - "publish": self.publish - }) + for instance in context: + creator_attributes = instance.data["creator_attributes"] + if (creator_attributes.get("mark_for_review") and + "review" not in instance.data["families"]): + instance.data["families"].append("review") diff --git a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py index 9a5aad5569..9625464499 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py @@ -14,50 +14,19 @@ class CollectWorkfile(pyblish.api.ContextPlugin): default_variant = "Main" def process(self, context): - existing_instance = None for instance in context: if instance.data["family"] == "workfile": - self.log.debug("Workfile instance found, won't create new") - existing_instance = instance - break + file_path = context.data["currentFile"] + _, ext = os.path.splitext(file_path) + staging_dir = os.path.dirname(file_path) + base_name = os.path.basename(file_path) - family = "workfile" - # context.data["variant"] might come only from collect_batch_data - variant = context.data.get("variant") or self.default_variant - subset = get_subset_name( - family, - variant, - context.data["anatomyData"]["task"]["name"], - context.data["assetEntity"], - context.data["anatomyData"]["project"]["name"], - host_name=context.data["hostName"], - project_settings=context.data["project_settings"] - ) - - file_path = context.data["currentFile"] - staging_dir = os.path.dirname(file_path) - base_name = os.path.basename(file_path) - - # Create instance - if existing_instance is None: - instance = context.create_instance(subset) - instance.data.update({ - "subset": subset, - "label": base_name, - "name": base_name, - "family": family, - "families": [], - "representations": [], - "asset": os.environ["AVALON_ASSET"] - }) - else: - instance = existing_instance - - # creating representation - _, ext = os.path.splitext(file_path) - instance.data["representations"].append({ - "name": ext[1:], - "ext": ext[1:], - "files": base_name, - "stagingDir": staging_dir, - }) + # creating representation + _, ext = os.path.splitext(file_path) + instance.data["representations"].append({ + "name": ext[1:], + "ext": ext[1:], + "files": base_name, + "stagingDir": staging_dir, + }) + return diff --git a/openpype/hosts/photoshop/plugins/publish/extract_review.py b/openpype/hosts/photoshop/plugins/publish/extract_review.py index 9d7eff0211..d5416a389d 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_review.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_review.py @@ -47,32 +47,42 @@ class ExtractReview(publish.Extractor): layers = self._get_layers_from_image_instances(instance) self.log.info("Layers image instance found: {}".format(layers)) + repre_name = "jpg" + 
repre_skeleton = { + "name": repre_name, + "ext": "jpg", + "stagingDir": staging_dir, + "tags": self.jpg_options['tags'], + } + + if instance.data["family"] != "review": + # enable creation of review, without this jpg review would clash + # with jpg of the image family + output_name = repre_name + repre_name = "{}_{}".format(repre_name, output_name) + repre_skeleton.update({"name": repre_name, + "outputName": output_name}) + if self.make_image_sequence and len(layers) > 1: self.log.info("Extract layers to image sequence.") img_list = self._save_sequence_images(staging_dir, layers) - instance.data["representations"].append({ - "name": "jpg", - "ext": "jpg", - "files": img_list, + repre_skeleton.update({ "frameStart": 0, "frameEnd": len(img_list), "fps": fps, - "stagingDir": staging_dir, - "tags": self.jpg_options['tags'], + "files": img_list, }) + instance.data["representations"].append(repre_skeleton) processed_img_names = img_list else: self.log.info("Extract layers to flatten image.") img_list = self._save_flatten_image(staging_dir, layers) - instance.data["representations"].append({ - "name": "jpg", - "ext": "jpg", - "files": img_list, # cannot be [] for single frame - "stagingDir": staging_dir, - "tags": self.jpg_options['tags'] + repre_skeleton.update({ + "files": img_list, }) + instance.data["representations"].append(repre_skeleton) processed_img_names = [img_list] ffmpeg_path = get_ffmpeg_tool_path("ffmpeg") diff --git a/openpype/settings/defaults/project_settings/photoshop.json b/openpype/settings/defaults/project_settings/photoshop.json index bcf21f55dd..2454691958 100644 --- a/openpype/settings/defaults/project_settings/photoshop.json +++ b/openpype/settings/defaults/project_settings/photoshop.json @@ -10,23 +10,40 @@ } }, "create": { - "CreateImage": { - "defaults": [ + "ImageCreator": { + "enabled": true, + "active_on_create": true, + "mark_for_review": false, + "default_variants": [ "Main" ] + }, + "AutoImageCreator": { + "enabled": false, + "active_on_create": true, + "mark_for_review": false, + "default_variant": "" + }, + "ReviewCreator": { + "enabled": true, + "active_on_create": true, + "default_variant": "" + }, + "WorkfileCreator": { + "enabled": true, + "active_on_create": true, + "default_variant": "Main" } }, "publish": { "CollectColorCodedInstances": { + "enabled": true, "create_flatten_image": "no", "flatten_subset_template": "", "color_code_mapping": [] }, - "CollectInstances": { - "flatten_subset_template": "" - }, "CollectReview": { - "publish": true + "enabled": true }, "CollectVersion": { "enabled": false diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json index 0071e632af..f6c46aba8b 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json @@ -31,16 +31,126 @@ { "type": "dict", "collapsible": true, - "key": "CreateImage", + "key": "ImageCreator", "label": "Create Image", + "checkbox_key": "enabled", "children": [ + { + "type": "label", + "label": "Manually create instance from layer or group of layers. \n Separate review could be created for this image to be sent to Asset Management System." 
+ }, + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "active_on_create", + "label": "Active by default" + }, + { + "type": "boolean", + "key": "mark_for_review", + "label": "Review by default" + }, { "type": "list", - "key": "defaults", - "label": "Default Subsets", + "key": "default_variants", + "label": "Default Variants", "object_type": "text" } ] + }, + { + "type": "dict", + "collapsible": true, + "key": "AutoImageCreator", + "label": "Create Flatten Image", + "checkbox_key": "enabled", + "children": [ + { + "type": "label", + "label": "Auto create image for all visible layers, used for simplified processing. \n Separate review could be created for this image to be sent to Asset Management System." + }, + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "active_on_create", + "label": "Active by default" + }, + { + "type": "boolean", + "key": "mark_for_review", + "label": "Review by default" + }, + { + "type": "text", + "key": "default_variant", + "label": "Default variant" + } + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "ReviewCreator", + "label": "Create Review", + "checkbox_key": "enabled", + "children": [ + { + "type": "label", + "label": "Auto create review instance containing all published image instances or visible layers if no image instance." + }, + { + "type": "boolean", + "key": "enabled", + "label": "Enabled", + "default": true + }, + { + "type": "boolean", + "key": "active_on_create", + "label": "Active by default" + }, + { + "type": "text", + "key": "default_variant", + "label": "Default variant" + } + ] + }, + { + "type": "dict", + "collapsible": true, + "key": "WorkfileCreator", + "label": "Create Workfile", + "checkbox_key": "enabled", + "children": [ + { + "type": "label", + "label": "Auto create workfile instance" + }, + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "active_on_create", + "label": "Active by default" + }, + { + "type": "text", + "key": "default_variant", + "label": "Default variant" + } + ] } ] }, @@ -56,11 +166,18 @@ "is_group": true, "key": "CollectColorCodedInstances", "label": "Collect Color Coded Instances", + "checkbox_key": "enabled", "children": [ { "type": "label", "label": "Set color for publishable layers, set its resulting family and template for subset name. 
\nCan create flatten image from published instances.(Applicable only for remote publishing!)" }, + { + "type": "boolean", + "key": "enabled", + "label": "Enabled", + "default": true + }, { "key": "create_flatten_image", "label": "Create flatten image", @@ -131,40 +248,26 @@ } ] }, - { - "type": "dict", - "collapsible": true, - "key": "CollectInstances", - "label": "Collect Instances", - "children": [ - { - "type": "label", - "label": "Name for flatten image created if no image instance present" - }, - { - "type": "text", - "key": "flatten_subset_template", - "label": "Subset template for flatten image" - } - ] - }, { "type": "dict", "collapsible": true, "key": "CollectReview", "label": "Collect Review", + "checkbox_key": "enabled", "children": [ { "type": "boolean", - "key": "publish", - "label": "Active" - } - ] + "key": "enabled", + "label": "Enabled", + "default": true + } + ] }, { "type": "dict", "key": "CollectVersion", "label": "Collect Version", + "checkbox_key": "enabled", "children": [ { "type": "label", diff --git a/tests/integration/hosts/photoshop/test_publish_in_photoshop_auto_image.py b/tests/integration/hosts/photoshop/test_publish_in_photoshop_auto_image.py new file mode 100644 index 0000000000..1594b36dec --- /dev/null +++ b/tests/integration/hosts/photoshop/test_publish_in_photoshop_auto_image.py @@ -0,0 +1,93 @@ +import logging + +from tests.lib.assert_classes import DBAssert +from tests.integration.hosts.photoshop.lib import PhotoshopTestClass + +log = logging.getLogger("test_publish_in_photoshop") + + +class TestPublishInPhotoshopAutoImage(PhotoshopTestClass): + """Test for publish in Phohoshop with different review configuration. + + Workfile contains 3 layers, auto image and review instances created. + + Test contains updates to Settings!!! 
+ + """ + PERSIST = True + + TEST_FILES = [ + ("1iLF6aNI31qlUCD1rGg9X9eMieZzxL-rc", + "test_photoshop_publish_auto_image.zip", "") + ] + + APP_GROUP = "photoshop" + # keep empty to locate latest installed variant or explicit + APP_VARIANT = "" + + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) + + TIMEOUT = 120 # publish timeout + + def test_db_asserts(self, dbcon, publish_finished): + """Host and input data dependent expected results in DB.""" + print("test_db_asserts") + failures = [] + + failures.append(DBAssert.count_of_types(dbcon, "version", 3)) + + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 0, + name="imageMainForeground")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 0, + name="imageMainBackground")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) + + failures.append( + DBAssert.count_of_types(dbcon, "representation", 5)) + + additional_args = {"context.subset": "imageMainForeground", + "context.ext": "png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 0, + additional_args=additional_args)) + + additional_args = {"context.subset": "imageMainBackground", + "context.ext": "png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 0, + additional_args=additional_args)) + + # review from image + additional_args = {"context.subset": "imageBeautyMain", + "context.ext": "jpg", + "name": "jpg_jpg"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "imageBeautyMain", + "context.ext": "jpg", + "name": "jpg"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "review"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + assert not any(failures) + + +if __name__ == "__main__": + test_case = TestPublishInPhotoshopAutoImage() diff --git a/tests/integration/hosts/photoshop/test_publish_in_photoshop_review.py b/tests/integration/hosts/photoshop/test_publish_in_photoshop_review.py new file mode 100644 index 0000000000..64b6868d7c --- /dev/null +++ b/tests/integration/hosts/photoshop/test_publish_in_photoshop_review.py @@ -0,0 +1,111 @@ +import logging + +from tests.lib.assert_classes import DBAssert +from tests.integration.hosts.photoshop.lib import PhotoshopTestClass + +log = logging.getLogger("test_publish_in_photoshop") + + +class TestPublishInPhotoshopImageReviews(PhotoshopTestClass): + """Test for publish in Phohoshop with different review configuration. + + Workfile contains 2 image instance, one has review flag, second doesn't. + + Regular `review` family is disabled. + + Expected result is to `imageMainForeground` to have additional file with + review, `imageMainBackground` without. No separate `review` family. + + `test_project_test_asset_imageMainForeground_v001_jpg.jpg` is expected name + of imageForeground review, `_jpg` suffix is needed to differentiate between + image and review file. 
+ + """ + PERSIST = True + + TEST_FILES = [ + ("12WGbNy9RJ3m9jlnk0Ib9-IZmONoxIz_p", + "test_photoshop_publish_review.zip", "") + ] + + APP_GROUP = "photoshop" + # keep empty to locate latest installed variant or explicit + APP_VARIANT = "" + + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) + + TIMEOUT = 120 # publish timeout + + def test_db_asserts(self, dbcon, publish_finished): + """Host and input data dependent expected results in DB.""" + print("test_db_asserts") + failures = [] + + failures.append(DBAssert.count_of_types(dbcon, "version", 3)) + + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="imageMainForeground")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="imageMainBackground")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) + + failures.append( + DBAssert.count_of_types(dbcon, "representation", 6)) + + additional_args = {"context.subset": "imageMainForeground", + "context.ext": "png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "imageMainForeground", + "context.ext": "jpg"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 2, + additional_args=additional_args)) + + additional_args = {"context.subset": "imageMainForeground", + "context.ext": "jpg", + "context.representation": "jpg_jpg"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "imageMainBackground", + "context.ext": "png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "imageMainBackground", + "context.ext": "jpg"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "imageMainBackground", + "context.ext": "jpg", + "context.representation": "jpg_jpg"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 0, + additional_args=additional_args)) + + additional_args = {"context.subset": "review"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 0, + additional_args=additional_args)) + + assert not any(failures) + + +if __name__ == "__main__": + test_case = TestPublishInPhotoshopImageReviews() diff --git a/website/docs/admin_hosts_photoshop.md b/website/docs/admin_hosts_photoshop.md new file mode 100644 index 0000000000..de684f01d2 --- /dev/null +++ b/website/docs/admin_hosts_photoshop.md @@ -0,0 +1,127 @@ +--- +id: admin_hosts_photoshop +title: Photoshop Settings +sidebar_label: Photoshop +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +## Photoshop settings + +There is a couple of settings that could configure publishing process for **Photoshop**. +All of them are Project based, eg. each project could have different configuration. + +Location: Settings > Project > Photoshop + +![AfterEffects Project Settings](assets/admin_hosts_photoshop_settings.png) + +## Color Management (ImageIO) + +Placeholder for Color Management. Currently not implemented yet. + +## Creator plugins + +Contains configurable items for creators used during publishing from Photoshop. 
+
+### Create Image
+
+Provides a list of [variants](artist_concepts.md#variant) that will be shown to an artist in the Publisher. The default value is `Main`.
+
+### Create Flatten Image
+
+Provides a simplified publishing process. It automatically creates a single `image` instance for the artist. This instance
+produces a flattened image from all visible layers in the workfile.
+
+- Subset template for flatten image - template for the subset name of this instance (for example `imageBeauty`)
+- Review - whether a separate review should be created for this instance
+
+### Create Review
+
+Creates a single `review` instance automatically. Artists may disable it if needed.
+
+### Create Workfile
+
+Creates a single `workfile` instance automatically. Artists may disable it if needed.
+
+## Publish plugins
+
+Contains configurable items for publish plugins used during publishing from Photoshop.
+
+### Collect Color Coded Instances
+
+Used only in remote publishing!
+
+Automatically creates `image` instances for a configurable highlight color set on a layer or group in the workfile.
+
+#### Create flatten image
+ - Flatten with images - produce an additional `image` with all published `image` instances merged
+ - Flatten only - produce only the merged `image` instance
+ - No - produce only separate `image` instances
+
+#### Subset template for flatten image
+
+Template used to create the subset name automatically (for example `image{layer}Main` uses the layer name in the subset name).
+
+### Collect Review
+
+Disable if no review should be created.
+
+### Collect Version
+
+If enabled, the version parsed from the workfile name is pushed to all published items. For example, if an artist publishes `test_asset_workfile_v005.psd`,
+the produced `image` and `review` files will contain `v005` (even if some previous versions were skipped for a particular family).
+
+### Validate Containers
+
+Checks that all assets imported into the workfile through `Loader` are at their latest version. This limits cases where an older version of an asset would be used.
+
+If enabled, an artist might still decide to disable this validation for a particular publish (for special use cases).
+Limit this optionality by toggling `Optional`.
+The `Active` toggle denotes whether artists see that optional validation as enabled by default.
+
+### Validate naming of subsets and layers
+
+A subset name cannot contain invalid characters, otherwise extraction to a file would fail.
+
+#### Regex pattern of invalid characters
+
+Contains problematic characters such as `/` that might cause an issue when a file (whose name contains the subset name) is created on the OS disk.
+
+#### Replacement character
+
+Replaces all offending characters with this one. `_` is the default.
+
+### Extract Image
+
+Controls the extension formats of published instances of the `image` family. `png` and `jpg` are the defaults.
+
+### Extract Review
+
+Controls output definitions of extracted reviews to upload to the Asset Management system (AM).
+
+#### Makes an image sequence instead of flatten image
+
+If multiple `image` instances are produced, the created images are glued into an image sequence (`mov`) so all of them can be reviewed separately.
+Without it, only a flattened image is produced.
+
+#### Maximum size of sources for review
+
+Sets a byte limit for the review file. Applicable if gigantic `image` instances are produced and the full image size is unnecessary to upload to AM.
+
+#### Extract jpg Options
+
+Handles tags for the produced `.jpg` representation. `Create review` and `Add review to Ftrack` are defaults.
+
+#### Extract mov Options
+
+Handles tags for the produced `.mov` representation.
`Create review` and `Add review to Ftrack` are defaults. + + +### Workfile Builder + +Allows to open prepared workfile for an artist when no workfile exists. Useful to share standards, additional helpful content in the workfile. + +Could be configured per `Task type`, eg. `composition` task type could use different `.psd` template file than `art` task. +Workfile template must be accessible for all artists. +(Currently not handled by [SiteSync](module_site_sync.md)) \ No newline at end of file diff --git a/website/docs/assets/admin_hosts_photoshop_settings.png b/website/docs/assets/admin_hosts_photoshop_settings.png new file mode 100644 index 0000000000000000000000000000000000000000..aaa6ecbed7b353733f8424abe3c2df0cb903935a GIT binary patch literal 14364 zcmc(Gd03KZ+xOix*{-cZxwTBI=2DYeDDAfDIF(B(gr-)mq_~1qR+b{vG^XWNIa5t8 zxa5jRxj<&Zm?M*q5VGtz;|>2nDud<0r-i_$KjRWpII?KyX^(an5M(vKi`M$ zIlcz~Do6{bQFFlm=SLs%jR63|7X8mGM%2@@0N_w@c>kW02?!}S;6}(~exLfxvNX&u zndrT1zTbD%E2`*rT!?ae*U`Ykp3-w$N_(F=>@@sU6zKOD`n2c1f#*p0hkgUuuP#4c zK=w)B+owK%ue@S|U3wX}mOn4|xV5pB0eNntxoy2&{r1*dSN-#*@uE+OOedBsu{i0O z<_{6pz%`Z~!<;PCIraRE*YHJ&Q}6`&CmSySu-$1f^=i}Ht9Afz=Q$dRmvCs#ihDNL z#Q-q+TL~uB)&My3b7k-1W+-s!aQkV8c|icMaYGn)4Ph3L^XtELKs<~f^M-ivOABB) zpZwoAg=x6FsMU0-dVQQR)xNdx64@NnT#>C}?bMAv~_J#UN zuJ2k-_s%vd&gfu7tchPr$(`roOhR9?1755+TWS)wj4q? zUQAcabJ>OW3Fv{_m}fp3uNz?_6Nyr8!V{|CbZ1{NDWg9B&0d#S%YKK#?r=iKTx!g? z_v=|eT^Z<@d$Z$+YkJFLlqt%$+0HAA(%0C?C))8t>%@as#h&xujhaG4>f$@yk6hVe zf$NDY4Kq`oH`(2DKQ#supR@?dc0GBiTK1-bI)5af^ulc5n0IdjDimcJXVQim-+W+x z=f*XZ-6P*sLI_Gl^QY=%C2n#Iq`Ydg_B7^VKOxaGEnKbK?94V}N00;tmi3gJ6T1%9 z;_aqq?@_D<08Ynghmju%Eu)Vd&qg&R_bu$R-m4*PSyZ%r;cI!AFElF7UxA6RCkMPa z44n-O-US!LY)Pm!w)h=-+TvktP2ixEjqk1F#Y19Fn-+1b!Dk-1F^n5Mfr6!&27O56 zCC1X9MZuO2!d6#I)%m>4WV43ueT-;Hkmfbmy^dpnMn_J93;GV_|1PRrGPI*6OREu> zF>*x%5pCh8;@Z#xkE*(c5oT?M6GoFgR&b!o<97h)yY}sopA1GX23}Z$PwuK5u?!5t z+OG#s{R{ZC>$dqy3{Z=J7r*Jh)}P*Asw0ajsb>>G>-Py#ERl#9v{OwDVh-{CfsCCAf)5#l>z$#Z>~V z=A}~;G61%fjZLxP8v@I-l^2%Kk7m^+Ohqa`4!4MZ;4-;?rdZg>HgOEu?Y|=_K>4xW zVpLS~1XT?`grxmZ)IGPSt?0&8uM*P&p~jGV5XvuRhQzEQaX6~adoVxt9Ta~JEcxrK z_deTF-g2JIrx&;^7M@bToN+z@i&Kun%dFkqid&S_MD{Z~;D<`Y;^dX|T_nM>B>}Kj zRxC}XQxC-9`4|= zQ|CpJx#SIi&n@MJK8h_bz?8K<_WV3HE6`r^?#Q{kH+#3t7aUj78j(YvP|NEV7T*pi zfgyZ4CF{tD`6F$|TzU;eu$!WD-MHRN&(;NJDbi>AeU_i4zPDoIdS@O6$|rcHQ@4%P z4_jwOQnG>!W>JQ7j+PZ8c1jUZ+scSc#xRZ7_6e(#1hhzb?3D_uFYuFD0t$Np=>)Lb zTGCUVjcUB(I2wo=PVa|pFSFKIMg<&-csh#ie;W$r%;YYg!LD$C?1lswJ}kS2XO&aKS{nTtEHOa-yXonaX*>brKObH2gPyj z#FUxDv}4SAJo_U_cJ3J|zg{0@zxb>v-|FZ(mgV5`$>=v-t|p{2z=LhX*Z=R@kIH^Kbfw_sH#xGH&Ee zn$#HwotJ1Kgur0b?_{$e;~l{|-=7ZbK5%J+hP>z%CTImU`6NDRui06xU0TpPO(z}M zII2W74(AlNIF9m1j-%1jYZjSw`U?eb0vk?{`w*hlk?JO)BAmz(#t^(q56ymYGMKvB z(3Q~db+g*JVQOh|TJ`0e1BxnTxJ)u(^gZyxzqfb~)PtuLubVkC)2Nk9N6x9DZBJ&n z6oy6=if|F$wWY?hcW>2W-6k8a?QZyde4?Z0pp$b-uO*$OZgSN~=3Jtz|My_w*P`P; z!!7KN9SqakG^s(DH|wcbBF@@<&BTRHazo4y6q=}pT0I) zW03_VGsN}IGzXLT#v~Vu_%>`v1J3;9rflxCdA1~sft)9hvky@32OuP5KrO_cqPtSU z=@HE-?K#1bn?4xbDJM zUrWSTN+m4`aqzpDtAc#C7yzno>CEW;7f)K96Xz{Bwz+0QDk-yzL%q zH-%-`;+V^hp6Q^EAo)X$2I|dTYz_Vp03;B?* z(lxZ|L)BDgoHV7XoIt`!T!QjiFN`c_y|*VVA7?}ex5Fa)u~QAW*H9CeAQ(KgP%k`T znj|nag6~RHcrX-~w-V?;0=)OwfA`9GyPhu0=mm~~U#=fApmwI<+=#8u)>vZte z0?f4uV=f_ie05-b81qk3puXgN+3_D!MlV54^E4K}k@@eshEv5uU6h_XPfGQ*VmFqb z@09XWfD6t)-)s}dKKf#RZd6@k;pkkz_LOhT_0;)-e1?3u#n&H^wWcXyyl&?2P%?#@ zz9&fT@r5w5jh+=rh@i6%KxgN!q*F2k%d(|M(B{qR;tXe)Ec+H%6IR2?wPt)9hW*k2 zOHbU}jP4{|*Vfot?bFr-7cW-NBsG&R4I65R507fvl&!EU3!Gt4Psx3j@}tYDd06t1 zs}6g38B^PYi-56`vz@C9tqJkV7)rsSr5C~3!;>NRcaS8nMrbJ*#JcAh!Y3Q%QG(WQ zT)(?RRb~A%M`7Btwx1Bclu>AmITUFd$$D#R_-PQ5t$0wXwnVZ7b@82+Lg$! 
zU;qwA7Eb15#H@m{FerEK*5qVjglrD*V#hB#Lmi*mGHU`l=qOnFD{F)0#~1NTV6Sa! zQP52_3k8#jy|SvLvJ3`E*z_q6jOeTqre;f8r`q&5DX=IzkP42O}Og&+Skd7|60w1Q>j@142J zVWGsG8kn4E%R5^})tta@J>Vjnxu6cwo4Ttvm1DMLsec+;l{N>sq`kc}bP3(R8>|}4 z)s9gXcRqVvFP%3VNVX0G!>InT&suYrjp!}uvo2|~^&`KgTxpURGl>GlP zK;;V71n+(S=?j`|Q1cAO0^w`9Ry;JUM+u|iJF&Zf7b&mq?>Od65Xf%^TebyN@CO)M zPj?2boOIgd_QdE}hTGC#{w17TBs)u3^%zfpPq}q{%m)^emmeh@|6uj1?8p)%EXiNjIR6bNCe~cke;kSoxSLpeUtVJ* z2v*cj!W^Rg%YM%gbjN-3pfT}yUvVoIwGO*FSSR|h29TYGzF|0=zI|6w} zp@bEa!goL7!=n2nZ40=hrpriZCphaA)`pmDIUW3FMoDg9o|fG&`wCQ)G!s5}>w$ph zCEl5^&$zrl<=({j&5?=VK;4g5m-kOzPzHUs6s|`ujg{@L4PV5z1t4^p%h_#SH=mC7 z%`3lAQ>iUkNKwJuFxZ`ffL8gF17=7Q!mc}|P&hO*rfSk3U!c`^5i{#C6$9FrfoIP{ z)3(c?MGFU#Xfrul89!ZO`iA1#wD7$(fgJz=%ER<~V?lptLGD)t7j&e{zoOb4KhL3J z!)J((7*uu{Ec-<`{oY(Jg}?M%b$l^egn~I3Hg4EcJB+ou3=z7V#_&~|1;C(BZ{CMB&5rx)LdfJV{sneCM}pgjo;fEU&3~GlN}DP4TmC4F zA>{}?)5Dmd{gNvETL=Ilw);!Um@KsF>B!;M~aZ>x11TMzff*)V@ zkDS2Y->-d2zmJ|jtgT-6XQgDD?+H8`-gWpM5diO^=WZgm6}=x&kFot9+{+@btwQ0#Cd`N zZwX`TjH;Vo%D)9EbX^+tJR>xn?dmG;f=EKsa~Yc+PuTD(@<)vz)Mx{s;Exip9J%g? zp*7ra+souIerU)-SvCV>gJ9;kYB%`o??C%L@!wL(`~$MRH<&4cPa;oR=*A&$BVMF@ zvtM_4o}f_MjrTHbF$42spT^eU5>90?(+)q;hSv?VW*mCb;Hrhgt}pZ)@_T~@TcWPj zZZHA_-^d3Rba|oUb;?ob>=%m(EsitG8PN{&gMhlrRs;bYy(pdp8Z+@R{x<4>epJZ< zflj2;>P@WGn>cd{%E0SJ`C8-sVMb|Z+rb8kfnsQ;<4L_cu+<|`$nx1oKz95R`J0$L8cSl%KRlVEh z_8a;q6LVV!6&R_E(>vvv$lbh>{=&axh1&`S9xsMZlDl!2wA(XniKiIxyk|{nrmAZR zsz!#wLWBtctkHof&q=HAPC#28Y9^kzBFU?D_jF(O^{{Y5JHV%ya&JEr|0z?(E+)za zww1r!da4sQ`SuihTXJZ0a;KNIppGYzVQZ(3#7|%O(9}68sJ=)XwY%IeZ^3E0P9pbJ zMszy~XBlf^*%lAIQf3CGyRL0g;`&nD6%~{SXg!NQ&vMXyWN3Qshy1BVqyhnMd@sa zEGnfGh*lq4L*M{6eV%#~8Ebn;q_{$Rf~)W8 zas+QHrr3Pb9fc<}7~M>3Vz(r`wv%bsG>RQLUWQ4C5TLWa=T>OpYVQduE2b2P!6$~*GYaZa@}_a&uTr+oH5^@<8g`V$r(OTp zRQ8TW5}<#Pi4vw?Vv`u}$WeG&HWMY#^uS#?y@irrBtD5{UFa%pRXy;68#4y>DtdHb zyn*-VRL?_%<}+)ZyH{3j*i#w1b6Y=LWyY%d%nWc7pW)nhGE_w@QvIP6Rp6jD`6uGI z=y3G_U11X7mju_a%Rg%VteSR>w0GA^{-AEs7TTCEM>>ddkly%0t4PMufzjkrnMjP> zp_NJBJ9`y`VKehbPS1Kj>NI9cQhnCO0=+1kYLRrF%vuL1ER{Lb@&$;`5CCh^gW+i_CAL<WacQL*})n#TQ$aFB8GPL3gp%|nj;p4PtS@1!#odLI7pF7F zk1WvzDk~sxKiq_%u*`M*w#Rob^2fU%h%(oiKUiLcx-Z2&Ec_@)j>|At?QF-|vzvTy zP5b4HqbovnbHUB;aziBiF*OJwz2$41o1xHz$t-fOWm~`Q58y?Bu_Z&U@mFb(OXx3l zml!8wzq_<=JjOy3t9^RruTL92*J|L)0mtwEHt;B}YKpl_=EYCm#)*6S70Wd}@x;oA zU9tZJ=Jo~xsyw_56^4%DKWoa5BwK3^|1;UqNKLvSCLd(2ccvq-xJF$jA{uThiFCwMe{p{qlK#u@!71^GfFlPUpkGE$lKv2Iert+{xdD_E8q z(VxJkUFQz!X0#&`Rv1Ud`69_7aU;SQ0d;8|#DE|44scUt1le5h}Jxl(__MEqe%9df}>^U?GYC}ob8yeB|bPV zt|iEgu`*RNcpxB&Y|@T=*L&3pT zX86ts<72tkc*W`I$(9SLOgYQ>YvTGW3lgg_zjWyb)Ol8@64%wo|QdGE{4wzQ9r^$~|-`1@A zZtl|Cb6O{=p+z@GeUJ5sKJUeraSGae_|2*;L4=8qqzv3pafN{>e{OG_B3ylbF;VI^ zcnojeu0DU(G26s{pK9Q)8lU*5N>nXXN3>zis%?l;1ZPO|b}2Z|b`-4p`@_rU9FFc; z+SPW*_;$y-Qin+Rk!8LF<+gC76R+lS_2~U1xW!;bBxhP86*~Rs7-vgk{^GP)bgZH3h9 zLX5u?)~9MPmf=@Ft2VhVe=G5c*Lih*dnDjdNcz2W|6W#L>v#>-j`-?};w6;2+l0hj zn7#2z5@(8`WI3f*e&S?j)&`&H(PRf@r@W;HHexhXEnU*7g%UWC$X%?5s8`oN)Emo! 
zUEM>^Ly=%3BSDv@rbM9;92{RLLe>$xro~7`?o84#ut+uRk8R1h{liB`SkX#h1Xx2F z-m`tYfnbEBGz=nfJg|lQ{?`1U)6El0dRQye7Fl&KFQ7q81NVGU_%Z10=vPu7DudI6evY#*H4O z`B2B+TK2fP&M1a;V73kb$v^B+ccCSXOg&9~9$4)-FX+r4tm$}Uv8(!%x5Qy9IA#TF z1#NngT;jiFR9~Hc>r#V&@r~j5-vDy?I~$w?|7=FZ*K)<1@MC{55;OAlZVNA@j$Zo~ z9~U#)CT_9NO^SnTQtZtjfa*5ruC_V*8GlMzEPjF?XY?e?@a1a|ctt7y57+4mj}4x% z@t6&_kO{*JCli4)B+%oRQCbz}{1TE@h@31wzle0eG*mR}5^f#g12cg-#p|wp7WIS^ zx5>QFr{-svv8EJX^5{dw0zj1y-fYIC4thn1RHLtmvHVV#Tw7vjdeuCHFiM+m@!MLM zF$=Ud`x0&~;e+vhC{0Uu5Rs`WubTo6X&~T+LtpT$#$zLe*Tx=!Ji)9NO-ty}3yquW z5K<|5HG{2K`AJ4KCVxe9fdI@JU?uDA@fGqi={~S{YEpm&-yCuq(KK2^YJ1{w_N%e9 zu}33Vpxg$$U|m7yF$MNb=5xln;)`o3Js<5)hc4xea`s2T3E3piLP;l^BghTnrG%Ql z%jj}F+XlV#$1>0b@u>mch0!%}nzjBW3>j7#Ul(OV@XoX|lrC$xD-3zlN2ZAzU+tg+ z$Fjgp_!F6BR%Q%QD(FSg)(u+8(@1Zu25xax{@PBc>AX_6_+#4x#K#j`fSj$UMYZ5H zs%7PebwjaZb*-CR85;R=FSK^;F09of9dtiFxb7U-R5}=5bEWYtA*mUHLQJ&a)gR!*$ zrbTHDC?IJvVOOvPp_^#UQ0{0mF9hCOgQ)k9Qj7odAqMF^no^3)pL_#pG5}>(;Eglk zFMi$D45USD@LJYFFen~_>^aC3A&Joz%pO3B5K?dp-~s__2Wm&vkIaBVi; zUpJAg`{>pqp-q}79s4Ih-i`$FBOhBPvdFa4+Akdb;q`Sgo9Xv9-gpHAPn#On=o*q_ zS|j6D30`0^Q$E#fGxKwjImi$0>&-*GMVVSQ@(0$W@7s?yZVe(GHT839D%14z7yYgO z7zbR+1$zgk`=M^PKq_ZB$>Z!&m@&!2%5RH#hIXhTeFTg9O8Pl}=SMR43Vkx!v@2%% zK||71o%1gDnuw1GlQJAAMd{G=csA&20;jk0istTGZ3LY01R0_;&5ma|UN^zuTw)G# z78*H)$=Aq9Zxx++u6fr!oti(Cu`E@t$nY~16e%28k3` z;PUreL%*uAsT$3U%TGuR(3whFXZj~sE9&0tn@GWQT^Dvuwrqq^KEsG^i|>Eq_YOq< zo!`S6SCkejl1(yqu{)^{wsLE%X}8~}0B1xzR_1y0rCeHVRAW=fX%B9+FFdNi*in$u zzDH)j=XP*c{Gn;#g~yCv>ghbWlh;jcm7=Hemg(oA@^09(Kk-`ebPDF}ff)F@^(jXk z3$mo4*%8NTL{86USFMt*Bf__d9}J!uyw_$M>GiN9tZKsF1yQRTF`6S4CoUfr*z&?l zW-7cf`)e6S^hnlR;1~kjviHr7oTfK1zIMEI1RM9{J#4TK;ivV#@uq26bIU7#e^hD2 z&QWZ2@&j6PV`O93*2o=C(DRI1;)*K7h1p@dqVQIV!(<4M)A()QY9g{kUkGxr8DTVV z8gldI#xt_4CE<_I=UQytL84@8l|Us*)!IX&j9UU4Sp_>}Q3S91c&W$l;6P8-O zQTiN}?f83pXhexAU_JRcR$d#vA!bEd{#j-0gIenRmu+bK06RqF^yUK>x8)MpqcNUC z4Krxt-XfJ zZa~3&N^w8I*JHhC{gEqf%L(Z4+){H=OI`n_N8=?aCxbfcW(#bsLqt-q5QkM{3jG$9 zN3D73!<`H&RQV3gXw6DOW&O}>Dk!q+{~Jxxgsz)tO-hO$;L1j}dIc7ctL|JioN zKQ}~BvKBDW7l3650Q{>=KZ1j5>g)j^Zi~L}@>Mz1ef|pzf}twp^l4=e z(o!@b!T&MU^Mr=TpMLLfcWM+qkuO&3`r&0W@2To%g)<%SGW;|Z|D{N*+Vd0RHCO5+S;66a`TU?Al z4IYT%w!^l>mAt5kC2eaTIW8QkU~C|kn#!90vCEOXSp!SQt@ajyPW2$;8R6RvK;bKR zXj111=Dh`v`H=jk)fEQ#`)dmX3G=$0PnLZif4N3n088@8&lhS&-^f$N;$eY!#Ie7o zmul~v#whf!QP3LU{90EJ|L#=r)8vRSNs;RFw0wjY;D1+33?hP1*`Ej|mh#PD!_gLH zS{YH?x}(i^9;t0dn|2~VB-2i~4Nr!+TiEaM_CN40D<`86Id~E3ur`rvTi`Z^EeNo3 z%TukZqco*hMMdD=Su;by+7!qDS#YoyyB36w28RL6(Be34Ov!Rhs3`1B&~~E+d*P9j z=K1h&F9hWUtO-fJ(c87^$jQR-b+tVF^99@p_|}Tkq@EZ|N>mX#l>U-ax`-`&NnKJB zJS`VbV5fuGyNClHaJi+%tkx7k5FH80MOXsS<@!k>;G~=G9EjfNiVMUNPX+2mu+`P1 zRk3#LME`)rd+n~M^bmO#KD938=;!>e5IJ29LS$F$TyL5rW)%XZ+uR< zWX3q(pR$Ssi}h@%quMp``eZz-8cI$f(qosbJ2}{08#EQPDaJRs6nOCvq?kdN4&`Jg zT3gcnuF4PGr1neN1@pPj&NqB9Eq<n zS$}m)LU-64!0_jPft!AIi@7niOYar$L(d4SiaX{icQl78bEOBnAS@`O=2~c?XzeOZ zDAw*Fe;K&+H) z3veQ`*QYyZvt`Dv8-_d~_d534{aS9~Vr{2;!~0N^FxyWN9vPLljNF1>Qk487s;4eP zc$v!&HP2sYS!oHMAJ_r{HU7al2HzDl$zA|_?E-#maco@>1@l_UG9@SPL%_V`8r`@> zPgHq^N8t?%!M&jbY%?ukE>Nxm4;WbRfhzO=nGWn%A^U$!0V3mU;6;@l6#rS_t1mmh zO2#h!|B#H4V`&Y4Q}doJlB$cV+=^AQ5|em8in&dA(l!8yKl+b~-lpV;J|);p4anmi zRRx4_Y>6?hWo70rPl?7VDP(m7TE?o{IkgYAeAfS|>5T2+n?3gN6 zxCuSCVIi1AqI15fIYFI=H9@o%8_R2z_av6A-ei{u)`98lkmBE@uKLoDT3$eQCNZFR z9NGDv858;psu|;E}41kU?R}wpEHRDVU!} z1!tI5@N-raUL@FnR)8Ajz7AX7aZ{NsL6EcTQbsZJ=um~G;c6%qze@HIRLIGnn#Mbu z)}}PB_j*oYxE@I`BnQ>u9Y`CGIpn$dU?P;04%C{E!dT`}uo^mp*GopX6RM;f6PF_u z_S5OVThbM$dhaBi!ua}=nnw59F`ayzq$-K<=d+d#0&OzK%WY;t5(rCisP(-;(ygfo ze-mGvVI7y9SszUWvkWL+ERP5EB9wweD64h-(tsP0%Sc>Thk0*b7R{+rY!bOMcg8Cc 
z3FL?bY?a4yt)#gQqG)gYBAEm9S$y005v9Zr`)lCln+#qNaQySO(W;(w`7v2PC+V6! zhm4Cj1?8VL7lct%>2k5{D19{|3U2~#O%}K{xwbqi1~usCSysrA(&k5+(l_kpgeFU* zVSd{+(X!e^L*SCgMlaBkUFFOZY!88jpv^5xZ?1E$2UVz1?( z*emE-U9wfthpxH6nFZhh`xfT^UX}beWoXL|km>>Y;{U6gW5Pz%h+qI50nw|?zP>*2 zKO_A7XKkt6eUNNj(B$WmI92X^xHhelRn+$cjhT`3_-ZlnZ#D)E)@b~g!w8B_Ut)mG zigLU@5|``WSlKf*g$aHccX1VYWwu(RF^4}b8>)5#6$Yc#rKO~SA_b$f-omBC+PWi! zu{7&z(Gj(b9`L|0AS)(6d}RrTseipA9#ov7$ju4a=)nVHZRgdeZOk=Gu~X%Vr4}82 zeiqdkid7s?hLQD|I95>V&c84QUX$OCwkPcz34qG1h>UNA!rIy;C5#OQl&6bQPqFP+ zbmX{+jB@2-Ib)WSQ4ch}<}_@%CX;k{-SamayrM%H+X7}Wzr);-Gm1Z%n4TC7i?;y5 z=ehb>JTEIIlHpf}=Yn@q&ffu`c4_ZHYF|Dp4&6my`rOXU|x3qo&{;#12;L7 z^#kk<$Izn9;M)OaRaSfN)W=$~GDtyhS`t2^F%=!Qh@9?ozb8raC~~^m2zcNBZL2~# zWnt#Q5x1&e+Gl*U{}^IPk!n~;SnVG}6dS3jvQ z^Mr1?jvCAbQ__yRV9XcWG7+@f_R~XTt;AyPtm!)3$w!bo?dhRSfiOeLsIrnjnW_>| z(GdEu#T~bE_Sv6)u|*_oyO$hK^z*kfKOzKxF#;&SRBa?gxj_P{2^nlk*4t}`Bzws> zhd;5Pq?X9TyO~!Mt5L9=j#Tl-@f3_PYlNQLKVT=KD+=I2d6{h7*})O>@YVDxGDH1@ zQY#U{%=OFcNZM(d0A-~ Date: Tue, 2 May 2023 16:25:03 +0200 Subject: [PATCH 183/187] :art: soft-fail when pan/zoom locked on camera --- openpype/hosts/maya/plugins/publish/extract_playblast.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index 825a8d38c7..3ceef6f3d3 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -217,7 +217,11 @@ class ExtractPlayblast(publish.Extractor): instance.data["panel"], edit=True, **viewport_defaults ) - cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), pan_zoom) + try: + cmds.setAttr( + "{}.panZoomEnabled".format(preset["camera"]), pan_zoom) + except RuntimeError: + self.log.warning("Cannot restore Pan/Zoom settings.") collected_files = os.listdir(stagingdir) patterns = [clique.PATTERNS["frames"]] From fec104de8e085d0ce0d70e9679c98924338ab3ce Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 2 May 2023 18:49:02 +0200 Subject: [PATCH 184/187] Fix: Locally copied version of last published workfile is not incremented (#4722) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Fix: Locally copied version of last published workfile is not incremented * fix subset first match * correct anatomy name * Fix typo and linting * keep source filepath for further path conformation * fetch also input dependencies of workfile * required changes * lint * fix case only one subset * Enhancement: copy last workfile as reusable methods (#6) * Enhancement: copy last published workfile as reusable methods (WiP) * Added get_host_extensions method, added subset_id and las_version_doc access, added optional arguments to get_last_published_workfile * Plugged in the new methods + minor changes * Added docstrings, last workfile optional argument, and removed unused code * Using new implementation to get local workfile path. Warning: It adds an extra dot to the extension which I need to fix * Refactoring and fixed double dots * Added match subset_id and get representation method, plus clan up * Removed unused vars * Fixed some rebasing errors * delinted unchanged code and renamed get_representation into get_representation_with_task * This time it's really delinted, I hope... 
* Update openpype/modules/sync_server/sync_server.py reprenation isn't the right spelling (: Co-authored-by: Félix David * Changes based on reviews * Fixed non imperative docstring and missing space * Fixed another non imperative docstring * Update openpype/modules/sync_server/sync_server.py Fixed typo Co-authored-by: Félix David Co-authored-by: Hayley GUILLOT Co-authored-by: Félix David * Fix: syntax error * fix single subset case * Restore sync server enabled test in hook * Python2 syntax * renaming and missing key case handling * Fix local workfile overwritten on update in some cases (#7) * Fix: Local workfile overwrite when local version number is higher than published workfile version number (WiP) * Changed regex search, clean up * Readded mistakenly removed newline * lint * remove anticipated functions for cleaner PR * remove funcs from entities.py * change to get_last_workfile_with_version * clean * Update openpype/modules/sync_server/sync_server.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> * removed get_last_published_workfile_path * moved hook to sync server module * fix lint * Refactor - download only if not present * Refactor - change to list instead of set * Refactor - removing unnecessary code last_published_workfile_path must exists or we wouldn't get there. Use version only from that. * Refactor - removing unnecessary imports * Added check for max fail tries * Refactor - cleaned up how to get last workfile * Updated docstrings * Remove unused imports Co-authored-by: Félix David * OP-5466 - run this on more DCC * Updated documentation * Fix - handle hero versions Skip hero versions, look only for versioned published to get max version id. * Hound * Refactor - simplified download_last_published_workfile Logic should be in pre hook * Skip if no profile found * Removed unwanted import * Use collected project_doc Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> * Use cached project_settings Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --------- Co-authored-by: Félix David Co-authored-by: Sharkitty <81646000+Sharkitty@users.noreply.github.com> Co-authored-by: Hayley GUILLOT Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Co-authored-by: Jakub Ježek --- .../pre_copy_last_published_workfile.py | 151 +++++------ openpype/modules/sync_server/sync_server.py | 104 +++++++- .../modules/sync_server/sync_server_module.py | 35 ++- website/docs/module_site_sync.md | 237 ++++++++++++------ 4 files changed, 379 insertions(+), 148 deletions(-) rename openpype/{hooks => modules/sync_server/launch_hooks}/pre_copy_last_published_workfile.py (56%) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/modules/sync_server/launch_hooks/pre_copy_last_published_workfile.py similarity index 56% rename from openpype/hooks/pre_copy_last_published_workfile.py rename to openpype/modules/sync_server/launch_hooks/pre_copy_last_published_workfile.py index 26b43c39cb..bbc220945c 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/modules/sync_server/launch_hooks/pre_copy_last_published_workfile.py @@ -1,15 +1,20 @@ import os import shutil -from time import sleep + from openpype.client.entities import ( - get_last_version_by_subset_id, get_representations, - get_subsets, + get_project ) + from openpype.lib import PreLaunchHook -from openpype.lib.local_settings import get_local_site_id from openpype.lib.profiles_filtering import filter_profiles -from 
openpype.pipeline.load.utils import get_representation_path +from openpype.modules.sync_server.sync_server import ( + download_last_published_workfile, +) +from openpype.pipeline.template_data import get_template_data +from openpype.pipeline.workfile.path_resolving import ( + get_workfile_template_key, +) from openpype.settings.lib import get_project_settings @@ -22,7 +27,11 @@ class CopyLastPublishedWorkfile(PreLaunchHook): # Before `AddLastWorkfileToLaunchArgs` order = -1 - app_groups = ["blender", "photoshop", "tvpaint", "aftereffects"] + # any DCC could be used but TrayPublisher and other specials + app_groups = ["blender", "photoshop", "tvpaint", "aftereffects", + "nuke", "nukeassist", "nukex", "hiero", "nukestudio", + "maya", "harmony", "celaction", "flame", "fusion", + "houdini", "tvpaint"] def execute(self): """Check if local workfile doesn't exist, else copy it. @@ -31,11 +40,11 @@ class CopyLastPublishedWorkfile(PreLaunchHook): 2- Check if workfile in work area doesn't exist 3- Check if published workfile exists and is copied locally in publish 4- Substitute copied published workfile as first workfile + with incremented version by +1 Returns: None: This is a void method. """ - sync_server = self.modules_manager.get("sync_server") if not sync_server or not sync_server.enabled: self.log.debug("Sync server module is not enabled or available") @@ -53,6 +62,7 @@ class CopyLastPublishedWorkfile(PreLaunchHook): # Get data project_name = self.data["project_name"] + asset_name = self.data["asset_name"] task_name = self.data["task_name"] task_type = self.data["task_type"] host_name = self.application.host_name @@ -68,6 +78,8 @@ class CopyLastPublishedWorkfile(PreLaunchHook): "hosts": host_name, } last_workfile_settings = filter_profiles(profiles, filter_data) + if not last_workfile_settings: + return use_last_published_workfile = last_workfile_settings.get( "use_last_published_workfile" ) @@ -92,57 +104,27 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ) return + max_retries = int((sync_server.sync_project_settings[project_name] + ["config"] + ["retry_cnt"])) + self.log.info("Trying to fetch last published workfile...") - project_doc = self.data.get("project_doc") asset_doc = self.data.get("asset_doc") anatomy = self.data.get("anatomy") - # Check it can proceed - if not project_doc and not asset_doc: - return + context_filters = { + "asset": asset_name, + "family": "workfile", + "task": {"name": task_name, "type": task_type} + } - # Get subset id - subset_id = next( - ( - subset["_id"] - for subset in get_subsets( - project_name, - asset_ids=[asset_doc["_id"]], - fields=["_id", "data.family", "data.families"], - ) - if subset["data"].get("family") == "workfile" - # Legacy compatibility - or "workfile" in subset["data"].get("families", {}) - ), - None, - ) - if not subset_id: - self.log.debug( - 'No any workfile for asset "{}".'.format(asset_doc["name"]) - ) - return + workfile_representations = list(get_representations( + project_name, + context_filters=context_filters + )) - # Get workfile representation - last_version_doc = get_last_version_by_subset_id( - project_name, subset_id, fields=["_id"] - ) - if not last_version_doc: - self.log.debug("Subset does not have any versions") - return - - workfile_representation = next( - ( - representation - for representation in get_representations( - project_name, version_ids=[last_version_doc["_id"]] - ) - if representation["context"]["task"]["name"] == task_name - ), - None, - ) - - if not workfile_representation: + if not 
workfile_representations: self.log.debug( 'No published workfile for task "{}" and host "{}".'.format( task_name, host_name @@ -150,28 +132,55 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ) return - local_site_id = get_local_site_id() - sync_server.add_site( - project_name, - workfile_representation["_id"], - local_site_id, - force=True, - priority=99, - reset_timer=True, + filtered_repres = filter( + lambda r: r["context"].get("version") is not None, + workfile_representations ) - - while not sync_server.is_representation_on_site( - project_name, workfile_representation["_id"], local_site_id - ): - sleep(5) - - # Get paths - published_workfile_path = get_representation_path( - workfile_representation, root=anatomy.roots + workfile_representation = max( + filtered_repres, key=lambda r: r["context"]["version"] ) - local_workfile_dir = os.path.dirname(last_workfile) # Copy file and substitute path - self.data["last_workfile_path"] = shutil.copy( - published_workfile_path, local_workfile_dir + last_published_workfile_path = download_last_published_workfile( + host_name, + project_name, + task_name, + workfile_representation, + max_retries, + anatomy=anatomy ) + if not last_published_workfile_path: + self.log.debug( + "Couldn't download {}".format(last_published_workfile_path) + ) + return + + project_doc = self.data["project_doc"] + + project_settings = self.data["project_settings"] + template_key = get_workfile_template_key( + task_name, host_name, project_name, project_settings + ) + + # Get workfile data + workfile_data = get_template_data( + project_doc, asset_doc, task_name, host_name + ) + + extension = last_published_workfile_path.split(".")[-1] + workfile_data["version"] = ( + workfile_representation["context"]["version"] + 1) + workfile_data["ext"] = extension + + anatomy_result = anatomy.format(workfile_data) + local_workfile_path = anatomy_result[template_key]["path"] + + # Copy last published workfile to local workfile directory + shutil.copy( + last_published_workfile_path, + local_workfile_path, + ) + + self.data["last_workfile_path"] = local_workfile_path + # Keep source filepath for further path conformation + self.data["source_filepath"] = last_published_workfile_path diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index 5b873a37cf..d1d5c2863d 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -3,10 +3,15 @@ import os import asyncio import threading import concurrent.futures -from concurrent.futures._base import CancelledError +from time import sleep from .providers import lib +from openpype.client.entity_links import get_linked_representation_id from openpype.lib import Logger +from openpype.lib.local_settings import get_local_site_id +from openpype.modules.base import ModulesManager +from openpype.pipeline import Anatomy +from openpype.pipeline.load.utils import get_representation_path_with_anatomy from .utils import SyncStatus, ResumableError @@ -189,6 +194,98 @@ def _site_is_working(module, project_name, site_name, site_config): return handler.is_active() +def download_last_published_workfile( + host_name: str, + project_name: str, + task_name: str, + workfile_representation: dict, + max_retries: int, + anatomy: Anatomy = None, +) -> str: + """Download the last published workfile + + Args: + host_name (str): Host name. + project_name (str): Project name. + task_name (str): Task name. + workfile_representation (dict): Workfile representation. 
+ max_retries (int): complete file failure only after so many attempts + anatomy (Anatomy, optional): Anatomy (Used for optimization). + Defaults to None. + + Returns: + str: last published workfile path localized + """ + + if not anatomy: + anatomy = Anatomy(project_name) + + # Get sync server module + sync_server = ModulesManager().modules_by_name.get("sync_server") + if not sync_server or not sync_server.enabled: + print("Sync server module is disabled or unavailable.") + return + + if not workfile_representation: + print( + "Not published workfile for task '{}' and host '{}'.".format( + task_name, host_name + ) + ) + return + + last_published_workfile_path = get_representation_path_with_anatomy( + workfile_representation, anatomy + ) + if (not last_published_workfile_path or + not os.path.exists(last_published_workfile_path)): + return + + # If representation isn't available on remote site, then return. + if not sync_server.is_representation_on_site( + project_name, + workfile_representation["_id"], + sync_server.get_remote_site(project_name), + ): + print( + "Representation for task '{}' and host '{}'".format( + task_name, host_name + ) + ) + return + + # Get local site + local_site_id = get_local_site_id() + + # Add workfile representation to local site + representation_ids = {workfile_representation["_id"]} + representation_ids.update( + get_linked_representation_id( + project_name, repre_id=workfile_representation["_id"] + ) + ) + for repre_id in representation_ids: + if not sync_server.is_representation_on_site(project_name, repre_id, + local_site_id): + sync_server.add_site( + project_name, + repre_id, + local_site_id, + force=True, + priority=99 + ) + sync_server.reset_timer() + print("Starting to download:{}".format(last_published_workfile_path)) + # While representation unavailable locally, wait. + while not sync_server.is_representation_on_site( + project_name, workfile_representation["_id"], local_site_id, + max_retries=max_retries + ): + sleep(5) + + return last_published_workfile_path + + class SyncServerThread(threading.Thread): """ Separate thread running synchronization server with asyncio loop. @@ -358,7 +455,6 @@ class SyncServerThread(threading.Thread): duration = time.time() - start_time self.log.debug("One loop took {:.2f}s".format(duration)) - delay = self.module.get_loop_delay(project_name) self.log.debug( "Waiting for {} seconds to new loop".format(delay) @@ -370,8 +466,8 @@ class SyncServerThread(threading.Thread): self.log.warning( "ConnectionResetError in sync loop, trying next loop", exc_info=True) - except CancelledError: - # just stopping server + except asyncio.exceptions.CancelledError: + # cancelling timer pass except ResumableError: self.log.warning( diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 5a4fa07e98..b85b045bd9 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -838,6 +838,18 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return ret_dict + def get_launch_hook_paths(self): + """Implementation for applications launch hooks. 
+ + Returns: + (str): full absolut path to directory with hooks for the module + """ + + return os.path.join( + os.path.dirname(os.path.abspath(__file__)), + "launch_hooks" + ) + # Needs to be refactored after Settings are updated # # Methods for Settings to get appriate values to fill forms # def get_configurable_items(self, scope=None): @@ -1045,9 +1057,23 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.sync_server_thread.reset_timer() def is_representation_on_site( - self, project_name, representation_id, site_name + self, project_name, representation_id, site_name, max_retries=None ): - """Checks if 'representation_id' has all files avail. on 'site_name'""" + """Checks if 'representation_id' has all files avail. on 'site_name' + + Args: + project_name (str) + representation_id (str) + site_name (str) + max_retries (int) (optional) - provide only if method used in while + loop to bail out + Returns: + (bool): True if 'representation_id' has all files correctly on the + 'site_name' + Raises: + (ValueError) Only If 'max_retries' provided if upload/download + failed too many times to limit infinite loop check. + """ representation = get_representation_by_id(project_name, representation_id, fields=["_id", "files"]) @@ -1060,6 +1086,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule): if site["name"] != site_name: continue + if max_retries: + tries = self._get_tries_count_from_rec(site) + if tries >= max_retries: + raise ValueError("Failed too many times") + if (site.get("progress") or site.get("error") or not site.get("created_dt")): return False diff --git a/website/docs/module_site_sync.md b/website/docs/module_site_sync.md index 3e5794579c..68f56cb548 100644 --- a/website/docs/module_site_sync.md +++ b/website/docs/module_site_sync.md @@ -7,80 +7,112 @@ sidebar_label: Site Sync import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; +Site Sync allows users and studios to synchronize published assets between +multiple 'sites'. Site denotes a storage location, +which could be a physical disk, server, cloud storage. To be able to use site +sync, it first needs to be configured. -:::warning -**This feature is** currently **in a beta stage** and it is not recommended to rely on it fully for production. -::: - -Site Sync allows users and studios to synchronize published assets between multiple 'sites'. Site denotes a storage location, -which could be a physical disk, server, cloud storage. To be able to use site sync, it first needs to be configured. - -The general idea is that each user acts as an individual site and can download and upload any published project files when they are needed. that way, artist can have access to the whole project, but only every store files that are relevant to them on their home workstation. +The general idea is that each user acts as an individual site and can download +and upload any published project files when they are needed. that way, artist +can have access to the whole project, but only every store files that are +relevant to them on their home workstation. :::note -At the moment site sync is only able to deal with publishes files. No workfiles will be synchronized unless they are published. We are working on making workfile synchronization possible as well. +At the moment site sync is only able to deal with publishes files. No workfiles +will be synchronized unless they are published. We are working on making +workfile synchronization possible as well. 
::: ## System Settings -To use synchronization, *Site Sync* needs to be enabled globally in **OpenPype Settings/System/Modules/Site Sync**. +To use synchronization, *Site Sync* needs to be enabled globally in **OpenPype +Settings/System/Modules/Site Sync**. ![Configure module](assets/site_sync_system.png) -### Sites +### Sites By default there are two sites created for each OpenPype installation: -- **studio** - default site - usually a centralized mounted disk accessible to all artists. Studio site is used if Site Sync is disabled. -- **local** - each workstation or server running OpenPype Tray receives its own with unique site name. Workstation refers to itself as "local"however all other sites will see it under it's unique ID. -Artists can explore their site ID by opening OpenPype Info tool by clicking on a version number in the tray app. +- **studio** - default site - usually a centralized mounted disk accessible to + all artists. Studio site is used if Site Sync is disabled. +- **local** - each workstation or server running OpenPype Tray receives its own + with unique site name. Workstation refers to itself as "local"however all + other sites will see it under it's unique ID. -Many different sites can be created and configured on the system level, and some or all can be assigned to each project. +Artists can explore their site ID by opening OpenPype Info tool by clicking on +a version number in the tray app. -Each OpenPype Tray app works with two sites at one time. (Sites can be the same, and no syncing is done in this setup). +Many different sites can be created and configured on the system level, and +some or all can be assigned to each project. -Sites could be configured differently per project basis. +Each OpenPype Tray app works with two sites at one time. (Sites can be the +same, and no syncing is done in this setup). -Each new site needs to be created first in `System Settings`. Most important feature of site is its Provider, select one from already prepared Providers. +Sites could be configured differently per project basis. -#### Alternative sites +Each new site needs to be created first in `System Settings`. Most important +feature of site is its Provider, select one from already prepared Providers. + +#### Alternative sites This attribute is meant for special use cases only. -One of the use cases is sftp site vendoring (exposing) same data as regular site (studio). Each site is accessible for different audience. 'studio' for artists in a studio via shared disk, 'sftp' for externals via sftp server with mounted 'studio' drive. +One of the use cases is sftp site vendoring (exposing) same data as regular +site (studio). Each site is accessible for different audience. 'studio' for +artists in a studio via shared disk, 'sftp' for externals via sftp server with +mounted 'studio' drive. -Change of file status on one site actually means same change on 'alternate' site occurred too. (eg. artists publish to 'studio', 'sftp' is using -same location >> file is accessible on 'sftp' site right away, no need to sync it anyhow.) +Change of file status on one site actually means same change on 'alternate' +site occurred too. (eg. artists publish to 'studio', 'sftp' is using +same location >> file is accessible on 'sftp' site right away, no need to sync +it anyhow.) ##### Example + ![Configure module](assets/site_sync_system_sites.png) -Admin created new `sftp` site which is handled by `SFTP` provider. 
Somewhere in the studio SFTP server is deployed on a machine that has access to `studio` drive. +Admin created new `sftp` site which is handled by `SFTP` provider. Somewhere in +the studio SFTP server is deployed on a machine that has access to `studio` +drive. Alternative sites work both way: + - everything published to `studio` is accessible on a `sftp` site too -- everything published to `sftp` (most probably via artist's local disk - artists publishes locally, representation is marked to be synced to `sftp`. Immediately after it is synced, it is marked to be available on `studio` too for artists in the studio to use.) +- everything published to `sftp` (most probably via artist's local disk - + artists publishes locally, representation is marked to be synced to `sftp`. + Immediately after it is synced, it is marked to be available on `studio` too + for artists in the studio to use.) ## Project Settings -Sites need to be made available for each project. Of course this is possible to do on the default project as well, in which case all other projects will inherit these settings until overridden explicitly. +Sites need to be made available for each project. Of course this is possible to +do on the default project as well, in which case all other projects will +inherit these settings until overridden explicitly. You'll find the setting in **Settings/Project/Global/Site Sync** -The attributes that can be configured will vary between sites and their providers. +The attributes that can be configured will vary between sites and their +providers. ## Local settings -Each user should configure root folder for their 'local' site via **Local Settings** in OpenPype Tray. This folder will be used for all files that the user publishes or downloads while working on a project. Artist has the option to set the folder as "default"in which case it is used for all the projects, or it can be set on a project level individually. +Each user should configure root folder for their 'local' site via **Local +Settings** in OpenPype Tray. This folder will be used for all files that the +user publishes or downloads while working on a project. Artist has the option +to set the folder as "default"in which case it is used for all the projects, or +it can be set on a project level individually. -Artists can also override which site they use as active and remote if need be. +Artists can also override which site they use as active and remote if need be. ![Local overrides](assets/site_sync_local_setting.png) - ## Providers -Each site implements a so called `provider` which handles most common operations (list files, copy files etc.) and provides interface with a particular type of storage. (disk, gdrive, aws, etc.) -Multiple configured sites could share the same provider with different settings (multiple mounted disks - each disk can be a separate site, while +Each site implements a so called `provider` which handles most common +operations (list files, copy files etc.) and provides interface with a +particular type of storage. (disk, gdrive, aws, etc.) +Multiple configured sites could share the same provider with different +settings (multiple mounted disks - each disk can be a separate site, while all share the same provider). **Currently implemented providers:** @@ -89,21 +121,30 @@ all share the same provider). Handles files stored on disk storage. -Local drive provider is the most basic one that is used for accessing all standard hard disk storage scenarios. 
It will work with any storage that can be mounted on your system in a standard way. This could correspond to a physical external hard drive, network mounted storage, internal drive or even VPN connected network drive. It doesn't care about how the drive is mounted, but you must be able to point to it with a simple directory path. +Local drive provider is the most basic one that is used for accessing all +standard hard disk storage scenarios. It will work with any storage that can be +mounted on your system in a standard way. This could correspond to a physical +external hard drive, network mounted storage, internal drive or even VPN +connected network drive. It doesn't care about how the drive is mounted, but +you must be able to point to it with a simple directory path. Default sites `local` and `studio` both use local drive provider. - ### Google Drive -Handles files on Google Drive (this). GDrive is provided as a production example for implementing other cloud providers +Handles files on Google Drive (this). GDrive is provided as a production +example for implementing other cloud providers -Let's imagine a small globally distributed studio which wants all published work for all their freelancers uploaded to Google Drive folder. +Let's imagine a small globally distributed studio which wants all published +work for all their freelancers uploaded to Google Drive folder. For this use case admin needs to configure: -- how many times it tries to synchronize file in case of some issue (network, permissions) + +- how many times it tries to synchronize file in case of some issue (network, + permissions) - how often should synchronization check for new assets -- sites for synchronization - 'local' and 'gdrive' (this can be overridden in local settings) +- sites for synchronization - 'local' and 'gdrive' (this can be overridden in + local settings) - user credentials - root folder location on Google Drive side @@ -111,30 +152,43 @@ Configuration would look like this: ![Configure project](assets/site_sync_project_settings.png) -*Site Sync* for Google Drive works using its API: https://developers.google.com/drive/api/v3/about-sdk +*Site Sync* for Google Drive works using its +API: https://developers.google.com/drive/api/v3/about-sdk -To configure Google Drive side you would need to have access to Google Cloud Platform project: https://console.cloud.google.com/ +To configure Google Drive side you would need to have access to Google Cloud +Platform project: https://console.cloud.google.com/ To get working connection to Google Drive there are some necessary steps: -- first you need to enable GDrive API: https://developers.google.com/drive/api/v3/enable-drive-api -- next you need to create user, choose **Service Account** (for basic configuration no roles for account are necessary) + +- first you need to enable GDrive + API: https://developers.google.com/drive/api/v3/enable-drive-api +- next you need to create user, choose **Service Account** (for basic + configuration no roles for account are necessary) - add new key for created account and download .json file with credentials -- share destination folder on the Google Drive with created account (directly in GDrive web application) -- add new site back in OpenPype Settings, name as you want, provider needs to be 'gdrive' +- share destination folder on the Google Drive with created account (directly + in GDrive web application) +- add new site back in OpenPype Settings, name as you want, provider needs to + be 'gdrive' - distribute credentials file via 
shared mounted disk location

:::note
-If you are using regular personal GDrive for testing don't forget adding `/My Drive` as the prefix in root configuration. Business accounts and share drives don't need this.
+If you are using regular personal GDrive for testing, don't forget to add
+`/My Drive` as the prefix in the root configuration. Business accounts and
+share drives don't need this.
:::

### SFTP

-SFTP provider is used to connect to SFTP server. Currently authentication with `user:password` or `user:ssh key` is implemented.
-Please provide only one combination, don't forget to provide password for ssh key if ssh key was created with a passphrase.
+SFTP provider is used to connect to an SFTP server. Currently authentication
+with `user:password` or `user:ssh key` is implemented.
+Please provide only one combination; don't forget to provide the password for
+the ssh key if the ssh key was created with a passphrase.

-(SFTP connection could be a bit finicky, use FileZilla or WinSCP for testing connection, it will be mush faster.)
+(SFTP connection could be a bit finicky, use FileZilla or WinSCP for testing
+the connection, it will be much faster.)

-Beware that ssh key expects OpenSSH format (`.pem`) not a Putty format (`.ppk`)!
+Beware that the ssh key is expected in OpenSSH format (`.pem`), not Putty
+format (`.ppk`)!

#### How to set SFTP site

@@ -143,60 +197,101 @@ Beware that ssh key expects OpenSSH format (`.pem`) not a Putty format (`.ppk`)!

![Enable syncing and create site](assets/site_sync_sftp_system.png)

-- In Projects setting enable Site Sync (on default project - all project will be synched, or on specific project)
-- Configure SFTP connection and destination folder on a SFTP server (in screenshot `/upload`)
+- In Project settings enable Site Sync (on the default project - all projects
+  will be synced, or on a specific project)
+- Configure the SFTP connection and destination folder on the SFTP server (in
+  the screenshot `/upload`)

![SFTP connection](assets/site_sync_project_sftp_settings.png)
- 
+
-- if you want to force syncing between local and sftp site for all users, use combination `active site: local`, `remote site: NAME_OF_SFTP_SITE`
-- if you want to allow only specific users to use SFTP syncing (external users, not located in the office), use `active site: studio`, `remote site: studio`.
+- if you want to force syncing between the local and sftp site for all users,
+  use the combination `active site: local`, `remote site: NAME_OF_SFTP_SITE`
+- if you want to allow only specific users to use SFTP syncing (external users,
+  not located in the office), use `active site: studio`, `remote site: studio`.

![Select active and remote site on a project](assets/site_sync_sftp_project_setting_not_forced.png)

-- Each artist can decide and configure syncing from his/her local to SFTP via `Local Settings`
+- Each artist can decide and configure syncing from his/her local site to SFTP
+  via `Local Settings`

![Select active and remote site on a project](assets/site_sync_sftp_settings_local.png)
- 
+
### Custom providers

-If a studio needs to use other services for cloud storage, or want to implement totally different storage providers, they can do so by writing their own provider plugin. We're working on a developer documentation, however, for now we recommend looking at `abstract_provider.py`and `gdrive.py` inside `openpype/modules/sync_server/providers` and using it as a template.
+If a studio needs to use other services for cloud storage, or wants to implement
+totally different storage providers, they can do so by writing their own
+provider plugin. We're working on developer documentation, however, for now
+we recommend looking at `abstract_provider.py` and `gdrive.py`
+inside `openpype/modules/sync_server/providers` and using them as a template.
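For orientation only, a provider plugin is essentially a class that exposes one storage backend through a small file-transfer interface. The sketch below illustrates that shape with a plain folder standing in for the remote storage; the class layout and the method names (`is_active`, `upload_file`, `download_file`, `list_folder`) are assumptions made for this example, not the actual `abstract_provider.py` API, so check that module for the exact methods a provider has to implement.

```python
# Illustrative sketch only; the method names are assumptions, not the real
# abstract_provider.py interface. See openpype/modules/sync_server/providers.
import os
import shutil


class FolderProvider(object):
    """Example provider-like class that treats a mounted folder as 'remote'."""

    def __init__(self, project_name, site_name, presets=None):
        self.project_name = project_name
        self.site_name = site_name
        self.presets = presets or {}
        # Root of the storage this provider talks to (hypothetical setting).
        self.root = self.presets.get("root", "/mnt/remote_storage")

    def is_active(self):
        # Cheap reachability check before any syncing is attempted.
        return os.path.isdir(self.root)

    def upload_file(self, source_path, target_path, overwrite=False):
        # Copy a local file into the "remote" storage.
        target = os.path.join(self.root, target_path)
        if os.path.exists(target) and not overwrite:
            raise FileExistsError(target)
        os.makedirs(os.path.dirname(target), exist_ok=True)
        shutil.copy(source_path, target)
        return target

    def download_file(self, source_path, local_path, overwrite=False):
        # Copy a file from the "remote" storage to a local path.
        source = os.path.join(self.root, source_path)
        if os.path.exists(local_path) and not overwrite:
            raise FileExistsError(local_path)
        os.makedirs(os.path.dirname(local_path), exist_ok=True)
        shutil.copy(source, local_path)
        return local_path

    def list_folder(self, folder_path):
        # List contents of a folder on the "remote" storage.
        folder = os.path.join(self.root, folder_path)
        return os.listdir(folder) if os.path.isdir(folder) else []
```

A real provider would read these values from the site settings and register itself with the Sync Server module instead of hard-coding a root path.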
### Running Site Sync in background

-Site Sync server synchronizes new published files from artist machine into configured remote location by default.
+Site Sync server synchronizes new published files from the artist machine into
+the configured remote location by default.

-There might be a use case where you need to synchronize between "non-artist" sites, for example between studio site and cloud. In this case
-you need to run Site Sync as a background process from a command line (via service etc) 24/7.
+There might be a use case where you need to synchronize between "non-artist"
+sites, for example between the studio site and cloud. In this case
+you need to run Site Sync as a background process from a command line (via a
+service etc.) 24/7.

-To configure all sites where all published files should be synced eventually you need to configure `project_settings/global/sync_server/config/always_accessible_on` property in Settings (per project) first.
+To configure all sites where all published files should eventually be synced,
+you need to
+configure the `project_settings/global/sync_server/config/always_accessible_on`
+property in Settings (per project) first.

![Set another non artist remote site](assets/site_sync_always_on.png)

This is an example of:
+
- Site Sync is enabled for a project
-- default active and remote sites are set to `studio` - eg. standard process: everyone is working in a studio, publishing to shared location etc.
-- (but this also allows any of the artists to work remotely, they would change their active site in their own Local Settings to `local` and configure local root.
- This would result in everything artist publishes is saved first onto his local folder AND synchronized to `studio` site eventually.)
+- default active and remote sites are set to `studio` - eg. the standard process:
+  everyone is working in the studio, publishing to a shared location etc.
+- (but this also allows any of the artists to work remotely, they would change
+  their active site in their own Local Settings to `local` and configure a local
+  root.
+  This would result in everything the artist publishes being saved first to
+  their local folder AND synchronized to the `studio` site eventually.)
- everything exported must also be eventually uploaded to `sftp` site

-This eventual synchronization between `studio` and `sftp` sites must be physically handled by background process.
+This eventual synchronization between `studio` and `sftp` sites must be
+physically handled by a background process.

-As current implementation relies heavily on Settings and Local Settings, background process for a specific site ('studio' for example) must be configured via Tray first to `syncserver` command to work.
+As the current implementation relies heavily on Settings and Local Settings,
+the background process for a specific site ('studio' for example) must first be
+configured via Tray for the `syncserver` command to work.

To do this:

-- run OP `Tray` with environment variable OPENPYPE_LOCAL_ID set to name of
+- run OP `Tray` with environment variable OPENPYPE_LOCAL_ID set to name of + active (source) site. In most use cases it would be studio (for cases of + backups of everything published to studio site to different cloud site etc.) - start `Tray` -- check `Local ID` in information dialog after clicking on version number in the Tray +- check `Local ID` in information dialog after clicking on version number in + the Tray - open `Local Settings` in the `Tray` - configure for each project necessary active site and remote site - close `Tray` - run OP from a command line with `syncserver` and `--active_site` arguments - -This is an example how to trigger background syncing process where active (source) site is `studio`. -(It is expected that OP is installed on a machine, `openpype_console` is on PATH. If not, add full path to executable. +This is an example how to trigger background syncing process where active ( +source) site is `studio`. +(It is expected that OP is installed on a machine, `openpype_console` is on +PATH. If not, add full path to executable. ) + ```shell openpype_console syncserver --active_site studio -``` \ No newline at end of file +``` + +### Syncing of last published workfile + +Some DCC might have enabled +in `project_setting/global/tools/Workfiles/last_workfile_on_startup`, eg. open +DCC with last opened workfile. + +Flag `use_last_published_workfile` tells that last published workfile should be +used if no workfile is present locally. +This use case could happen if artists starts working on new task locally, +doesn't have any workfile present. In that case last published will be +synchronized locally and its version bumped by 1 (as workfile's version is +always +1 from published version). \ No newline at end of file From c542934da45f6dc50bb8ceabfb23f4ff822f016b Mon Sep 17 00:00:00 2001 From: Ynbot Date: Wed, 3 May 2023 03:25:25 +0000 Subject: [PATCH 185/187] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 72297a4430..9832ff4747 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.15.6-nightly.2" +__version__ = "3.15.6-nightly.3" From bc92395a7eb4fd98deb33299adca314b6c5ebfa0 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 3 May 2023 15:44:37 +0200 Subject: [PATCH 186/187] update bug report workflow --- .github/workflows/update_bug_report.yml | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/.github/workflows/update_bug_report.yml b/.github/workflows/update_bug_report.yml index 7a1bfb7bfd..1e5da414bb 100644 --- a/.github/workflows/update_bug_report.yml +++ b/.github/workflows/update_bug_report.yml @@ -18,10 +18,16 @@ jobs: uses: ynput/gha-populate-form-version@main with: github_token: ${{ secrets.YNPUT_BOT_TOKEN }} - github_user: ${{ secrets.CI_USER }} - github_email: ${{ secrets.CI_EMAIL }} registry: github dropdown: _version limit_to: 100 form: .github/ISSUE_TEMPLATE/bug_report.yml commit_message: 'chore(): update bug report / version' + dry_run: no-push + + - name: Push to protected develop branch + uses: CasperWA/push-protected@v2.10.0 + with: + token: ${{ secrets.YNPUT_BOT_TOKEN }} + branch: develop + unprotect_reviews: true \ No newline at end of file From 3d870ef794c8fbdf7bf6ac17351a7aaaeaa1811a Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Wed, 3 May 2023 13:45:37 +0000 Subject: [PATCH 187/187] chore(): 
update bug report / version --- .github/ISSUE_TEMPLATE/bug_report.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index fe86a8400b..8328a35cad 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -35,6 +35,9 @@ body: label: Version description: What version are you running? Look to OpenPype Tray options: + - 3.15.6-nightly.3 + - 3.15.6-nightly.2 + - 3.15.6-nightly.1 - 3.15.5 - 3.15.5-nightly.2 - 3.15.5-nightly.1 @@ -132,9 +135,6 @@ body: - 3.14.0 - 3.14.0-nightly.1 - 3.13.1-nightly.3 - - 3.13.1-nightly.2 - - 3.13.1-nightly.1 - - 3.13.0 validations: required: true - type: dropdown