mirror of
https://github.com/ynput/ayon-core.git
synced 2025-12-24 12:54:40 +01:00
Compare commits
849 commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f9bbab9944 | ||
|
|
826d22b166 | ||
|
|
b6b2726795 | ||
|
|
1612b0297d | ||
|
|
a802285a6c | ||
|
|
07edce9c9c | ||
|
|
0dc34c32d8 | ||
|
|
7485d99cf6 | ||
|
|
3d0cd51e65 | ||
|
|
8f1eebfcbf | ||
|
|
f46f1d2e8d | ||
|
|
92d4da9efa | ||
|
|
1be1a30b38 | ||
|
|
c55c6a2675 | ||
|
|
818a9f21f3 | ||
|
|
a83ebe3c8d | ||
|
|
0b6c0f3de9 | ||
|
|
4b4ccad085 | ||
|
|
11f5c4ba8b | ||
|
|
ef93ab833a | ||
|
|
9e067348bd | ||
|
|
69e4fb011a | ||
|
|
46da65bf82 | ||
|
|
15af2c051b | ||
|
|
04958c3429 | ||
|
|
301b603775 | ||
|
|
80f303c735 | ||
|
|
b22fbe3e77 | ||
|
|
b77b0583dd | ||
|
|
bb35eccb57 | ||
|
|
4051d679dd | ||
|
|
a9af964f4c | ||
|
|
3fe508e773 | ||
|
|
9668623005 | ||
|
|
3e100408c3 | ||
|
|
e462dca889 | ||
|
|
69e003c065 | ||
|
|
963e11e407 | ||
|
|
1daba76e3a | ||
|
|
5982ad7944 | ||
|
|
de7b49e68f | ||
|
|
2baffc253c | ||
|
|
3e3cd49bea | ||
|
|
108286aa34 | ||
|
|
8047c70af2 | ||
|
|
cd1c2cdb0f | ||
|
|
6a3f28cfb8 | ||
|
|
ad36a449fd | ||
|
|
67d9ec366c | ||
|
|
d1db95d8cb | ||
|
|
e1dc93cb44 | ||
|
|
73cc4c53b4 | ||
|
|
f4bd5d49f9 | ||
|
|
78df19df44 | ||
|
|
5404153b94 | ||
|
|
e8635725fa | ||
|
|
f2e014b3f8 | ||
|
|
ae7726bdef | ||
|
|
856a58dc35 | ||
|
|
c53a2f68e5 | ||
|
|
096a5a809e | ||
|
|
09364a4f7e | ||
|
|
3f72115a5e | ||
|
|
7313025572 | ||
|
|
b056d974f2 | ||
|
|
f7a2aa2792 | ||
|
|
d80fc97604 | ||
|
|
0b14100976 | ||
|
|
e2c9cacdd3 | ||
|
|
18a4461e83 | ||
|
|
46791bc671 | ||
|
|
e32b54f911 | ||
|
|
a90eb2d54a | ||
|
|
ea59a764cd | ||
|
|
448d32fa42 | ||
|
|
0f13d7a8e1 | ||
|
|
46a8db48e7 | ||
|
|
a88e3bab77 | ||
|
|
cc712739ba | ||
|
|
e03c39dce1 | ||
|
|
1614737053 | ||
|
|
74971bd3dc | ||
|
|
69de145bb7 | ||
|
|
19f84805bd | ||
|
|
92d01a2ceb | ||
|
|
34004ac538 | ||
|
|
362995d5f7 | ||
|
|
f9de1d13ba | ||
|
|
c86631fcf3 | ||
|
|
0cfc959875 | ||
|
|
a2387d1856 | ||
|
|
5ca04b0d6e | ||
|
|
bd2e26ea50 | ||
|
|
dbdc4c590b | ||
|
|
3c0dd4335e | ||
|
|
9cb97029bf | ||
|
|
e3b94654f8 | ||
|
|
e3fa6e446e | ||
|
|
6b58d4fba7 | ||
|
|
6558af5ff1 | ||
|
|
3dacfec4ec | ||
|
|
cb06323e96 | ||
|
|
dbdda81f94 | ||
|
|
527d1d6c84 | ||
|
|
b39e09142f | ||
|
|
e19ca9e1d1 | ||
|
|
be2dd92a7e | ||
|
|
e2251ed76c | ||
|
|
c93eb31b54 | ||
|
|
2871ecac7d | ||
|
|
7e1720d740 | ||
|
|
6f534f4ff0 | ||
|
|
9da077b52f | ||
|
|
52e4932c97 | ||
|
|
7ca1a67d82 | ||
|
|
65791a1d9f | ||
|
|
4faf61dd22 | ||
|
|
d0034b6007 | ||
|
|
2e1c9a3afb | ||
|
|
70328e53c6 | ||
|
|
7fa5b39ef6 | ||
|
|
deb93bc95b | ||
|
|
237cee6593 | ||
|
|
15b0192d4e | ||
|
|
ea2642ab15 | ||
|
|
8fe830f5de | ||
|
|
7329725979 | ||
|
|
7d248880cc | ||
|
|
f03ae1bc15 | ||
|
|
41fa48dbe7 | ||
|
|
3dbba063ca | ||
|
|
4d8d9078b8 | ||
|
|
55eb4cccbe | ||
|
|
2a7316b262 | ||
|
|
fef45cebb3 | ||
|
|
f9ca97ec71 | ||
|
|
af901213a2 | ||
|
|
11ecc69b35 | ||
|
|
775b0724bf | ||
|
|
82427cb004 | ||
|
|
197b74d1af | ||
|
|
ec5766f656 | ||
|
|
8865e7a2b4 | ||
|
|
6ec302d01b | ||
|
|
e6eaf87272 | ||
|
|
738d9cf8d8 | ||
|
|
061e9c5015 | ||
|
|
c1f36199c2 | ||
|
|
9f6840a18d | ||
|
|
bbff056268 | ||
|
|
9ae72a1b21 | ||
|
|
048cbddb43 | ||
|
|
b6709f9859 | ||
|
|
2aaca57672 | ||
|
|
3c22320c43 | ||
|
|
505021344b | ||
|
|
55c74196ab | ||
|
|
7a5d6ae77e | ||
|
|
ec510ab149 | ||
|
|
5e674844b5 | ||
|
|
e3206796a7 | ||
|
|
aff0ecf436 | ||
|
|
c52a7e367b | ||
|
|
82dd0d0a76 | ||
|
|
4eece5e6e9 | ||
|
|
9cdecbdee0 | ||
|
|
ced9eadd3d | ||
|
|
17b09d608b | ||
|
|
bceb645a80 | ||
|
|
d4e5f96b3b | ||
|
|
5462c9516a | ||
|
|
97a8b13a4e | ||
|
|
31e6b5a139 | ||
|
|
9e34f628e6 | ||
|
|
b1be956994 | ||
|
|
f0e603fe7c | ||
|
|
3d321b4896 | ||
|
|
8103135efd | ||
|
|
8076615a5f | ||
|
|
721c1fdd8d | ||
|
|
9ade73fb27 | ||
|
|
f3a2cad425 | ||
|
|
de3971ed56 | ||
|
|
ab78158d6e | ||
|
|
5c17102d16 | ||
|
|
dde471332f | ||
|
|
faff50ce33 | ||
|
|
94dc9d0484 | ||
|
|
44251c93c7 | ||
|
|
8624dcce60 | ||
|
|
a4ae90c16a | ||
|
|
647d91e288 | ||
|
|
e0597ac6de | ||
|
|
9b35dd6cfc | ||
|
|
989c54001c | ||
|
|
699673bbf2 | ||
|
|
f0bd2b7e98 | ||
|
|
fb2df33970 | ||
|
|
165f9c7e70 | ||
|
|
bd81f40156 | ||
|
|
00102dae85 | ||
|
|
ad0cbad663 | ||
|
|
cdac62aae7 | ||
|
|
14bead732c | ||
|
|
f1288eb096 | ||
|
|
d859ea2fc3 | ||
|
|
6cfb22a4b5 | ||
|
|
a4559fe79e | ||
|
|
89129dfeb4 | ||
|
|
abc08e63c1 | ||
|
|
cf28f96eda | ||
|
|
b1db949ecc | ||
|
|
f665528ee7 | ||
|
|
074c43ff68 | ||
|
|
a657022919 | ||
|
|
f7f0005511 | ||
|
|
32bc4248fc | ||
|
|
fa6e8b4478 | ||
|
|
a59b264496 | ||
|
|
56df03848f | ||
|
|
c7672fd511 | ||
|
|
523ac20121 | ||
|
|
24ff7f02d6 | ||
|
|
aec589d9dd | ||
|
|
c7e9789582 | ||
|
|
49b736fb68 | ||
|
|
c0ed22c4d7 | ||
|
|
7f40b6c6a2 | ||
|
|
2885ed1805 | ||
|
|
dabeb0d552 | ||
|
|
a187a7fc56 | ||
|
|
a426baf1a1 | ||
|
|
d9344239dd | ||
|
|
bfca3175d6 | ||
|
|
2928c62d2b | ||
|
|
daa9effd04 | ||
|
|
0ab00dbb4e | ||
|
|
ad83f76318 | ||
|
|
08c03e980b | ||
|
|
3edb0148cd | ||
|
|
b39dd35af9 | ||
|
|
74dc83d14a | ||
|
|
a2a5f54857 | ||
|
|
b25e3e27ad | ||
|
|
0ce6e70547 | ||
|
|
83c4350277 | ||
|
|
7592bdcfcb | ||
|
|
1943b897da | ||
|
|
acd1fcb0cf | ||
|
|
a9c7785700 | ||
|
|
e413d88234 | ||
|
|
206bcfe717 | ||
|
|
1e66017861 | ||
|
|
2efda3d3fe | ||
|
|
6ade0bb665 | ||
|
|
4e65d2a524 | ||
|
|
fc19076839 | ||
|
|
2276f06733 | ||
|
|
c45fd481b3 | ||
|
|
b0c5b171c9 | ||
|
|
43b557d95e | ||
|
|
85668a1b74 | ||
|
|
055bf3fc17 | ||
|
|
cd499f4951 | ||
|
|
215d077f31 | ||
|
|
31b65f22ae | ||
|
|
0e34fb6474 | ||
|
|
79aa108da7 | ||
|
|
0589733e21 | ||
|
|
930f3b3227 | ||
|
|
3edc31990f | ||
|
|
65fcdd6c07 | ||
|
|
f1cbd3436a | ||
|
|
047464dc8c | ||
|
|
58e6ab4419 | ||
|
|
7f92fb0b81 | ||
|
|
1432d61aca | ||
|
|
9c93e6697d | ||
|
|
617887d0c3 | ||
|
|
5a610b39b7 | ||
|
|
0b29621760 | ||
|
|
2339f3f9aa | ||
|
|
235ba786ea | ||
|
|
b0d153ce87 | ||
|
|
f1fa37a431 | ||
|
|
930454ad08 | ||
|
|
7224969180 | ||
|
|
8a0e1afcb3 | ||
|
|
1238d8a18c | ||
|
|
ddb29c857b | ||
|
|
151950b18c | ||
|
|
bad910ee03 | ||
|
|
f5a139e61e | ||
|
|
e13d54bf8a | ||
|
|
48613ab845 | ||
|
|
64bfd5e132 | ||
|
|
07d88cd639 | ||
|
|
feb1612200 | ||
|
|
a1bfdc94ba | ||
|
|
61b9ce3cfa | ||
|
|
53b84d7dcd | ||
|
|
0b942a062f | ||
|
|
a7e02c19e5 | ||
|
|
596612cc99 | ||
|
|
c1210b2977 | ||
|
|
e2727ad15e | ||
|
|
877a9fdecd | ||
|
|
1a5a6e4ad0 | ||
|
|
7255ee639c | ||
|
|
2a5210ccc5 | ||
|
|
bb8f214e47 | ||
|
|
f8e8ab2b27 | ||
|
|
c33795b68a | ||
|
|
67364633f0 | ||
|
|
81fb1e73c4 | ||
|
|
fd1b3b0e64 | ||
|
|
a73d8f947d | ||
|
|
ab8a93b4a4 | ||
|
|
ba6a9bdca4 | ||
|
|
85e5024078 | ||
|
|
1f88b0031d | ||
|
|
64f549c495 | ||
|
|
3a24db94f5 | ||
|
|
b0005180f2 | ||
|
|
bb430342d8 | ||
|
|
700b025024 | ||
|
|
e6007b2cee | ||
|
|
00e2e3c2ad | ||
|
|
794bb716b2 | ||
|
|
1cddb86918 | ||
|
|
b967f8f818 | ||
|
|
05547c752e | ||
|
|
2cf392633e | ||
|
|
d6431a4990 | ||
|
|
0576638603 | ||
|
|
4f332766f0 | ||
|
|
344f91c983 | ||
|
|
dcb39eb912 | ||
|
|
2aa7e46c9c | ||
|
|
58432ff4dd | ||
|
|
70bf746c7a | ||
|
|
73dfff9191 | ||
|
|
47247e68ac | ||
|
|
04527b0061 | ||
|
|
90da1c9059 | ||
|
|
17769b5291 | ||
|
|
5bccc7cf2b | ||
|
|
1ac26453d5 | ||
|
|
cebf3be97f | ||
|
|
7d3a85aac0 | ||
|
|
626f627f58 | ||
|
|
07bf997aa2 | ||
|
|
3375021ee6 | ||
|
|
8274cd5d82 | ||
|
|
1792529267 | ||
|
|
e90305d43f | ||
|
|
bede14ad11 | ||
|
|
1f3209698e | ||
|
|
42da0fb424 | ||
|
|
6125a7db80 | ||
|
|
8ec74b80f4 | ||
|
|
7c5a761ba5 | ||
|
|
17f5788e43 | ||
|
|
2af5e918a7 | ||
|
|
b684ba5ef0 | ||
|
|
0e249ae389 | ||
|
|
e5ae6f5547 | ||
|
|
2375dda43b | ||
|
|
55f7ff6a46 | ||
|
|
90eef3f6b7 | ||
|
|
3b86b36128 | ||
|
|
d1a410c7fe | ||
|
|
89dc8502e5 | ||
|
|
b867c76d10 | ||
|
|
7ba9ffc758 | ||
|
|
6f99cd0bef | ||
|
|
82128c30c5 | ||
|
|
a6ecea872e | ||
|
|
335f9cf21b | ||
|
|
1c25e35777 | ||
|
|
c1b262138d | ||
|
|
c81d15e08e | ||
|
|
8be8f245d4 | ||
|
|
b307cc6227 | ||
|
|
aea231d64e | ||
|
|
b15d1adb3c | ||
|
|
80a95c19f1 | ||
|
|
3598913d43 | ||
|
|
62b60e8c8b | ||
|
|
c3dac96dfd | ||
|
|
8478899b67 | ||
|
|
42b249a6b3 | ||
|
|
efa702405c | ||
|
|
46b534cfcc | ||
|
|
bab249a54a | ||
|
|
994ba7790e | ||
|
|
f29470a08c | ||
|
|
0262a8e763 | ||
|
|
84a4033606 | ||
|
|
3936270266 | ||
|
|
4d90d35fc7 | ||
|
|
2cdcfa3f22 | ||
|
|
d00e35de84 | ||
|
|
e12b913cbf | ||
|
|
463f96cda4 | ||
|
|
30dda67e7c | ||
|
|
361f6fa30a | ||
|
|
2ce5ba2575 | ||
|
|
ea81e643f2 | ||
|
|
26839fa5c1 | ||
|
|
d35e09bf39 | ||
|
|
837b36cccf | ||
|
|
be9b476151 | ||
|
|
1cdde6d777 | ||
|
|
7622c150cf | ||
|
|
2f893574f4 | ||
|
|
ca8b776ce1 | ||
|
|
5ede9cb091 | ||
|
|
f4824cdc42 | ||
|
|
f7ea4a354b | ||
|
|
0dc9f174d4 | ||
|
|
42642ebd34 | ||
|
|
821b55ccce | ||
|
|
e2c6687690 | ||
|
|
f38a6dffba | ||
|
|
ccd54e16cc | ||
|
|
8fdc943553 | ||
|
|
76cfa3e148 | ||
|
|
d1d7bc5355 | ||
|
|
90d2e341ad | ||
|
|
7c8e7c23e9 | ||
|
|
22f9e0573c | ||
|
|
0dfaed53cb | ||
|
|
9883f4bfde | ||
|
|
dc7f155675 | ||
|
|
ba4ecc6f80 | ||
|
|
f9f55b48b0 | ||
|
|
3a6ee43f22 | ||
|
|
9c3dec09c9 | ||
|
|
8947c8a6e8 | ||
|
|
e6325fa2e8 | ||
|
|
91d44a833b | ||
|
|
ad83d827e2 | ||
|
|
0dd47211c5 | ||
|
|
cef3bc229a | ||
|
|
d1ef11defa | ||
|
|
3fcb4949f2 | ||
|
|
d5df6a99c1 | ||
|
|
0ff3b456ce | ||
|
|
339d90afd7 | ||
|
|
09fa268451 | ||
|
|
0f8339ac92 | ||
|
|
9be4493a9e | ||
|
|
9a70ecdd7e | ||
|
|
6d573b6c70 | ||
|
|
770b94bde5 | ||
|
|
a07fc4bfaa | ||
|
|
9dbd46d866 | ||
|
|
feba551e99 | ||
|
|
c1d0510fd3 | ||
|
|
e7896c66f3 | ||
|
|
113d01ce99 | ||
|
|
67d5422c94 | ||
|
|
503e627fb5 | ||
|
|
527b1f9795 | ||
|
|
ad3c4c9317 | ||
|
|
8ba1a40685 | ||
|
|
6bf7dea414 | ||
|
|
f8e4b29a6c | ||
|
|
48d2151d05 | ||
|
|
614ecfbc58 | ||
|
|
0cc99003f6 | ||
|
|
ece086c03f | ||
|
|
3338dbe473 | ||
|
|
91836b99d1 | ||
|
|
ad2641264b | ||
|
|
b87a7615e5 | ||
|
|
82b4070dad | ||
|
|
00c0dea2a7 | ||
|
|
ef2600ae5a | ||
|
|
1cf1696108 | ||
|
|
73ef8a8723 | ||
|
|
b8714b3864 | ||
|
|
6032a3332b | ||
|
|
447c0f45e5 | ||
|
|
606fc39ee3 | ||
|
|
0f480ee410 | ||
|
|
bc54ddbc5e | ||
|
|
4bf0bbe6c3 | ||
|
|
7e13d33588 | ||
|
|
3f49ad6791 | ||
|
|
6ae58b4584 | ||
|
|
f22ec30e34 | ||
|
|
55a15b7d3f | ||
|
|
0a67d9f511 | ||
|
|
2e09a7e713 | ||
|
|
84db5d3965 | ||
|
|
e4b3aafc94 | ||
|
|
960f3b0fb7 | ||
|
|
5dc462c62a | ||
|
|
e109ff5ea8 | ||
|
|
2ed1d42f35 | ||
|
|
026eb67e91 | ||
|
|
4340989039 | ||
|
|
64f511a43b | ||
|
|
e0790c1323 | ||
|
|
2148f8ff16 | ||
|
|
b1422b7fb4 | ||
|
|
89646250fc | ||
|
|
6b6001dc42 | ||
|
|
8292b612ed | ||
|
|
c0fd2aa8c5 | ||
|
|
5ab274aa50 | ||
|
|
76dfbaeb68 | ||
|
|
7648d6cc81 | ||
|
|
87ba72eb00 | ||
|
|
f8e02573c9 | ||
|
|
cfed4afaaf | ||
|
|
2fe89c4b46 | ||
|
|
231205abc9 | ||
|
|
51969d3bab | ||
|
|
9078902bf0 | ||
|
|
ab9703f83f | ||
|
|
bacd14db7f | ||
|
|
9e6dd82c74 | ||
|
|
0397ffdbc5 | ||
|
|
3d9c9fe0b9 | ||
|
|
7229f5d794 | ||
|
|
23b0378a0e | ||
|
|
9713852deb | ||
|
|
3bc92d88f0 | ||
|
|
23a6578d6f | ||
|
|
8bb4b2096a | ||
|
|
23fd59f23a | ||
|
|
43f7ace90e | ||
|
|
758e232b6c | ||
|
|
5e877f9b05 | ||
|
|
e0ffd2d948 | ||
|
|
66c6bdd960 | ||
|
|
cb81a57ddd | ||
|
|
ad5368eaa2 | ||
|
|
174807277a | ||
|
|
9ba9361053 | ||
|
|
13e88e70a2 | ||
|
|
87362cbc90 | ||
|
|
95e4195561 | ||
|
|
9eef269aaf | ||
|
|
b3dbee7664 | ||
|
|
5cd46678b4 | ||
|
|
757d42148e | ||
|
|
e5265ccdc0 | ||
|
|
074f79d3b0 | ||
|
|
842c25a762 | ||
|
|
b8db0c4792 | ||
|
|
6169dbf42c | ||
|
|
4e7d7f2492 | ||
|
|
933eef3584 | ||
|
|
334c86dd1f | ||
|
|
b95304881a | ||
|
|
299ba084d0 | ||
|
|
35926269a6 | ||
|
|
6dc6860622 | ||
|
|
9d3585a0c0 | ||
|
|
e0088c496c | ||
|
|
e184c1b3dd | ||
|
|
425dbc6db1 | ||
|
|
a423b52b55 | ||
|
|
5d74d9dc51 | ||
|
|
32b771ad80 | ||
|
|
c03fe908a7 | ||
|
|
a162d6bce1 | ||
|
|
5ac4dd04fa | ||
|
|
373683890c | ||
|
|
67bf46d9c2 | ||
|
|
3ee7c30cae | ||
|
|
1b201a755a | ||
|
|
fcebdaf130 | ||
|
|
542acd0896 | ||
|
|
3104e07c78 | ||
|
|
fcc82a8e46 | ||
|
|
e75a8a90e2 | ||
|
|
49162f228e | ||
|
|
636ef024b7 | ||
|
|
efec97fda3 | ||
|
|
f6e4d50137 | ||
|
|
f33b13c194 | ||
|
|
7e3e5855b8 | ||
|
|
4140af232b | ||
|
|
334b783c8f | ||
|
|
42722c0896 | ||
|
|
04322ef94d | ||
|
|
ca7162fb4b | ||
|
|
cea56fbe53 | ||
|
|
67994bb5a3 | ||
|
|
7b5ca16993 | ||
|
|
39bf605e63 | ||
|
|
9b6d402bc5 | ||
|
|
9f003c950d | ||
|
|
1ee701b52f | ||
|
|
0bade2d940 | ||
|
|
c50406a279 | ||
|
|
87f1d458b8 | ||
|
|
f13a40aa73 | ||
|
|
f0230e24a7 | ||
|
|
7542d446d0 | ||
|
|
ef0f5ac023 | ||
|
|
d8dd2a23a8 | ||
|
|
0ebbd0a232 | ||
|
|
5d7416c0ef | ||
|
|
0d235ed8ca | ||
|
|
d9b8feec01 | ||
|
|
15eaf5beff | ||
|
|
4d1bbf012a | ||
|
|
110018487f | ||
|
|
45ddec53d3 | ||
|
|
a077c57eee | ||
|
|
e0f3a6f5d9 | ||
|
|
90fe64303d | ||
|
|
9340df7a25 | ||
|
|
475d4800a2 | ||
|
|
062f756413 | ||
|
|
db11ba7437 | ||
|
|
b094cbd0cb | ||
|
|
d700f9f09b | ||
|
|
2e2d67c243 | ||
|
|
54aedc8426 | ||
|
|
90852663d1 | ||
|
|
d2fdae67e7 | ||
|
|
311fab2ab1 | ||
|
|
182e457505 | ||
|
|
34b292b06a | ||
|
|
ec2a18ab4a | ||
|
|
f1043acf46 | ||
|
|
363e338a61 | ||
|
|
5757ac8002 | ||
|
|
9494472a7d | ||
|
|
0d49f5a8df | ||
|
|
afee12cd7a | ||
|
|
fc5199c70f | ||
|
|
5a51c2b578 | ||
|
|
d7f913d004 | ||
|
|
a798d9b92b | ||
|
|
f35521a943 | ||
|
|
4fca5bcde5 | ||
|
|
0dd5620de6 | ||
|
|
d7433f84d7 | ||
|
|
882c0bcc6a | ||
|
|
bd0320f56f | ||
|
|
fbf370befa | ||
|
|
50531fa35a | ||
|
|
6807664188 | ||
|
|
fae8e2b0d3 | ||
|
|
0ca2d25ef6 | ||
|
|
f147d28c52 | ||
|
|
0b51e17a8a | ||
|
|
4a1755c7c5 | ||
|
|
ff9167192a | ||
|
|
8906a1c903 | ||
|
|
fa6d50c23e | ||
|
|
d8dab91619 | ||
|
|
b1cba11f6b | ||
|
|
aabd9f7f50 | ||
|
|
2541f8909e | ||
|
|
7ef330c3f4 | ||
|
|
0db3f67eb3 | ||
|
|
862049d995 | ||
|
|
4b2d2d5002 | ||
|
|
b665bf3f79 | ||
|
|
f11800f1e7 | ||
|
|
725e0f5a11 | ||
|
|
e59975fe95 | ||
|
|
5fd5b73e91 | ||
|
|
a35b179ed1 | ||
|
|
16b4584609 | ||
|
|
31b023b0fa | ||
|
|
348e11f968 | ||
|
|
14fb34e4b6 | ||
|
|
f7e9f6e7c9 | ||
|
|
fc7ca39f39 | ||
|
|
07650130c6 | ||
|
|
d81f6eaa3e | ||
|
|
bc5c162a00 | ||
|
|
48cc1719e3 | ||
|
|
d465e4a9b3 | ||
|
|
6d1d1e01d4 | ||
|
|
eedd982a84 | ||
|
|
917c4e317c | ||
|
|
cff10604f9 | ||
|
|
e9958811d4 | ||
|
|
55828c7341 | ||
|
|
fa28301952 | ||
|
|
0dfaa00165 | ||
|
|
81a0b67640 | ||
|
|
365d0a95e0 | ||
|
|
90497bdd59 | ||
|
|
af196dd049 | ||
|
|
76be69c4b2 | ||
|
|
4c492b6d4b | ||
|
|
66b1a6e8ad | ||
|
|
3945655f21 | ||
|
|
56fa213886 | ||
|
|
b05ccf3be8 | ||
|
|
dcf5db31d0 | ||
|
|
80f84e95fc | ||
|
|
efcd4425b7 | ||
|
|
8c61e65521 | ||
|
|
c9bb43059d | ||
|
|
d55ac4aa54 | ||
|
|
ce3a59446c | ||
|
|
b026fe9b18 | ||
|
|
8edd6c583d | ||
|
|
60ff1ddb0c | ||
|
|
80ba7ea5ed | ||
|
|
f5ac5c2cfb | ||
|
|
8fdbda78ee | ||
|
|
a7b379059f | ||
|
|
8bbd15c482 | ||
|
|
291930b78d | ||
|
|
670bf7f6ab | ||
|
|
4c25826a9c | ||
|
|
51965a9de1 | ||
|
|
2597469b30 | ||
|
|
93bf258978 | ||
|
|
b560bb356e | ||
|
|
15a3f9d29a | ||
|
|
3a6e993158 | ||
|
|
827cf15bf2 | ||
|
|
32c022cd4d | ||
|
|
751ad94343 | ||
|
|
cf62eede8a | ||
|
|
a0f6a3f379 | ||
|
|
c4b47950a8 | ||
|
|
b1a4d5dfc5 | ||
|
|
062069028f | ||
|
|
fc0232b744 | ||
|
|
79ca56f3ad | ||
|
|
ed6247d231 | ||
|
|
2a13074e6b | ||
|
|
f784eeb17e | ||
|
|
47fc15faf0 | ||
|
|
0ad0b3927f | ||
|
|
f100a6c563 | ||
|
|
1768543b8b | ||
|
|
f06fbe159f | ||
|
|
270d7cbff9 | ||
|
|
c2cdd4130e | ||
|
|
51beef8192 | ||
|
|
856aa31231 | ||
|
|
c6c642f37a | ||
|
|
2be5d3b72b | ||
|
|
8bdfe806e0 | ||
|
|
e30738d79b | ||
|
|
8da213c566 | ||
|
|
d0cb16a155 | ||
|
|
afc1af7e95 | ||
|
|
12d4905b39 | ||
|
|
234ac09f42 | ||
|
|
39dc54b09e | ||
|
|
b5ab3d3380 | ||
|
|
db764619fc | ||
|
|
a22f378ed5 | ||
|
|
3a65c56123 | ||
|
|
422968315e | ||
|
|
e7439a2d7f | ||
|
|
e05ffe0263 | ||
|
|
700006692a | ||
|
|
0f65fe34a7 | ||
|
|
7b81cb1215 | ||
|
|
599716fe94 | ||
|
|
dee1d51640 | ||
|
|
b3c5933042 | ||
|
|
29b3794dd8 | ||
|
|
53848ad366 | ||
|
|
723932cfac | ||
|
|
5e3b38376c | ||
|
|
bd94d7ede6 | ||
|
|
1e1828bbdc | ||
|
|
da83767fa2 | ||
|
|
3d61201608 | ||
|
|
f17fa50456 | ||
|
|
49278fb63d | ||
|
|
4629a09036 | ||
|
|
9dbaf15449 | ||
|
|
a62c6df126 | ||
|
|
3e77031d9c | ||
|
|
27c42bb865 | ||
|
|
4237468bcf | ||
|
|
1a623ff853 | ||
|
|
55bfd79cf3 | ||
|
|
0c23ecc70d | ||
|
|
f94c6a0408 | ||
|
|
50045d71bd | ||
|
|
e2a413f20e | ||
|
|
da286e3cfb | ||
|
|
7f21d39d81 | ||
|
|
fa8c054889 | ||
|
|
6061e8a82b | ||
|
|
4aa2f1bb86 | ||
|
|
8fbb8c93c1 | ||
|
|
dfd8fe6e8c | ||
|
|
2ee31c77d4 | ||
|
|
fce1ef248d | ||
|
|
67db5c123f | ||
|
|
d237e5f54c | ||
|
|
9e730a6b5b | ||
|
|
bcdeba18ac | ||
|
|
204625b5c8 | ||
|
|
00921e7806 | ||
|
|
5917671521 | ||
|
|
526e5bfabb | ||
|
|
fa1820ab97 | ||
|
|
b8ea018b43 | ||
|
|
72895df6ae | ||
|
|
9f3faa0e46 | ||
|
|
7fa192229c | ||
|
|
44dc1ea99e | ||
|
|
afbf2c8848 | ||
|
|
a093e1e9c9 | ||
|
|
90070bc8ef | ||
|
|
82b6837dc2 | ||
|
|
2d7bd487ba | ||
|
|
3248faff40 | ||
|
|
ec9c6c510a | ||
|
|
537dac6033 | ||
|
|
422febf441 | ||
|
|
7bf2bfd6b1 | ||
|
|
ea5f1c81d6 | ||
|
|
7b91c0da1e | ||
|
|
849a999744 | ||
|
|
01174c9b11 | ||
|
|
a94bda06f4 | ||
|
|
0aa0673b57 | ||
|
|
c79ae86c44 | ||
|
|
e8a0c69cf2 | ||
|
|
98e0ec1051 | ||
|
|
08aee24a48 | ||
|
|
b6296423f5 | ||
|
|
66ecc40a80 | ||
|
|
1e3aaa887d | ||
|
|
726c259ec3 | ||
|
|
445dd4ec5b | ||
|
|
04c14cab7a | ||
|
|
b43969da1c | ||
|
|
c7c2a4a7ec | ||
|
|
cb125a192f | ||
|
|
5f82473a26 | ||
|
|
363824d589 | ||
|
|
d072da86d1 | ||
|
|
148ce21a9a |
134 changed files with 8302 additions and 2814 deletions
10
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
10
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
|
|
@ -35,6 +35,16 @@ body:
|
||||||
label: Version
|
label: Version
|
||||||
description: What version are you running? Look to AYON Tray
|
description: What version are you running? Look to AYON Tray
|
||||||
options:
|
options:
|
||||||
|
- 1.7.0
|
||||||
|
- 1.6.13
|
||||||
|
- 1.6.12
|
||||||
|
- 1.6.11
|
||||||
|
- 1.6.10
|
||||||
|
- 1.6.9
|
||||||
|
- 1.6.8
|
||||||
|
- 1.6.7
|
||||||
|
- 1.6.6
|
||||||
|
- 1.6.5
|
||||||
- 1.6.4
|
- 1.6.4
|
||||||
- 1.6.3
|
- 1.6.3
|
||||||
- 1.6.2
|
- 1.6.2
|
||||||
|
|
|
||||||
|
|
@ -141,6 +141,9 @@ def _get_ayon_bundle_data() -> tuple[
|
||||||
]:
|
]:
|
||||||
studio_bundle_name = os.environ.get("AYON_STUDIO_BUNDLE_NAME")
|
studio_bundle_name = os.environ.get("AYON_STUDIO_BUNDLE_NAME")
|
||||||
project_bundle_name = os.getenv("AYON_BUNDLE_NAME")
|
project_bundle_name = os.getenv("AYON_BUNDLE_NAME")
|
||||||
|
# If AYON launcher <1.4.0 was used
|
||||||
|
if not studio_bundle_name:
|
||||||
|
studio_bundle_name = project_bundle_name
|
||||||
bundles = ayon_api.get_bundles()["bundles"]
|
bundles = ayon_api.get_bundles()["bundles"]
|
||||||
studio_bundle = next(
|
studio_bundle = next(
|
||||||
(
|
(
|
||||||
|
|
|
||||||
|
|
@ -185,6 +185,20 @@ class IPluginPaths(AYONInterface):
|
||||||
"""
|
"""
|
||||||
return self._get_plugin_paths_by_type("inventory")
|
return self._get_plugin_paths_by_type("inventory")
|
||||||
|
|
||||||
|
def get_loader_action_plugin_paths(
|
||||||
|
self, host_name: Optional[str]
|
||||||
|
) -> list[str]:
|
||||||
|
"""Receive loader action plugin paths.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
host_name (Optional[str]): Current host name.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list[str]: Paths to loader action plugins.
|
||||||
|
|
||||||
|
"""
|
||||||
|
return []
|
||||||
|
|
||||||
|
|
||||||
class ITrayAddon(AYONInterface):
|
class ITrayAddon(AYONInterface):
|
||||||
"""Addon has special procedures when used in Tray tool.
|
"""Addon has special procedures when used in Tray tool.
|
||||||
|
|
|
||||||
|
|
@ -6,7 +6,6 @@ import logging
|
||||||
import code
|
import code
|
||||||
import traceback
|
import traceback
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
import warnings
|
|
||||||
|
|
||||||
import click
|
import click
|
||||||
|
|
||||||
|
|
@ -90,54 +89,6 @@ def addon(ctx):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
@main_cli.command()
|
|
||||||
@click.pass_context
|
|
||||||
@click.argument("output_json_path")
|
|
||||||
@click.option("--project", help="Project name", default=None)
|
|
||||||
@click.option("--asset", help="Folder path", default=None)
|
|
||||||
@click.option("--task", help="Task name", default=None)
|
|
||||||
@click.option("--app", help="Application name", default=None)
|
|
||||||
@click.option(
|
|
||||||
"--envgroup", help="Environment group (e.g. \"farm\")", default=None
|
|
||||||
)
|
|
||||||
def extractenvironments(
|
|
||||||
ctx, output_json_path, project, asset, task, app, envgroup
|
|
||||||
):
|
|
||||||
"""Extract environment variables for entered context to a json file.
|
|
||||||
|
|
||||||
Entered output filepath will be created if does not exists.
|
|
||||||
|
|
||||||
All context options must be passed otherwise only AYON's global
|
|
||||||
environments will be extracted.
|
|
||||||
|
|
||||||
Context options are "project", "asset", "task", "app"
|
|
||||||
|
|
||||||
Deprecated:
|
|
||||||
This function is deprecated and will be removed in future. Please use
|
|
||||||
'addon applications extractenvironments ...' instead.
|
|
||||||
"""
|
|
||||||
warnings.warn(
|
|
||||||
(
|
|
||||||
"Command 'extractenvironments' is deprecated and will be"
|
|
||||||
" removed in future. Please use"
|
|
||||||
" 'addon applications extractenvironments ...' instead."
|
|
||||||
),
|
|
||||||
DeprecationWarning
|
|
||||||
)
|
|
||||||
|
|
||||||
addons_manager = ctx.obj["addons_manager"]
|
|
||||||
applications_addon = addons_manager.get_enabled_addon("applications")
|
|
||||||
if applications_addon is None:
|
|
||||||
raise RuntimeError(
|
|
||||||
"Applications addon is not available or enabled."
|
|
||||||
)
|
|
||||||
|
|
||||||
# Please ignore the fact this is using private method
|
|
||||||
applications_addon._cli_extract_environments(
|
|
||||||
output_json_path, project, asset, task, app, envgroup
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@main_cli.command()
|
@main_cli.command()
|
||||||
@click.pass_context
|
@click.pass_context
|
||||||
@click.argument("path", required=True)
|
@click.argument("path", required=True)
|
||||||
|
|
|
||||||
|
|
@ -1,11 +1,4 @@
|
||||||
from enum import Enum
|
from ayon_core.lib import StrEnum
|
||||||
|
|
||||||
|
|
||||||
class StrEnum(str, Enum):
|
|
||||||
"""A string-based Enum class that allows for string comparison."""
|
|
||||||
|
|
||||||
def __str__(self) -> str:
|
|
||||||
return self.value
|
|
||||||
|
|
||||||
|
|
||||||
class ContextChangeReason(StrEnum):
|
class ContextChangeReason(StrEnum):
|
||||||
|
|
|
||||||
|
|
@ -137,7 +137,7 @@ class HostBase(AbstractHost):
|
||||||
def get_current_folder_path(self) -> Optional[str]:
|
def get_current_folder_path(self) -> Optional[str]:
|
||||||
"""
|
"""
|
||||||
Returns:
|
Returns:
|
||||||
Optional[str]: Current asset name.
|
Optional[str]: Current folder path.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
return os.environ.get("AYON_FOLDER_PATH")
|
return os.environ.get("AYON_FOLDER_PATH")
|
||||||
|
|
|
||||||
|
|
@ -2,6 +2,7 @@
|
||||||
# flake8: noqa E402
|
# flake8: noqa E402
|
||||||
"""AYON lib functions."""
|
"""AYON lib functions."""
|
||||||
|
|
||||||
|
from ._compatibility import StrEnum
|
||||||
from .local_settings import (
|
from .local_settings import (
|
||||||
IniSettingRegistry,
|
IniSettingRegistry,
|
||||||
JSONSettingRegistry,
|
JSONSettingRegistry,
|
||||||
|
|
@ -11,6 +12,7 @@ from .local_settings import (
|
||||||
get_launcher_storage_dir,
|
get_launcher_storage_dir,
|
||||||
get_addons_resources_dir,
|
get_addons_resources_dir,
|
||||||
get_local_site_id,
|
get_local_site_id,
|
||||||
|
get_ayon_user_entity,
|
||||||
get_ayon_username,
|
get_ayon_username,
|
||||||
)
|
)
|
||||||
from .ayon_connection import initialize_ayon_connection
|
from .ayon_connection import initialize_ayon_connection
|
||||||
|
|
@ -73,6 +75,7 @@ from .log import (
|
||||||
)
|
)
|
||||||
|
|
||||||
from .path_templates import (
|
from .path_templates import (
|
||||||
|
DefaultKeysDict,
|
||||||
TemplateUnsolved,
|
TemplateUnsolved,
|
||||||
StringTemplate,
|
StringTemplate,
|
||||||
FormatObject,
|
FormatObject,
|
||||||
|
|
@ -140,6 +143,8 @@ from .ayon_info import (
|
||||||
terminal = Terminal
|
terminal = Terminal
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
|
"StrEnum",
|
||||||
|
|
||||||
"IniSettingRegistry",
|
"IniSettingRegistry",
|
||||||
"JSONSettingRegistry",
|
"JSONSettingRegistry",
|
||||||
"AYONSecureRegistry",
|
"AYONSecureRegistry",
|
||||||
|
|
@ -148,6 +153,7 @@ __all__ = [
|
||||||
"get_launcher_storage_dir",
|
"get_launcher_storage_dir",
|
||||||
"get_addons_resources_dir",
|
"get_addons_resources_dir",
|
||||||
"get_local_site_id",
|
"get_local_site_id",
|
||||||
|
"get_ayon_user_entity",
|
||||||
"get_ayon_username",
|
"get_ayon_username",
|
||||||
|
|
||||||
"initialize_ayon_connection",
|
"initialize_ayon_connection",
|
||||||
|
|
@ -228,6 +234,7 @@ __all__ = [
|
||||||
"get_version_from_path",
|
"get_version_from_path",
|
||||||
"get_last_version_from_path",
|
"get_last_version_from_path",
|
||||||
|
|
||||||
|
"DefaultKeysDict",
|
||||||
"TemplateUnsolved",
|
"TemplateUnsolved",
|
||||||
"StringTemplate",
|
"StringTemplate",
|
||||||
"FormatObject",
|
"FormatObject",
|
||||||
|
|
|
||||||
8
client/ayon_core/lib/_compatibility.py
Normal file
8
client/ayon_core/lib/_compatibility.py
Normal file
|
|
@ -0,0 +1,8 @@
|
||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
|
||||||
|
class StrEnum(str, Enum):
|
||||||
|
"""A string-based Enum class that allows for string comparison."""
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
return self.value
|
||||||
|
|
@ -604,7 +604,11 @@ class EnumDef(AbstractAttrDef):
|
||||||
|
|
||||||
if value is None:
|
if value is None:
|
||||||
return copy.deepcopy(self.default)
|
return copy.deepcopy(self.default)
|
||||||
return list(self._item_values.intersection(value))
|
return [
|
||||||
|
v
|
||||||
|
for v in value
|
||||||
|
if v in self._item_values
|
||||||
|
]
|
||||||
|
|
||||||
def is_value_valid(self, value: Any) -> bool:
|
def is_value_valid(self, value: Any) -> bool:
|
||||||
"""Check if item is available in possible values."""
|
"""Check if item is available in possible values."""
|
||||||
|
|
|
||||||
|
|
@ -5,6 +5,7 @@ import json
|
||||||
import platform
|
import platform
|
||||||
import configparser
|
import configparser
|
||||||
import warnings
|
import warnings
|
||||||
|
import copy
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
from functools import lru_cache
|
from functools import lru_cache
|
||||||
|
|
@ -13,6 +14,8 @@ from typing import Optional, Any
|
||||||
import platformdirs
|
import platformdirs
|
||||||
import ayon_api
|
import ayon_api
|
||||||
|
|
||||||
|
from .cache import NestedCacheItem, CacheItem
|
||||||
|
|
||||||
_PLACEHOLDER = object()
|
_PLACEHOLDER = object()
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -23,6 +26,7 @@ class RegistryItemNotFound(ValueError):
|
||||||
|
|
||||||
class _Cache:
|
class _Cache:
|
||||||
username = None
|
username = None
|
||||||
|
user_entities_by_name = NestedCacheItem()
|
||||||
|
|
||||||
|
|
||||||
def _get_ayon_appdirs(*args: str) -> str:
|
def _get_ayon_appdirs(*args: str) -> str:
|
||||||
|
|
@ -569,6 +573,68 @@ def get_local_site_id():
|
||||||
return site_id
|
return site_id
|
||||||
|
|
||||||
|
|
||||||
|
def _get_ayon_service_username() -> Optional[str]:
|
||||||
|
# TODO @iLLiCiTiT - do not use private attribute of 'ServerAPI', rather
|
||||||
|
# use public method to get username from connection stack.
|
||||||
|
con = ayon_api.get_server_api_connection()
|
||||||
|
user_stack = getattr(con, "_as_user_stack", None)
|
||||||
|
if user_stack is None:
|
||||||
|
return None
|
||||||
|
return user_stack.username
|
||||||
|
|
||||||
|
|
||||||
|
def get_ayon_user_entity(username: Optional[str] = None) -> dict[str, Any]:
|
||||||
|
"""AYON user entity used for templates and publishing.
|
||||||
|
|
||||||
|
Note:
|
||||||
|
Usually only service and admin users can receive the full user entity.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
username (Optional[str]): Username of the user. If not passed, then
|
||||||
|
the current user in 'ayon_api' is used.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict[str, Any]: User entity.
|
||||||
|
|
||||||
|
"""
|
||||||
|
service_username = _get_ayon_service_username()
|
||||||
|
# Handle service user handling first
|
||||||
|
if service_username:
|
||||||
|
if username is None:
|
||||||
|
username = service_username
|
||||||
|
cache: CacheItem = _Cache.user_entities_by_name[username]
|
||||||
|
if not cache.is_valid:
|
||||||
|
if username == service_username:
|
||||||
|
user = ayon_api.get_user()
|
||||||
|
else:
|
||||||
|
user = ayon_api.get_user(username)
|
||||||
|
cache.update_data(user)
|
||||||
|
return copy.deepcopy(cache.get_data())
|
||||||
|
|
||||||
|
# Cache current user
|
||||||
|
current_user = None
|
||||||
|
if _Cache.username is None:
|
||||||
|
current_user = ayon_api.get_user()
|
||||||
|
_Cache.username = current_user["name"]
|
||||||
|
|
||||||
|
if username is None:
|
||||||
|
username = _Cache.username
|
||||||
|
|
||||||
|
cache: CacheItem = _Cache.user_entities_by_name[username]
|
||||||
|
if not cache.is_valid:
|
||||||
|
user = None
|
||||||
|
if username == _Cache.username:
|
||||||
|
if current_user is None:
|
||||||
|
current_user = ayon_api.get_user()
|
||||||
|
user = current_user
|
||||||
|
|
||||||
|
if user is None:
|
||||||
|
user = ayon_api.get_user(username)
|
||||||
|
cache.update_data(user)
|
||||||
|
|
||||||
|
return copy.deepcopy(cache.get_data())
|
||||||
|
|
||||||
|
|
||||||
def get_ayon_username():
|
def get_ayon_username():
|
||||||
"""AYON username used for templates and publishing.
|
"""AYON username used for templates and publishing.
|
||||||
|
|
||||||
|
|
@ -578,20 +644,5 @@ def get_ayon_username():
|
||||||
str: Username.
|
str: Username.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
# Look for username in the connection stack
|
user = get_ayon_user_entity()
|
||||||
# - this is used when service is working as other user
|
return user["name"]
|
||||||
# (e.g. in background sync)
|
|
||||||
# TODO @iLLiCiTiT - do not use private attribute of 'ServerAPI', rather
|
|
||||||
# use public method to get username from connection stack.
|
|
||||||
con = ayon_api.get_server_api_connection()
|
|
||||||
user_stack = getattr(con, "_as_user_stack", None)
|
|
||||||
if user_stack is not None:
|
|
||||||
username = user_stack.username
|
|
||||||
if username is not None:
|
|
||||||
return username
|
|
||||||
|
|
||||||
# Cache the username to avoid multiple API calls
|
|
||||||
# - it is not expected that user would change
|
|
||||||
if _Cache.username is None:
|
|
||||||
_Cache.username = ayon_api.get_user()["name"]
|
|
||||||
return _Cache.username
|
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import copy
|
import copy
|
||||||
|
|
@ -5,11 +7,7 @@ import numbers
|
||||||
import warnings
|
import warnings
|
||||||
import platform
|
import platform
|
||||||
from string import Formatter
|
from string import Formatter
|
||||||
import typing
|
from typing import Any, Union, Iterable
|
||||||
from typing import List, Dict, Any, Set
|
|
||||||
|
|
||||||
if typing.TYPE_CHECKING:
|
|
||||||
from typing import Union
|
|
||||||
|
|
||||||
SUB_DICT_PATTERN = re.compile(r"([^\[\]]+)")
|
SUB_DICT_PATTERN = re.compile(r"([^\[\]]+)")
|
||||||
OPTIONAL_PATTERN = re.compile(r"(<.*?[^{0]*>)[^0-9]*?")
|
OPTIONAL_PATTERN = re.compile(r"(<.*?[^{0]*>)[^0-9]*?")
|
||||||
|
|
@ -44,6 +42,54 @@ class TemplateUnsolved(Exception):
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class DefaultKeysDict(dict):
|
||||||
|
"""Dictionary that supports the default key to use for str conversion.
|
||||||
|
|
||||||
|
Is helpful for changes of a key in a template from string to dictionary
|
||||||
|
for example '{folder}' -> '{folder[name]}'.
|
||||||
|
>>> data = DefaultKeysDict(
|
||||||
|
>>> "name",
|
||||||
|
>>> {"folder": {"name": "FolderName"}}
|
||||||
|
>>> )
|
||||||
|
>>> print("{folder[name]}".format_map(data))
|
||||||
|
FolderName
|
||||||
|
>>> print("{folder}".format_map(data))
|
||||||
|
FolderName
|
||||||
|
|
||||||
|
Args:
|
||||||
|
default_key (Union[str, Iterable[str]]): Default key to use for str
|
||||||
|
conversion. Can also expect multiple keys for more nested
|
||||||
|
dictionary.
|
||||||
|
|
||||||
|
"""
|
||||||
|
def __init__(
|
||||||
|
self, default_keys: Union[str, Iterable[str]], *args, **kwargs
|
||||||
|
) -> None:
|
||||||
|
if isinstance(default_keys, str):
|
||||||
|
default_keys = [default_keys]
|
||||||
|
else:
|
||||||
|
default_keys = list(default_keys)
|
||||||
|
if not default_keys:
|
||||||
|
raise ValueError(
|
||||||
|
"Default key must be set. Got empty default keys."
|
||||||
|
)
|
||||||
|
|
||||||
|
self._default_keys = default_keys
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
return str(self.get_default_value())
|
||||||
|
|
||||||
|
def get_default_keys(self) -> list[str]:
|
||||||
|
return list(self._default_keys)
|
||||||
|
|
||||||
|
def get_default_value(self) -> Any:
|
||||||
|
value = self
|
||||||
|
for key in self._default_keys:
|
||||||
|
value = value[key]
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
class StringTemplate:
|
class StringTemplate:
|
||||||
"""String that can be formatted."""
|
"""String that can be formatted."""
|
||||||
def __init__(self, template: str):
|
def __init__(self, template: str):
|
||||||
|
|
@ -84,7 +130,7 @@ class StringTemplate:
|
||||||
if substr:
|
if substr:
|
||||||
new_parts.append(substr)
|
new_parts.append(substr)
|
||||||
|
|
||||||
self._parts: List["Union[str, OptionalPart, FormattingPart]"] = (
|
self._parts: list[Union[str, OptionalPart, FormattingPart]] = (
|
||||||
self.find_optional_parts(new_parts)
|
self.find_optional_parts(new_parts)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -105,7 +151,7 @@ class StringTemplate:
|
||||||
def template(self) -> str:
|
def template(self) -> str:
|
||||||
return self._template
|
return self._template
|
||||||
|
|
||||||
def format(self, data: Dict[str, Any]) -> "TemplateResult":
|
def format(self, data: dict[str, Any]) -> "TemplateResult":
|
||||||
""" Figure out with whole formatting.
|
""" Figure out with whole formatting.
|
||||||
|
|
||||||
Separate advanced keys (*Like '{project[name]}') from string which must
|
Separate advanced keys (*Like '{project[name]}') from string which must
|
||||||
|
|
@ -145,29 +191,29 @@ class StringTemplate:
|
||||||
invalid_types
|
invalid_types
|
||||||
)
|
)
|
||||||
|
|
||||||
def format_strict(self, data: Dict[str, Any]) -> "TemplateResult":
|
def format_strict(self, data: dict[str, Any]) -> "TemplateResult":
|
||||||
result = self.format(data)
|
result = self.format(data)
|
||||||
result.validate()
|
result.validate()
|
||||||
return result
|
return result
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def format_template(
|
def format_template(
|
||||||
cls, template: str, data: Dict[str, Any]
|
cls, template: str, data: dict[str, Any]
|
||||||
) -> "TemplateResult":
|
) -> "TemplateResult":
|
||||||
objected_template = cls(template)
|
objected_template = cls(template)
|
||||||
return objected_template.format(data)
|
return objected_template.format(data)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def format_strict_template(
|
def format_strict_template(
|
||||||
cls, template: str, data: Dict[str, Any]
|
cls, template: str, data: dict[str, Any]
|
||||||
) -> "TemplateResult":
|
) -> "TemplateResult":
|
||||||
objected_template = cls(template)
|
objected_template = cls(template)
|
||||||
return objected_template.format_strict(data)
|
return objected_template.format_strict(data)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def find_optional_parts(
|
def find_optional_parts(
|
||||||
parts: List["Union[str, FormattingPart]"]
|
parts: list[Union[str, FormattingPart]]
|
||||||
) -> List["Union[str, OptionalPart, FormattingPart]"]:
|
) -> list[Union[str, OptionalPart, FormattingPart]]:
|
||||||
new_parts = []
|
new_parts = []
|
||||||
tmp_parts = {}
|
tmp_parts = {}
|
||||||
counted_symb = -1
|
counted_symb = -1
|
||||||
|
|
@ -192,7 +238,7 @@ class StringTemplate:
|
||||||
len(parts) == 1
|
len(parts) == 1
|
||||||
and isinstance(parts[0], str)
|
and isinstance(parts[0], str)
|
||||||
):
|
):
|
||||||
value = "<{}>".format(parts[0])
|
value = f"<{parts[0]}>"
|
||||||
else:
|
else:
|
||||||
value = OptionalPart(parts)
|
value = OptionalPart(parts)
|
||||||
|
|
||||||
|
|
@ -223,7 +269,7 @@ class TemplateResult(str):
|
||||||
only used keys.
|
only used keys.
|
||||||
solved (bool): For check if all required keys were filled.
|
solved (bool): For check if all required keys were filled.
|
||||||
template (str): Original template.
|
template (str): Original template.
|
||||||
missing_keys (Iterable[str]): Missing keys that were not in the data.
|
missing_keys (list[str]): Missing keys that were not in the data.
|
||||||
Include missing optional keys.
|
Include missing optional keys.
|
||||||
invalid_types (dict): When key was found in data, but value had not
|
invalid_types (dict): When key was found in data, but value had not
|
||||||
allowed DataType. Allowed data types are `numbers`,
|
allowed DataType. Allowed data types are `numbers`,
|
||||||
|
|
@ -232,11 +278,11 @@ class TemplateResult(str):
|
||||||
of number.
|
of number.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
used_values: Dict[str, Any] = None
|
used_values: dict[str, Any] = None
|
||||||
solved: bool = None
|
solved: bool = None
|
||||||
template: str = None
|
template: str = None
|
||||||
missing_keys: List[str] = None
|
missing_keys: list[str] = None
|
||||||
invalid_types: Dict[str, Any] = None
|
invalid_types: dict[str, Any] = None
|
||||||
|
|
||||||
def __new__(
|
def __new__(
|
||||||
cls, filled_template, template, solved,
|
cls, filled_template, template, solved,
|
||||||
|
|
@ -296,21 +342,21 @@ class TemplatePartResult:
|
||||||
"""Result to store result of template parts."""
|
"""Result to store result of template parts."""
|
||||||
def __init__(self, optional: bool = False):
|
def __init__(self, optional: bool = False):
|
||||||
# Missing keys or invalid value types of required keys
|
# Missing keys or invalid value types of required keys
|
||||||
self._missing_keys: Set[str] = set()
|
self._missing_keys: set[str] = set()
|
||||||
self._invalid_types: Dict[str, Any] = {}
|
self._invalid_types: dict[str, Any] = {}
|
||||||
# Missing keys or invalid value types of optional keys
|
# Missing keys or invalid value types of optional keys
|
||||||
self._missing_optional_keys: Set[str] = set()
|
self._missing_optional_keys: set[str] = set()
|
||||||
self._invalid_optional_types: Dict[str, Any] = {}
|
self._invalid_optional_types: dict[str, Any] = {}
|
||||||
|
|
||||||
# Used values stored by key with origin type
|
# Used values stored by key with origin type
|
||||||
# - key without any padding or key modifiers
|
# - key without any padding or key modifiers
|
||||||
# - value from filling data
|
# - value from filling data
|
||||||
# Example: {"version": 1}
|
# Example: {"version": 1}
|
||||||
self._used_values: Dict[str, Any] = {}
|
self._used_values: dict[str, Any] = {}
|
||||||
# Used values stored by key with all modifirs
|
# Used values stored by key with all modifirs
|
||||||
# - value is already formatted string
|
# - value is already formatted string
|
||||||
# Example: {"version:0>3": "001"}
|
# Example: {"version:0>3": "001"}
|
||||||
self._really_used_values: Dict[str, Any] = {}
|
self._really_used_values: dict[str, Any] = {}
|
||||||
# Concatenated string output after formatting
|
# Concatenated string output after formatting
|
||||||
self._output: str = ""
|
self._output: str = ""
|
||||||
# Is this result from optional part
|
# Is this result from optional part
|
||||||
|
|
@ -336,8 +382,9 @@ class TemplatePartResult:
|
||||||
self._really_used_values.update(other.really_used_values)
|
self._really_used_values.update(other.really_used_values)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
raise TypeError("Cannot add data from \"{}\" to \"{}\"".format(
|
raise TypeError(
|
||||||
str(type(other)), self.__class__.__name__)
|
f"Cannot add data from \"{type(other)}\""
|
||||||
|
f" to \"{self.__class__.__name__}\""
|
||||||
)
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
|
|
@ -362,40 +409,41 @@ class TemplatePartResult:
|
||||||
return self._output
|
return self._output
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def missing_keys(self) -> Set[str]:
|
def missing_keys(self) -> set[str]:
|
||||||
return self._missing_keys
|
return self._missing_keys
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def missing_optional_keys(self) -> Set[str]:
|
def missing_optional_keys(self) -> set[str]:
|
||||||
return self._missing_optional_keys
|
return self._missing_optional_keys
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def invalid_types(self) -> Dict[str, Any]:
|
def invalid_types(self) -> dict[str, Any]:
|
||||||
return self._invalid_types
|
return self._invalid_types
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def invalid_optional_types(self) -> Dict[str, Any]:
|
def invalid_optional_types(self) -> dict[str, Any]:
|
||||||
return self._invalid_optional_types
|
return self._invalid_optional_types
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def really_used_values(self) -> Dict[str, Any]:
|
def really_used_values(self) -> dict[str, Any]:
|
||||||
return self._really_used_values
|
return self._really_used_values
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def realy_used_values(self) -> Dict[str, Any]:
|
def realy_used_values(self) -> dict[str, Any]:
|
||||||
warnings.warn(
|
warnings.warn(
|
||||||
"Property 'realy_used_values' is deprecated."
|
"Property 'realy_used_values' is deprecated."
|
||||||
" Use 'really_used_values' instead.",
|
" Use 'really_used_values' instead.",
|
||||||
DeprecationWarning
|
DeprecationWarning,
|
||||||
|
stacklevel=2,
|
||||||
)
|
)
|
||||||
return self._really_used_values
|
return self._really_used_values
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def used_values(self) -> Dict[str, Any]:
|
def used_values(self) -> dict[str, Any]:
|
||||||
return self._used_values
|
return self._used_values
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def split_keys_to_subdicts(values: Dict[str, Any]) -> Dict[str, Any]:
|
def split_keys_to_subdicts(values: dict[str, Any]) -> dict[str, Any]:
|
||||||
output = {}
|
output = {}
|
||||||
formatter = Formatter()
|
formatter = Formatter()
|
||||||
for key, value in values.items():
|
for key, value in values.items():
|
||||||
|
|
@ -410,7 +458,7 @@ class TemplatePartResult:
|
||||||
data[last_key] = value
|
data[last_key] = value
|
||||||
return output
|
return output
|
||||||
|
|
||||||
def get_clean_used_values(self) -> Dict[str, Any]:
|
def get_clean_used_values(self) -> dict[str, Any]:
|
||||||
new_used_values = {}
|
new_used_values = {}
|
||||||
for key, value in self.used_values.items():
|
for key, value in self.used_values.items():
|
||||||
if isinstance(value, FormatObject):
|
if isinstance(value, FormatObject):
|
||||||
|
|
@ -426,7 +474,8 @@ class TemplatePartResult:
|
||||||
warnings.warn(
|
warnings.warn(
|
||||||
"Method 'add_realy_used_value' is deprecated."
|
"Method 'add_realy_used_value' is deprecated."
|
||||||
" Use 'add_really_used_value' instead.",
|
" Use 'add_really_used_value' instead.",
|
||||||
DeprecationWarning
|
DeprecationWarning,
|
||||||
|
stacklevel=2,
|
||||||
)
|
)
|
||||||
self.add_really_used_value(key, value)
|
self.add_really_used_value(key, value)
|
||||||
|
|
||||||
|
|
@ -479,7 +528,7 @@ class FormattingPart:
|
||||||
self,
|
self,
|
||||||
field_name: str,
|
field_name: str,
|
||||||
format_spec: str,
|
format_spec: str,
|
||||||
conversion: "Union[str, None]",
|
conversion: Union[str, None],
|
||||||
):
|
):
|
||||||
format_spec_v = ""
|
format_spec_v = ""
|
||||||
if format_spec:
|
if format_spec:
|
||||||
|
|
@ -546,7 +595,7 @@ class FormattingPart:
|
||||||
return not queue
|
return not queue
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def keys_to_template_base(keys: List[str]):
|
def keys_to_template_base(keys: list[str]):
|
||||||
if not keys:
|
if not keys:
|
||||||
return None
|
return None
|
||||||
# Create copy of keys
|
# Create copy of keys
|
||||||
|
|
@ -556,7 +605,7 @@ class FormattingPart:
|
||||||
return f"{template_base}{joined_keys}"
|
return f"{template_base}{joined_keys}"
|
||||||
|
|
||||||
def format(
|
def format(
|
||||||
self, data: Dict[str, Any], result: TemplatePartResult
|
self, data: dict[str, Any], result: TemplatePartResult
|
||||||
) -> TemplatePartResult:
|
) -> TemplatePartResult:
|
||||||
"""Format the formattings string.
|
"""Format the formattings string.
|
||||||
|
|
||||||
|
|
@ -635,6 +684,12 @@ class FormattingPart:
|
||||||
result.add_output(self.template)
|
result.add_output(self.template)
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
if isinstance(value, DefaultKeysDict):
|
||||||
|
try:
|
||||||
|
value = value.get_default_value()
|
||||||
|
except KeyError:
|
||||||
|
pass
|
||||||
|
|
||||||
if not self.validate_value_type(value):
|
if not self.validate_value_type(value):
|
||||||
result.add_invalid_type(key, value)
|
result.add_invalid_type(key, value)
|
||||||
result.add_output(self.template)
|
result.add_output(self.template)
|
||||||
|
|
@ -687,23 +742,25 @@ class OptionalPart:
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
parts: List["Union[str, OptionalPart, FormattingPart]"]
|
parts: list[Union[str, OptionalPart, FormattingPart]]
|
||||||
):
|
):
|
||||||
self._parts: List["Union[str, OptionalPart, FormattingPart]"] = parts
|
self._parts: list[Union[str, OptionalPart, FormattingPart]] = parts
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def parts(self) -> List["Union[str, OptionalPart, FormattingPart]"]:
|
def parts(self) -> list[Union[str, OptionalPart, FormattingPart]]:
|
||||||
return self._parts
|
return self._parts
|
||||||
|
|
||||||
def __str__(self) -> str:
|
def __str__(self) -> str:
|
||||||
return "<{}>".format("".join([str(p) for p in self._parts]))
|
joined_parts = "".join([str(p) for p in self._parts])
|
||||||
|
return f"<{joined_parts}>"
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
def __repr__(self) -> str:
|
||||||
return "<Optional:{}>".format("".join([str(p) for p in self._parts]))
|
joined_parts = "".join([str(p) for p in self._parts])
|
||||||
|
return f"<Optional:{joined_parts}>"
|
||||||
|
|
||||||
def format(
|
def format(
|
||||||
self,
|
self,
|
||||||
data: Dict[str, Any],
|
data: dict[str, Any],
|
||||||
result: TemplatePartResult,
|
result: TemplatePartResult,
|
||||||
) -> TemplatePartResult:
|
) -> TemplatePartResult:
|
||||||
new_result = TemplatePartResult(True)
|
new_result = TemplatePartResult(True)
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,4 @@
|
||||||
|
from __future__ import annotations
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import logging
|
import logging
|
||||||
|
|
@ -12,6 +13,8 @@ from typing import Optional
|
||||||
|
|
||||||
import xml.etree.ElementTree
|
import xml.etree.ElementTree
|
||||||
|
|
||||||
|
import clique
|
||||||
|
|
||||||
from .execute import run_subprocess
|
from .execute import run_subprocess
|
||||||
from .vendor_bin_utils import (
|
from .vendor_bin_utils import (
|
||||||
get_ffmpeg_tool_args,
|
get_ffmpeg_tool_args,
|
||||||
|
|
@ -110,6 +113,15 @@ def deprecated(new_destination):
|
||||||
return _decorator(func)
|
return _decorator(func)
|
||||||
|
|
||||||
|
|
||||||
|
class MissingRGBAChannelsError(ValueError):
|
||||||
|
"""Raised when we can't find channels to use as RGBA for conversion in
|
||||||
|
input media.
|
||||||
|
|
||||||
|
This may be other channels than solely RGBA, like Z-channel. The error is
|
||||||
|
raised when no matching 'reviewable' channel was found.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
def get_transcode_temp_directory():
|
def get_transcode_temp_directory():
|
||||||
"""Creates temporary folder for transcoding.
|
"""Creates temporary folder for transcoding.
|
||||||
|
|
||||||
|
|
@ -122,16 +134,29 @@ def get_transcode_temp_directory():
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def get_oiio_info_for_input(filepath, logger=None, subimages=False):
|
def get_oiio_info_for_input(
|
||||||
|
filepath: str,
|
||||||
|
*,
|
||||||
|
subimages: bool = False,
|
||||||
|
verbose: bool = True,
|
||||||
|
logger: logging.Logger = None,
|
||||||
|
):
|
||||||
"""Call oiiotool to get information about input and return stdout.
|
"""Call oiiotool to get information about input and return stdout.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
filepath (str): Path to file.
|
||||||
|
subimages (bool): include info about subimages in the output.
|
||||||
|
verbose (bool): get the full metadata about each input image.
|
||||||
|
logger (logging.Logger): Logger used for logging.
|
||||||
|
|
||||||
Stdout should contain xml format string.
|
Stdout should contain xml format string.
|
||||||
"""
|
"""
|
||||||
args = get_oiio_tool_args(
|
args = get_oiio_tool_args(
|
||||||
"oiiotool",
|
"oiiotool",
|
||||||
"--info",
|
"--info",
|
||||||
"-v"
|
|
||||||
)
|
)
|
||||||
|
if verbose:
|
||||||
|
args.append("-v")
|
||||||
if subimages:
|
if subimages:
|
||||||
args.append("-a")
|
args.append("-a")
|
||||||
|
|
||||||
|
|
@ -388,6 +413,10 @@ def get_review_info_by_layer_name(channel_names):
|
||||||
...
|
...
|
||||||
]
|
]
|
||||||
|
|
||||||
|
This tries to find suitable outputs good for review purposes, by
|
||||||
|
searching for channel names like RGBA, but also XYZ, Z, N, AR, AG, AB
|
||||||
|
channels.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
channel_names (list[str]): List of channel names.
|
channel_names (list[str]): List of channel names.
|
||||||
|
|
||||||
|
|
@ -396,7 +425,6 @@ def get_review_info_by_layer_name(channel_names):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
layer_names_order = []
|
layer_names_order = []
|
||||||
rgba_by_layer_name = collections.defaultdict(dict)
|
|
||||||
channels_by_layer_name = collections.defaultdict(dict)
|
channels_by_layer_name = collections.defaultdict(dict)
|
||||||
|
|
||||||
for channel_name in channel_names:
|
for channel_name in channel_names:
|
||||||
|
|
@ -405,45 +433,95 @@ def get_review_info_by_layer_name(channel_names):
|
||||||
if "." in channel_name:
|
if "." in channel_name:
|
||||||
layer_name, last_part = channel_name.rsplit(".", 1)
|
layer_name, last_part = channel_name.rsplit(".", 1)
|
||||||
|
|
||||||
channels_by_layer_name[layer_name][channel_name] = last_part
|
# R, G, B, A or X, Y, Z, N, AR, AG, AB, RED, GREEN, BLUE, ALPHA
|
||||||
if last_part.lower() not in {
|
channel = last_part.upper()
|
||||||
"r", "red",
|
if channel not in {
|
||||||
"g", "green",
|
# Detect RGBA channels
|
||||||
"b", "blue",
|
"R", "G", "B", "A",
|
||||||
"a", "alpha"
|
# Support fully written out rgba channel names
|
||||||
|
"RED", "GREEN", "BLUE", "ALPHA",
|
||||||
|
# Allow detecting of x, y and z channels, and normal channels
|
||||||
|
"X", "Y", "Z", "N",
|
||||||
|
# red, green and blue alpha/opacity, for colored mattes
|
||||||
|
"AR", "AG", "AB"
|
||||||
}:
|
}:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if layer_name not in layer_names_order:
|
if layer_name not in layer_names_order:
|
||||||
layer_names_order.append(layer_name)
|
layer_names_order.append(layer_name)
|
||||||
# R, G, B or A
|
|
||||||
channel = last_part[0].upper()
|
channels_by_layer_name[layer_name][channel] = channel_name
|
||||||
rgba_by_layer_name[layer_name][channel] = channel_name
|
|
||||||
|
|
||||||
# Put empty layer or 'rgba' to the beginning of the list
|
# Put empty layer or 'rgba' to the beginning of the list
|
||||||
# - if input has R, G, B, A channels they should be used for review
|
# - if input has R, G, B, A channels they should be used for review
|
||||||
# NOTE They are iterated in reversed order because they're inserted to
|
def _sort(_layer_name: str) -> int:
|
||||||
# the beginning of 'layer_names_order' -> last added will be first.
|
# Prioritize "" layer name
|
||||||
for name in reversed(["", "rgba"]):
|
# Prioritize layers with RGB channels
|
||||||
if name in layer_names_order:
|
if _layer_name == "rgba":
|
||||||
layer_names_order.remove(name)
|
return 0
|
||||||
layer_names_order.insert(0, name)
|
|
||||||
|
if _layer_name == "":
|
||||||
|
return 1
|
||||||
|
|
||||||
|
channels = channels_by_layer_name[_layer_name]
|
||||||
|
if all(channel in channels for channel in "RGB"):
|
||||||
|
return 2
|
||||||
|
return 10
|
||||||
|
layer_names_order.sort(key=_sort)
|
||||||
|
|
||||||
output = []
|
output = []
|
||||||
for layer_name in layer_names_order:
|
for layer_name in layer_names_order:
|
||||||
rgba_layer_info = rgba_by_layer_name[layer_name]
|
channel_info = channels_by_layer_name[layer_name]
|
||||||
red = rgba_layer_info.get("R")
|
|
||||||
green = rgba_layer_info.get("G")
|
alpha = channel_info.get("A")
|
||||||
blue = rgba_layer_info.get("B")
|
|
||||||
if not red or not green or not blue:
|
# RGB channels
|
||||||
|
if all(channel in channel_info for channel in "RGB"):
|
||||||
|
rgb = "R", "G", "B"
|
||||||
|
|
||||||
|
# RGB channels using fully written out channel names
|
||||||
|
elif all(
|
||||||
|
channel in channel_info
|
||||||
|
for channel in ("RED", "GREEN", "BLUE")
|
||||||
|
):
|
||||||
|
rgb = "RED", "GREEN", "BLUE"
|
||||||
|
alpha = channel_info.get("ALPHA")
|
||||||
|
|
||||||
|
# XYZ channels (position pass)
|
||||||
|
elif all(channel in channel_info for channel in "XYZ"):
|
||||||
|
rgb = "X", "Y", "Z"
|
||||||
|
|
||||||
|
# Colored mattes (as defined in OpenEXR Channel Name standards)
|
||||||
|
elif all(channel in channel_info for channel in ("AR", "AG", "AB")):
|
||||||
|
rgb = "AR", "AG", "AB"
|
||||||
|
|
||||||
|
# Luminance channel (as defined in OpenEXR Channel Name standards)
|
||||||
|
elif "Y" in channel_info:
|
||||||
|
rgb = "Y", "Y", "Y"
|
||||||
|
|
||||||
|
# Has only Z channel (Z-depth layer)
|
||||||
|
elif "Z" in channel_info:
|
||||||
|
rgb = "Z", "Z", "Z"
|
||||||
|
|
||||||
|
# Has only A channel (Alpha layer)
|
||||||
|
elif "A" in channel_info:
|
||||||
|
rgb = "A", "A", "A"
|
||||||
|
alpha = None
|
||||||
|
|
||||||
|
else:
|
||||||
|
# No reviewable channels found
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
red = channel_info[rgb[0]]
|
||||||
|
green = channel_info[rgb[1]]
|
||||||
|
blue = channel_info[rgb[2]]
|
||||||
output.append({
|
output.append({
|
||||||
"name": layer_name,
|
"name": layer_name,
|
||||||
"review_channels": {
|
"review_channels": {
|
||||||
"R": red,
|
"R": red,
|
||||||
"G": green,
|
"G": green,
|
||||||
"B": blue,
|
"B": blue,
|
||||||
"A": rgba_layer_info.get("A"),
|
"A": alpha,
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
return output
|
return output
|
||||||
|
|
@ -508,7 +586,10 @@ def get_review_layer_name(src_filepath):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
# Load info about file from oiio tool
|
# Load info about file from oiio tool
|
||||||
input_info = get_oiio_info_for_input(src_filepath)
|
input_info = get_oiio_info_for_input(
|
||||||
|
src_filepath,
|
||||||
|
verbose=False,
|
||||||
|
)
|
||||||
if not input_info:
|
if not input_info:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
@ -572,6 +653,37 @@ def should_convert_for_ffmpeg(src_filepath):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def _get_attributes_to_erase(
|
||||||
|
input_info: dict, logger: logging.Logger
|
||||||
|
) -> list[str]:
|
||||||
|
"""FFMPEG does not support some attributes in metadata."""
|
||||||
|
erase_attrs: dict[str, str] = {} # Attr name to reason mapping
|
||||||
|
for attr_name, attr_value in input_info["attribs"].items():
|
||||||
|
if not isinstance(attr_value, str):
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Remove attributes that have string value longer than allowed length
|
||||||
|
# for ffmpeg or when contain prohibited symbols
|
||||||
|
if len(attr_value) > MAX_FFMPEG_STRING_LEN:
|
||||||
|
reason = f"has too long value ({len(attr_value)} chars)."
|
||||||
|
erase_attrs[attr_name] = reason
|
||||||
|
continue
|
||||||
|
|
||||||
|
for char in NOT_ALLOWED_FFMPEG_CHARS:
|
||||||
|
if char not in attr_value:
|
||||||
|
continue
|
||||||
|
reason = f"contains unsupported character \"{char}\"."
|
||||||
|
erase_attrs[attr_name] = reason
|
||||||
|
break
|
||||||
|
|
||||||
|
for attr_name, reason in erase_attrs.items():
|
||||||
|
logger.info(
|
||||||
|
f"Removed attribute \"{attr_name}\" from metadata"
|
||||||
|
f" because {reason}."
|
||||||
|
)
|
||||||
|
return list(erase_attrs.keys())
|
||||||
|
|
||||||
|
|
||||||
def convert_input_paths_for_ffmpeg(
|
def convert_input_paths_for_ffmpeg(
|
||||||
input_paths,
|
input_paths,
|
||||||
output_dir,
|
output_dir,
|
||||||
|
|
@ -597,7 +709,7 @@ def convert_input_paths_for_ffmpeg(
|
||||||
|
|
||||||
Raises:
|
Raises:
|
||||||
ValueError: If input filepath has extension not supported by function.
|
ValueError: If input filepath has extension not supported by function.
|
||||||
Currently is supported only ".exr" extension.
|
Currently, only ".exr" extension is supported.
|
||||||
"""
|
"""
|
||||||
if logger is None:
|
if logger is None:
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
@ -622,7 +734,22 @@ def convert_input_paths_for_ffmpeg(
|
||||||
# Collect channels to export
|
# Collect channels to export
|
||||||
input_arg, channels_arg = get_oiio_input_and_channel_args(input_info)
|
input_arg, channels_arg = get_oiio_input_and_channel_args(input_info)
|
||||||
|
|
||||||
for input_path in input_paths:
|
# Find which attributes to strip
|
||||||
|
erase_attributes: list[str] = _get_attributes_to_erase(
|
||||||
|
input_info, logger=logger
|
||||||
|
)
|
||||||
|
|
||||||
|
# clique.PATTERNS["frames"] supports only `.1001.exr` not `_1001.exr` so
|
||||||
|
# we use a customized pattern.
|
||||||
|
pattern = "[_.](?P<index>(?P<padding>0*)\\d+)\\.\\D+\\d?$"
|
||||||
|
input_collections, input_remainder = clique.assemble(
|
||||||
|
input_paths,
|
||||||
|
patterns=[pattern],
|
||||||
|
assume_padded_when_ambiguous=True,
|
||||||
|
)
|
||||||
|
input_items = list(input_collections)
|
||||||
|
input_items.extend(input_remainder)
|
||||||
|
for input_item in input_items:
|
||||||
# Prepare subprocess arguments
|
# Prepare subprocess arguments
|
||||||
oiio_cmd = get_oiio_tool_args(
|
oiio_cmd = get_oiio_tool_args(
|
||||||
"oiiotool",
|
"oiiotool",
|
||||||
|
|
@ -633,8 +760,23 @@ def convert_input_paths_for_ffmpeg(
|
||||||
if compression:
|
if compression:
|
||||||
oiio_cmd.extend(["--compression", compression])
|
oiio_cmd.extend(["--compression", compression])
|
||||||
|
|
||||||
|
# Convert a sequence of files using a single oiiotool command
|
||||||
|
# using its sequence syntax
|
||||||
|
if isinstance(input_item, clique.Collection):
|
||||||
|
frames = input_item.format("{head}#{tail}").replace(" ", "")
|
||||||
oiio_cmd.extend([
|
oiio_cmd.extend([
|
||||||
input_arg, input_path,
|
"--framepadding", input_item.padding,
|
||||||
|
"--frames", frames,
|
||||||
|
"--parallel-frames"
|
||||||
|
])
|
||||||
|
input_item: str = input_item.format("{head}#{tail}")
|
||||||
|
elif not isinstance(input_item, str):
|
||||||
|
raise TypeError(
|
||||||
|
f"Input is not a string or Collection: {input_item}"
|
||||||
|
)
|
||||||
|
|
||||||
|
oiio_cmd.extend([
|
||||||
|
input_arg, input_item,
|
||||||
# Tell oiiotool which channels should be put to top stack
|
# Tell oiiotool which channels should be put to top stack
|
||||||
# (and output)
|
# (and output)
|
||||||
"--ch", channels_arg,
|
"--ch", channels_arg,
|
||||||
|
|
@ -642,38 +784,11 @@ def convert_input_paths_for_ffmpeg(
|
||||||
"--subimage", "0"
|
"--subimage", "0"
|
||||||
])
|
])
|
||||||
|
|
||||||
for attr_name, attr_value in input_info["attribs"].items():
|
for attr_name in erase_attributes:
|
||||||
if not isinstance(attr_value, str):
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Remove attributes that have string value longer than allowed
|
|
||||||
# length for ffmpeg or when containing prohibited symbols
|
|
||||||
erase_reason = "Missing reason"
|
|
||||||
erase_attribute = False
|
|
||||||
if len(attr_value) > MAX_FFMPEG_STRING_LEN:
|
|
||||||
erase_reason = "has too long value ({} chars).".format(
|
|
||||||
len(attr_value)
|
|
||||||
)
|
|
||||||
erase_attribute = True
|
|
||||||
|
|
||||||
if not erase_attribute:
|
|
||||||
for char in NOT_ALLOWED_FFMPEG_CHARS:
|
|
||||||
if char in attr_value:
|
|
||||||
erase_attribute = True
|
|
||||||
erase_reason = (
|
|
||||||
"contains unsupported character \"{}\"."
|
|
||||||
).format(char)
|
|
||||||
break
|
|
||||||
|
|
||||||
if erase_attribute:
|
|
||||||
# Set attribute to empty string
|
|
||||||
logger.info((
|
|
||||||
"Removed attribute \"{}\" from metadata because {}."
|
|
||||||
).format(attr_name, erase_reason))
|
|
||||||
oiio_cmd.extend(["--eraseattrib", attr_name])
|
oiio_cmd.extend(["--eraseattrib", attr_name])
|
||||||
|
|
||||||
# Add last argument - path to output
|
# Add last argument - path to output
|
||||||
base_filename = os.path.basename(input_path)
|
base_filename = os.path.basename(input_item)
|
||||||
output_path = os.path.join(output_dir, base_filename)
|
output_path = os.path.join(output_dir, base_filename)
|
||||||
oiio_cmd.extend([
|
oiio_cmd.extend([
|
||||||
"-o", output_path
|
"-o", output_path
|
||||||
|
|
@ -1074,7 +1189,10 @@ def oiio_color_convert(
|
||||||
target_display=None,
|
target_display=None,
|
||||||
target_view=None,
|
target_view=None,
|
||||||
additional_command_args=None,
|
additional_command_args=None,
|
||||||
logger=None,
|
frames: Optional[str] = None,
|
||||||
|
frame_padding: Optional[int] = None,
|
||||||
|
parallel_frames: bool = False,
|
||||||
|
logger: Optional[logging.Logger] = None,
|
||||||
):
|
):
|
||||||
"""Transcode source file to other with colormanagement.
|
"""Transcode source file to other with colormanagement.
|
||||||
|
|
||||||
|
|
@ -1086,7 +1204,7 @@ def oiio_color_convert(
|
||||||
input_path (str): Path that should be converted. It is expected that
|
input_path (str): Path that should be converted. It is expected that
|
||||||
contains single file or image sequence of same type
|
contains single file or image sequence of same type
|
||||||
(sequence in format 'file.FRAMESTART-FRAMEEND#.ext', see oiio docs,
|
(sequence in format 'file.FRAMESTART-FRAMEEND#.ext', see oiio docs,
|
||||||
eg `big.1-3#.tif`)
|
eg `big.1-3#.tif` or `big.1-3%d.ext` with `frames` argument)
|
||||||
output_path (str): Path to output filename.
|
output_path (str): Path to output filename.
|
||||||
(must follow format of 'input_path', eg. single file or
|
(must follow format of 'input_path', eg. single file or
|
||||||
sequence in 'file.FRAMESTART-FRAMEEND#.ext', `output.1-3#.tif`)
|
sequence in 'file.FRAMESTART-FRAMEEND#.ext', `output.1-3#.tif`)
|
||||||
|
|
@ -1107,6 +1225,13 @@ def oiio_color_convert(
|
||||||
both 'view' and 'display' must be filled (if 'target_colorspace')
|
both 'view' and 'display' must be filled (if 'target_colorspace')
|
||||||
additional_command_args (list): arguments for oiiotool (like binary
|
additional_command_args (list): arguments for oiiotool (like binary
|
||||||
depth for .dpx)
|
depth for .dpx)
|
||||||
|
frames (Optional[str]): Complex frame range to process. This requires
|
||||||
|
input path and output path to use frame token placeholder like
|
||||||
|
`#` or `%d`, e.g. file.#.exr
|
||||||
|
frame_padding (Optional[int]): Frame padding to use for the input and
|
||||||
|
output when using a sequence filepath.
|
||||||
|
parallel_frames (bool): If True, process frames in parallel inside
|
||||||
|
the `oiiotool` process. Only supported in OIIO 2.5.20.0+.
|
||||||
logger (logging.Logger): Logger used for logging.
|
logger (logging.Logger): Logger used for logging.
|
||||||
|
|
||||||
Raises:
|
Raises:
|
||||||
|
|
@ -1116,7 +1241,20 @@ def oiio_color_convert(
|
||||||
if logger is None:
|
if logger is None:
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
input_info = get_oiio_info_for_input(input_path, logger=logger)
|
# Get oiioinfo only from first image, otherwise file can't be found
|
||||||
|
first_input_path = input_path
|
||||||
|
if frames:
|
||||||
|
frames: str
|
||||||
|
first_frame = int(re.split("[ x-]", frames, 1)[0])
|
||||||
|
first_frame = str(first_frame).zfill(frame_padding or 0)
|
||||||
|
for token in ["#", "%d"]:
|
||||||
|
first_input_path = first_input_path.replace(token, first_frame)
|
||||||
|
|
||||||
|
input_info = get_oiio_info_for_input(
|
||||||
|
first_input_path,
|
||||||
|
verbose=False,
|
||||||
|
logger=logger,
|
||||||
|
)
|
||||||
|
|
||||||
# Collect channels to export
|
# Collect channels to export
|
||||||
input_arg, channels_arg = get_oiio_input_and_channel_args(input_info)
|
input_arg, channels_arg = get_oiio_input_and_channel_args(input_info)
|
||||||
|
|
@ -1129,6 +1267,22 @@ def oiio_color_convert(
|
||||||
"--colorconfig", config_path
|
"--colorconfig", config_path
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if frames:
|
||||||
|
# If `frames` is specified, then process the input and output
|
||||||
|
# as if it's a sequence of frames (must contain `%04d` as frame
|
||||||
|
# token placeholder in filepaths)
|
||||||
|
oiio_cmd.extend([
|
||||||
|
"--frames", frames,
|
||||||
|
])
|
||||||
|
|
||||||
|
if frame_padding:
|
||||||
|
oiio_cmd.extend([
|
||||||
|
"--framepadding", str(frame_padding),
|
||||||
|
])
|
||||||
|
|
||||||
|
if parallel_frames:
|
||||||
|
oiio_cmd.append("--parallel-frames")
|
||||||
|
|
||||||
oiio_cmd.extend([
|
oiio_cmd.extend([
|
||||||
input_arg, input_path,
|
input_arg, input_path,
|
||||||
# Tell oiiotool which channels should be put to top stack
|
# Tell oiiotool which channels should be put to top stack
|
||||||
|
|
@ -1170,31 +1324,45 @@ def oiio_color_convert(
|
||||||
# Handle the different conversion cases
|
# Handle the different conversion cases
|
||||||
# Source view and display are known
|
# Source view and display are known
|
||||||
if source_view and source_display:
|
if source_view and source_display:
|
||||||
|
color_convert_args = None
|
||||||
|
ocio_display_args = None
|
||||||
if target_colorspace:
|
if target_colorspace:
|
||||||
# This is a two-step conversion process since there's no direct
|
# This is a two-step conversion process since there's no direct
|
||||||
# display/view to colorspace command
|
# display/view to colorspace command
|
||||||
# This could be a config parameter or determined from OCIO config
|
# This could be a config parameter or determined from OCIO config
|
||||||
# Use temporarty role space 'scene_linear'
|
# Use temporary role space 'scene_linear'
|
||||||
color_convert_args = ("scene_linear", target_colorspace)
|
color_convert_args = ("scene_linear", target_colorspace)
|
||||||
elif source_display != target_display or source_view != target_view:
|
elif source_display != target_display or source_view != target_view:
|
||||||
# Complete display/view pair conversion
|
# Complete display/view pair conversion
|
||||||
# - go through a reference space
|
# - go through a reference space
|
||||||
color_convert_args = (target_display, target_view)
|
ocio_display_args = (target_display, target_view)
|
||||||
else:
|
else:
|
||||||
color_convert_args = None
|
|
||||||
logger.debug(
|
logger.debug(
|
||||||
"Source and target display/view pairs are identical."
|
"Source and target display/view pairs are identical."
|
||||||
" No color conversion needed."
|
" No color conversion needed."
|
||||||
)
|
)
|
||||||
|
|
||||||
if color_convert_args:
|
if color_convert_args or ocio_display_args:
|
||||||
|
# Invert source display/view so that we can go from there to the
|
||||||
|
# target colorspace or display/view
|
||||||
oiio_cmd.extend([
|
oiio_cmd.extend([
|
||||||
"--ociodisplay:inverse=1:subimages=0",
|
"--ociodisplay:inverse=1:subimages=0",
|
||||||
source_display,
|
source_display,
|
||||||
source_view,
|
source_view,
|
||||||
|
])
|
||||||
|
|
||||||
|
if color_convert_args:
|
||||||
|
# Use colorconvert for colorspace target
|
||||||
|
oiio_cmd.extend([
|
||||||
"--colorconvert:subimages=0",
|
"--colorconvert:subimages=0",
|
||||||
*color_convert_args
|
*color_convert_args
|
||||||
])
|
])
|
||||||
|
elif ocio_display_args:
|
||||||
|
# Use ociodisplay for display/view target
|
||||||
|
oiio_cmd.extend([
|
||||||
|
"--ociodisplay:subimages=0",
|
||||||
|
*ocio_display_args
|
||||||
|
])
|
||||||
|
|
||||||
elif target_colorspace:
|
elif target_colorspace:
|
||||||
# Standard color space to color space conversion
|
# Standard color space to color space conversion
|
||||||
|
|
@ -1219,24 +1387,6 @@ def oiio_color_convert(
|
||||||
run_subprocess(oiio_cmd, logger=logger)
|
run_subprocess(oiio_cmd, logger=logger)
|
||||||
|
|
||||||
|
|
||||||
def split_cmd_args(in_args):
|
|
||||||
"""Makes sure all entered arguments are separated in individual items.
|
|
||||||
|
|
||||||
Split each argument string with " -" to identify if string contains
|
|
||||||
one or more arguments.
|
|
||||||
Args:
|
|
||||||
in_args (list): of arguments ['-n', '-d uint10']
|
|
||||||
Returns
|
|
||||||
(list): ['-n', '-d', 'unint10']
|
|
||||||
"""
|
|
||||||
splitted_args = []
|
|
||||||
for arg in in_args:
|
|
||||||
if not arg.strip():
|
|
||||||
continue
|
|
||||||
splitted_args.extend(arg.split(" "))
|
|
||||||
return splitted_args
|
|
||||||
|
|
||||||
|
|
||||||
def get_rescaled_command_arguments(
|
def get_rescaled_command_arguments(
|
||||||
application,
|
application,
|
||||||
input_path,
|
input_path,
|
||||||
|
|
@ -1318,7 +1468,11 @@ def get_rescaled_command_arguments(
|
||||||
command_args.extend(["-vf", "{0},{1}".format(scale, pad)])
|
command_args.extend(["-vf", "{0},{1}".format(scale, pad)])
|
||||||
|
|
||||||
elif application == "oiiotool":
|
elif application == "oiiotool":
|
||||||
input_info = get_oiio_info_for_input(input_path, logger=log)
|
input_info = get_oiio_info_for_input(
|
||||||
|
input_path,
|
||||||
|
verbose=False,
|
||||||
|
logger=log,
|
||||||
|
)
|
||||||
# Collect channels to export
|
# Collect channels to export
|
||||||
_, channels_arg = get_oiio_input_and_channel_args(
|
_, channels_arg = get_oiio_input_and_channel_args(
|
||||||
input_info, alpha_default=1.0)
|
input_info, alpha_default=1.0)
|
||||||
|
|
@ -1409,7 +1563,11 @@ def _get_image_dimensions(application, input_path, log):
|
||||||
# fallback for weird files with width=0, height=0
|
# fallback for weird files with width=0, height=0
|
||||||
if (input_width == 0 or input_height == 0) and application == "oiiotool":
|
if (input_width == 0 or input_height == 0) and application == "oiiotool":
|
||||||
# Load info about file from oiio tool
|
# Load info about file from oiio tool
|
||||||
input_info = get_oiio_info_for_input(input_path, logger=log)
|
input_info = get_oiio_info_for_input(
|
||||||
|
input_path,
|
||||||
|
verbose=False,
|
||||||
|
logger=log,
|
||||||
|
)
|
||||||
if input_info:
|
if input_info:
|
||||||
input_width = int(input_info["width"])
|
input_width = int(input_info["width"])
|
||||||
input_height = int(input_info["height"])
|
input_height = int(input_info["height"])
|
||||||
|
|
@ -1458,17 +1616,21 @@ def get_oiio_input_and_channel_args(oiio_input_info, alpha_default=None):
|
||||||
"""Get input and channel arguments for oiiotool.
|
"""Get input and channel arguments for oiiotool.
|
||||||
Args:
|
Args:
|
||||||
oiio_input_info (dict): Information about input from oiio tool.
|
oiio_input_info (dict): Information about input from oiio tool.
|
||||||
Should be output of function `get_oiio_info_for_input`.
|
Should be output of function 'get_oiio_info_for_input' (can be
|
||||||
|
called with 'verbose=False').
|
||||||
alpha_default (float, optional): Default value for alpha channel.
|
alpha_default (float, optional): Default value for alpha channel.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
tuple[str, str]: Tuple of input and channel arguments.
|
tuple[str, str]: Tuple of input and channel arguments.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
channel_names = oiio_input_info["channelnames"]
|
channel_names = oiio_input_info["channelnames"]
|
||||||
review_channels = get_convert_rgb_channels(channel_names)
|
review_channels = get_convert_rgb_channels(channel_names)
|
||||||
|
|
||||||
if review_channels is None:
|
if review_channels is None:
|
||||||
raise ValueError(
|
raise MissingRGBAChannelsError(
|
||||||
"Couldn't find channels that can be used for conversion."
|
"Couldn't find channels that can be used for conversion "
|
||||||
|
f"among channels: {channel_names}."
|
||||||
)
|
)
|
||||||
|
|
||||||
red, green, blue, alpha = review_channels
|
red, green, blue, alpha = review_channels
|
||||||
|
|
@ -1482,7 +1644,8 @@ def get_oiio_input_and_channel_args(oiio_input_info, alpha_default=None):
|
||||||
channels_arg += ",A={}".format(float(alpha_default))
|
channels_arg += ",A={}".format(float(alpha_default))
|
||||||
input_channels.append("A")
|
input_channels.append("A")
|
||||||
|
|
||||||
input_channels_str = ",".join(input_channels)
|
# Make sure channels are unique, but preserve order to avoid oiiotool crash
|
||||||
|
input_channels_str = ",".join(list(dict.fromkeys(input_channels)))
|
||||||
|
|
||||||
subimages = oiio_input_info.get("subimages")
|
subimages = oiio_input_info.get("subimages")
|
||||||
input_arg = "-i"
|
input_arg = "-i"
|
||||||
|
|
|
||||||
62
client/ayon_core/pipeline/actions/__init__.py
Normal file
62
client/ayon_core/pipeline/actions/__init__.py
Normal file
|
|
@ -0,0 +1,62 @@
|
||||||
|
from .structures import (
|
||||||
|
ActionForm,
|
||||||
|
)
|
||||||
|
from .utils import (
|
||||||
|
webaction_fields_to_attribute_defs,
|
||||||
|
)
|
||||||
|
from .loader import (
|
||||||
|
LoaderSelectedType,
|
||||||
|
LoaderActionResult,
|
||||||
|
LoaderActionItem,
|
||||||
|
LoaderActionPlugin,
|
||||||
|
LoaderActionSelection,
|
||||||
|
LoaderActionsContext,
|
||||||
|
SelectionEntitiesCache,
|
||||||
|
LoaderSimpleActionPlugin,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .launcher import (
|
||||||
|
LauncherAction,
|
||||||
|
LauncherActionSelection,
|
||||||
|
discover_launcher_actions,
|
||||||
|
register_launcher_action,
|
||||||
|
register_launcher_action_path,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .inventory import (
|
||||||
|
InventoryAction,
|
||||||
|
discover_inventory_actions,
|
||||||
|
register_inventory_action,
|
||||||
|
register_inventory_action_path,
|
||||||
|
|
||||||
|
deregister_inventory_action,
|
||||||
|
deregister_inventory_action_path,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"ActionForm",
|
||||||
|
"webaction_fields_to_attribute_defs",
|
||||||
|
|
||||||
|
"LoaderSelectedType",
|
||||||
|
"LoaderActionResult",
|
||||||
|
"LoaderActionItem",
|
||||||
|
"LoaderActionPlugin",
|
||||||
|
"LoaderActionSelection",
|
||||||
|
"LoaderActionsContext",
|
||||||
|
"SelectionEntitiesCache",
|
||||||
|
"LoaderSimpleActionPlugin",
|
||||||
|
|
||||||
|
"LauncherAction",
|
||||||
|
"LauncherActionSelection",
|
||||||
|
"discover_launcher_actions",
|
||||||
|
"register_launcher_action",
|
||||||
|
"register_launcher_action_path",
|
||||||
|
|
||||||
|
"InventoryAction",
|
||||||
|
"discover_inventory_actions",
|
||||||
|
"register_inventory_action",
|
||||||
|
"register_inventory_action_path",
|
||||||
|
"deregister_inventory_action",
|
||||||
|
"deregister_inventory_action_path",
|
||||||
|
)
|
||||||
108
client/ayon_core/pipeline/actions/inventory.py
Normal file
108
client/ayon_core/pipeline/actions/inventory.py
Normal file
|
|
@ -0,0 +1,108 @@
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from ayon_core.pipeline.plugin_discover import (
|
||||||
|
discover,
|
||||||
|
register_plugin,
|
||||||
|
register_plugin_path,
|
||||||
|
deregister_plugin,
|
||||||
|
deregister_plugin_path
|
||||||
|
)
|
||||||
|
from ayon_core.pipeline.load.utils import get_representation_path_from_context
|
||||||
|
|
||||||
|
|
||||||
|
class InventoryAction:
|
||||||
|
"""A custom action for the scene inventory tool
|
||||||
|
|
||||||
|
If registered the action will be visible in the Right Mouse Button menu
|
||||||
|
under the submenu "Actions".
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
label = None
|
||||||
|
icon = None
|
||||||
|
color = None
|
||||||
|
order = 0
|
||||||
|
|
||||||
|
log = logging.getLogger("InventoryAction")
|
||||||
|
log.propagate = True
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def is_compatible(container):
|
||||||
|
"""Override function in a custom class
|
||||||
|
|
||||||
|
This method is specifically used to ensure the action can operate on
|
||||||
|
the container.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
container(dict): the data of a loaded asset, see host.ls()
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool
|
||||||
|
"""
|
||||||
|
return bool(container.get("objectName"))
|
||||||
|
|
||||||
|
def process(self, containers):
|
||||||
|
"""Override function in a custom class
|
||||||
|
|
||||||
|
This method will receive all containers even those which are
|
||||||
|
incompatible. It is advised to create a small filter along the lines
|
||||||
|
of this example:
|
||||||
|
|
||||||
|
valid_containers = filter(self.is_compatible(c) for c in containers)
|
||||||
|
|
||||||
|
The return value will need to be a True-ish value to trigger
|
||||||
|
the data_changed signal in order to refresh the view.
|
||||||
|
|
||||||
|
You can return a list of container names to trigger GUI to select
|
||||||
|
treeview items.
|
||||||
|
|
||||||
|
You can return a dict to carry extra GUI options. For example:
|
||||||
|
{
|
||||||
|
"objectNames": [container names...],
|
||||||
|
"options": {"mode": "toggle",
|
||||||
|
"clear": False}
|
||||||
|
}
|
||||||
|
Currently workable GUI options are:
|
||||||
|
- clear (bool): Clear current selection before selecting by action.
|
||||||
|
Default `True`.
|
||||||
|
- mode (str): selection mode, use one of these:
|
||||||
|
"select", "deselect", "toggle". Default is "select".
|
||||||
|
|
||||||
|
Args:
|
||||||
|
containers (list): list of dictionaries
|
||||||
|
|
||||||
|
Return:
|
||||||
|
bool, list or dict
|
||||||
|
|
||||||
|
"""
|
||||||
|
return True
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def filepath_from_context(cls, context):
|
||||||
|
return get_representation_path_from_context(context)
|
||||||
|
|
||||||
|
|
||||||
|
def discover_inventory_actions():
|
||||||
|
actions = discover(InventoryAction)
|
||||||
|
filtered_actions = []
|
||||||
|
for action in actions:
|
||||||
|
if action is not InventoryAction:
|
||||||
|
filtered_actions.append(action)
|
||||||
|
|
||||||
|
return filtered_actions
|
||||||
|
|
||||||
|
|
||||||
|
def register_inventory_action(plugin):
|
||||||
|
return register_plugin(InventoryAction, plugin)
|
||||||
|
|
||||||
|
|
||||||
|
def deregister_inventory_action(plugin):
|
||||||
|
deregister_plugin(InventoryAction, plugin)
|
||||||
|
|
||||||
|
|
||||||
|
def register_inventory_action_path(path):
|
||||||
|
return register_plugin_path(InventoryAction, path)
|
||||||
|
|
||||||
|
|
||||||
|
def deregister_inventory_action_path(path):
|
||||||
|
return deregister_plugin_path(InventoryAction, path)
|
||||||
|
|
@ -8,12 +8,8 @@ from ayon_core.pipeline.plugin_discover import (
|
||||||
discover,
|
discover,
|
||||||
register_plugin,
|
register_plugin,
|
||||||
register_plugin_path,
|
register_plugin_path,
|
||||||
deregister_plugin,
|
|
||||||
deregister_plugin_path
|
|
||||||
)
|
)
|
||||||
|
|
||||||
from .load.utils import get_representation_path_from_context
|
|
||||||
|
|
||||||
|
|
||||||
class LauncherActionSelection:
|
class LauncherActionSelection:
|
||||||
"""Object helper to pass selection to actions.
|
"""Object helper to pass selection to actions.
|
||||||
|
|
@ -390,79 +386,6 @@ class LauncherAction(object):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class InventoryAction(object):
|
|
||||||
"""A custom action for the scene inventory tool
|
|
||||||
|
|
||||||
If registered the action will be visible in the Right Mouse Button menu
|
|
||||||
under the submenu "Actions".
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
label = None
|
|
||||||
icon = None
|
|
||||||
color = None
|
|
||||||
order = 0
|
|
||||||
|
|
||||||
log = logging.getLogger("InventoryAction")
|
|
||||||
log.propagate = True
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def is_compatible(container):
|
|
||||||
"""Override function in a custom class
|
|
||||||
|
|
||||||
This method is specifically used to ensure the action can operate on
|
|
||||||
the container.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
container(dict): the data of a loaded asset, see host.ls()
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
bool
|
|
||||||
"""
|
|
||||||
return bool(container.get("objectName"))
|
|
||||||
|
|
||||||
def process(self, containers):
|
|
||||||
"""Override function in a custom class
|
|
||||||
|
|
||||||
This method will receive all containers even those which are
|
|
||||||
incompatible. It is advised to create a small filter along the lines
|
|
||||||
of this example:
|
|
||||||
|
|
||||||
valid_containers = filter(self.is_compatible(c) for c in containers)
|
|
||||||
|
|
||||||
The return value will need to be a True-ish value to trigger
|
|
||||||
the data_changed signal in order to refresh the view.
|
|
||||||
|
|
||||||
You can return a list of container names to trigger GUI to select
|
|
||||||
treeview items.
|
|
||||||
|
|
||||||
You can return a dict to carry extra GUI options. For example:
|
|
||||||
{
|
|
||||||
"objectNames": [container names...],
|
|
||||||
"options": {"mode": "toggle",
|
|
||||||
"clear": False}
|
|
||||||
}
|
|
||||||
Currently workable GUI options are:
|
|
||||||
- clear (bool): Clear current selection before selecting by action.
|
|
||||||
Default `True`.
|
|
||||||
- mode (str): selection mode, use one of these:
|
|
||||||
"select", "deselect", "toggle". Default is "select".
|
|
||||||
|
|
||||||
Args:
|
|
||||||
containers (list): list of dictionaries
|
|
||||||
|
|
||||||
Return:
|
|
||||||
bool, list or dict
|
|
||||||
|
|
||||||
"""
|
|
||||||
return True
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def filepath_from_context(cls, context):
|
|
||||||
return get_representation_path_from_context(context)
|
|
||||||
|
|
||||||
|
|
||||||
# Launcher action
|
|
||||||
def discover_launcher_actions():
|
def discover_launcher_actions():
|
||||||
return discover(LauncherAction)
|
return discover(LauncherAction)
|
||||||
|
|
||||||
|
|
@ -473,30 +396,3 @@ def register_launcher_action(plugin):
|
||||||
|
|
||||||
def register_launcher_action_path(path):
|
def register_launcher_action_path(path):
|
||||||
return register_plugin_path(LauncherAction, path)
|
return register_plugin_path(LauncherAction, path)
|
||||||
|
|
||||||
|
|
||||||
# Inventory action
|
|
||||||
def discover_inventory_actions():
|
|
||||||
actions = discover(InventoryAction)
|
|
||||||
filtered_actions = []
|
|
||||||
for action in actions:
|
|
||||||
if action is not InventoryAction:
|
|
||||||
filtered_actions.append(action)
|
|
||||||
|
|
||||||
return filtered_actions
|
|
||||||
|
|
||||||
|
|
||||||
def register_inventory_action(plugin):
|
|
||||||
return register_plugin(InventoryAction, plugin)
|
|
||||||
|
|
||||||
|
|
||||||
def deregister_inventory_action(plugin):
|
|
||||||
deregister_plugin(InventoryAction, plugin)
|
|
||||||
|
|
||||||
|
|
||||||
def register_inventory_action_path(path):
|
|
||||||
return register_plugin_path(InventoryAction, path)
|
|
||||||
|
|
||||||
|
|
||||||
def deregister_inventory_action_path(path):
|
|
||||||
return deregister_plugin_path(InventoryAction, path)
|
|
||||||
882
client/ayon_core/pipeline/actions/loader.py
Normal file
882
client/ayon_core/pipeline/actions/loader.py
Normal file
|
|
@ -0,0 +1,882 @@
|
||||||
|
"""API for actions for loader tool.
|
||||||
|
|
||||||
|
Even though the api is meant for the loader tool, the api should be possible
|
||||||
|
to use in a standalone way out of the loader tool.
|
||||||
|
|
||||||
|
To use add actions, make sure your addon does inherit from
|
||||||
|
'IPluginPaths' and implements 'get_loader_action_plugin_paths' which
|
||||||
|
returns paths to python files with loader actions.
|
||||||
|
|
||||||
|
The plugin is used to collect available actions for the given context and to
|
||||||
|
execute them. Selection is defined with 'LoaderActionSelection' object
|
||||||
|
that also contains a cache of entities and project anatomy.
|
||||||
|
|
||||||
|
Implementing 'get_action_items' allows the plugin to define what actions
|
||||||
|
are shown and available for the selection. Because for a single selection
|
||||||
|
can be shown multiple actions with the same action identifier, the action
|
||||||
|
items also have 'data' attribute which can be used to store additional
|
||||||
|
data for the action (they have to be json-serializable).
|
||||||
|
|
||||||
|
The action is triggered by calling the 'execute_action' method. Which takes
|
||||||
|
the action identifier, the selection, the additional data from the action
|
||||||
|
item and form values from the form if any.
|
||||||
|
|
||||||
|
Using 'LoaderActionResult' as the output of 'execute_action' can trigger to
|
||||||
|
show a message in UI or to show an additional form ('ActionForm')
|
||||||
|
which would retrigger the action with the values from the form on
|
||||||
|
submitting. That allows handling of multistep actions.
|
||||||
|
|
||||||
|
It is also recommended that the plugin does override the 'identifier'
|
||||||
|
attribute. The identifier has to be unique across all plugins.
|
||||||
|
Class name is used by default.
|
||||||
|
|
||||||
|
The selection wrapper currently supports the following types of entity types:
|
||||||
|
- version
|
||||||
|
- representation
|
||||||
|
It is planned to add 'folder' and 'task' selection in the future.
|
||||||
|
|
||||||
|
NOTE: It is possible to trigger 'execute_action' without ever calling
|
||||||
|
'get_action_items', that can be handy in automations.
|
||||||
|
|
||||||
|
The whole logic is wrapped into 'LoaderActionsContext'. It takes care of
|
||||||
|
the discovery of plugins and wraps the collection and execution of
|
||||||
|
action items. Method 'execute_action' on context also requires plugin
|
||||||
|
identifier.
|
||||||
|
|
||||||
|
The flow of the logic is (in the loader tool):
|
||||||
|
1. User selects entities in the UI.
|
||||||
|
2. Right-click the selected entities.
|
||||||
|
3. Use 'LoaderActionsContext' to collect items using 'get_action_items'.
|
||||||
|
4. Show a menu (with submenus) in the UI.
|
||||||
|
5. If a user selects an action, the action is triggered using
|
||||||
|
'execute_action'.
|
||||||
|
5a. If the action returns 'LoaderActionResult', show a 'message' if it is
|
||||||
|
filled and show a form dialog if 'form' is filled.
|
||||||
|
5b. If the user submitted the form, trigger the action again with the
|
||||||
|
values from the form and repeat from 5a.
|
||||||
|
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import collections
|
||||||
|
import copy
|
||||||
|
import logging
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
import typing
|
||||||
|
from typing import Optional, Any, Callable
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
|
import ayon_api
|
||||||
|
|
||||||
|
from ayon_core import AYON_CORE_ROOT
|
||||||
|
from ayon_core.lib import StrEnum, Logger, is_func_signature_supported
|
||||||
|
from ayon_core.host import AbstractHost
|
||||||
|
from ayon_core.addon import AddonsManager, IPluginPaths
|
||||||
|
from ayon_core.settings import get_studio_settings, get_project_settings
|
||||||
|
from ayon_core.pipeline import Anatomy
|
||||||
|
from ayon_core.pipeline.plugin_discover import discover_plugins
|
||||||
|
|
||||||
|
from .structures import ActionForm
|
||||||
|
|
||||||
|
if typing.TYPE_CHECKING:
|
||||||
|
from typing import Union
|
||||||
|
|
||||||
|
DataBaseType = Union[str, int, float, bool]
|
||||||
|
DataType = dict[str, Union[DataBaseType, list[DataBaseType]]]
|
||||||
|
|
||||||
|
_PLACEHOLDER = object()
|
||||||
|
|
||||||
|
|
||||||
|
class LoaderSelectedType(StrEnum):
|
||||||
|
"""Selected entity type."""
|
||||||
|
# folder = "folder"
|
||||||
|
# task = "task"
|
||||||
|
version = "version"
|
||||||
|
representation = "representation"
|
||||||
|
|
||||||
|
|
||||||
|
class SelectionEntitiesCache:
|
||||||
|
"""Cache of entities used as helper in the selection wrapper.
|
||||||
|
|
||||||
|
It is possible to get entities based on ids with helper methods to get
|
||||||
|
entities, their parents or their children's entities.
|
||||||
|
|
||||||
|
The goal is to avoid multiple API calls for the same entity in multiple
|
||||||
|
action plugins.
|
||||||
|
|
||||||
|
The cache is based on the selected project. Entities are fetched
|
||||||
|
if are not in cache yet.
|
||||||
|
"""
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
project_name: str,
|
||||||
|
project_entity: Optional[dict[str, Any]] = None,
|
||||||
|
folders_by_id: Optional[dict[str, dict[str, Any]]] = None,
|
||||||
|
tasks_by_id: Optional[dict[str, dict[str, Any]]] = None,
|
||||||
|
products_by_id: Optional[dict[str, dict[str, Any]]] = None,
|
||||||
|
versions_by_id: Optional[dict[str, dict[str, Any]]] = None,
|
||||||
|
representations_by_id: Optional[dict[str, dict[str, Any]]] = None,
|
||||||
|
task_ids_by_folder_id: Optional[dict[str, set[str]]] = None,
|
||||||
|
product_ids_by_folder_id: Optional[dict[str, set[str]]] = None,
|
||||||
|
version_ids_by_product_id: Optional[dict[str, set[str]]] = None,
|
||||||
|
representation_ids_by_version_id: Optional[dict[str, set[str]]] = None,
|
||||||
|
):
|
||||||
|
self._project_name = project_name
|
||||||
|
self._project_entity = project_entity
|
||||||
|
self._folders_by_id = folders_by_id or {}
|
||||||
|
self._tasks_by_id = tasks_by_id or {}
|
||||||
|
self._products_by_id = products_by_id or {}
|
||||||
|
self._versions_by_id = versions_by_id or {}
|
||||||
|
self._representations_by_id = representations_by_id or {}
|
||||||
|
|
||||||
|
self._task_ids_by_folder_id = task_ids_by_folder_id or {}
|
||||||
|
self._product_ids_by_folder_id = product_ids_by_folder_id or {}
|
||||||
|
self._version_ids_by_product_id = version_ids_by_product_id or {}
|
||||||
|
self._representation_ids_by_version_id = (
|
||||||
|
representation_ids_by_version_id or {}
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_project(self) -> dict[str, Any]:
|
||||||
|
"""Get project entity"""
|
||||||
|
if self._project_entity is None:
|
||||||
|
self._project_entity = ayon_api.get_project(self._project_name)
|
||||||
|
return copy.deepcopy(self._project_entity)
|
||||||
|
|
||||||
|
def get_folders(
|
||||||
|
self, folder_ids: set[str]
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
return self._get_entities(
|
||||||
|
folder_ids,
|
||||||
|
self._folders_by_id,
|
||||||
|
"folder_ids",
|
||||||
|
ayon_api.get_folders,
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_tasks(
|
||||||
|
self, task_ids: set[str]
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
return self._get_entities(
|
||||||
|
task_ids,
|
||||||
|
self._tasks_by_id,
|
||||||
|
"task_ids",
|
||||||
|
ayon_api.get_tasks,
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_products(
|
||||||
|
self, product_ids: set[str]
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
return self._get_entities(
|
||||||
|
product_ids,
|
||||||
|
self._products_by_id,
|
||||||
|
"product_ids",
|
||||||
|
ayon_api.get_products,
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_versions(
|
||||||
|
self, version_ids: set[str]
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
return self._get_entities(
|
||||||
|
version_ids,
|
||||||
|
self._versions_by_id,
|
||||||
|
"version_ids",
|
||||||
|
ayon_api.get_versions,
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_representations(
|
||||||
|
self, representation_ids: set[str]
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
return self._get_entities(
|
||||||
|
representation_ids,
|
||||||
|
self._representations_by_id,
|
||||||
|
"representation_ids",
|
||||||
|
ayon_api.get_representations,
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_folders_tasks(
|
||||||
|
self, folder_ids: set[str]
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
task_ids = self._fill_parent_children_ids(
|
||||||
|
folder_ids,
|
||||||
|
"folderId",
|
||||||
|
"folder_ids",
|
||||||
|
self._task_ids_by_folder_id,
|
||||||
|
ayon_api.get_tasks,
|
||||||
|
)
|
||||||
|
return self.get_tasks(task_ids)
|
||||||
|
|
||||||
|
def get_folders_products(
|
||||||
|
self, folder_ids: set[str]
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
product_ids = self._get_folders_products_ids(folder_ids)
|
||||||
|
return self.get_products(product_ids)
|
||||||
|
|
||||||
|
def get_tasks_versions(
|
||||||
|
self, task_ids: set[str]
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
folder_ids = {
|
||||||
|
task["folderId"]
|
||||||
|
for task in self.get_tasks(task_ids)
|
||||||
|
}
|
||||||
|
product_ids = self._get_folders_products_ids(folder_ids)
|
||||||
|
output = []
|
||||||
|
for version in self.get_products_versions(product_ids):
|
||||||
|
task_id = version["taskId"]
|
||||||
|
if task_id in task_ids:
|
||||||
|
output.append(version)
|
||||||
|
return output
|
||||||
|
|
||||||
|
def get_products_versions(
|
||||||
|
self, product_ids: set[str]
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
version_ids = self._fill_parent_children_ids(
|
||||||
|
product_ids,
|
||||||
|
"productId",
|
||||||
|
"product_ids",
|
||||||
|
self._version_ids_by_product_id,
|
||||||
|
ayon_api.get_versions,
|
||||||
|
)
|
||||||
|
return self.get_versions(version_ids)
|
||||||
|
|
||||||
|
def get_versions_representations(
|
||||||
|
self, version_ids: set[str]
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
repre_ids = self._fill_parent_children_ids(
|
||||||
|
version_ids,
|
||||||
|
"versionId",
|
||||||
|
"version_ids",
|
||||||
|
self._representation_ids_by_version_id,
|
||||||
|
ayon_api.get_representations,
|
||||||
|
)
|
||||||
|
return self.get_representations(repre_ids)
|
||||||
|
|
||||||
|
def get_tasks_folders(self, task_ids: set[str]) -> list[dict[str, Any]]:
|
||||||
|
folder_ids = {
|
||||||
|
task["folderId"]
|
||||||
|
for task in self.get_tasks(task_ids)
|
||||||
|
}
|
||||||
|
return self.get_folders(folder_ids)
|
||||||
|
|
||||||
|
def get_products_folders(
|
||||||
|
self, product_ids: set[str]
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
folder_ids = {
|
||||||
|
product["folderId"]
|
||||||
|
for product in self.get_products(product_ids)
|
||||||
|
}
|
||||||
|
return self.get_folders(folder_ids)
|
||||||
|
|
||||||
|
def get_versions_products(
    self, version_ids: set[str]
) -> list[dict[str, Any]]:
    """Parent product entities of the given versions.

    Args:
        version_ids (set[str]): Version ids.

    Returns:
        list[dict[str, Any]]: Product entities of the versions.

    """
    parent_product_ids = set()
    for version_entity in self.get_versions(version_ids):
        parent_product_ids.add(version_entity["productId"])
    return self.get_products(parent_product_ids)
|
||||||
|
|
||||||
|
def get_versions_tasks(
    self, version_ids: set[str]
) -> list[dict[str, Any]]:
    """Parent task entities of the given versions.

    Versions that are not linked to any task are skipped.

    Args:
        version_ids (set[str]): Version ids.

    Returns:
        list[dict[str, Any]]: Task entities of the versions.

    """
    parent_task_ids = set()
    for version_entity in self.get_versions(version_ids):
        task_id = version_entity["taskId"]
        # Version may not be linked to any task
        if task_id:
            parent_task_ids.add(task_id)
    return self.get_tasks(parent_task_ids)
|
||||||
|
|
||||||
|
def get_representations_versions(
    self, representation_ids: set[str]
) -> list[dict[str, Any]]:
    """Parent version entities of the given representations.

    Args:
        representation_ids (set[str]): Representation ids.

    Returns:
        list[dict[str, Any]]: Version entities of the representations.

    """
    parent_version_ids = set()
    for repre_entity in self.get_representations(representation_ids):
        parent_version_ids.add(repre_entity["versionId"])
    return self.get_versions(parent_version_ids)
|
||||||
|
|
||||||
|
def _get_folders_products_ids(self, folder_ids: set[str]) -> set[str]:
|
||||||
|
return self._fill_parent_children_ids(
|
||||||
|
folder_ids,
|
||||||
|
"folderId",
|
||||||
|
"folder_ids",
|
||||||
|
self._product_ids_by_folder_id,
|
||||||
|
ayon_api.get_products,
|
||||||
|
)
|
||||||
|
|
||||||
|
def _fill_parent_children_ids(
|
||||||
|
self,
|
||||||
|
entity_ids: set[str],
|
||||||
|
parent_key: str,
|
||||||
|
filter_attr: str,
|
||||||
|
parent_mapping: dict[str, set[str]],
|
||||||
|
getter: Callable,
|
||||||
|
) -> set[str]:
|
||||||
|
if not entity_ids:
|
||||||
|
return set()
|
||||||
|
children_ids = set()
|
||||||
|
missing_ids = set()
|
||||||
|
for entity_id in entity_ids:
|
||||||
|
_children_ids = parent_mapping.get(entity_id)
|
||||||
|
if _children_ids is None:
|
||||||
|
missing_ids.add(entity_id)
|
||||||
|
else:
|
||||||
|
children_ids.update(_children_ids)
|
||||||
|
if missing_ids:
|
||||||
|
entities_by_parent_id = collections.defaultdict(set)
|
||||||
|
for entity in getter(
|
||||||
|
self._project_name,
|
||||||
|
fields={"id", parent_key},
|
||||||
|
**{filter_attr: missing_ids},
|
||||||
|
):
|
||||||
|
child_id = entity["id"]
|
||||||
|
children_ids.add(child_id)
|
||||||
|
entities_by_parent_id[entity[parent_key]].add(child_id)
|
||||||
|
|
||||||
|
for entity_id in missing_ids:
|
||||||
|
parent_mapping[entity_id] = entities_by_parent_id[entity_id]
|
||||||
|
|
||||||
|
return children_ids
|
||||||
|
|
||||||
|
def _get_entities(
|
||||||
|
self,
|
||||||
|
entity_ids: set[str],
|
||||||
|
cache_var: dict[str, Any],
|
||||||
|
filter_arg: str,
|
||||||
|
getter: Callable,
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
if not entity_ids:
|
||||||
|
return []
|
||||||
|
|
||||||
|
output = []
|
||||||
|
missing_ids: set[str] = set()
|
||||||
|
for entity_id in entity_ids:
|
||||||
|
entity = cache_var.get(entity_id)
|
||||||
|
if entity_id not in cache_var:
|
||||||
|
missing_ids.add(entity_id)
|
||||||
|
cache_var[entity_id] = None
|
||||||
|
elif entity:
|
||||||
|
output.append(entity)
|
||||||
|
|
||||||
|
if missing_ids:
|
||||||
|
for entity in getter(
|
||||||
|
self._project_name,
|
||||||
|
**{filter_arg: missing_ids}
|
||||||
|
):
|
||||||
|
output.append(entity)
|
||||||
|
cache_var[entity["id"]] = entity
|
||||||
|
return output
|
||||||
|
|
||||||
|
|
||||||
|
class LoaderActionSelection:
    """Selection of entities for loader actions.

    Describes what is selected in the loader tool: the project, the ids
    of the selected entities and which entity type they are.

    Also provides lazy access to an entity cache, project settings and
    project anatomy.

    """
    def __init__(
        self,
        project_name: str,
        selected_ids: set[str],
        selected_type: LoaderSelectedType,
        *,
        project_anatomy: Optional[Anatomy] = None,
        project_settings: Optional[dict[str, Any]] = None,
        entities_cache: Optional[SelectionEntitiesCache] = None,
    ):
        self._project_name = project_name
        self._selected_ids = selected_ids
        self._selected_type = selected_type

        self._project_anatomy = project_anatomy
        self._project_settings = project_settings

        # Create a fresh cache when caller did not pass a shared one
        self._entities_cache = (
            SelectionEntitiesCache(project_name)
            if entities_cache is None
            else entities_cache
        )

    def get_entities_cache(self) -> SelectionEntitiesCache:
        """Entity cache used to query and cache entities by id."""
        return self._entities_cache

    def get_project_name(self) -> str:
        """Name of the project the selection belongs to."""
        return self._project_name

    def get_selected_ids(self) -> set[str]:
        """Copy of the selected entity ids."""
        return set(self._selected_ids)

    def get_selected_type(self) -> str:
        """Entity type of the selected ids."""
        return self._selected_type

    def get_project_settings(self) -> dict[str, Any]:
        """Deep copy of project settings (fetched lazily, then cached)."""
        if self._project_settings is None:
            self._project_settings = get_project_settings(self._project_name)
        return copy.deepcopy(self._project_settings)

    def get_project_anatomy(self) -> Anatomy:
        """Project anatomy (created lazily, then cached)."""
        if self._project_anatomy is None:
            self._project_anatomy = Anatomy(
                self._project_name,
                project_entity=self.get_entities_cache().get_project(),
            )
        return self._project_anatomy

    project_name = property(get_project_name)
    selected_ids = property(get_selected_ids)
    selected_type = property(get_selected_type)
    project_settings = property(get_project_settings)
    project_anatomy = property(get_project_anatomy)
    entities = property(get_entities_cache)

    # --- Helper methods ---
    def versions_selected(self) -> bool:
        """Selected entity type is version.

        Returns:
            bool: True if selected entity type is version.

        """
        return self._selected_type == LoaderSelectedType.version

    def representations_selected(self) -> bool:
        """Selected entity type is representation.

        Returns:
            bool: True if selected entity type is representation.

        """
        return self._selected_type == LoaderSelectedType.representation

    def get_selected_version_entities(self) -> list[dict[str, Any]]:
        """Retrieve selected version entities.

        An empty list is returned if 'version' is not the selected
        entity type.

        Returns:
            list[dict[str, Any]]: List of selected version entities.

        """
        if not self.versions_selected():
            return []
        return self.entities.get_versions(self.selected_ids)

    def get_selected_representation_entities(self) -> list[dict[str, Any]]:
        """Retrieve selected representation entities.

        An empty list is returned if 'representation' is not the selected
        entity type.

        Returns:
            list[dict[str, Any]]: List of selected representation entities.

        """
        if not self.representations_selected():
            return []
        return self.entities.get_representations(self.selected_ids)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class LoaderActionItem:
    """Item of loader action.

    Action plugins return these items as possible actions to run for a given
    context.

    Because the action item can be related to a specific entity
    and not the whole selection, they also have to define the entity type
    and ids to be executed on.

    Attributes:
        label (str): Text shown in UI.
        order (int): Order of the action in UI.
        group_label (Optional[str]): Label of the group to which the action
            belongs.
        icon (Optional[dict[str, Any]]): Icon definition.
        data (Optional[DataType]): Action item data.
        identifier (Optional[str]): Identifier of the plugin which
            created the action item. Is filled automatically. Is not changed
            if is filled -> can lead to different plugin.

    """
    label: str
    order: int = 0
    group_label: Optional[str] = None
    icon: Optional[dict[str, Any]] = None
    data: Optional[DataType] = None
    # Filled automatically with the plugin identifier when left as None
    # (annotation fixed: default is None, so the type is Optional[str])
    identifier: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class LoaderActionResult:
    """Result of loader action execution.

    Attributes:
        message (Optional[str]): Message to show in UI.
        success (bool): If the action was successful. Affects color of
            the message.
        form (Optional[ActionForm]): Form to show in UI.
        form_values (Optional[dict[str, Any]]): Values for the form. Can be
            used if the same form is re-shown e.g. because a user forgot to
            fill a required field.

    """
    message: Optional[str] = None
    success: bool = True
    form: Optional[ActionForm] = None
    form_values: Optional[dict[str, Any]] = None

    def to_json_data(self) -> dict[str, Any]:
        """Serialize the result to JSON-compatible data.

        Returns:
            dict[str, Any]: Serialized result.

        """
        form = self.form
        if form is not None:
            form = form.to_json_data()
        return {
            "message": self.message,
            "success": self.success,
            "form": form,
            "form_values": self.form_values,
        }

    @classmethod
    def from_json_data(cls, data: dict[str, Any]) -> "LoaderActionResult":
        """Recreate the result from JSON data.

        Args:
            data (dict[str, Any]): Data created by 'to_json_data'.

        Returns:
            LoaderActionResult: Deserialized result.

        """
        # Work on a shallow copy so the caller's dict is not mutated,
        # and use 'cls' so subclasses deserialize to themselves
        # (consistent with 'ActionForm.from_json_data').
        data = dict(data)
        form = data["form"]
        if form is not None:
            data["form"] = ActionForm.from_json_data(form)
        return cls(**data)
|
||||||
|
|
||||||
|
|
||||||
|
class LoaderActionPlugin(ABC):
    """Base class of loader action plugins.

    A plugin is responsible for providing action items for a selection
    and for executing the actions it offered.

    """
    _log: Optional[logging.Logger] = None
    enabled: bool = True

    def __init__(self, context: "LoaderActionsContext") -> None:
        self._context = context
        self.apply_settings(context.get_studio_settings())

    def apply_settings(self, studio_settings: dict[str, Any]) -> None:
        """Apply studio settings to the plugin.

        Args:
            studio_settings (dict[str, Any]): Studio settings.

        """
        pass

    @property
    def log(self) -> logging.Logger:
        """Logger for this plugin (created lazily)."""
        if self._log is None:
            self._log = Logger.get_logger(self.__class__.__name__)
        return self._log

    @property
    def identifier(self) -> str:
        """Identifier of the plugin.

        Returns:
            str: Plugin identifier. Class name by default.

        """
        return self.__class__.__name__

    @property
    def host_name(self) -> Optional[str]:
        """Name of the current host."""
        return self._context.get_host_name()

    @abstractmethod
    def get_action_items(
        self, selection: LoaderActionSelection
    ) -> list[LoaderActionItem]:
        """Action items for the selection.

        Args:
            selection (LoaderActionSelection): Selection.

        Returns:
            list[LoaderActionItem]: Action items.

        """
        pass

    @abstractmethod
    def execute_action(
        self,
        selection: LoaderActionSelection,
        data: Optional[DataType],
        form_values: dict[str, Any],
    ) -> Optional[LoaderActionResult]:
        """Execute an action.

        Args:
            selection (LoaderActionSelection): Selection wrapper. Can be used
                to get entities or get context of original selection.
            data (Optional[DataType]): Additional action item data.
            form_values (dict[str, Any]): Attribute values.

        Returns:
            Optional[LoaderActionResult]: Result of the action execution.

        """
        pass
|
||||||
|
|
||||||
|
|
||||||
|
class LoaderActionsContext:
    """Wrapper for loader actions and their logic.

    Exposes the public api of loader actions and handles internal logic
    like discovery and initialization of plugins.

    """
    def __init__(
        self,
        studio_settings: Optional[dict[str, Any]] = None,
        addons_manager: Optional[AddonsManager] = None,
        host: Optional[AbstractHost] = _PLACEHOLDER,
    ) -> None:
        self._log = Logger.get_logger(self.__class__.__name__)

        self._addons_manager = addons_manager
        self._host = host

        # Attributes that are re-cached on reset
        self._studio_settings = studio_settings
        self._plugins = None

    def reset(
        self, studio_settings: Optional[dict[str, Any]] = None
    ) -> None:
        """Reset context cache.

        Reset plugins and studio settings to reload them.

        Notes:
            Does not reset the cache of AddonsManager because there should
            not be a reason to do so.

        """
        self._studio_settings = studio_settings
        self._plugins = None

    def get_addons_manager(self) -> AddonsManager:
        """Addons manager (created lazily using studio settings)."""
        if self._addons_manager is None:
            self._addons_manager = AddonsManager(
                settings=self.get_studio_settings()
            )
        return self._addons_manager

    def get_host(self) -> Optional[AbstractHost]:
        """Get current host integration.

        Returns:
            Optional[AbstractHost]: Host integration. Can be None if host
                integration is not registered -> probably not used in the
                host integration process.

        """
        if self._host is _PLACEHOLDER:
            # Imported here to avoid a circular import
            from ayon_core.pipeline import registered_host

            self._host = registered_host()
        return self._host

    def get_host_name(self) -> Optional[str]:
        """Name of the current host, or None when no host is registered."""
        host = self.get_host()
        return None if host is None else host.name

    def get_studio_settings(self) -> dict[str, Any]:
        """Deep copy of studio settings (fetched lazily, then cached)."""
        if self._studio_settings is None:
            self._studio_settings = get_studio_settings()
        return copy.deepcopy(self._studio_settings)

    def get_action_items(
        self, selection: LoaderActionSelection
    ) -> list[LoaderActionItem]:
        """Collect action items from all plugins for given selection.

        Args:
            selection (LoaderActionSelection): Selection wrapper.

        """
        action_items = []
        for plugin_id, plugin in self._get_plugins().items():
            try:
                for action_item in plugin.get_action_items(selection):
                    # Backfill the source plugin identifier
                    if action_item.identifier is None:
                        action_item.identifier = plugin_id
                    action_items.append(action_item)

            except Exception:
                self._log.warning(
                    "Failed to get action items for"
                    f" plugin '{plugin.identifier}'",
                    exc_info=True,
                )
        return action_items

    def execute_action(
        self,
        identifier: str,
        selection: LoaderActionSelection,
        data: Optional[DataType],
        form_values: dict[str, Any],
    ) -> Optional[LoaderActionResult]:
        """Trigger action execution.

        Args:
            identifier (str): Identifier of the plugin.
            selection (LoaderActionSelection): Selection wrapper. Can be used
                to get what is selected in UI and to get access to entity
                cache.
            data (Optional[DataType]): Additional action item data.
            form_values (dict[str, Any]): Form values related to action.
                Usually filled if action returned response with form.

        """
        plugin = self._get_plugins()[identifier]
        return plugin.execute_action(
            selection,
            data,
            form_values,
        )

    def _collect_plugin_paths(self, host_name: Optional[str]) -> list[str]:
        """Collect loader action plugin paths from core and addons."""
        plugin_paths = [
            os.path.join(AYON_CORE_ROOT, "plugins", "loader")
        ]
        for addon in self.get_addons_manager().addons:
            if not isinstance(addon, IPluginPaths):
                continue

            try:
                # Pass host name only when the addon's method accepts it
                if is_func_signature_supported(
                    addon.get_loader_action_plugin_paths,
                    host_name
                ):
                    addon_paths = addon.get_loader_action_plugin_paths(
                        host_name
                    )
                else:
                    addon_paths = addon.get_loader_action_plugin_paths()
            except Exception:
                self._log.warning(
                    "Failed to get plugin paths for addon",
                    exc_info=True
                )
                continue

            if addon_paths:
                plugin_paths.extend(addon_paths)
        return plugin_paths

    def _get_plugins(self) -> dict[str, LoaderActionPlugin]:
        """Discovered and initialized plugins by identifier (cached)."""
        if self._plugins is not None:
            return self._plugins

        all_paths = self._collect_plugin_paths(self.get_host_name())

        discover_result = discover_plugins(LoaderActionPlugin, all_paths)
        discover_result.log_report()
        plugins = {}
        for cls in discover_result.plugins:
            try:
                plugin = cls(self)
                if not plugin.enabled:
                    continue

                plugin_id = plugin.identifier
                if plugin_id in plugins:
                    self._log.warning(
                        f"Duplicated plugins identifier found '{plugin_id}'."
                    )
                    continue

                plugins[plugin_id] = plugin

            except Exception:
                self._log.warning(
                    f"Failed to initialize plugin '{cls.__name__}'",
                    exc_info=True,
                )
        self._plugins = plugins
        return self._plugins
|
||||||
|
|
||||||
|
|
||||||
|
class LoaderSimpleActionPlugin(LoaderActionPlugin):
    """Simple action plugin.

    This action will show exactly one action item defined by attributes
    on the class.

    Attributes:
        label: Label of the action item.
        order: Order of the action item.
        group_label: Label of the group to which the action belongs.
        icon: Icon definition shown next to label.

    """

    label: Optional[str] = None
    order: int = 0
    group_label: Optional[str] = None
    icon: Optional[dict[str, Any]] = None

    @abstractmethod
    def is_compatible(self, selection: LoaderActionSelection) -> bool:
        """Check if plugin is compatible with selection.

        Args:
            selection (LoaderActionSelection): Selection information.

        Returns:
            bool: True if plugin is compatible with selection.

        """
        pass

    @abstractmethod
    def execute_simple_action(
        self,
        selection: LoaderActionSelection,
        form_values: dict[str, Any],
    ) -> Optional[LoaderActionResult]:
        """Process action based on selection.

        Args:
            selection (LoaderActionSelection): Selection information.
            form_values (dict[str, Any]): Values from a form if there are any.

        Returns:
            Optional[LoaderActionResult]: Result of the action.

        """
        pass

    def get_action_items(
        self, selection: LoaderActionSelection
    ) -> list[LoaderActionItem]:
        """Single action item when compatible, otherwise nothing."""
        if not self.is_compatible(selection):
            return []
        # Fall back to the class name when no label is set
        item_label = self.label or self.__class__.__name__
        action_item = LoaderActionItem(
            label=item_label,
            order=self.order,
            group_label=self.group_label,
            icon=self.icon,
        )
        return [action_item]

    def execute_action(
        self,
        selection: LoaderActionSelection,
        data: Optional[DataType],
        form_values: dict[str, Any],
    ) -> Optional[LoaderActionResult]:
        """Delegate execution to 'execute_simple_action' ('data' unused)."""
        return self.execute_simple_action(selection, form_values)
|
||||||
60
client/ayon_core/pipeline/actions/structures.py
Normal file
60
client/ayon_core/pipeline/actions/structures.py
Normal file
|
|
@ -0,0 +1,60 @@
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
from ayon_core.lib.attribute_definitions import (
|
||||||
|
AbstractAttrDef,
|
||||||
|
serialize_attr_defs,
|
||||||
|
deserialize_attr_defs,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class ActionForm:
    """Form for loader action.

    If an action needs to collect information from a user before or during of
    the action execution, it can return a response with a form. When the
    form is submitted, a new execution of the action is triggered.

    It is also possible to just show a label message without the submit
    button to make sure the user has seen the message.

    Attributes:
        title (str): Title of the form -> title of the window.
        fields (list[AbstractAttrDef]): Fields of the form.
        submit_label (Optional[str]): Label of the submit button. Is hidden
            if is set to None.
        submit_icon (Optional[dict[str, Any]]): Icon definition of the submit
            button.
        cancel_label (Optional[str]): Label of the cancel button. Is hidden
            if is set to None. User can still close the window tho.
        cancel_icon (Optional[dict[str, Any]]): Icon definition of the cancel
            button.

    """
    title: str
    fields: list[AbstractAttrDef]
    submit_label: Optional[str] = "Submit"
    submit_icon: Optional[dict[str, Any]] = None
    cancel_label: Optional[str] = "Cancel"
    cancel_icon: Optional[dict[str, Any]] = None

    def to_json_data(self) -> dict[str, Any]:
        """Serialize the form to JSON-compatible data.

        Returns:
            dict[str, Any]: Serialized form.

        """
        fields = self.fields
        if fields is not None:
            fields = serialize_attr_defs(fields)
        return {
            "title": self.title,
            "fields": fields,
            "submit_label": self.submit_label,
            "submit_icon": self.submit_icon,
            "cancel_label": self.cancel_label,
            "cancel_icon": self.cancel_icon,
        }

    @classmethod
    def from_json_data(cls, data: dict[str, Any]) -> "ActionForm":
        """Recreate the form from JSON data.

        Args:
            data (dict[str, Any]): Data created by 'to_json_data'.

        Returns:
            ActionForm: Deserialized form.

        """
        # Work on a shallow copy so the caller's dict is not mutated
        data = dict(data)
        fields = data["fields"]
        if fields is not None:
            data["fields"] = deserialize_attr_defs(fields)
        return cls(**data)
|
||||||
100
client/ayon_core/pipeline/actions/utils.py
Normal file
100
client/ayon_core/pipeline/actions/utils.py
Normal file
|
|
@ -0,0 +1,100 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from ayon_core.lib.attribute_definitions import (
|
||||||
|
AbstractAttrDef,
|
||||||
|
UILabelDef,
|
||||||
|
BoolDef,
|
||||||
|
TextDef,
|
||||||
|
NumberDef,
|
||||||
|
EnumDef,
|
||||||
|
HiddenDef,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def webaction_fields_to_attribute_defs(
    fields: list[dict[str, Any]]
) -> list[AbstractAttrDef]:
    """Helper function to convert fields definition from webactions form.

    Convert form fields to attribute definitions to be able to display them
    using attribute definitions.

    Args:
        fields (list[dict[str, Any]]): Fields from webaction form.

    Returns:
        list[AbstractAttrDef]: Converted attribute definitions.

    """
    attr_defs = []
    for field in fields:
        field_type = field["type"]
        attr_def = None
        if field_type == "label":
            # Label text may arrive under 'value' or 'text'
            label = field.get("value")
            if label is None:
                label = field.get("text")
            attr_def = UILabelDef(
                label, key=uuid.uuid4().hex
            )
        elif field_type == "boolean":
            value = field["value"]
            # Webactions may serialize booleans as strings
            if isinstance(value, str):
                value = value.lower() == "true"

            attr_def = BoolDef(
                field["name"],
                default=value,
                label=field.get("label"),
            )
        elif field_type == "text":
            attr_def = TextDef(
                field["name"],
                default=field.get("value"),
                label=field.get("label"),
                placeholder=field.get("placeholder"),
                multiline=field.get("multiline", False),
                regex=field.get("regex"),
                # syntax=field["syntax"],
            )
        elif field_type in ("integer", "float"):
            value = field.get("value")
            # Numeric values may arrive as strings
            if isinstance(value, str):
                if field_type == "integer":
                    value = int(value)
                else:
                    value = float(value)
            attr_def = NumberDef(
                field["name"],
                default=value,
                label=field.get("label"),
                decimals=0 if field_type == "integer" else 5,
                # placeholder=field.get("placeholder"),
                minimum=field.get("min"),
                maximum=field.get("max"),
            )
        elif field_type in ("select", "multiselect"):
            attr_def = EnumDef(
                field["name"],
                items=field["options"],
                default=field.get("value"),
                label=field.get("label"),
                multiselection=field_type == "multiselect",
            )
        elif field_type == "hidden":
            attr_def = HiddenDef(
                field["name"],
                default=field.get("value"),
            )

        if attr_def is None:
            # Unknown type: show a label instead of failing the whole form
            print(f"Unknown config field type: {field_type}")
            attr_def = UILabelDef(
                # Fixed: closing quote was missing in the shown label
                f"Unknown field type '{field_type}'",
                key=uuid.uuid4().hex
            )
        attr_defs.append(attr_def)
    return attr_defs
|
||||||
|
|
@ -7,6 +7,7 @@ import platform
|
||||||
import tempfile
|
import tempfile
|
||||||
import warnings
|
import warnings
|
||||||
from copy import deepcopy
|
from copy import deepcopy
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
import ayon_api
|
import ayon_api
|
||||||
|
|
||||||
|
|
@ -26,6 +27,18 @@ from ayon_core.pipeline.load import get_representation_path_with_anatomy
|
||||||
log = Logger.get_logger(__name__)
|
log = Logger.get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ConfigData:
|
||||||
|
"""OCIO Config to use in a certain context.
|
||||||
|
|
||||||
|
When enabled and no path/template are set, it will be considered invalid
|
||||||
|
and will error on OCIO path not found. Enabled must be False to explicitly
|
||||||
|
allow OCIO to be disabled."""
|
||||||
|
path: str = ""
|
||||||
|
template: str = ""
|
||||||
|
enabled: bool = True
|
||||||
|
|
||||||
|
|
||||||
class CachedData:
|
class CachedData:
|
||||||
remapping = {}
|
remapping = {}
|
||||||
has_compatible_ocio_package = None
|
has_compatible_ocio_package = None
|
||||||
|
|
@ -710,7 +723,7 @@ def _get_config_path_from_profile_data(
|
||||||
template_data (dict[str, Any]): Template data.
|
template_data (dict[str, Any]): Template data.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
dict[str, str]: Config data with path and template.
|
ConfigData: Config data with path and template.
|
||||||
"""
|
"""
|
||||||
template = profile[profile_type]
|
template = profile[profile_type]
|
||||||
result = StringTemplate.format_strict_template(
|
result = StringTemplate.format_strict_template(
|
||||||
|
|
@ -719,12 +732,12 @@ def _get_config_path_from_profile_data(
|
||||||
normalized_path = str(result.normalized())
|
normalized_path = str(result.normalized())
|
||||||
if not os.path.exists(normalized_path):
|
if not os.path.exists(normalized_path):
|
||||||
log.warning(f"Path was not found '{normalized_path}'.")
|
log.warning(f"Path was not found '{normalized_path}'.")
|
||||||
return None
|
return ConfigData() # Return invalid config data
|
||||||
|
|
||||||
return {
|
return ConfigData(
|
||||||
"path": normalized_path,
|
path=normalized_path,
|
||||||
"template": template
|
template=template
|
||||||
}
|
)
|
||||||
|
|
||||||
|
|
||||||
def _get_global_config_data(
|
def _get_global_config_data(
|
||||||
|
|
@ -735,7 +748,7 @@ def _get_global_config_data(
|
||||||
imageio_global,
|
imageio_global,
|
||||||
folder_id,
|
folder_id,
|
||||||
log,
|
log,
|
||||||
):
|
) -> ConfigData:
|
||||||
"""Get global config data.
|
"""Get global config data.
|
||||||
|
|
||||||
Global config from core settings is using profiles that are based on
|
Global config from core settings is using profiles that are based on
|
||||||
|
|
@ -759,8 +772,7 @@ def _get_global_config_data(
|
||||||
log (logging.Logger): Logger object.
|
log (logging.Logger): Logger object.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Union[dict[str, str], None]: Config data with path and template
|
ConfigData: Config data with path and template.
|
||||||
or None.
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
task_name = task_type = None
|
task_name = task_type = None
|
||||||
|
|
@ -779,12 +791,14 @@ def _get_global_config_data(
|
||||||
)
|
)
|
||||||
if profile is None:
|
if profile is None:
|
||||||
log.info(f"No config profile matched filters {str(filter_values)}")
|
log.info(f"No config profile matched filters {str(filter_values)}")
|
||||||
return None
|
return ConfigData(enabled=False)
|
||||||
|
|
||||||
profile_type = profile["type"]
|
profile_type = profile["type"]
|
||||||
if profile_type in ("builtin_path", "custom_path"):
|
if profile_type in {"builtin_path", "custom_path"}:
|
||||||
return _get_config_path_from_profile_data(
|
return _get_config_path_from_profile_data(
|
||||||
profile, profile_type, template_data)
|
profile, profile_type, template_data)
|
||||||
|
elif profile_type == "disabled":
|
||||||
|
return ConfigData(enabled=False)
|
||||||
|
|
||||||
# TODO decide if this is the right name for representation
|
# TODO decide if this is the right name for representation
|
||||||
repre_name = "ocioconfig"
|
repre_name = "ocioconfig"
|
||||||
|
|
@ -798,7 +812,7 @@ def _get_global_config_data(
|
||||||
"Colorspace OCIO config path cannot be set. "
|
"Colorspace OCIO config path cannot be set. "
|
||||||
"Profile is set to published product but `Product name` is empty."
|
"Profile is set to published product but `Product name` is empty."
|
||||||
)
|
)
|
||||||
return None
|
return ConfigData()
|
||||||
|
|
||||||
folder_info = template_data.get("folder")
|
folder_info = template_data.get("folder")
|
||||||
if not folder_info:
|
if not folder_info:
|
||||||
|
|
@ -819,7 +833,7 @@ def _get_global_config_data(
|
||||||
)
|
)
|
||||||
if not folder_entity:
|
if not folder_entity:
|
||||||
log.warning(f"Folder entity '{folder_path}' was not found..")
|
log.warning(f"Folder entity '{folder_path}' was not found..")
|
||||||
return None
|
return ConfigData()
|
||||||
folder_id = folder_entity["id"]
|
folder_id = folder_entity["id"]
|
||||||
|
|
||||||
product_entities_by_name = {
|
product_entities_by_name = {
|
||||||
|
|
@ -855,7 +869,7 @@ def _get_global_config_data(
|
||||||
log.info(
|
log.info(
|
||||||
f"Product '{product_name}' does not have available any versions."
|
f"Product '{product_name}' does not have available any versions."
|
||||||
)
|
)
|
||||||
return None
|
return ConfigData()
|
||||||
|
|
||||||
# Find 'ocioconfig' representation entity
|
# Find 'ocioconfig' representation entity
|
||||||
repre_entity = ayon_api.get_representation_by_name(
|
repre_entity = ayon_api.get_representation_by_name(
|
||||||
|
|
@ -868,15 +882,15 @@ def _get_global_config_data(
|
||||||
f"Representation '{repre_name}'"
|
f"Representation '{repre_name}'"
|
||||||
f" not found on product '{product_name}'."
|
f" not found on product '{product_name}'."
|
||||||
)
|
)
|
||||||
return None
|
return ConfigData()
|
||||||
|
|
||||||
path = get_representation_path_with_anatomy(repre_entity, anatomy)
|
path = get_representation_path_with_anatomy(repre_entity, anatomy)
|
||||||
template = repre_entity["attrib"]["template"]
|
template = repre_entity["attrib"]["template"]
|
||||||
|
|
||||||
return {
|
return ConfigData(
|
||||||
"path": path,
|
path=path,
|
||||||
"template": template,
|
template=template
|
||||||
}
|
)
|
||||||
|
|
||||||
|
|
||||||
def get_imageio_config_preset(
|
def get_imageio_config_preset(
|
||||||
|
|
@ -1015,13 +1029,19 @@ def get_imageio_config_preset(
|
||||||
host_ocio_config["filepath"], template_data
|
host_ocio_config["filepath"], template_data
|
||||||
)
|
)
|
||||||
|
|
||||||
if not config_data:
|
if not config_data.enabled:
|
||||||
|
return {} # OCIO management disabled
|
||||||
|
|
||||||
|
if not config_data.path:
|
||||||
raise FileExistsError(
|
raise FileExistsError(
|
||||||
"No OCIO config found in settings. It is"
|
"No OCIO config found in settings. It is"
|
||||||
" either missing or there is typo in path inputs"
|
" either missing or there is typo in path inputs"
|
||||||
)
|
)
|
||||||
|
|
||||||
return config_data
|
return {
|
||||||
|
"path": config_data.path,
|
||||||
|
"template": config_data.template,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
def _get_host_config_data(templates, template_data):
|
def _get_host_config_data(templates, template_data):
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,5 @@
|
||||||
"""Package to handle compatibility checks for pipeline components."""
|
"""Package to handle compatibility checks for pipeline components."""
|
||||||
|
import ayon_api
|
||||||
|
|
||||||
|
|
||||||
def is_product_base_type_supported() -> bool:
|
def is_product_base_type_supported() -> bool:
|
||||||
|
|
@ -13,4 +14,7 @@ def is_product_base_type_supported() -> bool:
|
||||||
bool: True if product base types are supported, False otherwise.
|
bool: True if product base types are supported, False otherwise.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
if not hasattr(ayon_api, "is_product_base_type_supported"):
|
||||||
return False
|
return False
|
||||||
|
return ayon_api.is_product_base_type_supported()
|
||||||
|
|
|
||||||
|
|
@ -15,6 +15,7 @@ from typing import (
|
||||||
Any,
|
Any,
|
||||||
Callable,
|
Callable,
|
||||||
)
|
)
|
||||||
|
from warnings import warn
|
||||||
|
|
||||||
import pyblish.logic
|
import pyblish.logic
|
||||||
import pyblish.api
|
import pyblish.api
|
||||||
|
|
@ -752,13 +753,13 @@ class CreateContext:
|
||||||
manual_creators = {}
|
manual_creators = {}
|
||||||
report = discover_creator_plugins(return_report=True)
|
report = discover_creator_plugins(return_report=True)
|
||||||
self.creator_discover_result = report
|
self.creator_discover_result = report
|
||||||
for creator_class in report.plugins:
|
for creator_class in report.abstract_plugins:
|
||||||
if inspect.isabstract(creator_class):
|
|
||||||
self.log.debug(
|
self.log.debug(
|
||||||
"Skipping abstract Creator {}".format(str(creator_class))
|
"Skipping abstract Creator '%s'",
|
||||||
|
str(creator_class)
|
||||||
)
|
)
|
||||||
continue
|
|
||||||
|
|
||||||
|
for creator_class in report.plugins:
|
||||||
creator_identifier = creator_class.identifier
|
creator_identifier = creator_class.identifier
|
||||||
if creator_identifier in creators:
|
if creator_identifier in creators:
|
||||||
self.log.warning(
|
self.log.warning(
|
||||||
|
|
@ -772,19 +773,17 @@ class CreateContext:
|
||||||
creator_class.host_name
|
creator_class.host_name
|
||||||
and creator_class.host_name != self.host_name
|
and creator_class.host_name != self.host_name
|
||||||
):
|
):
|
||||||
self.log.info((
|
self.log.info(
|
||||||
"Creator's host name \"{}\""
|
(
|
||||||
" is not supported for current host \"{}\""
|
'Creator\'s host name "{}"'
|
||||||
).format(creator_class.host_name, self.host_name))
|
' is not supported for current host "{}"'
|
||||||
|
).format(creator_class.host_name, self.host_name)
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# TODO report initialization error
|
# TODO report initialization error
|
||||||
try:
|
try:
|
||||||
creator = creator_class(
|
creator = creator_class(project_settings, self, self.headless)
|
||||||
project_settings,
|
|
||||||
self,
|
|
||||||
self.headless
|
|
||||||
)
|
|
||||||
except Exception:
|
except Exception:
|
||||||
self.log.error(
|
self.log.error(
|
||||||
f"Failed to initialize plugin: {creator_class}",
|
f"Failed to initialize plugin: {creator_class}",
|
||||||
|
|
@ -792,6 +791,19 @@ class CreateContext:
|
||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
if not creator.product_base_type:
|
||||||
|
message = (
|
||||||
|
f"Provided creator {creator!r} doesn't have "
|
||||||
|
"product base type attribute defined. This will be "
|
||||||
|
"required in future."
|
||||||
|
)
|
||||||
|
warn(
|
||||||
|
message,
|
||||||
|
DeprecationWarning,
|
||||||
|
stacklevel=2
|
||||||
|
)
|
||||||
|
self.log.warning(message)
|
||||||
|
|
||||||
if not creator.enabled:
|
if not creator.enabled:
|
||||||
disabled_creators[creator_identifier] = creator
|
disabled_creators[creator_identifier] = creator
|
||||||
continue
|
continue
|
||||||
|
|
@ -1289,8 +1301,12 @@ class CreateContext:
|
||||||
"folderPath": folder_entity["path"],
|
"folderPath": folder_entity["path"],
|
||||||
"task": task_entity["name"] if task_entity else None,
|
"task": task_entity["name"] if task_entity else None,
|
||||||
"productType": creator.product_type,
|
"productType": creator.product_type,
|
||||||
|
# Add product base type if supported. Fallback to product type
|
||||||
|
"productBaseType": (
|
||||||
|
creator.product_base_type or creator.product_type),
|
||||||
"variant": variant
|
"variant": variant
|
||||||
}
|
}
|
||||||
|
|
||||||
if active is not None:
|
if active is not None:
|
||||||
if not isinstance(active, bool):
|
if not isinstance(active, bool):
|
||||||
self.log.warning(
|
self.log.warning(
|
||||||
|
|
|
||||||
|
|
@ -1,20 +1,21 @@
|
||||||
# -*- coding: utf-8 -*-
|
"""Creator plugins for the create process."""
|
||||||
import os
|
from __future__ import annotations
|
||||||
import copy
|
|
||||||
import collections
|
|
||||||
from typing import TYPE_CHECKING, Optional, Dict, Any
|
|
||||||
|
|
||||||
|
import collections
|
||||||
|
import copy
|
||||||
|
import os
|
||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import TYPE_CHECKING, Any, Dict, Optional
|
||||||
|
|
||||||
from ayon_core.lib import Logger, get_version_from_path
|
from ayon_core.lib import Logger, get_version_from_path
|
||||||
from ayon_core.pipeline.plugin_discover import (
|
from ayon_core.pipeline.plugin_discover import (
|
||||||
|
deregister_plugin,
|
||||||
|
deregister_plugin_path,
|
||||||
discover,
|
discover,
|
||||||
register_plugin,
|
register_plugin,
|
||||||
register_plugin_path,
|
register_plugin_path,
|
||||||
deregister_plugin,
|
|
||||||
deregister_plugin_path
|
|
||||||
)
|
)
|
||||||
from ayon_core.pipeline.staging_dir import get_staging_dir_info, StagingDir
|
from ayon_core.pipeline.staging_dir import StagingDir, get_staging_dir_info
|
||||||
|
|
||||||
from .constants import DEFAULT_VARIANT_VALUE
|
from .constants import DEFAULT_VARIANT_VALUE
|
||||||
from .product_name import get_product_name
|
from .product_name import get_product_name
|
||||||
|
|
@ -23,6 +24,7 @@ from .structures import CreatedInstance
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from ayon_core.lib import AbstractAttrDef
|
from ayon_core.lib import AbstractAttrDef
|
||||||
|
|
||||||
# Avoid cyclic imports
|
# Avoid cyclic imports
|
||||||
from .context import CreateContext, UpdateData # noqa: F401
|
from .context import CreateContext, UpdateData # noqa: F401
|
||||||
|
|
||||||
|
|
@ -66,7 +68,6 @@ class ProductConvertorPlugin(ABC):
|
||||||
Returns:
|
Returns:
|
||||||
logging.Logger: Logger with name of the plugin.
|
logging.Logger: Logger with name of the plugin.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if self._log is None:
|
if self._log is None:
|
||||||
self._log = Logger.get_logger(self.__class__.__name__)
|
self._log = Logger.get_logger(self.__class__.__name__)
|
||||||
return self._log
|
return self._log
|
||||||
|
|
@ -82,9 +83,8 @@ class ProductConvertorPlugin(ABC):
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
str: Converted identifier unique for all converters in host.
|
str: Converted identifier unique for all converters in host.
|
||||||
"""
|
|
||||||
|
|
||||||
pass
|
"""
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def find_instances(self):
|
def find_instances(self):
|
||||||
|
|
@ -94,14 +94,10 @@ class ProductConvertorPlugin(ABC):
|
||||||
convert.
|
convert.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
pass
|
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def convert(self):
|
def convert(self):
|
||||||
"""Conversion code."""
|
"""Conversion code."""
|
||||||
|
|
||||||
pass
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def create_context(self):
|
def create_context(self):
|
||||||
"""Quick access to create context.
|
"""Quick access to create context.
|
||||||
|
|
@ -109,7 +105,6 @@ class ProductConvertorPlugin(ABC):
|
||||||
Returns:
|
Returns:
|
||||||
CreateContext: Context which initialized the plugin.
|
CreateContext: Context which initialized the plugin.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
return self._create_context
|
return self._create_context
|
||||||
|
|
||||||
@property
|
@property
|
||||||
|
|
@ -122,7 +117,6 @@ class ProductConvertorPlugin(ABC):
|
||||||
Raises:
|
Raises:
|
||||||
UnavailableSharedData: When called out of collection phase.
|
UnavailableSharedData: When called out of collection phase.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
return self._create_context.collection_shared_data
|
return self._create_context.collection_shared_data
|
||||||
|
|
||||||
def add_convertor_item(self, label):
|
def add_convertor_item(self, label):
|
||||||
|
|
@ -131,12 +125,10 @@ class ProductConvertorPlugin(ABC):
|
||||||
Args:
|
Args:
|
||||||
label (str): Label of item which will show in UI.
|
label (str): Label of item which will show in UI.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
self._create_context.add_convertor_item(self.identifier, label)
|
self._create_context.add_convertor_item(self.identifier, label)
|
||||||
|
|
||||||
def remove_convertor_item(self):
|
def remove_convertor_item(self):
|
||||||
"""Remove legacy item from create context when conversion finished."""
|
"""Remove legacy item from create context when conversion finished."""
|
||||||
|
|
||||||
self._create_context.remove_convertor_item(self.identifier)
|
self._create_context.remove_convertor_item(self.identifier)
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -154,7 +146,14 @@ class BaseCreator(ABC):
|
||||||
project_settings (dict[str, Any]): Project settings.
|
project_settings (dict[str, Any]): Project settings.
|
||||||
create_context (CreateContext): Context which initialized creator.
|
create_context (CreateContext): Context which initialized creator.
|
||||||
headless (bool): Running in headless mode.
|
headless (bool): Running in headless mode.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
# Attribute 'skip_discovery' is used during discovery phase to skip
|
||||||
|
# plugins, which can be used to mark base plugins that should not be
|
||||||
|
# considered as plugins "to use". The discovery logic does NOT use
|
||||||
|
# the attribute value from parent classes. Each base class has to define
|
||||||
|
# the attribute again.
|
||||||
|
skip_discovery = True
|
||||||
|
|
||||||
# Label shown in UI
|
# Label shown in UI
|
||||||
label = None
|
label = None
|
||||||
|
|
@ -219,7 +218,6 @@ class BaseCreator(ABC):
|
||||||
Returns:
|
Returns:
|
||||||
Optional[dict[str, Any]]: Settings values or None.
|
Optional[dict[str, Any]]: Settings values or None.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
settings = project_settings.get(category_name)
|
settings = project_settings.get(category_name)
|
||||||
if not settings:
|
if not settings:
|
||||||
return None
|
return None
|
||||||
|
|
@ -265,7 +263,6 @@ class BaseCreator(ABC):
|
||||||
Args:
|
Args:
|
||||||
project_settings (dict[str, Any]): Project settings.
|
project_settings (dict[str, Any]): Project settings.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
settings_category = self.settings_category
|
settings_category = self.settings_category
|
||||||
if not settings_category:
|
if not settings_category:
|
||||||
return
|
return
|
||||||
|
|
@ -277,18 +274,17 @@ class BaseCreator(ABC):
|
||||||
project_settings, settings_category, settings_name
|
project_settings, settings_category, settings_name
|
||||||
)
|
)
|
||||||
if settings is None:
|
if settings is None:
|
||||||
self.log.debug("No settings found for {}".format(cls_name))
|
self.log.debug(f"No settings found for {cls_name}")
|
||||||
return
|
return
|
||||||
|
|
||||||
for key, value in settings.items():
|
for key, value in settings.items():
|
||||||
# Log out attributes that are not defined on plugin object
|
# Log out attributes that are not defined on plugin object
|
||||||
# - those may be potential dangerous typos in settings
|
# - those may be potential dangerous typos in settings
|
||||||
if not hasattr(self, key):
|
if not hasattr(self, key):
|
||||||
self.log.debug((
|
self.log.debug(
|
||||||
"Applying settings to unknown attribute '{}' on '{}'."
|
"Applying settings to unknown attribute '%s' on '%s'.",
|
||||||
).format(
|
|
||||||
key, cls_name
|
key, cls_name
|
||||||
))
|
)
|
||||||
setattr(self, key, value)
|
setattr(self, key, value)
|
||||||
|
|
||||||
def register_callbacks(self):
|
def register_callbacks(self):
|
||||||
|
|
@ -297,23 +293,39 @@ class BaseCreator(ABC):
|
||||||
Default implementation does nothing. It can be overridden to register
|
Default implementation does nothing. It can be overridden to register
|
||||||
callbacks for creator.
|
callbacks for creator.
|
||||||
"""
|
"""
|
||||||
pass
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def identifier(self):
|
def identifier(self):
|
||||||
"""Identifier of creator (must be unique).
|
"""Identifier of creator (must be unique).
|
||||||
|
|
||||||
Default implementation returns plugin's product type.
|
Default implementation returns plugin's product base type,
|
||||||
"""
|
or falls back to product type if product base type is not set.
|
||||||
|
|
||||||
return self.product_type
|
"""
|
||||||
|
identifier = self.product_base_type
|
||||||
|
if not identifier:
|
||||||
|
identifier = self.product_type
|
||||||
|
return identifier
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def product_type(self):
|
def product_type(self):
|
||||||
"""Family that plugin represents."""
|
"""Family that plugin represents."""
|
||||||
|
|
||||||
pass
|
@property
|
||||||
|
def product_base_type(self) -> Optional[str]:
|
||||||
|
"""Base product type that plugin represents.
|
||||||
|
|
||||||
|
Todo (antirotor): This should be required in future - it
|
||||||
|
should be made abstract then.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Optional[str]: Base product type that plugin represents.
|
||||||
|
If not set, it is assumed that the creator plugin is obsolete
|
||||||
|
and does not support product base type.
|
||||||
|
|
||||||
|
"""
|
||||||
|
return None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def project_name(self):
|
def project_name(self):
|
||||||
|
|
@ -322,7 +334,6 @@ class BaseCreator(ABC):
|
||||||
Returns:
|
Returns:
|
||||||
str: Name of a project.
|
str: Name of a project.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
return self.create_context.project_name
|
return self.create_context.project_name
|
||||||
|
|
||||||
@property
|
@property
|
||||||
|
|
@ -332,7 +343,6 @@ class BaseCreator(ABC):
|
||||||
Returns:
|
Returns:
|
||||||
Anatomy: Project anatomy object.
|
Anatomy: Project anatomy object.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
return self.create_context.project_anatomy
|
return self.create_context.project_anatomy
|
||||||
|
|
||||||
@property
|
@property
|
||||||
|
|
@ -344,13 +354,14 @@ class BaseCreator(ABC):
|
||||||
|
|
||||||
Default implementation use attributes in this order:
|
Default implementation use attributes in this order:
|
||||||
- 'group_label' -> 'label' -> 'identifier'
|
- 'group_label' -> 'label' -> 'identifier'
|
||||||
Keep in mind that 'identifier' use 'product_type' by default.
|
|
||||||
|
Keep in mind that 'identifier' uses 'product_base_type' by default.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
str: Group label that can be used for grouping of instances in UI.
|
str: Group label that can be used for grouping of instances in UI.
|
||||||
Group label can be overridden by instance itself.
|
Group label can be overridden by the instance itself.
|
||||||
"""
|
|
||||||
|
|
||||||
|
"""
|
||||||
if self._cached_group_label is None:
|
if self._cached_group_label is None:
|
||||||
label = self.identifier
|
label = self.identifier
|
||||||
if self.group_label:
|
if self.group_label:
|
||||||
|
|
@ -367,7 +378,6 @@ class BaseCreator(ABC):
|
||||||
Returns:
|
Returns:
|
||||||
logging.Logger: Logger with name of the plugin.
|
logging.Logger: Logger with name of the plugin.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if self._log is None:
|
if self._log is None:
|
||||||
self._log = Logger.get_logger(self.__class__.__name__)
|
self._log = Logger.get_logger(self.__class__.__name__)
|
||||||
return self._log
|
return self._log
|
||||||
|
|
@ -376,7 +386,8 @@ class BaseCreator(ABC):
|
||||||
self,
|
self,
|
||||||
product_name: str,
|
product_name: str,
|
||||||
data: Dict[str, Any],
|
data: Dict[str, Any],
|
||||||
product_type: Optional[str] = None
|
product_type: Optional[str] = None,
|
||||||
|
product_base_type: Optional[str] = None
|
||||||
) -> CreatedInstance:
|
) -> CreatedInstance:
|
||||||
"""Create instance and add instance to context.
|
"""Create instance and add instance to context.
|
||||||
|
|
||||||
|
|
@ -385,6 +396,8 @@ class BaseCreator(ABC):
|
||||||
data (Dict[str, Any]): Instance data.
|
data (Dict[str, Any]): Instance data.
|
||||||
product_type (Optional[str]): Product type, object attribute
|
product_type (Optional[str]): Product type, object attribute
|
||||||
'product_type' is used if not passed.
|
'product_type' is used if not passed.
|
||||||
|
product_base_type (Optional[str]): Product base type, object
|
||||||
|
attribute 'product_base_type' is used if not passed.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
CreatedInstance: Created instance.
|
CreatedInstance: Created instance.
|
||||||
|
|
@ -392,11 +405,16 @@ class BaseCreator(ABC):
|
||||||
"""
|
"""
|
||||||
if product_type is None:
|
if product_type is None:
|
||||||
product_type = self.product_type
|
product_type = self.product_type
|
||||||
|
|
||||||
|
if not product_base_type and not self.product_base_type:
|
||||||
|
product_base_type = product_type
|
||||||
|
|
||||||
instance = CreatedInstance(
|
instance = CreatedInstance(
|
||||||
product_type,
|
product_type=product_type,
|
||||||
product_name,
|
product_name=product_name,
|
||||||
data,
|
data=data,
|
||||||
creator=self,
|
creator=self,
|
||||||
|
product_base_type=product_base_type,
|
||||||
)
|
)
|
||||||
self._add_instance_to_context(instance)
|
self._add_instance_to_context(instance)
|
||||||
return instance
|
return instance
|
||||||
|
|
@ -412,7 +430,6 @@ class BaseCreator(ABC):
|
||||||
Args:
|
Args:
|
||||||
instance (CreatedInstance): New created instance.
|
instance (CreatedInstance): New created instance.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
self.create_context.creator_adds_instance(instance)
|
self.create_context.creator_adds_instance(instance)
|
||||||
|
|
||||||
def _remove_instance_from_context(self, instance):
|
def _remove_instance_from_context(self, instance):
|
||||||
|
|
@ -425,7 +442,6 @@ class BaseCreator(ABC):
|
||||||
Args:
|
Args:
|
||||||
instance (CreatedInstance): Instance which should be removed.
|
instance (CreatedInstance): Instance which should be removed.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
self.create_context.creator_removed_instance(instance)
|
self.create_context.creator_removed_instance(instance)
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
|
|
@ -437,8 +453,6 @@ class BaseCreator(ABC):
|
||||||
implementation
|
implementation
|
||||||
"""
|
"""
|
||||||
|
|
||||||
pass
|
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def collect_instances(self):
|
def collect_instances(self):
|
||||||
"""Collect existing instances related to this creator plugin.
|
"""Collect existing instances related to this creator plugin.
|
||||||
|
|
@ -464,8 +478,6 @@ class BaseCreator(ABC):
|
||||||
```
|
```
|
||||||
"""
|
"""
|
||||||
|
|
||||||
pass
|
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def update_instances(self, update_list):
|
def update_instances(self, update_list):
|
||||||
"""Store changes of existing instances so they can be recollected.
|
"""Store changes of existing instances so they can be recollected.
|
||||||
|
|
@ -475,8 +487,6 @@ class BaseCreator(ABC):
|
||||||
contain changed instance and it's changes.
|
contain changed instance and it's changes.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
pass
|
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def remove_instances(self, instances):
|
def remove_instances(self, instances):
|
||||||
"""Method called on instance removal.
|
"""Method called on instance removal.
|
||||||
|
|
@ -489,14 +499,11 @@ class BaseCreator(ABC):
|
||||||
removed.
|
removed.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
pass
|
|
||||||
|
|
||||||
def get_icon(self):
|
def get_icon(self):
|
||||||
"""Icon of creator (product type).
|
"""Icon of creator (product type).
|
||||||
|
|
||||||
Can return path to image file or awesome icon name.
|
Can return path to image file or awesome icon name.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
return self.icon
|
return self.icon
|
||||||
|
|
||||||
def get_dynamic_data(
|
def get_dynamic_data(
|
||||||
|
|
@ -512,19 +519,18 @@ class BaseCreator(ABC):
|
||||||
|
|
||||||
These may be dynamically created based on current context of workfile.
|
These may be dynamically created based on current context of workfile.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
def get_product_name(
|
def get_product_name(
|
||||||
self,
|
self,
|
||||||
project_name,
|
project_name: str,
|
||||||
folder_entity,
|
folder_entity: dict[str, Any],
|
||||||
task_entity,
|
task_entity: Optional[dict[str, Any]],
|
||||||
variant,
|
variant: str,
|
||||||
host_name=None,
|
host_name: Optional[str] = None,
|
||||||
instance=None,
|
instance: Optional[CreatedInstance] = None,
|
||||||
project_entity=None,
|
project_entity: Optional[dict[str, Any]] = None,
|
||||||
):
|
) -> str:
|
||||||
"""Return product name for passed context.
|
"""Return product name for passed context.
|
||||||
|
|
||||||
Method is also called on product name update. In that case origin
|
Method is also called on product name update. In that case origin
|
||||||
|
|
@ -546,11 +552,6 @@ class BaseCreator(ABC):
|
||||||
if host_name is None:
|
if host_name is None:
|
||||||
host_name = self.create_context.host_name
|
host_name = self.create_context.host_name
|
||||||
|
|
||||||
task_name = task_type = None
|
|
||||||
if task_entity:
|
|
||||||
task_name = task_entity["name"]
|
|
||||||
task_type = task_entity["taskType"]
|
|
||||||
|
|
||||||
dynamic_data = self.get_dynamic_data(
|
dynamic_data = self.get_dynamic_data(
|
||||||
project_name,
|
project_name,
|
||||||
folder_entity,
|
folder_entity,
|
||||||
|
|
@ -566,11 +567,12 @@ class BaseCreator(ABC):
|
||||||
|
|
||||||
return get_product_name(
|
return get_product_name(
|
||||||
project_name,
|
project_name,
|
||||||
task_name,
|
folder_entity=folder_entity,
|
||||||
task_type,
|
task_entity=task_entity,
|
||||||
host_name,
|
product_base_type=self.product_base_type,
|
||||||
self.product_type,
|
product_type=self.product_type,
|
||||||
variant,
|
host_name=host_name,
|
||||||
|
variant=variant,
|
||||||
dynamic_data=dynamic_data,
|
dynamic_data=dynamic_data,
|
||||||
project_settings=self.project_settings,
|
project_settings=self.project_settings,
|
||||||
project_entity=project_entity,
|
project_entity=project_entity,
|
||||||
|
|
@ -583,15 +585,15 @@ class BaseCreator(ABC):
|
||||||
and values are stored to metadata for future usage and for publishing
|
and values are stored to metadata for future usage and for publishing
|
||||||
purposes.
|
purposes.
|
||||||
|
|
||||||
NOTE:
|
Note:
|
||||||
Convert method should be implemented which should care about updating
|
Convert method should be implemented which should care about
|
||||||
keys/values when plugin attributes change.
|
updating keys/values when plugin attributes change.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
list[AbstractAttrDef]: Attribute definitions that can be tweaked
|
list[AbstractAttrDef]: Attribute definitions that can be tweaked
|
||||||
for created instance.
|
for created instance.
|
||||||
"""
|
|
||||||
|
|
||||||
|
"""
|
||||||
return self.instance_attr_defs
|
return self.instance_attr_defs
|
||||||
|
|
||||||
def get_attr_defs_for_instance(self, instance):
|
def get_attr_defs_for_instance(self, instance):
|
||||||
|
|
@ -614,12 +616,10 @@ class BaseCreator(ABC):
|
||||||
Raises:
|
Raises:
|
||||||
UnavailableSharedData: When called out of collection phase.
|
UnavailableSharedData: When called out of collection phase.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
return self.create_context.collection_shared_data
|
return self.create_context.collection_shared_data
|
||||||
|
|
||||||
def set_instance_thumbnail_path(self, instance_id, thumbnail_path=None):
|
def set_instance_thumbnail_path(self, instance_id, thumbnail_path=None):
|
||||||
"""Set path to thumbnail for instance."""
|
"""Set path to thumbnail for instance."""
|
||||||
|
|
||||||
self.create_context.thumbnail_paths_by_instance_id[instance_id] = (
|
self.create_context.thumbnail_paths_by_instance_id[instance_id] = (
|
||||||
thumbnail_path
|
thumbnail_path
|
||||||
)
|
)
|
||||||
|
|
@ -640,7 +640,6 @@ class BaseCreator(ABC):
|
||||||
Returns:
|
Returns:
|
||||||
dict[str, int]: Next versions by instance id.
|
dict[str, int]: Next versions by instance id.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
return get_next_versions_for_instances(
|
return get_next_versions_for_instances(
|
||||||
self.create_context.project_name, instances
|
self.create_context.project_name, instances
|
||||||
)
|
)
|
||||||
|
|
@ -651,7 +650,7 @@ class Creator(BaseCreator):
|
||||||
|
|
||||||
Creation requires prepared product name and instance data.
|
Creation requires prepared product name and instance data.
|
||||||
"""
|
"""
|
||||||
|
skip_discovery = True
|
||||||
# GUI Purposes
|
# GUI Purposes
|
||||||
# - default_variants may not be used if `get_default_variants`
|
# - default_variants may not be used if `get_default_variants`
|
||||||
# is overridden
|
# is overridden
|
||||||
|
|
@ -707,7 +706,6 @@ class Creator(BaseCreator):
|
||||||
int: Order in which is creator shown (less == earlier). By default
|
int: Order in which is creator shown (less == earlier). By default
|
||||||
is using Creator's 'order' or processing.
|
is using Creator's 'order' or processing.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
return self.order
|
return self.order
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
|
|
@ -722,11 +720,9 @@ class Creator(BaseCreator):
|
||||||
pre_create_data(dict): Data based on pre creation attributes.
|
pre_create_data(dict): Data based on pre creation attributes.
|
||||||
Those may affect how creator works.
|
Those may affect how creator works.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# instance = CreatedInstance(
|
# instance = CreatedInstance(
|
||||||
# self.product_type, product_name, instance_data
|
# self.product_type, product_name, instance_data
|
||||||
# )
|
# )
|
||||||
pass
|
|
||||||
|
|
||||||
def get_description(self):
|
def get_description(self):
|
||||||
"""Short description of product type and plugin.
|
"""Short description of product type and plugin.
|
||||||
|
|
@ -734,7 +730,6 @@ class Creator(BaseCreator):
|
||||||
Returns:
|
Returns:
|
||||||
str: Short description of product type.
|
str: Short description of product type.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
return self.description
|
return self.description
|
||||||
|
|
||||||
def get_detail_description(self):
|
def get_detail_description(self):
|
||||||
|
|
@ -745,7 +740,6 @@ class Creator(BaseCreator):
|
||||||
Returns:
|
Returns:
|
||||||
str: Detailed description of product type for artist.
|
str: Detailed description of product type for artist.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
return self.detailed_description
|
return self.detailed_description
|
||||||
|
|
||||||
def get_default_variants(self):
|
def get_default_variants(self):
|
||||||
|
|
@ -759,7 +753,6 @@ class Creator(BaseCreator):
|
||||||
Returns:
|
Returns:
|
||||||
list[str]: Whisper variants for user input.
|
list[str]: Whisper variants for user input.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
return copy.deepcopy(self.default_variants)
|
return copy.deepcopy(self.default_variants)
|
||||||
|
|
||||||
def get_default_variant(self, only_explicit=False):
|
def get_default_variant(self, only_explicit=False):
|
||||||
|
|
@ -779,7 +772,6 @@ class Creator(BaseCreator):
|
||||||
Returns:
|
Returns:
|
||||||
str: Variant value.
|
str: Variant value.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if only_explicit or self._default_variant:
|
if only_explicit or self._default_variant:
|
||||||
return self._default_variant
|
return self._default_variant
|
||||||
|
|
||||||
|
|
@ -800,7 +792,6 @@ class Creator(BaseCreator):
|
||||||
Returns:
|
Returns:
|
||||||
str: Variant value.
|
str: Variant value.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
return self.get_default_variant()
|
return self.get_default_variant()
|
||||||
|
|
||||||
def _set_default_variant_wrap(self, variant):
|
def _set_default_variant_wrap(self, variant):
|
||||||
|
|
@ -812,7 +803,6 @@ class Creator(BaseCreator):
|
||||||
Args:
|
Args:
|
||||||
variant (str): New default variant value.
|
variant (str): New default variant value.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
self._default_variant = variant
|
self._default_variant = variant
|
||||||
|
|
||||||
default_variant = property(
|
default_variant = property(
|
||||||
|
|
@ -949,6 +939,8 @@ class Creator(BaseCreator):
|
||||||
|
|
||||||
|
|
||||||
class HiddenCreator(BaseCreator):
|
class HiddenCreator(BaseCreator):
|
||||||
|
skip_discovery = True
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def create(self, instance_data, source_data):
|
def create(self, instance_data, source_data):
|
||||||
pass
|
pass
|
||||||
|
|
@ -959,10 +951,10 @@ class AutoCreator(BaseCreator):
|
||||||
|
|
||||||
Can be used e.g. for `workfile`.
|
Can be used e.g. for `workfile`.
|
||||||
"""
|
"""
|
||||||
|
skip_discovery = True
|
||||||
|
|
||||||
def remove_instances(self, instances):
|
def remove_instances(self, instances):
|
||||||
"""Skip removal."""
|
"""Skip removal."""
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def discover_creator_plugins(*args, **kwargs):
|
def discover_creator_plugins(*args, **kwargs):
|
||||||
|
|
@ -1020,7 +1012,6 @@ def cache_and_get_instances(creator, shared_key, list_instances_func):
|
||||||
dict[str, dict[str, Any]]: Cached instances by creator identifier from
|
dict[str, dict[str, Any]]: Cached instances by creator identifier from
|
||||||
result of passed function.
|
result of passed function.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if shared_key not in creator.collection_shared_data:
|
if shared_key not in creator.collection_shared_data:
|
||||||
value = collections.defaultdict(list)
|
value = collections.defaultdict(list)
|
||||||
for instance in list_instances_func():
|
for instance in list_instances_func():
|
||||||
|
|
|
||||||
|
|
@ -1,24 +1,38 @@
|
||||||
|
"""Functions for handling product names."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import warnings
|
||||||
|
from functools import wraps
|
||||||
|
from typing import Any, Optional, Union, overload
|
||||||
|
from warnings import warn
|
||||||
|
|
||||||
import ayon_api
|
import ayon_api
|
||||||
from ayon_core.lib import (
|
from ayon_core.lib import (
|
||||||
StringTemplate,
|
StringTemplate,
|
||||||
filter_profiles,
|
filter_profiles,
|
||||||
prepare_template_data,
|
prepare_template_data,
|
||||||
|
Logger,
|
||||||
|
is_func_signature_supported,
|
||||||
)
|
)
|
||||||
|
from ayon_core.lib.path_templates import TemplateResult
|
||||||
from ayon_core.settings import get_project_settings
|
from ayon_core.settings import get_project_settings
|
||||||
|
|
||||||
from .constants import DEFAULT_PRODUCT_TEMPLATE
|
from .constants import DEFAULT_PRODUCT_TEMPLATE
|
||||||
from .exceptions import TaskNotSetError, TemplateFillError
|
from .exceptions import TaskNotSetError, TemplateFillError
|
||||||
|
|
||||||
|
log = Logger.get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def get_product_name_template(
|
def get_product_name_template(
|
||||||
project_name,
|
project_name: str,
|
||||||
product_type,
|
product_type: str,
|
||||||
task_name,
|
task_name: Optional[str],
|
||||||
task_type,
|
task_type: Optional[str],
|
||||||
host_name,
|
host_name: str,
|
||||||
default_template=None,
|
default_template: Optional[str] = None,
|
||||||
project_settings=None
|
project_settings: Optional[dict[str, Any]] = None,
|
||||||
):
|
product_base_type: Optional[str] = None
|
||||||
|
) -> str:
|
||||||
"""Get product name template based on passed context.
|
"""Get product name template based on passed context.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
|
|
@ -26,26 +40,32 @@ def get_product_name_template(
|
||||||
product_type (str): Product type for which the product name is
|
product_type (str): Product type for which the product name is
|
||||||
calculated.
|
calculated.
|
||||||
host_name (str): Name of host in which the product name is calculated.
|
host_name (str): Name of host in which the product name is calculated.
|
||||||
task_name (str): Name of task in which context the product is created.
|
task_name (Optional[str]): Name of task in which context the
|
||||||
task_type (str): Type of task in which context the product is created.
|
product is created.
|
||||||
default_template (Union[str, None]): Default template which is used if
|
task_type (Optional[str]): Type of task in which context the
|
||||||
|
product is created.
|
||||||
|
default_template (Optional[str]): Default template which is used if
|
||||||
settings won't find any matching possibility. Constant
|
settings won't find any matching possibility. Constant
|
||||||
'DEFAULT_PRODUCT_TEMPLATE' is used if not defined.
|
'DEFAULT_PRODUCT_TEMPLATE' is used if not defined.
|
||||||
project_settings (Union[Dict[str, Any], None]): Prepared settings for
|
project_settings (Optional[dict[str, Any]]): Prepared settings for
|
||||||
project. Settings are queried if not passed.
|
project. Settings are queried if not passed.
|
||||||
"""
|
product_base_type (Optional[str]): Base type of product.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: Product name template.
|
||||||
|
|
||||||
|
"""
|
||||||
if project_settings is None:
|
if project_settings is None:
|
||||||
project_settings = get_project_settings(project_name)
|
project_settings = get_project_settings(project_name)
|
||||||
tools_settings = project_settings["core"]["tools"]
|
tools_settings = project_settings["core"]["tools"]
|
||||||
profiles = tools_settings["creator"]["product_name_profiles"]
|
profiles = tools_settings["creator"]["product_name_profiles"]
|
||||||
filtering_criteria = {
|
filtering_criteria = {
|
||||||
|
"product_base_types": product_base_type or product_type,
|
||||||
"product_types": product_type,
|
"product_types": product_type,
|
||||||
"hosts": host_name,
|
"host_names": host_name,
|
||||||
"tasks": task_name,
|
"task_names": task_name,
|
||||||
"task_types": task_type
|
"task_types": task_type,
|
||||||
}
|
}
|
||||||
|
|
||||||
matching_profile = filter_profiles(profiles, filtering_criteria)
|
matching_profile = filter_profiles(profiles, filtering_criteria)
|
||||||
template = None
|
template = None
|
||||||
if matching_profile:
|
if matching_profile:
|
||||||
|
|
@ -69,6 +89,214 @@ def get_product_name_template(
|
||||||
return template
|
return template
|
||||||
|
|
||||||
|
|
||||||
|
def _get_product_name_old(
|
||||||
|
project_name: str,
|
||||||
|
task_name: Optional[str],
|
||||||
|
task_type: Optional[str],
|
||||||
|
host_name: str,
|
||||||
|
product_type: str,
|
||||||
|
variant: str,
|
||||||
|
default_template: Optional[str] = None,
|
||||||
|
dynamic_data: Optional[dict[str, Any]] = None,
|
||||||
|
project_settings: Optional[dict[str, Any]] = None,
|
||||||
|
product_type_filter: Optional[str] = None,
|
||||||
|
project_entity: Optional[dict[str, Any]] = None,
|
||||||
|
product_base_type: Optional[str] = None,
|
||||||
|
) -> TemplateResult:
|
||||||
|
warnings.warn(
|
||||||
|
"Used deprecated 'task_name' and 'task_type' arguments."
|
||||||
|
" Please use new signature with 'folder_entity' and 'task_entity'.",
|
||||||
|
DeprecationWarning,
|
||||||
|
stacklevel=2
|
||||||
|
)
|
||||||
|
if not product_type:
|
||||||
|
return StringTemplate("").format({})
|
||||||
|
|
||||||
|
template = get_product_name_template(
|
||||||
|
project_name=project_name,
|
||||||
|
product_type=product_type_filter or product_type,
|
||||||
|
task_name=task_name,
|
||||||
|
task_type=task_type,
|
||||||
|
host_name=host_name,
|
||||||
|
default_template=default_template,
|
||||||
|
project_settings=project_settings,
|
||||||
|
product_base_type=product_base_type,
|
||||||
|
)
|
||||||
|
|
||||||
|
template_low = template.lower()
|
||||||
|
# Simple check of task name existence for template with {task[name]} in
|
||||||
|
if not task_name and "{task" in template_low:
|
||||||
|
raise TaskNotSetError()
|
||||||
|
|
||||||
|
task_value = {
|
||||||
|
"name": task_name,
|
||||||
|
"type": task_type,
|
||||||
|
}
|
||||||
|
if "{task}" in template_low:
|
||||||
|
task_value = task_name
|
||||||
|
# NOTE this is message for TDs and Admins -> not really for users
|
||||||
|
# TODO validate this in settings and not allow it
|
||||||
|
log.warning(
|
||||||
|
"Found deprecated task key '{task}' in product name template."
|
||||||
|
" Please use '{task[name]}' instead."
|
||||||
|
)
|
||||||
|
|
||||||
|
elif "{task[short]}" in template_low:
|
||||||
|
if project_entity is None:
|
||||||
|
project_entity = ayon_api.get_project(project_name)
|
||||||
|
task_types_by_name = {
|
||||||
|
task["name"]: task for task in
|
||||||
|
project_entity["taskTypes"]
|
||||||
|
}
|
||||||
|
task_short = task_types_by_name.get(task_type, {}).get("shortName")
|
||||||
|
task_value["short"] = task_short
|
||||||
|
|
||||||
|
if not product_base_type and "{product[basetype]}" in template.lower():
|
||||||
|
warn(
|
||||||
|
"You have Product base type in product name template, "
|
||||||
|
"but it is not provided by the creator, please update your "
|
||||||
|
"creation code to include it. It will be required in "
|
||||||
|
"the future.",
|
||||||
|
DeprecationWarning,
|
||||||
|
stacklevel=2)
|
||||||
|
|
||||||
|
fill_pairs: dict[str, Union[str, dict[str, str]]] = {
|
||||||
|
"variant": variant,
|
||||||
|
"family": product_type,
|
||||||
|
"task": task_value,
|
||||||
|
"product": {
|
||||||
|
"type": product_type,
|
||||||
|
"basetype": product_base_type or product_type,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if dynamic_data:
|
||||||
|
# Dynamic data may override default values
|
||||||
|
for key, value in dynamic_data.items():
|
||||||
|
fill_pairs[key] = value
|
||||||
|
|
||||||
|
try:
|
||||||
|
return StringTemplate.format_strict_template(
|
||||||
|
template=template,
|
||||||
|
data=prepare_template_data(fill_pairs)
|
||||||
|
)
|
||||||
|
except KeyError as exp:
|
||||||
|
msg = (
|
||||||
|
f"Value for {exp} key is missing in template '{template}'."
|
||||||
|
f" Available values are {fill_pairs}"
|
||||||
|
)
|
||||||
|
raise TemplateFillError(msg) from exp
|
||||||
|
|
||||||
|
|
||||||
|
def _backwards_compatibility_product_name(func):
|
||||||
|
"""Helper to decide which variant of 'get_product_name' to use.
|
||||||
|
|
||||||
|
The old version expected 'task_name' and 'task_type' arguments. The new
|
||||||
|
version expects 'folder_entity' and 'task_entity' arguments instead.
|
||||||
|
|
||||||
|
The function is also marked with an attribute 'version' so other addons
|
||||||
|
can check if the function is using the new signature or is using
|
||||||
|
the old signature. That should allow addons to adapt to new signature.
|
||||||
|
>>> if getattr(get_product_name, "use_entities", None):
|
||||||
|
>>> # New signature is used
|
||||||
|
>>> path = get_product_name(project_name, folder_entity, ...)
|
||||||
|
>>> else:
|
||||||
|
>>> # Old signature is used
|
||||||
|
>>> path = get_product_name(project_name, taks_name, ...)
|
||||||
|
"""
|
||||||
|
# Add attribute to function to identify it as the new function
|
||||||
|
# so other addons can easily identify it.
|
||||||
|
# >>> geattr(get_product_name, "use_entities", False)
|
||||||
|
setattr(func, "use_entities", True)
|
||||||
|
|
||||||
|
@wraps(func)
|
||||||
|
def inner(*args, **kwargs):
|
||||||
|
# ---
|
||||||
|
# Decide which variant of the function is used based on
|
||||||
|
# passed arguments.
|
||||||
|
# ---
|
||||||
|
|
||||||
|
# Entities in key-word arguments mean that the new function is used
|
||||||
|
if "folder_entity" in kwargs or "task_entity" in kwargs:
|
||||||
|
return func(*args, **kwargs)
|
||||||
|
|
||||||
|
# Using more than 7 positional arguments is not allowed
|
||||||
|
# in the new function
|
||||||
|
if len(args) > 7:
|
||||||
|
return _get_product_name_old(*args, **kwargs)
|
||||||
|
|
||||||
|
if len(args) > 1:
|
||||||
|
arg_2 = args[1]
|
||||||
|
# The second argument is a string -> task name
|
||||||
|
if isinstance(arg_2, str):
|
||||||
|
return _get_product_name_old(*args, **kwargs)
|
||||||
|
|
||||||
|
if is_func_signature_supported(func, *args, **kwargs):
|
||||||
|
return func(*args, **kwargs)
|
||||||
|
return _get_product_name_old(*args, **kwargs)
|
||||||
|
|
||||||
|
return inner
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def get_product_name(
|
||||||
|
project_name: str,
|
||||||
|
folder_entity: dict[str, Any],
|
||||||
|
task_entity: Optional[dict[str, Any]],
|
||||||
|
product_base_type: str,
|
||||||
|
product_type: str,
|
||||||
|
host_name: str,
|
||||||
|
variant: str,
|
||||||
|
*,
|
||||||
|
dynamic_data: Optional[dict[str, Any]] = None,
|
||||||
|
project_settings: Optional[dict[str, Any]] = None,
|
||||||
|
project_entity: Optional[dict[str, Any]] = None,
|
||||||
|
default_template: Optional[str] = None,
|
||||||
|
product_base_type_filter: Optional[str] = None,
|
||||||
|
) -> TemplateResult:
|
||||||
|
"""Calculate product name based on passed context and AYON settings.
|
||||||
|
|
||||||
|
Subst name templates are defined in `project_settings/global/tools/creator
|
||||||
|
/product_name_profiles` where are profiles with host name, product type,
|
||||||
|
task name and task type filters. If context does not match any profile
|
||||||
|
then `DEFAULT_PRODUCT_TEMPLATE` is used as default template.
|
||||||
|
|
||||||
|
That's main reason why so many arguments are required to calculate product
|
||||||
|
name.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
project_name (str): Project name.
|
||||||
|
folder_entity (Optional[dict[str, Any]]): Folder entity.
|
||||||
|
task_entity (Optional[dict[str, Any]]): Task entity.
|
||||||
|
host_name (str): Host name.
|
||||||
|
product_base_type (str): Product base type.
|
||||||
|
product_type (str): Product type.
|
||||||
|
variant (str): In most of the cases it is user input during creation.
|
||||||
|
dynamic_data (Optional[dict[str, Any]]): Dynamic data specific for
|
||||||
|
a creator which creates instance.
|
||||||
|
project_settings (Optional[dict[str, Any]]): Prepared settings
|
||||||
|
for project. Settings are queried if not passed.
|
||||||
|
project_entity (Optional[dict[str, Any]]): Project entity used when
|
||||||
|
task short name is required by template.
|
||||||
|
default_template (Optional[str]): Default template if any profile does
|
||||||
|
not match passed context. Constant 'DEFAULT_PRODUCT_TEMPLATE'
|
||||||
|
is used if is not passed.
|
||||||
|
product_base_type_filter (Optional[str]): Use different product base
|
||||||
|
type for product template filtering. Value of
|
||||||
|
`product_base_type_filter` is used when not passed.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
TemplateResult: Product name.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
TaskNotSetError: If template requires task which is not provided.
|
||||||
|
TemplateFillError: If filled template contains placeholder key which
|
||||||
|
is not collected.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
def get_product_name(
|
def get_product_name(
|
||||||
project_name,
|
project_name,
|
||||||
task_name,
|
task_name,
|
||||||
|
|
@ -81,25 +309,25 @@ def get_product_name(
|
||||||
project_settings=None,
|
project_settings=None,
|
||||||
product_type_filter=None,
|
product_type_filter=None,
|
||||||
project_entity=None,
|
project_entity=None,
|
||||||
):
|
) -> TemplateResult:
|
||||||
"""Calculate product name based on passed context and AYON settings.
|
"""Calculate product name based on passed context and AYON settings.
|
||||||
|
|
||||||
Subst name templates are defined in `project_settings/global/tools/creator
|
Product name templates are defined in `project_settings/global/tools
|
||||||
/product_name_profiles` where are profiles with host name, product type,
|
/creator/product_name_profiles` where are profiles with host name,
|
||||||
task name and task type filters. If context does not match any profile
|
product type, task name and task type filters. If context does not match
|
||||||
then `DEFAULT_PRODUCT_TEMPLATE` is used as default template.
|
any profile then `DEFAULT_PRODUCT_TEMPLATE` is used as default template.
|
||||||
|
|
||||||
That's main reason why so many arguments are required to calculate product
|
That's main reason why so many arguments are required to calculate product
|
||||||
name.
|
name.
|
||||||
|
|
||||||
Todos:
|
Deprecated:
|
||||||
Find better filtering options to avoid requirement of
|
This function is using deprecated signature that does not support
|
||||||
argument 'family_filter'.
|
folder entity data to be used.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
project_name (str): Project name.
|
project_name (str): Project name.
|
||||||
task_name (Union[str, None]): Task name.
|
task_name (Optional[str]): Task name.
|
||||||
task_type (Union[str, None]): Task type.
|
task_type (Optional[str]): Task type.
|
||||||
host_name (str): Host name.
|
host_name (str): Host name.
|
||||||
product_type (str): Product type.
|
product_type (str): Product type.
|
||||||
variant (str): In most of the cases it is user input during creation.
|
variant (str): In most of the cases it is user input during creation.
|
||||||
|
|
@ -117,7 +345,63 @@ def get_product_name(
|
||||||
task short name is required by template.
|
task short name is required by template.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
str: Product name.
|
TemplateResult: Product name.
|
||||||
|
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
@_backwards_compatibility_product_name
|
||||||
|
def get_product_name(
|
||||||
|
project_name: str,
|
||||||
|
folder_entity: dict[str, Any],
|
||||||
|
task_entity: Optional[dict[str, Any]],
|
||||||
|
product_base_type: str,
|
||||||
|
product_type: str,
|
||||||
|
host_name: str,
|
||||||
|
variant: str,
|
||||||
|
*,
|
||||||
|
dynamic_data: Optional[dict[str, Any]] = None,
|
||||||
|
project_settings: Optional[dict[str, Any]] = None,
|
||||||
|
project_entity: Optional[dict[str, Any]] = None,
|
||||||
|
default_template: Optional[str] = None,
|
||||||
|
product_base_type_filter: Optional[str] = None,
|
||||||
|
) -> TemplateResult:
|
||||||
|
"""Calculate product name based on passed context and AYON settings.
|
||||||
|
|
||||||
|
Product name templates are defined in `project_settings/global/tools
|
||||||
|
/creator/product_name_profiles` where are profiles with host name,
|
||||||
|
product base type, product type, task name and task type filters.
|
||||||
|
|
||||||
|
If context does not match any profile then `DEFAULT_PRODUCT_TEMPLATE`
|
||||||
|
is used as default template.
|
||||||
|
|
||||||
|
That's main reason why so many arguments are required to calculate product
|
||||||
|
name.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
project_name (str): Project name.
|
||||||
|
folder_entity (Optional[dict[str, Any]]): Folder entity.
|
||||||
|
task_entity (Optional[dict[str, Any]]): Task entity.
|
||||||
|
host_name (str): Host name.
|
||||||
|
product_base_type (str): Product base type.
|
||||||
|
product_type (str): Product type.
|
||||||
|
variant (str): In most of the cases it is user input during creation.
|
||||||
|
dynamic_data (Optional[dict[str, Any]]): Dynamic data specific for
|
||||||
|
a creator which creates instance.
|
||||||
|
project_settings (Optional[dict[str, Any]]): Prepared settings
|
||||||
|
for project. Settings are queried if not passed.
|
||||||
|
project_entity (Optional[dict[str, Any]]): Project entity used when
|
||||||
|
task short name is required by template.
|
||||||
|
default_template (Optional[str]): Default template if any profile does
|
||||||
|
not match passed context. Constant 'DEFAULT_PRODUCT_TEMPLATE'
|
||||||
|
is used if is not passed.
|
||||||
|
product_base_type_filter (Optional[str]): Use different product base
|
||||||
|
type for product template filtering. Value of
|
||||||
|
`product_base_type_filter` is used when not passed.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
TemplateResult: Product name.
|
||||||
|
|
||||||
Raises:
|
Raises:
|
||||||
TaskNotSetError: If template requires task which is not provided.
|
TaskNotSetError: If template requires task which is not provided.
|
||||||
|
|
@ -126,47 +410,68 @@ def get_product_name(
|
||||||
|
|
||||||
"""
|
"""
|
||||||
if not product_type:
|
if not product_type:
|
||||||
return ""
|
return StringTemplate("").format({})
|
||||||
|
|
||||||
|
task_name = task_type = None
|
||||||
|
if task_entity:
|
||||||
|
task_name = task_entity["name"]
|
||||||
|
task_type = task_entity["taskType"]
|
||||||
|
|
||||||
template = get_product_name_template(
|
template = get_product_name_template(
|
||||||
project_name,
|
project_name=project_name,
|
||||||
product_type_filter or product_type,
|
product_base_type=product_base_type_filter or product_base_type,
|
||||||
task_name,
|
product_type=product_type,
|
||||||
task_type,
|
task_name=task_name,
|
||||||
host_name,
|
task_type=task_type,
|
||||||
|
host_name=host_name,
|
||||||
default_template=default_template,
|
default_template=default_template,
|
||||||
project_settings=project_settings
|
project_settings=project_settings,
|
||||||
)
|
)
|
||||||
# Simple check of task name existence for template with {task} in
|
|
||||||
# - missing task should be possible only in Standalone publisher
|
template_low = template.lower()
|
||||||
if not task_name and "{task" in template.lower():
|
# Simple check of task name existence for template with {task[name]} in
|
||||||
|
if not task_name and "{task" in template_low:
|
||||||
raise TaskNotSetError()
|
raise TaskNotSetError()
|
||||||
|
|
||||||
task_value = {
|
task_value = {
|
||||||
"name": task_name,
|
"name": task_name,
|
||||||
"type": task_type,
|
"type": task_type,
|
||||||
}
|
}
|
||||||
if "{task}" in template.lower():
|
if "{task}" in template_low:
|
||||||
task_value = task_name
|
task_value = task_name
|
||||||
|
# NOTE this is message for TDs and Admins -> not really for users
|
||||||
|
# TODO validate this in settings and not allow it
|
||||||
|
log.warning(
|
||||||
|
"Found deprecated task key '{task}' in product name template."
|
||||||
|
" Please use '{task[name]}' instead."
|
||||||
|
)
|
||||||
|
|
||||||
elif "{task[short]}" in template.lower():
|
elif "{task[short]}" in template_low:
|
||||||
if project_entity is None:
|
if project_entity is None:
|
||||||
project_entity = ayon_api.get_project(project_name)
|
project_entity = ayon_api.get_project(project_name)
|
||||||
task_types_by_name = {
|
task_types_by_name = {
|
||||||
task["name"]: task for task in
|
task["name"]: task
|
||||||
project_entity["taskTypes"]
|
for task in project_entity["taskTypes"]
|
||||||
}
|
}
|
||||||
task_short = task_types_by_name.get(task_type, {}).get("shortName")
|
task_short = task_types_by_name.get(task_type, {}).get("shortName")
|
||||||
task_value["short"] = task_short
|
task_value["short"] = task_short
|
||||||
|
|
||||||
fill_pairs = {
|
fill_pairs = {
|
||||||
"variant": variant,
|
"variant": variant,
|
||||||
|
# TODO We should stop support 'family' key.
|
||||||
"family": product_type,
|
"family": product_type,
|
||||||
"task": task_value,
|
"task": task_value,
|
||||||
"product": {
|
"product": {
|
||||||
"type": product_type
|
"type": product_type,
|
||||||
|
"basetype": product_base_type,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
if folder_entity:
|
||||||
|
fill_pairs["folder"] = {
|
||||||
|
"name": folder_entity["name"],
|
||||||
|
"type": folder_entity["folderType"],
|
||||||
|
}
|
||||||
|
|
||||||
if dynamic_data:
|
if dynamic_data:
|
||||||
# Dynamic data may override default values
|
# Dynamic data may override default values
|
||||||
for key, value in dynamic_data.items():
|
for key, value in dynamic_data.items():
|
||||||
|
|
@ -178,7 +483,8 @@ def get_product_name(
|
||||||
data=prepare_template_data(fill_pairs)
|
data=prepare_template_data(fill_pairs)
|
||||||
)
|
)
|
||||||
except KeyError as exp:
|
except KeyError as exp:
|
||||||
raise TemplateFillError(
|
msg = (
|
||||||
"Value for {} key is missing in template '{}'."
|
f"Value for {exp} key is missing in template '{template}'."
|
||||||
" Available values are {}".format(str(exp), template, fill_pairs)
|
f" Available values are {fill_pairs}"
|
||||||
)
|
)
|
||||||
|
raise TemplateFillError(msg)
|
||||||
|
|
|
||||||
|
|
@ -11,6 +11,8 @@ from ayon_core.lib.attribute_definitions import (
|
||||||
serialize_attr_defs,
|
serialize_attr_defs,
|
||||||
deserialize_attr_defs,
|
deserialize_attr_defs,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
from ayon_core.pipeline import (
|
from ayon_core.pipeline import (
|
||||||
AYON_INSTANCE_ID,
|
AYON_INSTANCE_ID,
|
||||||
AVALON_INSTANCE_ID,
|
AVALON_INSTANCE_ID,
|
||||||
|
|
@ -137,6 +139,7 @@ class AttributeValues:
|
||||||
if value is None:
|
if value is None:
|
||||||
continue
|
continue
|
||||||
converted_value = attr_def.convert_value(value)
|
converted_value = attr_def.convert_value(value)
|
||||||
|
# QUESTION Could we just use converted value all the time?
|
||||||
if converted_value == value:
|
if converted_value == value:
|
||||||
self._data[attr_def.key] = value
|
self._data[attr_def.key] = value
|
||||||
|
|
||||||
|
|
@ -245,11 +248,11 @@ class AttributeValues:
|
||||||
|
|
||||||
def _update(self, value):
|
def _update(self, value):
|
||||||
changes = {}
|
changes = {}
|
||||||
for key, value in dict(value).items():
|
for key, key_value in dict(value).items():
|
||||||
if key in self._data and self._data.get(key) == value:
|
if key in self._data and self._data.get(key) == key_value:
|
||||||
continue
|
continue
|
||||||
self._data[key] = value
|
self._data[key] = key_value
|
||||||
changes[key] = value
|
changes[key] = key_value
|
||||||
return changes
|
return changes
|
||||||
|
|
||||||
def _pop(self, key, default):
|
def _pop(self, key, default):
|
||||||
|
|
@ -479,6 +482,10 @@ class CreatedInstance:
|
||||||
data (Dict[str, Any]): Data used for filling product name or override
|
data (Dict[str, Any]): Data used for filling product name or override
|
||||||
data from already existing instance.
|
data from already existing instance.
|
||||||
creator (BaseCreator): Creator responsible for instance.
|
creator (BaseCreator): Creator responsible for instance.
|
||||||
|
product_base_type (Optional[str]): Product base type that will be
|
||||||
|
created. If not provided then product base type is taken from
|
||||||
|
creator plugin. If creator does not have product base type then
|
||||||
|
deprecation warning is raised.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Keys that can't be changed or removed from data after loading using
|
# Keys that can't be changed or removed from data after loading using
|
||||||
|
|
@ -489,6 +496,7 @@ class CreatedInstance:
|
||||||
"id",
|
"id",
|
||||||
"instance_id",
|
"instance_id",
|
||||||
"productType",
|
"productType",
|
||||||
|
"productBaseType",
|
||||||
"creator_identifier",
|
"creator_identifier",
|
||||||
"creator_attributes",
|
"creator_attributes",
|
||||||
"publish_attributes"
|
"publish_attributes"
|
||||||
|
|
@ -508,7 +516,13 @@ class CreatedInstance:
|
||||||
data: Dict[str, Any],
|
data: Dict[str, Any],
|
||||||
creator: "BaseCreator",
|
creator: "BaseCreator",
|
||||||
transient_data: Optional[Dict[str, Any]] = None,
|
transient_data: Optional[Dict[str, Any]] = None,
|
||||||
|
product_base_type: Optional[str] = None
|
||||||
):
|
):
|
||||||
|
"""Initialize CreatedInstance."""
|
||||||
|
# fallback to product type for backward compatibility
|
||||||
|
if not product_base_type:
|
||||||
|
product_base_type = creator.product_base_type or product_type
|
||||||
|
|
||||||
self._creator = creator
|
self._creator = creator
|
||||||
creator_identifier = creator.identifier
|
creator_identifier = creator.identifier
|
||||||
group_label = creator.get_group_label()
|
group_label = creator.get_group_label()
|
||||||
|
|
@ -561,6 +575,9 @@ class CreatedInstance:
|
||||||
self._data["id"] = item_id
|
self._data["id"] = item_id
|
||||||
self._data["productType"] = product_type
|
self._data["productType"] = product_type
|
||||||
self._data["productName"] = product_name
|
self._data["productName"] = product_name
|
||||||
|
|
||||||
|
self._data["productBaseType"] = product_base_type
|
||||||
|
|
||||||
self._data["active"] = data.get("active", True)
|
self._data["active"] = data.get("active", True)
|
||||||
self._data["creator_identifier"] = creator_identifier
|
self._data["creator_identifier"] = creator_identifier
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -202,7 +202,8 @@ def is_clip_from_media_sequence(otio_clip):
|
||||||
|
|
||||||
|
|
||||||
def remap_range_on_file_sequence(otio_clip, otio_range):
|
def remap_range_on_file_sequence(otio_clip, otio_range):
|
||||||
"""
|
""" Remap the provided range on a file sequence clip.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
otio_clip (otio.schema.Clip): The OTIO clip to check.
|
otio_clip (otio.schema.Clip): The OTIO clip to check.
|
||||||
otio_range (otio.schema.TimeRange): The trim range to apply.
|
otio_range (otio.schema.TimeRange): The trim range to apply.
|
||||||
|
|
@ -249,7 +250,11 @@ def remap_range_on_file_sequence(otio_clip, otio_range):
|
||||||
if (
|
if (
|
||||||
is_clip_from_media_sequence(otio_clip)
|
is_clip_from_media_sequence(otio_clip)
|
||||||
and available_range_start_frame == media_ref.start_frame
|
and available_range_start_frame == media_ref.start_frame
|
||||||
and conformed_src_in.to_frames() < media_ref.start_frame
|
|
||||||
|
# source range should be included in available range from media
|
||||||
|
# using round instead of conformed_src_in.to_frames() to avoid
|
||||||
|
# any precision issue with frame rate.
|
||||||
|
and round(conformed_src_in.value) < media_ref.start_frame
|
||||||
):
|
):
|
||||||
media_in = otio.opentime.RationalTime(
|
media_in = otio.opentime.RationalTime(
|
||||||
0, rate=available_range_rate
|
0, rate=available_range_rate
|
||||||
|
|
|
||||||
|
|
@ -253,6 +253,19 @@ def create_skeleton_instance(
|
||||||
"reuseLastVersion": data.get("reuseLastVersion", False),
|
"reuseLastVersion": data.get("reuseLastVersion", False),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Pass on the OCIO metadata of what the source display and view are
|
||||||
|
# so that the farm can correctly set up color management.
|
||||||
|
if "sceneDisplay" in data and "sceneView" in data:
|
||||||
|
instance_skeleton_data["sceneDisplay"] = data["sceneDisplay"]
|
||||||
|
instance_skeleton_data["sceneView"] = data["sceneView"]
|
||||||
|
elif "colorspaceDisplay" in data and "colorspaceView" in data:
|
||||||
|
# Backwards compatibility for sceneDisplay and sceneView
|
||||||
|
instance_skeleton_data["colorspaceDisplay"] = data["colorspaceDisplay"]
|
||||||
|
instance_skeleton_data["colorspaceView"] = data["colorspaceView"]
|
||||||
|
if "sourceDisplay" in data and "sourceView" in data:
|
||||||
|
instance_skeleton_data["sourceDisplay"] = data["sourceDisplay"]
|
||||||
|
instance_skeleton_data["sourceView"] = data["sourceView"]
|
||||||
|
|
||||||
if data.get("renderlayer"):
|
if data.get("renderlayer"):
|
||||||
instance_skeleton_data["renderlayer"] = data["renderlayer"]
|
instance_skeleton_data["renderlayer"] = data["renderlayer"]
|
||||||
|
|
||||||
|
|
@ -589,24 +602,7 @@ def create_instances_for_aov(
|
||||||
"""
|
"""
|
||||||
# we cannot attach AOVs to other products as we consider every
|
# we cannot attach AOVs to other products as we consider every
|
||||||
# AOV product of its own.
|
# AOV product of its own.
|
||||||
|
|
||||||
log = Logger.get_logger("farm_publishing")
|
log = Logger.get_logger("farm_publishing")
|
||||||
additional_color_data = {
|
|
||||||
"renderProducts": instance.data["renderProducts"],
|
|
||||||
"colorspaceConfig": instance.data["colorspaceConfig"],
|
|
||||||
"display": instance.data["colorspaceDisplay"],
|
|
||||||
"view": instance.data["colorspaceView"]
|
|
||||||
}
|
|
||||||
|
|
||||||
# Get templated path from absolute config path.
|
|
||||||
anatomy = instance.context.data["anatomy"]
|
|
||||||
colorspace_template = instance.data["colorspaceConfig"]
|
|
||||||
try:
|
|
||||||
additional_color_data["colorspaceTemplate"] = remap_source(
|
|
||||||
colorspace_template, anatomy)
|
|
||||||
except ValueError as e:
|
|
||||||
log.warning(e)
|
|
||||||
additional_color_data["colorspaceTemplate"] = colorspace_template
|
|
||||||
|
|
||||||
# if there are product to attach to and more than one AOV,
|
# if there are product to attach to and more than one AOV,
|
||||||
# we cannot proceed.
|
# we cannot proceed.
|
||||||
|
|
@ -618,6 +614,29 @@ def create_instances_for_aov(
|
||||||
"attaching multiple AOVs or renderable cameras to "
|
"attaching multiple AOVs or renderable cameras to "
|
||||||
"product is not supported yet.")
|
"product is not supported yet.")
|
||||||
|
|
||||||
|
additional_data = {
|
||||||
|
"renderProducts": instance.data["renderProducts"],
|
||||||
|
}
|
||||||
|
|
||||||
|
# Collect color management data if present
|
||||||
|
colorspace_config = instance.data.get("colorspaceConfig")
|
||||||
|
if colorspace_config:
|
||||||
|
additional_data.update({
|
||||||
|
"colorspaceConfig": colorspace_config,
|
||||||
|
# Display/View are optional
|
||||||
|
"display": instance.data.get("sourceDisplay"),
|
||||||
|
"view": instance.data.get("sourceView")
|
||||||
|
})
|
||||||
|
|
||||||
|
# Get templated path from absolute config path.
|
||||||
|
anatomy = instance.context.data["anatomy"]
|
||||||
|
try:
|
||||||
|
additional_data["colorspaceTemplate"] = remap_source(
|
||||||
|
colorspace_config, anatomy)
|
||||||
|
except ValueError as e:
|
||||||
|
log.warning(e)
|
||||||
|
additional_data["colorspaceTemplate"] = colorspace_config
|
||||||
|
|
||||||
# create instances for every AOV we found in expected files.
|
# create instances for every AOV we found in expected files.
|
||||||
# NOTE: this is done for every AOV and every render camera (if
|
# NOTE: this is done for every AOV and every render camera (if
|
||||||
# there are multiple renderable cameras in scene)
|
# there are multiple renderable cameras in scene)
|
||||||
|
|
@ -625,7 +644,7 @@ def create_instances_for_aov(
|
||||||
instance,
|
instance,
|
||||||
skeleton,
|
skeleton,
|
||||||
aov_filter,
|
aov_filter,
|
||||||
additional_color_data,
|
additional_data,
|
||||||
skip_integration_repre_list,
|
skip_integration_repre_list,
|
||||||
do_not_add_review,
|
do_not_add_review,
|
||||||
frames_to_render
|
frames_to_render
|
||||||
|
|
@ -936,16 +955,28 @@ def _create_instances_for_aov(
|
||||||
"stagingDir": staging_dir,
|
"stagingDir": staging_dir,
|
||||||
"fps": new_instance.get("fps"),
|
"fps": new_instance.get("fps"),
|
||||||
"tags": ["review"] if preview else [],
|
"tags": ["review"] if preview else [],
|
||||||
"colorspaceData": {
|
}
|
||||||
|
|
||||||
|
if colorspace and additional_data["colorspaceConfig"]:
|
||||||
|
# Only apply colorspace data if the image has a colorspace
|
||||||
|
colorspace_data: dict = {
|
||||||
"colorspace": colorspace,
|
"colorspace": colorspace,
|
||||||
"config": {
|
"config": {
|
||||||
"path": additional_data["colorspaceConfig"],
|
"path": additional_data["colorspaceConfig"],
|
||||||
"template": additional_data["colorspaceTemplate"]
|
"template": additional_data["colorspaceTemplate"]
|
||||||
},
|
},
|
||||||
"display": additional_data["display"],
|
|
||||||
"view": additional_data["view"]
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
# Display/View are optional
|
||||||
|
display = additional_data.get("display")
|
||||||
|
if display:
|
||||||
|
colorspace_data["display"] = display
|
||||||
|
view = additional_data.get("view")
|
||||||
|
if view:
|
||||||
|
colorspace_data["view"] = view
|
||||||
|
|
||||||
|
rep["colorspaceData"] = colorspace_data
|
||||||
|
else:
|
||||||
|
log.debug("No colorspace data for representation: {}".format(rep))
|
||||||
|
|
||||||
# support conversion from tiled to scanline
|
# support conversion from tiled to scanline
|
||||||
if instance.data.get("convertToScanline"):
|
if instance.data.get("convertToScanline"):
|
||||||
|
|
@ -1045,7 +1076,9 @@ def get_resources(project_name, version_entity, extension=None):
|
||||||
filtered.append(repre_entity)
|
filtered.append(repre_entity)
|
||||||
|
|
||||||
representation = filtered[0]
|
representation = filtered[0]
|
||||||
directory = get_representation_path(representation)
|
directory = get_representation_path(
|
||||||
|
project_name, representation
|
||||||
|
)
|
||||||
print("Source: ", directory)
|
print("Source: ", directory)
|
||||||
resources = sorted(
|
resources = sorted(
|
||||||
[
|
[
|
||||||
|
|
|
||||||
|
|
@ -25,8 +25,8 @@ from .utils import (
|
||||||
get_loader_identifier,
|
get_loader_identifier,
|
||||||
get_loaders_by_name,
|
get_loaders_by_name,
|
||||||
|
|
||||||
get_representation_path_from_context,
|
|
||||||
get_representation_path,
|
get_representation_path,
|
||||||
|
get_representation_path_from_context,
|
||||||
get_representation_path_with_anatomy,
|
get_representation_path_with_anatomy,
|
||||||
|
|
||||||
is_compatible_loader,
|
is_compatible_loader,
|
||||||
|
|
@ -85,8 +85,8 @@ __all__ = (
|
||||||
"get_loader_identifier",
|
"get_loader_identifier",
|
||||||
"get_loaders_by_name",
|
"get_loaders_by_name",
|
||||||
|
|
||||||
"get_representation_path_from_context",
|
|
||||||
"get_representation_path",
|
"get_representation_path",
|
||||||
|
"get_representation_path_from_context",
|
||||||
"get_representation_path_with_anatomy",
|
"get_representation_path_with_anatomy",
|
||||||
|
|
||||||
"is_compatible_loader",
|
"is_compatible_loader",
|
||||||
|
|
|
||||||
|
|
@ -21,6 +21,13 @@ from .utils import get_representation_path_from_context
|
||||||
class LoaderPlugin(list):
|
class LoaderPlugin(list):
|
||||||
"""Load representation into host application"""
|
"""Load representation into host application"""
|
||||||
|
|
||||||
|
# Attribute 'skip_discovery' is used during discovery phase to skip
|
||||||
|
# plugins, which can be used to mark base plugins that should not be
|
||||||
|
# considered as plugins "to use". The discovery logic does NOT use
|
||||||
|
# the attribute value from parent classes. Each base class has to define
|
||||||
|
# the attribute again.
|
||||||
|
skip_discovery = True
|
||||||
|
|
||||||
product_types: set[str] = set()
|
product_types: set[str] = set()
|
||||||
product_base_types: Optional[set[str]] = None
|
product_base_types: Optional[set[str]] = None
|
||||||
representations = set()
|
representations = set()
|
||||||
|
|
|
||||||
|
|
@ -1,11 +1,15 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import uuid
|
import uuid
|
||||||
import platform
|
import warnings
|
||||||
import logging
|
import logging
|
||||||
import inspect
|
import inspect
|
||||||
import collections
|
import collections
|
||||||
import numbers
|
import numbers
|
||||||
from typing import Optional, Union, Any
|
import copy
|
||||||
|
from functools import wraps
|
||||||
|
from typing import Optional, Union, Any, overload
|
||||||
|
|
||||||
import ayon_api
|
import ayon_api
|
||||||
|
|
||||||
|
|
@ -14,9 +18,8 @@ from ayon_core.lib import (
|
||||||
StringTemplate,
|
StringTemplate,
|
||||||
TemplateUnsolved,
|
TemplateUnsolved,
|
||||||
)
|
)
|
||||||
from ayon_core.pipeline import (
|
from ayon_core.lib.path_templates import TemplateResult
|
||||||
Anatomy,
|
from ayon_core.pipeline import Anatomy
|
||||||
)
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
@ -644,15 +647,15 @@ def get_representation_path_from_context(context):
|
||||||
|
|
||||||
representation = context["representation"]
|
representation = context["representation"]
|
||||||
project_entity = context.get("project")
|
project_entity = context.get("project")
|
||||||
root = None
|
if project_entity:
|
||||||
if (
|
project_name = project_entity["name"]
|
||||||
project_entity
|
else:
|
||||||
and project_entity["name"] != get_current_project_name()
|
project_name = get_current_project_name()
|
||||||
):
|
return get_representation_path(
|
||||||
anatomy = Anatomy(project_entity["name"])
|
project_name,
|
||||||
root = anatomy.roots
|
representation,
|
||||||
|
project_entity=project_entity,
|
||||||
return get_representation_path(representation, root)
|
)
|
||||||
|
|
||||||
|
|
||||||
def get_representation_path_with_anatomy(repre_entity, anatomy):
|
def get_representation_path_with_anatomy(repre_entity, anatomy):
|
||||||
|
|
@ -671,64 +674,35 @@ def get_representation_path_with_anatomy(repre_entity, anatomy):
|
||||||
anatomy (Anatomy): Project anatomy object.
|
anatomy (Anatomy): Project anatomy object.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Union[None, TemplateResult]: None if path can't be received
|
TemplateResult: Resolved representation path.
|
||||||
|
|
||||||
Raises:
|
Raises:
|
||||||
InvalidRepresentationContext: When representation data are probably
|
InvalidRepresentationContext: When representation data are probably
|
||||||
invalid or not available.
|
invalid or not available.
|
||||||
"""
|
|
||||||
|
|
||||||
try:
|
|
||||||
template = repre_entity["attrib"]["template"]
|
|
||||||
|
|
||||||
except KeyError:
|
|
||||||
raise InvalidRepresentationContext((
|
|
||||||
"Representation document does not"
|
|
||||||
" contain template in data ('data.template')"
|
|
||||||
))
|
|
||||||
|
|
||||||
try:
|
|
||||||
context = repre_entity["context"]
|
|
||||||
_fix_representation_context_compatibility(context)
|
|
||||||
context["root"] = anatomy.roots
|
|
||||||
|
|
||||||
path = StringTemplate.format_strict_template(template, context)
|
|
||||||
|
|
||||||
except TemplateUnsolved as exc:
|
|
||||||
raise InvalidRepresentationContext((
|
|
||||||
"Couldn't resolve representation template with available data."
|
|
||||||
" Reason: {}".format(str(exc))
|
|
||||||
))
|
|
||||||
|
|
||||||
return path.normalized()
|
|
||||||
|
|
||||||
|
|
||||||
def get_representation_path(representation, root=None):
|
|
||||||
"""Get filename from representation document
|
|
||||||
|
|
||||||
There are three ways of getting the path from representation which are
|
|
||||||
tried in following sequence until successful.
|
|
||||||
1. Get template from representation['data']['template'] and data from
|
|
||||||
representation['context']. Then format template with the data.
|
|
||||||
2. Get template from project['config'] and format it with default data set
|
|
||||||
3. Get representation['data']['path'] and use it directly
|
|
||||||
|
|
||||||
Args:
|
|
||||||
representation(dict): representation document from the database
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
str: fullpath of the representation
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
if root is None:
|
return get_representation_path(
|
||||||
from ayon_core.pipeline import get_current_project_name, Anatomy
|
anatomy.project_name,
|
||||||
|
repre_entity,
|
||||||
anatomy = Anatomy(get_current_project_name())
|
anatomy=anatomy,
|
||||||
return get_representation_path_with_anatomy(
|
|
||||||
representation, anatomy
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def path_from_representation():
|
|
||||||
|
def get_representation_path_with_roots(
|
||||||
|
representation: dict[str, Any],
|
||||||
|
roots: dict[str, str],
|
||||||
|
) -> Optional[TemplateResult]:
|
||||||
|
"""Get filename from representation with custom root.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
representation(dict): Representation entity.
|
||||||
|
roots (dict[str, str]): Roots to use.
|
||||||
|
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Optional[TemplateResult]: Resolved representation path.
|
||||||
|
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
template = representation["attrib"]["template"]
|
template = representation["attrib"]["template"]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
|
|
@ -739,71 +713,209 @@ def get_representation_path(representation, root=None):
|
||||||
|
|
||||||
_fix_representation_context_compatibility(context)
|
_fix_representation_context_compatibility(context)
|
||||||
|
|
||||||
context["root"] = root
|
context["root"] = roots
|
||||||
path = StringTemplate.format_strict_template(
|
path = StringTemplate.format_strict_template(
|
||||||
template, context
|
template, context
|
||||||
)
|
)
|
||||||
# Force replacing backslashes with forward slashed if not on
|
|
||||||
# windows
|
|
||||||
if platform.system().lower() != "windows":
|
|
||||||
path = path.replace("\\", "/")
|
|
||||||
except (TemplateUnsolved, KeyError):
|
except (TemplateUnsolved, KeyError):
|
||||||
# Template references unavailable data
|
# Template references unavailable data
|
||||||
return None
|
return None
|
||||||
|
|
||||||
if not path:
|
return path.normalized()
|
||||||
return path
|
|
||||||
|
|
||||||
normalized_path = os.path.normpath(path)
|
|
||||||
if os.path.exists(normalized_path):
|
|
||||||
return normalized_path
|
|
||||||
return path
|
|
||||||
|
|
||||||
def path_from_data():
|
def _backwards_compatibility_repre_path(func):
|
||||||
if "path" not in representation["attrib"]:
|
"""Wrapper handling backwards compatibility of 'get_representation_path'.
|
||||||
return None
|
|
||||||
|
|
||||||
path = representation["attrib"]["path"]
|
Allows 'get_representation_path' to support old and new signatures of the
|
||||||
# Force replacing backslashes with forward slashed if not on
|
function. The old signature supported passing in representation entity
|
||||||
# windows
|
and optional roots. The new signature requires the project name
|
||||||
if platform.system().lower() != "windows":
|
to be passed. In case custom roots should be used, a dedicated function
|
||||||
path = path.replace("\\", "/")
|
'get_representation_path_with_roots' is available.
|
||||||
|
|
||||||
if os.path.exists(path):
|
The wrapper handles passed arguments, and based on kwargs and types
|
||||||
return os.path.normpath(path)
|
of the arguments will call the function which relates to
|
||||||
|
the arguments.
|
||||||
|
|
||||||
dir_path, file_name = os.path.split(path)
|
The function is also marked with an attribute 'version' so other addons
|
||||||
if not os.path.exists(dir_path):
|
can check if the function is using the new signature or is using
|
||||||
return None
|
the old signature. That should allow addons to adapt to new signature.
|
||||||
|
>>> if getattr(get_representation_path, "version", None) == 2:
|
||||||
|
>>> path = get_representation_path(project_name, repre_entity)
|
||||||
|
>>> else:
|
||||||
|
>>> path = get_representation_path(repre_entity)
|
||||||
|
|
||||||
base_name, ext = os.path.splitext(file_name)
|
The plan to remove backwards compatibility is 1.1.2026.
|
||||||
file_name_items = None
|
|
||||||
if "#" in base_name:
|
|
||||||
file_name_items = [part for part in base_name.split("#") if part]
|
|
||||||
elif "%" in base_name:
|
|
||||||
file_name_items = base_name.split("%")
|
|
||||||
|
|
||||||
if not file_name_items:
|
"""
|
||||||
return None
|
# Add an attribute to the function so addons can check if the new variant
|
||||||
|
# of the function is available.
|
||||||
|
# >>> getattr(get_representation_path, "version", None) == 2
|
||||||
|
# >>> True
|
||||||
|
setattr(func, "version", 2)
|
||||||
|
|
||||||
filename_start = file_name_items[0]
|
@wraps(func)
|
||||||
|
def inner(*args, **kwargs):
|
||||||
|
from ayon_core.pipeline import get_current_project_name
|
||||||
|
|
||||||
for _file in os.listdir(dir_path):
|
# Decide which variant of the function based on passed arguments
|
||||||
if _file.startswith(filename_start) and _file.endswith(ext):
|
# will be used.
|
||||||
return os.path.normpath(path)
|
if args:
|
||||||
|
arg_1 = args[0]
|
||||||
|
if isinstance(arg_1, str):
|
||||||
|
return func(*args, **kwargs)
|
||||||
|
|
||||||
return (
|
elif "project_name" in kwargs:
|
||||||
path_from_representation() or path_from_data()
|
return func(*args, **kwargs)
|
||||||
|
|
||||||
|
warnings.warn(
|
||||||
|
(
|
||||||
|
"Used deprecated variant of 'get_representation_path'."
|
||||||
|
" Please change used arguments signature to follow"
|
||||||
|
" new definition. Will be removed 1.1.2026."
|
||||||
|
),
|
||||||
|
DeprecationWarning,
|
||||||
|
stacklevel=2,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Find out which arguments were passed
|
||||||
|
if args:
|
||||||
|
representation = args[0]
|
||||||
|
else:
|
||||||
|
representation = kwargs.get("representation")
|
||||||
|
|
||||||
|
if len(args) > 1:
|
||||||
|
roots = args[1]
|
||||||
|
else:
|
||||||
|
roots = kwargs.get("root")
|
||||||
|
|
||||||
|
if roots is not None:
|
||||||
|
return get_representation_path_with_roots(
|
||||||
|
representation, roots
|
||||||
|
)
|
||||||
|
|
||||||
|
project_name = (
|
||||||
|
representation["context"].get("project", {}).get("name")
|
||||||
|
)
|
||||||
|
if project_name is None:
|
||||||
|
project_name = get_current_project_name()
|
||||||
|
|
||||||
|
return func(project_name, representation)
|
||||||
|
|
||||||
|
return inner
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def get_representation_path(
|
||||||
|
representation: dict[str, Any],
|
||||||
|
root: Optional[dict[str, Any]] = None,
|
||||||
|
) -> TemplateResult:
|
||||||
|
"""DEPRECATED Get filled representation path.
|
||||||
|
|
||||||
|
Use 'get_representation_path' using the new function signature.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
representation (dict[str, Any]): Representation entity.
|
||||||
|
root (Optional[dict[str, Any]): Roots to fill the path.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
TemplateResult: Resolved path to representation.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
InvalidRepresentationContext: When representation data are probably
|
||||||
|
invalid or not available.
|
||||||
|
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def get_representation_path(
|
||||||
|
project_name: str,
|
||||||
|
repre_entity: dict[str, Any],
|
||||||
|
*,
|
||||||
|
anatomy: Optional[Anatomy] = None,
|
||||||
|
project_entity: Optional[dict[str, Any]] = None,
|
||||||
|
) -> TemplateResult:
|
||||||
|
"""Get filled representation path.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
project_name (str): Project name.
|
||||||
|
repre_entity (dict[str, Any]): Representation entity.
|
||||||
|
anatomy (Optional[Anatomy]): Project anatomy.
|
||||||
|
project_entity (Optional[dict[str, Any]): Project entity. Is used to
|
||||||
|
initialize Anatomy and is not needed if 'anatomy' is passed in.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
TemplateResult: Resolved path to representation.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
InvalidRepresentationContext: When representation data are probably
|
||||||
|
invalid or not available.
|
||||||
|
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
@_backwards_compatibility_repre_path
|
||||||
|
def get_representation_path(
|
||||||
|
project_name: str,
|
||||||
|
repre_entity: dict[str, Any],
|
||||||
|
*,
|
||||||
|
anatomy: Optional[Anatomy] = None,
|
||||||
|
project_entity: Optional[dict[str, Any]] = None,
|
||||||
|
) -> TemplateResult:
|
||||||
|
"""Get filled representation path.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
project_name (str): Project name.
|
||||||
|
repre_entity (dict[str, Any]): Representation entity.
|
||||||
|
anatomy (Optional[Anatomy]): Project anatomy.
|
||||||
|
project_entity (Optional[dict[str, Any]): Project entity. Is used to
|
||||||
|
initialize Anatomy and is not needed if 'anatomy' is passed in.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
TemplateResult: Resolved path to representation.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
InvalidRepresentationContext: When representation data are probably
|
||||||
|
invalid or not available.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if anatomy is None:
|
||||||
|
anatomy = Anatomy(project_name, project_entity=project_entity)
|
||||||
|
|
||||||
|
try:
|
||||||
|
template = repre_entity["attrib"]["template"]
|
||||||
|
|
||||||
|
except KeyError as exc:
|
||||||
|
raise InvalidRepresentationContext(
|
||||||
|
"Failed to receive template from representation entity."
|
||||||
|
) from exc
|
||||||
|
|
||||||
|
try:
|
||||||
|
context = copy.deepcopy(repre_entity["context"])
|
||||||
|
_fix_representation_context_compatibility(context)
|
||||||
|
context["root"] = anatomy.roots
|
||||||
|
|
||||||
|
path = StringTemplate.format_strict_template(template, context)
|
||||||
|
|
||||||
|
except TemplateUnsolved as exc:
|
||||||
|
raise InvalidRepresentationContext(
|
||||||
|
"Failed to resolve representation template with available data."
|
||||||
|
) from exc
|
||||||
|
|
||||||
|
return path.normalized()
|
||||||
|
|
||||||
|
|
||||||
def get_representation_path_by_names(
|
def get_representation_path_by_names(
|
||||||
project_name: str,
|
project_name: str,
|
||||||
folder_path: str,
|
folder_path: str,
|
||||||
product_name: str,
|
product_name: str,
|
||||||
version_name: str,
|
version_name: Union[int, str],
|
||||||
representation_name: str,
|
representation_name: str,
|
||||||
anatomy: Optional[Anatomy] = None) -> Optional[str]:
|
anatomy: Optional[Anatomy] = None
|
||||||
|
) -> Optional[TemplateResult]:
|
||||||
"""Get (latest) filepath for representation for folder and product.
|
"""Get (latest) filepath for representation for folder and product.
|
||||||
|
|
||||||
See `get_representation_by_names` for more details.
|
See `get_representation_by_names` for more details.
|
||||||
|
|
@ -820,14 +932,13 @@ def get_representation_path_by_names(
|
||||||
representation_name
|
representation_name
|
||||||
)
|
)
|
||||||
if not representation:
|
if not representation:
|
||||||
return
|
return None
|
||||||
|
|
||||||
if not anatomy:
|
return get_representation_path(
|
||||||
anatomy = Anatomy(project_name)
|
project_name,
|
||||||
|
representation,
|
||||||
if representation:
|
anatomy=anatomy,
|
||||||
path = get_representation_path_with_anatomy(representation, anatomy)
|
)
|
||||||
return str(path).replace("\\", "/")
|
|
||||||
|
|
||||||
|
|
||||||
def get_representation_by_names(
|
def get_representation_by_names(
|
||||||
|
|
@ -837,7 +948,7 @@ def get_representation_by_names(
|
||||||
version_name: Union[int, str],
|
version_name: Union[int, str],
|
||||||
representation_name: str,
|
representation_name: str,
|
||||||
) -> Optional[dict]:
|
) -> Optional[dict]:
|
||||||
"""Get representation entity for asset and subset.
|
"""Get representation entity for folder and product.
|
||||||
|
|
||||||
If version_name is "hero" then return the hero version
|
If version_name is "hero" then return the hero version
|
||||||
If version_name is "latest" then return the latest version
|
If version_name is "latest" then return the latest version
|
||||||
|
|
@ -852,10 +963,10 @@ def get_representation_by_names(
|
||||||
folder_entity = ayon_api.get_folder_by_path(
|
folder_entity = ayon_api.get_folder_by_path(
|
||||||
project_name, folder_path, fields=["id"])
|
project_name, folder_path, fields=["id"])
|
||||||
if not folder_entity:
|
if not folder_entity:
|
||||||
return
|
return None
|
||||||
|
|
||||||
if isinstance(product_name, dict) and "name" in product_name:
|
if isinstance(product_name, dict) and "name" in product_name:
|
||||||
# Allow explicitly passing subset document
|
# Allow explicitly passing product entity document
|
||||||
product_entity = product_name
|
product_entity = product_name
|
||||||
else:
|
else:
|
||||||
product_entity = ayon_api.get_product_by_name(
|
product_entity = ayon_api.get_product_by_name(
|
||||||
|
|
@ -864,7 +975,7 @@ def get_representation_by_names(
|
||||||
folder_id=folder_entity["id"],
|
folder_id=folder_entity["id"],
|
||||||
fields=["id"])
|
fields=["id"])
|
||||||
if not product_entity:
|
if not product_entity:
|
||||||
return
|
return None
|
||||||
|
|
||||||
if version_name == "hero":
|
if version_name == "hero":
|
||||||
version_entity = ayon_api.get_hero_version_by_product_id(
|
version_entity = ayon_api.get_hero_version_by_product_id(
|
||||||
|
|
@ -876,7 +987,7 @@ def get_representation_by_names(
|
||||||
version_entity = ayon_api.get_version_by_name(
|
version_entity = ayon_api.get_version_by_name(
|
||||||
project_name, version_name, product_id=product_entity["id"])
|
project_name, version_name, product_id=product_entity["id"])
|
||||||
if not version_entity:
|
if not version_entity:
|
||||||
return
|
return None
|
||||||
|
|
||||||
return ayon_api.get_representation_by_name(
|
return ayon_api.get_representation_by_name(
|
||||||
project_name, representation_name, version_id=version_entity["id"])
|
project_name, representation_name, version_id=version_entity["id"])
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,9 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import inspect
|
import inspect
|
||||||
import traceback
|
import traceback
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
from ayon_core.lib import Logger
|
from ayon_core.lib import Logger
|
||||||
from ayon_core.lib.python_module_tools import (
|
from ayon_core.lib.python_module_tools import (
|
||||||
|
|
@ -96,6 +99,77 @@ class DiscoverResult:
|
||||||
log.info(report)
|
log.info(report)
|
||||||
|
|
||||||
|
|
||||||
|
def discover_plugins(
|
||||||
|
base_class: type,
|
||||||
|
paths: Optional[list[str]] = None,
|
||||||
|
classes: Optional[list[type]] = None,
|
||||||
|
ignored_classes: Optional[list[type]] = None,
|
||||||
|
allow_duplicates: bool = True,
|
||||||
|
):
|
||||||
|
"""Find and return subclasses of `superclass`
|
||||||
|
|
||||||
|
Args:
|
||||||
|
base_class (type): Class which determines discovered subclasses.
|
||||||
|
paths (Optional[list[str]]): List of paths to look for plug-ins.
|
||||||
|
classes (Optional[list[str]]): List of classes to filter.
|
||||||
|
ignored_classes (list[type]): List of classes that won't be added to
|
||||||
|
the output plugins.
|
||||||
|
allow_duplicates (bool): Validate class name duplications.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
DiscoverResult: Object holding successfully
|
||||||
|
discovered plugins, ignored plugins, plugins with missing
|
||||||
|
abstract implementation and duplicated plugin.
|
||||||
|
|
||||||
|
"""
|
||||||
|
ignored_classes = ignored_classes or []
|
||||||
|
paths = paths or []
|
||||||
|
classes = classes or []
|
||||||
|
|
||||||
|
result = DiscoverResult(base_class)
|
||||||
|
|
||||||
|
all_plugins = list(classes)
|
||||||
|
|
||||||
|
for path in paths:
|
||||||
|
modules, crashed = modules_from_path(path)
|
||||||
|
for (filepath, exc_info) in crashed:
|
||||||
|
result.crashed_file_paths[filepath] = exc_info
|
||||||
|
|
||||||
|
for item in modules:
|
||||||
|
filepath, module = item
|
||||||
|
result.add_module(module)
|
||||||
|
for cls in classes_from_module(base_class, module):
|
||||||
|
if cls is base_class:
|
||||||
|
continue
|
||||||
|
# Class has defined 'skip_discovery = True'
|
||||||
|
skip_discovery = cls.__dict__.get("skip_discovery")
|
||||||
|
if skip_discovery is True:
|
||||||
|
continue
|
||||||
|
all_plugins.append(cls)
|
||||||
|
|
||||||
|
if base_class not in ignored_classes:
|
||||||
|
ignored_classes.append(base_class)
|
||||||
|
|
||||||
|
plugin_names = set()
|
||||||
|
for cls in all_plugins:
|
||||||
|
if cls in ignored_classes:
|
||||||
|
result.ignored_plugins.add(cls)
|
||||||
|
continue
|
||||||
|
|
||||||
|
if inspect.isabstract(cls):
|
||||||
|
result.abstract_plugins.append(cls)
|
||||||
|
continue
|
||||||
|
|
||||||
|
if not allow_duplicates:
|
||||||
|
class_name = cls.__name__
|
||||||
|
if class_name in plugin_names:
|
||||||
|
result.duplicated_plugins.append(cls)
|
||||||
|
continue
|
||||||
|
plugin_names.add(class_name)
|
||||||
|
result.plugins.append(cls)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
class PluginDiscoverContext(object):
|
class PluginDiscoverContext(object):
|
||||||
"""Store and discover registered types nad registered paths to types.
|
"""Store and discover registered types nad registered paths to types.
|
||||||
|
|
||||||
|
|
@ -141,58 +215,17 @@ class PluginDiscoverContext(object):
|
||||||
Union[DiscoverResult, list[Any]]: Object holding successfully
|
Union[DiscoverResult, list[Any]]: Object holding successfully
|
||||||
discovered plugins, ignored plugins, plugins with missing
|
discovered plugins, ignored plugins, plugins with missing
|
||||||
abstract implementation and duplicated plugin.
|
abstract implementation and duplicated plugin.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if not ignore_classes:
|
|
||||||
ignore_classes = []
|
|
||||||
|
|
||||||
result = DiscoverResult(superclass)
|
|
||||||
plugin_names = set()
|
|
||||||
registered_classes = self._registered_plugins.get(superclass) or []
|
registered_classes = self._registered_plugins.get(superclass) or []
|
||||||
registered_paths = self._registered_plugin_paths.get(superclass) or []
|
registered_paths = self._registered_plugin_paths.get(superclass) or []
|
||||||
for cls in registered_classes:
|
result = discover_plugins(
|
||||||
if cls is superclass or cls in ignore_classes:
|
superclass,
|
||||||
result.ignored_plugins.add(cls)
|
paths=registered_paths,
|
||||||
continue
|
classes=registered_classes,
|
||||||
|
ignored_classes=ignore_classes,
|
||||||
if inspect.isabstract(cls):
|
allow_duplicates=allow_duplicates,
|
||||||
result.abstract_plugins.append(cls)
|
)
|
||||||
continue
|
|
||||||
|
|
||||||
class_name = cls.__name__
|
|
||||||
if class_name in plugin_names:
|
|
||||||
result.duplicated_plugins.append(cls)
|
|
||||||
continue
|
|
||||||
plugin_names.add(class_name)
|
|
||||||
result.plugins.append(cls)
|
|
||||||
|
|
||||||
# Include plug-ins from registered paths
|
|
||||||
for path in registered_paths:
|
|
||||||
modules, crashed = modules_from_path(path)
|
|
||||||
for item in crashed:
|
|
||||||
filepath, exc_info = item
|
|
||||||
result.crashed_file_paths[filepath] = exc_info
|
|
||||||
|
|
||||||
for item in modules:
|
|
||||||
filepath, module = item
|
|
||||||
result.add_module(module)
|
|
||||||
for cls in classes_from_module(superclass, module):
|
|
||||||
if cls is superclass or cls in ignore_classes:
|
|
||||||
result.ignored_plugins.add(cls)
|
|
||||||
continue
|
|
||||||
|
|
||||||
if inspect.isabstract(cls):
|
|
||||||
result.abstract_plugins.append(cls)
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not allow_duplicates:
|
|
||||||
class_name = cls.__name__
|
|
||||||
if class_name in plugin_names:
|
|
||||||
result.duplicated_plugins.append(cls)
|
|
||||||
continue
|
|
||||||
plugin_names.add(class_name)
|
|
||||||
|
|
||||||
result.plugins.append(cls)
|
|
||||||
|
|
||||||
# Store in memory last result to keep in memory loaded modules
|
# Store in memory last result to keep in memory loaded modules
|
||||||
self._last_discovered_results[superclass] = result
|
self._last_discovered_results[superclass] = result
|
||||||
|
|
|
||||||
|
|
@ -29,6 +29,7 @@ from .lib import (
|
||||||
get_publish_template_name,
|
get_publish_template_name,
|
||||||
|
|
||||||
publish_plugins_discover,
|
publish_plugins_discover,
|
||||||
|
filter_crashed_publish_paths,
|
||||||
load_help_content_from_plugin,
|
load_help_content_from_plugin,
|
||||||
load_help_content_from_filepath,
|
load_help_content_from_filepath,
|
||||||
|
|
||||||
|
|
@ -87,6 +88,7 @@ __all__ = (
|
||||||
"get_publish_template_name",
|
"get_publish_template_name",
|
||||||
|
|
||||||
"publish_plugins_discover",
|
"publish_plugins_discover",
|
||||||
|
"filter_crashed_publish_paths",
|
||||||
"load_help_content_from_plugin",
|
"load_help_content_from_plugin",
|
||||||
"load_help_content_from_filepath",
|
"load_help_content_from_filepath",
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,8 @@
|
||||||
"""Library functions for publishing."""
|
"""Library functions for publishing."""
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
import os
|
import os
|
||||||
|
import platform
|
||||||
|
import re
|
||||||
import sys
|
import sys
|
||||||
import inspect
|
import inspect
|
||||||
import copy
|
import copy
|
||||||
|
|
@ -8,19 +10,19 @@ import warnings
|
||||||
import hashlib
|
import hashlib
|
||||||
import xml.etree.ElementTree
|
import xml.etree.ElementTree
|
||||||
from typing import TYPE_CHECKING, Optional, Union, List, Any
|
from typing import TYPE_CHECKING, Optional, Union, List, Any
|
||||||
import clique
|
|
||||||
import speedcopy
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
import pyblish.util
|
|
||||||
import pyblish.plugin
|
|
||||||
import pyblish.api
|
|
||||||
|
|
||||||
from ayon_api import (
|
from ayon_api import (
|
||||||
get_server_api_connection,
|
get_server_api_connection,
|
||||||
get_representations,
|
get_representations,
|
||||||
get_last_version_by_product_name
|
get_last_version_by_product_name
|
||||||
)
|
)
|
||||||
|
import clique
|
||||||
|
import pyblish.util
|
||||||
|
import pyblish.plugin
|
||||||
|
import pyblish.api
|
||||||
|
import speedcopy
|
||||||
|
|
||||||
from ayon_core.lib import (
|
from ayon_core.lib import (
|
||||||
import_filepath,
|
import_filepath,
|
||||||
Logger,
|
Logger,
|
||||||
|
|
@ -122,7 +124,8 @@ def get_publish_template_name(
|
||||||
task_type,
|
task_type,
|
||||||
project_settings=None,
|
project_settings=None,
|
||||||
hero=False,
|
hero=False,
|
||||||
logger=None
|
product_base_type: Optional[str] = None,
|
||||||
|
logger=None,
|
||||||
):
|
):
|
||||||
"""Get template name which should be used for passed context.
|
"""Get template name which should be used for passed context.
|
||||||
|
|
||||||
|
|
@ -140,17 +143,29 @@ def get_publish_template_name(
|
||||||
task_type (str): Task type on which is instance working.
|
task_type (str): Task type on which is instance working.
|
||||||
project_settings (Dict[str, Any]): Prepared project settings.
|
project_settings (Dict[str, Any]): Prepared project settings.
|
||||||
hero (bool): Template is for hero version publishing.
|
hero (bool): Template is for hero version publishing.
|
||||||
|
product_base_type (Optional[str]): Product type for which should
|
||||||
|
be found template.
|
||||||
logger (logging.Logger): Custom logger used for 'filter_profiles'
|
logger (logging.Logger): Custom logger used for 'filter_profiles'
|
||||||
function.
|
function.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
str: Template name which should be used for integration.
|
str: Template name which should be used for integration.
|
||||||
"""
|
"""
|
||||||
|
if not product_base_type:
|
||||||
|
msg = (
|
||||||
|
"Argument 'product_base_type' is not provided to"
|
||||||
|
" 'get_publish_template_name' function. This argument"
|
||||||
|
" will be required in future versions."
|
||||||
|
)
|
||||||
|
warnings.warn(msg, DeprecationWarning)
|
||||||
|
if logger:
|
||||||
|
logger.warning(msg)
|
||||||
|
|
||||||
template = None
|
template = None
|
||||||
filter_criteria = {
|
filter_criteria = {
|
||||||
"hosts": host_name,
|
"hosts": host_name,
|
||||||
"product_types": product_type,
|
"product_types": product_type,
|
||||||
|
"product_base_types": product_base_type,
|
||||||
"task_names": task_name,
|
"task_names": task_name,
|
||||||
"task_types": task_type,
|
"task_types": task_type,
|
||||||
}
|
}
|
||||||
|
|
@ -179,7 +194,9 @@ class HelpContent:
|
||||||
self.detail = detail
|
self.detail = detail
|
||||||
|
|
||||||
|
|
||||||
def load_help_content_from_filepath(filepath):
|
def load_help_content_from_filepath(
|
||||||
|
filepath: str
|
||||||
|
) -> dict[str, dict[str, HelpContent]]:
|
||||||
"""Load help content from xml file.
|
"""Load help content from xml file.
|
||||||
Xml file may contain errors and warnings.
|
Xml file may contain errors and warnings.
|
||||||
"""
|
"""
|
||||||
|
|
@ -214,18 +231,84 @@ def load_help_content_from_filepath(filepath):
|
||||||
return output
|
return output
|
||||||
|
|
||||||
|
|
||||||
def load_help_content_from_plugin(plugin):
|
def load_help_content_from_plugin(
|
||||||
|
plugin: pyblish.api.Plugin,
|
||||||
|
help_filename: Optional[str] = None,
|
||||||
|
) -> dict[str, dict[str, HelpContent]]:
|
||||||
cls = plugin
|
cls = plugin
|
||||||
if not inspect.isclass(plugin):
|
if not inspect.isclass(plugin):
|
||||||
cls = plugin.__class__
|
cls = plugin.__class__
|
||||||
|
|
||||||
plugin_filepath = inspect.getfile(cls)
|
plugin_filepath = inspect.getfile(cls)
|
||||||
plugin_dir = os.path.dirname(plugin_filepath)
|
plugin_dir = os.path.dirname(plugin_filepath)
|
||||||
|
if help_filename is None:
|
||||||
basename = os.path.splitext(os.path.basename(plugin_filepath))[0]
|
basename = os.path.splitext(os.path.basename(plugin_filepath))[0]
|
||||||
filename = basename + ".xml"
|
help_filename = basename + ".xml"
|
||||||
filepath = os.path.join(plugin_dir, "help", filename)
|
filepath = os.path.join(plugin_dir, "help", help_filename)
|
||||||
return load_help_content_from_filepath(filepath)
|
return load_help_content_from_filepath(filepath)
|
||||||
|
|
||||||
|
|
||||||
|
def filter_crashed_publish_paths(
|
||||||
|
project_name: str,
|
||||||
|
crashed_paths: set[str],
|
||||||
|
*,
|
||||||
|
project_settings: Optional[dict[str, Any]] = None,
|
||||||
|
) -> set[str]:
|
||||||
|
"""Filter crashed paths happened during plugins discovery.
|
||||||
|
|
||||||
|
Check if plugins discovery has enabled strict mode and filter crashed
|
||||||
|
paths that happened during discover based on regexes from settings.
|
||||||
|
|
||||||
|
Publishing should not start if any paths are returned.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
project_name (str): Project name in which context plugins discovery
|
||||||
|
happened.
|
||||||
|
crashed_paths (set[str]): Crashed paths from plugins discovery report.
|
||||||
|
project_settings (Optional[dict[str, Any]]): Project settings.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
set[str]: Filtered crashed paths.
|
||||||
|
|
||||||
|
"""
|
||||||
|
filtered_paths = set()
|
||||||
|
# Nothing crashed all good...
|
||||||
|
if not crashed_paths:
|
||||||
|
return filtered_paths
|
||||||
|
|
||||||
|
if project_settings is None:
|
||||||
|
project_settings = get_project_settings(project_name)
|
||||||
|
|
||||||
|
discover_validation = (
|
||||||
|
project_settings["core"]["tools"]["publish"]["discover_validation"]
|
||||||
|
)
|
||||||
|
# Strict mode is not enabled.
|
||||||
|
if not discover_validation["enabled"]:
|
||||||
|
return filtered_paths
|
||||||
|
|
||||||
|
regexes = [
|
||||||
|
re.compile(value, re.IGNORECASE)
|
||||||
|
for value in discover_validation["ignore_paths"]
|
||||||
|
if value
|
||||||
|
]
|
||||||
|
is_windows = platform.system().lower() == "windows"
|
||||||
|
# Fitler path with regexes from settings
|
||||||
|
for path in crashed_paths:
|
||||||
|
# Normalize paths to use forward slashes on windows
|
||||||
|
if is_windows:
|
||||||
|
path = path.replace("\\", "/")
|
||||||
|
is_invalid = True
|
||||||
|
for regex in regexes:
|
||||||
|
if regex.match(path):
|
||||||
|
is_invalid = False
|
||||||
|
break
|
||||||
|
|
||||||
|
if is_invalid:
|
||||||
|
filtered_paths.add(path)
|
||||||
|
|
||||||
|
return filtered_paths
|
||||||
|
|
||||||
|
|
||||||
def publish_plugins_discover(
|
def publish_plugins_discover(
|
||||||
paths: Optional[list[str]] = None) -> DiscoverResult:
|
paths: Optional[list[str]] = None) -> DiscoverResult:
|
||||||
"""Find and return available pyblish plug-ins.
|
"""Find and return available pyblish plug-ins.
|
||||||
|
|
@ -812,7 +895,22 @@ def replace_with_published_scene_path(instance, replace_in_path=True):
|
||||||
template_data["comment"] = None
|
template_data["comment"] = None
|
||||||
|
|
||||||
anatomy = instance.context.data["anatomy"]
|
anatomy = instance.context.data["anatomy"]
|
||||||
template = anatomy.get_template_item("publish", "default", "path")
|
project_name = anatomy.project_name
|
||||||
|
task_name = task_type = None
|
||||||
|
task_entity = instance.data.get("taskEntity")
|
||||||
|
if task_entity:
|
||||||
|
task_name = task_entity["name"]
|
||||||
|
task_type = task_entity["taskType"]
|
||||||
|
project_settings = instance.context.data["project_settings"]
|
||||||
|
template_name = get_publish_template_name(
|
||||||
|
project_name=project_name,
|
||||||
|
host_name=instance.context.data["hostName"],
|
||||||
|
product_type=workfile_instance.data["productType"],
|
||||||
|
task_name=task_name,
|
||||||
|
task_type=task_type,
|
||||||
|
project_settings=project_settings,
|
||||||
|
)
|
||||||
|
template = anatomy.get_template_item("publish", template_name, "path")
|
||||||
template_filled = template.format_strict(template_data)
|
template_filled = template.format_strict(template_data)
|
||||||
file_path = os.path.normpath(template_filled)
|
file_path = os.path.normpath(template_filled)
|
||||||
|
|
||||||
|
|
@ -983,7 +1081,26 @@ def get_instance_expected_output_path(
|
||||||
"version": version
|
"version": version
|
||||||
})
|
})
|
||||||
|
|
||||||
path_template_obj = anatomy.get_template_item("publish", "default")["path"]
|
# Get instance publish template name
|
||||||
|
task_name = task_type = None
|
||||||
|
task_entity = instance.data.get("taskEntity")
|
||||||
|
if task_entity:
|
||||||
|
task_name = task_entity["name"]
|
||||||
|
task_type = task_entity["taskType"]
|
||||||
|
|
||||||
|
template_name = get_publish_template_name(
|
||||||
|
project_name=instance.context.data["projectName"],
|
||||||
|
host_name=instance.context.data["hostName"],
|
||||||
|
product_type=instance.data["productType"],
|
||||||
|
task_name=task_name,
|
||||||
|
task_type=task_type,
|
||||||
|
project_settings=instance.context.data["project_settings"],
|
||||||
|
)
|
||||||
|
|
||||||
|
path_template_obj = anatomy.get_template_item(
|
||||||
|
"publish",
|
||||||
|
template_name
|
||||||
|
)["path"]
|
||||||
template_filled = path_template_obj.format_strict(template_data)
|
template_filled = path_template_obj.format_strict(template_data)
|
||||||
return os.path.normpath(template_filled)
|
return os.path.normpath(template_filled)
|
||||||
|
|
||||||
|
|
@ -1045,14 +1162,16 @@ def main_cli_publish(
|
||||||
except ValueError:
|
except ValueError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
context = get_global_context()
|
||||||
|
project_settings = get_project_settings(context["project_name"])
|
||||||
|
|
||||||
install_ayon_plugins()
|
install_ayon_plugins()
|
||||||
|
|
||||||
if addons_manager is None:
|
if addons_manager is None:
|
||||||
addons_manager = AddonsManager()
|
addons_manager = AddonsManager(project_settings)
|
||||||
|
|
||||||
applications_addon = addons_manager.get_enabled_addon("applications")
|
applications_addon = addons_manager.get_enabled_addon("applications")
|
||||||
if applications_addon is not None:
|
if applications_addon is not None:
|
||||||
context = get_global_context()
|
|
||||||
env = applications_addon.get_farm_publish_environment_variables(
|
env = applications_addon.get_farm_publish_environment_variables(
|
||||||
context["project_name"],
|
context["project_name"],
|
||||||
context["folder_path"],
|
context["folder_path"],
|
||||||
|
|
@ -1075,17 +1194,33 @@ def main_cli_publish(
|
||||||
log.info("Running publish ...")
|
log.info("Running publish ...")
|
||||||
|
|
||||||
discover_result = publish_plugins_discover()
|
discover_result = publish_plugins_discover()
|
||||||
publish_plugins = discover_result.plugins
|
|
||||||
print(discover_result.get_report(only_errors=False))
|
print(discover_result.get_report(only_errors=False))
|
||||||
|
|
||||||
|
filtered_crashed_paths = filter_crashed_publish_paths(
|
||||||
|
context["project_name"],
|
||||||
|
set(discover_result.crashed_file_paths),
|
||||||
|
project_settings=project_settings,
|
||||||
|
)
|
||||||
|
if filtered_crashed_paths:
|
||||||
|
joined_paths = "\n".join([
|
||||||
|
f"- {path}"
|
||||||
|
for path in filtered_crashed_paths
|
||||||
|
])
|
||||||
|
log.error(
|
||||||
|
"Plugin discovery strict mode is enabled."
|
||||||
|
" Crashed plugin paths that prevent from publishing:"
|
||||||
|
f"\n{joined_paths}"
|
||||||
|
)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
publish_plugins = discover_result.plugins
|
||||||
|
|
||||||
# Error exit as soon as any error occurs.
|
# Error exit as soon as any error occurs.
|
||||||
error_format = ("Failed {plugin.__name__}: "
|
error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"
|
||||||
"{error} -- {error.traceback}")
|
|
||||||
|
|
||||||
for result in pyblish.util.publish_iter(plugins=publish_plugins):
|
for result in pyblish.util.publish_iter(plugins=publish_plugins):
|
||||||
if result["error"]:
|
if result["error"]:
|
||||||
log.error(error_format.format(**result))
|
log.error(error_format.format(**result))
|
||||||
# uninstall()
|
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
log.info("Publish finished.")
|
log.info("Publish finished.")
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,7 @@
|
||||||
import inspect
|
import inspect
|
||||||
from abc import ABCMeta
|
from abc import ABCMeta
|
||||||
import typing
|
import typing
|
||||||
from typing import Optional
|
from typing import Optional, Any
|
||||||
|
|
||||||
import pyblish.api
|
import pyblish.api
|
||||||
import pyblish.logic
|
import pyblish.logic
|
||||||
|
|
@ -82,22 +82,51 @@ class PublishValidationError(PublishError):
|
||||||
|
|
||||||
|
|
||||||
class PublishXmlValidationError(PublishValidationError):
|
class PublishXmlValidationError(PublishValidationError):
|
||||||
|
"""Raise an error from a dedicated xml file.
|
||||||
|
|
||||||
|
Can be useful to have one xml file with different possible messages that
|
||||||
|
helps to avoid flood code with dedicated artist messages.
|
||||||
|
|
||||||
|
XML files should live relative to the plugin file location:
|
||||||
|
'{plugin dir}/help/some_plugin.xml'.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
plugin (pyblish.api.Plugin): Plugin that raised an error. Is used
|
||||||
|
to get path to xml file.
|
||||||
|
message (str): Exception message, can be technical, is used for
|
||||||
|
console output.
|
||||||
|
key (Optional[str]): XML file can contain multiple error messages, key
|
||||||
|
is used to get one of them. By default is used 'main'.
|
||||||
|
formatting_data (Optional[dict[str, Any]): Error message can have
|
||||||
|
variables to fill.
|
||||||
|
help_filename (Optional[str]): Name of xml file with messages. By
|
||||||
|
default, is used filename where plugin lives with .xml extension.
|
||||||
|
|
||||||
|
"""
|
||||||
def __init__(
|
def __init__(
|
||||||
self, plugin, message, key=None, formatting_data=None
|
self,
|
||||||
):
|
plugin: pyblish.api.Plugin,
|
||||||
|
message: str,
|
||||||
|
key: Optional[str] = None,
|
||||||
|
formatting_data: Optional[dict[str, Any]] = None,
|
||||||
|
help_filename: Optional[str] = None,
|
||||||
|
) -> None:
|
||||||
if key is None:
|
if key is None:
|
||||||
key = "main"
|
key = "main"
|
||||||
|
|
||||||
if not formatting_data:
|
if not formatting_data:
|
||||||
formatting_data = {}
|
formatting_data = {}
|
||||||
result = load_help_content_from_plugin(plugin)
|
result = load_help_content_from_plugin(plugin, help_filename)
|
||||||
content_obj = result["errors"][key]
|
content_obj = result["errors"][key]
|
||||||
description = content_obj.description.format(**formatting_data)
|
description = content_obj.description.format(**formatting_data)
|
||||||
detail = content_obj.detail
|
detail = content_obj.detail
|
||||||
if detail:
|
if detail:
|
||||||
detail = detail.format(**formatting_data)
|
detail = detail.format(**formatting_data)
|
||||||
super(PublishXmlValidationError, self).__init__(
|
super().__init__(
|
||||||
message, content_obj.title, description, detail
|
message,
|
||||||
|
content_obj.title,
|
||||||
|
description,
|
||||||
|
detail
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,27 +1,50 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
import ayon_api
|
import ayon_api
|
||||||
|
|
||||||
from ayon_core.settings import get_studio_settings
|
from ayon_core.settings import get_studio_settings
|
||||||
from ayon_core.lib.local_settings import get_ayon_username
|
from ayon_core.lib import DefaultKeysDict
|
||||||
|
from ayon_core.lib.local_settings import get_ayon_user_entity
|
||||||
|
|
||||||
|
|
||||||
def get_general_template_data(settings=None, username=None):
|
def get_general_template_data(
|
||||||
|
settings: Optional[dict[str, Any]] = None,
|
||||||
|
username: Optional[str] = None,
|
||||||
|
user_entity: Optional[dict[str, Any]] = None,
|
||||||
|
):
|
||||||
"""General template data based on system settings or machine.
|
"""General template data based on system settings or machine.
|
||||||
|
|
||||||
Output contains formatting keys:
|
Output contains formatting keys:
|
||||||
- 'studio[name]' - Studio name filled from system settings
|
- 'studio[name]' - Studio name filled from system settings
|
||||||
- 'studio[code]' - Studio code filled from system settings
|
- 'studio[code]' - Studio code filled from system settings
|
||||||
- 'user' - User's name using 'get_ayon_username'
|
- 'user[name]' - User's name
|
||||||
|
- 'user[attrib][...]' - User's attributes
|
||||||
|
- 'user[data][...]' - User's data
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
settings (Dict[str, Any]): Studio or project settings.
|
settings (Dict[str, Any]): Studio or project settings.
|
||||||
username (Optional[str]): AYON Username.
|
username (Optional[str]): AYON Username.
|
||||||
"""
|
user_entity (Optional[dict[str, Any]]): User entity.
|
||||||
|
|
||||||
|
"""
|
||||||
if not settings:
|
if not settings:
|
||||||
settings = get_studio_settings()
|
settings = get_studio_settings()
|
||||||
|
|
||||||
if username is None:
|
if user_entity is None:
|
||||||
username = get_ayon_username()
|
user_entity = get_ayon_user_entity(username)
|
||||||
|
|
||||||
|
# Use dictionary with default value for backwards compatibility
|
||||||
|
# - we did support '{user}' now it should be '{user[name]}'
|
||||||
|
user_data = DefaultKeysDict(
|
||||||
|
"name",
|
||||||
|
{
|
||||||
|
"name": user_entity["name"],
|
||||||
|
"attrib": user_entity["attrib"],
|
||||||
|
"data": user_entity["data"],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
core_settings = settings["core"]
|
core_settings = settings["core"]
|
||||||
return {
|
return {
|
||||||
|
|
@ -29,7 +52,7 @@ def get_general_template_data(settings=None, username=None):
|
||||||
"name": core_settings["studio_name"],
|
"name": core_settings["studio_name"],
|
||||||
"code": core_settings["studio_code"]
|
"code": core_settings["studio_code"]
|
||||||
},
|
},
|
||||||
"user": username
|
"user": user_data,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -73,7 +96,6 @@ def get_folder_template_data(folder_entity, project_name):
|
||||||
|
|
||||||
Output dictionary contains keys:
|
Output dictionary contains keys:
|
||||||
- 'folder' - dictionary with 'name' key filled with folder name
|
- 'folder' - dictionary with 'name' key filled with folder name
|
||||||
- 'asset' - folder name
|
|
||||||
- 'hierarchy' - parent folder names joined with '/'
|
- 'hierarchy' - parent folder names joined with '/'
|
||||||
- 'parent' - direct parent name, project name used if is under
|
- 'parent' - direct parent name, project name used if is under
|
||||||
project
|
project
|
||||||
|
|
@ -109,7 +131,6 @@ def get_folder_template_data(folder_entity, project_name):
|
||||||
"path": path,
|
"path": path,
|
||||||
"parents": parents,
|
"parents": parents,
|
||||||
},
|
},
|
||||||
"asset": folder_name,
|
|
||||||
"hierarchy": hierarchy,
|
"hierarchy": hierarchy,
|
||||||
"parent": parent_name
|
"parent": parent_name
|
||||||
}
|
}
|
||||||
|
|
@ -150,7 +171,8 @@ def get_template_data(
|
||||||
task_entity=None,
|
task_entity=None,
|
||||||
host_name=None,
|
host_name=None,
|
||||||
settings=None,
|
settings=None,
|
||||||
username=None
|
username=None,
|
||||||
|
user_entity=None,
|
||||||
):
|
):
|
||||||
"""Prepare data for templates filling from entered documents and info.
|
"""Prepare data for templates filling from entered documents and info.
|
||||||
|
|
||||||
|
|
@ -173,13 +195,18 @@ def get_template_data(
|
||||||
host_name (Optional[str]): Used to fill '{app}' key.
|
host_name (Optional[str]): Used to fill '{app}' key.
|
||||||
settings (Union[Dict, None]): Prepared studio or project settings.
|
settings (Union[Dict, None]): Prepared studio or project settings.
|
||||||
They're queried if not passed (may be slower).
|
They're queried if not passed (may be slower).
|
||||||
username (Optional[str]): AYON Username.
|
username (Optional[str]): DEPRECATED AYON Username.
|
||||||
|
user_entity (Optional[dict[str, Any]): AYON user entity.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Dict[str, Any]: Data prepared for filling workdir template.
|
Dict[str, Any]: Data prepared for filling workdir template.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
template_data = get_general_template_data(settings, username=username)
|
template_data = get_general_template_data(
|
||||||
|
settings,
|
||||||
|
username=username,
|
||||||
|
user_entity=user_entity,
|
||||||
|
)
|
||||||
template_data.update(get_project_template_data(project_entity))
|
template_data.update(get_project_template_data(project_entity))
|
||||||
if folder_entity:
|
if folder_entity:
|
||||||
template_data.update(get_folder_template_data(
|
template_data.update(get_folder_template_data(
|
||||||
|
|
|
||||||
|
|
@ -299,7 +299,6 @@ def add_ordered_sublayer(layer, contribution_path, layer_id, order=None,
|
||||||
sdf format args metadata if enabled)
|
sdf format args metadata if enabled)
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Add the order with the contribution path so that for future
|
# Add the order with the contribution path so that for future
|
||||||
# contributions we can again use it to magically fit into the
|
# contributions we can again use it to magically fit into the
|
||||||
# ordering. We put this in the path because sublayer paths do
|
# ordering. We put this in the path because sublayer paths do
|
||||||
|
|
@ -317,20 +316,25 @@ def add_ordered_sublayer(layer, contribution_path, layer_id, order=None,
|
||||||
# If the layer was already in the layers, then replace it
|
# If the layer was already in the layers, then replace it
|
||||||
for index, existing_path in enumerate(layer.subLayerPaths):
|
for index, existing_path in enumerate(layer.subLayerPaths):
|
||||||
args = get_sdf_format_args(existing_path)
|
args = get_sdf_format_args(existing_path)
|
||||||
existing_layer = args.get("layer_id")
|
existing_layer_id = args.get("layer_id")
|
||||||
if existing_layer == layer_id:
|
if existing_layer_id == layer_id:
|
||||||
|
existing_layer = layer.subLayerPaths[index]
|
||||||
|
existing_order = args.get("order")
|
||||||
|
existing_order = int(existing_order) if existing_order else None
|
||||||
|
if order is not None and order != existing_order:
|
||||||
|
# We need to move the layer, so we will remove this index
|
||||||
|
# and then re-insert it below at the right order
|
||||||
|
log.debug(f"Removing existing layer: {existing_layer}")
|
||||||
|
del layer.subLayerPaths[index]
|
||||||
|
break
|
||||||
|
|
||||||
# Put it in the same position where it was before when swapping
|
# Put it in the same position where it was before when swapping
|
||||||
# it with the original, also take over its order metadata
|
# it with the original, also take over its order metadata
|
||||||
order = args.get("order")
|
|
||||||
if order is not None:
|
|
||||||
order = int(order)
|
|
||||||
else:
|
|
||||||
order = None
|
|
||||||
contribution_path = _format_path(contribution_path,
|
contribution_path = _format_path(contribution_path,
|
||||||
order=order,
|
order=existing_order,
|
||||||
layer_id=layer_id)
|
layer_id=layer_id)
|
||||||
log.debug(
|
log.debug(
|
||||||
f"Replacing existing layer: {layer.subLayerPaths[index]} "
|
f"Replacing existing layer: {existing_layer} "
|
||||||
f"-> {contribution_path}"
|
f"-> {contribution_path}"
|
||||||
)
|
)
|
||||||
layer.subLayerPaths[index] = contribution_path
|
layer.subLayerPaths[index] = contribution_path
|
||||||
|
|
@ -684,3 +688,20 @@ def get_sdf_format_args(path):
|
||||||
"""Return SDF_FORMAT_ARGS parsed to `dict`"""
|
"""Return SDF_FORMAT_ARGS parsed to `dict`"""
|
||||||
_raw_path, data = Sdf.Layer.SplitIdentifier(path)
|
_raw_path, data = Sdf.Layer.SplitIdentifier(path)
|
||||||
return data
|
return data
|
||||||
|
|
||||||
|
|
||||||
|
def get_standard_default_prim_name(folder_path: str) -> str:
|
||||||
|
"""Return the AYON-specified default prim name for a folder path.
|
||||||
|
|
||||||
|
This is used e.g. for the default prim in AYON USD Contribution workflows.
|
||||||
|
"""
|
||||||
|
folder_name: str = folder_path.rsplit("/", 1)[-1]
|
||||||
|
|
||||||
|
# Prim names are not allowed to start with a digit in USD. Authoring them
|
||||||
|
# would mean generating essentially garbage data and may result in
|
||||||
|
# unexpected behavior in certain USD or DCC versions, like failure to
|
||||||
|
# refresh in usdview or crashes in Houdini 21.
|
||||||
|
if folder_name and folder_name[0].isdigit():
|
||||||
|
folder_name = f"_{folder_name}"
|
||||||
|
|
||||||
|
return folder_name
|
||||||
|
|
|
||||||
|
|
@ -1,16 +1,19 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
from ayon_core.lib.profiles_filtering import filter_profiles
|
from ayon_core.lib.profiles_filtering import filter_profiles
|
||||||
from ayon_core.settings import get_project_settings
|
from ayon_core.settings import get_project_settings
|
||||||
|
|
||||||
|
|
||||||
def get_versioning_start(
|
def get_versioning_start(
|
||||||
project_name,
|
project_name: str,
|
||||||
host_name,
|
host_name: str,
|
||||||
task_name=None,
|
task_name: Optional[str] = None,
|
||||||
task_type=None,
|
task_type: Optional[str] = None,
|
||||||
product_type=None,
|
product_type: Optional[str] = None,
|
||||||
product_name=None,
|
product_name: Optional[str] = None,
|
||||||
project_settings=None,
|
project_settings: Optional[dict[str, Any]] = None,
|
||||||
):
|
) -> int:
|
||||||
"""Get anatomy versioning start"""
|
"""Get anatomy versioning start"""
|
||||||
if not project_settings:
|
if not project_settings:
|
||||||
project_settings = get_project_settings(project_name)
|
project_settings = get_project_settings(project_name)
|
||||||
|
|
@ -22,14 +25,12 @@ def get_versioning_start(
|
||||||
if not profiles:
|
if not profiles:
|
||||||
return version_start
|
return version_start
|
||||||
|
|
||||||
# TODO use 'product_types' and 'product_name' instead of
|
|
||||||
# 'families' and 'subsets'
|
|
||||||
filtering_criteria = {
|
filtering_criteria = {
|
||||||
"host_names": host_name,
|
"host_names": host_name,
|
||||||
"families": product_type,
|
"product_types": product_type,
|
||||||
|
"product_names": product_name,
|
||||||
"task_names": task_name,
|
"task_names": task_name,
|
||||||
"task_types": task_type,
|
"task_types": task_type,
|
||||||
"subsets": product_name
|
|
||||||
}
|
}
|
||||||
profile = filter_profiles(profiles, filtering_criteria)
|
profile = filter_profiles(profiles, filtering_criteria)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -300,7 +300,11 @@ class AbstractTemplateBuilder(ABC):
|
||||||
self._loaders_by_name = get_loaders_by_name()
|
self._loaders_by_name = get_loaders_by_name()
|
||||||
return self._loaders_by_name
|
return self._loaders_by_name
|
||||||
|
|
||||||
def get_linked_folder_entities(self, link_type: Optional[str]):
|
def get_linked_folder_entities(
|
||||||
|
self,
|
||||||
|
link_type: Optional[str],
|
||||||
|
folder_path_regex: Optional[str],
|
||||||
|
):
|
||||||
if not link_type:
|
if not link_type:
|
||||||
return []
|
return []
|
||||||
project_name = self.project_name
|
project_name = self.project_name
|
||||||
|
|
@ -317,7 +321,11 @@ class AbstractTemplateBuilder(ABC):
|
||||||
if link["entityType"] == "folder"
|
if link["entityType"] == "folder"
|
||||||
}
|
}
|
||||||
|
|
||||||
return list(get_folders(project_name, folder_ids=linked_folder_ids))
|
return list(get_folders(
|
||||||
|
project_name,
|
||||||
|
folder_path_regex=folder_path_regex,
|
||||||
|
folder_ids=linked_folder_ids,
|
||||||
|
))
|
||||||
|
|
||||||
def _collect_creators(self):
|
def _collect_creators(self):
|
||||||
self._creators_by_name = {
|
self._creators_by_name = {
|
||||||
|
|
@ -832,14 +840,24 @@ class AbstractTemplateBuilder(ABC):
|
||||||
host_name = self.host_name
|
host_name = self.host_name
|
||||||
task_name = self.current_task_name
|
task_name = self.current_task_name
|
||||||
task_type = self.current_task_type
|
task_type = self.current_task_type
|
||||||
|
folder_path = self.current_folder_path
|
||||||
|
folder_type = None
|
||||||
|
folder_entity = self.current_folder_entity
|
||||||
|
if folder_entity:
|
||||||
|
folder_type = folder_entity["folderType"]
|
||||||
|
|
||||||
|
filter_data = {
|
||||||
|
"task_types": task_type,
|
||||||
|
"task_names": task_name,
|
||||||
|
"folder_types": folder_type,
|
||||||
|
"folder_paths": folder_path,
|
||||||
|
}
|
||||||
|
|
||||||
build_profiles = self._get_build_profiles()
|
build_profiles = self._get_build_profiles()
|
||||||
profile = filter_profiles(
|
profile = filter_profiles(
|
||||||
build_profiles,
|
build_profiles,
|
||||||
{
|
filter_data,
|
||||||
"task_types": task_type,
|
logger=self.log
|
||||||
"task_names": task_name
|
|
||||||
}
|
|
||||||
)
|
)
|
||||||
if not profile:
|
if not profile:
|
||||||
raise TemplateProfileNotFound((
|
raise TemplateProfileNotFound((
|
||||||
|
|
@ -1465,7 +1483,7 @@ class PlaceholderLoadMixin(object):
|
||||||
tooltip=(
|
tooltip=(
|
||||||
"Link Type\n"
|
"Link Type\n"
|
||||||
"\nDefines what type of link will be used to"
|
"\nDefines what type of link will be used to"
|
||||||
" link the asset to the current folder."
|
" link the product to the current folder."
|
||||||
)
|
)
|
||||||
),
|
),
|
||||||
attribute_definitions.EnumDef(
|
attribute_definitions.EnumDef(
|
||||||
|
|
@ -1638,7 +1656,10 @@ class PlaceholderLoadMixin(object):
|
||||||
linked_folder_entity["id"]
|
linked_folder_entity["id"]
|
||||||
for linked_folder_entity in (
|
for linked_folder_entity in (
|
||||||
self.builder.get_linked_folder_entities(
|
self.builder.get_linked_folder_entities(
|
||||||
link_type=link_type))
|
link_type=link_type,
|
||||||
|
folder_path_regex=folder_path_regex
|
||||||
|
)
|
||||||
|
)
|
||||||
]
|
]
|
||||||
|
|
||||||
if not folder_ids:
|
if not folder_ids:
|
||||||
|
|
@ -1666,6 +1687,8 @@ class PlaceholderLoadMixin(object):
|
||||||
for version in get_last_versions(
|
for version in get_last_versions(
|
||||||
project_name, filtered_product_ids, fields={"id"}
|
project_name, filtered_product_ids, fields={"id"}
|
||||||
).values()
|
).values()
|
||||||
|
# Version may be none if a product has no versions
|
||||||
|
if version is not None
|
||||||
)
|
)
|
||||||
return list(get_representations(
|
return list(get_representations(
|
||||||
project_name,
|
project_name,
|
||||||
|
|
|
||||||
|
|
@ -1,34 +0,0 @@
|
||||||
from ayon_core.style import get_default_entity_icon_color
|
|
||||||
from ayon_core.pipeline import load
|
|
||||||
|
|
||||||
|
|
||||||
class CopyFile(load.LoaderPlugin):
|
|
||||||
"""Copy the published file to be pasted at the desired location"""
|
|
||||||
|
|
||||||
representations = {"*"}
|
|
||||||
product_types = {"*"}
|
|
||||||
|
|
||||||
label = "Copy File"
|
|
||||||
order = 10
|
|
||||||
icon = "copy"
|
|
||||||
color = get_default_entity_icon_color()
|
|
||||||
|
|
||||||
def load(self, context, name=None, namespace=None, data=None):
|
|
||||||
path = self.filepath_from_context(context)
|
|
||||||
self.log.info("Added copy to clipboard: {0}".format(path))
|
|
||||||
self.copy_file_to_clipboard(path)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def copy_file_to_clipboard(path):
|
|
||||||
from qtpy import QtCore, QtWidgets
|
|
||||||
|
|
||||||
clipboard = QtWidgets.QApplication.clipboard()
|
|
||||||
assert clipboard, "Must have running QApplication instance"
|
|
||||||
|
|
||||||
# Build mime data for clipboard
|
|
||||||
data = QtCore.QMimeData()
|
|
||||||
url = QtCore.QUrl.fromLocalFile(path)
|
|
||||||
data.setUrls([url])
|
|
||||||
|
|
||||||
# Set to Clipboard
|
|
||||||
clipboard.setMimeData(data)
|
|
||||||
|
|
@ -1,29 +0,0 @@
|
||||||
import os
|
|
||||||
|
|
||||||
from ayon_core.pipeline import load
|
|
||||||
|
|
||||||
|
|
||||||
class CopyFilePath(load.LoaderPlugin):
|
|
||||||
"""Copy published file path to clipboard"""
|
|
||||||
representations = {"*"}
|
|
||||||
product_types = {"*"}
|
|
||||||
|
|
||||||
label = "Copy File Path"
|
|
||||||
order = 20
|
|
||||||
icon = "clipboard"
|
|
||||||
color = "#999999"
|
|
||||||
|
|
||||||
def load(self, context, name=None, namespace=None, data=None):
|
|
||||||
path = self.filepath_from_context(context)
|
|
||||||
self.log.info("Added file path to clipboard: {0}".format(path))
|
|
||||||
self.copy_path_to_clipboard(path)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def copy_path_to_clipboard(path):
|
|
||||||
from qtpy import QtWidgets
|
|
||||||
|
|
||||||
clipboard = QtWidgets.QApplication.clipboard()
|
|
||||||
assert clipboard, "Must have running QApplication instance"
|
|
||||||
|
|
||||||
# Set to Clipboard
|
|
||||||
clipboard.setText(os.path.normpath(path))
|
|
||||||
|
|
@ -62,8 +62,8 @@ class CreateHeroVersion(load.ProductLoaderPlugin):
|
||||||
|
|
||||||
ignored_representation_names: list[str] = []
|
ignored_representation_names: list[str] = []
|
||||||
db_representation_context_keys = [
|
db_representation_context_keys = [
|
||||||
"project", "folder", "asset", "hierarchy", "task", "product",
|
"project", "folder", "hierarchy", "task", "product",
|
||||||
"subset", "family", "representation", "username", "user", "output"
|
"representation", "username", "user", "output"
|
||||||
]
|
]
|
||||||
use_hardlinks = False
|
use_hardlinks = False
|
||||||
|
|
||||||
|
|
@ -75,6 +75,7 @@ class CreateHeroVersion(load.ProductLoaderPlugin):
|
||||||
msgBox.setStyleSheet(style.load_stylesheet())
|
msgBox.setStyleSheet(style.load_stylesheet())
|
||||||
msgBox.setWindowFlags(
|
msgBox.setWindowFlags(
|
||||||
msgBox.windowFlags() | QtCore.Qt.WindowType.FramelessWindowHint
|
msgBox.windowFlags() | QtCore.Qt.WindowType.FramelessWindowHint
|
||||||
|
| QtCore.Qt.WindowType.WindowStaysOnTopHint
|
||||||
)
|
)
|
||||||
msgBox.exec_()
|
msgBox.exec_()
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,477 +0,0 @@
|
||||||
import collections
|
|
||||||
import os
|
|
||||||
import uuid
|
|
||||||
from typing import List, Dict, Any
|
|
||||||
|
|
||||||
import clique
|
|
||||||
import ayon_api
|
|
||||||
from ayon_api.operations import OperationsSession
|
|
||||||
import qargparse
|
|
||||||
from qtpy import QtWidgets, QtCore
|
|
||||||
|
|
||||||
from ayon_core import style
|
|
||||||
from ayon_core.lib import format_file_size
|
|
||||||
from ayon_core.pipeline import load, Anatomy
|
|
||||||
from ayon_core.pipeline.load import (
|
|
||||||
get_representation_path_with_anatomy,
|
|
||||||
InvalidRepresentationContext,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class DeleteOldVersions(load.ProductLoaderPlugin):
|
|
||||||
"""Deletes specific number of old version"""
|
|
||||||
|
|
||||||
is_multiple_contexts_compatible = True
|
|
||||||
sequence_splitter = "__sequence_splitter__"
|
|
||||||
|
|
||||||
representations = ["*"]
|
|
||||||
product_types = {"*"}
|
|
||||||
tool_names = ["library_loader"]
|
|
||||||
|
|
||||||
label = "Delete Old Versions"
|
|
||||||
order = 35
|
|
||||||
icon = "trash"
|
|
||||||
color = "#d8d8d8"
|
|
||||||
|
|
||||||
options = [
|
|
||||||
qargparse.Integer(
|
|
||||||
"versions_to_keep", default=2, min=0, help="Versions to keep:"
|
|
||||||
),
|
|
||||||
qargparse.Boolean(
|
|
||||||
"remove_publish_folder", help="Remove publish folder:"
|
|
||||||
)
|
|
||||||
]
|
|
||||||
|
|
||||||
requires_confirmation = True
|
|
||||||
|
|
||||||
def delete_whole_dir_paths(self, dir_paths, delete=True):
|
|
||||||
size = 0
|
|
||||||
|
|
||||||
for dir_path in dir_paths:
|
|
||||||
# Delete all files and folders in dir path
|
|
||||||
for root, dirs, files in os.walk(dir_path, topdown=False):
|
|
||||||
for name in files:
|
|
||||||
file_path = os.path.join(root, name)
|
|
||||||
size += os.path.getsize(file_path)
|
|
||||||
if delete:
|
|
||||||
os.remove(file_path)
|
|
||||||
self.log.debug("Removed file: {}".format(file_path))
|
|
||||||
|
|
||||||
for name in dirs:
|
|
||||||
if delete:
|
|
||||||
os.rmdir(os.path.join(root, name))
|
|
||||||
|
|
||||||
if not delete:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Delete even the folder and it's parents folders if they are empty
|
|
||||||
while True:
|
|
||||||
if not os.path.exists(dir_path):
|
|
||||||
dir_path = os.path.dirname(dir_path)
|
|
||||||
continue
|
|
||||||
|
|
||||||
if len(os.listdir(dir_path)) != 0:
|
|
||||||
break
|
|
||||||
|
|
||||||
os.rmdir(os.path.join(dir_path))
|
|
||||||
|
|
||||||
return size
|
|
||||||
|
|
||||||
def path_from_representation(self, representation, anatomy):
|
|
||||||
try:
|
|
||||||
context = representation["context"]
|
|
||||||
except KeyError:
|
|
||||||
return (None, None)
|
|
||||||
|
|
||||||
try:
|
|
||||||
path = get_representation_path_with_anatomy(
|
|
||||||
representation, anatomy
|
|
||||||
)
|
|
||||||
except InvalidRepresentationContext:
|
|
||||||
return (None, None)
|
|
||||||
|
|
||||||
sequence_path = None
|
|
||||||
if "frame" in context:
|
|
||||||
context["frame"] = self.sequence_splitter
|
|
||||||
sequence_path = get_representation_path_with_anatomy(
|
|
||||||
representation, anatomy
|
|
||||||
)
|
|
||||||
|
|
||||||
if sequence_path:
|
|
||||||
sequence_path = sequence_path.normalized()
|
|
||||||
|
|
||||||
return (path.normalized(), sequence_path)
|
|
||||||
|
|
||||||
def delete_only_repre_files(self, dir_paths, file_paths, delete=True):
|
|
||||||
size = 0
|
|
||||||
|
|
||||||
for dir_id, dir_path in dir_paths.items():
|
|
||||||
dir_files = os.listdir(dir_path)
|
|
||||||
collections, remainders = clique.assemble(dir_files)
|
|
||||||
for file_path, seq_path in file_paths[dir_id]:
|
|
||||||
file_path_base = os.path.split(file_path)[1]
|
|
||||||
# Just remove file if `frame` key was not in context or
|
|
||||||
# filled path is in remainders (single file sequence)
|
|
||||||
if not seq_path or file_path_base in remainders:
|
|
||||||
if not os.path.exists(file_path):
|
|
||||||
self.log.debug(
|
|
||||||
"File was not found: {}".format(file_path)
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
|
|
||||||
size += os.path.getsize(file_path)
|
|
||||||
|
|
||||||
if delete:
|
|
||||||
os.remove(file_path)
|
|
||||||
self.log.debug("Removed file: {}".format(file_path))
|
|
||||||
|
|
||||||
if file_path_base in remainders:
|
|
||||||
remainders.remove(file_path_base)
|
|
||||||
continue
|
|
||||||
|
|
||||||
seq_path_base = os.path.split(seq_path)[1]
|
|
||||||
head, tail = seq_path_base.split(self.sequence_splitter)
|
|
||||||
|
|
||||||
final_col = None
|
|
||||||
for collection in collections:
|
|
||||||
if head != collection.head or tail != collection.tail:
|
|
||||||
continue
|
|
||||||
final_col = collection
|
|
||||||
break
|
|
||||||
|
|
||||||
if final_col is not None:
|
|
||||||
# Fill full path to head
|
|
||||||
final_col.head = os.path.join(dir_path, final_col.head)
|
|
||||||
for _file_path in final_col:
|
|
||||||
if os.path.exists(_file_path):
|
|
||||||
|
|
||||||
size += os.path.getsize(_file_path)
|
|
||||||
|
|
||||||
if delete:
|
|
||||||
os.remove(_file_path)
|
|
||||||
self.log.debug(
|
|
||||||
"Removed file: {}".format(_file_path)
|
|
||||||
)
|
|
||||||
|
|
||||||
_seq_path = final_col.format("{head}{padding}{tail}")
|
|
||||||
self.log.debug("Removed files: {}".format(_seq_path))
|
|
||||||
collections.remove(final_col)
|
|
||||||
|
|
||||||
elif os.path.exists(file_path):
|
|
||||||
size += os.path.getsize(file_path)
|
|
||||||
|
|
||||||
if delete:
|
|
||||||
os.remove(file_path)
|
|
||||||
self.log.debug("Removed file: {}".format(file_path))
|
|
||||||
else:
|
|
||||||
self.log.debug(
|
|
||||||
"File was not found: {}".format(file_path)
|
|
||||||
)
|
|
||||||
|
|
||||||
# Delete as much as possible parent folders
|
|
||||||
if not delete:
|
|
||||||
return size
|
|
||||||
|
|
||||||
for dir_path in dir_paths.values():
|
|
||||||
while True:
|
|
||||||
if not os.path.exists(dir_path):
|
|
||||||
dir_path = os.path.dirname(dir_path)
|
|
||||||
continue
|
|
||||||
|
|
||||||
if len(os.listdir(dir_path)) != 0:
|
|
||||||
break
|
|
||||||
|
|
||||||
self.log.debug("Removed folder: {}".format(dir_path))
|
|
||||||
os.rmdir(dir_path)
|
|
||||||
|
|
||||||
return size
|
|
||||||
|
|
||||||
def message(self, text):
|
|
||||||
msgBox = QtWidgets.QMessageBox()
|
|
||||||
msgBox.setText(text)
|
|
||||||
msgBox.setStyleSheet(style.load_stylesheet())
|
|
||||||
msgBox.setWindowFlags(
|
|
||||||
msgBox.windowFlags() | QtCore.Qt.FramelessWindowHint
|
|
||||||
)
|
|
||||||
msgBox.exec_()
|
|
||||||
|
|
||||||
def _confirm_delete(self,
|
|
||||||
contexts: List[Dict[str, Any]],
|
|
||||||
versions_to_keep: int) -> bool:
|
|
||||||
"""Prompt user for a deletion confirmation"""
|
|
||||||
|
|
||||||
contexts_list = "\n".join(sorted(
|
|
||||||
"- {folder[name]} > {product[name]}".format_map(context)
|
|
||||||
for context in contexts
|
|
||||||
))
|
|
||||||
num_contexts = len(contexts)
|
|
||||||
s = "s" if num_contexts > 1 else ""
|
|
||||||
text = (
|
|
||||||
"Are you sure you want to delete versions?\n\n"
|
|
||||||
f"This will keep only the last {versions_to_keep} "
|
|
||||||
f"versions for the {num_contexts} selected product{s}."
|
|
||||||
)
|
|
||||||
informative_text = "Warning: This will delete files from disk"
|
|
||||||
detailed_text = (
|
|
||||||
f"Keep only {versions_to_keep} versions for:\n{contexts_list}"
|
|
||||||
)
|
|
||||||
|
|
||||||
messagebox = QtWidgets.QMessageBox()
|
|
||||||
messagebox.setIcon(QtWidgets.QMessageBox.Warning)
|
|
||||||
messagebox.setWindowTitle("Delete Old Versions")
|
|
||||||
messagebox.setText(text)
|
|
||||||
messagebox.setInformativeText(informative_text)
|
|
||||||
messagebox.setDetailedText(detailed_text)
|
|
||||||
messagebox.setStandardButtons(
|
|
||||||
QtWidgets.QMessageBox.Yes
|
|
||||||
| QtWidgets.QMessageBox.Cancel
|
|
||||||
)
|
|
||||||
messagebox.setDefaultButton(QtWidgets.QMessageBox.Cancel)
|
|
||||||
messagebox.setStyleSheet(style.load_stylesheet())
|
|
||||||
messagebox.setAttribute(QtCore.Qt.WA_DeleteOnClose, True)
|
|
||||||
return messagebox.exec_() == QtWidgets.QMessageBox.Yes
|
|
||||||
|
|
||||||
def get_data(self, context, versions_count):
|
|
||||||
product_entity = context["product"]
|
|
||||||
folder_entity = context["folder"]
|
|
||||||
project_name = context["project"]["name"]
|
|
||||||
anatomy = Anatomy(project_name, project_entity=context["project"])
|
|
||||||
|
|
||||||
version_fields = ayon_api.get_default_fields_for_type("version")
|
|
||||||
version_fields.add("tags")
|
|
||||||
versions = list(ayon_api.get_versions(
|
|
||||||
project_name,
|
|
||||||
product_ids=[product_entity["id"]],
|
|
||||||
active=None,
|
|
||||||
hero=False,
|
|
||||||
fields=version_fields
|
|
||||||
))
|
|
||||||
self.log.debug(
|
|
||||||
"Version Number ({})".format(len(versions))
|
|
||||||
)
|
|
||||||
versions_by_parent = collections.defaultdict(list)
|
|
||||||
for ent in versions:
|
|
||||||
versions_by_parent[ent["productId"]].append(ent)
|
|
||||||
|
|
||||||
def sort_func(ent):
|
|
||||||
return int(ent["version"])
|
|
||||||
|
|
||||||
all_last_versions = []
|
|
||||||
for _parent_id, _versions in versions_by_parent.items():
|
|
||||||
for idx, version in enumerate(
|
|
||||||
sorted(_versions, key=sort_func, reverse=True)
|
|
||||||
):
|
|
||||||
if idx >= versions_count:
|
|
||||||
break
|
|
||||||
all_last_versions.append(version)
|
|
||||||
|
|
||||||
self.log.debug("Collected versions ({})".format(len(versions)))
|
|
||||||
|
|
||||||
# Filter latest versions
|
|
||||||
for version in all_last_versions:
|
|
||||||
versions.remove(version)
|
|
||||||
|
|
||||||
# Update versions_by_parent without filtered versions
|
|
||||||
versions_by_parent = collections.defaultdict(list)
|
|
||||||
for ent in versions:
|
|
||||||
versions_by_parent[ent["productId"]].append(ent)
|
|
||||||
|
|
||||||
# Filter already deleted versions
|
|
||||||
versions_to_pop = []
|
|
||||||
for version in versions:
|
|
||||||
if "deleted" in version["tags"]:
|
|
||||||
versions_to_pop.append(version)
|
|
||||||
|
|
||||||
for version in versions_to_pop:
|
|
||||||
msg = "Folder: \"{}\" | Product: \"{}\" | Version: \"{}\"".format(
|
|
||||||
folder_entity["path"],
|
|
||||||
product_entity["name"],
|
|
||||||
version["version"]
|
|
||||||
)
|
|
||||||
self.log.debug((
|
|
||||||
"Skipping version. Already tagged as inactive. < {} >"
|
|
||||||
).format(msg))
|
|
||||||
versions.remove(version)
|
|
||||||
|
|
||||||
version_ids = [ent["id"] for ent in versions]
|
|
||||||
|
|
||||||
self.log.debug(
|
|
||||||
"Filtered versions to delete ({})".format(len(version_ids))
|
|
||||||
)
|
|
||||||
|
|
||||||
if not version_ids:
|
|
||||||
msg = "Skipping processing. Nothing to delete on {}/{}".format(
|
|
||||||
folder_entity["path"], product_entity["name"]
|
|
||||||
)
|
|
||||||
self.log.info(msg)
|
|
||||||
print(msg)
|
|
||||||
return
|
|
||||||
|
|
||||||
repres = list(ayon_api.get_representations(
|
|
||||||
project_name, version_ids=version_ids
|
|
||||||
))
|
|
||||||
|
|
||||||
self.log.debug(
|
|
||||||
"Collected representations to remove ({})".format(len(repres))
|
|
||||||
)
|
|
||||||
|
|
||||||
dir_paths = {}
|
|
||||||
file_paths_by_dir = collections.defaultdict(list)
|
|
||||||
for repre in repres:
|
|
||||||
file_path, seq_path = self.path_from_representation(
|
|
||||||
repre, anatomy
|
|
||||||
)
|
|
||||||
if file_path is None:
|
|
||||||
self.log.debug((
|
|
||||||
"Could not format path for represenation \"{}\""
|
|
||||||
).format(str(repre)))
|
|
||||||
continue
|
|
||||||
|
|
||||||
dir_path = os.path.dirname(file_path)
|
|
||||||
dir_id = None
|
|
||||||
for _dir_id, _dir_path in dir_paths.items():
|
|
||||||
if _dir_path == dir_path:
|
|
||||||
dir_id = _dir_id
|
|
||||||
break
|
|
||||||
|
|
||||||
if dir_id is None:
|
|
||||||
dir_id = uuid.uuid4()
|
|
||||||
dir_paths[dir_id] = dir_path
|
|
||||||
|
|
||||||
file_paths_by_dir[dir_id].append([file_path, seq_path])
|
|
||||||
|
|
||||||
dir_ids_to_pop = []
|
|
||||||
for dir_id, dir_path in dir_paths.items():
|
|
||||||
if os.path.exists(dir_path):
|
|
||||||
continue
|
|
||||||
|
|
||||||
dir_ids_to_pop.append(dir_id)
|
|
||||||
|
|
||||||
# Pop dirs from both dictionaries
|
|
||||||
for dir_id in dir_ids_to_pop:
|
|
||||||
dir_paths.pop(dir_id)
|
|
||||||
paths = file_paths_by_dir.pop(dir_id)
|
|
||||||
# TODO report of missing directories?
|
|
||||||
paths_msg = ", ".join([
|
|
||||||
"'{}'".format(path[0].replace("\\", "/")) for path in paths
|
|
||||||
])
|
|
||||||
self.log.debug((
|
|
||||||
"Folder does not exist. Deleting its files skipped: {}"
|
|
||||||
).format(paths_msg))
|
|
||||||
|
|
||||||
return {
|
|
||||||
"dir_paths": dir_paths,
|
|
||||||
"file_paths_by_dir": file_paths_by_dir,
|
|
||||||
"versions": versions,
|
|
||||||
"folder": folder_entity,
|
|
||||||
"product": product_entity,
|
|
||||||
"archive_product": versions_count == 0
|
|
||||||
}
|
|
||||||
|
|
||||||
def main(self, project_name, data, remove_publish_folder):
|
|
||||||
# Size of files.
|
|
||||||
size = 0
|
|
||||||
if not data:
|
|
||||||
return size
|
|
||||||
|
|
||||||
if remove_publish_folder:
|
|
||||||
size = self.delete_whole_dir_paths(data["dir_paths"].values())
|
|
||||||
else:
|
|
||||||
size = self.delete_only_repre_files(
|
|
||||||
data["dir_paths"], data["file_paths_by_dir"]
|
|
||||||
)
|
|
||||||
|
|
||||||
op_session = OperationsSession()
|
|
||||||
for version in data["versions"]:
|
|
||||||
orig_version_tags = version["tags"]
|
|
||||||
version_tags = list(orig_version_tags)
|
|
||||||
changes = {}
|
|
||||||
if "deleted" not in version_tags:
|
|
||||||
version_tags.append("deleted")
|
|
||||||
changes["tags"] = version_tags
|
|
||||||
|
|
||||||
if version["active"]:
|
|
||||||
changes["active"] = False
|
|
||||||
|
|
||||||
if not changes:
|
|
||||||
continue
|
|
||||||
op_session.update_entity(
|
|
||||||
project_name, "version", version["id"], changes
|
|
||||||
)
|
|
||||||
|
|
||||||
op_session.commit()
|
|
||||||
|
|
||||||
return size
|
|
||||||
|
|
||||||
def load(self, contexts, name=None, namespace=None, options=None):
|
|
||||||
|
|
||||||
# Get user options
|
|
||||||
versions_to_keep = 2
|
|
||||||
remove_publish_folder = False
|
|
||||||
if options:
|
|
||||||
versions_to_keep = options.get(
|
|
||||||
"versions_to_keep", versions_to_keep
|
|
||||||
)
|
|
||||||
remove_publish_folder = options.get(
|
|
||||||
"remove_publish_folder", remove_publish_folder
|
|
||||||
)
|
|
||||||
|
|
||||||
# Because we do not want this run by accident we will add an extra
|
|
||||||
# user confirmation
|
|
||||||
if (
|
|
||||||
self.requires_confirmation
|
|
||||||
and not self._confirm_delete(contexts, versions_to_keep)
|
|
||||||
):
|
|
||||||
return
|
|
||||||
|
|
||||||
try:
|
|
||||||
size = 0
|
|
||||||
for count, context in enumerate(contexts):
|
|
||||||
data = self.get_data(context, versions_to_keep)
|
|
||||||
if not data:
|
|
||||||
continue
|
|
||||||
project_name = context["project"]["name"]
|
|
||||||
size += self.main(project_name, data, remove_publish_folder)
|
|
||||||
print("Progressing {}/{}".format(count + 1, len(contexts)))
|
|
||||||
|
|
||||||
msg = "Total size of files: {}".format(format_file_size(size))
|
|
||||||
self.log.info(msg)
|
|
||||||
self.message(msg)
|
|
||||||
|
|
||||||
except Exception:
|
|
||||||
self.log.error("Failed to delete versions.", exc_info=True)
|
|
||||||
|
|
||||||
|
|
||||||
class CalculateOldVersions(DeleteOldVersions):
|
|
||||||
"""Calculate file size of old versions"""
|
|
||||||
label = "Calculate Old Versions"
|
|
||||||
order = 30
|
|
||||||
tool_names = ["library_loader"]
|
|
||||||
|
|
||||||
options = [
|
|
||||||
qargparse.Integer(
|
|
||||||
"versions_to_keep", default=2, min=0, help="Versions to keep:"
|
|
||||||
),
|
|
||||||
qargparse.Boolean(
|
|
||||||
"remove_publish_folder", help="Remove publish folder:"
|
|
||||||
)
|
|
||||||
]
|
|
||||||
|
|
||||||
requires_confirmation = False
|
|
||||||
|
|
||||||
def main(self, project_name, data, remove_publish_folder):
|
|
||||||
size = 0
|
|
||||||
|
|
||||||
if not data:
|
|
||||||
return size
|
|
||||||
|
|
||||||
if remove_publish_folder:
|
|
||||||
size = self.delete_whole_dir_paths(
|
|
||||||
data["dir_paths"].values(), delete=False
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
size = self.delete_only_repre_files(
|
|
||||||
data["dir_paths"], data["file_paths_by_dir"], delete=False
|
|
||||||
)
|
|
||||||
|
|
||||||
return size
|
|
||||||
|
|
@ -1,36 +0,0 @@
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
import subprocess
|
|
||||||
|
|
||||||
from ayon_core.pipeline import load
|
|
||||||
|
|
||||||
|
|
||||||
def open(filepath):
|
|
||||||
"""Open file with system default executable"""
|
|
||||||
if sys.platform.startswith('darwin'):
|
|
||||||
subprocess.call(('open', filepath))
|
|
||||||
elif os.name == 'nt':
|
|
||||||
os.startfile(filepath)
|
|
||||||
elif os.name == 'posix':
|
|
||||||
subprocess.call(('xdg-open', filepath))
|
|
||||||
|
|
||||||
|
|
||||||
class OpenFile(load.LoaderPlugin):
|
|
||||||
"""Open Image Sequence or Video with system default"""
|
|
||||||
|
|
||||||
product_types = {"render2d"}
|
|
||||||
representations = {"*"}
|
|
||||||
|
|
||||||
label = "Open"
|
|
||||||
order = -10
|
|
||||||
icon = "play-circle"
|
|
||||||
color = "orange"
|
|
||||||
|
|
||||||
def load(self, context, name, namespace, data):
|
|
||||||
|
|
||||||
path = self.filepath_from_context(context)
|
|
||||||
if not os.path.exists(path):
|
|
||||||
raise RuntimeError("File not found: {}".format(path))
|
|
||||||
|
|
||||||
self.log.info("Opening : {}".format(path))
|
|
||||||
open(path)
|
|
||||||
|
|
@ -1,56 +0,0 @@
|
||||||
import os
|
|
||||||
|
|
||||||
from ayon_core import AYON_CORE_ROOT
|
|
||||||
from ayon_core.lib import get_ayon_launcher_args, run_detached_process
|
|
||||||
from ayon_core.pipeline import load
|
|
||||||
from ayon_core.pipeline.load import LoadError
|
|
||||||
|
|
||||||
|
|
||||||
class PushToProject(load.ProductLoaderPlugin):
|
|
||||||
"""Export selected versions to different project"""
|
|
||||||
|
|
||||||
is_multiple_contexts_compatible = True
|
|
||||||
|
|
||||||
representations = {"*"}
|
|
||||||
product_types = {"*"}
|
|
||||||
|
|
||||||
label = "Push to project"
|
|
||||||
order = 35
|
|
||||||
icon = "send"
|
|
||||||
color = "#d8d8d8"
|
|
||||||
|
|
||||||
def load(self, contexts, name=None, namespace=None, options=None):
|
|
||||||
filtered_contexts = [
|
|
||||||
context
|
|
||||||
for context in contexts
|
|
||||||
if context.get("project") and context.get("version")
|
|
||||||
]
|
|
||||||
if not filtered_contexts:
|
|
||||||
raise LoadError("Nothing to push for your selection")
|
|
||||||
|
|
||||||
folder_ids = set(
|
|
||||||
context["folder"]["id"]
|
|
||||||
for context in filtered_contexts
|
|
||||||
)
|
|
||||||
if len(folder_ids) > 1:
|
|
||||||
raise LoadError("Please select products from single folder")
|
|
||||||
|
|
||||||
push_tool_script_path = os.path.join(
|
|
||||||
AYON_CORE_ROOT,
|
|
||||||
"tools",
|
|
||||||
"push_to_project",
|
|
||||||
"main.py"
|
|
||||||
)
|
|
||||||
project_name = filtered_contexts[0]["project"]["name"]
|
|
||||||
|
|
||||||
version_ids = {
|
|
||||||
context["version"]["id"]
|
|
||||||
for context in filtered_contexts
|
|
||||||
}
|
|
||||||
|
|
||||||
args = get_ayon_launcher_args(
|
|
||||||
push_tool_script_path,
|
|
||||||
"--project", project_name,
|
|
||||||
"--versions", ",".join(version_ids)
|
|
||||||
)
|
|
||||||
run_detached_process(args)
|
|
||||||
122
client/ayon_core/plugins/loader/copy_file.py
Normal file
122
client/ayon_core/plugins/loader/copy_file.py
Normal file
|
|
@ -0,0 +1,122 @@
|
||||||
|
import os
|
||||||
|
import collections
|
||||||
|
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
from ayon_core.pipeline.load import get_representation_path_with_anatomy
|
||||||
|
from ayon_core.pipeline.actions import (
|
||||||
|
LoaderActionPlugin,
|
||||||
|
LoaderActionItem,
|
||||||
|
LoaderActionSelection,
|
||||||
|
LoaderActionResult,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class CopyFileActionPlugin(LoaderActionPlugin):
|
||||||
|
"""Copy published file path to clipboard"""
|
||||||
|
identifier = "core.copy-action"
|
||||||
|
|
||||||
|
def get_action_items(
|
||||||
|
self, selection: LoaderActionSelection
|
||||||
|
) -> list[LoaderActionItem]:
|
||||||
|
repres = []
|
||||||
|
if selection.selected_type == "representation":
|
||||||
|
repres = selection.entities.get_representations(
|
||||||
|
selection.selected_ids
|
||||||
|
)
|
||||||
|
|
||||||
|
if selection.selected_type == "version":
|
||||||
|
repres = selection.entities.get_versions_representations(
|
||||||
|
selection.selected_ids
|
||||||
|
)
|
||||||
|
|
||||||
|
output = []
|
||||||
|
if not repres:
|
||||||
|
return output
|
||||||
|
|
||||||
|
repre_ids_by_name = collections.defaultdict(set)
|
||||||
|
for repre in repres:
|
||||||
|
repre_ids_by_name[repre["name"]].add(repre["id"])
|
||||||
|
|
||||||
|
for repre_name, repre_ids in repre_ids_by_name.items():
|
||||||
|
repre_id = next(iter(repre_ids), None)
|
||||||
|
if not repre_id:
|
||||||
|
continue
|
||||||
|
output.append(
|
||||||
|
LoaderActionItem(
|
||||||
|
label=repre_name,
|
||||||
|
order=32,
|
||||||
|
group_label="Copy file path",
|
||||||
|
data={
|
||||||
|
"representation_id": repre_id,
|
||||||
|
"action": "copy-path",
|
||||||
|
},
|
||||||
|
icon={
|
||||||
|
"type": "material-symbols",
|
||||||
|
"name": "content_copy",
|
||||||
|
"color": "#999999",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
output.append(
|
||||||
|
LoaderActionItem(
|
||||||
|
label=repre_name,
|
||||||
|
order=33,
|
||||||
|
group_label="Copy file",
|
||||||
|
data={
|
||||||
|
"representation_id": repre_id,
|
||||||
|
"action": "copy-file",
|
||||||
|
},
|
||||||
|
icon={
|
||||||
|
"type": "material-symbols",
|
||||||
|
"name": "file_copy",
|
||||||
|
"color": "#999999",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
return output
|
||||||
|
|
||||||
|
def execute_action(
|
||||||
|
self,
|
||||||
|
selection: LoaderActionSelection,
|
||||||
|
data: dict,
|
||||||
|
form_values: dict[str, Any],
|
||||||
|
) -> Optional[LoaderActionResult]:
|
||||||
|
from qtpy import QtWidgets, QtCore
|
||||||
|
|
||||||
|
action = data["action"]
|
||||||
|
repre_id = data["representation_id"]
|
||||||
|
repre = next(iter(selection.entities.get_representations({repre_id})))
|
||||||
|
path = get_representation_path_with_anatomy(
|
||||||
|
repre, selection.get_project_anatomy()
|
||||||
|
)
|
||||||
|
self.log.info(f"Added file path to clipboard: {path}")
|
||||||
|
|
||||||
|
clipboard = QtWidgets.QApplication.clipboard()
|
||||||
|
if not clipboard:
|
||||||
|
return LoaderActionResult(
|
||||||
|
"Failed to copy file path to clipboard.",
|
||||||
|
success=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
if action == "copy-path":
|
||||||
|
# Set to Clipboard
|
||||||
|
clipboard.setText(os.path.normpath(path))
|
||||||
|
|
||||||
|
return LoaderActionResult(
|
||||||
|
"Path stored to clipboard...",
|
||||||
|
success=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Build mime data for clipboard
|
||||||
|
data = QtCore.QMimeData()
|
||||||
|
url = QtCore.QUrl.fromLocalFile(path)
|
||||||
|
data.setUrls([url])
|
||||||
|
|
||||||
|
# Set to Clipboard
|
||||||
|
clipboard.setMimeData(data)
|
||||||
|
|
||||||
|
return LoaderActionResult(
|
||||||
|
"File added to clipboard...",
|
||||||
|
success=True,
|
||||||
|
)
|
||||||
388
client/ayon_core/plugins/loader/delete_old_versions.py
Normal file
388
client/ayon_core/plugins/loader/delete_old_versions.py
Normal file
|
|
@ -0,0 +1,388 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import collections
|
||||||
|
import json
|
||||||
|
import shutil
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
from ayon_api.operations import OperationsSession
|
||||||
|
|
||||||
|
from ayon_core.lib import (
|
||||||
|
format_file_size,
|
||||||
|
AbstractAttrDef,
|
||||||
|
NumberDef,
|
||||||
|
BoolDef,
|
||||||
|
TextDef,
|
||||||
|
UILabelDef,
|
||||||
|
)
|
||||||
|
from ayon_core.pipeline import Anatomy
|
||||||
|
from ayon_core.pipeline.actions import (
|
||||||
|
ActionForm,
|
||||||
|
LoaderActionPlugin,
|
||||||
|
LoaderActionItem,
|
||||||
|
LoaderActionSelection,
|
||||||
|
LoaderActionResult,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class DeleteOldVersions(LoaderActionPlugin):
|
||||||
|
"""Deletes specific number of old version"""
|
||||||
|
|
||||||
|
is_multiple_contexts_compatible = True
|
||||||
|
sequence_splitter = "__sequence_splitter__"
|
||||||
|
|
||||||
|
requires_confirmation = True
|
||||||
|
|
||||||
|
def get_action_items(
|
||||||
|
self, selection: LoaderActionSelection
|
||||||
|
) -> list[LoaderActionItem]:
|
||||||
|
# Do not show in hosts
|
||||||
|
if self.host_name is not None:
|
||||||
|
return []
|
||||||
|
|
||||||
|
versions = selection.get_selected_version_entities()
|
||||||
|
if not versions:
|
||||||
|
return []
|
||||||
|
|
||||||
|
product_ids = {
|
||||||
|
version["productId"]
|
||||||
|
for version in versions
|
||||||
|
}
|
||||||
|
|
||||||
|
return [
|
||||||
|
LoaderActionItem(
|
||||||
|
label="Delete Versions",
|
||||||
|
order=35,
|
||||||
|
data={
|
||||||
|
"product_ids": list(product_ids),
|
||||||
|
"action": "delete-versions",
|
||||||
|
},
|
||||||
|
icon={
|
||||||
|
"type": "material-symbols",
|
||||||
|
"name": "delete",
|
||||||
|
"color": "#d8d8d8",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
LoaderActionItem(
|
||||||
|
label="Calculate Versions size",
|
||||||
|
order=34,
|
||||||
|
data={
|
||||||
|
"product_ids": list(product_ids),
|
||||||
|
"action": "calculate-versions-size",
|
||||||
|
},
|
||||||
|
icon={
|
||||||
|
"type": "material-symbols",
|
||||||
|
"name": "auto_delete",
|
||||||
|
"color": "#d8d8d8",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
|
def execute_action(
|
||||||
|
self,
|
||||||
|
selection: LoaderActionSelection,
|
||||||
|
data: dict[str, Any],
|
||||||
|
form_values: dict[str, Any],
|
||||||
|
) -> Optional[LoaderActionResult]:
|
||||||
|
step = form_values.get("step")
|
||||||
|
action = data["action"]
|
||||||
|
versions_to_keep = form_values.get("versions_to_keep")
|
||||||
|
remove_publish_folder = form_values.get("remove_publish_folder")
|
||||||
|
if step is None:
|
||||||
|
return self._first_step(
|
||||||
|
action,
|
||||||
|
versions_to_keep,
|
||||||
|
remove_publish_folder,
|
||||||
|
)
|
||||||
|
|
||||||
|
if versions_to_keep is None:
|
||||||
|
versions_to_keep = 2
|
||||||
|
if remove_publish_folder is None:
|
||||||
|
remove_publish_folder = False
|
||||||
|
|
||||||
|
product_ids = data["product_ids"]
|
||||||
|
if step == "prepare-data":
|
||||||
|
return self._prepare_data_step(
|
||||||
|
action,
|
||||||
|
versions_to_keep,
|
||||||
|
remove_publish_folder,
|
||||||
|
product_ids,
|
||||||
|
selection,
|
||||||
|
)
|
||||||
|
|
||||||
|
if step == "delete-versions":
|
||||||
|
return self._delete_versions_step(
|
||||||
|
selection.project_name, form_values
|
||||||
|
)
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _first_step(
|
||||||
|
self,
|
||||||
|
action: str,
|
||||||
|
versions_to_keep: Optional[int],
|
||||||
|
remove_publish_folder: Optional[bool],
|
||||||
|
) -> LoaderActionResult:
|
||||||
|
fields: list[AbstractAttrDef] = [
|
||||||
|
TextDef(
|
||||||
|
"step",
|
||||||
|
visible=False,
|
||||||
|
),
|
||||||
|
NumberDef(
|
||||||
|
"versions_to_keep",
|
||||||
|
label="Versions to keep",
|
||||||
|
minimum=0,
|
||||||
|
default=2,
|
||||||
|
),
|
||||||
|
]
|
||||||
|
if action == "delete-versions":
|
||||||
|
fields.append(
|
||||||
|
BoolDef(
|
||||||
|
"remove_publish_folder",
|
||||||
|
label="Remove publish folder",
|
||||||
|
default=False,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
form_values = {
|
||||||
|
key: value
|
||||||
|
for key, value in (
|
||||||
|
("remove_publish_folder", remove_publish_folder),
|
||||||
|
("versions_to_keep", versions_to_keep),
|
||||||
|
)
|
||||||
|
if value is not None
|
||||||
|
}
|
||||||
|
form_values["step"] = "prepare-data"
|
||||||
|
return LoaderActionResult(
|
||||||
|
form=ActionForm(
|
||||||
|
title="Delete Old Versions",
|
||||||
|
fields=fields,
|
||||||
|
),
|
||||||
|
form_values=form_values
|
||||||
|
)
|
||||||
|
|
||||||
|
def _prepare_data_step(
|
||||||
|
self,
|
||||||
|
action: str,
|
||||||
|
versions_to_keep: int,
|
||||||
|
remove_publish_folder: bool,
|
||||||
|
entity_ids: set[str],
|
||||||
|
selection: LoaderActionSelection,
|
||||||
|
):
|
||||||
|
versions_by_product_id = collections.defaultdict(list)
|
||||||
|
for version in selection.entities.get_products_versions(entity_ids):
|
||||||
|
# Keep hero version
|
||||||
|
if versions_to_keep != 0 and version["version"] < 0:
|
||||||
|
continue
|
||||||
|
versions_by_product_id[version["productId"]].append(version)
|
||||||
|
|
||||||
|
versions_to_delete = []
|
||||||
|
for product_id, versions in versions_by_product_id.items():
|
||||||
|
if versions_to_keep == 0:
|
||||||
|
versions_to_delete.extend(versions)
|
||||||
|
continue
|
||||||
|
|
||||||
|
if len(versions) <= versions_to_keep:
|
||||||
|
continue
|
||||||
|
|
||||||
|
versions.sort(key=lambda v: v["version"])
|
||||||
|
for _ in range(versions_to_keep):
|
||||||
|
if not versions:
|
||||||
|
break
|
||||||
|
versions.pop(-1)
|
||||||
|
versions_to_delete.extend(versions)
|
||||||
|
|
||||||
|
self.log.debug(
|
||||||
|
f"Collected versions to delete ({len(versions_to_delete)})"
|
||||||
|
)
|
||||||
|
|
||||||
|
version_ids = {
|
||||||
|
version["id"]
|
||||||
|
for version in versions_to_delete
|
||||||
|
}
|
||||||
|
if not version_ids:
|
||||||
|
return LoaderActionResult(
|
||||||
|
message="Skipping. Nothing to delete.",
|
||||||
|
success=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
project = selection.entities.get_project()
|
||||||
|
anatomy = Anatomy(project["name"], project_entity=project)
|
||||||
|
|
||||||
|
repres = selection.entities.get_versions_representations(version_ids)
|
||||||
|
|
||||||
|
self.log.debug(
|
||||||
|
f"Collected representations to remove ({len(repres)})"
|
||||||
|
)
|
||||||
|
|
||||||
|
filepaths_by_repre_id = {}
|
||||||
|
repre_ids_by_version_id = {
|
||||||
|
version_id: []
|
||||||
|
for version_id in version_ids
|
||||||
|
}
|
||||||
|
for repre in repres:
|
||||||
|
repre_ids_by_version_id[repre["versionId"]].append(repre["id"])
|
||||||
|
filepaths_by_repre_id[repre["id"]] = [
|
||||||
|
anatomy.fill_root(repre_file["path"])
|
||||||
|
for repre_file in repre["files"]
|
||||||
|
]
|
||||||
|
|
||||||
|
size = 0
|
||||||
|
for filepaths in filepaths_by_repre_id.values():
|
||||||
|
for filepath in filepaths:
|
||||||
|
if os.path.exists(filepath):
|
||||||
|
size += os.path.getsize(filepath)
|
||||||
|
|
||||||
|
if action == "calculate-versions-size":
|
||||||
|
return LoaderActionResult(
|
||||||
|
message="Calculated size",
|
||||||
|
success=True,
|
||||||
|
form=ActionForm(
|
||||||
|
title="Calculated versions size",
|
||||||
|
fields=[
|
||||||
|
UILabelDef(
|
||||||
|
f"Total size of files: {format_file_size(size)}"
|
||||||
|
),
|
||||||
|
],
|
||||||
|
submit_label=None,
|
||||||
|
cancel_label="Close",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
form, form_values = self._get_delete_form(
|
||||||
|
size,
|
||||||
|
remove_publish_folder,
|
||||||
|
list(version_ids),
|
||||||
|
repre_ids_by_version_id,
|
||||||
|
filepaths_by_repre_id,
|
||||||
|
)
|
||||||
|
return LoaderActionResult(
|
||||||
|
form=form,
|
||||||
|
form_values=form_values
|
||||||
|
)
|
||||||
|
|
||||||
|
def _delete_versions_step(
|
||||||
|
self, project_name: str, form_values: dict[str, Any]
|
||||||
|
) -> LoaderActionResult:
|
||||||
|
delete_data = json.loads(form_values["delete_data"])
|
||||||
|
remove_publish_folder = form_values["remove_publish_folder"]
|
||||||
|
if form_values["delete_value"].lower() != "delete":
|
||||||
|
size = delete_data["size"]
|
||||||
|
form, form_values = self._get_delete_form(
|
||||||
|
size,
|
||||||
|
remove_publish_folder,
|
||||||
|
delete_data["version_ids"],
|
||||||
|
delete_data["repre_ids_by_version_id"],
|
||||||
|
delete_data["filepaths_by_repre_id"],
|
||||||
|
True,
|
||||||
|
)
|
||||||
|
return LoaderActionResult(
|
||||||
|
form=form,
|
||||||
|
form_values=form_values,
|
||||||
|
)
|
||||||
|
|
||||||
|
version_ids = delete_data["version_ids"]
|
||||||
|
repre_ids_by_version_id = delete_data["repre_ids_by_version_id"]
|
||||||
|
filepaths_by_repre_id = delete_data["filepaths_by_repre_id"]
|
||||||
|
op_session = OperationsSession()
|
||||||
|
total_versions = len(version_ids)
|
||||||
|
try:
|
||||||
|
for version_idx, version_id in enumerate(version_ids):
|
||||||
|
self.log.info(
|
||||||
|
f"Progressing version {version_idx + 1}/{total_versions}"
|
||||||
|
)
|
||||||
|
for repre_id in repre_ids_by_version_id[version_id]:
|
||||||
|
for filepath in filepaths_by_repre_id[repre_id]:
|
||||||
|
publish_folder = os.path.dirname(filepath)
|
||||||
|
if remove_publish_folder:
|
||||||
|
if os.path.exists(publish_folder):
|
||||||
|
shutil.rmtree(
|
||||||
|
publish_folder, ignore_errors=True
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
if os.path.exists(filepath):
|
||||||
|
os.remove(filepath)
|
||||||
|
|
||||||
|
op_session.delete_entity(
|
||||||
|
project_name, "representation", repre_id
|
||||||
|
)
|
||||||
|
op_session.delete_entity(
|
||||||
|
project_name, "version", version_id
|
||||||
|
)
|
||||||
|
self.log.info("All done")
|
||||||
|
|
||||||
|
except Exception:
|
||||||
|
self.log.error("Failed to delete versions.", exc_info=True)
|
||||||
|
return LoaderActionResult(
|
||||||
|
message="Failed to delete versions.",
|
||||||
|
success=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
finally:
|
||||||
|
op_session.commit()
|
||||||
|
|
||||||
|
return LoaderActionResult(
|
||||||
|
message="Deleted versions",
|
||||||
|
success=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
def _get_delete_form(
|
||||||
|
self,
|
||||||
|
size: int,
|
||||||
|
remove_publish_folder: bool,
|
||||||
|
version_ids: list[str],
|
||||||
|
repre_ids_by_version_id: dict[str, list[str]],
|
||||||
|
filepaths_by_repre_id: dict[str, list[str]],
|
||||||
|
repeated: bool = False,
|
||||||
|
) -> tuple[ActionForm, dict[str, Any]]:
|
||||||
|
versions_len = len(repre_ids_by_version_id)
|
||||||
|
fields = [
|
||||||
|
UILabelDef(
|
||||||
|
f"Going to delete {versions_len} versions<br/>"
|
||||||
|
f"- total size of files: {format_file_size(size)}<br/>"
|
||||||
|
),
|
||||||
|
UILabelDef("Are you sure you want to continue?"),
|
||||||
|
TextDef(
|
||||||
|
"delete_value",
|
||||||
|
placeholder="Type 'delete' to confirm...",
|
||||||
|
),
|
||||||
|
]
|
||||||
|
if repeated:
|
||||||
|
fields.append(UILabelDef(
|
||||||
|
"*Please fill in '**delete**' to confirm deletion.*"
|
||||||
|
))
|
||||||
|
fields.extend([
|
||||||
|
TextDef(
|
||||||
|
"delete_data",
|
||||||
|
visible=False,
|
||||||
|
),
|
||||||
|
TextDef(
|
||||||
|
"step",
|
||||||
|
visible=False,
|
||||||
|
),
|
||||||
|
BoolDef(
|
||||||
|
"remove_publish_folder",
|
||||||
|
label="Remove publish folder",
|
||||||
|
default=False,
|
||||||
|
visible=False,
|
||||||
|
)
|
||||||
|
])
|
||||||
|
|
||||||
|
form = ActionForm(
|
||||||
|
title="Delete versions",
|
||||||
|
submit_label="Delete",
|
||||||
|
cancel_label="Close",
|
||||||
|
fields=fields,
|
||||||
|
)
|
||||||
|
form_values = {
|
||||||
|
"delete_data": json.dumps({
|
||||||
|
"size": size,
|
||||||
|
"version_ids": version_ids,
|
||||||
|
"repre_ids_by_version_id": repre_ids_by_version_id,
|
||||||
|
"filepaths_by_repre_id": filepaths_by_repre_id,
|
||||||
|
}),
|
||||||
|
"step": "delete-versions",
|
||||||
|
"remove_publish_folder": remove_publish_folder,
|
||||||
|
}
|
||||||
|
return form, form_values
|
||||||
|
|
@ -1,5 +1,6 @@
|
||||||
import platform
|
import platform
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
import ayon_api
|
import ayon_api
|
||||||
from qtpy import QtWidgets, QtCore, QtGui
|
from qtpy import QtWidgets, QtCore, QtGui
|
||||||
|
|
@ -10,7 +11,12 @@ from ayon_core.lib import (
|
||||||
collect_frames,
|
collect_frames,
|
||||||
get_datetime_data,
|
get_datetime_data,
|
||||||
)
|
)
|
||||||
from ayon_core.pipeline import load, Anatomy
|
from ayon_core.pipeline import Anatomy
|
||||||
|
from ayon_core.pipeline.actions import (
|
||||||
|
LoaderSimpleActionPlugin,
|
||||||
|
LoaderActionSelection,
|
||||||
|
LoaderActionResult,
|
||||||
|
)
|
||||||
from ayon_core.pipeline.load import get_representation_path_with_anatomy
|
from ayon_core.pipeline.load import get_representation_path_with_anatomy
|
||||||
from ayon_core.pipeline.delivery import (
|
from ayon_core.pipeline.delivery import (
|
||||||
get_format_dict,
|
get_format_dict,
|
||||||
|
|
@ -20,43 +26,72 @@ from ayon_core.pipeline.delivery import (
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class Delivery(load.ProductLoaderPlugin):
|
class DeliveryAction(LoaderSimpleActionPlugin):
|
||||||
"""Export selected versions to folder structure from Template"""
|
identifier = "core.delivery"
|
||||||
|
|
||||||
is_multiple_contexts_compatible = True
|
|
||||||
sequence_splitter = "__sequence_splitter__"
|
|
||||||
|
|
||||||
representations = {"*"}
|
|
||||||
product_types = {"*"}
|
|
||||||
tool_names = ["library_loader"]
|
|
||||||
|
|
||||||
label = "Deliver Versions"
|
label = "Deliver Versions"
|
||||||
order = 35
|
order = 35
|
||||||
icon = "upload"
|
icon = {
|
||||||
color = "#d8d8d8"
|
"type": "material-symbols",
|
||||||
|
"name": "upload",
|
||||||
|
"color": "#d8d8d8",
|
||||||
|
}
|
||||||
|
|
||||||
def message(self, text):
|
def is_compatible(self, selection: LoaderActionSelection) -> bool:
|
||||||
msgBox = QtWidgets.QMessageBox()
|
if self.host_name is not None:
|
||||||
msgBox.setText(text)
|
return False
|
||||||
msgBox.setStyleSheet(style.load_stylesheet())
|
|
||||||
msgBox.setWindowFlags(
|
if not selection.selected_ids:
|
||||||
msgBox.windowFlags() | QtCore.Qt.FramelessWindowHint
|
return False
|
||||||
|
|
||||||
|
return (
|
||||||
|
selection.versions_selected()
|
||||||
|
or selection.representations_selected()
|
||||||
|
)
|
||||||
|
|
||||||
|
def execute_simple_action(
|
||||||
|
self,
|
||||||
|
selection: LoaderActionSelection,
|
||||||
|
form_values: dict[str, Any],
|
||||||
|
) -> Optional[LoaderActionResult]:
|
||||||
|
version_ids = set()
|
||||||
|
if selection.selected_type == "representation":
|
||||||
|
versions = selection.entities.get_representations_versions(
|
||||||
|
selection.selected_ids
|
||||||
|
)
|
||||||
|
version_ids = {version["id"] for version in versions}
|
||||||
|
|
||||||
|
if selection.selected_type == "version":
|
||||||
|
version_ids = set(selection.selected_ids)
|
||||||
|
|
||||||
|
if not version_ids:
|
||||||
|
return LoaderActionResult(
|
||||||
|
message="No versions found in your selection",
|
||||||
|
success=False,
|
||||||
)
|
)
|
||||||
msgBox.exec_()
|
|
||||||
|
|
||||||
def load(self, contexts, name=None, namespace=None, options=None):
|
|
||||||
try:
|
try:
|
||||||
dialog = DeliveryOptionsDialog(contexts, self.log)
|
# TODO run the tool in subprocess
|
||||||
|
dialog = DeliveryOptionsDialog(
|
||||||
|
selection.project_name, version_ids, self.log
|
||||||
|
)
|
||||||
dialog.exec_()
|
dialog.exec_()
|
||||||
except Exception:
|
except Exception:
|
||||||
self.log.error("Failed to deliver versions.", exc_info=True)
|
self.log.error("Failed to deliver versions.", exc_info=True)
|
||||||
|
|
||||||
|
return LoaderActionResult()
|
||||||
|
|
||||||
|
|
||||||
class DeliveryOptionsDialog(QtWidgets.QDialog):
|
class DeliveryOptionsDialog(QtWidgets.QDialog):
|
||||||
"""Dialog to select template where to deliver selected representations."""
|
"""Dialog to select template where to deliver selected representations."""
|
||||||
|
|
||||||
def __init__(self, contexts, log=None, parent=None):
|
def __init__(
|
||||||
super(DeliveryOptionsDialog, self).__init__(parent=parent)
|
self,
|
||||||
|
project_name,
|
||||||
|
version_ids,
|
||||||
|
log=None,
|
||||||
|
parent=None,
|
||||||
|
):
|
||||||
|
super().__init__(parent=parent)
|
||||||
|
|
||||||
self.setWindowTitle("AYON - Deliver versions")
|
self.setWindowTitle("AYON - Deliver versions")
|
||||||
icon = QtGui.QIcon(resources.get_ayon_icon_filepath())
|
icon = QtGui.QIcon(resources.get_ayon_icon_filepath())
|
||||||
|
|
@ -70,13 +105,12 @@ class DeliveryOptionsDialog(QtWidgets.QDialog):
|
||||||
|
|
||||||
self.setStyleSheet(style.load_stylesheet())
|
self.setStyleSheet(style.load_stylesheet())
|
||||||
|
|
||||||
project_name = contexts[0]["project"]["name"]
|
|
||||||
self.anatomy = Anatomy(project_name)
|
self.anatomy = Anatomy(project_name)
|
||||||
self._representations = None
|
self._representations = None
|
||||||
self.log = log
|
self.log = log
|
||||||
self.currently_uploaded = 0
|
self.currently_uploaded = 0
|
||||||
|
|
||||||
self._set_representations(project_name, contexts)
|
self._set_representations(project_name, version_ids)
|
||||||
|
|
||||||
dropdown = QtWidgets.QComboBox()
|
dropdown = QtWidgets.QComboBox()
|
||||||
self.templates = self._get_templates(self.anatomy)
|
self.templates = self._get_templates(self.anatomy)
|
||||||
|
|
@ -316,9 +350,7 @@ class DeliveryOptionsDialog(QtWidgets.QDialog):
|
||||||
|
|
||||||
return templates
|
return templates
|
||||||
|
|
||||||
def _set_representations(self, project_name, contexts):
|
def _set_representations(self, project_name, version_ids):
|
||||||
version_ids = {context["version"]["id"] for context in contexts}
|
|
||||||
|
|
||||||
repres = list(ayon_api.get_representations(
|
repres = list(ayon_api.get_representations(
|
||||||
project_name, version_ids=version_ids
|
project_name, version_ids=version_ids
|
||||||
))
|
))
|
||||||
|
|
@ -2,11 +2,10 @@ import logging
|
||||||
import os
|
import os
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
|
from typing import Any, Optional
|
||||||
|
|
||||||
from qtpy import QtWidgets, QtCore, QtGui
|
from qtpy import QtWidgets, QtCore, QtGui
|
||||||
from ayon_api import get_representations
|
|
||||||
|
|
||||||
from ayon_core.pipeline import load, Anatomy
|
|
||||||
from ayon_core import resources, style
|
from ayon_core import resources, style
|
||||||
from ayon_core.lib.transcoding import (
|
from ayon_core.lib.transcoding import (
|
||||||
IMAGE_EXTENSIONS,
|
IMAGE_EXTENSIONS,
|
||||||
|
|
@ -16,9 +15,16 @@ from ayon_core.lib import (
|
||||||
get_ffprobe_data,
|
get_ffprobe_data,
|
||||||
is_oiio_supported,
|
is_oiio_supported,
|
||||||
)
|
)
|
||||||
|
from ayon_core.pipeline import Anatomy
|
||||||
from ayon_core.pipeline.load import get_representation_path_with_anatomy
|
from ayon_core.pipeline.load import get_representation_path_with_anatomy
|
||||||
from ayon_core.tools.utils import show_message_dialog
|
from ayon_core.tools.utils import show_message_dialog
|
||||||
|
|
||||||
|
from ayon_core.pipeline.actions import (
|
||||||
|
LoaderSimpleActionPlugin,
|
||||||
|
LoaderActionSelection,
|
||||||
|
LoaderActionResult,
|
||||||
|
)
|
||||||
|
|
||||||
OTIO = None
|
OTIO = None
|
||||||
FRAME_SPLITTER = "__frame_splitter__"
|
FRAME_SPLITTER = "__frame_splitter__"
|
||||||
|
|
||||||
|
|
@ -30,34 +36,99 @@ def _import_otio():
|
||||||
OTIO = opentimelineio
|
OTIO = opentimelineio
|
||||||
|
|
||||||
|
|
||||||
class ExportOTIO(load.ProductLoaderPlugin):
|
class ExportOTIO(LoaderSimpleActionPlugin):
|
||||||
"""Export selected versions to OpenTimelineIO."""
|
identifier = "core.export-otio"
|
||||||
|
|
||||||
is_multiple_contexts_compatible = True
|
|
||||||
sequence_splitter = "__sequence_splitter__"
|
|
||||||
|
|
||||||
representations = {"*"}
|
|
||||||
product_types = {"*"}
|
|
||||||
tool_names = ["library_loader"]
|
|
||||||
|
|
||||||
label = "Export OTIO"
|
label = "Export OTIO"
|
||||||
|
group_label = None
|
||||||
order = 35
|
order = 35
|
||||||
icon = "save"
|
icon = {
|
||||||
color = "#d8d8d8"
|
"type": "material-symbols",
|
||||||
|
"name": "save",
|
||||||
|
"color": "#d8d8d8",
|
||||||
|
}
|
||||||
|
|
||||||
def load(self, contexts, name=None, namespace=None, options=None):
|
def is_compatible(
|
||||||
|
self, selection: LoaderActionSelection
|
||||||
|
) -> bool:
|
||||||
|
# Don't show in hosts
|
||||||
|
if self.host_name is not None:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return selection.versions_selected()
|
||||||
|
|
||||||
|
def execute_simple_action(
|
||||||
|
self,
|
||||||
|
selection: LoaderActionSelection,
|
||||||
|
form_values: dict[str, Any],
|
||||||
|
) -> Optional[LoaderActionResult]:
|
||||||
_import_otio()
|
_import_otio()
|
||||||
|
version_ids = set(selection.selected_ids)
|
||||||
|
|
||||||
|
versions_by_id = {
|
||||||
|
version["id"]: version
|
||||||
|
for version in selection.entities.get_versions(version_ids)
|
||||||
|
}
|
||||||
|
product_ids = {
|
||||||
|
version["productId"]
|
||||||
|
for version in versions_by_id.values()
|
||||||
|
}
|
||||||
|
products_by_id = {
|
||||||
|
product["id"]: product
|
||||||
|
for product in selection.entities.get_products(product_ids)
|
||||||
|
}
|
||||||
|
folder_ids = {
|
||||||
|
product["folderId"]
|
||||||
|
for product in products_by_id.values()
|
||||||
|
}
|
||||||
|
folder_by_id = {
|
||||||
|
folder["id"]: folder
|
||||||
|
for folder in selection.entities.get_folders(folder_ids)
|
||||||
|
}
|
||||||
|
repre_entities = selection.entities.get_versions_representations(
|
||||||
|
version_ids
|
||||||
|
)
|
||||||
|
|
||||||
|
version_path_by_id = {}
|
||||||
|
for version in versions_by_id.values():
|
||||||
|
version_id = version["id"]
|
||||||
|
product_id = version["productId"]
|
||||||
|
product = products_by_id[product_id]
|
||||||
|
folder_id = product["folderId"]
|
||||||
|
folder = folder_by_id[folder_id]
|
||||||
|
|
||||||
|
version_path_by_id[version_id] = "/".join([
|
||||||
|
folder["path"],
|
||||||
|
product["name"],
|
||||||
|
version["name"]
|
||||||
|
])
|
||||||
|
|
||||||
try:
|
try:
|
||||||
dialog = ExportOTIOOptionsDialog(contexts, self.log)
|
# TODO this should probably trigger a subprocess?
|
||||||
|
dialog = ExportOTIOOptionsDialog(
|
||||||
|
selection.project_name,
|
||||||
|
versions_by_id,
|
||||||
|
repre_entities,
|
||||||
|
version_path_by_id,
|
||||||
|
self.log
|
||||||
|
)
|
||||||
dialog.exec_()
|
dialog.exec_()
|
||||||
except Exception:
|
except Exception:
|
||||||
self.log.error("Failed to export OTIO.", exc_info=True)
|
self.log.error("Failed to export OTIO.", exc_info=True)
|
||||||
|
return LoaderActionResult()
|
||||||
|
|
||||||
|
|
||||||
class ExportOTIOOptionsDialog(QtWidgets.QDialog):
|
class ExportOTIOOptionsDialog(QtWidgets.QDialog):
|
||||||
"""Dialog to select template where to deliver selected representations."""
|
"""Dialog to select template where to deliver selected representations."""
|
||||||
|
|
||||||
def __init__(self, contexts, log=None, parent=None):
|
def __init__(
|
||||||
|
self,
|
||||||
|
project_name,
|
||||||
|
versions_by_id,
|
||||||
|
repre_entities,
|
||||||
|
version_path_by_id,
|
||||||
|
log=None,
|
||||||
|
parent=None
|
||||||
|
):
|
||||||
# Not all hosts have OpenTimelineIO available.
|
# Not all hosts have OpenTimelineIO available.
|
||||||
self.log = log
|
self.log = log
|
||||||
|
|
||||||
|
|
@ -73,30 +144,14 @@ class ExportOTIOOptionsDialog(QtWidgets.QDialog):
|
||||||
| QtCore.Qt.WindowMinimizeButtonHint
|
| QtCore.Qt.WindowMinimizeButtonHint
|
||||||
)
|
)
|
||||||
|
|
||||||
project_name = contexts[0]["project"]["name"]
|
|
||||||
versions_by_id = {
|
|
||||||
context["version"]["id"]: context["version"]
|
|
||||||
for context in contexts
|
|
||||||
}
|
|
||||||
repre_entities = list(get_representations(
|
|
||||||
project_name, version_ids=set(versions_by_id)
|
|
||||||
))
|
|
||||||
version_by_representation_id = {
|
version_by_representation_id = {
|
||||||
repre_entity["id"]: versions_by_id[repre_entity["versionId"]]
|
repre_entity["id"]: versions_by_id[repre_entity["versionId"]]
|
||||||
for repre_entity in repre_entities
|
for repre_entity in repre_entities
|
||||||
}
|
}
|
||||||
version_path_by_id = {}
|
representations_by_version_id = {
|
||||||
representations_by_version_id = {}
|
version_id: []
|
||||||
for context in contexts:
|
for version_id in versions_by_id
|
||||||
version_id = context["version"]["id"]
|
}
|
||||||
if version_id in version_path_by_id:
|
|
||||||
continue
|
|
||||||
representations_by_version_id[version_id] = []
|
|
||||||
version_path_by_id[version_id] = "/".join([
|
|
||||||
context["folder"]["path"],
|
|
||||||
context["product"]["name"],
|
|
||||||
context["version"]["name"]
|
|
||||||
])
|
|
||||||
|
|
||||||
for repre_entity in repre_entities:
|
for repre_entity in repre_entities:
|
||||||
representations_by_version_id[repre_entity["versionId"]].append(
|
representations_by_version_id[repre_entity["versionId"]].append(
|
||||||
360
client/ayon_core/plugins/loader/open_file.py
Normal file
360
client/ayon_core/plugins/loader/open_file.py
Normal file
|
|
@ -0,0 +1,360 @@
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import subprocess
|
||||||
|
import platform
|
||||||
|
import collections
|
||||||
|
import ctypes
|
||||||
|
from typing import Optional, Any, Callable
|
||||||
|
|
||||||
|
from ayon_core.pipeline.load import get_representation_path_with_anatomy
|
||||||
|
from ayon_core.pipeline.actions import (
|
||||||
|
LoaderActionPlugin,
|
||||||
|
LoaderActionItem,
|
||||||
|
LoaderActionSelection,
|
||||||
|
LoaderActionResult,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
WINDOWS_USER_REG_PATH = (
|
||||||
|
r"Software\Microsoft\Windows\CurrentVersion\Explorer\FileExts"
|
||||||
|
r"\{ext}\UserChoice"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class _Cache:
|
||||||
|
"""Cache extensions information.
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
The cache is cleared when loader tool is refreshed so it might be
|
||||||
|
moved to other place which is not cleared on refresh.
|
||||||
|
|
||||||
|
"""
|
||||||
|
supported_exts: set[str] = set()
|
||||||
|
unsupported_exts: set[str] = set()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def is_supported(cls, ext: str) -> bool:
|
||||||
|
return ext in cls.supported_exts
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def already_checked(cls, ext: str) -> bool:
|
||||||
|
return (
|
||||||
|
ext in cls.supported_exts
|
||||||
|
or ext in cls.unsupported_exts
|
||||||
|
)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def set_ext_support(cls, ext: str, supported: bool) -> None:
|
||||||
|
if supported:
|
||||||
|
cls.supported_exts.add(ext)
|
||||||
|
else:
|
||||||
|
cls.unsupported_exts.add(ext)
|
||||||
|
|
||||||
|
|
||||||
|
def _extension_has_assigned_app_windows(ext: str) -> bool:
|
||||||
|
import winreg
|
||||||
|
progid = None
|
||||||
|
try:
|
||||||
|
with winreg.OpenKey(
|
||||||
|
winreg.HKEY_CURRENT_USER,
|
||||||
|
WINDOWS_USER_REG_PATH.format(ext=ext),
|
||||||
|
) as k:
|
||||||
|
progid, _ = winreg.QueryValueEx(k, "ProgId")
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
if progid:
|
||||||
|
return True
|
||||||
|
|
||||||
|
try:
|
||||||
|
with winreg.OpenKey(winreg.HKEY_CLASSES_ROOT, ext) as k:
|
||||||
|
progid = winreg.QueryValueEx(k, None)[0]
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
return bool(progid)
|
||||||
|
|
||||||
|
|
||||||
|
def _linux_find_desktop_file(desktop: str) -> Optional[str]:
|
||||||
|
for dirpath in (
|
||||||
|
os.path.expanduser("~/.local/share/applications"),
|
||||||
|
"/usr/share/applications",
|
||||||
|
"/usr/local/share/applications",
|
||||||
|
):
|
||||||
|
path = os.path.join(dirpath, desktop)
|
||||||
|
if os.path.isfile(path):
|
||||||
|
return path
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _extension_has_assigned_app_linux(ext: str) -> bool:
|
||||||
|
import mimetypes
|
||||||
|
|
||||||
|
mime, _ = mimetypes.guess_type(f"file{ext}")
|
||||||
|
if not mime:
|
||||||
|
return False
|
||||||
|
|
||||||
|
try:
|
||||||
|
# xdg-mime query default <mime>
|
||||||
|
desktop = subprocess.check_output(
|
||||||
|
["xdg-mime", "query", "default", mime],
|
||||||
|
text=True
|
||||||
|
).strip() or None
|
||||||
|
except Exception:
|
||||||
|
desktop = None
|
||||||
|
|
||||||
|
if not desktop:
|
||||||
|
return False
|
||||||
|
|
||||||
|
desktop_path = _linux_find_desktop_file(desktop)
|
||||||
|
if not desktop_path:
|
||||||
|
return False
|
||||||
|
if desktop_path and os.path.isfile(desktop_path):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def _extension_has_assigned_app_macos(ext: str) -> bool:
|
||||||
|
# Uses CoreServices/LaunchServices and Uniform Type Identifiers via
|
||||||
|
# ctypes.
|
||||||
|
# Steps: ext -> UTI -> default handler bundle id for role 'all'.
|
||||||
|
cf = ctypes.cdll.LoadLibrary(
|
||||||
|
"/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation"
|
||||||
|
)
|
||||||
|
ls = ctypes.cdll.LoadLibrary(
|
||||||
|
"/System/Library/Frameworks/CoreServices.framework/Frameworks"
|
||||||
|
"/LaunchServices.framework/LaunchServices"
|
||||||
|
)
|
||||||
|
|
||||||
|
# CFType/CFString helpers
|
||||||
|
CFStringRef = ctypes.c_void_p
|
||||||
|
CFAllocatorRef = ctypes.c_void_p
|
||||||
|
CFIndex = ctypes.c_long
|
||||||
|
|
||||||
|
kCFStringEncodingUTF8 = 0x08000100
|
||||||
|
|
||||||
|
cf.CFStringCreateWithCString.argtypes = [
|
||||||
|
CFAllocatorRef, ctypes.c_char_p, ctypes.c_uint32
|
||||||
|
]
|
||||||
|
cf.CFStringCreateWithCString.restype = CFStringRef
|
||||||
|
|
||||||
|
cf.CFStringGetCStringPtr.argtypes = [CFStringRef, ctypes.c_uint32]
|
||||||
|
cf.CFStringGetCStringPtr.restype = ctypes.c_char_p
|
||||||
|
|
||||||
|
cf.CFStringGetCString.argtypes = [
|
||||||
|
CFStringRef, ctypes.c_char_p, CFIndex, ctypes.c_uint32
|
||||||
|
]
|
||||||
|
cf.CFStringGetCString.restype = ctypes.c_bool
|
||||||
|
|
||||||
|
cf.CFRelease.argtypes = [ctypes.c_void_p]
|
||||||
|
cf.CFRelease.restype = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
UTTypeCreatePreferredIdentifierForTag = ctypes.cdll.LoadLibrary(
|
||||||
|
"/System/Library/Frameworks/CoreServices.framework/CoreServices"
|
||||||
|
).UTTypeCreatePreferredIdentifierForTag
|
||||||
|
except OSError:
|
||||||
|
# Fallback path (older systems)
|
||||||
|
UTTypeCreatePreferredIdentifierForTag = (
|
||||||
|
ls.UTTypeCreatePreferredIdentifierForTag
|
||||||
|
)
|
||||||
|
UTTypeCreatePreferredIdentifierForTag.argtypes = [
|
||||||
|
CFStringRef, CFStringRef, CFStringRef
|
||||||
|
]
|
||||||
|
UTTypeCreatePreferredIdentifierForTag.restype = CFStringRef
|
||||||
|
|
||||||
|
LSRolesMask = ctypes.c_uint
|
||||||
|
kLSRolesAll = 0xFFFFFFFF
|
||||||
|
ls.LSCopyDefaultRoleHandlerForContentType.argtypes = [
|
||||||
|
CFStringRef, LSRolesMask
|
||||||
|
]
|
||||||
|
ls.LSCopyDefaultRoleHandlerForContentType.restype = CFStringRef
|
||||||
|
|
||||||
|
def cfstr(py_s: str) -> CFStringRef:
|
||||||
|
return cf.CFStringCreateWithCString(
|
||||||
|
None, py_s.encode("utf-8"), kCFStringEncodingUTF8
|
||||||
|
)
|
||||||
|
|
||||||
|
def to_pystr(cf_s: CFStringRef) -> Optional[str]:
|
||||||
|
if not cf_s:
|
||||||
|
return None
|
||||||
|
# Try fast pointer
|
||||||
|
ptr = cf.CFStringGetCStringPtr(cf_s, kCFStringEncodingUTF8)
|
||||||
|
if ptr:
|
||||||
|
return ctypes.cast(ptr, ctypes.c_char_p).value.decode("utf-8")
|
||||||
|
|
||||||
|
# Fallback buffer
|
||||||
|
buf_size = 1024
|
||||||
|
buf = ctypes.create_string_buffer(buf_size)
|
||||||
|
ok = cf.CFStringGetCString(
|
||||||
|
cf_s, buf, buf_size, kCFStringEncodingUTF8
|
||||||
|
)
|
||||||
|
if ok:
|
||||||
|
return buf.value.decode("utf-8")
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Convert extension (without dot) to UTI
|
||||||
|
tag_class = cfstr("public.filename-extension")
|
||||||
|
tag_value = cfstr(ext.lstrip("."))
|
||||||
|
|
||||||
|
uti_ref = UTTypeCreatePreferredIdentifierForTag(
|
||||||
|
tag_class, tag_value, None
|
||||||
|
)
|
||||||
|
|
||||||
|
# Clean up temporary CFStrings
|
||||||
|
for ref in (tag_class, tag_value):
|
||||||
|
if ref:
|
||||||
|
cf.CFRelease(ref)
|
||||||
|
|
||||||
|
bundle_id = None
|
||||||
|
if uti_ref:
|
||||||
|
# Get default handler for the UTI
|
||||||
|
default_bundle_ref = ls.LSCopyDefaultRoleHandlerForContentType(
|
||||||
|
uti_ref, kLSRolesAll
|
||||||
|
)
|
||||||
|
bundle_id = to_pystr(default_bundle_ref)
|
||||||
|
if default_bundle_ref:
|
||||||
|
cf.CFRelease(default_bundle_ref)
|
||||||
|
cf.CFRelease(uti_ref)
|
||||||
|
return bundle_id is not None
|
||||||
|
|
||||||
|
|
||||||
|
def _filter_supported_exts(
|
||||||
|
extensions: set[str], test_func: Callable
|
||||||
|
) -> set[str]:
|
||||||
|
filtered_exs: set[str] = set()
|
||||||
|
for ext in extensions:
|
||||||
|
if not _Cache.already_checked(ext):
|
||||||
|
_Cache.set_ext_support(ext, test_func(ext))
|
||||||
|
if _Cache.is_supported(ext):
|
||||||
|
filtered_exs.add(ext)
|
||||||
|
return filtered_exs
|
||||||
|
|
||||||
|
|
||||||
|
def filter_supported_exts(extensions: set[str]) -> set[str]:
|
||||||
|
if not extensions:
|
||||||
|
return set()
|
||||||
|
platform_name = platform.system().lower()
|
||||||
|
if platform_name == "windows":
|
||||||
|
return _filter_supported_exts(
|
||||||
|
extensions, _extension_has_assigned_app_windows
|
||||||
|
)
|
||||||
|
if platform_name == "linux":
|
||||||
|
return _filter_supported_exts(
|
||||||
|
extensions, _extension_has_assigned_app_linux
|
||||||
|
)
|
||||||
|
if platform_name == "darwin":
|
||||||
|
return _filter_supported_exts(
|
||||||
|
extensions, _extension_has_assigned_app_macos
|
||||||
|
)
|
||||||
|
return set()
|
||||||
|
|
||||||
|
|
||||||
|
def open_file(filepath: str) -> None:
|
||||||
|
"""Open file with system default executable"""
|
||||||
|
if sys.platform.startswith("darwin"):
|
||||||
|
subprocess.call(("open", filepath))
|
||||||
|
elif os.name == "nt":
|
||||||
|
os.startfile(filepath)
|
||||||
|
elif os.name == "posix":
|
||||||
|
subprocess.call(("xdg-open", filepath))
|
||||||
|
|
||||||
|
|
||||||
|
class OpenFileAction(LoaderActionPlugin):
|
||||||
|
"""Open Image Sequence or Video with system default"""
|
||||||
|
identifier = "core.open-file"
|
||||||
|
|
||||||
|
def get_action_items(
|
||||||
|
self, selection: LoaderActionSelection
|
||||||
|
) -> list[LoaderActionItem]:
|
||||||
|
repres = []
|
||||||
|
if selection.selected_type == "representation":
|
||||||
|
repres = selection.entities.get_representations(
|
||||||
|
selection.selected_ids
|
||||||
|
)
|
||||||
|
|
||||||
|
if selection.selected_type == "version":
|
||||||
|
repres = selection.entities.get_versions_representations(
|
||||||
|
selection.selected_ids
|
||||||
|
)
|
||||||
|
|
||||||
|
if not repres:
|
||||||
|
return []
|
||||||
|
|
||||||
|
repres_by_ext = collections.defaultdict(list)
|
||||||
|
for repre in repres:
|
||||||
|
repre_context = repre.get("context")
|
||||||
|
if not repre_context:
|
||||||
|
continue
|
||||||
|
ext = repre_context.get("ext")
|
||||||
|
if not ext:
|
||||||
|
path = repre["attrib"].get("path")
|
||||||
|
if path:
|
||||||
|
ext = os.path.splitext(path)[1]
|
||||||
|
|
||||||
|
if ext:
|
||||||
|
ext = ext.lower()
|
||||||
|
if not ext.startswith("."):
|
||||||
|
ext = f".{ext}"
|
||||||
|
repres_by_ext[ext.lower()].append(repre)
|
||||||
|
|
||||||
|
if not repres_by_ext:
|
||||||
|
return []
|
||||||
|
|
||||||
|
filtered_exts = filter_supported_exts(set(repres_by_ext))
|
||||||
|
|
||||||
|
repre_ids_by_name = collections.defaultdict(set)
|
||||||
|
for ext in filtered_exts:
|
||||||
|
for repre in repres_by_ext[ext]:
|
||||||
|
repre_ids_by_name[repre["name"]].add(repre["id"])
|
||||||
|
|
||||||
|
return [
|
||||||
|
LoaderActionItem(
|
||||||
|
label=repre_name,
|
||||||
|
group_label="Open file",
|
||||||
|
order=30,
|
||||||
|
data={"representation_ids": list(repre_ids)},
|
||||||
|
icon={
|
||||||
|
"type": "material-symbols",
|
||||||
|
"name": "file_open",
|
||||||
|
"color": "#ffffff",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
for repre_name, repre_ids in repre_ids_by_name.items()
|
||||||
|
]
|
||||||
|
|
||||||
|
def execute_action(
|
||||||
|
self,
|
||||||
|
selection: LoaderActionSelection,
|
||||||
|
data: dict[str, Any],
|
||||||
|
form_values: dict[str, Any],
|
||||||
|
) -> Optional[LoaderActionResult]:
|
||||||
|
path = None
|
||||||
|
repre_path = None
|
||||||
|
repre_ids = data["representation_ids"]
|
||||||
|
for repre in selection.entities.get_representations(repre_ids):
|
||||||
|
repre_path = get_representation_path_with_anatomy(
|
||||||
|
repre, selection.get_project_anatomy()
|
||||||
|
)
|
||||||
|
if os.path.exists(repre_path):
|
||||||
|
path = repre_path
|
||||||
|
break
|
||||||
|
|
||||||
|
if path is None:
|
||||||
|
if repre_path is None:
|
||||||
|
return LoaderActionResult(
|
||||||
|
"Failed to fill representation path...",
|
||||||
|
success=False,
|
||||||
|
)
|
||||||
|
return LoaderActionResult(
|
||||||
|
"File to open was not found...",
|
||||||
|
success=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.log.info(f"Opening: {path}")
|
||||||
|
|
||||||
|
open_file(path)
|
||||||
|
|
||||||
|
return LoaderActionResult(
|
||||||
|
"File was opened...",
|
||||||
|
success=True,
|
||||||
|
)
|
||||||
69
client/ayon_core/plugins/loader/push_to_project.py
Normal file
69
client/ayon_core/plugins/loader/push_to_project.py
Normal file
|
|
@ -0,0 +1,69 @@
|
||||||
|
import os
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
from ayon_core import AYON_CORE_ROOT
|
||||||
|
from ayon_core.lib import get_ayon_launcher_args, run_detached_process
|
||||||
|
|
||||||
|
from ayon_core.pipeline.actions import (
|
||||||
|
LoaderSimpleActionPlugin,
|
||||||
|
LoaderActionSelection,
|
||||||
|
LoaderActionResult,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class PushToProject(LoaderSimpleActionPlugin):
|
||||||
|
identifier = "core.push-to-project"
|
||||||
|
label = "Push to project"
|
||||||
|
order = 35
|
||||||
|
icon = {
|
||||||
|
"type": "material-symbols",
|
||||||
|
"name": "send",
|
||||||
|
"color": "#d8d8d8",
|
||||||
|
}
|
||||||
|
|
||||||
|
def is_compatible(
|
||||||
|
self, selection: LoaderActionSelection
|
||||||
|
) -> bool:
|
||||||
|
if not selection.versions_selected():
|
||||||
|
return False
|
||||||
|
|
||||||
|
version_ids = set(selection.selected_ids)
|
||||||
|
product_ids = {
|
||||||
|
product["id"]
|
||||||
|
for product in selection.entities.get_versions_products(
|
||||||
|
version_ids
|
||||||
|
)
|
||||||
|
}
|
||||||
|
folder_ids = {
|
||||||
|
folder["id"]
|
||||||
|
for folder in selection.entities.get_products_folders(
|
||||||
|
product_ids
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(folder_ids) == 1:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def execute_simple_action(
|
||||||
|
self,
|
||||||
|
selection: LoaderActionSelection,
|
||||||
|
form_values: dict[str, Any],
|
||||||
|
) -> Optional[LoaderActionResult]:
|
||||||
|
push_tool_script_path = os.path.join(
|
||||||
|
AYON_CORE_ROOT,
|
||||||
|
"tools",
|
||||||
|
"push_to_project",
|
||||||
|
"main.py"
|
||||||
|
)
|
||||||
|
|
||||||
|
args = get_ayon_launcher_args(
|
||||||
|
push_tool_script_path,
|
||||||
|
"--project", selection.project_name,
|
||||||
|
"--versions", ",".join(selection.selected_ids)
|
||||||
|
)
|
||||||
|
run_detached_process(args)
|
||||||
|
return LoaderActionResult(
|
||||||
|
message="Push to project tool opened...",
|
||||||
|
success=True,
|
||||||
|
)
|
||||||
|
|
@ -16,6 +16,7 @@ Provides:
|
||||||
import json
|
import json
|
||||||
import pyblish.api
|
import pyblish.api
|
||||||
|
|
||||||
|
from ayon_core.lib import get_ayon_user_entity
|
||||||
from ayon_core.pipeline.template_data import get_template_data
|
from ayon_core.pipeline.template_data import get_template_data
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -55,17 +56,18 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin):
|
||||||
if folder_entity:
|
if folder_entity:
|
||||||
task_entity = context.data["taskEntity"]
|
task_entity = context.data["taskEntity"]
|
||||||
|
|
||||||
|
username = context.data["user"]
|
||||||
|
user_entity = get_ayon_user_entity(username)
|
||||||
anatomy_data = get_template_data(
|
anatomy_data = get_template_data(
|
||||||
project_entity,
|
project_entity,
|
||||||
folder_entity,
|
folder_entity,
|
||||||
task_entity,
|
task_entity,
|
||||||
host_name,
|
host_name=host_name,
|
||||||
project_settings
|
settings=project_settings,
|
||||||
|
user_entity=user_entity,
|
||||||
)
|
)
|
||||||
anatomy_data.update(context.data.get("datetimeData") or {})
|
anatomy_data.update(context.data.get("datetimeData") or {})
|
||||||
|
|
||||||
username = context.data["user"]
|
|
||||||
anatomy_data["user"] = username
|
|
||||||
# Backwards compatibility for 'username' key
|
# Backwards compatibility for 'username' key
|
||||||
anatomy_data["username"] = username
|
anatomy_data["username"] = username
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -301,8 +301,6 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
|
||||||
product_name = instance.data["productName"]
|
product_name = instance.data["productName"]
|
||||||
product_type = instance.data["productType"]
|
product_type = instance.data["productType"]
|
||||||
anatomy_data.update({
|
anatomy_data.update({
|
||||||
"family": product_type,
|
|
||||||
"subset": product_name,
|
|
||||||
"product": {
|
"product": {
|
||||||
"name": product_name,
|
"name": product_name,
|
||||||
"type": product_type,
|
"type": product_type,
|
||||||
|
|
|
||||||
|
|
@ -52,7 +52,7 @@ class CollectAudio(pyblish.api.ContextPlugin):
|
||||||
context, self.__class__
|
context, self.__class__
|
||||||
):
|
):
|
||||||
# Skip instances that already have audio filled
|
# Skip instances that already have audio filled
|
||||||
if instance.data.get("audio"):
|
if "audio" in instance.data:
|
||||||
self.log.debug(
|
self.log.debug(
|
||||||
"Skipping Audio collection. It is already collected"
|
"Skipping Audio collection. It is already collected"
|
||||||
)
|
)
|
||||||
|
|
|
||||||
|
|
@ -32,6 +32,7 @@ class CollectCoreJobEnvVars(pyblish.api.ContextPlugin):
|
||||||
|
|
||||||
for key in [
|
for key in [
|
||||||
"AYON_BUNDLE_NAME",
|
"AYON_BUNDLE_NAME",
|
||||||
|
"AYON_STUDIO_BUNDLE_NAME",
|
||||||
"AYON_USE_STAGING",
|
"AYON_USE_STAGING",
|
||||||
"AYON_IN_TESTS",
|
"AYON_IN_TESTS",
|
||||||
# NOTE Not sure why workdir is needed?
|
# NOTE Not sure why workdir is needed?
|
||||||
|
|
|
||||||
|
|
@ -25,7 +25,7 @@ class CollectManagedStagingDir(pyblish.api.InstancePlugin):
|
||||||
Location of the folder is configured in:
|
Location of the folder is configured in:
|
||||||
`ayon+anatomy://_/templates/staging`.
|
`ayon+anatomy://_/templates/staging`.
|
||||||
|
|
||||||
Which family/task type/subset is applicable is configured in:
|
Which product type/task type/product is applicable is configured in:
|
||||||
`ayon+settings://core/tools/publish/custom_staging_dir_profiles`
|
`ayon+settings://core/tools/publish/custom_staging_dir_profiles`
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -71,6 +71,12 @@ class CollectOtioRanges(pyblish.api.InstancePlugin):
|
||||||
import opentimelineio as otio
|
import opentimelineio as otio
|
||||||
|
|
||||||
otio_clip = instance.data["otioClip"]
|
otio_clip = instance.data["otioClip"]
|
||||||
|
if isinstance(
|
||||||
|
otio_clip.media_reference,
|
||||||
|
otio.schema.MissingReference
|
||||||
|
):
|
||||||
|
self.log.info("Clip has no media reference")
|
||||||
|
return
|
||||||
|
|
||||||
# Collect timeline ranges if workfile start frame is available
|
# Collect timeline ranges if workfile start frame is available
|
||||||
if "workfileFrameStart" in instance.data:
|
if "workfileFrameStart" in instance.data:
|
||||||
|
|
|
||||||
|
|
@ -60,6 +60,13 @@ class CollectOtioSubsetResources(
|
||||||
|
|
||||||
# get basic variables
|
# get basic variables
|
||||||
otio_clip = instance.data["otioClip"]
|
otio_clip = instance.data["otioClip"]
|
||||||
|
if isinstance(
|
||||||
|
otio_clip.media_reference,
|
||||||
|
otio.schema.MissingReference
|
||||||
|
):
|
||||||
|
self.log.info("Clip has no media reference")
|
||||||
|
return
|
||||||
|
|
||||||
otio_available_range = otio_clip.available_range()
|
otio_available_range = otio_clip.available_range()
|
||||||
media_fps = otio_available_range.start_time.rate
|
media_fps = otio_available_range.start_time.rate
|
||||||
available_duration = otio_available_range.duration.value
|
available_duration = otio_available_range.duration.value
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
from typing import Any
|
||||||
import ayon_api
|
import ayon_api
|
||||||
import ayon_api.utils
|
import ayon_api.utils
|
||||||
|
|
||||||
|
|
@ -11,20 +13,6 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):
|
||||||
|
|
||||||
order = pyblish.api.CollectorOrder + 0.0001
|
order = pyblish.api.CollectorOrder + 0.0001
|
||||||
label = "Collect Versions Loaded in Scene"
|
label = "Collect Versions Loaded in Scene"
|
||||||
hosts = [
|
|
||||||
"aftereffects",
|
|
||||||
"blender",
|
|
||||||
"celaction",
|
|
||||||
"fusion",
|
|
||||||
"harmony",
|
|
||||||
"hiero",
|
|
||||||
"houdini",
|
|
||||||
"maya",
|
|
||||||
"nuke",
|
|
||||||
"photoshop",
|
|
||||||
"resolve",
|
|
||||||
"tvpaint"
|
|
||||||
]
|
|
||||||
|
|
||||||
def process(self, context):
|
def process(self, context):
|
||||||
host = registered_host()
|
host = registered_host()
|
||||||
|
|
@ -46,6 +34,8 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):
|
||||||
self.log.debug("No loaded containers found in scene.")
|
self.log.debug("No loaded containers found in scene.")
|
||||||
return
|
return
|
||||||
|
|
||||||
|
containers = self._filter_invalid_containers(containers)
|
||||||
|
|
||||||
repre_ids = {
|
repre_ids = {
|
||||||
container["representation"]
|
container["representation"]
|
||||||
for container in containers
|
for container in containers
|
||||||
|
|
@ -92,3 +82,28 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):
|
||||||
|
|
||||||
self.log.debug(f"Collected {len(loaded_versions)} loaded versions.")
|
self.log.debug(f"Collected {len(loaded_versions)} loaded versions.")
|
||||||
context.data["loadedVersions"] = loaded_versions
|
context.data["loadedVersions"] = loaded_versions
|
||||||
|
|
||||||
|
def _filter_invalid_containers(
|
||||||
|
self,
|
||||||
|
containers: list[dict[str, Any]]
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
"""Filter out invalid containers lacking required keys.
|
||||||
|
|
||||||
|
Skip any invalid containers that lack 'representation' or 'name'
|
||||||
|
keys to avoid KeyError.
|
||||||
|
"""
|
||||||
|
# Only filter by what's required for this plug-in instead of validating
|
||||||
|
# a full container schema.
|
||||||
|
required_keys = {"name", "representation"}
|
||||||
|
valid = []
|
||||||
|
for container in containers:
|
||||||
|
missing = [key for key in required_keys if key not in container]
|
||||||
|
if missing:
|
||||||
|
self.log.warning(
|
||||||
|
"Skipping invalid container, missing required keys:"
|
||||||
|
" {}. {}".format(", ".join(missing), container)
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
valid.append(container)
|
||||||
|
|
||||||
|
return valid
|
||||||
|
|
|
||||||
|
|
@ -316,22 +316,8 @@ class ExtractBurnin(publish.Extractor):
|
||||||
burnin_values = {}
|
burnin_values = {}
|
||||||
for key in self.positions:
|
for key in self.positions:
|
||||||
value = burnin_def.get(key)
|
value = burnin_def.get(key)
|
||||||
if not value:
|
if value:
|
||||||
continue
|
burnin_values[key] = value
|
||||||
# TODO remove replacements
|
|
||||||
burnin_values[key] = (
|
|
||||||
value
|
|
||||||
.replace("{task}", "{task[name]}")
|
|
||||||
.replace("{product[name]}", "{subset}")
|
|
||||||
.replace("{Product[name]}", "{Subset}")
|
|
||||||
.replace("{PRODUCT[NAME]}", "{SUBSET}")
|
|
||||||
.replace("{product[type]}", "{family}")
|
|
||||||
.replace("{Product[type]}", "{Family}")
|
|
||||||
.replace("{PRODUCT[TYPE]}", "{FAMILY}")
|
|
||||||
.replace("{folder[name]}", "{asset}")
|
|
||||||
.replace("{Folder[name]}", "{Asset}")
|
|
||||||
.replace("{FOLDER[NAME]}", "{ASSET}")
|
|
||||||
)
|
|
||||||
|
|
||||||
# Remove "delete" tag from new representation
|
# Remove "delete" tag from new representation
|
||||||
if "delete" in new_repre["tags"]:
|
if "delete" in new_repre["tags"]:
|
||||||
|
|
|
||||||
|
|
@ -11,6 +11,7 @@ from ayon_core.lib import (
|
||||||
is_oiio_supported,
|
is_oiio_supported,
|
||||||
)
|
)
|
||||||
from ayon_core.lib.transcoding import (
|
from ayon_core.lib.transcoding import (
|
||||||
|
MissingRGBAChannelsError,
|
||||||
oiio_color_convert,
|
oiio_color_convert,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -86,15 +87,19 @@ class ExtractOIIOTranscode(publish.Extractor):
|
||||||
profile_output_defs = profile["outputs"]
|
profile_output_defs = profile["outputs"]
|
||||||
new_representations = []
|
new_representations = []
|
||||||
repres = instance.data["representations"]
|
repres = instance.data["representations"]
|
||||||
for idx, repre in enumerate(list(repres)):
|
|
||||||
# target space, display and view might be defined upstream
|
|
||||||
# TODO: address https://github.com/ynput/ayon-core/pull/1268#discussion_r2156555474
|
|
||||||
# Implement upstream logic to handle target_colorspace,
|
|
||||||
# target_display, target_view in other DCCs
|
|
||||||
target_colorspace = False
|
|
||||||
target_display = instance.data.get("colorspaceDisplay")
|
|
||||||
target_view = instance.data.get("colorspaceView")
|
|
||||||
|
|
||||||
|
scene_display = instance.data.get(
|
||||||
|
"sceneDisplay",
|
||||||
|
# Backward compatibility
|
||||||
|
instance.data.get("colorspaceDisplay")
|
||||||
|
)
|
||||||
|
scene_view = instance.data.get(
|
||||||
|
"sceneView",
|
||||||
|
# Backward compatibility
|
||||||
|
instance.data.get("colorspaceView")
|
||||||
|
)
|
||||||
|
|
||||||
|
for idx, repre in enumerate(list(repres)):
|
||||||
self.log.debug("repre ({}): `{}`".format(idx + 1, repre["name"]))
|
self.log.debug("repre ({}): `{}`".format(idx + 1, repre["name"]))
|
||||||
if not self._repre_is_valid(repre):
|
if not self._repre_is_valid(repre):
|
||||||
continue
|
continue
|
||||||
|
|
@ -111,7 +116,17 @@ class ExtractOIIOTranscode(publish.Extractor):
|
||||||
self.log.warning("Config file doesn't exist, skipping")
|
self.log.warning("Config file doesn't exist, skipping")
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
# Get representation files to convert
|
||||||
|
if isinstance(repre["files"], list):
|
||||||
|
repre_files_to_convert = copy.deepcopy(repre["files"])
|
||||||
|
else:
|
||||||
|
repre_files_to_convert = [repre["files"]]
|
||||||
|
|
||||||
|
# Process each output definition
|
||||||
for output_def in profile_output_defs:
|
for output_def in profile_output_defs:
|
||||||
|
# Local copy to avoid accidental mutable changes
|
||||||
|
files_to_convert = list(repre_files_to_convert)
|
||||||
|
|
||||||
output_name = output_def["name"]
|
output_name = output_def["name"]
|
||||||
new_repre = copy.deepcopy(repre)
|
new_repre = copy.deepcopy(repre)
|
||||||
|
|
||||||
|
|
@ -122,11 +137,6 @@ class ExtractOIIOTranscode(publish.Extractor):
|
||||||
)
|
)
|
||||||
new_repre["stagingDir"] = new_staging_dir
|
new_repre["stagingDir"] = new_staging_dir
|
||||||
|
|
||||||
if isinstance(new_repre["files"], list):
|
|
||||||
files_to_convert = copy.deepcopy(new_repre["files"])
|
|
||||||
else:
|
|
||||||
files_to_convert = [new_repre["files"]]
|
|
||||||
|
|
||||||
output_extension = output_def["extension"]
|
output_extension = output_def["extension"]
|
||||||
output_extension = output_extension.replace('.', '')
|
output_extension = output_extension.replace('.', '')
|
||||||
self._rename_in_representation(new_repre,
|
self._rename_in_representation(new_repre,
|
||||||
|
|
@ -136,24 +146,18 @@ class ExtractOIIOTranscode(publish.Extractor):
|
||||||
|
|
||||||
transcoding_type = output_def["transcoding_type"]
|
transcoding_type = output_def["transcoding_type"]
|
||||||
|
|
||||||
# NOTE: we use colorspace_data as the fallback values for
|
# Set target colorspace/display/view based on transcoding type
|
||||||
# the target colorspace.
|
target_colorspace = None
|
||||||
|
target_view = None
|
||||||
|
target_display = None
|
||||||
if transcoding_type == "colorspace":
|
if transcoding_type == "colorspace":
|
||||||
# TODO: Should we fallback to the colorspace
|
target_colorspace = output_def["colorspace"]
|
||||||
# (which used as source above) ?
|
|
||||||
# or should we compute the target colorspace from
|
|
||||||
# current view and display ?
|
|
||||||
target_colorspace = (output_def["colorspace"] or
|
|
||||||
colorspace_data.get("colorspace"))
|
|
||||||
elif transcoding_type == "display_view":
|
elif transcoding_type == "display_view":
|
||||||
display_view = output_def["display_view"]
|
display_view = output_def["display_view"]
|
||||||
target_view = (
|
# If empty values are provided in output definition,
|
||||||
display_view["view"]
|
# fallback to scene display/view that is collected from DCC
|
||||||
or colorspace_data.get("view"))
|
target_view = display_view["view"] or scene_view
|
||||||
target_display = (
|
target_display = display_view["display"] or scene_display
|
||||||
display_view["display"]
|
|
||||||
or colorspace_data.get("display")
|
|
||||||
)
|
|
||||||
|
|
||||||
# both could be already collected by DCC,
|
# both could be already collected by DCC,
|
||||||
# but could be overwritten when transcoding
|
# but could be overwritten when transcoding
|
||||||
|
|
@ -168,17 +172,37 @@ class ExtractOIIOTranscode(publish.Extractor):
|
||||||
additional_command_args = (output_def["oiiotool_args"]
|
additional_command_args = (output_def["oiiotool_args"]
|
||||||
["additional_command_args"])
|
["additional_command_args"])
|
||||||
|
|
||||||
files_to_convert = self._translate_to_sequence(
|
sequence_files = self._translate_to_sequence(
|
||||||
files_to_convert)
|
files_to_convert)
|
||||||
self.log.debug("Files to convert: {}".format(files_to_convert))
|
self.log.debug("Files to convert: {}".format(sequence_files))
|
||||||
for file_name in files_to_convert:
|
missing_rgba_review_channels = False
|
||||||
|
for file_name in sequence_files:
|
||||||
|
if isinstance(file_name, clique.Collection):
|
||||||
|
# Support sequences with holes by supplying
|
||||||
|
# dedicated `--frames` argument to `oiiotool`
|
||||||
|
# Create `frames` string like "1001-1002,1004,1010-1012
|
||||||
|
# Create `filename` string like "file.#.exr"
|
||||||
|
frames = file_name.format("{ranges}").replace(" ", "")
|
||||||
|
frame_padding = file_name.padding
|
||||||
|
file_name = file_name.format("{head}#{tail}")
|
||||||
|
parallel_frames = True
|
||||||
|
elif isinstance(file_name, str):
|
||||||
|
# Single file
|
||||||
|
frames = None
|
||||||
|
frame_padding = None
|
||||||
|
parallel_frames = False
|
||||||
|
else:
|
||||||
|
raise TypeError(
|
||||||
|
f"Unsupported file name type: {type(file_name)}."
|
||||||
|
" Expected str or clique.Collection."
|
||||||
|
)
|
||||||
|
|
||||||
self.log.debug("Transcoding file: `{}`".format(file_name))
|
self.log.debug("Transcoding file: `{}`".format(file_name))
|
||||||
input_path = os.path.join(original_staging_dir,
|
input_path = os.path.join(original_staging_dir, file_name)
|
||||||
file_name)
|
|
||||||
output_path = self._get_output_file_path(input_path,
|
output_path = self._get_output_file_path(input_path,
|
||||||
new_staging_dir,
|
new_staging_dir,
|
||||||
output_extension)
|
output_extension)
|
||||||
|
try:
|
||||||
oiio_color_convert(
|
oiio_color_convert(
|
||||||
input_path=input_path,
|
input_path=input_path,
|
||||||
output_path=output_path,
|
output_path=output_path,
|
||||||
|
|
@ -190,8 +214,23 @@ class ExtractOIIOTranscode(publish.Extractor):
|
||||||
source_display=source_display,
|
source_display=source_display,
|
||||||
source_view=source_view,
|
source_view=source_view,
|
||||||
additional_command_args=additional_command_args,
|
additional_command_args=additional_command_args,
|
||||||
|
frames=frames,
|
||||||
|
frame_padding=frame_padding,
|
||||||
|
parallel_frames=parallel_frames,
|
||||||
logger=self.log
|
logger=self.log
|
||||||
)
|
)
|
||||||
|
except MissingRGBAChannelsError as exc:
|
||||||
|
missing_rgba_review_channels = True
|
||||||
|
self.log.error(exc)
|
||||||
|
self.log.error(
|
||||||
|
"Skipping OIIO Transcode. Unknown RGBA channels"
|
||||||
|
f" for colorspace conversion in file: {input_path}"
|
||||||
|
)
|
||||||
|
break
|
||||||
|
|
||||||
|
if missing_rgba_review_channels:
|
||||||
|
# Stop processing this representation
|
||||||
|
break
|
||||||
|
|
||||||
# cleanup temporary transcoded files
|
# cleanup temporary transcoded files
|
||||||
for file_name in new_repre["files"]:
|
for file_name in new_repre["files"]:
|
||||||
|
|
@ -217,11 +256,11 @@ class ExtractOIIOTranscode(publish.Extractor):
|
||||||
added_review = True
|
added_review = True
|
||||||
|
|
||||||
# If there is only 1 file outputted then convert list to
|
# If there is only 1 file outputted then convert list to
|
||||||
# string, cause that'll indicate that its not a sequence.
|
# string, because that'll indicate that it is not a sequence.
|
||||||
if len(new_repre["files"]) == 1:
|
if len(new_repre["files"]) == 1:
|
||||||
new_repre["files"] = new_repre["files"][0]
|
new_repre["files"] = new_repre["files"][0]
|
||||||
|
|
||||||
# If the source representation has "review" tag, but its not
|
# If the source representation has "review" tag, but it's not
|
||||||
# part of the output definition tags, then both the
|
# part of the output definition tags, then both the
|
||||||
# representations will be transcoded in ExtractReview and
|
# representations will be transcoded in ExtractReview and
|
||||||
# their outputs will clash in integration.
|
# their outputs will clash in integration.
|
||||||
|
|
@ -271,42 +310,29 @@ class ExtractOIIOTranscode(publish.Extractor):
|
||||||
new_repre["files"] = renamed_files
|
new_repre["files"] = renamed_files
|
||||||
|
|
||||||
def _translate_to_sequence(self, files_to_convert):
|
def _translate_to_sequence(self, files_to_convert):
|
||||||
"""Returns original list or list with filename formatted in single
|
"""Returns original individual filepaths or list of clique.Collection.
|
||||||
sequence format.
|
|
||||||
|
|
||||||
Uses clique to find frame sequence, in this case it merges all frames
|
Uses clique to find frame sequence, and return the collections instead.
|
||||||
into sequence format (FRAMESTART-FRAMEEND#) and returns it.
|
If sequence not detected in input filenames, it returns original list.
|
||||||
If sequence not found, it returns original list
|
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
files_to_convert (list): list of file names
|
files_to_convert (list[str]): list of file names
|
||||||
Returns:
|
Returns:
|
||||||
(list) of [file.1001-1010#.exr] or [fileA.exr, fileB.exr]
|
list[str | clique.Collection]: List of
|
||||||
|
filepaths ['fileA.exr', 'fileB.exr']
|
||||||
|
or clique.Collection for a sequence.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
pattern = [clique.PATTERNS["frames"]]
|
pattern = [clique.PATTERNS["frames"]]
|
||||||
collections, _ = clique.assemble(
|
collections, _ = clique.assemble(
|
||||||
files_to_convert, patterns=pattern,
|
files_to_convert, patterns=pattern,
|
||||||
assume_padded_when_ambiguous=True)
|
assume_padded_when_ambiguous=True)
|
||||||
|
|
||||||
if collections:
|
if collections:
|
||||||
if len(collections) > 1:
|
if len(collections) > 1:
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
"Too many collections {}".format(collections))
|
"Too many collections {}".format(collections))
|
||||||
|
|
||||||
collection = collections[0]
|
return collections
|
||||||
frames = list(collection.indexes)
|
|
||||||
if collection.holes().indexes:
|
|
||||||
return files_to_convert
|
|
||||||
|
|
||||||
# Get the padding from the collection
|
|
||||||
# This is the number of digits used in the frame numbers
|
|
||||||
padding = collection.padding
|
|
||||||
|
|
||||||
frame_str = "{}-{}%0{}d".format(frames[0], frames[-1], padding)
|
|
||||||
file_name = "{}{}{}".format(collection.head, frame_str,
|
|
||||||
collection.tail)
|
|
||||||
|
|
||||||
files_to_convert = [file_name]
|
|
||||||
|
|
||||||
return files_to_convert
|
return files_to_convert
|
||||||
|
|
||||||
|
|
|
||||||
353
client/ayon_core/plugins/publish/extract_oiio_postprocess.py
Normal file
353
client/ayon_core/plugins/publish/extract_oiio_postprocess.py
Normal file
|
|
@ -0,0 +1,353 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
from typing import Any, Optional
|
||||||
|
import os
|
||||||
|
import copy
|
||||||
|
import clique
|
||||||
|
import pyblish.api
|
||||||
|
|
||||||
|
from ayon_core.pipeline import (
|
||||||
|
publish,
|
||||||
|
get_temp_dir
|
||||||
|
)
|
||||||
|
from ayon_core.lib import (
|
||||||
|
is_oiio_supported,
|
||||||
|
get_oiio_tool_args,
|
||||||
|
run_subprocess
|
||||||
|
)
|
||||||
|
from ayon_core.lib.transcoding import IMAGE_EXTENSIONS
|
||||||
|
from ayon_core.lib.profiles_filtering import filter_profiles
|
||||||
|
|
||||||
|
|
||||||
|
class ExtractOIIOPostProcess(publish.Extractor):
|
||||||
|
"""Process representations through `oiiotool` with profile defined
|
||||||
|
settings so that e.g. color space conversions can be applied or images
|
||||||
|
could be converted to scanline, resized, etc. regardless of colorspace
|
||||||
|
data.
|
||||||
|
"""
|
||||||
|
|
||||||
|
label = "OIIO Post Process"
|
||||||
|
order = pyblish.api.ExtractorOrder + 0.020
|
||||||
|
|
||||||
|
settings_category = "core"
|
||||||
|
|
||||||
|
optional = True
|
||||||
|
|
||||||
|
# Supported extensions
|
||||||
|
supported_exts = {ext.lstrip(".") for ext in IMAGE_EXTENSIONS}
|
||||||
|
|
||||||
|
# Configurable by Settings
|
||||||
|
profiles = None
|
||||||
|
options = None
|
||||||
|
|
||||||
|
def process(self, instance):
|
||||||
|
if instance.data.get("farm"):
|
||||||
|
self.log.debug("Should be processed on farm, skipping.")
|
||||||
|
return
|
||||||
|
|
||||||
|
if not self.profiles:
|
||||||
|
self.log.debug("No profiles present for OIIO Post Process")
|
||||||
|
return
|
||||||
|
|
||||||
|
if not instance.data.get("representations"):
|
||||||
|
self.log.debug("No representations, skipping.")
|
||||||
|
return
|
||||||
|
|
||||||
|
if not is_oiio_supported():
|
||||||
|
self.log.warning("OIIO not supported, no transcoding possible.")
|
||||||
|
return
|
||||||
|
|
||||||
|
new_representations = []
|
||||||
|
for idx, repre in enumerate(list(instance.data["representations"])):
|
||||||
|
self.log.debug("repre ({}): `{}`".format(idx + 1, repre["name"]))
|
||||||
|
if not self._repre_is_valid(repre):
|
||||||
|
continue
|
||||||
|
|
||||||
|
# We check profile per representation name and extension because
|
||||||
|
# it's included in the profile check. As such, an instance may have
|
||||||
|
# a different profile applied per representation.
|
||||||
|
profile = self._get_profile(
|
||||||
|
instance,
|
||||||
|
repre
|
||||||
|
)
|
||||||
|
if not profile:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Get representation files to convert
|
||||||
|
if isinstance(repre["files"], list):
|
||||||
|
repre_files_to_convert = copy.deepcopy(repre["files"])
|
||||||
|
else:
|
||||||
|
repre_files_to_convert = [repre["files"]]
|
||||||
|
|
||||||
|
added_representations = False
|
||||||
|
added_review = False
|
||||||
|
|
||||||
|
# Process each output definition
|
||||||
|
for output_def in profile["outputs"]:
|
||||||
|
|
||||||
|
# Local copy to avoid accidental mutable changes
|
||||||
|
files_to_convert = list(repre_files_to_convert)
|
||||||
|
|
||||||
|
output_name = output_def["name"]
|
||||||
|
new_repre = copy.deepcopy(repre)
|
||||||
|
|
||||||
|
original_staging_dir = new_repre["stagingDir"]
|
||||||
|
new_staging_dir = get_temp_dir(
|
||||||
|
project_name=instance.context.data["projectName"],
|
||||||
|
use_local_temp=True,
|
||||||
|
)
|
||||||
|
new_repre["stagingDir"] = new_staging_dir
|
||||||
|
|
||||||
|
output_extension = output_def["extension"]
|
||||||
|
output_extension = output_extension.replace('.', '')
|
||||||
|
self._rename_in_representation(new_repre,
|
||||||
|
files_to_convert,
|
||||||
|
output_name,
|
||||||
|
output_extension)
|
||||||
|
|
||||||
|
sequence_files = self._translate_to_sequence(files_to_convert)
|
||||||
|
self.log.debug("Files to convert: {}".format(sequence_files))
|
||||||
|
for file_name in sequence_files:
|
||||||
|
if isinstance(file_name, clique.Collection):
|
||||||
|
# Convert to filepath that can be directly converted
|
||||||
|
# by oiio like `frame.1001-1025%04d.exr`
|
||||||
|
file_name: str = file_name.format(
|
||||||
|
"{head}{range}{padding}{tail}"
|
||||||
|
)
|
||||||
|
|
||||||
|
self.log.debug("Transcoding file: `{}`".format(file_name))
|
||||||
|
input_path = os.path.join(original_staging_dir,
|
||||||
|
file_name)
|
||||||
|
output_path = self._get_output_file_path(input_path,
|
||||||
|
new_staging_dir,
|
||||||
|
output_extension)
|
||||||
|
|
||||||
|
# TODO: Support formatting with dynamic keys from the
|
||||||
|
# representation, like e.g. colorspace config, display,
|
||||||
|
# view, etc.
|
||||||
|
input_arguments: list[str] = output_def.get(
|
||||||
|
"input_arguments", []
|
||||||
|
)
|
||||||
|
output_arguments: list[str] = output_def.get(
|
||||||
|
"output_arguments", []
|
||||||
|
)
|
||||||
|
|
||||||
|
# Prepare subprocess arguments
|
||||||
|
oiio_cmd = get_oiio_tool_args(
|
||||||
|
"oiiotool",
|
||||||
|
*input_arguments,
|
||||||
|
input_path,
|
||||||
|
*output_arguments,
|
||||||
|
"-o",
|
||||||
|
output_path
|
||||||
|
)
|
||||||
|
|
||||||
|
self.log.debug(
|
||||||
|
"Conversion command: {}".format(" ".join(oiio_cmd)))
|
||||||
|
run_subprocess(oiio_cmd, logger=self.log)
|
||||||
|
|
||||||
|
# cleanup temporary transcoded files
|
||||||
|
for file_name in new_repre["files"]:
|
||||||
|
transcoded_file_path = os.path.join(new_staging_dir,
|
||||||
|
file_name)
|
||||||
|
instance.context.data["cleanupFullPaths"].append(
|
||||||
|
transcoded_file_path)
|
||||||
|
|
||||||
|
custom_tags = output_def.get("custom_tags")
|
||||||
|
if custom_tags:
|
||||||
|
if new_repre.get("custom_tags") is None:
|
||||||
|
new_repre["custom_tags"] = []
|
||||||
|
new_repre["custom_tags"].extend(custom_tags)
|
||||||
|
|
||||||
|
# Add additional tags from output definition to representation
|
||||||
|
if new_repre.get("tags") is None:
|
||||||
|
new_repre["tags"] = []
|
||||||
|
for tag in output_def["tags"]:
|
||||||
|
if tag not in new_repre["tags"]:
|
||||||
|
new_repre["tags"].append(tag)
|
||||||
|
|
||||||
|
if tag == "review":
|
||||||
|
added_review = True
|
||||||
|
|
||||||
|
# If there is only 1 file outputted then convert list to
|
||||||
|
# string, because that'll indicate that it is not a sequence.
|
||||||
|
if len(new_repre["files"]) == 1:
|
||||||
|
new_repre["files"] = new_repre["files"][0]
|
||||||
|
|
||||||
|
# If the source representation has "review" tag, but it's not
|
||||||
|
# part of the output definition tags, then both the
|
||||||
|
# representations will be transcoded in ExtractReview and
|
||||||
|
# their outputs will clash in integration.
|
||||||
|
if "review" in repre.get("tags", []):
|
||||||
|
added_review = True
|
||||||
|
|
||||||
|
new_representations.append(new_repre)
|
||||||
|
added_representations = True
|
||||||
|
|
||||||
|
if added_representations:
|
||||||
|
self._mark_original_repre_for_deletion(
|
||||||
|
repre, profile, added_review
|
||||||
|
)
|
||||||
|
|
||||||
|
tags = repre.get("tags") or []
|
||||||
|
if "delete" in tags and "thumbnail" not in tags:
|
||||||
|
instance.data["representations"].remove(repre)
|
||||||
|
|
||||||
|
instance.data["representations"].extend(new_representations)
|
||||||
|
|
||||||
|
def _rename_in_representation(self, new_repre, files_to_convert,
|
||||||
|
output_name, output_extension):
|
||||||
|
"""Replace old extension with new one everywhere in representation.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
new_repre (dict)
|
||||||
|
files_to_convert (list): of filenames from repre["files"],
|
||||||
|
standardized to always list
|
||||||
|
output_name (str): key of output definition from Settings,
|
||||||
|
if "<passthrough>" token used, keep original repre name
|
||||||
|
output_extension (str): extension from output definition
|
||||||
|
"""
|
||||||
|
if output_name != "passthrough":
|
||||||
|
new_repre["name"] = output_name
|
||||||
|
if not output_extension:
|
||||||
|
return
|
||||||
|
|
||||||
|
new_repre["ext"] = output_extension
|
||||||
|
new_repre["outputName"] = output_name
|
||||||
|
|
||||||
|
renamed_files = []
|
||||||
|
for file_name in files_to_convert:
|
||||||
|
file_name, _ = os.path.splitext(file_name)
|
||||||
|
file_name = '{}.{}'.format(file_name,
|
||||||
|
output_extension)
|
||||||
|
renamed_files.append(file_name)
|
||||||
|
new_repre["files"] = renamed_files
|
||||||
|
|
||||||
|
def _translate_to_sequence(self, files_to_convert):
|
||||||
|
"""Returns original list or a clique.Collection of a sequence.
|
||||||
|
|
||||||
|
Uses clique to find frame sequence Collection.
|
||||||
|
If sequence not found, it returns original list.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
files_to_convert (list): list of file names
|
||||||
|
Returns:
|
||||||
|
list[str | clique.Collection]: List of filepaths or a list
|
||||||
|
of Collections (usually one, unless there are holes)
|
||||||
|
"""
|
||||||
|
pattern = [clique.PATTERNS["frames"]]
|
||||||
|
collections, _ = clique.assemble(
|
||||||
|
files_to_convert, patterns=pattern,
|
||||||
|
assume_padded_when_ambiguous=True)
|
||||||
|
if collections:
|
||||||
|
if len(collections) > 1:
|
||||||
|
raise ValueError(
|
||||||
|
"Too many collections {}".format(collections))
|
||||||
|
|
||||||
|
collection = collections[0]
|
||||||
|
# TODO: Technically oiiotool supports holes in the sequence as well
|
||||||
|
# using the dedicated --frames argument to specify the frames.
|
||||||
|
# We may want to use that too so conversions of sequences with
|
||||||
|
# holes will perform faster as well.
|
||||||
|
# Separate the collection so that we have no holes/gaps per
|
||||||
|
# collection.
|
||||||
|
return collection.separate()
|
||||||
|
|
||||||
|
return files_to_convert
|
||||||
|
|
||||||
|
def _get_output_file_path(self, input_path, output_dir,
|
||||||
|
output_extension):
|
||||||
|
"""Create output file name path."""
|
||||||
|
file_name = os.path.basename(input_path)
|
||||||
|
file_name, input_extension = os.path.splitext(file_name)
|
||||||
|
if not output_extension:
|
||||||
|
output_extension = input_extension.replace(".", "")
|
||||||
|
new_file_name = '{}.{}'.format(file_name,
|
||||||
|
output_extension)
|
||||||
|
return os.path.join(output_dir, new_file_name)
|
||||||
|
|
||||||
|
def _get_profile(
|
||||||
|
self,
|
||||||
|
instance: pyblish.api.Instance,
|
||||||
|
repre: dict
|
||||||
|
) -> Optional[dict[str, Any]]:
|
||||||
|
"""Returns profile if it should process this instance."""
|
||||||
|
host_name = instance.context.data["hostName"]
|
||||||
|
product_type = instance.data["productType"]
|
||||||
|
product_name = instance.data["productName"]
|
||||||
|
task_data = instance.data["anatomyData"].get("task", {})
|
||||||
|
task_name = task_data.get("name")
|
||||||
|
task_type = task_data.get("type")
|
||||||
|
repre_name: str = repre["name"]
|
||||||
|
repre_ext: str = repre["ext"]
|
||||||
|
filtering_criteria = {
|
||||||
|
"host_names": host_name,
|
||||||
|
"product_types": product_type,
|
||||||
|
"product_names": product_name,
|
||||||
|
"task_names": task_name,
|
||||||
|
"task_types": task_type,
|
||||||
|
"representation_names": repre_name,
|
||||||
|
"representation_exts": repre_ext,
|
||||||
|
}
|
||||||
|
profile = filter_profiles(self.profiles, filtering_criteria,
|
||||||
|
logger=self.log)
|
||||||
|
|
||||||
|
if not profile:
|
||||||
|
self.log.debug(
|
||||||
|
"Skipped instance. None of profiles in presets are for"
|
||||||
|
f" Host: \"{host_name}\" |"
|
||||||
|
f" Product types: \"{product_type}\" |"
|
||||||
|
f" Product names: \"{product_name}\" |"
|
||||||
|
f" Task name \"{task_name}\" |"
|
||||||
|
f" Task type \"{task_type}\" |"
|
||||||
|
f" Representation: \"{repre_name}\" (.{repre_ext})"
|
||||||
|
)
|
||||||
|
|
||||||
|
return profile
|
||||||
|
|
||||||
|
def _repre_is_valid(self, repre: dict) -> bool:
|
||||||
|
"""Validation if representation should be processed.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
repre (dict): Representation which should be checked.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: False if can't be processed else True.
|
||||||
|
"""
|
||||||
|
if repre.get("ext") not in self.supported_exts:
|
||||||
|
self.log.debug((
|
||||||
|
"Representation '{}' has unsupported extension: '{}'. Skipped."
|
||||||
|
).format(repre["name"], repre.get("ext")))
|
||||||
|
return False
|
||||||
|
|
||||||
|
if not repre.get("files"):
|
||||||
|
self.log.debug((
|
||||||
|
"Representation '{}' has empty files. Skipped."
|
||||||
|
).format(repre["name"]))
|
||||||
|
return False
|
||||||
|
|
||||||
|
if "delete" in repre.get("tags", []):
|
||||||
|
self.log.debug((
|
||||||
|
"Representation '{}' has 'delete' tag. Skipped."
|
||||||
|
).format(repre["name"]))
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _mark_original_repre_for_deletion(
|
||||||
|
self,
|
||||||
|
repre: dict,
|
||||||
|
profile: dict,
|
||||||
|
added_review: bool
|
||||||
|
):
|
||||||
|
"""If new transcoded representation created, delete old."""
|
||||||
|
if not repre.get("tags"):
|
||||||
|
repre["tags"] = []
|
||||||
|
|
||||||
|
delete_original = profile["delete_original"]
|
||||||
|
|
||||||
|
if delete_original:
|
||||||
|
if "delete" not in repre["tags"]:
|
||||||
|
repre["tags"].append("delete")
|
||||||
|
|
||||||
|
if added_review and "review" in repre["tags"]:
|
||||||
|
repre["tags"].remove("review")
|
||||||
|
|
@ -1,12 +1,83 @@
|
||||||
|
import collections
|
||||||
|
import hashlib
|
||||||
import os
|
import os
|
||||||
import tempfile
|
import tempfile
|
||||||
|
import uuid
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
import pyblish
|
import pyblish
|
||||||
|
from ayon_core.lib import get_ffmpeg_tool_args, run_subprocess
|
||||||
|
|
||||||
from ayon_core.lib import (
|
|
||||||
get_ffmpeg_tool_args,
|
def get_audio_instances(context):
|
||||||
run_subprocess
|
"""Return only instances which are having audio in families
|
||||||
|
|
||||||
|
Args:
|
||||||
|
context (pyblish.context): context of publisher
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list: list of selected instances
|
||||||
|
"""
|
||||||
|
audio_instances = []
|
||||||
|
for instance in context:
|
||||||
|
if not instance.data.get("parent_instance_id"):
|
||||||
|
continue
|
||||||
|
if (
|
||||||
|
instance.data["productType"] == "audio"
|
||||||
|
or instance.data.get("reviewAudio")
|
||||||
|
):
|
||||||
|
audio_instances.append(instance)
|
||||||
|
return audio_instances
|
||||||
|
|
||||||
|
|
||||||
|
def map_instances_by_parent_id(context):
|
||||||
|
"""Create a mapping of instances by their parent id
|
||||||
|
|
||||||
|
Args:
|
||||||
|
context (pyblish.context): context of publisher
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: mapping of instances by their parent id
|
||||||
|
"""
|
||||||
|
instances_by_parent_id = collections.defaultdict(list)
|
||||||
|
for instance in context:
|
||||||
|
parent_instance_id = instance.data.get("parent_instance_id")
|
||||||
|
if not parent_instance_id:
|
||||||
|
continue
|
||||||
|
instances_by_parent_id[parent_instance_id].append(instance)
|
||||||
|
return instances_by_parent_id
|
||||||
|
|
||||||
|
|
||||||
|
class CollectParentAudioInstanceAttribute(pyblish.api.ContextPlugin):
|
||||||
|
"""Collect audio instance attribute"""
|
||||||
|
|
||||||
|
order = pyblish.api.CollectorOrder
|
||||||
|
label = "Collect Audio Instance Attribute"
|
||||||
|
|
||||||
|
def process(self, context):
|
||||||
|
|
||||||
|
audio_instances = get_audio_instances(context)
|
||||||
|
|
||||||
|
# no need to continue if no audio instances found
|
||||||
|
if not audio_instances:
|
||||||
|
return
|
||||||
|
|
||||||
|
# create mapped instances by parent id
|
||||||
|
instances_by_parent_id = map_instances_by_parent_id(context)
|
||||||
|
|
||||||
|
# distribute audio related attribute
|
||||||
|
for audio_instance in audio_instances:
|
||||||
|
parent_instance_id = audio_instance.data["parent_instance_id"]
|
||||||
|
|
||||||
|
for sibl_instance in instances_by_parent_id[parent_instance_id]:
|
||||||
|
# exclude the same audio instance
|
||||||
|
if sibl_instance.id == audio_instance.id:
|
||||||
|
continue
|
||||||
|
self.log.info(
|
||||||
|
"Adding audio to Sibling instance: "
|
||||||
|
f"{sibl_instance.data['label']}"
|
||||||
)
|
)
|
||||||
|
sibl_instance.data["audio"] = None
|
||||||
|
|
||||||
|
|
||||||
class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
|
class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
|
||||||
|
|
@ -19,7 +90,8 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
|
||||||
|
|
||||||
order = pyblish.api.ExtractorOrder - 0.44
|
order = pyblish.api.ExtractorOrder - 0.44
|
||||||
label = "Extract OTIO Audio Tracks"
|
label = "Extract OTIO Audio Tracks"
|
||||||
hosts = ["hiero", "resolve", "flame"]
|
|
||||||
|
temp_dir_path = None
|
||||||
|
|
||||||
def process(self, context):
|
def process(self, context):
|
||||||
"""Convert otio audio track's content to audio representations
|
"""Convert otio audio track's content to audio representations
|
||||||
|
|
@ -28,13 +100,14 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
|
||||||
context (pyblish.Context): context of publisher
|
context (pyblish.Context): context of publisher
|
||||||
"""
|
"""
|
||||||
# split the long audio file to peces devided by isntances
|
# split the long audio file to peces devided by isntances
|
||||||
audio_instances = self.get_audio_instances(context)
|
audio_instances = get_audio_instances(context)
|
||||||
self.log.debug("Audio instances: {}".format(len(audio_instances)))
|
|
||||||
|
|
||||||
if len(audio_instances) < 1:
|
# no need to continue if no audio instances found
|
||||||
self.log.info("No audio instances available")
|
if not audio_instances:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
self.log.debug("Audio instances: {}".format(len(audio_instances)))
|
||||||
|
|
||||||
# get sequence
|
# get sequence
|
||||||
otio_timeline = context.data["otioTimeline"]
|
otio_timeline = context.data["otioTimeline"]
|
||||||
|
|
||||||
|
|
@ -44,8 +117,8 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
|
||||||
if not audio_inputs:
|
if not audio_inputs:
|
||||||
return
|
return
|
||||||
|
|
||||||
# temp file
|
# Convert all available audio into single file for trimming
|
||||||
audio_temp_fpath = self.create_temp_file("audio")
|
audio_temp_fpath = self.create_temp_file("timeline_audio_track")
|
||||||
|
|
||||||
# create empty audio with longest duration
|
# create empty audio with longest duration
|
||||||
empty = self.create_empty(audio_inputs)
|
empty = self.create_empty(audio_inputs)
|
||||||
|
|
@ -59,19 +132,25 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
|
||||||
# remove empty
|
# remove empty
|
||||||
os.remove(empty["mediaPath"])
|
os.remove(empty["mediaPath"])
|
||||||
|
|
||||||
|
# create mapped instances by parent id
|
||||||
|
instances_by_parent_id = map_instances_by_parent_id(context)
|
||||||
|
|
||||||
# cut instance framerange and add to representations
|
# cut instance framerange and add to representations
|
||||||
self.add_audio_to_instances(audio_temp_fpath, audio_instances)
|
self.add_audio_to_instances(
|
||||||
|
audio_temp_fpath, audio_instances, instances_by_parent_id)
|
||||||
|
|
||||||
# remove full mixed audio file
|
# remove full mixed audio file
|
||||||
os.remove(audio_temp_fpath)
|
os.remove(audio_temp_fpath)
|
||||||
|
|
||||||
def add_audio_to_instances(self, audio_file, instances):
|
def add_audio_to_instances(
|
||||||
|
self, audio_file, audio_instances, instances_by_parent_id):
|
||||||
created_files = []
|
created_files = []
|
||||||
for inst in instances:
|
for audio_instance in audio_instances:
|
||||||
name = inst.data["folderPath"]
|
folder_path = audio_instance.data["folderPath"]
|
||||||
|
file_suffix = folder_path.replace("/", "-")
|
||||||
|
|
||||||
recycling_file = [f for f in created_files if name in f]
|
recycling_file = [f for f in created_files if file_suffix in f]
|
||||||
audio_clip = inst.data["otioClip"]
|
audio_clip = audio_instance.data["otioClip"]
|
||||||
audio_range = audio_clip.range_in_parent()
|
audio_range = audio_clip.range_in_parent()
|
||||||
duration = audio_range.duration.to_frames()
|
duration = audio_range.duration.to_frames()
|
||||||
|
|
||||||
|
|
@ -84,68 +163,70 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
|
||||||
start_sec = relative_start_time.to_seconds()
|
start_sec = relative_start_time.to_seconds()
|
||||||
duration_sec = audio_range.duration.to_seconds()
|
duration_sec = audio_range.duration.to_seconds()
|
||||||
|
|
||||||
# temp audio file
|
# shot related audio file
|
||||||
audio_fpath = self.create_temp_file(name)
|
shot_audio_fpath = self.create_temp_file(file_suffix)
|
||||||
|
|
||||||
cmd = get_ffmpeg_tool_args(
|
cmd = get_ffmpeg_tool_args(
|
||||||
"ffmpeg",
|
"ffmpeg",
|
||||||
"-ss", str(start_sec),
|
"-ss", str(start_sec),
|
||||||
"-t", str(duration_sec),
|
"-t", str(duration_sec),
|
||||||
"-i", audio_file,
|
"-i", audio_file,
|
||||||
audio_fpath
|
shot_audio_fpath
|
||||||
)
|
)
|
||||||
|
|
||||||
# run subprocess
|
# run subprocess
|
||||||
self.log.debug("Executing: {}".format(" ".join(cmd)))
|
self.log.debug("Executing: {}".format(" ".join(cmd)))
|
||||||
run_subprocess(cmd, logger=self.log)
|
run_subprocess(cmd, logger=self.log)
|
||||||
else:
|
|
||||||
audio_fpath = recycling_file.pop()
|
|
||||||
|
|
||||||
if "audio" in (
|
# add generated audio file to created files for recycling
|
||||||
inst.data["families"] + [inst.data["productType"]]
|
if shot_audio_fpath not in created_files:
|
||||||
):
|
created_files.append(shot_audio_fpath)
|
||||||
|
else:
|
||||||
|
shot_audio_fpath = recycling_file.pop()
|
||||||
|
|
||||||
|
# audio file needs to be published as representation
|
||||||
|
if audio_instance.data["productType"] == "audio":
|
||||||
# create empty representation attr
|
# create empty representation attr
|
||||||
if "representations" not in inst.data:
|
if "representations" not in audio_instance.data:
|
||||||
inst.data["representations"] = []
|
audio_instance.data["representations"] = []
|
||||||
# add to representations
|
# add to representations
|
||||||
inst.data["representations"].append({
|
audio_instance.data["representations"].append({
|
||||||
"files": os.path.basename(audio_fpath),
|
"files": os.path.basename(shot_audio_fpath),
|
||||||
"name": "wav",
|
"name": "wav",
|
||||||
"ext": "wav",
|
"ext": "wav",
|
||||||
"stagingDir": os.path.dirname(audio_fpath),
|
"stagingDir": os.path.dirname(shot_audio_fpath),
|
||||||
"frameStart": 0,
|
"frameStart": 0,
|
||||||
"frameEnd": duration
|
"frameEnd": duration
|
||||||
})
|
})
|
||||||
|
|
||||||
elif "reviewAudio" in inst.data.keys():
|
# audio file needs to be reviewable too
|
||||||
audio_attr = inst.data.get("audio") or []
|
elif "reviewAudio" in audio_instance.data.keys():
|
||||||
|
audio_attr = audio_instance.data.get("audio") or []
|
||||||
audio_attr.append({
|
audio_attr.append({
|
||||||
"filename": audio_fpath,
|
"filename": shot_audio_fpath,
|
||||||
"offset": 0
|
"offset": 0
|
||||||
})
|
})
|
||||||
inst.data["audio"] = audio_attr
|
audio_instance.data["audio"] = audio_attr
|
||||||
|
|
||||||
# add generated audio file to created files for recycling
|
# Make sure if the audio instance is having siblink instances
|
||||||
if audio_fpath not in created_files:
|
# which needs audio for reviewable media so it is also added
|
||||||
created_files.append(audio_fpath)
|
# to its instance data
|
||||||
|
# Retrieve instance data from parent instance shot instance.
|
||||||
def get_audio_instances(self, context):
|
parent_instance_id = audio_instance.data["parent_instance_id"]
|
||||||
"""Return only instances which are having audio in families
|
for sibl_instance in instances_by_parent_id[parent_instance_id]:
|
||||||
|
# exclude the same audio instance
|
||||||
Args:
|
if sibl_instance.id == audio_instance.id:
|
||||||
context (pyblish.context): context of publisher
|
continue
|
||||||
|
self.log.info(
|
||||||
Returns:
|
"Adding audio to Sibling instance: "
|
||||||
list: list of selected instances
|
f"{sibl_instance.data['label']}"
|
||||||
"""
|
)
|
||||||
return [
|
audio_attr = sibl_instance.data.get("audio") or []
|
||||||
_i for _i in context
|
audio_attr.append({
|
||||||
# filter only those with audio product type or family
|
"filename": shot_audio_fpath,
|
||||||
# and also with reviewAudio data key
|
"offset": 0
|
||||||
if bool("audio" in (
|
})
|
||||||
_i.data.get("families", []) + [_i.data["productType"]])
|
sibl_instance.data["audio"] = audio_attr
|
||||||
) or _i.data.get("reviewAudio")
|
|
||||||
]
|
|
||||||
|
|
||||||
def get_audio_track_items(self, otio_timeline):
|
def get_audio_track_items(self, otio_timeline):
|
||||||
"""Get all audio clips form OTIO audio tracks
|
"""Get all audio clips form OTIO audio tracks
|
||||||
|
|
@ -321,19 +402,23 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
|
||||||
|
|
||||||
os.remove(filters_tmp_filepath)
|
os.remove(filters_tmp_filepath)
|
||||||
|
|
||||||
def create_temp_file(self, name):
|
def create_temp_file(self, file_suffix):
|
||||||
"""Create temp wav file
|
"""Create temp wav file
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
name (str): name to be used in file name
|
file_suffix (str): name to be used in file name
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
str: temp fpath
|
str: temp fpath
|
||||||
"""
|
"""
|
||||||
name = name.replace("/", "_")
|
extension = ".wav"
|
||||||
return os.path.normpath(
|
# get 8 characters
|
||||||
tempfile.mktemp(
|
hash = hashlib.md5(str(uuid.uuid4()).encode()).hexdigest()[:8]
|
||||||
prefix="pyblish_tmp_{}_".format(name),
|
file_name = f"{hash}_{file_suffix}{extension}"
|
||||||
suffix=".wav"
|
|
||||||
)
|
if not self.temp_dir_path:
|
||||||
)
|
audio_temp_dir_path = tempfile.mkdtemp(prefix="AYON_audio_")
|
||||||
|
self.temp_dir_path = Path(audio_temp_dir_path)
|
||||||
|
self.temp_dir_path.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
return (self.temp_dir_path / file_name).as_posix()
|
||||||
|
|
|
||||||
|
|
@ -130,7 +130,7 @@ class ExtractOTIOReview(
|
||||||
# NOTE it looks like it is set only in hiero integration
|
# NOTE it looks like it is set only in hiero integration
|
||||||
res_data = {"width": self.to_width, "height": self.to_height}
|
res_data = {"width": self.to_width, "height": self.to_height}
|
||||||
for key in res_data:
|
for key in res_data:
|
||||||
for meta_prefix in ("ayon.source.", "openpype.source."):
|
for meta_prefix in ("ayon.source", "openpype.source"):
|
||||||
meta_key = f"{meta_prefix}.{key}"
|
meta_key = f"{meta_prefix}.{key}"
|
||||||
value = media_metadata.get(meta_key)
|
value = media_metadata.get(meta_key)
|
||||||
if value is not None:
|
if value is not None:
|
||||||
|
|
|
||||||
|
|
@ -163,12 +163,15 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
||||||
"flame",
|
"flame",
|
||||||
"unreal",
|
"unreal",
|
||||||
"batchdelivery",
|
"batchdelivery",
|
||||||
"photoshop"
|
"photoshop",
|
||||||
|
"substancepainter",
|
||||||
]
|
]
|
||||||
|
|
||||||
settings_category = "core"
|
settings_category = "core"
|
||||||
# Supported extensions
|
# Supported extensions
|
||||||
image_exts = {"exr", "jpg", "jpeg", "png", "dpx", "tga", "tiff", "tif"}
|
image_exts = {
|
||||||
|
"exr", "jpg", "jpeg", "png", "dpx", "tga", "tiff", "tif", "psd"
|
||||||
|
}
|
||||||
video_exts = {"mov", "mp4"}
|
video_exts = {"mov", "mp4"}
|
||||||
supported_exts = image_exts | video_exts
|
supported_exts = image_exts | video_exts
|
||||||
|
|
||||||
|
|
@ -361,14 +364,14 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
||||||
if not filtered_output_defs:
|
if not filtered_output_defs:
|
||||||
self.log.debug((
|
self.log.debug((
|
||||||
"Repre: {} - All output definitions were filtered"
|
"Repre: {} - All output definitions were filtered"
|
||||||
" out by single frame filter. Skipping"
|
" out by single frame filter. Skipped."
|
||||||
).format(repre["name"]))
|
).format(repre["name"]))
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# Skip if file is not set
|
# Skip if file is not set
|
||||||
if first_input_path is None:
|
if first_input_path is None:
|
||||||
self.log.warning((
|
self.log.warning((
|
||||||
"Representation \"{}\" have empty files. Skipped."
|
"Representation \"{}\" has empty files. Skipped."
|
||||||
).format(repre["name"]))
|
).format(repre["name"]))
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
|
@ -400,6 +403,10 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
||||||
new_staging_dir,
|
new_staging_dir,
|
||||||
self.log
|
self.log
|
||||||
)
|
)
|
||||||
|
# The OIIO conversion will remap the RGBA channels just to
|
||||||
|
# `R,G,B,A` so we will pass the intermediate file to FFMPEG
|
||||||
|
# without layer name.
|
||||||
|
layer_name = ""
|
||||||
|
|
||||||
try:
|
try:
|
||||||
self._render_output_definitions(
|
self._render_output_definitions(
|
||||||
|
|
|
||||||
|
|
@ -1,8 +1,9 @@
|
||||||
import copy
|
import copy
|
||||||
|
from dataclasses import dataclass, field, fields
|
||||||
import os
|
import os
|
||||||
import subprocess
|
import subprocess
|
||||||
import tempfile
|
import tempfile
|
||||||
import re
|
from typing import Dict, Any, List, Tuple, Optional
|
||||||
|
|
||||||
import pyblish.api
|
import pyblish.api
|
||||||
from ayon_core.lib import (
|
from ayon_core.lib import (
|
||||||
|
|
@ -15,8 +16,10 @@ from ayon_core.lib import (
|
||||||
|
|
||||||
path_to_subprocess_arg,
|
path_to_subprocess_arg,
|
||||||
run_subprocess,
|
run_subprocess,
|
||||||
|
filter_profiles,
|
||||||
)
|
)
|
||||||
from ayon_core.lib.transcoding import (
|
from ayon_core.lib.transcoding import (
|
||||||
|
MissingRGBAChannelsError,
|
||||||
oiio_color_convert,
|
oiio_color_convert,
|
||||||
get_oiio_input_and_channel_args,
|
get_oiio_input_and_channel_args,
|
||||||
get_oiio_info_for_input,
|
get_oiio_info_for_input,
|
||||||
|
|
@ -25,6 +28,61 @@ from ayon_core.lib.transcoding import (
|
||||||
from ayon_core.lib.transcoding import VIDEO_EXTENSIONS, IMAGE_EXTENSIONS
|
from ayon_core.lib.transcoding import VIDEO_EXTENSIONS, IMAGE_EXTENSIONS
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ThumbnailDef:
|
||||||
|
"""
|
||||||
|
Data class representing the full configuration for selected profile
|
||||||
|
|
||||||
|
Any change of controllable fields in Settings must propagate here!
|
||||||
|
"""
|
||||||
|
integrate_thumbnail: bool = False
|
||||||
|
|
||||||
|
target_size: Dict[str, Any] = field(
|
||||||
|
default_factory=lambda: {
|
||||||
|
"type": "source",
|
||||||
|
"resize": {"width": 1920, "height": 1080},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
duration_split: float = 0.5
|
||||||
|
|
||||||
|
oiiotool_defaults: Dict[str, str] = field(
|
||||||
|
default_factory=lambda: {
|
||||||
|
"type": "colorspace",
|
||||||
|
"colorspace": "color_picking"
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
ffmpeg_args: Dict[str, List[Any]] = field(
|
||||||
|
default_factory=lambda: {"input": [], "output": []}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Background color defined as (R, G, B, A) tuple.
|
||||||
|
# Note: Use float for alpha channel (0.0 to 1.0).
|
||||||
|
background_color: Tuple[int, int, int, float] = (0, 0, 0, 0.0)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_dict(cls, data: Dict[str, Any]) -> "ThumbnailDef":
|
||||||
|
"""
|
||||||
|
Creates a ThumbnailDef instance from a dictionary, safely ignoring
|
||||||
|
any keys in the dictionary that are not fields in the dataclass.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
data (Dict[str, Any]): The dictionary containing configuration data
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
MediaConfig: A new instance of the dataclass.
|
||||||
|
"""
|
||||||
|
# Get all field names defined in the dataclass
|
||||||
|
field_names = {f.name for f in fields(cls)}
|
||||||
|
|
||||||
|
# Filter the input dictionary to include only keys matching field names
|
||||||
|
filtered_data = {k: v for k, v in data.items() if k in field_names}
|
||||||
|
|
||||||
|
# Unpack the filtered dictionary into the constructor
|
||||||
|
return cls(**filtered_data)
|
||||||
|
|
||||||
|
|
||||||
class ExtractThumbnail(pyblish.api.InstancePlugin):
|
class ExtractThumbnail(pyblish.api.InstancePlugin):
|
||||||
"""Create jpg thumbnail from sequence using ffmpeg"""
|
"""Create jpg thumbnail from sequence using ffmpeg"""
|
||||||
|
|
||||||
|
|
@ -51,30 +109,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
||||||
settings_category = "core"
|
settings_category = "core"
|
||||||
enabled = False
|
enabled = False
|
||||||
|
|
||||||
integrate_thumbnail = False
|
profiles = []
|
||||||
target_size = {
|
|
||||||
"type": "source",
|
|
||||||
"resize": {
|
|
||||||
"width": 1920,
|
|
||||||
"height": 1080
|
|
||||||
}
|
|
||||||
}
|
|
||||||
background_color = (0, 0, 0, 0.0)
|
|
||||||
duration_split = 0.5
|
|
||||||
# attribute presets from settings
|
|
||||||
oiiotool_defaults = {
|
|
||||||
"type": "colorspace",
|
|
||||||
"colorspace": "color_picking",
|
|
||||||
"display_and_view": {
|
|
||||||
"display": "default",
|
|
||||||
"view": "sRGB"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ffmpeg_args = {
|
|
||||||
"input": [],
|
|
||||||
"output": []
|
|
||||||
}
|
|
||||||
product_names = []
|
|
||||||
|
|
||||||
def process(self, instance):
|
def process(self, instance):
|
||||||
# run main process
|
# run main process
|
||||||
|
|
@ -97,6 +132,13 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
||||||
instance.data["representations"].remove(repre)
|
instance.data["representations"].remove(repre)
|
||||||
|
|
||||||
def _main_process(self, instance):
|
def _main_process(self, instance):
|
||||||
|
if not self.profiles:
|
||||||
|
self.log.debug("No profiles present for extract review thumbnail.")
|
||||||
|
return
|
||||||
|
thumbnail_def = self._get_config_from_profile(instance)
|
||||||
|
if not thumbnail_def:
|
||||||
|
return
|
||||||
|
|
||||||
product_name = instance.data["productName"]
|
product_name = instance.data["productName"]
|
||||||
instance_repres = instance.data.get("representations")
|
instance_repres = instance.data.get("representations")
|
||||||
if not instance_repres:
|
if not instance_repres:
|
||||||
|
|
@ -129,24 +171,6 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
||||||
self.log.debug("Skipping crypto passes.")
|
self.log.debug("Skipping crypto passes.")
|
||||||
return
|
return
|
||||||
|
|
||||||
# We only want to process the produces needed from settings.
|
|
||||||
def validate_string_against_patterns(input_str, patterns):
|
|
||||||
for pattern in patterns:
|
|
||||||
if re.match(pattern, input_str):
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
product_names = self.product_names
|
|
||||||
if product_names:
|
|
||||||
result = validate_string_against_patterns(
|
|
||||||
product_name, product_names
|
|
||||||
)
|
|
||||||
if not result:
|
|
||||||
self.log.debug((
|
|
||||||
"Product name \"{}\" did not match settings filters: {}"
|
|
||||||
).format(product_name, product_names))
|
|
||||||
return
|
|
||||||
|
|
||||||
# first check for any explicitly marked representations for thumbnail
|
# first check for any explicitly marked representations for thumbnail
|
||||||
explicit_repres = self._get_explicit_repres_for_thumbnail(instance)
|
explicit_repres = self._get_explicit_repres_for_thumbnail(instance)
|
||||||
if explicit_repres:
|
if explicit_repres:
|
||||||
|
|
@ -191,7 +215,8 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
||||||
)
|
)
|
||||||
file_path = self._create_frame_from_video(
|
file_path = self._create_frame_from_video(
|
||||||
video_file_path,
|
video_file_path,
|
||||||
dst_staging
|
dst_staging,
|
||||||
|
thumbnail_def
|
||||||
)
|
)
|
||||||
if file_path:
|
if file_path:
|
||||||
src_staging, input_file = os.path.split(file_path)
|
src_staging, input_file = os.path.split(file_path)
|
||||||
|
|
@ -204,7 +229,8 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
||||||
if "slate-frame" in repre.get("tags", []):
|
if "slate-frame" in repre.get("tags", []):
|
||||||
repre_files_thumb = repre_files_thumb[1:]
|
repre_files_thumb = repre_files_thumb[1:]
|
||||||
file_index = int(
|
file_index = int(
|
||||||
float(len(repre_files_thumb)) * self.duration_split)
|
float(len(repre_files_thumb)) * thumbnail_def.duration_split # noqa: E501
|
||||||
|
)
|
||||||
input_file = repre_files[file_index]
|
input_file = repre_files[file_index]
|
||||||
|
|
||||||
full_input_path = os.path.join(src_staging, input_file)
|
full_input_path = os.path.join(src_staging, input_file)
|
||||||
|
|
@ -233,7 +259,8 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
||||||
repre_thumb_created = self._create_colorspace_thumbnail(
|
repre_thumb_created = self._create_colorspace_thumbnail(
|
||||||
full_input_path,
|
full_input_path,
|
||||||
full_output_path,
|
full_output_path,
|
||||||
colorspace_data
|
colorspace_data,
|
||||||
|
thumbnail_def,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Try to use FFMPEG if OIIO is not supported or for cases when
|
# Try to use FFMPEG if OIIO is not supported or for cases when
|
||||||
|
|
@ -241,13 +268,13 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
||||||
# colorspace data
|
# colorspace data
|
||||||
if not repre_thumb_created:
|
if not repre_thumb_created:
|
||||||
repre_thumb_created = self._create_thumbnail_ffmpeg(
|
repre_thumb_created = self._create_thumbnail_ffmpeg(
|
||||||
full_input_path, full_output_path
|
full_input_path, full_output_path, thumbnail_def
|
||||||
)
|
)
|
||||||
|
|
||||||
# Skip representation and try next one if wasn't created
|
# Skip representation and try next one if wasn't created
|
||||||
if not repre_thumb_created and oiio_supported:
|
if not repre_thumb_created and oiio_supported:
|
||||||
repre_thumb_created = self._create_thumbnail_oiio(
|
repre_thumb_created = self._create_thumbnail_oiio(
|
||||||
full_input_path, full_output_path
|
full_input_path, full_output_path, thumbnail_def
|
||||||
)
|
)
|
||||||
|
|
||||||
if not repre_thumb_created:
|
if not repre_thumb_created:
|
||||||
|
|
@ -275,7 +302,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
||||||
new_repre_tags = ["thumbnail"]
|
new_repre_tags = ["thumbnail"]
|
||||||
# for workflows which needs to have thumbnails published as
|
# for workflows which needs to have thumbnails published as
|
||||||
# separate representations `delete` tag should not be added
|
# separate representations `delete` tag should not be added
|
||||||
if not self.integrate_thumbnail:
|
if not thumbnail_def.integrate_thumbnail:
|
||||||
new_repre_tags.append("delete")
|
new_repre_tags.append("delete")
|
||||||
|
|
||||||
new_repre = {
|
new_repre = {
|
||||||
|
|
@ -374,7 +401,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
||||||
|
|
||||||
return review_repres + other_repres
|
return review_repres + other_repres
|
||||||
|
|
||||||
def _is_valid_images_repre(self, repre):
|
def _is_valid_images_repre(self, repre: dict) -> bool:
|
||||||
"""Check if representation contains valid image files
|
"""Check if representation contains valid image files
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
|
|
@ -394,9 +421,10 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
||||||
|
|
||||||
def _create_colorspace_thumbnail(
|
def _create_colorspace_thumbnail(
|
||||||
self,
|
self,
|
||||||
src_path,
|
src_path: str,
|
||||||
dst_path,
|
dst_path: str,
|
||||||
colorspace_data,
|
colorspace_data: dict,
|
||||||
|
thumbnail_def: ThumbnailDef,
|
||||||
):
|
):
|
||||||
"""Create thumbnail using OIIO tool oiiotool
|
"""Create thumbnail using OIIO tool oiiotool
|
||||||
|
|
||||||
|
|
@ -409,12 +437,15 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
||||||
config (dict)
|
config (dict)
|
||||||
display (Optional[str])
|
display (Optional[str])
|
||||||
view (Optional[str])
|
view (Optional[str])
|
||||||
|
thumbnail_def (ThumbnailDefinition): Thumbnail definition.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
str: path to created thumbnail
|
str: path to created thumbnail
|
||||||
"""
|
"""
|
||||||
self.log.info("Extracting thumbnail {}".format(dst_path))
|
self.log.info(f"Extracting thumbnail {dst_path}")
|
||||||
resolution_arg = self._get_resolution_arg("oiiotool", src_path)
|
resolution_arg = self._get_resolution_args(
|
||||||
|
"oiiotool", src_path, thumbnail_def
|
||||||
|
)
|
||||||
|
|
||||||
repre_display = colorspace_data.get("display")
|
repre_display = colorspace_data.get("display")
|
||||||
repre_view = colorspace_data.get("view")
|
repre_view = colorspace_data.get("view")
|
||||||
|
|
@ -433,12 +464,13 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
||||||
)
|
)
|
||||||
# if representation doesn't have display and view then use
|
# if representation doesn't have display and view then use
|
||||||
# oiiotool_defaults
|
# oiiotool_defaults
|
||||||
elif self.oiiotool_defaults:
|
elif thumbnail_def.oiiotool_defaults:
|
||||||
oiio_default_type = self.oiiotool_defaults["type"]
|
oiiotool_defaults = thumbnail_def.oiiotool_defaults
|
||||||
|
oiio_default_type = oiiotool_defaults["type"]
|
||||||
if "colorspace" == oiio_default_type:
|
if "colorspace" == oiio_default_type:
|
||||||
oiio_default_colorspace = self.oiiotool_defaults["colorspace"]
|
oiio_default_colorspace = oiiotool_defaults["colorspace"]
|
||||||
else:
|
else:
|
||||||
display_and_view = self.oiiotool_defaults["display_and_view"]
|
display_and_view = oiiotool_defaults["display_and_view"]
|
||||||
oiio_default_display = display_and_view["display"]
|
oiio_default_display = display_and_view["display"]
|
||||||
oiio_default_view = display_and_view["view"]
|
oiio_default_view = display_and_view["view"]
|
||||||
|
|
||||||
|
|
@ -465,19 +497,34 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def _create_thumbnail_oiio(self, src_path, dst_path):
|
def _create_thumbnail_oiio(self, src_path, dst_path, thumbnail_def):
|
||||||
self.log.debug(f"Extracting thumbnail with OIIO: {dst_path}")
|
self.log.debug(f"Extracting thumbnail with OIIO: {dst_path}")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
resolution_arg = self._get_resolution_arg("oiiotool", src_path)
|
resolution_arg = self._get_resolution_args(
|
||||||
|
"oiiotool", src_path, thumbnail_def
|
||||||
|
)
|
||||||
except RuntimeError:
|
except RuntimeError:
|
||||||
self.log.warning(
|
self.log.warning(
|
||||||
"Failed to create thumbnail using oiio", exc_info=True
|
"Failed to create thumbnail using oiio", exc_info=True
|
||||||
)
|
)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
input_info = get_oiio_info_for_input(src_path, logger=self.log)
|
input_info = get_oiio_info_for_input(
|
||||||
input_arg, channels_arg = get_oiio_input_and_channel_args(input_info)
|
src_path,
|
||||||
|
logger=self.log,
|
||||||
|
verbose=False,
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
input_arg, channels_arg = get_oiio_input_and_channel_args(
|
||||||
|
input_info
|
||||||
|
)
|
||||||
|
except MissingRGBAChannelsError:
|
||||||
|
self.log.debug(
|
||||||
|
"Unable to find relevant reviewable channel for thumbnail "
|
||||||
|
"creation"
|
||||||
|
)
|
||||||
|
return False
|
||||||
oiio_cmd = get_oiio_tool_args(
|
oiio_cmd = get_oiio_tool_args(
|
||||||
"oiiotool",
|
"oiiotool",
|
||||||
input_arg, src_path,
|
input_arg, src_path,
|
||||||
|
|
@ -500,9 +547,11 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
||||||
)
|
)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def _create_thumbnail_ffmpeg(self, src_path, dst_path):
|
def _create_thumbnail_ffmpeg(self, src_path, dst_path, thumbnail_def):
|
||||||
try:
|
try:
|
||||||
resolution_arg = self._get_resolution_arg("ffmpeg", src_path)
|
resolution_arg = self._get_resolution_args(
|
||||||
|
"ffmpeg", src_path, thumbnail_def
|
||||||
|
)
|
||||||
except RuntimeError:
|
except RuntimeError:
|
||||||
self.log.warning(
|
self.log.warning(
|
||||||
"Failed to create thumbnail using ffmpeg", exc_info=True
|
"Failed to create thumbnail using ffmpeg", exc_info=True
|
||||||
|
|
@ -510,7 +559,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
ffmpeg_path_args = get_ffmpeg_tool_args("ffmpeg")
|
ffmpeg_path_args = get_ffmpeg_tool_args("ffmpeg")
|
||||||
ffmpeg_args = self.ffmpeg_args or {}
|
ffmpeg_args = thumbnail_def.ffmpeg_args or {}
|
||||||
|
|
||||||
jpeg_items = [
|
jpeg_items = [
|
||||||
subprocess.list2cmdline(ffmpeg_path_args)
|
subprocess.list2cmdline(ffmpeg_path_args)
|
||||||
|
|
@ -550,7 +599,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
||||||
)
|
)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def _create_frame_from_video(self, video_file_path, output_dir):
|
def _create_frame_from_video(
|
||||||
|
self,
|
||||||
|
video_file_path: str,
|
||||||
|
output_dir: str,
|
||||||
|
thumbnail_def: ThumbnailDef,
|
||||||
|
) -> Optional[str]:
|
||||||
"""Convert video file to one frame image via ffmpeg"""
|
"""Convert video file to one frame image via ffmpeg"""
|
||||||
# create output file path
|
# create output file path
|
||||||
base_name = os.path.basename(video_file_path)
|
base_name = os.path.basename(video_file_path)
|
||||||
|
|
@ -575,7 +629,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
||||||
seek_position = 0.0
|
seek_position = 0.0
|
||||||
# Only use timestamp calculation for videos longer than 0.1 seconds
|
# Only use timestamp calculation for videos longer than 0.1 seconds
|
||||||
if duration > 0.1:
|
if duration > 0.1:
|
||||||
seek_position = duration * self.duration_split
|
seek_position = duration * thumbnail_def.duration_split
|
||||||
|
|
||||||
# Build command args
|
# Build command args
|
||||||
cmd_args = []
|
cmd_args = []
|
||||||
|
|
@ -649,16 +703,17 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
||||||
):
|
):
|
||||||
os.remove(output_thumb_file_path)
|
os.remove(output_thumb_file_path)
|
||||||
|
|
||||||
def _get_resolution_arg(
|
def _get_resolution_args(
|
||||||
self,
|
self,
|
||||||
application,
|
application: str,
|
||||||
input_path,
|
input_path: str,
|
||||||
):
|
thumbnail_def: ThumbnailDef,
|
||||||
|
) -> list:
|
||||||
# get settings
|
# get settings
|
||||||
if self.target_size["type"] == "source":
|
if thumbnail_def.target_size["type"] == "source":
|
||||||
return []
|
return []
|
||||||
|
|
||||||
resize = self.target_size["resize"]
|
resize = thumbnail_def.target_size["resize"]
|
||||||
target_width = resize["width"]
|
target_width = resize["width"]
|
||||||
target_height = resize["height"]
|
target_height = resize["height"]
|
||||||
|
|
||||||
|
|
@ -668,6 +723,43 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
|
||||||
input_path,
|
input_path,
|
||||||
target_width,
|
target_width,
|
||||||
target_height,
|
target_height,
|
||||||
bg_color=self.background_color,
|
bg_color=thumbnail_def.background_color,
|
||||||
log=self.log
|
log=self.log
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def _get_config_from_profile(
|
||||||
|
self,
|
||||||
|
instance: pyblish.api.Instance
|
||||||
|
) -> Optional[ThumbnailDef]:
|
||||||
|
"""Returns profile if and how repre should be color transcoded."""
|
||||||
|
host_name = instance.context.data["hostName"]
|
||||||
|
product_type = instance.data["productType"]
|
||||||
|
product_name = instance.data["productName"]
|
||||||
|
task_data = instance.data["anatomyData"].get("task", {})
|
||||||
|
task_name = task_data.get("name")
|
||||||
|
task_type = task_data.get("type")
|
||||||
|
filtering_criteria = {
|
||||||
|
"host_names": host_name,
|
||||||
|
"product_types": product_type,
|
||||||
|
"product_names": product_name,
|
||||||
|
"task_names": task_name,
|
||||||
|
"task_types": task_type,
|
||||||
|
}
|
||||||
|
profile = filter_profiles(
|
||||||
|
self.profiles,
|
||||||
|
filtering_criteria,
|
||||||
|
logger=self.log
|
||||||
|
)
|
||||||
|
|
||||||
|
if not profile:
|
||||||
|
self.log.debug(
|
||||||
|
"Skipped instance. None of profiles in presets are for"
|
||||||
|
f' Host: "{host_name}"'
|
||||||
|
f' | Product types: "{product_type}"'
|
||||||
|
f' | Product names: "{product_name}"'
|
||||||
|
f' | Task name "{task_name}"'
|
||||||
|
f' | Task type "{task_type}"'
|
||||||
|
)
|
||||||
|
return None
|
||||||
|
|
||||||
|
return ThumbnailDef.from_dict(profile)
|
||||||
|
|
|
||||||
|
|
@ -14,6 +14,7 @@ Todos:
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import tempfile
|
import tempfile
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
import pyblish.api
|
import pyblish.api
|
||||||
from ayon_core.lib import (
|
from ayon_core.lib import (
|
||||||
|
|
@ -22,6 +23,7 @@ from ayon_core.lib import (
|
||||||
is_oiio_supported,
|
is_oiio_supported,
|
||||||
|
|
||||||
run_subprocess,
|
run_subprocess,
|
||||||
|
get_rescaled_command_arguments,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -31,17 +33,20 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
|
||||||
Thumbnail source must be a single image or video filepath.
|
Thumbnail source must be a single image or video filepath.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
label = "Extract Thumbnail (from source)"
|
label = "Extract Thumbnail from source"
|
||||||
# Before 'ExtractThumbnail' in global plugins
|
# Before 'ExtractThumbnail' in global plugins
|
||||||
order = pyblish.api.ExtractorOrder - 0.00001
|
order = pyblish.api.ExtractorOrder - 0.00001
|
||||||
|
|
||||||
def process(self, instance):
|
# Settings
|
||||||
|
target_size = {
|
||||||
|
"type": "resize",
|
||||||
|
"resize": {"width": 1920, "height": 1080}
|
||||||
|
}
|
||||||
|
background_color = (0, 0, 0, 0.0)
|
||||||
|
|
||||||
|
def process(self, instance: pyblish.api.Instance):
|
||||||
self._create_context_thumbnail(instance.context)
|
self._create_context_thumbnail(instance.context)
|
||||||
|
|
||||||
product_name = instance.data["productName"]
|
|
||||||
self.log.debug(
|
|
||||||
"Processing instance with product name {}".format(product_name)
|
|
||||||
)
|
|
||||||
thumbnail_source = instance.data.get("thumbnailSource")
|
thumbnail_source = instance.data.get("thumbnailSource")
|
||||||
if not thumbnail_source:
|
if not thumbnail_source:
|
||||||
self.log.debug("Thumbnail source not filled. Skipping.")
|
self.log.debug("Thumbnail source not filled. Skipping.")
|
||||||
|
|
@ -69,6 +74,8 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
|
||||||
"outputName": "thumbnail",
|
"outputName": "thumbnail",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
new_repre["tags"].append("delete")
|
||||||
|
|
||||||
# adding representation
|
# adding representation
|
||||||
self.log.debug(
|
self.log.debug(
|
||||||
"Adding thumbnail representation: {}".format(new_repre)
|
"Adding thumbnail representation: {}".format(new_repre)
|
||||||
|
|
@ -76,7 +83,11 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
|
||||||
instance.data["representations"].append(new_repre)
|
instance.data["representations"].append(new_repre)
|
||||||
instance.data["thumbnailPath"] = dst_filepath
|
instance.data["thumbnailPath"] = dst_filepath
|
||||||
|
|
||||||
def _create_thumbnail(self, context, thumbnail_source):
|
def _create_thumbnail(
|
||||||
|
self,
|
||||||
|
context: pyblish.api.Context,
|
||||||
|
thumbnail_source: str,
|
||||||
|
) -> Optional[str]:
|
||||||
if not thumbnail_source:
|
if not thumbnail_source:
|
||||||
self.log.debug("Thumbnail source not filled. Skipping.")
|
self.log.debug("Thumbnail source not filled. Skipping.")
|
||||||
return
|
return
|
||||||
|
|
@ -131,7 +142,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
|
||||||
|
|
||||||
self.log.warning("Thumbnail has not been created.")
|
self.log.warning("Thumbnail has not been created.")
|
||||||
|
|
||||||
def _instance_has_thumbnail(self, instance):
|
def _instance_has_thumbnail(self, instance: pyblish.api.Instance) -> bool:
|
||||||
if "representations" not in instance.data:
|
if "representations" not in instance.data:
|
||||||
self.log.warning(
|
self.log.warning(
|
||||||
"Instance does not have 'representations' key filled"
|
"Instance does not have 'representations' key filled"
|
||||||
|
|
@ -143,14 +154,29 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def create_thumbnail_oiio(self, src_path, dst_path):
|
def create_thumbnail_oiio(
|
||||||
|
self,
|
||||||
|
src_path: str,
|
||||||
|
dst_path: str,
|
||||||
|
) -> bool:
|
||||||
self.log.debug("Outputting thumbnail with OIIO: {}".format(dst_path))
|
self.log.debug("Outputting thumbnail with OIIO: {}".format(dst_path))
|
||||||
oiio_cmd = get_oiio_tool_args(
|
try:
|
||||||
"oiiotool",
|
resolution_args = self._get_resolution_args(
|
||||||
"-a", src_path,
|
"oiiotool", src_path
|
||||||
"--ch", "R,G,B",
|
|
||||||
"-o", dst_path
|
|
||||||
)
|
)
|
||||||
|
except Exception:
|
||||||
|
self.log.warning("Failed to get resolution args for OIIO.")
|
||||||
|
return False
|
||||||
|
|
||||||
|
oiio_cmd = get_oiio_tool_args("oiiotool", "-a", src_path)
|
||||||
|
if resolution_args:
|
||||||
|
# resize must be before -o
|
||||||
|
oiio_cmd.extend(resolution_args)
|
||||||
|
else:
|
||||||
|
# resize provides own -ch, must be only one
|
||||||
|
oiio_cmd.extend(["--ch", "R,G,B"])
|
||||||
|
|
||||||
|
oiio_cmd.extend(["-o", dst_path])
|
||||||
self.log.debug("Running: {}".format(" ".join(oiio_cmd)))
|
self.log.debug("Running: {}".format(" ".join(oiio_cmd)))
|
||||||
try:
|
try:
|
||||||
run_subprocess(oiio_cmd, logger=self.log)
|
run_subprocess(oiio_cmd, logger=self.log)
|
||||||
|
|
@ -162,7 +188,19 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
|
||||||
)
|
)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def create_thumbnail_ffmpeg(self, src_path, dst_path):
|
def create_thumbnail_ffmpeg(
|
||||||
|
self,
|
||||||
|
src_path: str,
|
||||||
|
dst_path: str,
|
||||||
|
) -> bool:
|
||||||
|
try:
|
||||||
|
resolution_args = self._get_resolution_args(
|
||||||
|
"ffmpeg", src_path
|
||||||
|
)
|
||||||
|
except Exception:
|
||||||
|
self.log.warning("Failed to get resolution args for ffmpeg.")
|
||||||
|
return False
|
||||||
|
|
||||||
max_int = str(2147483647)
|
max_int = str(2147483647)
|
||||||
ffmpeg_cmd = get_ffmpeg_tool_args(
|
ffmpeg_cmd = get_ffmpeg_tool_args(
|
||||||
"ffmpeg",
|
"ffmpeg",
|
||||||
|
|
@ -171,9 +209,13 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
|
||||||
"-probesize", max_int,
|
"-probesize", max_int,
|
||||||
"-i", src_path,
|
"-i", src_path,
|
||||||
"-frames:v", "1",
|
"-frames:v", "1",
|
||||||
dst_path
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
ffmpeg_cmd.extend(resolution_args)
|
||||||
|
|
||||||
|
# possible resize must be before output args
|
||||||
|
ffmpeg_cmd.append(dst_path)
|
||||||
|
|
||||||
self.log.debug("Running: {}".format(" ".join(ffmpeg_cmd)))
|
self.log.debug("Running: {}".format(" ".join(ffmpeg_cmd)))
|
||||||
try:
|
try:
|
||||||
run_subprocess(ffmpeg_cmd, logger=self.log)
|
run_subprocess(ffmpeg_cmd, logger=self.log)
|
||||||
|
|
@ -185,10 +227,37 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
|
||||||
)
|
)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def _create_context_thumbnail(self, context):
|
def _create_context_thumbnail(
|
||||||
|
self,
|
||||||
|
context: pyblish.api.Context,
|
||||||
|
):
|
||||||
if "thumbnailPath" in context.data:
|
if "thumbnailPath" in context.data:
|
||||||
return
|
return
|
||||||
|
|
||||||
thumbnail_source = context.data.get("thumbnailSource")
|
thumbnail_source = context.data.get("thumbnailSource")
|
||||||
thumbnail_path = self._create_thumbnail(context, thumbnail_source)
|
context.data["thumbnailPath"] = self._create_thumbnail(
|
||||||
context.data["thumbnailPath"] = thumbnail_path
|
context, thumbnail_source
|
||||||
|
)
|
||||||
|
|
||||||
|
def _get_resolution_args(
|
||||||
|
self,
|
||||||
|
application: str,
|
||||||
|
input_path: str,
|
||||||
|
) -> List[str]:
|
||||||
|
# get settings
|
||||||
|
if self.target_size["type"] == "source":
|
||||||
|
return []
|
||||||
|
|
||||||
|
resize = self.target_size["resize"]
|
||||||
|
target_width = resize["width"]
|
||||||
|
target_height = resize["height"]
|
||||||
|
|
||||||
|
# form arg string per application
|
||||||
|
return get_rescaled_command_arguments(
|
||||||
|
application,
|
||||||
|
input_path,
|
||||||
|
target_width,
|
||||||
|
target_height,
|
||||||
|
bg_color=self.background_color,
|
||||||
|
log=self.log,
|
||||||
|
)
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,8 @@
|
||||||
from operator import attrgetter
|
from operator import attrgetter
|
||||||
import dataclasses
|
import dataclasses
|
||||||
import os
|
import os
|
||||||
|
import platform
|
||||||
|
from collections import defaultdict
|
||||||
from typing import Any, Dict, List
|
from typing import Any, Dict, List
|
||||||
|
|
||||||
import pyblish.api
|
import pyblish.api
|
||||||
|
|
@ -12,10 +14,11 @@ except ImportError:
|
||||||
from ayon_core.lib import (
|
from ayon_core.lib import (
|
||||||
TextDef,
|
TextDef,
|
||||||
BoolDef,
|
BoolDef,
|
||||||
|
NumberDef,
|
||||||
UISeparatorDef,
|
UISeparatorDef,
|
||||||
UILabelDef,
|
UILabelDef,
|
||||||
EnumDef,
|
EnumDef,
|
||||||
filter_profiles
|
filter_profiles,
|
||||||
)
|
)
|
||||||
try:
|
try:
|
||||||
from ayon_core.pipeline.usdlib import (
|
from ayon_core.pipeline.usdlib import (
|
||||||
|
|
@ -24,7 +27,8 @@ try:
|
||||||
variant_nested_prim_path,
|
variant_nested_prim_path,
|
||||||
setup_asset_layer,
|
setup_asset_layer,
|
||||||
add_ordered_sublayer,
|
add_ordered_sublayer,
|
||||||
set_layer_defaults
|
set_layer_defaults,
|
||||||
|
get_standard_default_prim_name
|
||||||
)
|
)
|
||||||
except ImportError:
|
except ImportError:
|
||||||
pass
|
pass
|
||||||
|
|
@ -175,10 +179,17 @@ def get_instance_uri_path(
|
||||||
|
|
||||||
# If for whatever reason we were unable to retrieve from the context
|
# If for whatever reason we were unable to retrieve from the context
|
||||||
# then get the path from an existing database entry
|
# then get the path from an existing database entry
|
||||||
path = get_representation_path_by_names(**query)
|
path = get_representation_path_by_names(
|
||||||
|
anatomy=context.data["anatomy"],
|
||||||
|
**names
|
||||||
|
)
|
||||||
|
if not path:
|
||||||
|
raise RuntimeError(f"Unable to resolve publish path for: {names}")
|
||||||
|
|
||||||
# Ensure `None` for now is also a string
|
# Ensure `None` for now is also a string
|
||||||
path = str(path)
|
path = str(path)
|
||||||
|
if platform.system().lower() == "windows":
|
||||||
|
path = path.replace("\\", "/")
|
||||||
|
|
||||||
return path
|
return path
|
||||||
|
|
||||||
|
|
@ -269,20 +280,24 @@ class CollectUSDLayerContributions(pyblish.api.InstancePlugin,
|
||||||
# level, you can add it directly from the publisher at that particular
|
# level, you can add it directly from the publisher at that particular
|
||||||
# order. Future publishes will then see the existing contribution and will
|
# order. Future publishes will then see the existing contribution and will
|
||||||
# persist adding it to future bootstraps at that order
|
# persist adding it to future bootstraps at that order
|
||||||
contribution_layers: Dict[str, int] = {
|
contribution_layers: Dict[str, Dict[str, int]] = {
|
||||||
# asset layers
|
# asset layers
|
||||||
|
"asset": {
|
||||||
"model": 100,
|
"model": 100,
|
||||||
"assembly": 150,
|
"assembly": 150,
|
||||||
"groom": 175,
|
"groom": 175,
|
||||||
"look": 200,
|
"look": 200,
|
||||||
"rig": 300,
|
"rig": 300,
|
||||||
|
},
|
||||||
# shot layers
|
# shot layers
|
||||||
|
"shot": {
|
||||||
"layout": 200,
|
"layout": 200,
|
||||||
"animation": 300,
|
"animation": 300,
|
||||||
"simulation": 400,
|
"simulation": 400,
|
||||||
"fx": 500,
|
"fx": 500,
|
||||||
"lighting": 600,
|
"lighting": 600,
|
||||||
}
|
}
|
||||||
|
}
|
||||||
# Default profiles to set certain instance attribute defaults based on
|
# Default profiles to set certain instance attribute defaults based on
|
||||||
# profiles in settings
|
# profiles in settings
|
||||||
profiles: List[Dict[str, Any]] = []
|
profiles: List[Dict[str, Any]] = []
|
||||||
|
|
@ -296,12 +311,18 @@ class CollectUSDLayerContributions(pyblish.api.InstancePlugin,
|
||||||
|
|
||||||
cls.enabled = plugin_settings.get("enabled", cls.enabled)
|
cls.enabled = plugin_settings.get("enabled", cls.enabled)
|
||||||
|
|
||||||
# Define contribution layers via settings
|
# Define contribution layers via settings by their scope
|
||||||
contribution_layers = {}
|
contribution_layers = defaultdict(dict)
|
||||||
for entry in plugin_settings.get("contribution_layers", []):
|
for entry in plugin_settings.get("contribution_layers", []):
|
||||||
contribution_layers[entry["name"]] = int(entry["order"])
|
for scope in entry.get("scope", []):
|
||||||
|
contribution_layers[scope][entry["name"]] = int(entry["order"])
|
||||||
if contribution_layers:
|
if contribution_layers:
|
||||||
cls.contribution_layers = contribution_layers
|
cls.contribution_layers = dict(contribution_layers)
|
||||||
|
else:
|
||||||
|
cls.log.warning(
|
||||||
|
"No scoped contribution layers found in settings, falling back"
|
||||||
|
" to CollectUSDLayerContributions plug-in defaults..."
|
||||||
|
)
|
||||||
|
|
||||||
cls.profiles = plugin_settings.get("profiles", [])
|
cls.profiles = plugin_settings.get("profiles", [])
|
||||||
|
|
||||||
|
|
@ -325,10 +346,7 @@ class CollectUSDLayerContributions(pyblish.api.InstancePlugin,
|
||||||
attr_values[key] = attr_values[key].format(**data)
|
attr_values[key] = attr_values[key].format(**data)
|
||||||
|
|
||||||
# Define contribution
|
# Define contribution
|
||||||
order = self.contribution_layers.get(
|
in_layer_order: int = attr_values.get("contribution_in_layer_order", 0)
|
||||||
attr_values["contribution_layer"], 0
|
|
||||||
)
|
|
||||||
|
|
||||||
if attr_values["contribution_apply_as_variant"]:
|
if attr_values["contribution_apply_as_variant"]:
|
||||||
contribution = VariantContribution(
|
contribution = VariantContribution(
|
||||||
instance=instance,
|
instance=instance,
|
||||||
|
|
@ -337,19 +355,23 @@ class CollectUSDLayerContributions(pyblish.api.InstancePlugin,
|
||||||
variant_set_name=attr_values["contribution_variant_set_name"],
|
variant_set_name=attr_values["contribution_variant_set_name"],
|
||||||
variant_name=attr_values["contribution_variant"],
|
variant_name=attr_values["contribution_variant"],
|
||||||
variant_is_default=attr_values["contribution_variant_is_default"], # noqa: E501
|
variant_is_default=attr_values["contribution_variant_is_default"], # noqa: E501
|
||||||
order=order
|
order=in_layer_order
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
contribution = SublayerContribution(
|
contribution = SublayerContribution(
|
||||||
instance=instance,
|
instance=instance,
|
||||||
layer_id=attr_values["contribution_layer"],
|
layer_id=attr_values["contribution_layer"],
|
||||||
target_product=attr_values["contribution_target_product"],
|
target_product=attr_values["contribution_target_product"],
|
||||||
order=order
|
order=in_layer_order
|
||||||
)
|
)
|
||||||
|
|
||||||
asset_product = contribution.target_product
|
asset_product = contribution.target_product
|
||||||
layer_product = "{}_{}".format(asset_product, contribution.layer_id)
|
layer_product = "{}_{}".format(asset_product, contribution.layer_id)
|
||||||
|
|
||||||
|
scope: str = attr_values["contribution_target_product_init"]
|
||||||
|
layer_order: int = (
|
||||||
|
self.contribution_layers[scope][attr_values["contribution_layer"]]
|
||||||
|
)
|
||||||
# Layer contribution instance
|
# Layer contribution instance
|
||||||
layer_instance = self.get_or_create_instance(
|
layer_instance = self.get_or_create_instance(
|
||||||
product_name=layer_product,
|
product_name=layer_product,
|
||||||
|
|
@ -361,7 +383,7 @@ class CollectUSDLayerContributions(pyblish.api.InstancePlugin,
|
||||||
contribution
|
contribution
|
||||||
)
|
)
|
||||||
layer_instance.data["usd_layer_id"] = contribution.layer_id
|
layer_instance.data["usd_layer_id"] = contribution.layer_id
|
||||||
layer_instance.data["usd_layer_order"] = contribution.order
|
layer_instance.data["usd_layer_order"] = layer_order
|
||||||
|
|
||||||
layer_instance.data["productGroup"] = (
|
layer_instance.data["productGroup"] = (
|
||||||
instance.data.get("productGroup") or "USD Layer"
|
instance.data.get("productGroup") or "USD Layer"
|
||||||
|
|
@ -480,18 +502,18 @@ class CollectUSDLayerContributions(pyblish.api.InstancePlugin,
|
||||||
profile = {}
|
profile = {}
|
||||||
|
|
||||||
# Define defaults
|
# Define defaults
|
||||||
default_enabled = profile.get("contribution_enabled", True)
|
default_enabled: bool = profile.get("contribution_enabled", True)
|
||||||
default_contribution_layer = profile.get(
|
default_contribution_layer = profile.get(
|
||||||
"contribution_layer", None)
|
"contribution_layer", None)
|
||||||
default_apply_as_variant = profile.get(
|
default_apply_as_variant: bool = profile.get(
|
||||||
"contribution_apply_as_variant", False)
|
"contribution_apply_as_variant", False)
|
||||||
default_target_product = profile.get(
|
default_target_product: str = profile.get(
|
||||||
"contribution_target_product", "usdAsset")
|
"contribution_target_product", "usdAsset")
|
||||||
default_init_as = (
|
default_init_as: str = (
|
||||||
"asset"
|
"asset"
|
||||||
if profile.get("contribution_target_product") == "usdAsset"
|
if profile.get("contribution_target_product") == "usdAsset"
|
||||||
else "shot")
|
else "shot")
|
||||||
init_as_visible = False
|
init_as_visible = True
|
||||||
|
|
||||||
# Attributes logic
|
# Attributes logic
|
||||||
publish_attributes = instance["publish_attributes"].get(
|
publish_attributes = instance["publish_attributes"].get(
|
||||||
|
|
@ -500,6 +522,12 @@ class CollectUSDLayerContributions(pyblish.api.InstancePlugin,
|
||||||
visible = publish_attributes.get("contribution_enabled", True)
|
visible = publish_attributes.get("contribution_enabled", True)
|
||||||
variant_visible = visible and publish_attributes.get(
|
variant_visible = visible and publish_attributes.get(
|
||||||
"contribution_apply_as_variant", True)
|
"contribution_apply_as_variant", True)
|
||||||
|
init_as: str = publish_attributes.get(
|
||||||
|
"contribution_target_product_init", default_init_as)
|
||||||
|
|
||||||
|
contribution_layers = cls.contribution_layers.get(
|
||||||
|
init_as, {}
|
||||||
|
)
|
||||||
|
|
||||||
return [
|
return [
|
||||||
UISeparatorDef("usd_container_settings1"),
|
UISeparatorDef("usd_container_settings1"),
|
||||||
|
|
@ -549,9 +577,22 @@ class CollectUSDLayerContributions(pyblish.api.InstancePlugin,
|
||||||
"predefined ordering.\nA higher order (further down "
|
"predefined ordering.\nA higher order (further down "
|
||||||
"the list) will contribute as a stronger opinion."
|
"the list) will contribute as a stronger opinion."
|
||||||
),
|
),
|
||||||
items=list(cls.contribution_layers.keys()),
|
items=list(contribution_layers.keys()),
|
||||||
default=default_contribution_layer,
|
default=default_contribution_layer,
|
||||||
visible=visible),
|
visible=visible),
|
||||||
|
# TODO: We may want to make the visibility of this optional
|
||||||
|
# based on studio preference, to avoid complexity when not needed
|
||||||
|
NumberDef("contribution_in_layer_order",
|
||||||
|
label="Strength order",
|
||||||
|
tooltip=(
|
||||||
|
"The contribution inside the department layer will be "
|
||||||
|
"made with this offset applied. A higher number means "
|
||||||
|
"a stronger opinion."
|
||||||
|
),
|
||||||
|
default=0,
|
||||||
|
minimum=-99999,
|
||||||
|
maximum=99999,
|
||||||
|
visible=visible),
|
||||||
BoolDef("contribution_apply_as_variant",
|
BoolDef("contribution_apply_as_variant",
|
||||||
label="Add as variant",
|
label="Add as variant",
|
||||||
tooltip=(
|
tooltip=(
|
||||||
|
|
@ -597,7 +638,11 @@ class CollectUSDLayerContributions(pyblish.api.InstancePlugin,
|
||||||
|
|
||||||
# Update attributes if any of the following plug-in attributes
|
# Update attributes if any of the following plug-in attributes
|
||||||
# change:
|
# change:
|
||||||
keys = ["contribution_enabled", "contribution_apply_as_variant"]
|
keys = {
|
||||||
|
"contribution_enabled",
|
||||||
|
"contribution_apply_as_variant",
|
||||||
|
"contribution_target_product_init",
|
||||||
|
}
|
||||||
|
|
||||||
for instance_change in event["changes"]:
|
for instance_change in event["changes"]:
|
||||||
instance = instance_change["instance"]
|
instance = instance_change["instance"]
|
||||||
|
|
@ -637,6 +682,7 @@ class ExtractUSDLayerContribution(publish.Extractor):
|
||||||
settings_category = "core"
|
settings_category = "core"
|
||||||
|
|
||||||
use_ayon_entity_uri = False
|
use_ayon_entity_uri = False
|
||||||
|
enforce_default_prim = False
|
||||||
|
|
||||||
def process(self, instance):
|
def process(self, instance):
|
||||||
|
|
||||||
|
|
@ -647,9 +693,18 @@ class ExtractUSDLayerContribution(publish.Extractor):
|
||||||
path = get_last_publish(instance)
|
path = get_last_publish(instance)
|
||||||
if path and BUILD_INTO_LAST_VERSIONS:
|
if path and BUILD_INTO_LAST_VERSIONS:
|
||||||
sdf_layer = Sdf.Layer.OpenAsAnonymous(path)
|
sdf_layer = Sdf.Layer.OpenAsAnonymous(path)
|
||||||
|
|
||||||
|
# If enabled in settings, ignore any default prim specified on
|
||||||
|
# older publish versions and always publish with the AYON
|
||||||
|
# standard default prim
|
||||||
|
if self.enforce_default_prim:
|
||||||
|
sdf_layer.defaultPrim = get_standard_default_prim_name(
|
||||||
|
folder_path
|
||||||
|
)
|
||||||
|
|
||||||
default_prim = sdf_layer.defaultPrim
|
default_prim = sdf_layer.defaultPrim
|
||||||
else:
|
else:
|
||||||
default_prim = folder_path.rsplit("/", 1)[-1] # use folder name
|
default_prim = get_standard_default_prim_name(folder_path)
|
||||||
sdf_layer = Sdf.Layer.CreateAnonymous()
|
sdf_layer = Sdf.Layer.CreateAnonymous()
|
||||||
set_layer_defaults(sdf_layer, default_prim=default_prim)
|
set_layer_defaults(sdf_layer, default_prim=default_prim)
|
||||||
|
|
||||||
|
|
@ -710,7 +765,7 @@ class ExtractUSDLayerContribution(publish.Extractor):
|
||||||
layer=sdf_layer,
|
layer=sdf_layer,
|
||||||
contribution_path=path,
|
contribution_path=path,
|
||||||
layer_id=product_name,
|
layer_id=product_name,
|
||||||
order=None, # unordered
|
order=contribution.order,
|
||||||
add_sdf_arguments_metadata=True
|
add_sdf_arguments_metadata=True
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
|
|
@ -807,7 +862,7 @@ class ExtractUSDAssetContribution(publish.Extractor):
|
||||||
folder_path = instance.data["folderPath"]
|
folder_path = instance.data["folderPath"]
|
||||||
product_name = instance.data["productName"]
|
product_name = instance.data["productName"]
|
||||||
self.log.debug(f"Building asset: {folder_path} > {product_name}")
|
self.log.debug(f"Building asset: {folder_path} > {product_name}")
|
||||||
folder_name = folder_path.rsplit("/", 1)[-1]
|
asset_name = get_standard_default_prim_name(folder_path)
|
||||||
|
|
||||||
# Contribute layers to asset
|
# Contribute layers to asset
|
||||||
# Use existing asset and add to it, or initialize a new asset layer
|
# Use existing asset and add to it, or initialize a new asset layer
|
||||||
|
|
@ -825,8 +880,9 @@ class ExtractUSDAssetContribution(publish.Extractor):
|
||||||
# If no existing publish of this product exists then we initialize
|
# If no existing publish of this product exists then we initialize
|
||||||
# the layer as either a default asset or shot structure.
|
# the layer as either a default asset or shot structure.
|
||||||
init_type = instance.data["contribution_target_product_init"]
|
init_type = instance.data["contribution_target_product_init"]
|
||||||
|
self.log.debug("Initializing layer as type: %s", init_type)
|
||||||
asset_layer, payload_layer = self.init_layer(
|
asset_layer, payload_layer = self.init_layer(
|
||||||
asset_name=folder_name, init_type=init_type
|
asset_name=asset_name, init_type=init_type
|
||||||
)
|
)
|
||||||
|
|
||||||
# Author timeCodesPerSecond and framesPerSecond if the asset layer
|
# Author timeCodesPerSecond and framesPerSecond if the asset layer
|
||||||
|
|
@ -906,7 +962,7 @@ class ExtractUSDAssetContribution(publish.Extractor):
|
||||||
payload_layer.Export(payload_path, args={"format": "usda"})
|
payload_layer.Export(payload_path, args={"format": "usda"})
|
||||||
self.add_relative_file(instance, payload_path)
|
self.add_relative_file(instance, payload_path)
|
||||||
|
|
||||||
def init_layer(self, asset_name, init_type):
|
def init_layer(self, asset_name: str, init_type: str):
|
||||||
"""Initialize layer if no previous version exists"""
|
"""Initialize layer if no previous version exists"""
|
||||||
|
|
||||||
if init_type == "asset":
|
if init_type == "asset":
|
||||||
|
|
|
||||||
21
client/ayon_core/plugins/publish/help/upload_file.xml
Normal file
21
client/ayon_core/plugins/publish/help/upload_file.xml
Normal file
|
|
@ -0,0 +1,21 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<root>
|
||||||
|
<error id="main">
|
||||||
|
<title>{upload_type} upload timed out</title>
|
||||||
|
<description>
|
||||||
|
## {upload_type} upload failed after retries
|
||||||
|
|
||||||
|
The connection to the AYON server timed out while uploading a file.
|
||||||
|
|
||||||
|
### How to resolve?
|
||||||
|
|
||||||
|
1. Try publishing again. Intermittent network hiccups often resolve on retry.
|
||||||
|
2. Ensure your network/VPN is stable and large uploads are allowed.
|
||||||
|
3. If it keeps failing, try again later or contact your admin.
|
||||||
|
|
||||||
|
<pre>File: {file}
|
||||||
|
Error: {error}</pre>
|
||||||
|
|
||||||
|
</description>
|
||||||
|
</error>
|
||||||
|
</root>
|
||||||
|
|
@ -28,6 +28,7 @@ from ayon_core.pipeline.publish import (
|
||||||
KnownPublishError,
|
KnownPublishError,
|
||||||
get_publish_template_name,
|
get_publish_template_name,
|
||||||
)
|
)
|
||||||
|
from ayon_core.pipeline import is_product_base_type_supported
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
@ -121,12 +122,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
|
||||||
"version",
|
"version",
|
||||||
"representation",
|
"representation",
|
||||||
"username",
|
"username",
|
||||||
"user",
|
|
||||||
"output",
|
"output",
|
||||||
# OpenPype keys - should be removed
|
|
||||||
"asset", # folder[name]
|
|
||||||
"subset", # product[name]
|
|
||||||
"family", # product[type]
|
|
||||||
]
|
]
|
||||||
|
|
||||||
def process(self, instance):
|
def process(self, instance):
|
||||||
|
|
@ -368,6 +364,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
|
||||||
folder_entity = instance.data["folderEntity"]
|
folder_entity = instance.data["folderEntity"]
|
||||||
product_name = instance.data["productName"]
|
product_name = instance.data["productName"]
|
||||||
product_type = instance.data["productType"]
|
product_type = instance.data["productType"]
|
||||||
|
product_base_type = instance.data.get("productBaseType")
|
||||||
|
|
||||||
self.log.debug("Product: {}".format(product_name))
|
self.log.debug("Product: {}".format(product_name))
|
||||||
|
|
||||||
# Get existing product if it exists
|
# Get existing product if it exists
|
||||||
|
|
@ -395,15 +393,34 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
|
||||||
product_id = None
|
product_id = None
|
||||||
if existing_product_entity:
|
if existing_product_entity:
|
||||||
product_id = existing_product_entity["id"]
|
product_id = existing_product_entity["id"]
|
||||||
product_entity = new_product_entity(
|
|
||||||
product_name,
|
new_product_entity_kwargs = {
|
||||||
product_type,
|
"name": product_name,
|
||||||
folder_entity["id"],
|
"product_type": product_type,
|
||||||
data=data,
|
"folder_id": folder_entity["id"],
|
||||||
attribs=attributes,
|
"data": data,
|
||||||
entity_id=product_id
|
"attribs": attributes,
|
||||||
|
"entity_id": product_id,
|
||||||
|
"product_base_type": product_base_type,
|
||||||
|
}
|
||||||
|
|
||||||
|
if not is_product_base_type_supported():
|
||||||
|
new_product_entity_kwargs.pop("product_base_type")
|
||||||
|
if (
|
||||||
|
product_base_type is not None
|
||||||
|
and product_base_type != product_type):
|
||||||
|
self.log.warning((
|
||||||
|
"Product base type %s is not supported by the server, "
|
||||||
|
"but it's defined - and it differs from product type %s. "
|
||||||
|
"Using product base type as product type."
|
||||||
|
), product_base_type, product_type)
|
||||||
|
|
||||||
|
new_product_entity_kwargs["product_type"] = (
|
||||||
|
product_base_type
|
||||||
)
|
)
|
||||||
|
|
||||||
|
product_entity = new_product_entity(**new_product_entity_kwargs)
|
||||||
|
|
||||||
if existing_product_entity is None:
|
if existing_product_entity is None:
|
||||||
# Create a new product
|
# Create a new product
|
||||||
self.log.info(
|
self.log.info(
|
||||||
|
|
@ -458,6 +475,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
|
||||||
else:
|
else:
|
||||||
version_data[key] = value
|
version_data[key] = value
|
||||||
|
|
||||||
|
host_name = instance.context.data["hostName"]
|
||||||
|
version_data["host_name"] = host_name
|
||||||
|
|
||||||
version_entity = new_version_entity(
|
version_entity = new_version_entity(
|
||||||
version_number,
|
version_number,
|
||||||
product_entity["id"],
|
product_entity["id"],
|
||||||
|
|
@ -796,6 +816,14 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
|
||||||
if value is not None:
|
if value is not None:
|
||||||
repre_context[key] = value
|
repre_context[key] = value
|
||||||
|
|
||||||
|
# Keep only username
|
||||||
|
# NOTE This is to avoid storing all user attributes and data
|
||||||
|
# to representation
|
||||||
|
if "user" not in repre_context:
|
||||||
|
repre_context["user"] = {
|
||||||
|
"name": template_data["user"]["name"]
|
||||||
|
}
|
||||||
|
|
||||||
# Use previous representation's id if there is a name match
|
# Use previous representation's id if there is a name match
|
||||||
existing = existing_repres_by_name.get(repre["name"].lower())
|
existing = existing_repres_by_name.get(repre["name"].lower())
|
||||||
repre_id = None
|
repre_id = None
|
||||||
|
|
@ -892,8 +920,12 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
|
||||||
|
|
||||||
# Include optional data if present in
|
# Include optional data if present in
|
||||||
optionals = [
|
optionals = [
|
||||||
"frameStart", "frameEnd", "step",
|
"frameStart", "frameEnd",
|
||||||
"handleEnd", "handleStart", "sourceHashes"
|
"handleEnd", "handleStart",
|
||||||
|
"step",
|
||||||
|
"resolutionWidth", "resolutionHeight",
|
||||||
|
"pixelAspect",
|
||||||
|
"sourceHashes"
|
||||||
]
|
]
|
||||||
for key in optionals:
|
for key in optionals:
|
||||||
if key in instance.data:
|
if key in instance.data:
|
||||||
|
|
@ -917,6 +949,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
|
||||||
host_name = context.data["hostName"]
|
host_name = context.data["hostName"]
|
||||||
anatomy_data = instance.data["anatomyData"]
|
anatomy_data = instance.data["anatomyData"]
|
||||||
product_type = instance.data["productType"]
|
product_type = instance.data["productType"]
|
||||||
|
product_base_type = instance.data.get("productBaseType")
|
||||||
task_info = anatomy_data.get("task") or {}
|
task_info = anatomy_data.get("task") or {}
|
||||||
|
|
||||||
return get_publish_template_name(
|
return get_publish_template_name(
|
||||||
|
|
@ -926,7 +959,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
|
||||||
task_name=task_info.get("name"),
|
task_name=task_info.get("name"),
|
||||||
task_type=task_info.get("type"),
|
task_type=task_info.get("type"),
|
||||||
project_settings=context.data["project_settings"],
|
project_settings=context.data["project_settings"],
|
||||||
logger=self.log
|
logger=self.log,
|
||||||
|
product_base_type=product_base_type
|
||||||
)
|
)
|
||||||
|
|
||||||
def get_rootless_path(self, anatomy, path):
|
def get_rootless_path(self, anatomy, path):
|
||||||
|
|
|
||||||
|
|
@ -1,11 +1,8 @@
|
||||||
import os
|
import os
|
||||||
|
import sys
|
||||||
import copy
|
import copy
|
||||||
import errno
|
|
||||||
import itertools
|
import itertools
|
||||||
import shutil
|
import shutil
|
||||||
from concurrent.futures import ThreadPoolExecutor
|
|
||||||
|
|
||||||
from speedcopy import copyfile
|
|
||||||
|
|
||||||
import clique
|
import clique
|
||||||
import pyblish.api
|
import pyblish.api
|
||||||
|
|
@ -16,11 +13,15 @@ from ayon_api.operations import (
|
||||||
)
|
)
|
||||||
from ayon_api.utils import create_entity_id
|
from ayon_api.utils import create_entity_id
|
||||||
|
|
||||||
from ayon_core.lib import create_hard_link, source_hash
|
from ayon_core.lib import source_hash
|
||||||
from ayon_core.lib.file_transaction import wait_for_future_errors
|
from ayon_core.lib.file_transaction import (
|
||||||
|
FileTransaction,
|
||||||
|
DuplicateDestinationError,
|
||||||
|
)
|
||||||
from ayon_core.pipeline.publish import (
|
from ayon_core.pipeline.publish import (
|
||||||
get_publish_template_name,
|
get_publish_template_name,
|
||||||
OptionalPyblishPluginMixin,
|
OptionalPyblishPluginMixin,
|
||||||
|
KnownPublishError,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -81,15 +82,11 @@ class IntegrateHeroVersion(
|
||||||
db_representation_context_keys = [
|
db_representation_context_keys = [
|
||||||
"project",
|
"project",
|
||||||
"folder",
|
"folder",
|
||||||
"asset",
|
|
||||||
"hierarchy",
|
"hierarchy",
|
||||||
"task",
|
"task",
|
||||||
"product",
|
"product",
|
||||||
"subset",
|
|
||||||
"family",
|
|
||||||
"representation",
|
"representation",
|
||||||
"username",
|
"username",
|
||||||
"user",
|
|
||||||
"output"
|
"output"
|
||||||
]
|
]
|
||||||
# QUESTION/TODO this process should happen on server if crashed due to
|
# QUESTION/TODO this process should happen on server if crashed due to
|
||||||
|
|
@ -364,6 +361,14 @@ class IntegrateHeroVersion(
|
||||||
if value is not None:
|
if value is not None:
|
||||||
repre_context[key] = value
|
repre_context[key] = value
|
||||||
|
|
||||||
|
# Keep only username
|
||||||
|
# NOTE This is to avoid storing all user attributes and data
|
||||||
|
# to representation
|
||||||
|
if "user" not in repre_context:
|
||||||
|
repre_context["user"] = {
|
||||||
|
"name": anatomy_data["user"]["name"]
|
||||||
|
}
|
||||||
|
|
||||||
# Prepare new repre
|
# Prepare new repre
|
||||||
repre_entity = copy.deepcopy(repre_info["representation"])
|
repre_entity = copy.deepcopy(repre_info["representation"])
|
||||||
repre_entity.pop("id", None)
|
repre_entity.pop("id", None)
|
||||||
|
|
@ -417,19 +422,40 @@ class IntegrateHeroVersion(
|
||||||
(repre_entity, dst_paths)
|
(repre_entity, dst_paths)
|
||||||
)
|
)
|
||||||
|
|
||||||
self.path_checks = []
|
file_transactions = FileTransaction(
|
||||||
|
log=self.log,
|
||||||
# Copy(hardlink) paths of source and destination files
|
# Enforce unique transfers
|
||||||
# TODO should we *only* create hardlinks?
|
allow_queue_replacements=False
|
||||||
# TODO should we keep files for deletion until this is successful?
|
|
||||||
with ThreadPoolExecutor(max_workers=8) as executor:
|
|
||||||
futures = [
|
|
||||||
executor.submit(self.copy_file, src_path, dst_path)
|
|
||||||
for src_path, dst_path in itertools.chain(
|
|
||||||
src_to_dst_file_paths, other_file_paths_mapping
|
|
||||||
)
|
)
|
||||||
]
|
mode = FileTransaction.MODE_COPY
|
||||||
wait_for_future_errors(executor, futures)
|
if self.use_hardlinks:
|
||||||
|
mode = FileTransaction.MODE_LINK
|
||||||
|
|
||||||
|
try:
|
||||||
|
for src_path, dst_path in itertools.chain(
|
||||||
|
src_to_dst_file_paths,
|
||||||
|
other_file_paths_mapping
|
||||||
|
):
|
||||||
|
file_transactions.add(src_path, dst_path, mode=mode)
|
||||||
|
|
||||||
|
self.log.debug("Integrating source files to destination ...")
|
||||||
|
file_transactions.process()
|
||||||
|
|
||||||
|
except DuplicateDestinationError as exc:
|
||||||
|
# Raise DuplicateDestinationError as KnownPublishError
|
||||||
|
# and rollback the transactions
|
||||||
|
file_transactions.rollback()
|
||||||
|
raise KnownPublishError(exc).with_traceback(sys.exc_info()[2])
|
||||||
|
|
||||||
|
except Exception as exc:
|
||||||
|
# Rollback the transactions
|
||||||
|
file_transactions.rollback()
|
||||||
|
self.log.critical("Error when copying files", exc_info=True)
|
||||||
|
raise exc
|
||||||
|
|
||||||
|
# Finalizing can't rollback safely so no use for moving it to
|
||||||
|
# the try, except.
|
||||||
|
file_transactions.finalize()
|
||||||
|
|
||||||
# Update prepared representation etity data with files
|
# Update prepared representation etity data with files
|
||||||
# and integrate it to server.
|
# and integrate it to server.
|
||||||
|
|
@ -618,48 +644,6 @@ class IntegrateHeroVersion(
|
||||||
).format(path))
|
).format(path))
|
||||||
return path
|
return path
|
||||||
|
|
||||||
def copy_file(self, src_path, dst_path):
|
|
||||||
# TODO check drives if are the same to check if cas hardlink
|
|
||||||
dirname = os.path.dirname(dst_path)
|
|
||||||
|
|
||||||
try:
|
|
||||||
os.makedirs(dirname)
|
|
||||||
self.log.debug("Folder(s) created: \"{}\"".format(dirname))
|
|
||||||
except OSError as exc:
|
|
||||||
if exc.errno != errno.EEXIST:
|
|
||||||
self.log.error("An unexpected error occurred.", exc_info=True)
|
|
||||||
raise
|
|
||||||
|
|
||||||
self.log.debug("Folder already exists: \"{}\"".format(dirname))
|
|
||||||
|
|
||||||
if self.use_hardlinks:
|
|
||||||
# First try hardlink and copy if paths are cross drive
|
|
||||||
self.log.debug("Hardlinking file \"{}\" to \"{}\"".format(
|
|
||||||
src_path, dst_path
|
|
||||||
))
|
|
||||||
try:
|
|
||||||
create_hard_link(src_path, dst_path)
|
|
||||||
# Return when successful
|
|
||||||
return
|
|
||||||
|
|
||||||
except OSError as exc:
|
|
||||||
# re-raise exception if different than
|
|
||||||
# EXDEV - cross drive path
|
|
||||||
# EINVAL - wrong format, must be NTFS
|
|
||||||
self.log.debug(
|
|
||||||
"Hardlink failed with errno:'{}'".format(exc.errno))
|
|
||||||
if exc.errno not in [errno.EXDEV, errno.EINVAL]:
|
|
||||||
raise
|
|
||||||
|
|
||||||
self.log.debug(
|
|
||||||
"Hardlinking failed, falling back to regular copy...")
|
|
||||||
|
|
||||||
self.log.debug("Copying file \"{}\" to \"{}\"".format(
|
|
||||||
src_path, dst_path
|
|
||||||
))
|
|
||||||
|
|
||||||
copyfile(src_path, dst_path)
|
|
||||||
|
|
||||||
def version_from_representations(self, project_name, repres):
|
def version_from_representations(self, project_name, repres):
|
||||||
for repre in repres:
|
for repre in repres:
|
||||||
version = ayon_api.get_version_by_id(
|
version = ayon_api.get_version_by_id(
|
||||||
|
|
|
||||||
|
|
@ -105,7 +105,7 @@ class IntegrateInputLinksAYON(pyblish.api.ContextPlugin):
|
||||||
created links by its type
|
created links by its type
|
||||||
"""
|
"""
|
||||||
if workfile_instance is None:
|
if workfile_instance is None:
|
||||||
self.log.warning("No workfile in this publish session.")
|
self.log.debug("No workfile in this publish session.")
|
||||||
return
|
return
|
||||||
|
|
||||||
workfile_version_id = workfile_instance.data["versionEntity"]["id"]
|
workfile_version_id = workfile_instance.data["versionEntity"]["id"]
|
||||||
|
|
|
||||||
|
|
@ -62,10 +62,8 @@ class IntegrateProductGroup(pyblish.api.InstancePlugin):
|
||||||
product_type = instance.data["productType"]
|
product_type = instance.data["productType"]
|
||||||
|
|
||||||
fill_pairs = prepare_template_data({
|
fill_pairs = prepare_template_data({
|
||||||
"family": product_type,
|
|
||||||
"task": filter_criteria["tasks"],
|
"task": filter_criteria["tasks"],
|
||||||
"host": filter_criteria["hosts"],
|
"host": filter_criteria["hosts"],
|
||||||
"subset": product_name,
|
|
||||||
"product": {
|
"product": {
|
||||||
"name": product_name,
|
"name": product_name,
|
||||||
"type": product_type,
|
"type": product_type,
|
||||||
|
|
|
||||||
|
|
@ -1,11 +1,17 @@
|
||||||
import os
|
import os
|
||||||
|
import time
|
||||||
|
|
||||||
import pyblish.api
|
|
||||||
import ayon_api
|
import ayon_api
|
||||||
|
from ayon_api import TransferProgress
|
||||||
from ayon_api.server_api import RequestTypes
|
from ayon_api.server_api import RequestTypes
|
||||||
|
import pyblish.api
|
||||||
|
|
||||||
from ayon_core.lib import get_media_mime_type
|
from ayon_core.lib import get_media_mime_type, format_file_size
|
||||||
from ayon_core.pipeline.publish import get_publish_repre_path
|
from ayon_core.pipeline.publish import (
|
||||||
|
PublishXmlValidationError,
|
||||||
|
get_publish_repre_path,
|
||||||
|
)
|
||||||
|
import requests.exceptions
|
||||||
|
|
||||||
|
|
||||||
class IntegrateAYONReview(pyblish.api.InstancePlugin):
|
class IntegrateAYONReview(pyblish.api.InstancePlugin):
|
||||||
|
|
@ -44,7 +50,7 @@ class IntegrateAYONReview(pyblish.api.InstancePlugin):
|
||||||
if "webreview" not in repre_tags:
|
if "webreview" not in repre_tags:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# exclude representations with are going to be published on farm
|
# exclude representations going to be published on farm
|
||||||
if "publish_on_farm" in repre_tags:
|
if "publish_on_farm" in repre_tags:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
|
@ -75,18 +81,13 @@ class IntegrateAYONReview(pyblish.api.InstancePlugin):
|
||||||
f"/projects/{project_name}"
|
f"/projects/{project_name}"
|
||||||
f"/versions/{version_id}/reviewables{query}"
|
f"/versions/{version_id}/reviewables{query}"
|
||||||
)
|
)
|
||||||
filename = os.path.basename(repre_path)
|
|
||||||
# Upload the reviewable
|
|
||||||
self.log.info(f"Uploading reviewable '{label or filename}' ...")
|
|
||||||
|
|
||||||
headers = ayon_con.get_headers(content_type)
|
|
||||||
headers["x-file-name"] = filename
|
|
||||||
self.log.info(f"Uploading reviewable {repre_path}")
|
self.log.info(f"Uploading reviewable {repre_path}")
|
||||||
ayon_con.upload_file(
|
# Upload with retries and clear help if it keeps failing
|
||||||
|
self._upload_with_retries(
|
||||||
|
ayon_con,
|
||||||
endpoint,
|
endpoint,
|
||||||
repre_path,
|
repre_path,
|
||||||
headers=headers,
|
content_type,
|
||||||
request_type=RequestTypes.post,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def _get_review_label(self, repre, uploaded_labels):
|
def _get_review_label(self, repre, uploaded_labels):
|
||||||
|
|
@ -100,3 +101,74 @@ class IntegrateAYONReview(pyblish.api.InstancePlugin):
|
||||||
idx += 1
|
idx += 1
|
||||||
label = f"{orig_label}_{idx}"
|
label = f"{orig_label}_{idx}"
|
||||||
return label
|
return label
|
||||||
|
|
||||||
|
def _upload_with_retries(
|
||||||
|
self,
|
||||||
|
ayon_con: ayon_api.ServerAPI,
|
||||||
|
endpoint: str,
|
||||||
|
repre_path: str,
|
||||||
|
content_type: str,
|
||||||
|
):
|
||||||
|
"""Upload file with simple retries."""
|
||||||
|
filename = os.path.basename(repre_path)
|
||||||
|
|
||||||
|
headers = ayon_con.get_headers(content_type)
|
||||||
|
headers["x-file-name"] = filename
|
||||||
|
max_retries = ayon_con.get_default_max_retries()
|
||||||
|
# Retries are already implemented in 'ayon_api.upload_file'
|
||||||
|
# - added in ayon api 1.2.7
|
||||||
|
if hasattr(TransferProgress, "get_attempt"):
|
||||||
|
max_retries = 1
|
||||||
|
|
||||||
|
size = os.path.getsize(repre_path)
|
||||||
|
self.log.info(
|
||||||
|
f"Uploading '{repre_path}' (size: {format_file_size(size)})"
|
||||||
|
)
|
||||||
|
|
||||||
|
# How long to sleep before next attempt
|
||||||
|
wait_time = 1
|
||||||
|
last_error = None
|
||||||
|
for attempt in range(max_retries):
|
||||||
|
attempt += 1
|
||||||
|
start = time.time()
|
||||||
|
try:
|
||||||
|
output = ayon_con.upload_file(
|
||||||
|
endpoint,
|
||||||
|
repre_path,
|
||||||
|
headers=headers,
|
||||||
|
request_type=RequestTypes.post,
|
||||||
|
)
|
||||||
|
self.log.debug(f"Uploaded in {time.time() - start}s.")
|
||||||
|
return output
|
||||||
|
|
||||||
|
except (
|
||||||
|
requests.exceptions.Timeout,
|
||||||
|
requests.exceptions.ConnectionError
|
||||||
|
) as exc:
|
||||||
|
# Log and retry with backoff if attempts remain
|
||||||
|
if attempt >= max_retries:
|
||||||
|
last_error = exc
|
||||||
|
break
|
||||||
|
|
||||||
|
self.log.warning(
|
||||||
|
f"Review upload failed ({attempt}/{max_retries})"
|
||||||
|
f" after {time.time() - start}s."
|
||||||
|
f" Retrying in {wait_time}s...",
|
||||||
|
exc_info=True,
|
||||||
|
)
|
||||||
|
time.sleep(wait_time)
|
||||||
|
|
||||||
|
# Exhausted retries - raise a user-friendly validation error with help
|
||||||
|
raise PublishXmlValidationError(
|
||||||
|
self,
|
||||||
|
(
|
||||||
|
"Upload of reviewable timed out or failed after multiple"
|
||||||
|
" attempts. Please try publishing again."
|
||||||
|
),
|
||||||
|
formatting_data={
|
||||||
|
"upload_type": "Review",
|
||||||
|
"file": repre_path,
|
||||||
|
"error": str(last_error),
|
||||||
|
},
|
||||||
|
help_filename="upload_file.xml",
|
||||||
|
)
|
||||||
|
|
|
||||||
|
|
@ -24,11 +24,16 @@
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import collections
|
import collections
|
||||||
|
import time
|
||||||
|
|
||||||
import pyblish.api
|
|
||||||
import ayon_api
|
import ayon_api
|
||||||
from ayon_api import RequestTypes
|
from ayon_api import RequestTypes, TransferProgress
|
||||||
from ayon_api.operations import OperationsSession
|
from ayon_api.operations import OperationsSession
|
||||||
|
import pyblish.api
|
||||||
|
import requests
|
||||||
|
|
||||||
|
from ayon_core.lib import get_media_mime_type, format_file_size
|
||||||
|
from ayon_core.pipeline.publish import PublishXmlValidationError
|
||||||
|
|
||||||
|
|
||||||
InstanceFilterResult = collections.namedtuple(
|
InstanceFilterResult = collections.namedtuple(
|
||||||
|
|
@ -164,25 +169,17 @@ class IntegrateThumbnailsAYON(pyblish.api.ContextPlugin):
|
||||||
return os.path.normpath(filled_path)
|
return os.path.normpath(filled_path)
|
||||||
|
|
||||||
def _create_thumbnail(self, project_name: str, src_filepath: str) -> str:
|
def _create_thumbnail(self, project_name: str, src_filepath: str) -> str:
|
||||||
"""Upload thumbnail to AYON and return its id.
|
"""Upload thumbnail to AYON and return its id."""
|
||||||
|
mime_type = get_media_mime_type(src_filepath)
|
||||||
This is temporary fix of 'create_thumbnail' function in ayon_api to
|
|
||||||
fix jpeg mime type.
|
|
||||||
|
|
||||||
"""
|
|
||||||
mime_type = None
|
|
||||||
with open(src_filepath, "rb") as stream:
|
|
||||||
if b"\xff\xd8\xff" == stream.read(3):
|
|
||||||
mime_type = "image/jpeg"
|
|
||||||
|
|
||||||
if mime_type is None:
|
if mime_type is None:
|
||||||
return ayon_api.create_thumbnail(project_name, src_filepath)
|
return ayon_api.create_thumbnail(
|
||||||
|
project_name, src_filepath
|
||||||
|
)
|
||||||
|
|
||||||
response = ayon_api.upload_file(
|
response = self._upload_with_retries(
|
||||||
f"projects/{project_name}/thumbnails",
|
f"projects/{project_name}/thumbnails",
|
||||||
src_filepath,
|
src_filepath,
|
||||||
request_type=RequestTypes.post,
|
mime_type,
|
||||||
headers={"Content-Type": mime_type},
|
|
||||||
)
|
)
|
||||||
response.raise_for_status()
|
response.raise_for_status()
|
||||||
return response.json()["id"]
|
return response.json()["id"]
|
||||||
|
|
@ -248,3 +245,71 @@ class IntegrateThumbnailsAYON(pyblish.api.ContextPlugin):
|
||||||
or instance.data.get("name")
|
or instance.data.get("name")
|
||||||
or "N/A"
|
or "N/A"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def _upload_with_retries(
|
||||||
|
self,
|
||||||
|
endpoint: str,
|
||||||
|
repre_path: str,
|
||||||
|
content_type: str,
|
||||||
|
):
|
||||||
|
"""Upload file with simple retries."""
|
||||||
|
ayon_con = ayon_api.get_server_api_connection()
|
||||||
|
headers = ayon_con.get_headers(content_type)
|
||||||
|
max_retries = ayon_con.get_default_max_retries()
|
||||||
|
# Retries are already implemented in 'ayon_api.upload_file'
|
||||||
|
# - added in ayon api 1.2.7
|
||||||
|
if hasattr(TransferProgress, "get_attempt"):
|
||||||
|
max_retries = 1
|
||||||
|
|
||||||
|
size = os.path.getsize(repre_path)
|
||||||
|
self.log.info(
|
||||||
|
f"Uploading '{repre_path}' (size: {format_file_size(size)})"
|
||||||
|
)
|
||||||
|
|
||||||
|
# How long to sleep before next attempt
|
||||||
|
wait_time = 1
|
||||||
|
last_error = None
|
||||||
|
for attempt in range(max_retries):
|
||||||
|
attempt += 1
|
||||||
|
start = time.time()
|
||||||
|
try:
|
||||||
|
output = ayon_con.upload_file(
|
||||||
|
endpoint,
|
||||||
|
repre_path,
|
||||||
|
headers=headers,
|
||||||
|
request_type=RequestTypes.post,
|
||||||
|
)
|
||||||
|
self.log.debug(f"Uploaded in {time.time() - start}s.")
|
||||||
|
return output
|
||||||
|
|
||||||
|
except (
|
||||||
|
requests.exceptions.Timeout,
|
||||||
|
requests.exceptions.ConnectionError
|
||||||
|
) as exc:
|
||||||
|
# Log and retry with backoff if attempts remain
|
||||||
|
if attempt >= max_retries:
|
||||||
|
last_error = exc
|
||||||
|
break
|
||||||
|
|
||||||
|
self.log.warning(
|
||||||
|
f"Review upload failed ({attempt}/{max_retries})"
|
||||||
|
f" after {time.time() - start}s."
|
||||||
|
f" Retrying in {wait_time}s...",
|
||||||
|
exc_info=True,
|
||||||
|
)
|
||||||
|
time.sleep(wait_time)
|
||||||
|
|
||||||
|
# Exhausted retries - raise a user-friendly validation error with help
|
||||||
|
raise PublishXmlValidationError(
|
||||||
|
self,
|
||||||
|
(
|
||||||
|
"Upload of thumbnail timed out or failed after multiple"
|
||||||
|
" attempts. Please try publishing again."
|
||||||
|
),
|
||||||
|
formatting_data={
|
||||||
|
"upload_type": "Thumbnail",
|
||||||
|
"file": repre_path,
|
||||||
|
"error": str(last_error),
|
||||||
|
},
|
||||||
|
help_filename="upload_file.xml",
|
||||||
|
)
|
||||||
|
|
|
||||||
|
|
@ -969,12 +969,6 @@ SearchItemDisplayWidget #ValueWidget {
|
||||||
background: {color:bg-buttons};
|
background: {color:bg-buttons};
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Subset Manager */
|
|
||||||
#SubsetManagerDetailsText {}
|
|
||||||
#SubsetManagerDetailsText[state="invalid"] {
|
|
||||||
border: 1px solid #ff0000;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Creator */
|
/* Creator */
|
||||||
#CreatorsView::item {
|
#CreatorsView::item {
|
||||||
padding: 1px 5px;
|
padding: 1px 5px;
|
||||||
|
|
|
||||||
|
|
@ -56,6 +56,7 @@ class AttributeDefinitionsDialog(QtWidgets.QDialog):
|
||||||
btns_layout.addWidget(cancel_btn, 0)
|
btns_layout.addWidget(cancel_btn, 0)
|
||||||
|
|
||||||
main_layout = QtWidgets.QVBoxLayout(self)
|
main_layout = QtWidgets.QVBoxLayout(self)
|
||||||
|
main_layout.setContentsMargins(10, 10, 10, 10)
|
||||||
main_layout.addWidget(attrs_widget, 0)
|
main_layout.addWidget(attrs_widget, 0)
|
||||||
main_layout.addStretch(1)
|
main_layout.addStretch(1)
|
||||||
main_layout.addWidget(btns_widget, 0)
|
main_layout.addWidget(btns_widget, 0)
|
||||||
|
|
|
||||||
|
|
@ -182,6 +182,7 @@ class AttributeDefinitionsWidget(QtWidgets.QWidget):
|
||||||
layout.deleteLater()
|
layout.deleteLater()
|
||||||
|
|
||||||
new_layout = QtWidgets.QGridLayout()
|
new_layout = QtWidgets.QGridLayout()
|
||||||
|
new_layout.setContentsMargins(0, 0, 0, 0)
|
||||||
new_layout.setColumnStretch(0, 0)
|
new_layout.setColumnStretch(0, 0)
|
||||||
new_layout.setColumnStretch(1, 1)
|
new_layout.setColumnStretch(1, 1)
|
||||||
self.setLayout(new_layout)
|
self.setLayout(new_layout)
|
||||||
|
|
@ -210,12 +211,8 @@ class AttributeDefinitionsWidget(QtWidgets.QWidget):
|
||||||
if not attr_def.visible:
|
if not attr_def.visible:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
col_num = 0
|
||||||
expand_cols = 2
|
expand_cols = 2
|
||||||
if attr_def.is_value_def and attr_def.is_label_horizontal:
|
|
||||||
expand_cols = 1
|
|
||||||
|
|
||||||
col_num = 2 - expand_cols
|
|
||||||
|
|
||||||
if attr_def.is_value_def and attr_def.label:
|
if attr_def.is_value_def and attr_def.label:
|
||||||
label_widget = AttributeDefinitionsLabel(
|
label_widget = AttributeDefinitionsLabel(
|
||||||
attr_def.id, attr_def.label, self
|
attr_def.id, attr_def.label, self
|
||||||
|
|
@ -233,9 +230,12 @@ class AttributeDefinitionsWidget(QtWidgets.QWidget):
|
||||||
| QtCore.Qt.AlignVCenter
|
| QtCore.Qt.AlignVCenter
|
||||||
)
|
)
|
||||||
layout.addWidget(
|
layout.addWidget(
|
||||||
label_widget, row, 0, 1, expand_cols
|
label_widget, row, col_num, 1, 1
|
||||||
)
|
)
|
||||||
if not attr_def.is_label_horizontal:
|
if attr_def.is_label_horizontal:
|
||||||
|
col_num += 1
|
||||||
|
expand_cols = 1
|
||||||
|
else:
|
||||||
row += 1
|
row += 1
|
||||||
|
|
||||||
if attr_def.is_value_def:
|
if attr_def.is_value_def:
|
||||||
|
|
|
||||||
|
|
@ -1,11 +1,13 @@
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
import contextlib
|
import contextlib
|
||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
from typing import Any, Optional
|
from typing import Any, Optional
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
|
|
||||||
import ayon_api
|
import ayon_api
|
||||||
|
from ayon_api.graphql_queries import projects_graphql_query
|
||||||
|
|
||||||
from ayon_core.style import get_default_entity_icon_color
|
from ayon_core.style import get_default_entity_icon_color
|
||||||
from ayon_core.lib import CacheItem, NestedCacheItem
|
from ayon_core.lib import CacheItem, NestedCacheItem
|
||||||
|
|
@ -275,7 +277,7 @@ class ProductTypeIconMapping:
|
||||||
return self._definitions_by_name
|
return self._definitions_by_name
|
||||||
|
|
||||||
|
|
||||||
def _get_project_items_from_entitiy(
|
def _get_project_items_from_entity(
|
||||||
projects: list[dict[str, Any]]
|
projects: list[dict[str, Any]]
|
||||||
) -> list[ProjectItem]:
|
) -> list[ProjectItem]:
|
||||||
"""
|
"""
|
||||||
|
|
@ -290,6 +292,7 @@ def _get_project_items_from_entitiy(
|
||||||
return [
|
return [
|
||||||
ProjectItem.from_entity(project)
|
ProjectItem.from_entity(project)
|
||||||
for project in projects
|
for project in projects
|
||||||
|
if project["active"]
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -538,8 +541,32 @@ class ProjectsModel(object):
|
||||||
self._projects_cache.update_data(project_items)
|
self._projects_cache.update_data(project_items)
|
||||||
return self._projects_cache.get_data()
|
return self._projects_cache.get_data()
|
||||||
|
|
||||||
|
def _fetch_graphql_projects(self) -> list[dict[str, Any]]:
|
||||||
|
"""Fetch projects using GraphQl.
|
||||||
|
|
||||||
|
This method was added because ayon_api had a bug in 'get_projects'.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list[dict[str, Any]]: List of projects.
|
||||||
|
|
||||||
|
"""
|
||||||
|
api = ayon_api.get_server_api_connection()
|
||||||
|
query = projects_graphql_query({"name", "active", "library", "data"})
|
||||||
|
|
||||||
|
projects = []
|
||||||
|
for parsed_data in query.continuous_query(api):
|
||||||
|
for project in parsed_data["projects"]:
|
||||||
|
project_data = project["data"]
|
||||||
|
if project_data is None:
|
||||||
|
project["data"] = {}
|
||||||
|
elif isinstance(project_data, str):
|
||||||
|
project["data"] = json.loads(project_data)
|
||||||
|
projects.append(project)
|
||||||
|
return projects
|
||||||
|
|
||||||
def _query_projects(self) -> list[ProjectItem]:
|
def _query_projects(self) -> list[ProjectItem]:
|
||||||
projects = ayon_api.get_projects(fields=["name", "active", "library"])
|
projects = self._fetch_graphql_projects()
|
||||||
|
|
||||||
user = ayon_api.get_user()
|
user = ayon_api.get_user()
|
||||||
pinned_projects = (
|
pinned_projects = (
|
||||||
user
|
user
|
||||||
|
|
@ -548,7 +575,7 @@ class ProjectsModel(object):
|
||||||
.get("pinnedProjects")
|
.get("pinnedProjects")
|
||||||
) or []
|
) or []
|
||||||
pinned_projects = set(pinned_projects)
|
pinned_projects = set(pinned_projects)
|
||||||
project_items = _get_project_items_from_entitiy(list(projects))
|
project_items = _get_project_items_from_entity(list(projects))
|
||||||
for project in project_items:
|
for project in project_items:
|
||||||
project.is_pinned = project.name in pinned_projects
|
project.is_pinned = project.name in pinned_projects
|
||||||
return project_items
|
return project_items
|
||||||
|
|
|
||||||
|
|
@ -1,10 +1,13 @@
|
||||||
import json
|
import json
|
||||||
import collections
|
import collections
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
import ayon_api
|
import ayon_api
|
||||||
from ayon_api.graphql import FIELD_VALUE, GraphQlQuery, fields_to_dict
|
from ayon_api.graphql import FIELD_VALUE, GraphQlQuery, fields_to_dict
|
||||||
|
|
||||||
from ayon_core.lib import NestedCacheItem
|
from ayon_core.lib import NestedCacheItem, get_ayon_username
|
||||||
|
|
||||||
|
NOT_SET = object()
|
||||||
|
|
||||||
|
|
||||||
# --- Implementation that should be in ayon-python-api ---
|
# --- Implementation that should be in ayon-python-api ---
|
||||||
|
|
@ -105,9 +108,18 @@ class UserItem:
|
||||||
|
|
||||||
class UsersModel:
|
class UsersModel:
|
||||||
def __init__(self, controller):
|
def __init__(self, controller):
|
||||||
|
self._current_username = NOT_SET
|
||||||
self._controller = controller
|
self._controller = controller
|
||||||
self._users_cache = NestedCacheItem(default_factory=list)
|
self._users_cache = NestedCacheItem(default_factory=list)
|
||||||
|
|
||||||
|
def get_current_username(self) -> Optional[str]:
|
||||||
|
if self._current_username is NOT_SET:
|
||||||
|
self._current_username = get_ayon_username()
|
||||||
|
return self._current_username
|
||||||
|
|
||||||
|
def reset(self) -> None:
|
||||||
|
self._users_cache.reset()
|
||||||
|
|
||||||
def get_user_items(self, project_name):
|
def get_user_items(self, project_name):
|
||||||
"""Get user items.
|
"""Get user items.
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,8 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
from dataclasses import dataclass, field
|
from dataclasses import dataclass, field
|
||||||
from typing import List, Dict, Optional
|
from typing import Optional
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
|
|
@ -13,8 +15,8 @@ class TabItem:
|
||||||
class InterpreterConfig:
|
class InterpreterConfig:
|
||||||
width: Optional[int]
|
width: Optional[int]
|
||||||
height: Optional[int]
|
height: Optional[int]
|
||||||
splitter_sizes: List[int] = field(default_factory=list)
|
splitter_sizes: list[int] = field(default_factory=list)
|
||||||
tabs: List[TabItem] = field(default_factory=list)
|
tabs: list[TabItem] = field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
class AbstractInterpreterController(ABC):
|
class AbstractInterpreterController(ABC):
|
||||||
|
|
@ -27,7 +29,7 @@ class AbstractInterpreterController(ABC):
|
||||||
self,
|
self,
|
||||||
width: int,
|
width: int,
|
||||||
height: int,
|
height: int,
|
||||||
splitter_sizes: List[int],
|
splitter_sizes: list[int],
|
||||||
tabs: List[Dict[str, str]],
|
tabs: list[dict[str, str]],
|
||||||
):
|
) -> None:
|
||||||
pass
|
pass
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,5 @@
|
||||||
from typing import List, Dict
|
from __future__ import annotations
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
from ayon_core.lib import JSONSettingRegistry
|
from ayon_core.lib import JSONSettingRegistry
|
||||||
from ayon_core.lib.local_settings import get_launcher_local_dir
|
from ayon_core.lib.local_settings import get_launcher_local_dir
|
||||||
|
|
@ -11,13 +12,15 @@ from .abstract import (
|
||||||
|
|
||||||
|
|
||||||
class InterpreterController(AbstractInterpreterController):
|
class InterpreterController(AbstractInterpreterController):
|
||||||
def __init__(self):
|
def __init__(self, name: Optional[str] = None) -> None:
|
||||||
|
if name is None:
|
||||||
|
name = "python_interpreter_tool"
|
||||||
self._registry = JSONSettingRegistry(
|
self._registry = JSONSettingRegistry(
|
||||||
"python_interpreter_tool",
|
name,
|
||||||
get_launcher_local_dir(),
|
get_launcher_local_dir(),
|
||||||
)
|
)
|
||||||
|
|
||||||
def get_config(self):
|
def get_config(self) -> InterpreterConfig:
|
||||||
width = None
|
width = None
|
||||||
height = None
|
height = None
|
||||||
splitter_sizes = []
|
splitter_sizes = []
|
||||||
|
|
@ -54,9 +57,9 @@ class InterpreterController(AbstractInterpreterController):
|
||||||
self,
|
self,
|
||||||
width: int,
|
width: int,
|
||||||
height: int,
|
height: int,
|
||||||
splitter_sizes: List[int],
|
splitter_sizes: list[int],
|
||||||
tabs: List[Dict[str, str]],
|
tabs: list[dict[str, str]],
|
||||||
):
|
) -> None:
|
||||||
self._registry.set_item("width", width)
|
self._registry.set_item("width", width)
|
||||||
self._registry.set_item("height", height)
|
self._registry.set_item("height", height)
|
||||||
self._registry.set_item("splitter_sizes", splitter_sizes)
|
self._registry.set_item("splitter_sizes", splitter_sizes)
|
||||||
|
|
|
||||||
|
|
@ -1,42 +1,42 @@
|
||||||
import os
|
|
||||||
import sys
|
import sys
|
||||||
import collections
|
import collections
|
||||||
|
|
||||||
|
|
||||||
|
class _CustomSTD:
|
||||||
|
def __init__(self, orig_std, write_callback):
|
||||||
|
self.orig_std = orig_std
|
||||||
|
self._valid_orig = bool(orig_std)
|
||||||
|
self._write_callback = write_callback
|
||||||
|
|
||||||
|
def __getattr__(self, attr):
|
||||||
|
return getattr(self.orig_std, attr)
|
||||||
|
|
||||||
|
def __setattr__(self, key, value):
|
||||||
|
if key in ("orig_std", "_valid_orig", "_write_callback"):
|
||||||
|
super().__setattr__(key, value)
|
||||||
|
else:
|
||||||
|
setattr(self.orig_std, key, value)
|
||||||
|
|
||||||
|
def write(self, text):
|
||||||
|
if self._valid_orig:
|
||||||
|
self.orig_std.write(text)
|
||||||
|
self._write_callback(text)
|
||||||
|
|
||||||
|
|
||||||
class StdOEWrap:
|
class StdOEWrap:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self._origin_stdout_write = None
|
|
||||||
self._origin_stderr_write = None
|
|
||||||
self._listening = False
|
|
||||||
self.lines = collections.deque()
|
self.lines = collections.deque()
|
||||||
|
|
||||||
if not sys.stdout:
|
|
||||||
sys.stdout = open(os.devnull, "w")
|
|
||||||
|
|
||||||
if not sys.stderr:
|
|
||||||
sys.stderr = open(os.devnull, "w")
|
|
||||||
|
|
||||||
if self._origin_stdout_write is None:
|
|
||||||
self._origin_stdout_write = sys.stdout.write
|
|
||||||
|
|
||||||
if self._origin_stderr_write is None:
|
|
||||||
self._origin_stderr_write = sys.stderr.write
|
|
||||||
|
|
||||||
self._listening = True
|
self._listening = True
|
||||||
sys.stdout.write = self._stdout_listener
|
|
||||||
sys.stderr.write = self._stderr_listener
|
self._stdout_wrap = _CustomSTD(sys.stdout, self._listener)
|
||||||
|
self._stderr_wrap = _CustomSTD(sys.stderr, self._listener)
|
||||||
|
|
||||||
|
sys.stdout = self._stdout_wrap
|
||||||
|
sys.stderr = self._stderr_wrap
|
||||||
|
|
||||||
def stop_listen(self):
|
def stop_listen(self):
|
||||||
self._listening = False
|
self._listening = False
|
||||||
|
|
||||||
def _stdout_listener(self, text):
|
def _listener(self, text):
|
||||||
if self._listening:
|
if self._listening:
|
||||||
self.lines.append(text)
|
self.lines.append(text)
|
||||||
if self._origin_stdout_write is not None:
|
|
||||||
self._origin_stdout_write(text)
|
|
||||||
|
|
||||||
def _stderr_listener(self, text):
|
|
||||||
if self._listening:
|
|
||||||
self.lines.append(text)
|
|
||||||
if self._origin_stderr_write is not None:
|
|
||||||
self._origin_stderr_write(text)
|
|
||||||
|
|
|
||||||
|
|
@ -1,10 +1,14 @@
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from ayon_core.lib import Logger, get_ayon_username
|
from ayon_core.lib import Logger
|
||||||
from ayon_core.lib.events import QueuedEventSystem
|
from ayon_core.lib.events import QueuedEventSystem
|
||||||
from ayon_core.addon import AddonsManager
|
from ayon_core.addon import AddonsManager
|
||||||
from ayon_core.settings import get_project_settings, get_studio_settings
|
from ayon_core.settings import get_project_settings, get_studio_settings
|
||||||
from ayon_core.tools.common_models import ProjectsModel, HierarchyModel
|
from ayon_core.tools.common_models import (
|
||||||
|
ProjectsModel,
|
||||||
|
HierarchyModel,
|
||||||
|
UsersModel,
|
||||||
|
)
|
||||||
|
|
||||||
from .abstract import (
|
from .abstract import (
|
||||||
AbstractLauncherFrontEnd,
|
AbstractLauncherFrontEnd,
|
||||||
|
|
@ -30,13 +34,12 @@ class BaseLauncherController(
|
||||||
|
|
||||||
self._addons_manager = None
|
self._addons_manager = None
|
||||||
|
|
||||||
self._username = NOT_SET
|
|
||||||
|
|
||||||
self._selection_model = LauncherSelectionModel(self)
|
self._selection_model = LauncherSelectionModel(self)
|
||||||
self._projects_model = ProjectsModel(self)
|
self._projects_model = ProjectsModel(self)
|
||||||
self._hierarchy_model = HierarchyModel(self)
|
self._hierarchy_model = HierarchyModel(self)
|
||||||
self._actions_model = ActionsModel(self)
|
self._actions_model = ActionsModel(self)
|
||||||
self._workfiles_model = WorkfilesModel(self)
|
self._workfiles_model = WorkfilesModel(self)
|
||||||
|
self._users_model = UsersModel(self)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def log(self):
|
def log(self):
|
||||||
|
|
@ -209,6 +212,7 @@ class BaseLauncherController(
|
||||||
|
|
||||||
self._projects_model.reset()
|
self._projects_model.reset()
|
||||||
self._hierarchy_model.reset()
|
self._hierarchy_model.reset()
|
||||||
|
self._users_model.reset()
|
||||||
|
|
||||||
self._actions_model.refresh()
|
self._actions_model.refresh()
|
||||||
self._projects_model.refresh()
|
self._projects_model.refresh()
|
||||||
|
|
@ -229,8 +233,10 @@ class BaseLauncherController(
|
||||||
|
|
||||||
self._emit_event("controller.refresh.actions.finished")
|
self._emit_event("controller.refresh.actions.finished")
|
||||||
|
|
||||||
def get_my_tasks_entity_ids(self, project_name: str):
|
def get_my_tasks_entity_ids(
|
||||||
username = self._get_my_username()
|
self, project_name: str
|
||||||
|
) -> dict[str, list[str]]:
|
||||||
|
username = self._users_model.get_current_username()
|
||||||
assignees = []
|
assignees = []
|
||||||
if username:
|
if username:
|
||||||
assignees.append(username)
|
assignees.append(username)
|
||||||
|
|
@ -238,10 +244,5 @@ class BaseLauncherController(
|
||||||
project_name, assignees
|
project_name, assignees
|
||||||
)
|
)
|
||||||
|
|
||||||
def _get_my_username(self):
|
|
||||||
if self._username is NOT_SET:
|
|
||||||
self._username = get_ayon_username()
|
|
||||||
return self._username
|
|
||||||
|
|
||||||
def _emit_event(self, topic, data=None):
|
def _emit_event(self, topic, data=None):
|
||||||
self.emit_event(topic, data, "controller")
|
self.emit_event(topic, data, "controller")
|
||||||
|
|
|
||||||
|
|
@ -1,22 +1,12 @@
|
||||||
import time
|
import time
|
||||||
import uuid
|
|
||||||
import collections
|
import collections
|
||||||
|
|
||||||
from qtpy import QtWidgets, QtCore, QtGui
|
from qtpy import QtWidgets, QtCore, QtGui
|
||||||
|
|
||||||
from ayon_core.lib import Logger
|
from ayon_core.lib import Logger
|
||||||
from ayon_core.lib.attribute_definitions import (
|
from ayon_core.pipeline.actions import webaction_fields_to_attribute_defs
|
||||||
UILabelDef,
|
|
||||||
EnumDef,
|
|
||||||
TextDef,
|
|
||||||
BoolDef,
|
|
||||||
NumberDef,
|
|
||||||
HiddenDef,
|
|
||||||
)
|
|
||||||
from ayon_core.tools.flickcharm import FlickCharm
|
from ayon_core.tools.flickcharm import FlickCharm
|
||||||
from ayon_core.tools.utils import (
|
from ayon_core.tools.utils import get_qt_icon
|
||||||
get_qt_icon,
|
|
||||||
)
|
|
||||||
from ayon_core.tools.attribute_defs import AttributeDefinitionsDialog
|
from ayon_core.tools.attribute_defs import AttributeDefinitionsDialog
|
||||||
from ayon_core.tools.launcher.abstract import WebactionContext
|
from ayon_core.tools.launcher.abstract import WebactionContext
|
||||||
|
|
||||||
|
|
@ -1173,74 +1163,7 @@ class ActionsWidget(QtWidgets.QWidget):
|
||||||
float - 'label', 'value', 'placeholder', 'min', 'max'
|
float - 'label', 'value', 'placeholder', 'min', 'max'
|
||||||
|
|
||||||
"""
|
"""
|
||||||
attr_defs = []
|
attr_defs = webaction_fields_to_attribute_defs(config_fields)
|
||||||
for config_field in config_fields:
|
|
||||||
field_type = config_field["type"]
|
|
||||||
attr_def = None
|
|
||||||
if field_type == "label":
|
|
||||||
label = config_field.get("value")
|
|
||||||
if label is None:
|
|
||||||
label = config_field.get("text")
|
|
||||||
attr_def = UILabelDef(
|
|
||||||
label, key=uuid.uuid4().hex
|
|
||||||
)
|
|
||||||
elif field_type == "boolean":
|
|
||||||
value = config_field["value"]
|
|
||||||
if isinstance(value, str):
|
|
||||||
value = value.lower() == "true"
|
|
||||||
|
|
||||||
attr_def = BoolDef(
|
|
||||||
config_field["name"],
|
|
||||||
default=value,
|
|
||||||
label=config_field.get("label"),
|
|
||||||
)
|
|
||||||
elif field_type == "text":
|
|
||||||
attr_def = TextDef(
|
|
||||||
config_field["name"],
|
|
||||||
default=config_field.get("value"),
|
|
||||||
label=config_field.get("label"),
|
|
||||||
placeholder=config_field.get("placeholder"),
|
|
||||||
multiline=config_field.get("multiline", False),
|
|
||||||
regex=config_field.get("regex"),
|
|
||||||
# syntax=config_field["syntax"],
|
|
||||||
)
|
|
||||||
elif field_type in ("integer", "float"):
|
|
||||||
value = config_field.get("value")
|
|
||||||
if isinstance(value, str):
|
|
||||||
if field_type == "integer":
|
|
||||||
value = int(value)
|
|
||||||
else:
|
|
||||||
value = float(value)
|
|
||||||
attr_def = NumberDef(
|
|
||||||
config_field["name"],
|
|
||||||
default=value,
|
|
||||||
label=config_field.get("label"),
|
|
||||||
decimals=0 if field_type == "integer" else 5,
|
|
||||||
# placeholder=config_field.get("placeholder"),
|
|
||||||
minimum=config_field.get("min"),
|
|
||||||
maximum=config_field.get("max"),
|
|
||||||
)
|
|
||||||
elif field_type in ("select", "multiselect"):
|
|
||||||
attr_def = EnumDef(
|
|
||||||
config_field["name"],
|
|
||||||
items=config_field["options"],
|
|
||||||
default=config_field.get("value"),
|
|
||||||
label=config_field.get("label"),
|
|
||||||
multiselection=field_type == "multiselect",
|
|
||||||
)
|
|
||||||
elif field_type == "hidden":
|
|
||||||
attr_def = HiddenDef(
|
|
||||||
config_field["name"],
|
|
||||||
default=config_field.get("value"),
|
|
||||||
)
|
|
||||||
|
|
||||||
if attr_def is None:
|
|
||||||
print(f"Unknown config field type: {field_type}")
|
|
||||||
attr_def = UILabelDef(
|
|
||||||
f"Unknown field type '{field_type}",
|
|
||||||
key=uuid.uuid4().hex
|
|
||||||
)
|
|
||||||
attr_defs.append(attr_def)
|
|
||||||
|
|
||||||
dialog = AttributeDefinitionsDialog(
|
dialog = AttributeDefinitionsDialog(
|
||||||
attr_defs,
|
attr_defs,
|
||||||
|
|
|
||||||
|
|
@ -2,19 +2,47 @@ import qtawesome
|
||||||
from qtpy import QtWidgets, QtCore
|
from qtpy import QtWidgets, QtCore
|
||||||
|
|
||||||
from ayon_core.tools.utils import (
|
from ayon_core.tools.utils import (
|
||||||
PlaceholderLineEdit,
|
|
||||||
SquareButton,
|
SquareButton,
|
||||||
RefreshButton,
|
RefreshButton,
|
||||||
ProjectsCombobox,
|
ProjectsCombobox,
|
||||||
FoldersWidget,
|
FoldersWidget,
|
||||||
TasksWidget,
|
TasksWidget,
|
||||||
NiceCheckbox,
|
|
||||||
)
|
)
|
||||||
from ayon_core.tools.utils.lib import checkstate_int_to_enum
|
from ayon_core.tools.utils.folders_widget import FoldersFiltersWidget
|
||||||
|
|
||||||
from .workfiles_page import WorkfilesPage
|
from .workfiles_page import WorkfilesPage
|
||||||
|
|
||||||
|
|
||||||
|
class LauncherFoldersWidget(FoldersWidget):
|
||||||
|
focused_in = QtCore.Signal()
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs) -> None:
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
self._folders_view.installEventFilter(self)
|
||||||
|
|
||||||
|
def eventFilter(self, obj, event):
|
||||||
|
if event.type() == QtCore.QEvent.FocusIn:
|
||||||
|
self.focused_in.emit()
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
class LauncherTasksWidget(TasksWidget):
|
||||||
|
focused_in = QtCore.Signal()
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs) -> None:
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
self._tasks_view.installEventFilter(self)
|
||||||
|
|
||||||
|
def deselect(self):
|
||||||
|
sel_model = self._tasks_view.selectionModel()
|
||||||
|
sel_model.clearSelection()
|
||||||
|
|
||||||
|
def eventFilter(self, obj, event):
|
||||||
|
if event.type() == QtCore.QEvent.FocusIn:
|
||||||
|
self.focused_in.emit()
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
class HierarchyPage(QtWidgets.QWidget):
|
class HierarchyPage(QtWidgets.QWidget):
|
||||||
def __init__(self, controller, parent):
|
def __init__(self, controller, parent):
|
||||||
super().__init__(parent)
|
super().__init__(parent)
|
||||||
|
|
@ -46,34 +74,15 @@ class HierarchyPage(QtWidgets.QWidget):
|
||||||
content_body.setOrientation(QtCore.Qt.Horizontal)
|
content_body.setOrientation(QtCore.Qt.Horizontal)
|
||||||
|
|
||||||
# - filters
|
# - filters
|
||||||
filters_widget = QtWidgets.QWidget(self)
|
filters_widget = FoldersFiltersWidget(self)
|
||||||
|
|
||||||
folders_filter_text = PlaceholderLineEdit(filters_widget)
|
|
||||||
folders_filter_text.setPlaceholderText("Filter folders...")
|
|
||||||
|
|
||||||
my_tasks_tooltip = (
|
|
||||||
"Filter folders and task to only those you are assigned to."
|
|
||||||
)
|
|
||||||
my_tasks_label = QtWidgets.QLabel("My tasks", filters_widget)
|
|
||||||
my_tasks_label.setToolTip(my_tasks_tooltip)
|
|
||||||
|
|
||||||
my_tasks_checkbox = NiceCheckbox(filters_widget)
|
|
||||||
my_tasks_checkbox.setChecked(False)
|
|
||||||
my_tasks_checkbox.setToolTip(my_tasks_tooltip)
|
|
||||||
|
|
||||||
filters_layout = QtWidgets.QHBoxLayout(filters_widget)
|
|
||||||
filters_layout.setContentsMargins(0, 0, 0, 0)
|
|
||||||
filters_layout.addWidget(folders_filter_text, 1)
|
|
||||||
filters_layout.addWidget(my_tasks_label, 0)
|
|
||||||
filters_layout.addWidget(my_tasks_checkbox, 0)
|
|
||||||
|
|
||||||
# - Folders widget
|
# - Folders widget
|
||||||
folders_widget = FoldersWidget(controller, content_body)
|
folders_widget = LauncherFoldersWidget(controller, content_body)
|
||||||
folders_widget.set_header_visible(True)
|
folders_widget.set_header_visible(True)
|
||||||
folders_widget.set_deselectable(True)
|
folders_widget.set_deselectable(True)
|
||||||
|
|
||||||
# - Tasks widget
|
# - Tasks widget
|
||||||
tasks_widget = TasksWidget(controller, content_body)
|
tasks_widget = LauncherTasksWidget(controller, content_body)
|
||||||
|
|
||||||
# - Third page - Workfiles
|
# - Third page - Workfiles
|
||||||
workfiles_page = WorkfilesPage(controller, content_body)
|
workfiles_page = WorkfilesPage(controller, content_body)
|
||||||
|
|
@ -93,17 +102,19 @@ class HierarchyPage(QtWidgets.QWidget):
|
||||||
|
|
||||||
btn_back.clicked.connect(self._on_back_clicked)
|
btn_back.clicked.connect(self._on_back_clicked)
|
||||||
refresh_btn.clicked.connect(self._on_refresh_clicked)
|
refresh_btn.clicked.connect(self._on_refresh_clicked)
|
||||||
folders_filter_text.textChanged.connect(self._on_filter_text_changed)
|
filters_widget.text_changed.connect(self._on_filter_text_changed)
|
||||||
my_tasks_checkbox.stateChanged.connect(
|
filters_widget.my_tasks_changed.connect(
|
||||||
self._on_my_tasks_checkbox_state_changed
|
self._on_my_tasks_checkbox_state_changed
|
||||||
)
|
)
|
||||||
|
folders_widget.focused_in.connect(self._on_folders_focus)
|
||||||
|
tasks_widget.focused_in.connect(self._on_tasks_focus)
|
||||||
|
|
||||||
self._is_visible = False
|
self._is_visible = False
|
||||||
self._controller = controller
|
self._controller = controller
|
||||||
|
|
||||||
|
self._filters_widget = filters_widget
|
||||||
self._btn_back = btn_back
|
self._btn_back = btn_back
|
||||||
self._projects_combobox = projects_combobox
|
self._projects_combobox = projects_combobox
|
||||||
self._my_tasks_checkbox = my_tasks_checkbox
|
|
||||||
self._folders_widget = folders_widget
|
self._folders_widget = folders_widget
|
||||||
self._tasks_widget = tasks_widget
|
self._tasks_widget = tasks_widget
|
||||||
self._workfiles_page = workfiles_page
|
self._workfiles_page = workfiles_page
|
||||||
|
|
@ -126,8 +137,9 @@ class HierarchyPage(QtWidgets.QWidget):
|
||||||
self._folders_widget.refresh()
|
self._folders_widget.refresh()
|
||||||
self._tasks_widget.refresh()
|
self._tasks_widget.refresh()
|
||||||
self._workfiles_page.refresh()
|
self._workfiles_page.refresh()
|
||||||
|
# Update my tasks
|
||||||
self._on_my_tasks_checkbox_state_changed(
|
self._on_my_tasks_checkbox_state_changed(
|
||||||
self._my_tasks_checkbox.checkState()
|
self._filters_widget.is_my_tasks_checked()
|
||||||
)
|
)
|
||||||
|
|
||||||
def _on_back_clicked(self):
|
def _on_back_clicked(self):
|
||||||
|
|
@ -139,15 +151,21 @@ class HierarchyPage(QtWidgets.QWidget):
|
||||||
def _on_filter_text_changed(self, text):
|
def _on_filter_text_changed(self, text):
|
||||||
self._folders_widget.set_name_filter(text)
|
self._folders_widget.set_name_filter(text)
|
||||||
|
|
||||||
def _on_my_tasks_checkbox_state_changed(self, state):
|
def _on_my_tasks_checkbox_state_changed(self, enabled: bool) -> None:
|
||||||
folder_ids = None
|
folder_ids = None
|
||||||
task_ids = None
|
task_ids = None
|
||||||
state = checkstate_int_to_enum(state)
|
if enabled:
|
||||||
if state == QtCore.Qt.Checked:
|
|
||||||
entity_ids = self._controller.get_my_tasks_entity_ids(
|
entity_ids = self._controller.get_my_tasks_entity_ids(
|
||||||
self._project_name
|
self._project_name
|
||||||
)
|
)
|
||||||
folder_ids = entity_ids["folder_ids"]
|
folder_ids = entity_ids["folder_ids"]
|
||||||
task_ids = entity_ids["task_ids"]
|
task_ids = entity_ids["task_ids"]
|
||||||
|
|
||||||
self._folders_widget.set_folder_ids_filter(folder_ids)
|
self._folders_widget.set_folder_ids_filter(folder_ids)
|
||||||
self._tasks_widget.set_task_ids_filter(task_ids)
|
self._tasks_widget.set_task_ids_filter(task_ids)
|
||||||
|
|
||||||
|
def _on_folders_focus(self):
|
||||||
|
self._workfiles_page.deselect()
|
||||||
|
|
||||||
|
def _on_tasks_focus(self):
|
||||||
|
self._workfiles_page.deselect()
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,7 @@ from typing import Optional
|
||||||
import ayon_api
|
import ayon_api
|
||||||
from qtpy import QtCore, QtWidgets, QtGui
|
from qtpy import QtCore, QtWidgets, QtGui
|
||||||
|
|
||||||
from ayon_core.tools.utils import get_qt_icon
|
from ayon_core.tools.utils import get_qt_icon, DeselectableTreeView
|
||||||
from ayon_core.tools.launcher.abstract import AbstractLauncherFrontEnd
|
from ayon_core.tools.launcher.abstract import AbstractLauncherFrontEnd
|
||||||
|
|
||||||
VERSION_ROLE = QtCore.Qt.UserRole + 1
|
VERSION_ROLE = QtCore.Qt.UserRole + 1
|
||||||
|
|
@ -127,7 +127,7 @@ class WorkfilesModel(QtGui.QStandardItemModel):
|
||||||
return icon
|
return icon
|
||||||
|
|
||||||
|
|
||||||
class WorkfilesView(QtWidgets.QTreeView):
|
class WorkfilesView(DeselectableTreeView):
|
||||||
def drawBranches(self, painter, rect, index):
|
def drawBranches(self, painter, rect, index):
|
||||||
return
|
return
|
||||||
|
|
||||||
|
|
@ -165,6 +165,10 @@ class WorkfilesPage(QtWidgets.QWidget):
|
||||||
def refresh(self) -> None:
|
def refresh(self) -> None:
|
||||||
self._workfiles_model.refresh()
|
self._workfiles_model.refresh()
|
||||||
|
|
||||||
|
def deselect(self):
|
||||||
|
sel_model = self._workfiles_view.selectionModel()
|
||||||
|
sel_model.clearSelection()
|
||||||
|
|
||||||
def _on_refresh(self) -> None:
|
def _on_refresh(self) -> None:
|
||||||
self._workfiles_proxy.sort(0, QtCore.Qt.DescendingOrder)
|
self._workfiles_proxy.sort(0, QtCore.Qt.DescendingOrder)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -316,43 +316,34 @@ class ActionItem:
|
||||||
Args:
|
Args:
|
||||||
identifier (str): Action identifier.
|
identifier (str): Action identifier.
|
||||||
label (str): Action label.
|
label (str): Action label.
|
||||||
icon (dict[str, Any]): Action icon definition.
|
group_label (Optional[str]): Group label.
|
||||||
tooltip (str): Action tooltip.
|
icon (Optional[dict[str, Any]]): Action icon definition.
|
||||||
|
tooltip (Optional[str]): Action tooltip.
|
||||||
|
order (int): Action order.
|
||||||
|
data (Optional[dict[str, Any]]): Additional action data.
|
||||||
options (Union[list[AbstractAttrDef], list[qargparse.QArgument]]):
|
options (Union[list[AbstractAttrDef], list[qargparse.QArgument]]):
|
||||||
Action options. Note: 'qargparse' is considered as deprecated.
|
Action options. Note: 'qargparse' is considered as deprecated.
|
||||||
order (int): Action order.
|
|
||||||
project_name (str): Project name.
|
|
||||||
folder_ids (list[str]): Folder ids.
|
|
||||||
product_ids (list[str]): Product ids.
|
|
||||||
version_ids (list[str]): Version ids.
|
|
||||||
representation_ids (list[str]): Representation ids.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
"""
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
identifier,
|
identifier: str,
|
||||||
label,
|
label: str,
|
||||||
icon,
|
group_label: Optional[str],
|
||||||
tooltip,
|
icon: Optional[dict[str, Any]],
|
||||||
options,
|
tooltip: Optional[str],
|
||||||
order,
|
order: int,
|
||||||
project_name,
|
data: Optional[dict[str, Any]],
|
||||||
folder_ids,
|
options: Optional[list],
|
||||||
product_ids,
|
|
||||||
version_ids,
|
|
||||||
representation_ids,
|
|
||||||
):
|
):
|
||||||
self.identifier = identifier
|
self.identifier = identifier
|
||||||
self.label = label
|
self.label = label
|
||||||
|
self.group_label = group_label
|
||||||
self.icon = icon
|
self.icon = icon
|
||||||
self.tooltip = tooltip
|
self.tooltip = tooltip
|
||||||
self.options = options
|
self.data = data
|
||||||
self.order = order
|
self.order = order
|
||||||
self.project_name = project_name
|
self.options = options
|
||||||
self.folder_ids = folder_ids
|
|
||||||
self.product_ids = product_ids
|
|
||||||
self.version_ids = version_ids
|
|
||||||
self.representation_ids = representation_ids
|
|
||||||
|
|
||||||
def _options_to_data(self):
|
def _options_to_data(self):
|
||||||
options = self.options
|
options = self.options
|
||||||
|
|
@ -364,30 +355,26 @@ class ActionItem:
|
||||||
# future development of detached UI tools it would be better to be
|
# future development of detached UI tools it would be better to be
|
||||||
# prepared for it.
|
# prepared for it.
|
||||||
raise NotImplementedError(
|
raise NotImplementedError(
|
||||||
"{}.to_data is not implemented. Use Attribute definitions"
|
f"{self.__class__.__name__}.to_data is not implemented."
|
||||||
" from 'ayon_core.lib' instead of 'qargparse'.".format(
|
" Use Attribute definitions from 'ayon_core.lib'"
|
||||||
self.__class__.__name__
|
" instead of 'qargparse'."
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def to_data(self):
|
def to_data(self) -> dict[str, Any]:
|
||||||
options = self._options_to_data()
|
options = self._options_to_data()
|
||||||
return {
|
return {
|
||||||
"identifier": self.identifier,
|
"identifier": self.identifier,
|
||||||
"label": self.label,
|
"label": self.label,
|
||||||
|
"group_label": self.group_label,
|
||||||
"icon": self.icon,
|
"icon": self.icon,
|
||||||
"tooltip": self.tooltip,
|
"tooltip": self.tooltip,
|
||||||
"options": options,
|
|
||||||
"order": self.order,
|
"order": self.order,
|
||||||
"project_name": self.project_name,
|
"data": self.data,
|
||||||
"folder_ids": self.folder_ids,
|
"options": options,
|
||||||
"product_ids": self.product_ids,
|
|
||||||
"version_ids": self.version_ids,
|
|
||||||
"representation_ids": self.representation_ids,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_data(cls, data):
|
def from_data(cls, data) -> "ActionItem":
|
||||||
options = data["options"]
|
options = data["options"]
|
||||||
if options:
|
if options:
|
||||||
options = deserialize_attr_defs(options)
|
options = deserialize_attr_defs(options)
|
||||||
|
|
@ -666,6 +653,21 @@ class FrontendLoaderController(_BaseLoaderController):
|
||||||
"""
|
"""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def get_my_tasks_entity_ids(
|
||||||
|
self, project_name: str
|
||||||
|
) -> dict[str, list[str]]:
|
||||||
|
"""Get entity ids for my tasks.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
project_name (str): Project name.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict[str, list[str]]: Folder and task ids.
|
||||||
|
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def get_available_tags_by_entity_type(
|
def get_available_tags_by_entity_type(
|
||||||
self, project_name: str
|
self, project_name: str
|
||||||
|
|
@ -990,43 +992,35 @@ class FrontendLoaderController(_BaseLoaderController):
|
||||||
|
|
||||||
# Load action items
|
# Load action items
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def get_versions_action_items(self, project_name, version_ids):
|
def get_action_items(
|
||||||
|
self,
|
||||||
|
project_name: str,
|
||||||
|
entity_ids: set[str],
|
||||||
|
entity_type: str,
|
||||||
|
) -> list[ActionItem]:
|
||||||
"""Action items for versions selection.
|
"""Action items for versions selection.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
project_name (str): Project name.
|
project_name (str): Project name.
|
||||||
version_ids (Iterable[str]): Version ids.
|
entity_ids (set[str]): Entity ids.
|
||||||
|
entity_type (str): Entity type.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
list[ActionItem]: List of action items.
|
list[ActionItem]: List of action items.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
pass
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def get_representations_action_items(
|
|
||||||
self, project_name, representation_ids
|
|
||||||
):
|
|
||||||
"""Action items for representations selection.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
project_name (str): Project name.
|
|
||||||
representation_ids (Iterable[str]): Representation ids.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
list[ActionItem]: List of action items.
|
|
||||||
"""
|
|
||||||
|
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def trigger_action_item(
|
def trigger_action_item(
|
||||||
self,
|
self,
|
||||||
identifier,
|
identifier: str,
|
||||||
options,
|
project_name: str,
|
||||||
project_name,
|
selected_ids: set[str],
|
||||||
version_ids,
|
selected_entity_type: str,
|
||||||
representation_ids
|
data: Optional[dict[str, Any]],
|
||||||
|
options: dict[str, Any],
|
||||||
|
form_values: dict[str, Any],
|
||||||
):
|
):
|
||||||
"""Trigger action item.
|
"""Trigger action item.
|
||||||
|
|
||||||
|
|
@ -1044,13 +1038,15 @@ class FrontendLoaderController(_BaseLoaderController):
|
||||||
}
|
}
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
identifier (str): Action identifier.
|
identifier (sttr): Plugin identifier.
|
||||||
options (dict[str, Any]): Action option values from UI.
|
|
||||||
project_name (str): Project name.
|
project_name (str): Project name.
|
||||||
version_ids (Iterable[str]): Version ids.
|
selected_ids (set[str]): Selected entity ids.
|
||||||
representation_ids (Iterable[str]): Representation ids.
|
selected_entity_type (str): Selected entity type.
|
||||||
"""
|
data (Optional[dict[str, Any]]): Additional action item data.
|
||||||
|
options (dict[str, Any]): Action option values from UI.
|
||||||
|
form_values (dict[str, Any]): Action form values from UI.
|
||||||
|
|
||||||
|
"""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
|
|
|
||||||
|
|
@ -2,13 +2,17 @@ from __future__ import annotations
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import uuid
|
import uuid
|
||||||
from typing import Optional
|
from typing import Optional, Any
|
||||||
|
|
||||||
import ayon_api
|
import ayon_api
|
||||||
|
|
||||||
from ayon_core.settings import get_project_settings
|
from ayon_core.settings import get_project_settings
|
||||||
from ayon_core.pipeline import get_current_host_name
|
from ayon_core.pipeline import get_current_host_name
|
||||||
from ayon_core.lib import NestedCacheItem, CacheItem, filter_profiles
|
from ayon_core.lib import (
|
||||||
|
NestedCacheItem,
|
||||||
|
CacheItem,
|
||||||
|
filter_profiles,
|
||||||
|
)
|
||||||
from ayon_core.lib.events import QueuedEventSystem
|
from ayon_core.lib.events import QueuedEventSystem
|
||||||
from ayon_core.pipeline import Anatomy, get_current_context
|
from ayon_core.pipeline import Anatomy, get_current_context
|
||||||
from ayon_core.host import ILoadHost
|
from ayon_core.host import ILoadHost
|
||||||
|
|
@ -18,12 +22,14 @@ from ayon_core.tools.common_models import (
|
||||||
ThumbnailsModel,
|
ThumbnailsModel,
|
||||||
TagItem,
|
TagItem,
|
||||||
ProductTypeIconMapping,
|
ProductTypeIconMapping,
|
||||||
|
UsersModel,
|
||||||
)
|
)
|
||||||
|
|
||||||
from .abstract import (
|
from .abstract import (
|
||||||
BackendLoaderController,
|
BackendLoaderController,
|
||||||
FrontendLoaderController,
|
FrontendLoaderController,
|
||||||
ProductTypesFilter
|
ProductTypesFilter,
|
||||||
|
ActionItem,
|
||||||
)
|
)
|
||||||
from .models import (
|
from .models import (
|
||||||
SelectionModel,
|
SelectionModel,
|
||||||
|
|
@ -32,6 +38,8 @@ from .models import (
|
||||||
SiteSyncModel
|
SiteSyncModel
|
||||||
)
|
)
|
||||||
|
|
||||||
|
NOT_SET = object()
|
||||||
|
|
||||||
|
|
||||||
class ExpectedSelection:
|
class ExpectedSelection:
|
||||||
def __init__(self, controller):
|
def __init__(self, controller):
|
||||||
|
|
@ -124,6 +132,7 @@ class LoaderController(BackendLoaderController, FrontendLoaderController):
|
||||||
self._loader_actions_model = LoaderActionsModel(self)
|
self._loader_actions_model = LoaderActionsModel(self)
|
||||||
self._thumbnails_model = ThumbnailsModel()
|
self._thumbnails_model = ThumbnailsModel()
|
||||||
self._sitesync_model = SiteSyncModel(self)
|
self._sitesync_model = SiteSyncModel(self)
|
||||||
|
self._users_model = UsersModel(self)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def log(self):
|
def log(self):
|
||||||
|
|
@ -160,6 +169,7 @@ class LoaderController(BackendLoaderController, FrontendLoaderController):
|
||||||
self._projects_model.reset()
|
self._projects_model.reset()
|
||||||
self._thumbnails_model.reset()
|
self._thumbnails_model.reset()
|
||||||
self._sitesync_model.reset()
|
self._sitesync_model.reset()
|
||||||
|
self._users_model.reset()
|
||||||
|
|
||||||
self._projects_model.refresh()
|
self._projects_model.refresh()
|
||||||
|
|
||||||
|
|
@ -235,6 +245,17 @@ class LoaderController(BackendLoaderController, FrontendLoaderController):
|
||||||
output[folder_id] = label
|
output[folder_id] = label
|
||||||
return output
|
return output
|
||||||
|
|
||||||
|
def get_my_tasks_entity_ids(
|
||||||
|
self, project_name: str
|
||||||
|
) -> dict[str, list[str]]:
|
||||||
|
username = self._users_model.get_current_username()
|
||||||
|
assignees = []
|
||||||
|
if username:
|
||||||
|
assignees.append(username)
|
||||||
|
return self._hierarchy_model.get_entity_ids_for_assignees(
|
||||||
|
project_name, assignees
|
||||||
|
)
|
||||||
|
|
||||||
def get_available_tags_by_entity_type(
|
def get_available_tags_by_entity_type(
|
||||||
self, project_name: str
|
self, project_name: str
|
||||||
) -> dict[str, list[str]]:
|
) -> dict[str, list[str]]:
|
||||||
|
|
@ -296,45 +317,47 @@ class LoaderController(BackendLoaderController, FrontendLoaderController):
|
||||||
project_name, product_ids, group_name
|
project_name, product_ids, group_name
|
||||||
)
|
)
|
||||||
|
|
||||||
def get_versions_action_items(self, project_name, version_ids):
|
def get_action_items(
|
||||||
return self._loader_actions_model.get_versions_action_items(
|
self,
|
||||||
project_name, version_ids)
|
project_name: str,
|
||||||
|
entity_ids: set[str],
|
||||||
def get_representations_action_items(
|
entity_type: str,
|
||||||
self, project_name, representation_ids):
|
) -> list[ActionItem]:
|
||||||
action_items = (
|
action_items = self._loader_actions_model.get_action_items(
|
||||||
self._loader_actions_model.get_representations_action_items(
|
project_name, entity_ids, entity_type
|
||||||
project_name, representation_ids)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
action_items.extend(self._sitesync_model.get_sitesync_action_items(
|
site_sync_items = self._sitesync_model.get_sitesync_action_items(
|
||||||
project_name, representation_ids)
|
project_name, entity_ids, entity_type
|
||||||
)
|
)
|
||||||
|
action_items.extend(site_sync_items)
|
||||||
return action_items
|
return action_items
|
||||||
|
|
||||||
def trigger_action_item(
|
def trigger_action_item(
|
||||||
self,
|
self,
|
||||||
identifier,
|
identifier: str,
|
||||||
options,
|
project_name: str,
|
||||||
project_name,
|
selected_ids: set[str],
|
||||||
version_ids,
|
selected_entity_type: str,
|
||||||
representation_ids
|
data: Optional[dict[str, Any]],
|
||||||
|
options: dict[str, Any],
|
||||||
|
form_values: dict[str, Any],
|
||||||
):
|
):
|
||||||
if self._sitesync_model.is_sitesync_action(identifier):
|
if self._sitesync_model.is_sitesync_action(identifier):
|
||||||
self._sitesync_model.trigger_action_item(
|
self._sitesync_model.trigger_action_item(
|
||||||
identifier,
|
|
||||||
project_name,
|
project_name,
|
||||||
representation_ids
|
data,
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
|
|
||||||
self._loader_actions_model.trigger_action_item(
|
self._loader_actions_model.trigger_action_item(
|
||||||
identifier,
|
identifier=identifier,
|
||||||
options,
|
project_name=project_name,
|
||||||
project_name,
|
selected_ids=selected_ids,
|
||||||
version_ids,
|
selected_entity_type=selected_entity_type,
|
||||||
representation_ids
|
data=data,
|
||||||
|
options=options,
|
||||||
|
form_values=form_values,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Selection model wrappers
|
# Selection model wrappers
|
||||||
|
|
@ -476,20 +499,6 @@ class LoaderController(BackendLoaderController, FrontendLoaderController):
|
||||||
def is_standard_projects_filter_enabled(self):
|
def is_standard_projects_filter_enabled(self):
|
||||||
return self._host is not None
|
return self._host is not None
|
||||||
|
|
||||||
def _get_project_anatomy(self, project_name):
|
|
||||||
if not project_name:
|
|
||||||
return None
|
|
||||||
cache = self._project_anatomy_cache[project_name]
|
|
||||||
if not cache.is_valid:
|
|
||||||
cache.update_data(Anatomy(project_name))
|
|
||||||
return cache.get_data()
|
|
||||||
|
|
||||||
def _create_event_system(self):
|
|
||||||
return QueuedEventSystem()
|
|
||||||
|
|
||||||
def _emit_event(self, topic, data=None):
|
|
||||||
self._event_system.emit(topic, data or {}, "controller")
|
|
||||||
|
|
||||||
def get_product_types_filter(self):
|
def get_product_types_filter(self):
|
||||||
output = ProductTypesFilter(
|
output = ProductTypesFilter(
|
||||||
is_allow_list=False,
|
is_allow_list=False,
|
||||||
|
|
@ -545,3 +554,17 @@ class LoaderController(BackendLoaderController, FrontendLoaderController):
|
||||||
product_types=profile["filter_product_types"]
|
product_types=profile["filter_product_types"]
|
||||||
)
|
)
|
||||||
return output
|
return output
|
||||||
|
|
||||||
|
def _create_event_system(self):
|
||||||
|
return QueuedEventSystem()
|
||||||
|
|
||||||
|
def _emit_event(self, topic, data=None):
|
||||||
|
self._event_system.emit(topic, data or {}, "controller")
|
||||||
|
|
||||||
|
def _get_project_anatomy(self, project_name):
|
||||||
|
if not project_name:
|
||||||
|
return None
|
||||||
|
cache = self._project_anatomy_cache[project_name]
|
||||||
|
if not cache.is_valid:
|
||||||
|
cache.update_data(Anatomy(project_name))
|
||||||
|
return cache.get_data()
|
||||||
|
|
|
||||||
|
|
@ -5,10 +5,16 @@ import traceback
|
||||||
import inspect
|
import inspect
|
||||||
import collections
|
import collections
|
||||||
import uuid
|
import uuid
|
||||||
|
from typing import Optional, Callable, Any
|
||||||
|
|
||||||
import ayon_api
|
import ayon_api
|
||||||
|
|
||||||
from ayon_core.lib import NestedCacheItem
|
from ayon_core.lib import NestedCacheItem, Logger
|
||||||
|
from ayon_core.pipeline.actions import (
|
||||||
|
LoaderActionsContext,
|
||||||
|
LoaderActionSelection,
|
||||||
|
SelectionEntitiesCache,
|
||||||
|
)
|
||||||
from ayon_core.pipeline.load import (
|
from ayon_core.pipeline.load import (
|
||||||
discover_loader_plugins,
|
discover_loader_plugins,
|
||||||
ProductLoaderPlugin,
|
ProductLoaderPlugin,
|
||||||
|
|
@ -23,6 +29,7 @@ from ayon_core.pipeline.load import (
|
||||||
from ayon_core.tools.loader.abstract import ActionItem
|
from ayon_core.tools.loader.abstract import ActionItem
|
||||||
|
|
||||||
ACTIONS_MODEL_SENDER = "actions.model"
|
ACTIONS_MODEL_SENDER = "actions.model"
|
||||||
|
LOADER_PLUGIN_ID = "__loader_plugin__"
|
||||||
NOT_SET = object()
|
NOT_SET = object()
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -44,6 +51,7 @@ class LoaderActionsModel:
|
||||||
loaders_cache_lifetime = 30
|
loaders_cache_lifetime = 30
|
||||||
|
|
||||||
def __init__(self, controller):
|
def __init__(self, controller):
|
||||||
|
self._log = Logger.get_logger(self.__class__.__name__)
|
||||||
self._controller = controller
|
self._controller = controller
|
||||||
self._current_context_project = NOT_SET
|
self._current_context_project = NOT_SET
|
||||||
self._loaders_by_identifier = NestedCacheItem(
|
self._loaders_by_identifier = NestedCacheItem(
|
||||||
|
|
@ -52,6 +60,15 @@ class LoaderActionsModel:
|
||||||
levels=1, lifetime=self.loaders_cache_lifetime)
|
levels=1, lifetime=self.loaders_cache_lifetime)
|
||||||
self._repre_loaders = NestedCacheItem(
|
self._repre_loaders = NestedCacheItem(
|
||||||
levels=1, lifetime=self.loaders_cache_lifetime)
|
levels=1, lifetime=self.loaders_cache_lifetime)
|
||||||
|
self._loader_actions = LoaderActionsContext()
|
||||||
|
|
||||||
|
self._projects_cache = NestedCacheItem(levels=1, lifetime=60)
|
||||||
|
self._folders_cache = NestedCacheItem(levels=2, lifetime=300)
|
||||||
|
self._tasks_cache = NestedCacheItem(levels=2, lifetime=300)
|
||||||
|
self._products_cache = NestedCacheItem(levels=2, lifetime=300)
|
||||||
|
self._versions_cache = NestedCacheItem(levels=2, lifetime=1200)
|
||||||
|
self._representations_cache = NestedCacheItem(levels=2, lifetime=1200)
|
||||||
|
self._repre_parents_cache = NestedCacheItem(levels=2, lifetime=1200)
|
||||||
|
|
||||||
def reset(self):
|
def reset(self):
|
||||||
"""Reset the model with all cached items."""
|
"""Reset the model with all cached items."""
|
||||||
|
|
@ -60,64 +77,58 @@ class LoaderActionsModel:
|
||||||
self._loaders_by_identifier.reset()
|
self._loaders_by_identifier.reset()
|
||||||
self._product_loaders.reset()
|
self._product_loaders.reset()
|
||||||
self._repre_loaders.reset()
|
self._repre_loaders.reset()
|
||||||
|
self._loader_actions.reset()
|
||||||
|
|
||||||
def get_versions_action_items(self, project_name, version_ids):
|
self._folders_cache.reset()
|
||||||
"""Get action items for given version ids.
|
self._tasks_cache.reset()
|
||||||
|
self._products_cache.reset()
|
||||||
Args:
|
self._versions_cache.reset()
|
||||||
project_name (str): Project name.
|
self._representations_cache.reset()
|
||||||
version_ids (Iterable[str]): Version ids.
|
self._repre_parents_cache.reset()
|
||||||
|
|
||||||
Returns:
|
|
||||||
list[ActionItem]: List of action items.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
def get_action_items(
|
||||||
|
self,
|
||||||
|
project_name: str,
|
||||||
|
entity_ids: set[str],
|
||||||
|
entity_type: str,
|
||||||
|
) -> list[ActionItem]:
|
||||||
|
version_context_by_id = {}
|
||||||
|
repre_context_by_id = {}
|
||||||
|
if entity_type == "representation":
|
||||||
(
|
(
|
||||||
version_context_by_id,
|
version_context_by_id,
|
||||||
repre_context_by_id
|
repre_context_by_id
|
||||||
) = self._contexts_for_versions(
|
) = self._contexts_for_representations(project_name, entity_ids)
|
||||||
project_name,
|
|
||||||
version_ids
|
if entity_type == "version":
|
||||||
)
|
(
|
||||||
return self._get_action_items_for_contexts(
|
version_context_by_id,
|
||||||
|
repre_context_by_id
|
||||||
|
) = self._contexts_for_versions(project_name, entity_ids)
|
||||||
|
|
||||||
|
action_items = self._get_action_items_for_contexts(
|
||||||
project_name,
|
project_name,
|
||||||
version_context_by_id,
|
version_context_by_id,
|
||||||
repre_context_by_id
|
repre_context_by_id
|
||||||
)
|
)
|
||||||
|
action_items.extend(self._get_loader_action_items(
|
||||||
def get_representations_action_items(
|
|
||||||
self, project_name, representation_ids
|
|
||||||
):
|
|
||||||
"""Get action items for given representation ids.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
project_name (str): Project name.
|
|
||||||
representation_ids (Iterable[str]): Representation ids.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
list[ActionItem]: List of action items.
|
|
||||||
"""
|
|
||||||
|
|
||||||
(
|
|
||||||
product_context_by_id,
|
|
||||||
repre_context_by_id
|
|
||||||
) = self._contexts_for_representations(
|
|
||||||
project_name,
|
project_name,
|
||||||
representation_ids
|
entity_ids,
|
||||||
)
|
entity_type,
|
||||||
return self._get_action_items_for_contexts(
|
version_context_by_id,
|
||||||
project_name,
|
repre_context_by_id,
|
||||||
product_context_by_id,
|
))
|
||||||
repre_context_by_id
|
return action_items
|
||||||
)
|
|
||||||
|
|
||||||
def trigger_action_item(
|
def trigger_action_item(
|
||||||
self,
|
self,
|
||||||
identifier,
|
identifier: str,
|
||||||
options,
|
project_name: str,
|
||||||
project_name,
|
selected_ids: set[str],
|
||||||
version_ids,
|
selected_entity_type: str,
|
||||||
representation_ids
|
data: Optional[dict[str, Any]],
|
||||||
|
options: dict[str, Any],
|
||||||
|
form_values: dict[str, Any],
|
||||||
):
|
):
|
||||||
"""Trigger action by identifier.
|
"""Trigger action by identifier.
|
||||||
|
|
||||||
|
|
@ -128,15 +139,21 @@ class LoaderActionsModel:
|
||||||
happened.
|
happened.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
identifier (str): Loader identifier.
|
identifier (str): Plugin identifier.
|
||||||
options (dict[str, Any]): Loader option values.
|
|
||||||
project_name (str): Project name.
|
project_name (str): Project name.
|
||||||
version_ids (Iterable[str]): Version ids.
|
selected_ids (set[str]): Selected entity ids.
|
||||||
representation_ids (Iterable[str]): Representation ids.
|
selected_entity_type (str): Selected entity type.
|
||||||
"""
|
data (Optional[dict[str, Any]]): Additional action item data.
|
||||||
|
options (dict[str, Any]): Loader option values.
|
||||||
|
form_values (dict[str, Any]): Form values.
|
||||||
|
|
||||||
|
"""
|
||||||
event_data = {
|
event_data = {
|
||||||
"identifier": identifier,
|
"identifier": identifier,
|
||||||
|
"project_name": project_name,
|
||||||
|
"selected_ids": list(selected_ids),
|
||||||
|
"selected_entity_type": selected_entity_type,
|
||||||
|
"data": data,
|
||||||
"id": uuid.uuid4().hex,
|
"id": uuid.uuid4().hex,
|
||||||
}
|
}
|
||||||
self._controller.emit_event(
|
self._controller.emit_event(
|
||||||
|
|
@ -144,24 +161,60 @@ class LoaderActionsModel:
|
||||||
event_data,
|
event_data,
|
||||||
ACTIONS_MODEL_SENDER,
|
ACTIONS_MODEL_SENDER,
|
||||||
)
|
)
|
||||||
loader = self._get_loader_by_identifier(project_name, identifier)
|
if identifier != LOADER_PLUGIN_ID:
|
||||||
if representation_ids is not None:
|
result = None
|
||||||
error_info = self._trigger_representation_loader(
|
crashed = False
|
||||||
loader,
|
try:
|
||||||
options,
|
result = self._loader_actions.execute_action(
|
||||||
|
identifier=identifier,
|
||||||
|
selection=LoaderActionSelection(
|
||||||
project_name,
|
project_name,
|
||||||
representation_ids,
|
selected_ids,
|
||||||
|
selected_entity_type,
|
||||||
|
),
|
||||||
|
data=data,
|
||||||
|
form_values=form_values,
|
||||||
)
|
)
|
||||||
elif version_ids is not None:
|
|
||||||
|
except Exception:
|
||||||
|
crashed = True
|
||||||
|
self._log.warning(
|
||||||
|
f"Failed to execute action '{identifier}'",
|
||||||
|
exc_info=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
event_data["result"] = result
|
||||||
|
event_data["crashed"] = crashed
|
||||||
|
self._controller.emit_event(
|
||||||
|
"loader.action.finished",
|
||||||
|
event_data,
|
||||||
|
ACTIONS_MODEL_SENDER,
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
loader = self._get_loader_by_identifier(
|
||||||
|
project_name, data["loader"]
|
||||||
|
)
|
||||||
|
entity_type = data["entity_type"]
|
||||||
|
entity_ids = data["entity_ids"]
|
||||||
|
if entity_type == "version":
|
||||||
error_info = self._trigger_version_loader(
|
error_info = self._trigger_version_loader(
|
||||||
loader,
|
loader,
|
||||||
options,
|
options,
|
||||||
project_name,
|
project_name,
|
||||||
version_ids,
|
entity_ids,
|
||||||
|
)
|
||||||
|
elif entity_type == "representation":
|
||||||
|
error_info = self._trigger_representation_loader(
|
||||||
|
loader,
|
||||||
|
options,
|
||||||
|
project_name,
|
||||||
|
entity_ids,
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
raise NotImplementedError(
|
raise NotImplementedError(
|
||||||
"Invalid arguments to trigger action item")
|
f"Invalid entity type '{entity_type}' to trigger action item"
|
||||||
|
)
|
||||||
|
|
||||||
event_data["error_info"] = error_info
|
event_data["error_info"] = error_info
|
||||||
self._controller.emit_event(
|
self._controller.emit_event(
|
||||||
|
|
@ -276,28 +329,26 @@ class LoaderActionsModel:
|
||||||
self,
|
self,
|
||||||
loader,
|
loader,
|
||||||
contexts,
|
contexts,
|
||||||
project_name,
|
entity_ids,
|
||||||
folder_ids=None,
|
entity_type,
|
||||||
product_ids=None,
|
|
||||||
version_ids=None,
|
|
||||||
representation_ids=None,
|
|
||||||
repre_name=None,
|
repre_name=None,
|
||||||
):
|
):
|
||||||
label = self._get_action_label(loader)
|
label = self._get_action_label(loader)
|
||||||
if repre_name:
|
if repre_name:
|
||||||
label = "{} ({})".format(label, repre_name)
|
label = f"{label} ({repre_name})"
|
||||||
return ActionItem(
|
return ActionItem(
|
||||||
get_loader_identifier(loader),
|
LOADER_PLUGIN_ID,
|
||||||
|
data={
|
||||||
|
"entity_ids": entity_ids,
|
||||||
|
"entity_type": entity_type,
|
||||||
|
"loader": get_loader_identifier(loader),
|
||||||
|
},
|
||||||
label=label,
|
label=label,
|
||||||
|
group_label=None,
|
||||||
icon=self._get_action_icon(loader),
|
icon=self._get_action_icon(loader),
|
||||||
tooltip=self._get_action_tooltip(loader),
|
tooltip=self._get_action_tooltip(loader),
|
||||||
options=loader.get_options(contexts),
|
|
||||||
order=loader.order,
|
order=loader.order,
|
||||||
project_name=project_name,
|
options=loader.get_options(contexts),
|
||||||
folder_ids=folder_ids,
|
|
||||||
product_ids=product_ids,
|
|
||||||
version_ids=version_ids,
|
|
||||||
representation_ids=representation_ids,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def _get_loaders(self, project_name):
|
def _get_loaders(self, project_name):
|
||||||
|
|
@ -351,15 +402,6 @@ class LoaderActionsModel:
|
||||||
loaders_by_identifier = loaders_by_identifier_c.get_data()
|
loaders_by_identifier = loaders_by_identifier_c.get_data()
|
||||||
return loaders_by_identifier.get(identifier)
|
return loaders_by_identifier.get(identifier)
|
||||||
|
|
||||||
def _actions_sorter(self, action_item):
|
|
||||||
"""Sort the Loaders by their order and then their name.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
tuple[int, str]: Sort keys.
|
|
||||||
"""
|
|
||||||
|
|
||||||
return action_item.order, action_item.label
|
|
||||||
|
|
||||||
def _contexts_for_versions(self, project_name, version_ids):
|
def _contexts_for_versions(self, project_name, version_ids):
|
||||||
"""Get contexts for given version ids.
|
"""Get contexts for given version ids.
|
||||||
|
|
||||||
|
|
@ -385,8 +427,8 @@ class LoaderActionsModel:
|
||||||
if not project_name and not version_ids:
|
if not project_name and not version_ids:
|
||||||
return version_context_by_id, repre_context_by_id
|
return version_context_by_id, repre_context_by_id
|
||||||
|
|
||||||
version_entities = ayon_api.get_versions(
|
version_entities = self._get_versions(
|
||||||
project_name, version_ids=version_ids
|
project_name, version_ids
|
||||||
)
|
)
|
||||||
version_entities_by_id = {}
|
version_entities_by_id = {}
|
||||||
version_entities_by_product_id = collections.defaultdict(list)
|
version_entities_by_product_id = collections.defaultdict(list)
|
||||||
|
|
@ -397,18 +439,18 @@ class LoaderActionsModel:
|
||||||
version_entities_by_product_id[product_id].append(version_entity)
|
version_entities_by_product_id[product_id].append(version_entity)
|
||||||
|
|
||||||
_product_ids = set(version_entities_by_product_id.keys())
|
_product_ids = set(version_entities_by_product_id.keys())
|
||||||
_product_entities = ayon_api.get_products(
|
_product_entities = self._get_products(
|
||||||
project_name, product_ids=_product_ids
|
project_name, _product_ids
|
||||||
)
|
)
|
||||||
product_entities_by_id = {p["id"]: p for p in _product_entities}
|
product_entities_by_id = {p["id"]: p for p in _product_entities}
|
||||||
|
|
||||||
_folder_ids = {p["folderId"] for p in product_entities_by_id.values()}
|
_folder_ids = {p["folderId"] for p in product_entities_by_id.values()}
|
||||||
_folder_entities = ayon_api.get_folders(
|
_folder_entities = self._get_folders(
|
||||||
project_name, folder_ids=_folder_ids
|
project_name, _folder_ids
|
||||||
)
|
)
|
||||||
folder_entities_by_id = {f["id"]: f for f in _folder_entities}
|
folder_entities_by_id = {f["id"]: f for f in _folder_entities}
|
||||||
|
|
||||||
project_entity = ayon_api.get_project(project_name)
|
project_entity = self._get_project(project_name)
|
||||||
|
|
||||||
for version_id, version_entity in version_entities_by_id.items():
|
for version_id, version_entity in version_entities_by_id.items():
|
||||||
product_id = version_entity["productId"]
|
product_id = version_entity["productId"]
|
||||||
|
|
@ -422,8 +464,15 @@ class LoaderActionsModel:
|
||||||
"version": version_entity,
|
"version": version_entity,
|
||||||
}
|
}
|
||||||
|
|
||||||
repre_entities = ayon_api.get_representations(
|
all_repre_ids = set()
|
||||||
project_name, version_ids=version_ids)
|
for repre_ids in self._get_repre_ids_by_version_ids(
|
||||||
|
project_name, version_ids
|
||||||
|
).values():
|
||||||
|
all_repre_ids |= repre_ids
|
||||||
|
|
||||||
|
repre_entities = self._get_representations(
|
||||||
|
project_name, all_repre_ids
|
||||||
|
)
|
||||||
for repre_entity in repre_entities:
|
for repre_entity in repre_entities:
|
||||||
version_id = repre_entity["versionId"]
|
version_id = repre_entity["versionId"]
|
||||||
version_entity = version_entities_by_id[version_id]
|
version_entity = version_entities_by_id[version_id]
|
||||||
|
|
@ -459,49 +508,54 @@ class LoaderActionsModel:
|
||||||
Returns:
|
Returns:
|
||||||
tuple[list[dict[str, Any]], list[dict[str, Any]]]: Version and
|
tuple[list[dict[str, Any]], list[dict[str, Any]]]: Version and
|
||||||
representation contexts.
|
representation contexts.
|
||||||
"""
|
|
||||||
|
|
||||||
product_context_by_id = {}
|
"""
|
||||||
|
version_context_by_id = {}
|
||||||
repre_context_by_id = {}
|
repre_context_by_id = {}
|
||||||
if not project_name and not repre_ids:
|
if not project_name and not repre_ids:
|
||||||
return product_context_by_id, repre_context_by_id
|
return version_context_by_id, repre_context_by_id
|
||||||
|
|
||||||
repre_entities = list(ayon_api.get_representations(
|
repre_entities = self._get_representations(
|
||||||
project_name, representation_ids=repre_ids
|
project_name, repre_ids
|
||||||
))
|
)
|
||||||
version_ids = {r["versionId"] for r in repre_entities}
|
version_ids = {r["versionId"] for r in repre_entities}
|
||||||
version_entities = ayon_api.get_versions(
|
version_entities = self._get_versions(
|
||||||
project_name, version_ids=version_ids
|
project_name, version_ids
|
||||||
)
|
)
|
||||||
version_entities_by_id = {
|
version_entities_by_id = {
|
||||||
v["id"]: v for v in version_entities
|
v["id"]: v for v in version_entities
|
||||||
}
|
}
|
||||||
|
|
||||||
product_ids = {v["productId"] for v in version_entities_by_id.values()}
|
product_ids = {v["productId"] for v in version_entities_by_id.values()}
|
||||||
product_entities = ayon_api.get_products(
|
product_entities = self._get_products(
|
||||||
project_name, product_ids=product_ids
|
project_name, product_ids
|
||||||
|
|
||||||
)
|
)
|
||||||
product_entities_by_id = {
|
product_entities_by_id = {
|
||||||
p["id"]: p for p in product_entities
|
p["id"]: p for p in product_entities
|
||||||
}
|
}
|
||||||
|
|
||||||
folder_ids = {p["folderId"] for p in product_entities_by_id.values()}
|
folder_ids = {p["folderId"] for p in product_entities_by_id.values()}
|
||||||
folder_entities = ayon_api.get_folders(
|
folder_entities = self._get_folders(
|
||||||
project_name, folder_ids=folder_ids
|
project_name, folder_ids
|
||||||
)
|
)
|
||||||
folder_entities_by_id = {
|
folder_entities_by_id = {
|
||||||
f["id"]: f for f in folder_entities
|
f["id"]: f for f in folder_entities
|
||||||
}
|
}
|
||||||
|
|
||||||
project_entity = ayon_api.get_project(project_name)
|
project_entity = self._get_project(project_name)
|
||||||
|
|
||||||
for product_id, product_entity in product_entities_by_id.items():
|
version_context_by_id = {}
|
||||||
|
for version_id, version_entity in version_entities_by_id.items():
|
||||||
|
product_id = version_entity["productId"]
|
||||||
|
product_entity = product_entities_by_id[product_id]
|
||||||
folder_id = product_entity["folderId"]
|
folder_id = product_entity["folderId"]
|
||||||
folder_entity = folder_entities_by_id[folder_id]
|
folder_entity = folder_entities_by_id[folder_id]
|
||||||
product_context_by_id[product_id] = {
|
version_context_by_id[version_id] = {
|
||||||
"project": project_entity,
|
"project": project_entity,
|
||||||
"folder": folder_entity,
|
"folder": folder_entity,
|
||||||
"product": product_entity,
|
"product": product_entity,
|
||||||
|
"version": version_entity,
|
||||||
}
|
}
|
||||||
|
|
||||||
for repre_entity in repre_entities:
|
for repre_entity in repre_entities:
|
||||||
|
|
@ -519,7 +573,125 @@ class LoaderActionsModel:
|
||||||
"version": version_entity,
|
"version": version_entity,
|
||||||
"representation": repre_entity,
|
"representation": repre_entity,
|
||||||
}
|
}
|
||||||
return product_context_by_id, repre_context_by_id
|
return version_context_by_id, repre_context_by_id
|
||||||
|
|
||||||
|
def _get_project(self, project_name: str) -> dict[str, Any]:
|
||||||
|
cache = self._projects_cache[project_name]
|
||||||
|
if not cache.is_valid:
|
||||||
|
cache.update_data(ayon_api.get_project(project_name))
|
||||||
|
return cache.get_data()
|
||||||
|
|
||||||
|
def _get_folders(
|
||||||
|
self, project_name: str, folder_ids: set[str]
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
"""Get folders by ids."""
|
||||||
|
return self._get_entities(
|
||||||
|
project_name,
|
||||||
|
folder_ids,
|
||||||
|
self._folders_cache,
|
||||||
|
ayon_api.get_folders,
|
||||||
|
"folder_ids",
|
||||||
|
)
|
||||||
|
|
||||||
|
def _get_products(
|
||||||
|
self, project_name: str, product_ids: set[str]
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
"""Get products by ids."""
|
||||||
|
return self._get_entities(
|
||||||
|
project_name,
|
||||||
|
product_ids,
|
||||||
|
self._products_cache,
|
||||||
|
ayon_api.get_products,
|
||||||
|
"product_ids",
|
||||||
|
)
|
||||||
|
|
||||||
|
def _get_versions(
|
||||||
|
self, project_name: str, version_ids: set[str]
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
"""Get versions by ids."""
|
||||||
|
return self._get_entities(
|
||||||
|
project_name,
|
||||||
|
version_ids,
|
||||||
|
self._versions_cache,
|
||||||
|
ayon_api.get_versions,
|
||||||
|
"version_ids",
|
||||||
|
)
|
||||||
|
|
||||||
|
def _get_representations(
|
||||||
|
self, project_name: str, representation_ids: set[str]
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
"""Get representations by ids."""
|
||||||
|
return self._get_entities(
|
||||||
|
project_name,
|
||||||
|
representation_ids,
|
||||||
|
self._representations_cache,
|
||||||
|
ayon_api.get_representations,
|
||||||
|
"representation_ids",
|
||||||
|
)
|
||||||
|
|
||||||
|
def _get_repre_ids_by_version_ids(
|
||||||
|
self, project_name: str, version_ids: set[str]
|
||||||
|
) -> dict[str, set[str]]:
|
||||||
|
output = {}
|
||||||
|
if not version_ids:
|
||||||
|
return output
|
||||||
|
|
||||||
|
project_cache = self._repre_parents_cache[project_name]
|
||||||
|
missing_ids = set()
|
||||||
|
for version_id in version_ids:
|
||||||
|
cache = project_cache[version_id]
|
||||||
|
if cache.is_valid:
|
||||||
|
output[version_id] = cache.get_data()
|
||||||
|
else:
|
||||||
|
missing_ids.add(version_id)
|
||||||
|
|
||||||
|
if missing_ids:
|
||||||
|
repre_cache = self._representations_cache[project_name]
|
||||||
|
repres_by_parent_id = collections.defaultdict(list)
|
||||||
|
for repre in ayon_api.get_representations(
|
||||||
|
project_name, version_ids=missing_ids
|
||||||
|
):
|
||||||
|
version_id = repre["versionId"]
|
||||||
|
repre_cache[repre["id"]].update_data(repre)
|
||||||
|
repres_by_parent_id[version_id].append(repre)
|
||||||
|
|
||||||
|
for version_id, repres in repres_by_parent_id.items():
|
||||||
|
repre_ids = {
|
||||||
|
repre["id"]
|
||||||
|
for repre in repres
|
||||||
|
}
|
||||||
|
output[version_id] = set(repre_ids)
|
||||||
|
project_cache[version_id].update_data(repre_ids)
|
||||||
|
|
||||||
|
return output
|
||||||
|
|
||||||
|
def _get_entities(
|
||||||
|
self,
|
||||||
|
project_name: str,
|
||||||
|
entity_ids: set[str],
|
||||||
|
cache: NestedCacheItem,
|
||||||
|
getter: Callable,
|
||||||
|
filter_arg: str,
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
entities = []
|
||||||
|
if not entity_ids:
|
||||||
|
return entities
|
||||||
|
|
||||||
|
missing_ids = set()
|
||||||
|
project_cache = cache[project_name]
|
||||||
|
for entity_id in entity_ids:
|
||||||
|
entity_cache = project_cache[entity_id]
|
||||||
|
if entity_cache.is_valid:
|
||||||
|
entities.append(entity_cache.get_data())
|
||||||
|
else:
|
||||||
|
missing_ids.add(entity_id)
|
||||||
|
|
||||||
|
if missing_ids:
|
||||||
|
for entity in getter(project_name, **{filter_arg: missing_ids}):
|
||||||
|
entities.append(entity)
|
||||||
|
entity_id = entity["id"]
|
||||||
|
project_cache[entity_id].update_data(entity)
|
||||||
|
return entities
|
||||||
|
|
||||||
def _get_action_items_for_contexts(
|
def _get_action_items_for_contexts(
|
||||||
self,
|
self,
|
||||||
|
|
@ -557,51 +729,137 @@ class LoaderActionsModel:
|
||||||
if not filtered_repre_contexts:
|
if not filtered_repre_contexts:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
repre_ids = set()
|
repre_ids = {
|
||||||
repre_version_ids = set()
|
repre_context["representation"]["id"]
|
||||||
repre_product_ids = set()
|
for repre_context in filtered_repre_contexts
|
||||||
repre_folder_ids = set()
|
}
|
||||||
for repre_context in filtered_repre_contexts:
|
|
||||||
repre_ids.add(repre_context["representation"]["id"])
|
|
||||||
repre_product_ids.add(repre_context["product"]["id"])
|
|
||||||
repre_version_ids.add(repre_context["version"]["id"])
|
|
||||||
repre_folder_ids.add(repre_context["folder"]["id"])
|
|
||||||
|
|
||||||
item = self._create_loader_action_item(
|
item = self._create_loader_action_item(
|
||||||
loader,
|
loader,
|
||||||
repre_contexts,
|
repre_contexts,
|
||||||
project_name=project_name,
|
repre_ids,
|
||||||
folder_ids=repre_folder_ids,
|
"representation",
|
||||||
product_ids=repre_product_ids,
|
|
||||||
version_ids=repre_version_ids,
|
|
||||||
representation_ids=repre_ids,
|
|
||||||
repre_name=repre_name,
|
repre_name=repre_name,
|
||||||
)
|
)
|
||||||
action_items.append(item)
|
action_items.append(item)
|
||||||
|
|
||||||
# Product Loaders.
|
# Product Loaders.
|
||||||
version_ids = set(version_context_by_id.keys())
|
|
||||||
product_folder_ids = set()
|
product_folder_ids = set()
|
||||||
product_ids = set()
|
product_ids = set()
|
||||||
for product_context in version_context_by_id.values():
|
for product_context in version_context_by_id.values():
|
||||||
product_ids.add(product_context["product"]["id"])
|
product_ids.add(product_context["product"]["id"])
|
||||||
product_folder_ids.add(product_context["folder"]["id"])
|
product_folder_ids.add(product_context["folder"]["id"])
|
||||||
|
|
||||||
|
version_ids = set(version_context_by_id.keys())
|
||||||
version_contexts = list(version_context_by_id.values())
|
version_contexts = list(version_context_by_id.values())
|
||||||
for loader in product_loaders:
|
for loader in product_loaders:
|
||||||
item = self._create_loader_action_item(
|
item = self._create_loader_action_item(
|
||||||
loader,
|
loader,
|
||||||
version_contexts,
|
version_contexts,
|
||||||
project_name=project_name,
|
version_ids,
|
||||||
folder_ids=product_folder_ids,
|
"version",
|
||||||
product_ids=product_ids,
|
|
||||||
version_ids=version_ids,
|
|
||||||
)
|
)
|
||||||
action_items.append(item)
|
action_items.append(item)
|
||||||
|
|
||||||
action_items.sort(key=self._actions_sorter)
|
|
||||||
return action_items
|
return action_items
|
||||||
|
|
||||||
|
def _get_loader_action_items(
|
||||||
|
self,
|
||||||
|
project_name: str,
|
||||||
|
entity_ids: set[str],
|
||||||
|
entity_type: str,
|
||||||
|
version_context_by_id: dict[str, dict[str, Any]],
|
||||||
|
repre_context_by_id: dict[str, dict[str, Any]],
|
||||||
|
) -> list[ActionItem]:
|
||||||
|
"""
|
||||||
|
|
||||||
|
Args:
|
||||||
|
project_name (str): Project name.
|
||||||
|
entity_ids (set[str]): Selected entity ids.
|
||||||
|
entity_type (str): Selected entity type.
|
||||||
|
version_context_by_id (dict[str, dict[str, Any]]): Version context
|
||||||
|
by id.
|
||||||
|
repre_context_by_id (dict[str, dict[str, Any]]): Representation
|
||||||
|
context by id.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list[ActionItem]: List of action items.
|
||||||
|
|
||||||
|
"""
|
||||||
|
entities_cache = self._prepare_entities_cache(
|
||||||
|
project_name,
|
||||||
|
entity_type,
|
||||||
|
version_context_by_id,
|
||||||
|
repre_context_by_id,
|
||||||
|
)
|
||||||
|
selection = LoaderActionSelection(
|
||||||
|
project_name,
|
||||||
|
entity_ids,
|
||||||
|
entity_type,
|
||||||
|
entities_cache=entities_cache
|
||||||
|
)
|
||||||
|
items = []
|
||||||
|
for action in self._loader_actions.get_action_items(selection):
|
||||||
|
items.append(ActionItem(
|
||||||
|
action.identifier,
|
||||||
|
label=action.label,
|
||||||
|
group_label=action.group_label,
|
||||||
|
icon=action.icon,
|
||||||
|
tooltip=None, # action.tooltip,
|
||||||
|
order=action.order,
|
||||||
|
data=action.data,
|
||||||
|
options=None, # action.options,
|
||||||
|
))
|
||||||
|
return items
|
||||||
|
|
||||||
|
def _prepare_entities_cache(
|
||||||
|
self,
|
||||||
|
project_name: str,
|
||||||
|
entity_type: str,
|
||||||
|
version_context_by_id: dict[str, dict[str, Any]],
|
||||||
|
repre_context_by_id: dict[str, dict[str, Any]],
|
||||||
|
):
|
||||||
|
project_entity = None
|
||||||
|
folders_by_id = {}
|
||||||
|
products_by_id = {}
|
||||||
|
versions_by_id = {}
|
||||||
|
representations_by_id = {}
|
||||||
|
for context in version_context_by_id.values():
|
||||||
|
if project_entity is None:
|
||||||
|
project_entity = context["project"]
|
||||||
|
folder_entity = context["folder"]
|
||||||
|
product_entity = context["product"]
|
||||||
|
version_entity = context["version"]
|
||||||
|
folders_by_id[folder_entity["id"]] = folder_entity
|
||||||
|
products_by_id[product_entity["id"]] = product_entity
|
||||||
|
versions_by_id[version_entity["id"]] = version_entity
|
||||||
|
|
||||||
|
for context in repre_context_by_id.values():
|
||||||
|
repre_entity = context["representation"]
|
||||||
|
representations_by_id[repre_entity["id"]] = repre_entity
|
||||||
|
|
||||||
|
# Mapping has to be for all child entities which is available for
|
||||||
|
# representations only if version is selected
|
||||||
|
representation_ids_by_version_id = {}
|
||||||
|
if entity_type == "version":
|
||||||
|
representation_ids_by_version_id = {
|
||||||
|
version_id: set()
|
||||||
|
for version_id in versions_by_id
|
||||||
|
}
|
||||||
|
for context in repre_context_by_id.values():
|
||||||
|
repre_entity = context["representation"]
|
||||||
|
v_id = repre_entity["versionId"]
|
||||||
|
representation_ids_by_version_id[v_id].add(repre_entity["id"])
|
||||||
|
|
||||||
|
return SelectionEntitiesCache(
|
||||||
|
project_name,
|
||||||
|
project_entity=project_entity,
|
||||||
|
folders_by_id=folders_by_id,
|
||||||
|
products_by_id=products_by_id,
|
||||||
|
versions_by_id=versions_by_id,
|
||||||
|
representations_by_id=representations_by_id,
|
||||||
|
representation_ids_by_version_id=representation_ids_by_version_id,
|
||||||
|
)
|
||||||
|
|
||||||
def _trigger_version_loader(
|
def _trigger_version_loader(
|
||||||
self,
|
self,
|
||||||
loader,
|
loader,
|
||||||
|
|
@ -634,12 +892,12 @@ class LoaderActionsModel:
|
||||||
project_name, version_ids=version_ids
|
project_name, version_ids=version_ids
|
||||||
))
|
))
|
||||||
product_ids = {v["productId"] for v in version_entities}
|
product_ids = {v["productId"] for v in version_entities}
|
||||||
product_entities = ayon_api.get_products(
|
product_entities = self._get_products(
|
||||||
project_name, product_ids=product_ids
|
project_name, product_ids
|
||||||
)
|
)
|
||||||
product_entities_by_id = {p["id"]: p for p in product_entities}
|
product_entities_by_id = {p["id"]: p for p in product_entities}
|
||||||
folder_ids = {p["folderId"] for p in product_entities_by_id.values()}
|
folder_ids = {p["folderId"] for p in product_entities_by_id.values()}
|
||||||
folder_entities = ayon_api.get_folders(
|
folder_entities = self._get_folders(
|
||||||
project_name, folder_ids=folder_ids
|
project_name, folder_ids=folder_ids
|
||||||
)
|
)
|
||||||
folder_entities_by_id = {f["id"]: f for f in folder_entities}
|
folder_entities_by_id = {f["id"]: f for f in folder_entities}
|
||||||
|
|
|
||||||
|
|
@ -7,6 +7,7 @@ from typing import TYPE_CHECKING, Iterable, Optional
|
||||||
|
|
||||||
import arrow
|
import arrow
|
||||||
import ayon_api
|
import ayon_api
|
||||||
|
from ayon_api.graphql_queries import project_graphql_query
|
||||||
from ayon_api.operations import OperationsSession
|
from ayon_api.operations import OperationsSession
|
||||||
|
|
||||||
from ayon_core.lib import NestedCacheItem
|
from ayon_core.lib import NestedCacheItem
|
||||||
|
|
@ -202,7 +203,7 @@ class ProductsModel:
|
||||||
cache = self._product_type_items_cache[project_name]
|
cache = self._product_type_items_cache[project_name]
|
||||||
if not cache.is_valid:
|
if not cache.is_valid:
|
||||||
icons_mapping = self._get_product_type_icons(project_name)
|
icons_mapping = self._get_product_type_icons(project_name)
|
||||||
product_types = ayon_api.get_project_product_types(project_name)
|
product_types = self._get_project_product_types(project_name)
|
||||||
cache.update_data([
|
cache.update_data([
|
||||||
ProductTypeItem(
|
ProductTypeItem(
|
||||||
product_type["name"],
|
product_type["name"],
|
||||||
|
|
@ -462,6 +463,24 @@ class ProductsModel:
|
||||||
PRODUCTS_MODEL_SENDER
|
PRODUCTS_MODEL_SENDER
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def _get_project_product_types(self, project_name: str) -> list[dict]:
|
||||||
|
"""This is a temporary solution for product types fetching.
|
||||||
|
|
||||||
|
There was a bug in ayon_api.get_project(...) which did not use GraphQl
|
||||||
|
but REST instead. That is fixed in ayon-python-api 1.2.6 that will
|
||||||
|
be as part of ayon launcher 1.4.3 release.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if not project_name:
|
||||||
|
return []
|
||||||
|
query = project_graphql_query({"productTypes.name"})
|
||||||
|
query.set_variable_value("projectName", project_name)
|
||||||
|
parsed_data = query.query(ayon_api.get_server_api_connection())
|
||||||
|
project = parsed_data["project"]
|
||||||
|
if project is None:
|
||||||
|
return []
|
||||||
|
return project["productTypes"]
|
||||||
|
|
||||||
def _get_product_type_icons(
|
def _get_product_type_icons(
|
||||||
self, project_name: Optional[str]
|
self, project_name: Optional[str]
|
||||||
) -> ProductTypeIconMapping:
|
) -> ProductTypeIconMapping:
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,7 @@
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import collections
|
import collections
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
from ayon_api import (
|
from ayon_api import (
|
||||||
get_representations,
|
get_representations,
|
||||||
|
|
@ -246,26 +247,32 @@ class SiteSyncModel:
|
||||||
output[repre_id] = repre_cache.get_data()
|
output[repre_id] = repre_cache.get_data()
|
||||||
return output
|
return output
|
||||||
|
|
||||||
def get_sitesync_action_items(self, project_name, representation_ids):
|
def get_sitesync_action_items(
|
||||||
|
self, project_name, entity_ids, entity_type
|
||||||
|
):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
project_name (str): Project name.
|
project_name (str): Project name.
|
||||||
representation_ids (Iterable[str]): Representation ids.
|
entity_ids (set[str]): Selected entity ids.
|
||||||
|
entity_type (str): Selected entity type.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
list[ActionItem]: Actions that can be shown in loader.
|
list[ActionItem]: Actions that can be shown in loader.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
if entity_type != "representation":
|
||||||
|
return []
|
||||||
|
|
||||||
if not self.is_sitesync_enabled(project_name):
|
if not self.is_sitesync_enabled(project_name):
|
||||||
return []
|
return []
|
||||||
|
|
||||||
repres_status = self.get_representations_sync_status(
|
repres_status = self.get_representations_sync_status(
|
||||||
project_name, representation_ids
|
project_name, entity_ids
|
||||||
)
|
)
|
||||||
|
|
||||||
repre_ids_per_identifier = collections.defaultdict(set)
|
repre_ids_per_identifier = collections.defaultdict(set)
|
||||||
for repre_id in representation_ids:
|
for repre_id in entity_ids:
|
||||||
repre_status = repres_status[repre_id]
|
repre_status = repres_status[repre_id]
|
||||||
local_status, remote_status = repre_status
|
local_status, remote_status = repre_status
|
||||||
|
|
||||||
|
|
@ -293,36 +300,32 @@ class SiteSyncModel:
|
||||||
|
|
||||||
return action_items
|
return action_items
|
||||||
|
|
||||||
def is_sitesync_action(self, identifier):
|
def is_sitesync_action(self, identifier: str) -> bool:
|
||||||
"""Should be `identifier` handled by SiteSync.
|
"""Should be `identifier` handled by SiteSync.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
identifier (str): Action identifier.
|
identifier (str): Plugin identifier.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
bool: Should action be handled by SiteSync.
|
bool: Should action be handled by SiteSync.
|
||||||
"""
|
|
||||||
|
|
||||||
return identifier in {
|
"""
|
||||||
UPLOAD_IDENTIFIER,
|
return identifier == "sitesync.loader.action"
|
||||||
DOWNLOAD_IDENTIFIER,
|
|
||||||
REMOVE_IDENTIFIER,
|
|
||||||
}
|
|
||||||
|
|
||||||
def trigger_action_item(
|
def trigger_action_item(
|
||||||
self,
|
self,
|
||||||
identifier,
|
project_name: str,
|
||||||
project_name,
|
data: dict[str, Any],
|
||||||
representation_ids
|
|
||||||
):
|
):
|
||||||
"""Resets status for site_name or remove local files.
|
"""Resets status for site_name or remove local files.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
identifier (str): Action identifier.
|
|
||||||
project_name (str): Project name.
|
project_name (str): Project name.
|
||||||
representation_ids (Iterable[str]): Representation ids.
|
data (dict[str, Any]): Action item data.
|
||||||
"""
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
representation_ids = data["representation_ids"]
|
||||||
|
action_identifier = data["action_identifier"]
|
||||||
active_site = self.get_active_site(project_name)
|
active_site = self.get_active_site(project_name)
|
||||||
remote_site = self.get_remote_site(project_name)
|
remote_site = self.get_remote_site(project_name)
|
||||||
|
|
||||||
|
|
@ -346,17 +349,17 @@ class SiteSyncModel:
|
||||||
for repre_id in representation_ids:
|
for repre_id in representation_ids:
|
||||||
repre_entity = repre_entities_by_id.get(repre_id)
|
repre_entity = repre_entities_by_id.get(repre_id)
|
||||||
product_type = product_type_by_repre_id[repre_id]
|
product_type = product_type_by_repre_id[repre_id]
|
||||||
if identifier == DOWNLOAD_IDENTIFIER:
|
if action_identifier == DOWNLOAD_IDENTIFIER:
|
||||||
self._add_site(
|
self._add_site(
|
||||||
project_name, repre_entity, active_site, product_type
|
project_name, repre_entity, active_site, product_type
|
||||||
)
|
)
|
||||||
|
|
||||||
elif identifier == UPLOAD_IDENTIFIER:
|
elif action_identifier == UPLOAD_IDENTIFIER:
|
||||||
self._add_site(
|
self._add_site(
|
||||||
project_name, repre_entity, remote_site, product_type
|
project_name, repre_entity, remote_site, product_type
|
||||||
)
|
)
|
||||||
|
|
||||||
elif identifier == REMOVE_IDENTIFIER:
|
elif action_identifier == REMOVE_IDENTIFIER:
|
||||||
self._sitesync_addon.remove_site(
|
self._sitesync_addon.remove_site(
|
||||||
project_name,
|
project_name,
|
||||||
repre_id,
|
repre_id,
|
||||||
|
|
@ -476,27 +479,27 @@ class SiteSyncModel:
|
||||||
self,
|
self,
|
||||||
project_name,
|
project_name,
|
||||||
representation_ids,
|
representation_ids,
|
||||||
identifier,
|
action_identifier,
|
||||||
label,
|
label,
|
||||||
tooltip,
|
tooltip,
|
||||||
icon_name
|
icon_name
|
||||||
):
|
):
|
||||||
return ActionItem(
|
return ActionItem(
|
||||||
identifier,
|
"sitesync.loader.action",
|
||||||
label,
|
label=label,
|
||||||
|
group_label=None,
|
||||||
icon={
|
icon={
|
||||||
"type": "awesome-font",
|
"type": "awesome-font",
|
||||||
"name": icon_name,
|
"name": icon_name,
|
||||||
"color": "#999999"
|
"color": "#999999"
|
||||||
},
|
},
|
||||||
tooltip=tooltip,
|
tooltip=tooltip,
|
||||||
options={},
|
|
||||||
order=1,
|
order=1,
|
||||||
project_name=project_name,
|
data={
|
||||||
folder_ids=[],
|
"representation_ids": representation_ids,
|
||||||
product_ids=[],
|
"action_identifier": action_identifier,
|
||||||
version_ids=[],
|
},
|
||||||
representation_ids=representation_ids,
|
options=None,
|
||||||
)
|
)
|
||||||
|
|
||||||
def _add_site(self, project_name, repre_entity, site_name, product_type):
|
def _add_site(self, project_name, repre_entity, site_name, product_type):
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,7 @@
|
||||||
import uuid
|
import uuid
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
from qtpy import QtWidgets, QtGui
|
from qtpy import QtWidgets, QtGui, QtCore
|
||||||
import qtawesome
|
import qtawesome
|
||||||
|
|
||||||
from ayon_core.lib.attribute_definitions import AbstractAttrDef
|
from ayon_core.lib.attribute_definitions import AbstractAttrDef
|
||||||
|
|
@ -11,9 +12,29 @@ from ayon_core.tools.utils.widgets import (
|
||||||
OptionDialog,
|
OptionDialog,
|
||||||
)
|
)
|
||||||
from ayon_core.tools.utils import get_qt_icon
|
from ayon_core.tools.utils import get_qt_icon
|
||||||
|
from ayon_core.tools.loader.abstract import ActionItem
|
||||||
|
|
||||||
|
|
||||||
def show_actions_menu(action_items, global_point, one_item_selected, parent):
|
def _actions_sorter(item: tuple[ActionItem, str, str]):
|
||||||
|
"""Sort the Loaders by their order and then their name.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
tuple[int, str]: Sort keys.
|
||||||
|
|
||||||
|
"""
|
||||||
|
action_item, group_label, label = item
|
||||||
|
if group_label is None:
|
||||||
|
group_label = label
|
||||||
|
label = ""
|
||||||
|
return action_item.order, group_label, label
|
||||||
|
|
||||||
|
|
||||||
|
def show_actions_menu(
|
||||||
|
action_items: list[ActionItem],
|
||||||
|
global_point: QtCore.QPoint,
|
||||||
|
one_item_selected: bool,
|
||||||
|
parent: QtWidgets.QWidget,
|
||||||
|
) -> tuple[Optional[ActionItem], Optional[dict[str, Any]]]:
|
||||||
selected_action_item = None
|
selected_action_item = None
|
||||||
selected_options = None
|
selected_options = None
|
||||||
|
|
||||||
|
|
@ -26,8 +47,16 @@ def show_actions_menu(action_items, global_point, one_item_selected, parent):
|
||||||
|
|
||||||
menu = OptionalMenu(parent)
|
menu = OptionalMenu(parent)
|
||||||
|
|
||||||
action_items_by_id = {}
|
action_items_with_labels = []
|
||||||
for action_item in action_items:
|
for action_item in action_items:
|
||||||
|
action_items_with_labels.append(
|
||||||
|
(action_item, action_item.group_label, action_item.label)
|
||||||
|
)
|
||||||
|
|
||||||
|
group_menu_by_label = {}
|
||||||
|
action_items_by_id = {}
|
||||||
|
for item in sorted(action_items_with_labels, key=_actions_sorter):
|
||||||
|
action_item, _, _ = item
|
||||||
item_id = uuid.uuid4().hex
|
item_id = uuid.uuid4().hex
|
||||||
action_items_by_id[item_id] = action_item
|
action_items_by_id[item_id] = action_item
|
||||||
item_options = action_item.options
|
item_options = action_item.options
|
||||||
|
|
@ -50,6 +79,17 @@ def show_actions_menu(action_items, global_point, one_item_selected, parent):
|
||||||
|
|
||||||
action.setData(item_id)
|
action.setData(item_id)
|
||||||
|
|
||||||
|
group_label = action_item.group_label
|
||||||
|
if group_label:
|
||||||
|
group_menu = group_menu_by_label.get(group_label)
|
||||||
|
if group_menu is None:
|
||||||
|
group_menu = OptionalMenu(group_label, menu)
|
||||||
|
if icon is not None:
|
||||||
|
group_menu.setIcon(icon)
|
||||||
|
menu.addMenu(group_menu)
|
||||||
|
group_menu_by_label[group_label] = group_menu
|
||||||
|
group_menu.addAction(action)
|
||||||
|
else:
|
||||||
menu.addAction(action)
|
menu.addAction(action)
|
||||||
|
|
||||||
action = menu.exec_(global_point)
|
action = menu.exec_(global_point)
|
||||||
|
|
|
||||||
|
|
@ -1,11 +1,11 @@
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
import qtpy
|
import qtpy
|
||||||
from qtpy import QtWidgets, QtCore, QtGui
|
from qtpy import QtWidgets, QtCore, QtGui
|
||||||
|
|
||||||
from ayon_core.tools.utils import (
|
|
||||||
RecursiveSortFilterProxyModel,
|
|
||||||
DeselectableTreeView,
|
|
||||||
)
|
|
||||||
from ayon_core.style import get_objected_colors
|
from ayon_core.style import get_objected_colors
|
||||||
|
from ayon_core.tools.utils import DeselectableTreeView
|
||||||
|
from ayon_core.tools.utils.folders_widget import FoldersProxyModel
|
||||||
|
|
||||||
from ayon_core.tools.utils import (
|
from ayon_core.tools.utils import (
|
||||||
FoldersQtModel,
|
FoldersQtModel,
|
||||||
|
|
@ -260,7 +260,7 @@ class LoaderFoldersWidget(QtWidgets.QWidget):
|
||||||
QtWidgets.QAbstractItemView.ExtendedSelection)
|
QtWidgets.QAbstractItemView.ExtendedSelection)
|
||||||
|
|
||||||
folders_model = LoaderFoldersModel(controller)
|
folders_model = LoaderFoldersModel(controller)
|
||||||
folders_proxy_model = RecursiveSortFilterProxyModel()
|
folders_proxy_model = FoldersProxyModel()
|
||||||
folders_proxy_model.setSourceModel(folders_model)
|
folders_proxy_model.setSourceModel(folders_model)
|
||||||
folders_proxy_model.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive)
|
folders_proxy_model.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive)
|
||||||
|
|
||||||
|
|
@ -314,6 +314,15 @@ class LoaderFoldersWidget(QtWidgets.QWidget):
|
||||||
if name:
|
if name:
|
||||||
self._folders_view.expandAll()
|
self._folders_view.expandAll()
|
||||||
|
|
||||||
|
def set_folder_ids_filter(self, folder_ids: Optional[list[str]]):
|
||||||
|
"""Set filter of folder ids.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
folder_ids (list[str]): The list of folder ids.
|
||||||
|
|
||||||
|
"""
|
||||||
|
self._folders_proxy_model.set_folder_ids_filter(folder_ids)
|
||||||
|
|
||||||
def set_merged_products_selection(self, items):
|
def set_merged_products_selection(self, items):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -420,8 +420,9 @@ class ProductsWidget(QtWidgets.QWidget):
|
||||||
if version_id is not None:
|
if version_id is not None:
|
||||||
version_ids.add(version_id)
|
version_ids.add(version_id)
|
||||||
|
|
||||||
action_items = self._controller.get_versions_action_items(
|
action_items = self._controller.get_action_items(
|
||||||
project_name, version_ids)
|
project_name, version_ids, "version"
|
||||||
|
)
|
||||||
|
|
||||||
# Prepare global point where to show the menu
|
# Prepare global point where to show the menu
|
||||||
global_point = self._products_view.mapToGlobal(point)
|
global_point = self._products_view.mapToGlobal(point)
|
||||||
|
|
@ -437,11 +438,13 @@ class ProductsWidget(QtWidgets.QWidget):
|
||||||
return
|
return
|
||||||
|
|
||||||
self._controller.trigger_action_item(
|
self._controller.trigger_action_item(
|
||||||
action_item.identifier,
|
identifier=action_item.identifier,
|
||||||
options,
|
project_name=project_name,
|
||||||
action_item.project_name,
|
selected_ids=version_ids,
|
||||||
version_ids=action_item.version_ids,
|
selected_entity_type="version",
|
||||||
representation_ids=action_item.representation_ids,
|
data=action_item.data,
|
||||||
|
options=options,
|
||||||
|
form_values={},
|
||||||
)
|
)
|
||||||
|
|
||||||
def _on_selection_change(self):
|
def _on_selection_change(self):
|
||||||
|
|
|
||||||
|
|
@ -384,8 +384,8 @@ class RepresentationsWidget(QtWidgets.QWidget):
|
||||||
|
|
||||||
def _on_context_menu(self, point):
|
def _on_context_menu(self, point):
|
||||||
repre_ids = self._get_selected_repre_ids()
|
repre_ids = self._get_selected_repre_ids()
|
||||||
action_items = self._controller.get_representations_action_items(
|
action_items = self._controller.get_action_items(
|
||||||
self._selected_project_name, repre_ids
|
self._selected_project_name, repre_ids, "representation"
|
||||||
)
|
)
|
||||||
global_point = self._repre_view.mapToGlobal(point)
|
global_point = self._repre_view.mapToGlobal(point)
|
||||||
result = show_actions_menu(
|
result = show_actions_menu(
|
||||||
|
|
@ -399,9 +399,11 @@ class RepresentationsWidget(QtWidgets.QWidget):
|
||||||
return
|
return
|
||||||
|
|
||||||
self._controller.trigger_action_item(
|
self._controller.trigger_action_item(
|
||||||
action_item.identifier,
|
identifier=action_item.identifier,
|
||||||
options,
|
project_name=self._selected_project_name,
|
||||||
action_item.project_name,
|
selected_ids=repre_ids,
|
||||||
version_ids=action_item.version_ids,
|
selected_entity_type="representation",
|
||||||
representation_ids=action_item.representation_ids,
|
data=action_item.data,
|
||||||
|
options=options,
|
||||||
|
form_values={},
|
||||||
)
|
)
|
||||||
|
|
|
||||||
|
|
@ -1,11 +1,11 @@
|
||||||
import collections
|
import collections
|
||||||
import hashlib
|
import hashlib
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
from qtpy import QtWidgets, QtCore, QtGui
|
from qtpy import QtWidgets, QtCore, QtGui
|
||||||
|
|
||||||
from ayon_core.style import get_default_entity_icon_color
|
from ayon_core.style import get_default_entity_icon_color
|
||||||
from ayon_core.tools.utils import (
|
from ayon_core.tools.utils import (
|
||||||
RecursiveSortFilterProxyModel,
|
|
||||||
DeselectableTreeView,
|
DeselectableTreeView,
|
||||||
TasksQtModel,
|
TasksQtModel,
|
||||||
TASKS_MODEL_SENDER_NAME,
|
TASKS_MODEL_SENDER_NAME,
|
||||||
|
|
@ -15,9 +15,11 @@ from ayon_core.tools.utils.tasks_widget import (
|
||||||
ITEM_NAME_ROLE,
|
ITEM_NAME_ROLE,
|
||||||
PARENT_ID_ROLE,
|
PARENT_ID_ROLE,
|
||||||
TASK_TYPE_ROLE,
|
TASK_TYPE_ROLE,
|
||||||
|
TasksProxyModel,
|
||||||
)
|
)
|
||||||
from ayon_core.tools.utils.lib import RefreshThread, get_qt_icon
|
from ayon_core.tools.utils.lib import RefreshThread, get_qt_icon
|
||||||
|
|
||||||
|
|
||||||
# Role that can't clash with default 'tasks_widget' roles
|
# Role that can't clash with default 'tasks_widget' roles
|
||||||
FOLDER_LABEL_ROLE = QtCore.Qt.UserRole + 100
|
FOLDER_LABEL_ROLE = QtCore.Qt.UserRole + 100
|
||||||
NO_TASKS_ID = "--no-task--"
|
NO_TASKS_ID = "--no-task--"
|
||||||
|
|
@ -295,7 +297,7 @@ class LoaderTasksQtModel(TasksQtModel):
|
||||||
return super().data(index, role)
|
return super().data(index, role)
|
||||||
|
|
||||||
|
|
||||||
class LoaderTasksProxyModel(RecursiveSortFilterProxyModel):
|
class LoaderTasksProxyModel(TasksProxyModel):
|
||||||
def lessThan(self, left, right):
|
def lessThan(self, left, right):
|
||||||
if left.data(ITEM_ID_ROLE) == NO_TASKS_ID:
|
if left.data(ITEM_ID_ROLE) == NO_TASKS_ID:
|
||||||
return False
|
return False
|
||||||
|
|
@ -303,6 +305,12 @@ class LoaderTasksProxyModel(RecursiveSortFilterProxyModel):
|
||||||
return True
|
return True
|
||||||
return super().lessThan(left, right)
|
return super().lessThan(left, right)
|
||||||
|
|
||||||
|
def filterAcceptsRow(self, row, parent_index):
|
||||||
|
source_index = self.sourceModel().index(row, 0, parent_index)
|
||||||
|
if source_index.data(ITEM_ID_ROLE) == NO_TASKS_ID:
|
||||||
|
return True
|
||||||
|
return super().filterAcceptsRow(row, parent_index)
|
||||||
|
|
||||||
|
|
||||||
class LoaderTasksWidget(QtWidgets.QWidget):
|
class LoaderTasksWidget(QtWidgets.QWidget):
|
||||||
refreshed = QtCore.Signal()
|
refreshed = QtCore.Signal()
|
||||||
|
|
@ -363,6 +371,15 @@ class LoaderTasksWidget(QtWidgets.QWidget):
|
||||||
if name:
|
if name:
|
||||||
self._tasks_view.expandAll()
|
self._tasks_view.expandAll()
|
||||||
|
|
||||||
|
def set_task_ids_filter(self, task_ids: Optional[list[str]]):
|
||||||
|
"""Set filter of folder ids.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
task_ids (list[str]): The list of folder ids.
|
||||||
|
|
||||||
|
"""
|
||||||
|
self._tasks_proxy_model.set_task_ids_filter(task_ids)
|
||||||
|
|
||||||
def refresh(self):
|
def refresh(self):
|
||||||
self._tasks_model.refresh()
|
self._tasks_model.refresh()
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,18 +1,24 @@
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
from qtpy import QtWidgets, QtCore, QtGui
|
from qtpy import QtWidgets, QtCore, QtGui
|
||||||
|
|
||||||
from ayon_core.resources import get_ayon_icon_filepath
|
from ayon_core.resources import get_ayon_icon_filepath
|
||||||
from ayon_core.style import load_stylesheet
|
from ayon_core.style import load_stylesheet
|
||||||
|
from ayon_core.pipeline.actions import LoaderActionResult
|
||||||
from ayon_core.tools.utils import (
|
from ayon_core.tools.utils import (
|
||||||
PlaceholderLineEdit,
|
MessageOverlayObject,
|
||||||
ErrorMessageBox,
|
ErrorMessageBox,
|
||||||
ThumbnailPainterWidget,
|
ThumbnailPainterWidget,
|
||||||
RefreshButton,
|
RefreshButton,
|
||||||
GoToCurrentButton,
|
GoToCurrentButton,
|
||||||
|
ProjectsCombobox,
|
||||||
|
get_qt_icon,
|
||||||
|
FoldersFiltersWidget,
|
||||||
)
|
)
|
||||||
|
from ayon_core.tools.attribute_defs import AttributeDefinitionsDialog
|
||||||
from ayon_core.tools.utils.lib import center_window
|
from ayon_core.tools.utils.lib import center_window
|
||||||
from ayon_core.tools.utils import ProjectsCombobox
|
|
||||||
from ayon_core.tools.common_models import StatusItem
|
from ayon_core.tools.common_models import StatusItem
|
||||||
from ayon_core.tools.loader.abstract import ProductTypeItem
|
from ayon_core.tools.loader.abstract import ProductTypeItem
|
||||||
from ayon_core.tools.loader.control import LoaderController
|
from ayon_core.tools.loader.control import LoaderController
|
||||||
|
|
@ -141,6 +147,8 @@ class LoaderWindow(QtWidgets.QWidget):
|
||||||
if controller is None:
|
if controller is None:
|
||||||
controller = LoaderController()
|
controller = LoaderController()
|
||||||
|
|
||||||
|
overlay_object = MessageOverlayObject(self)
|
||||||
|
|
||||||
main_splitter = QtWidgets.QSplitter(self)
|
main_splitter = QtWidgets.QSplitter(self)
|
||||||
|
|
||||||
context_splitter = QtWidgets.QSplitter(main_splitter)
|
context_splitter = QtWidgets.QSplitter(main_splitter)
|
||||||
|
|
@ -170,15 +178,14 @@ class LoaderWindow(QtWidgets.QWidget):
|
||||||
context_top_layout.addWidget(go_to_current_btn, 0)
|
context_top_layout.addWidget(go_to_current_btn, 0)
|
||||||
context_top_layout.addWidget(refresh_btn, 0)
|
context_top_layout.addWidget(refresh_btn, 0)
|
||||||
|
|
||||||
folders_filter_input = PlaceholderLineEdit(context_widget)
|
filters_widget = FoldersFiltersWidget(context_widget)
|
||||||
folders_filter_input.setPlaceholderText("Folder name filter...")
|
|
||||||
|
|
||||||
folders_widget = LoaderFoldersWidget(controller, context_widget)
|
folders_widget = LoaderFoldersWidget(controller, context_widget)
|
||||||
|
|
||||||
context_layout = QtWidgets.QVBoxLayout(context_widget)
|
context_layout = QtWidgets.QVBoxLayout(context_widget)
|
||||||
context_layout.setContentsMargins(0, 0, 0, 0)
|
context_layout.setContentsMargins(0, 0, 0, 0)
|
||||||
context_layout.addWidget(context_top_widget, 0)
|
context_layout.addWidget(context_top_widget, 0)
|
||||||
context_layout.addWidget(folders_filter_input, 0)
|
context_layout.addWidget(filters_widget, 0)
|
||||||
context_layout.addWidget(folders_widget, 1)
|
context_layout.addWidget(folders_widget, 1)
|
||||||
|
|
||||||
tasks_widget = LoaderTasksWidget(controller, context_widget)
|
tasks_widget = LoaderTasksWidget(controller, context_widget)
|
||||||
|
|
@ -247,9 +254,12 @@ class LoaderWindow(QtWidgets.QWidget):
|
||||||
projects_combobox.refreshed.connect(self._on_projects_refresh)
|
projects_combobox.refreshed.connect(self._on_projects_refresh)
|
||||||
folders_widget.refreshed.connect(self._on_folders_refresh)
|
folders_widget.refreshed.connect(self._on_folders_refresh)
|
||||||
products_widget.refreshed.connect(self._on_products_refresh)
|
products_widget.refreshed.connect(self._on_products_refresh)
|
||||||
folders_filter_input.textChanged.connect(
|
filters_widget.text_changed.connect(
|
||||||
self._on_folder_filter_change
|
self._on_folder_filter_change
|
||||||
)
|
)
|
||||||
|
filters_widget.my_tasks_changed.connect(
|
||||||
|
self._on_my_tasks_checkbox_state_changed
|
||||||
|
)
|
||||||
search_bar.filter_changed.connect(self._on_filter_change)
|
search_bar.filter_changed.connect(self._on_filter_change)
|
||||||
product_group_checkbox.stateChanged.connect(
|
product_group_checkbox.stateChanged.connect(
|
||||||
self._on_product_group_change
|
self._on_product_group_change
|
||||||
|
|
@ -294,6 +304,12 @@ class LoaderWindow(QtWidgets.QWidget):
|
||||||
"controller.reset.finished",
|
"controller.reset.finished",
|
||||||
self._on_controller_reset_finish,
|
self._on_controller_reset_finish,
|
||||||
)
|
)
|
||||||
|
controller.register_event_callback(
|
||||||
|
"loader.action.finished",
|
||||||
|
self._on_loader_action_finished,
|
||||||
|
)
|
||||||
|
|
||||||
|
self._overlay_object = overlay_object
|
||||||
|
|
||||||
self._group_dialog = ProductGroupDialog(controller, self)
|
self._group_dialog = ProductGroupDialog(controller, self)
|
||||||
|
|
||||||
|
|
@ -303,7 +319,7 @@ class LoaderWindow(QtWidgets.QWidget):
|
||||||
self._refresh_btn = refresh_btn
|
self._refresh_btn = refresh_btn
|
||||||
self._projects_combobox = projects_combobox
|
self._projects_combobox = projects_combobox
|
||||||
|
|
||||||
self._folders_filter_input = folders_filter_input
|
self._filters_widget = filters_widget
|
||||||
self._folders_widget = folders_widget
|
self._folders_widget = folders_widget
|
||||||
|
|
||||||
self._tasks_widget = tasks_widget
|
self._tasks_widget = tasks_widget
|
||||||
|
|
@ -406,6 +422,20 @@ class LoaderWindow(QtWidgets.QWidget):
|
||||||
if self._reset_on_show:
|
if self._reset_on_show:
|
||||||
self.refresh()
|
self.refresh()
|
||||||
|
|
||||||
|
def _show_toast_message(
|
||||||
|
self,
|
||||||
|
message: str,
|
||||||
|
success: bool = True,
|
||||||
|
message_id: Optional[str] = None,
|
||||||
|
):
|
||||||
|
message_type = None
|
||||||
|
if not success:
|
||||||
|
message_type = "error"
|
||||||
|
|
||||||
|
self._overlay_object.add_message(
|
||||||
|
message, message_type, message_id=message_id
|
||||||
|
)
|
||||||
|
|
||||||
def _show_group_dialog(self):
|
def _show_group_dialog(self):
|
||||||
project_name = self._projects_combobox.get_selected_project_name()
|
project_name = self._projects_combobox.get_selected_project_name()
|
||||||
if not project_name:
|
if not project_name:
|
||||||
|
|
@ -421,9 +451,21 @@ class LoaderWindow(QtWidgets.QWidget):
|
||||||
self._group_dialog.set_product_ids(project_name, product_ids)
|
self._group_dialog.set_product_ids(project_name, product_ids)
|
||||||
self._group_dialog.show()
|
self._group_dialog.show()
|
||||||
|
|
||||||
def _on_folder_filter_change(self, text):
|
def _on_folder_filter_change(self, text: str) -> None:
|
||||||
self._folders_widget.set_name_filter(text)
|
self._folders_widget.set_name_filter(text)
|
||||||
|
|
||||||
|
def _on_my_tasks_checkbox_state_changed(self, enabled: bool) -> None:
|
||||||
|
folder_ids = None
|
||||||
|
task_ids = None
|
||||||
|
if enabled:
|
||||||
|
entity_ids = self._controller.get_my_tasks_entity_ids(
|
||||||
|
self._selected_project_name
|
||||||
|
)
|
||||||
|
folder_ids = entity_ids["folder_ids"]
|
||||||
|
task_ids = entity_ids["task_ids"]
|
||||||
|
self._folders_widget.set_folder_ids_filter(folder_ids)
|
||||||
|
self._tasks_widget.set_task_ids_filter(task_ids)
|
||||||
|
|
||||||
def _on_product_group_change(self):
|
def _on_product_group_change(self):
|
||||||
self._products_widget.set_enable_grouping(
|
self._products_widget.set_enable_grouping(
|
||||||
self._product_group_checkbox.isChecked()
|
self._product_group_checkbox.isChecked()
|
||||||
|
|
@ -485,6 +527,10 @@ class LoaderWindow(QtWidgets.QWidget):
|
||||||
if not self._refresh_handler.project_refreshed:
|
if not self._refresh_handler.project_refreshed:
|
||||||
self._projects_combobox.refresh()
|
self._projects_combobox.refresh()
|
||||||
self._update_filters()
|
self._update_filters()
|
||||||
|
# Update my tasks
|
||||||
|
self._on_my_tasks_checkbox_state_changed(
|
||||||
|
self._filters_widget.is_my_tasks_checked()
|
||||||
|
)
|
||||||
|
|
||||||
def _on_load_finished(self, event):
|
def _on_load_finished(self, event):
|
||||||
error_info = event["error_info"]
|
error_info = event["error_info"]
|
||||||
|
|
@ -494,6 +540,77 @@ class LoaderWindow(QtWidgets.QWidget):
|
||||||
box = LoadErrorMessageBox(error_info, self)
|
box = LoadErrorMessageBox(error_info, self)
|
||||||
box.show()
|
box.show()
|
||||||
|
|
||||||
|
def _on_loader_action_finished(self, event):
|
||||||
|
crashed = event["crashed"]
|
||||||
|
if crashed:
|
||||||
|
self._show_toast_message(
|
||||||
|
"Action failed",
|
||||||
|
success=False,
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
result: Optional[LoaderActionResult] = event["result"]
|
||||||
|
if result is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
if result.message:
|
||||||
|
self._show_toast_message(
|
||||||
|
result.message, result.success
|
||||||
|
)
|
||||||
|
|
||||||
|
if result.form is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
form = result.form
|
||||||
|
dialog = AttributeDefinitionsDialog(
|
||||||
|
form.fields,
|
||||||
|
title=form.title,
|
||||||
|
parent=self,
|
||||||
|
)
|
||||||
|
if result.form_values:
|
||||||
|
dialog.set_values(result.form_values)
|
||||||
|
submit_label = form.submit_label
|
||||||
|
submit_icon = form.submit_icon
|
||||||
|
cancel_label = form.cancel_label
|
||||||
|
cancel_icon = form.cancel_icon
|
||||||
|
|
||||||
|
if submit_icon:
|
||||||
|
submit_icon = get_qt_icon(submit_icon)
|
||||||
|
if cancel_icon:
|
||||||
|
cancel_icon = get_qt_icon(cancel_icon)
|
||||||
|
|
||||||
|
if submit_label:
|
||||||
|
dialog.set_submit_label(submit_label)
|
||||||
|
else:
|
||||||
|
dialog.set_submit_visible(False)
|
||||||
|
|
||||||
|
if submit_icon:
|
||||||
|
dialog.set_submit_icon(submit_icon)
|
||||||
|
|
||||||
|
if cancel_label:
|
||||||
|
dialog.set_cancel_label(cancel_label)
|
||||||
|
else:
|
||||||
|
dialog.set_cancel_visible(False)
|
||||||
|
|
||||||
|
if cancel_icon:
|
||||||
|
dialog.set_cancel_icon(cancel_icon)
|
||||||
|
|
||||||
|
dialog.setMinimumSize(300, 140)
|
||||||
|
result = dialog.exec_()
|
||||||
|
if result != QtWidgets.QDialog.Accepted:
|
||||||
|
return
|
||||||
|
|
||||||
|
form_values = dialog.get_values()
|
||||||
|
self._controller.trigger_action_item(
|
||||||
|
identifier=event["identifier"],
|
||||||
|
project_name=event["project_name"],
|
||||||
|
selected_ids=event["selected_ids"],
|
||||||
|
selected_entity_type=event["selected_entity_type"],
|
||||||
|
options={},
|
||||||
|
data=event["data"],
|
||||||
|
form_values=form_values,
|
||||||
|
)
|
||||||
|
|
||||||
def _on_project_selection_changed(self, event):
|
def _on_project_selection_changed(self, event):
|
||||||
self._selected_project_name = event["project_name"]
|
self._selected_project_name = event["project_name"]
|
||||||
self._update_filters()
|
self._update_filters()
|
||||||
|
|
|
||||||
|
|
@ -295,6 +295,21 @@ class AbstractPublisherFrontend(AbstractPublisherCommon):
|
||||||
"""Get folder id from folder path."""
|
"""Get folder id from folder path."""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def get_my_tasks_entity_ids(
|
||||||
|
self, project_name: str
|
||||||
|
) -> dict[str, list[str]]:
|
||||||
|
"""Get entity ids for my tasks.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
project_name (str): Project name.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict[str, list[str]]: Folder and task ids.
|
||||||
|
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
# --- Create ---
|
# --- Create ---
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def get_creator_items(self) -> Dict[str, "CreatorItem"]:
|
def get_creator_items(self) -> Dict[str, "CreatorItem"]:
|
||||||
|
|
|
||||||
|
|
@ -11,7 +11,11 @@ from ayon_core.pipeline import (
|
||||||
registered_host,
|
registered_host,
|
||||||
get_process_id,
|
get_process_id,
|
||||||
)
|
)
|
||||||
from ayon_core.tools.common_models import ProjectsModel, HierarchyModel
|
from ayon_core.tools.common_models import (
|
||||||
|
ProjectsModel,
|
||||||
|
HierarchyModel,
|
||||||
|
UsersModel,
|
||||||
|
)
|
||||||
|
|
||||||
from .models import (
|
from .models import (
|
||||||
PublishModel,
|
PublishModel,
|
||||||
|
|
@ -101,6 +105,7 @@ class PublisherController(
|
||||||
# Cacher of avalon documents
|
# Cacher of avalon documents
|
||||||
self._projects_model = ProjectsModel(self)
|
self._projects_model = ProjectsModel(self)
|
||||||
self._hierarchy_model = HierarchyModel(self)
|
self._hierarchy_model = HierarchyModel(self)
|
||||||
|
self._users_model = UsersModel(self)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def log(self):
|
def log(self):
|
||||||
|
|
@ -317,6 +322,17 @@ class PublisherController(
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
def get_my_tasks_entity_ids(
|
||||||
|
self, project_name: str
|
||||||
|
) -> dict[str, list[str]]:
|
||||||
|
username = self._users_model.get_current_username()
|
||||||
|
assignees = []
|
||||||
|
if username:
|
||||||
|
assignees.append(username)
|
||||||
|
return self._hierarchy_model.get_entity_ids_for_assignees(
|
||||||
|
project_name, assignees
|
||||||
|
)
|
||||||
|
|
||||||
# --- Publish specific callbacks ---
|
# --- Publish specific callbacks ---
|
||||||
def get_context_title(self):
|
def get_context_title(self):
|
||||||
"""Get context title for artist shown at the top of main window."""
|
"""Get context title for artist shown at the top of main window."""
|
||||||
|
|
@ -359,6 +375,7 @@ class PublisherController(
|
||||||
self._emit_event("controller.reset.started")
|
self._emit_event("controller.reset.started")
|
||||||
|
|
||||||
self._hierarchy_model.reset()
|
self._hierarchy_model.reset()
|
||||||
|
self._users_model.reset()
|
||||||
|
|
||||||
# Publish part must be reset after plugins
|
# Publish part must be reset after plugins
|
||||||
self._create_model.reset()
|
self._create_model.reset()
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,6 @@
|
||||||
import logging
|
import logging
|
||||||
import re
|
import re
|
||||||
|
import copy
|
||||||
from typing import (
|
from typing import (
|
||||||
Union,
|
Union,
|
||||||
List,
|
List,
|
||||||
|
|
@ -34,6 +35,7 @@ from ayon_core.pipeline.create import (
|
||||||
ConvertorsOperationFailed,
|
ConvertorsOperationFailed,
|
||||||
ConvertorItem,
|
ConvertorItem,
|
||||||
)
|
)
|
||||||
|
|
||||||
from ayon_core.tools.publisher.abstract import (
|
from ayon_core.tools.publisher.abstract import (
|
||||||
AbstractPublisherBackend,
|
AbstractPublisherBackend,
|
||||||
CardMessageTypes,
|
CardMessageTypes,
|
||||||
|
|
@ -1098,7 +1100,7 @@ class CreateModel:
|
||||||
creator_attributes[key] = attr_def.default
|
creator_attributes[key] = attr_def.default
|
||||||
|
|
||||||
elif attr_def.is_value_valid(value):
|
elif attr_def.is_value_valid(value):
|
||||||
creator_attributes[key] = value
|
creator_attributes[key] = copy.deepcopy(value)
|
||||||
|
|
||||||
def _set_instances_publish_attr_values(
|
def _set_instances_publish_attr_values(
|
||||||
self, instance_ids, plugin_name, key, value
|
self, instance_ids, plugin_name, key, value
|
||||||
|
|
|
||||||
|
|
@ -21,6 +21,7 @@ from ayon_core.pipeline.plugin_discover import DiscoverResult
|
||||||
from ayon_core.pipeline.publish import (
|
from ayon_core.pipeline.publish import (
|
||||||
get_publish_instance_label,
|
get_publish_instance_label,
|
||||||
PublishError,
|
PublishError,
|
||||||
|
filter_crashed_publish_paths,
|
||||||
)
|
)
|
||||||
from ayon_core.tools.publisher.abstract import AbstractPublisherBackend
|
from ayon_core.tools.publisher.abstract import AbstractPublisherBackend
|
||||||
|
|
||||||
|
|
@ -107,11 +108,14 @@ class PublishReportMaker:
|
||||||
creator_discover_result: Optional[DiscoverResult] = None,
|
creator_discover_result: Optional[DiscoverResult] = None,
|
||||||
convertor_discover_result: Optional[DiscoverResult] = None,
|
convertor_discover_result: Optional[DiscoverResult] = None,
|
||||||
publish_discover_result: Optional[DiscoverResult] = None,
|
publish_discover_result: Optional[DiscoverResult] = None,
|
||||||
|
blocking_crashed_paths: Optional[list[str]] = None,
|
||||||
):
|
):
|
||||||
self._create_discover_result: Union[DiscoverResult, None] = None
|
self._create_discover_result: Union[DiscoverResult, None] = None
|
||||||
self._convert_discover_result: Union[DiscoverResult, None] = None
|
self._convert_discover_result: Union[DiscoverResult, None] = None
|
||||||
self._publish_discover_result: Union[DiscoverResult, None] = None
|
self._publish_discover_result: Union[DiscoverResult, None] = None
|
||||||
|
|
||||||
|
self._blocking_crashed_paths: list[str] = []
|
||||||
|
|
||||||
self._all_instances_by_id: Dict[str, pyblish.api.Instance] = {}
|
self._all_instances_by_id: Dict[str, pyblish.api.Instance] = {}
|
||||||
self._plugin_data_by_id: Dict[str, Any] = {}
|
self._plugin_data_by_id: Dict[str, Any] = {}
|
||||||
self._current_plugin_id: Optional[str] = None
|
self._current_plugin_id: Optional[str] = None
|
||||||
|
|
@ -120,6 +124,7 @@ class PublishReportMaker:
|
||||||
creator_discover_result,
|
creator_discover_result,
|
||||||
convertor_discover_result,
|
convertor_discover_result,
|
||||||
publish_discover_result,
|
publish_discover_result,
|
||||||
|
blocking_crashed_paths,
|
||||||
)
|
)
|
||||||
|
|
||||||
def reset(
|
def reset(
|
||||||
|
|
@ -127,12 +132,14 @@ class PublishReportMaker:
|
||||||
creator_discover_result: Union[DiscoverResult, None],
|
creator_discover_result: Union[DiscoverResult, None],
|
||||||
convertor_discover_result: Union[DiscoverResult, None],
|
convertor_discover_result: Union[DiscoverResult, None],
|
||||||
publish_discover_result: Union[DiscoverResult, None],
|
publish_discover_result: Union[DiscoverResult, None],
|
||||||
|
blocking_crashed_paths: list[str],
|
||||||
):
|
):
|
||||||
"""Reset report and clear all data."""
|
"""Reset report and clear all data."""
|
||||||
|
|
||||||
self._create_discover_result = creator_discover_result
|
self._create_discover_result = creator_discover_result
|
||||||
self._convert_discover_result = convertor_discover_result
|
self._convert_discover_result = convertor_discover_result
|
||||||
self._publish_discover_result = publish_discover_result
|
self._publish_discover_result = publish_discover_result
|
||||||
|
self._blocking_crashed_paths = blocking_crashed_paths
|
||||||
|
|
||||||
self._all_instances_by_id = {}
|
self._all_instances_by_id = {}
|
||||||
self._plugin_data_by_id = {}
|
self._plugin_data_by_id = {}
|
||||||
|
|
@ -242,9 +249,10 @@ class PublishReportMaker:
|
||||||
"instances": instances_details,
|
"instances": instances_details,
|
||||||
"context": self._extract_context_data(publish_context),
|
"context": self._extract_context_data(publish_context),
|
||||||
"crashed_file_paths": crashed_file_paths,
|
"crashed_file_paths": crashed_file_paths,
|
||||||
|
"blocking_crashed_paths": list(self._blocking_crashed_paths),
|
||||||
"id": uuid.uuid4().hex,
|
"id": uuid.uuid4().hex,
|
||||||
"created_at": now.isoformat(),
|
"created_at": now.isoformat(),
|
||||||
"report_version": "1.1.0",
|
"report_version": "1.1.1",
|
||||||
}
|
}
|
||||||
|
|
||||||
def _add_plugin_data_item(self, plugin: pyblish.api.Plugin):
|
def _add_plugin_data_item(self, plugin: pyblish.api.Plugin):
|
||||||
|
|
@ -959,11 +967,16 @@ class PublishModel:
|
||||||
self._publish_plugins_proxy = PublishPluginsProxy(
|
self._publish_plugins_proxy = PublishPluginsProxy(
|
||||||
publish_plugins
|
publish_plugins
|
||||||
)
|
)
|
||||||
|
blocking_crashed_paths = filter_crashed_publish_paths(
|
||||||
|
create_context.get_current_project_name(),
|
||||||
|
set(create_context.publish_discover_result.crashed_file_paths),
|
||||||
|
project_settings=create_context.get_current_project_settings(),
|
||||||
|
)
|
||||||
self._publish_report.reset(
|
self._publish_report.reset(
|
||||||
create_context.creator_discover_result,
|
create_context.creator_discover_result,
|
||||||
create_context.convertor_discover_result,
|
create_context.convertor_discover_result,
|
||||||
create_context.publish_discover_result,
|
create_context.publish_discover_result,
|
||||||
|
blocking_crashed_paths,
|
||||||
)
|
)
|
||||||
for plugin in create_context.publish_plugins_mismatch_targets:
|
for plugin in create_context.publish_plugins_mismatch_targets:
|
||||||
self._publish_report.set_plugin_skipped(plugin.id)
|
self._publish_report.set_plugin_skipped(plugin.id)
|
||||||
|
|
|
||||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue