Compare commits: temp-inter...geometry-n
1939 Commits
.clang-format

@@ -255,6 +255,7 @@ ForEachMacros:
   - SCULPT_VERTEX_DUPLICATES_AND_NEIGHBORS_ITER_BEGIN
   - SCULPT_VERTEX_NEIGHBORS_ITER_BEGIN
   - SEQ_ALL_BEGIN
+  - SEQ_ITERATOR_FOREACH
   - SURFACE_QUAD_ITER_BEGIN
   - foreach
   - ED_screen_areas_iter
@@ -264,4 +265,5 @@ ForEachMacros:
   - VECTOR_SET_SLOT_PROBING_BEGIN

 StatementMacros:
+  - PyObject_HEAD
   - PyObject_VAR_HEAD
CMakeLists.txt

@@ -349,7 +349,7 @@ mark_as_advanced(WITH_SYSTEM_GLOG)
 option(WITH_FREESTYLE "Enable Freestyle (advanced edges rendering)" ON)

 # Misc
-if(WIN32)
+if(WIN32 OR APPLE)
   option(WITH_INPUT_IME "Enable Input Method Editor (IME) for complex Asian character input" ON)
 endif()
 option(WITH_INPUT_NDOF "Enable NDOF input devices (SpaceNavigator and friends)" ON)
@@ -836,7 +836,7 @@ if(WITH_PYTHON)
   # because UNIX will search for the old Python paths which may not exist.
   # giving errors about missing paths before this case is met.
   if(DEFINED PYTHON_VERSION AND "${PYTHON_VERSION}" VERSION_LESS "3.9")
-    message(FATAL_ERROR "At least Python 3.9 is required to build")
+    message(FATAL_ERROR "At least Python 3.9 is required to build, but found Python ${PYTHON_VERSION}")
   endif()

   file(GLOB RESULT "${CMAKE_SOURCE_DIR}/release/scripts/addons")
@@ -1705,22 +1705,18 @@ if(WITH_PYTHON)
   endif()
 endif()

-if(MSVC)
-  string(APPEND CMAKE_CXX_FLAGS " /std:c++17")
-  # Make MSVC properly report the value of the __cplusplus preprocessor macro
-  # Available MSVC 15.7 (1914) and up, without this it reports 199711L regardless
-  # of the C++ standard chosen above
-  if(MSVC_VERSION GREATER 1913)
-    string(APPEND CMAKE_CXX_FLAGS " /Zc:__cplusplus")
-  endif()
-elseif(
-  CMAKE_COMPILER_IS_GNUCC OR
-  CMAKE_C_COMPILER_ID MATCHES "Clang" OR
-  CMAKE_C_COMPILER_ID MATCHES "Intel"
-)
-  string(APPEND CMAKE_CXX_FLAGS " -std=c++17")
-else()
-  message(FATAL_ERROR "Unknown compiler ${CMAKE_C_COMPILER_ID}, can't enable C++17 build")
+# Select C++17 as the standard for C++ projects.
+set(CMAKE_CXX_STANDARD 17)
+# If C++17 is not available, downgrading to an earlier standard is NOT OK.
+set(CMAKE_CXX_STANDARD_REQUIRED ON)
+# Do not enable compiler specific language extentions.
+set(CMAKE_CXX_EXTENSIONS OFF)
+
+# Make MSVC properly report the value of the __cplusplus preprocessor macro
+# Available MSVC 15.7 (1914) and up, without this it reports 199711L regardless
+# of the C++ standard chosen above.
+if(MSVC AND MSVC_VERSION GREATER 1913)
+  string(APPEND CMAKE_CXX_FLAGS " /Zc:__cplusplus")
 endif()

 # Visual Studio has all standards it supports available by default
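For readers less familiar with CMake, the hunk above replaces hand-written, per-compiler C++17 flags with CMake's built-in standard selection. The following is a minimal standalone sketch of that mechanism, not code from the Blender tree; the project name, target name, and source file are placeholders.

```cmake
cmake_minimum_required(VERSION 3.10)
project(standard_demo CXX)

# Directory-scope defaults, as the hunk above sets them for the whole tree:
set(CMAKE_CXX_STANDARD 17)          # request C++17
set(CMAKE_CXX_STANDARD_REQUIRED ON) # hard error instead of a silent fallback
set(CMAKE_CXX_EXTENSIONS OFF)       # -std=c++17 rather than -std=gnu++17

add_executable(demo main.cc)

# Per-target alternative: express the requirement as a compile feature.
target_compile_features(demo PUBLIC cxx_std_17)
```

With `CMAKE_CXX_STANDARD_REQUIRED` set to `ON`, configuration fails outright on a compiler that cannot provide C++17, which is the behaviour the removed `else()` branch emulated with its `FATAL_ERROR` message; CMake also picks the correct flag spelling per compiler, so the `MSVC`/`GNUCC`/`Clang`/`Intel` branching is no longer needed.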
@@ -1915,6 +1911,7 @@ if(FIRST_RUN)
   info_cfg_option(WITH_IK_ITASC)
   info_cfg_option(WITH_IK_SOLVER)
   info_cfg_option(WITH_INPUT_NDOF)
+  info_cfg_option(WITH_INPUT_IME)
   info_cfg_option(WITH_INTERNATIONAL)
   info_cfg_option(WITH_OPENCOLLADA)
   info_cfg_option(WITH_OPENCOLORIO)
build_files/build_environment/CMakeLists.txt

@@ -56,6 +56,7 @@ else()
 endif()

 include(cmake/zlib.cmake)
+include(cmake/zstd.cmake)
 include(cmake/openal.cmake)
 include(cmake/png.cmake)
 include(cmake/jpeg.cmake)
@@ -164,6 +165,7 @@ endif()
 if(UNIX AND NOT APPLE)
   include(cmake/libglu.cmake)
   include(cmake/mesa.cmake)
+  include(cmake/wayland_protocols.cmake)
 endif()

 include(cmake/harvest.cmake)
build_files/build_environment/cmake/blosc.cmake

@@ -29,7 +29,7 @@ set(BLOSC_EXTRA_ARGS
   -DCMAKE_POSITION_INDEPENDENT_CODE=ON
 )

-# Prevent blosc from including it's own local copy of zlib in the object file
+# Prevent blosc from including its own local copy of zlib in the object file
 # and cause linker errors with everybody else.
 set(BLOSC_EXTRA_ARGS ${BLOSC_EXTRA_ARGS}
   -DPREFER_EXTERNAL_ZLIB=ON
build_files/build_environment/cmake/download.cmake

@@ -87,7 +87,9 @@ download_source(LIBGLU)
 download_source(MESA)
 download_source(NASM)
 download_source(XR_OPENXR_SDK)
+download_source(WL_PROTOCOLS)
 download_source(ISPC)
 download_source(GMP)
 download_source(POTRACE)
 download_source(HARU)
+download_source(ZSTD)
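The two new `download_source()` calls rely on the `WL_PROTOCOLS_*` and `ZSTD_*` variables defined in versions.cmake further down. As a rough illustration of how such a `<DEP>_URI` / `<DEP>_HASH` / `<DEP>_FILE` triplet is typically consumed (this sketch is not Blender's actual `download_source()` macro, and `demo_download_source` is a made-up name):

```cmake
# Hypothetical sketch: fetch ${<name>_URI} into PACKAGE_DIR as ${<name>_FILE}
# and verify it against ${<name>_HASH}/${<name>_HASH_TYPE}, mirroring the
# variable naming convention used by versions.cmake.
function(demo_download_source name)
  set(_file "${PACKAGE_DIR}/${${name}_FILE}")
  if(NOT EXISTS "${_file}")
    file(DOWNLOAD "${${name}_URI}" "${_file}"
      EXPECTED_HASH ${${name}_HASH_TYPE}=${${name}_HASH}
      SHOW_PROGRESS)
  endif()
endfunction()

demo_download_source(WL_PROTOCOLS)
demo_download_source(ZSTD)
```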
build_files/build_environment/cmake/embree.cmake

@@ -43,6 +43,12 @@ endif()

 if(WIN32)
   set(EMBREE_BUILD_DIR ${BUILD_MODE}/)
+  if(BUILD_MODE STREQUAL Debug)
+    list(APPEND EMBREE_EXTRA_ARGS
+      -DEMBREE_TBBMALLOC_LIBRARY_NAME=tbbmalloc_debug
+      -DEMBREE_TBB_LIBRARY_NAME=tbb_debug
+    )
+  endif()
 else()
   set(EMBREE_BUILD_DIR)
 endif()
build_files/build_environment/cmake/harvest.cmake

@@ -126,6 +126,8 @@ if(UNIX AND NOT APPLE)

   harvest(xml2/include xml2/include "*.h")
   harvest(xml2/lib xml2/lib "*.a")
+
+  harvest(wayland-protocols/share/wayland-protocols wayland-protocols/share/wayland-protocols/ "*.xml")
 else()
   harvest(blosc/lib openvdb/lib "*.a")
   harvest(xml2/lib opencollada/lib "*.a")
@@ -190,6 +192,8 @@ harvest(potrace/include potrace/include "*.h")
 harvest(potrace/lib potrace/lib "*.a")
 harvest(haru/include haru/include "*.h")
 harvest(haru/lib haru/lib "*.a")
+harvest(zstd/include zstd/include "*.h")
+harvest(zstd/lib zstd/lib "*.a")

 if(UNIX AND NOT APPLE)
   harvest(libglu/lib mesa/lib "*.so*")
build_files/build_environment/cmake/tbb.cmake

@@ -22,6 +22,7 @@ if(WIN32)
     -DTBB_BUILD_TBBMALLOC_PROXY=On
     -DTBB_BUILD_STATIC=Off
     -DTBB_BUILD_TESTS=Off
+    -DCMAKE_DEBUG_POSTFIX=_debug
   )
   set(TBB_LIBRARY tbb)
   set(TBB_STATIC_LIBRARY Off)
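The new `-DCMAKE_DEBUG_POSTFIX=_debug` argument makes TBB's own CMake build name its Debug artifacts `tbb_debug`, `tbbmalloc_debug`, and so on, which the copy steps in the hunks below start to rely on. A minimal sketch of what that variable does in any CMake project (the library name and source file are placeholders):

```cmake
cmake_minimum_required(VERSION 3.10)
project(postfix_demo CXX)

# Debug builds of non-executable targets get this suffix appended to the
# output file name, so Release and Debug artifacts can coexist
# (e.g. mylib.lib vs. mylib_debug.lib in a multi-config Windows build).
set(CMAKE_DEBUG_POSTFIX _debug)

add_library(mylib STATIC mylib.cc)

# add_library() initializes the target's DEBUG_POSTFIX property from the
# variable above; it can also be set explicitly per target:
set_target_properties(mylib PROPERTIES DEBUG_POSTFIX "${CMAKE_DEBUG_POSTFIX}")
```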
@@ -55,17 +56,17 @@ if(WIN32)
   ExternalProject_Add_Step(external_tbb after_install
     # findtbb.cmake in some deps *NEEDS* to find tbb_debug.lib even if they are not going to use it
     # to make that test pass, we place a copy with the right name in the lib folder.
-    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb.lib ${HARVEST_TARGET}/tbb/lib/tbb_debug.lib
-    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc_debug.lib
-    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb.dll ${HARVEST_TARGET}/tbb/lib/tbb_debug.dll
-    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.dll ${HARVEST_TARGET}/tbb/lib/tbbmalloc_debug.dll
+    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb.lib ${LIBDIR}/tbb/lib/tbb_debug.lib
+    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.lib ${LIBDIR}/tbb/lib/tbbmalloc_debug.lib
+    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbb.dll ${LIBDIR}/tbb/bin/tbb_debug.dll
+    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbbmalloc.dll ${LIBDIR}/tbb/bin/tbbmalloc_debug.dll
     # Normal collection of build artifacts
     COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb.lib ${HARVEST_TARGET}/tbb/lib/tbb.lib
-    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb.dll ${HARVEST_TARGET}/tbb/lib/tbb.dll
+    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbb.dll ${HARVEST_TARGET}/tbb/bin/tbb.dll
     COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc.lib
-    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.dll ${HARVEST_TARGET}/tbb/lib/tbbmalloc.dll
+    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbbmalloc.dll ${HARVEST_TARGET}/tbb/bin/tbbmalloc.dll
     COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_proxy.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc_proxy.lib
-    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_proxy.dll ${HARVEST_TARGET}/tbb/lib/tbbmalloc_proxy.dll
+    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbbmalloc_proxy.dll ${HARVEST_TARGET}/tbb/bin/tbbmalloc_proxy.dll
     COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/tbb/include/ ${HARVEST_TARGET}/tbb/include/
     DEPENDEES install
   )
@@ -76,11 +77,12 @@ if(WIN32)
     # to make that test pass, we place a copy with the right name in the lib folder.
     COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb_debug.lib ${LIBDIR}/tbb/lib/tbb.lib
     # Normal collection of build artifacts
-    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb_debug.lib ${HARVEST_TARGET}/tbb/lib/debug/tbb_debug.lib
-    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb_debug.dll ${HARVEST_TARGET}/tbb/lib/debug/tbb_debug.dll
-    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_proxy.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc_proxy_debug.lib
-    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc.dll ${HARVEST_TARGET}/tbb/lib/debug/tbbmalloc.dll
-    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_proxy.dll ${HARVEST_TARGET}/tbb/lib/debug/tbbmalloc_proxy.dll
+    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbb_debug.lib ${HARVEST_TARGET}/tbb/lib/tbb_debug.lib
+    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbb_debug.dll ${HARVEST_TARGET}/tbb/bin/tbb_debug.dll
+    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_debug.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc_debug.lib
+    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/lib/tbbmalloc_proxy_debug.lib ${HARVEST_TARGET}/tbb/lib/tbbmalloc_proxy_debug.lib
+    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbbmalloc_debug.dll ${HARVEST_TARGET}/tbb/bin/tbbmalloc_debug.dll
+    COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/tbb/bin/tbbmalloc_proxy_debug.dll ${HARVEST_TARGET}/tbb/bin/tbbmalloc_proxy_debug.dll
     DEPENDEES install
   )
 endif()
build_files/build_environment/cmake/versions.cmake

@@ -43,7 +43,7 @@ set(JPEG_FILE libjpeg-turbo-${JPEG_VERSION}.tar.gz)
 set(BOOST_VERSION 1.73.0)
 set(BOOST_VERSION_NODOTS 1_73_0)
 set(BOOST_VERSION_NODOTS_SHORT 1_73)
-set(BOOST_URI https://dl.bintray.com/boostorg/release/${BOOST_VERSION}/source/boost_${BOOST_VERSION_NODOTS}.tar.gz)
+set(BOOST_URI https://boostorg.jfrog.io/artifactory/main/release/${BOOST_VERSION}/source/boost_${BOOST_VERSION_NODOTS}.tar.gz)
 set(BOOST_HASH 4036cd27ef7548b8d29c30ea10956196)
 set(BOOST_HASH_TYPE MD5)
 set(BOOST_FILE boost_${BOOST_VERSION_NODOTS}.tar.gz)
@@ -216,9 +216,9 @@ set(OPENVDB_HASH 01b490be16cc0e15c690f9a153c21461)
 set(OPENVDB_HASH_TYPE MD5)
 set(OPENVDB_FILE openvdb-${OPENVDB_VERSION}.tar.gz)

-set(NANOVDB_GIT_UID e62f7a0bf1e27397223c61ddeaaf57edf111b77f)
+set(NANOVDB_GIT_UID dc37d8a631922e7bef46712947dc19b755f3e841)
 set(NANOVDB_URI https://github.com/AcademySoftwareFoundation/openvdb/archive/${NANOVDB_GIT_UID}.tar.gz)
-set(NANOVDB_HASH 90919510bc6ccd630fedc56f748cb199)
+set(NANOVDB_HASH e7b9e863ec2f3b04ead171dec2322807)
 set(NANOVDB_HASH_TYPE MD5)
 set(NANOVDB_FILE nano-vdb-${NANOVDB_GIT_UID}.tar.gz)

@@ -297,10 +297,10 @@ set(OPENJPEG_HASH 63f5a4713ecafc86de51bfad89cc07bb788e9bba24ebbf0c4ca637621aadb6
 set(OPENJPEG_HASH_TYPE SHA256)
 set(OPENJPEG_FILE openjpeg-v${OPENJPEG_VERSION}.tar.gz)

-set(FFMPEG_VERSION 4.2.3)
+set(FFMPEG_VERSION 4.4)
 set(FFMPEG_URI http://ffmpeg.org/releases/ffmpeg-${FFMPEG_VERSION}.tar.bz2)
-set(FFMPEG_HASH 695fad11f3baf27784e24cb0e977b65a)
-set(FFMPEG_HASH_TYPE MD5)
+set(FFMPEG_HASH 42093549751b582cf0f338a21a3664f52e0a9fbe0d238d3c992005e493607d0e)
+set(FFMPEG_HASH_TYPE SHA256)
 set(FFMPEG_FILE ffmpeg-${FFMPEG_VERSION}.tar.bz2)

 set(FFTW_VERSION 3.3.8)
@@ -432,9 +432,9 @@ set(USD_HASH 1dd1e2092d085ed393c1f7c450a4155a)
 set(USD_HASH_TYPE MD5)
 set(USD_FILE usd-v${USD_VERSION}.tar.gz)

-set(OIDN_VERSION 1.3.0)
+set(OIDN_VERSION 1.4.0)
 set(OIDN_URI https://github.com/OpenImageDenoise/oidn/releases/download/v${OIDN_VERSION}/oidn-${OIDN_VERSION}.src.tar.gz)
-set(OIDN_HASH 301a5a0958d375a942014df0679b9270)
+set(OIDN_HASH 421824019becc5b664a22a2b98332bc5)
 set(OIDN_HASH_TYPE MD5)
 set(OIDN_FILE oidn-${OIDN_VERSION}.src.tar.gz)

@@ -456,12 +456,18 @@ set(NASM_HASH aded8b796c996a486a56e0515c83e414116decc3b184d88043480b32eb0a8589)
 set(NASM_HASH_TYPE SHA256)
 set(NASM_FILE nasm-${NASM_VERSION}.tar.gz)

-set(XR_OPENXR_SDK_VERSION 1.0.14)
+set(XR_OPENXR_SDK_VERSION 1.0.17)
 set(XR_OPENXR_SDK_URI https://github.com/KhronosGroup/OpenXR-SDK/archive/release-${XR_OPENXR_SDK_VERSION}.tar.gz)
-set(XR_OPENXR_SDK_HASH 0df6b2fd6045423451a77ff6bc3e1a75)
+set(XR_OPENXR_SDK_HASH bf0fd8828837edff01047474e90013e1)
 set(XR_OPENXR_SDK_HASH_TYPE MD5)
 set(XR_OPENXR_SDK_FILE OpenXR-SDK-${XR_OPENXR_SDK_VERSION}.tar.gz)

+set(WL_PROTOCOLS_VERSION 1.21)
+set(WL_PROTOCOLS_FILE wayland-protocols-${WL_PROTOCOLS_VERSION}.tar.gz)
+set(WL_PROTOCOLS_URI https://gitlab.freedesktop.org/wayland/wayland-protocols/-/archive/${WL_PROTOCOLS_VERSION}/${WL_PROTOCOLS_FILE})
+set(WL_PROTOCOLS_HASH af5ca07e13517cdbab33504492cef54a)
+set(WL_PROTOCOLS_HASH_TYPE MD5)
+
 if(BLENDER_PLATFORM_ARM)
   # Unreleased version with macOS arm support.
   set(ISPC_URI https://github.com/ispc/ispc/archive/f5949c055eb9eeb93696978a3da4bfb3a6a30b35.zip)
@@ -494,5 +500,11 @@ set(HARU_HASH 4f916aa49c3069b3a10850013c507460)
 set(HARU_HASH_TYPE MD5)
 set(HARU_FILE libharu-${HARU_VERSION}.tar.gz)

+set(ZSTD_VERSION 1.5.0)
+set(ZSTD_URI https://github.com/facebook/zstd/releases/download/v${ZSTD_VERSION}/zstd-${ZSTD_VERSION}.tar.gz)
+set(ZSTD_HASH 5194fbfa781fcf45b98c5e849651aa7b3b0a008c6b72d4a0db760f3002291e94)
+set(ZSTD_HASH_TYPE SHA256)
+set(ZSTD_FILE zstd-${ZSTD_VERSION}.tar.gz)
+
 set(SSE2NEON_GIT https://github.com/DLTcollab/sse2neon.git)
 set(SSE2NEON_GIT_HASH fe5ff00bb8d19b327714a3c290f3e2ce81ba3525)
build_files/build_environment/cmake/wayland_protocols.cmake (new file, 27 lines)

@@ -0,0 +1,27 @@
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ***** END GPL LICENSE BLOCK *****
+
+ExternalProject_Add(external_wayland_protocols
+  URL file://${PACKAGE_DIR}/${WL_PROTOCOLS_FILE}
+  DOWNLOAD_DIR ${DOWNLOAD_DIR}
+  URL_HASH ${WL_PROTOCOLS_HASH_TYPE}=${WL_PROTOCOLS_HASH}
+  PREFIX ${BUILD_DIR}/wayland-protocols
+  CONFIGURE_COMMAND meson --prefix ${LIBDIR}/wayland-protocols . ../external_wayland_protocols -Dtests=false
+  BUILD_COMMAND ninja
+  INSTALL_COMMAND ninja install
+)
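wayland-protocols installs protocol XML descriptions plus a pkg-config file; consumers generate C headers and glue code from those XML files with `wayland-scanner`. A hedged sketch of how a CMake project might locate the installed data directory and generate one header (the output path and the chosen protocol are illustrative only, and this is not how Blender's own windowing code is wired up):

```cmake
# Sketch: query pkg-config for the wayland-protocols data directory, then
# generate a client header for the xdg-shell protocol with wayland-scanner.
find_package(PkgConfig REQUIRED)
pkg_check_modules(WAYLAND_PROTOCOLS REQUIRED wayland-protocols>=1.21)
pkg_get_variable(WAYLAND_PROTOCOLS_DIR wayland-protocols pkgdatadir)

find_program(WAYLAND_SCANNER wayland-scanner)

add_custom_command(
  OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/xdg-shell-client-protocol.h
  COMMAND ${WAYLAND_SCANNER} client-header
          ${WAYLAND_PROTOCOLS_DIR}/stable/xdg-shell/xdg-shell.xml
          ${CMAKE_CURRENT_BINARY_DIR}/xdg-shell-client-protocol.h
  VERBATIM)
```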
build_files/build_environment/cmake/zstd.cmake (new file, 51 lines)

@@ -0,0 +1,51 @@
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ***** END GPL LICENSE BLOCK *****
+
+set(ZSTD_EXTRA_ARGS
+  -DZSTD_BUILD_PROGRAMS=OFF
+  -DZSTD_BUILD_SHARED=OFF
+  -DZSTD_BUILD_STATIC=ON
+  -DZSTD_BUILD_TESTS=OFF
+  -DZSTD_LEGACY_SUPPORT=OFF
+  -DZSTD_LZ4_SUPPORT=OFF
+  -DZSTD_LZMA_SUPPORT=OFF
+  -DZSTD_MULTITHREAD_SUPPORT=ON
+  -DZSTD_PROGRAMS_LINK_SHARED=OFF
+  -DZSTD_USE_STATIC_RUNTIME=OFF
+  -DZSTD_ZLIB_SUPPORT=OFF
+)
+
+ExternalProject_Add(external_zstd
+  URL file://${PACKAGE_DIR}/${ZSTD_FILE}
+  DOWNLOAD_DIR ${DOWNLOAD_DIR}
+  URL_HASH ${ZSTD_HASH_TYPE}=${ZSTD_HASH}
+  PREFIX ${BUILD_DIR}/zstd
+  SOURCE_SUBDIR build/cmake
+  CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${LIBDIR}/zstd ${DEFAULT_CMAKE_FLAGS} ${ZSTD_EXTRA_ARGS}
+  INSTALL_DIR ${LIBDIR}/zstd
+)
+
+if(WIN32)
+  if(BUILD_MODE STREQUAL Release)
+    ExternalProject_Add_Step(external_zstd after_install
+      COMMAND ${CMAKE_COMMAND} -E copy ${LIBDIR}/zstd/lib/zstd_static${LIBEXT} ${HARVEST_TARGET}/zstd/lib/zstd_static${LIBEXT}
+      COMMAND ${CMAKE_COMMAND} -E copy_directory ${LIBDIR}/zstd/include/ ${HARVEST_TARGET}/zstd/include/
+      DEPENDEES install
+    )
+  endif()
+endif()
@@ -37,7 +37,7 @@ if [ $USE_DEBUG_TRAP -ne 0 ]; then
  trap 'err_report $LINENO' ERR
fi

-# Noisy, show every line that runs with it's line number.
+# Noisy, show every line that runs with its line number.
if [ $USE_DEBUG_LOG -ne 0 ]; then
  PS4='\e[0;33m$(printf %4d ${LINENO}):\e\033[0m '
  set -x
@@ -553,18 +553,18 @@ EMBREE_FORCE_BUILD=false
EMBREE_FORCE_REBUILD=false
EMBREE_SKIP=false

-OIDN_VERSION="1.3.0"
-OIDN_VERSION_SHORT="1.3"
-OIDN_VERSION_MIN="1.3.0"
-OIDN_VERSION_MAX="1.4"
+OIDN_VERSION="1.4.0"
+OIDN_VERSION_SHORT="1.4"
+OIDN_VERSION_MIN="1.4.0"
+OIDN_VERSION_MAX="1.5"
OIDN_FORCE_BUILD=false
OIDN_FORCE_REBUILD=false
OIDN_SKIP=false

ISPC_VERSION="1.14.1"

-FFMPEG_VERSION="4.2.3"
-FFMPEG_VERSION_SHORT="4.2"
+FFMPEG_VERSION="4.4"
+FFMPEG_VERSION_SHORT="4.4"
FFMPEG_VERSION_MIN="3.0"
FFMPEG_VERSION_MAX="5.0"
FFMPEG_FORCE_BUILD=false
@@ -572,7 +572,7 @@ FFMPEG_FORCE_REBUILD=false
FFMPEG_SKIP=false
_ffmpeg_list_sep=";"

-XR_OPENXR_VERSION="1.0.14"
+XR_OPENXR_VERSION="1.0.17"
XR_OPENXR_VERSION_SHORT="1.0"
XR_OPENXR_VERSION_MIN="1.0.8"
XR_OPENXR_VERSION_MAX="2.0"
@@ -1073,7 +1073,7 @@ OPENVDB_SOURCE=( "https://github.com/AcademySoftwareFoundation/openvdb/archive/v
#~ OPENVDB_SOURCE_REPO_BRANCH="dev"

NANOVDB_USE_REPO=false
-NANOVDB_SOURCE_REPO_UID="e62f7a0bf1e27397223c61ddeaaf57edf111b77f"
+NANOVDB_SOURCE_REPO_UID="dc37d8a631922e7bef46712947dc19b755f3e841"
NANOVDB_SOURCE=( "https://github.com/AcademySoftwareFoundation/openvdb/archive/${NANOVDB_SOURCE_REPO_UID}.tar.gz" )

ALEMBIC_USE_REPO=false
@@ -1108,9 +1108,9 @@ FFMPEG_SOURCE=( "http://ffmpeg.org/releases/ffmpeg-$FFMPEG_VERSION.tar.bz2" )

XR_OPENXR_USE_REPO=false
XR_OPENXR_SOURCE=("https://github.com/KhronosGroup/OpenXR-SDK/archive/release-${XR_OPENXR_VERSION}.tar.gz")
-#~ XR_OPENXR_SOURCE_REPO=("https://github.com/KhronosGroup/OpenXR-SDK.git")
-#~ XR_OPENXR_REPO_UID="5900c51562769b03bea699dc0352cae56acb6419d"
-#~ XR_OPENXR_REPO_BRANCH="master"
+XR_OPENXR_SOURCE_REPO=("https://github.com/KhronosGroup/OpenXR-SDK.git")
+XR_OPENXR_REPO_UID="bf21ccb1007bb531b45d9978919a56ea5059c245"
+XR_OPENXR_REPO_BRANCH="master"

# C++11 is required now
CXXFLAGS_BACK=$CXXFLAGS
@@ -1128,6 +1128,7 @@ Those libraries should be available as packages in all recent distributions (opt
* Basics of dev environment (cmake, gcc, svn , git, ...).
* libjpeg, libpng, libtiff, [openjpeg2], [libopenal].
* libx11, libxcursor, libxi, libxrandr, libxinerama (and other libx... as needed).
+* libwayland-client0, libwayland-cursor0, libwayland-egl1, libxkbcommon0, libdbus-1-3, libegl1 (Wayland)
* libsqlite3, libbz2, libssl, libfftw3, libxml2, libtinyxml, yasm, libyaml-cpp.
* libsdl2, libglew, libpugixml, libpotrace, [libgmp], [libglewmx], fontconfig, [libharu/libhpdf].\""
@@ -2737,7 +2738,7 @@ _init_openvdb() {
  _git=false
  _inst=$INST/openvdb-$OPENVDB_VERSION_SHORT
  _inst_shortcut=$INST/openvdb

  _openvdb_source=$OPENVDB_SOURCE
  if [ "$WITH_NANOVDB" = true ]; then
    _openvdb_source=$NANOVDB_SOURCE

@@ -2842,7 +2843,7 @@ compile_OPENVDB() {
  if [ -d $INST/blosc ]; then
    cmake_d="$cmake_d -D Blosc_ROOT=$INST/blosc"
  fi

  cmake $cmake_d ..

  make -j$THREADS install
@@ -3839,6 +3840,7 @@ install_DEB() {
  _packages="gawk cmake cmake-curses-gui build-essential libjpeg-dev libpng-dev libtiff-dev \
             git libfreetype6-dev libfontconfig-dev libx11-dev flex bison libxxf86vm-dev \
             libxcursor-dev libxi-dev wget libsqlite3-dev libxrandr-dev libxinerama-dev \
+            libwayland-dev wayland-protocols libegl-dev libxkbcommon-dev libdbus-1-dev linux-libc-dev \
             libbz2-dev libncurses5-dev libssl-dev liblzma-dev libreadline-dev \
             libopenal-dev libglew-dev yasm $THEORA_DEV $VORBIS_DEV $OGG_DEV \
             libsdl2-dev libfftw3-dev patch bzip2 libxml2-dev libtinyxml-dev libjemalloc-dev \
@@ -4508,6 +4510,7 @@ install_RPM() {
  _packages="gcc gcc-c++ git make cmake tar bzip2 xz findutils flex bison fontconfig-devel \
             libtiff-devel libjpeg-devel libpng-devel sqlite-devel fftw-devel SDL2-devel \
             libX11-devel libXi-devel libXcursor-devel libXrandr-devel libXinerama-devel \
+            wayland-devel wayland-protocols-devel mesa-libEGL-devel libxkbcommon-devel dbus-devel kernel-headers \
             wget ncurses-devel readline-devel $OPENJPEG_DEV openal-soft-devel \
             glew-devel yasm $THEORA_DEV $VORBIS_DEV $OGG_DEV patch \
             libxml2-devel yaml-cpp-devel tinyxml-devel jemalloc-devel \
@@ -20,7 +20,7 @@
# ILMBASE_LIBRARIES - list of libraries to link against when using IlmBase.
# ILMBASE_FOUND - True if IlmBase was found.

-# Other standarnd issue macros
+# Other standard issue macros
include(FindPackageHandleStandardArgs)
include(FindPackageMessage)
include(SelectLibraryConfigurations)
@@ -22,7 +22,7 @@
# These are defined by the FindIlmBase module.
# OPENEXR_FOUND - True if OpenEXR was found.

-# Other standarnd issue macros
+# Other standard issue macros
include(SelectLibraryConfigurations)
include(FindPackageHandleStandardArgs)
include(FindPackageMessage)
@@ -68,3 +68,32 @@
+
     return ret;
 }
--- a/libavcodec/rl.c
+++ b/libavcodec/rl.c
@@ -71,7 +71,7 @@ av_cold void ff_rl_init(RLTable *rl,
 av_cold void ff_rl_init_vlc(RLTable *rl, unsigned static_size)
 {
     int i, q;
-    VLC_TYPE table[1500][2] = {{0}};
+    VLC_TYPE (*table)[2] = av_calloc(sizeof(VLC_TYPE), 1500 * 2);
     VLC vlc = { .table = table, .table_allocated = static_size };
     av_assert0(static_size <= FF_ARRAY_ELEMS(table));
     init_vlc(&vlc, 9, rl->n + 1, &rl->table_vlc[0][1], 4, 2, &rl->table_vlc[0][0], 4, 2, INIT_VLC_USE_NEW_STATIC);
@@ -80,8 +80,10 @@ av_cold void ff_rl_init_vlc(RLTable *rl, unsigned static_size)
         int qmul = q * 2;
         int qadd = (q - 1) | 1;

-        if (!rl->rl_vlc[q])
+        if (!rl->rl_vlc[q]){
+            av_free(table);
             return;
+        }

         if (q == 0) {
             qmul = 1;
@@ -113,4 +115,5 @@ av_cold void ff_rl_init_vlc(RLTable *rl, unsigned static_size)
             rl->rl_vlc[q][i].run = run;
         }
     }
+    av_free(table);
 }
@@ -1,33 +1,3 @@
diff -Naur oidn-1.3.0/cmake/FindTBB.cmake external_openimagedenoise/cmake/FindTBB.cmake
--- oidn-1.3.0/cmake/FindTBB.cmake 2021-02-04 16:20:26 -0700
+++ external_openimagedenoise/cmake/FindTBB.cmake 2021-02-12 09:35:53 -0700
@@ -332,20 +332,22 @@
       ${TBB_ROOT}/lib/${TBB_ARCH}/${TBB_VCVER}
       ${TBB_ROOT}/lib
     )
-
     # On Windows, also search the DLL so that the client may install it.
     file(GLOB DLL_NAMES
       ${TBB_ROOT}/bin/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME}.dll
       ${TBB_ROOT}/bin/${LIB_NAME}.dll
+      ${TBB_ROOT}/lib/${LIB_NAME}.dll
       ${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME}.dll
       ${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME_GLOB1}.dll
       ${TBB_ROOT}/redist/${TBB_ARCH}/${TBB_VCVER}/${LIB_NAME_GLOB2}.dll
       ${TBB_ROOT}/../redist/${TBB_ARCH}/tbb/${TBB_VCVER}/${LIB_NAME}.dll
       ${TBB_ROOT}/../redist/${TBB_ARCH}_win/tbb/${TBB_VCVER}/${LIB_NAME}.dll
     )
-    list(GET DLL_NAMES 0 DLL_NAME)
-    get_filename_component(${BIN_DIR_VAR} "${DLL_NAME}" DIRECTORY)
-    set(${DLL_VAR} "${DLL_NAME}" CACHE PATH "${COMPONENT_NAME} ${BUILD_CONFIG} dll path")
+    if (DLL_NAMES)
+      list(GET DLL_NAMES 0 DLL_NAME)
+      get_filename_component(${BIN_DIR_VAR} "${DLL_NAME}" DIRECTORY)
+      set(${DLL_VAR} "${DLL_NAME}" CACHE PATH "${COMPONENT_NAME} ${BUILD_CONFIG} dll path")
+    endif()
   elseif(APPLE)
     set(LIB_PATHS ${TBB_ROOT}/lib)
   else()
--- external_openimagedenoise/cmake/oidn_ispc.cmake 2021-02-15 17:29:34.000000000 +0100
+++ external_openimagedenoise/cmake/oidn_ispc.cmake2 2021-02-15 17:29:28.000000000 +0100
@@ -98,7 +98,7 @@
@@ -1,70 +1,4 @@
-Blender Buildbot
-================
+Buildbot Configuration
+======================

-Code signing
-------------
+Files used by Buildbot's `compile-code` step.
-
-Code signing is done as part of INSTALL target, which makes it possible to sign
-files which are aimed into a bundle and coming from a non-signed source (such as
-libraries SVN).
-
-This is achieved by specifying `worker_codesign.cmake` as a post-install script
-run by CMake. This CMake script simply involves an utility script written in
-Python which takes care of an actual signing.
-
-### Configuration
-
-Client configuration doesn't need anything special, other than variable
-`SHARED_STORAGE_DIR` pointing to a location which is watched by a server.
-This is done in `config_builder.py` file and is stored in Git (which makes it
-possible to have almost zero-configuration buildbot machines).
-
-Server configuration requires copying `config_server_template.py` under the
-name of `config_server.py` and tweaking values, which are platform-specific.
-
-#### Windows configuration
-
-There are two things which are needed on Windows in order to have code signing
-to work:
-
-- `TIMESTAMP_AUTHORITY_URL` which is most likely set http://timestamp.digicert.com
-- `CERTIFICATE_FILEPATH` which is a full file path to a PKCS #12 key (.pfx).
-
-## Tips
-
-### Self-signed certificate on Windows
-
-It is easiest to test configuration using self-signed certificate.
-
-The certificate manipulation utilities are coming with Windows SDK.
-Unfortunately, they are not added to PATH. Here is an example of how to make
-sure they are easily available:
-
-```
-set PATH=C:\Program Files (x86)\Windows Kits\10\App Certification Kit;%PATH%
-set PATH=C:\Program Files (x86)\Windows Kits\10\bin\10.0.18362.0\x64;%PATH%
-```
-
-Generate CA:
-
-```
-makecert -r -pe -n "CN=Blender Test CA" -ss CA -sr CurrentUser -a sha256 ^
-         -cy authority -sky signature -sv BlenderTestCA.pvk BlenderTestCA.cer
-```
-
-Import the generated CA:
-
-```
-certutil -user -addstore Root BlenderTestCA.cer
-```
-
-Create self-signed certificate and pack it into PKCS #12:
-
-```
-makecert -pe -n "CN=Blender Test SPC" -a sha256 -cy end ^
-         -sky signature ^
-         -ic BlenderTestCA.cer -iv BlenderTestCA.pvk ^
-         -sv BlenderTestSPC.pvk BlenderTestSPC.cer
-
-pvk2pfx -pvk BlenderTestSPC.pvk -spc BlenderTestSPC.cer -pfx BlenderTestSPC.pfx
-```
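The removed README text above describes the code-signing hand-off purely in terms of a few module-level settings (`SHARED_STORAGE_DIR`, `TIMESTAMP_AUTHORITY_URL`, `CERTIFICATE_FILEPATH`). As a rough illustration only, a server could sanity-check those settings at startup along the lines below; the `config_server` import and the `validate()` helper are hypothetical, not part of the repository.

```python
# Hypothetical startup check for the code-sign server configuration.
# Assumes a config_server.py defining SHARED_STORAGE_DIR, CERTIFICATE_FILEPATH
# and TIMESTAMP_AUTHORITY_URL as described in the README text above.
from pathlib import Path

import config_server  # assumed module name, see README


def validate() -> None:
    storage = Path(config_server.SHARED_STORAGE_DIR)
    if not storage.is_dir():
        raise SystemExit(f'Shared storage {storage} is not mounted')

    certificate = Path(config_server.CERTIFICATE_FILEPATH)
    if not certificate.is_file():
        raise SystemExit(f'Certificate {certificate} does not exist')

    if not config_server.TIMESTAMP_AUTHORITY_URL.startswith('http'):
        raise SystemExit('TIMESTAMP_AUTHORITY_URL does not look like an URL')


if __name__ == '__main__':
    validate()
    print('Code-sign server configuration looks sane.')
```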
@@ -1,127 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####

# <pep8 compliant>

import argparse
import os
import re
import subprocess
import sys


def is_tool(name):
    """Check whether `name` is on PATH and marked as executable."""

    # from whichcraft import which
    from shutil import which

    return which(name) is not None


class Builder:
    def __init__(self, name, branch, codesign):
        self.name = name
        self.branch = branch
        self.is_release_branch = re.match("^blender-v(.*)-release$", branch) is not None
        self.codesign = codesign

        # Buildbot runs from build/ directory
        self.blender_dir = os.path.abspath(os.path.join('..', 'blender.git'))
        self.build_dir = os.path.abspath(os.path.join('..', 'build'))
        self.install_dir = os.path.abspath(os.path.join('..', 'install'))
        self.upload_dir = os.path.abspath(os.path.join('..', 'install'))

        # Detect platform
        if name.startswith('mac'):
            self.platform = 'mac'
            self.command_prefix = []
        elif name.startswith('linux'):
            self.platform = 'linux'
            if is_tool('scl'):
                self.command_prefix = ['scl', 'enable', 'devtoolset-9', '--']
            else:
                self.command_prefix = []
        elif name.startswith('win'):
            self.platform = 'win'
            self.command_prefix = []
        else:
            raise ValueError('Unkonw platform for builder ' + self.platform)

        # Always 64 bit now
        self.bits = 64


def create_builder_from_arguments():
    parser = argparse.ArgumentParser()
    parser.add_argument('builder_name')
    parser.add_argument('branch', default='master', nargs='?')
    parser.add_argument("--codesign", action="store_true")
    args = parser.parse_args()
    return Builder(args.builder_name, args.branch, args.codesign)


class VersionInfo:
    def __init__(self, builder):
        # Get version information
        buildinfo_h = os.path.join(builder.build_dir, "source", "creator", "buildinfo.h")
        blender_h = os.path.join(builder.blender_dir, "source", "blender", "blenkernel", "BKE_blender_version.h")

        version_number = int(self._parse_header_file(blender_h, 'BLENDER_VERSION'))
        version_number_patch = int(self._parse_header_file(blender_h, 'BLENDER_VERSION_PATCH'))
        version_numbers = (version_number // 100, version_number % 100, version_number_patch)
        self.short_version = "%d.%d" % (version_numbers[0], version_numbers[1])
        self.version = "%d.%d.%d" % version_numbers
        self.version_cycle = self._parse_header_file(blender_h, 'BLENDER_VERSION_CYCLE')
        self.hash = self._parse_header_file(buildinfo_h, 'BUILD_HASH')[1:-1]

        if self.version_cycle == "release":
            # Final release
            self.full_version = self.version
            self.is_development_build = False
        elif self.version_cycle == "rc":
            # Release candidate
            self.full_version = self.version + self.version_cycle
            self.is_development_build = False
        else:
            # Development build
            self.full_version = self.version + '-' + self.hash
            self.is_development_build = True

    def _parse_header_file(self, filename, define):
        import re
        regex = re.compile(r"^#\s*define\s+%s\s+(.*)" % define)
        with open(filename, "r") as file:
            for l in file:
                match = regex.match(l)
                if match:
                    return match.group(1)
        return None


def call(cmd, env=None, exit_on_error=True):
    print(' '.join(cmd))

    # Flush to ensure correct order output on Windows.
    sys.stdout.flush()
    sys.stderr.flush()

    retcode = subprocess.call(cmd, env=env)
    if exit_on_error and retcode != 0:
        sys.exit(retcode)
    return retcode
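The removed `VersionInfo` helper above extracts version defines with a single regex per `#define`. A small, hedged sketch of the same parsing approach applied to an in-memory header follows; the header snippet itself is made up for illustration.

```python
# Minimal sketch of the #define parsing used by VersionInfo._parse_header_file,
# run against a made-up header snippet instead of BKE_blender_version.h.
import re


def parse_define(header_text, define):
    regex = re.compile(r"^#\s*define\s+%s\s+(.*)" % define)
    for line in header_text.splitlines():
        match = regex.match(line)
        if match:
            return match.group(1)
    return None


HEADER = """
#define BLENDER_VERSION 293
#define BLENDER_VERSION_PATCH 0
#define BLENDER_VERSION_CYCLE alpha
"""

version = int(parse_define(HEADER, 'BLENDER_VERSION'))
print(version // 100, version % 100)  # -> 2 93
```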
@@ -1,81 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####

# <pep8 compliant>

from dataclasses import dataclass
from pathlib import Path
from typing import List


@dataclass
class AbsoluteAndRelativeFileName:
    """
    Helper class which keeps track of absolute file path for a direct access and
    corresponding relative path against given base.

    The relative part is used to construct a file name within an archive which
    contains files which are to be signed or which has been signed already
    (depending on whether the archive is addressed to signing server or back
    to the buildbot worker).
    """

    # Base directory which is where relative_filepath is relative to.
    base_dir: Path

    # Full absolute path of the corresponding file.
    absolute_filepath: Path

    # Derived from full file path, contains part of the path which is relative
    # to a desired base path.
    relative_filepath: Path

    def __init__(self, base_dir: Path, filepath: Path):
        self.base_dir = base_dir
        self.absolute_filepath = filepath.resolve()
        self.relative_filepath = self.absolute_filepath.relative_to(
            self.base_dir)

    @classmethod
    def from_path(cls, path: Path) -> 'AbsoluteAndRelativeFileName':
        assert path.is_absolute()
        assert path.is_file()

        base_dir = path.parent
        return AbsoluteAndRelativeFileName(base_dir, path)

    @classmethod
    def recursively_from_directory(cls, base_dir: Path) \
            -> List['AbsoluteAndRelativeFileName']:
        """
        Create list of AbsoluteAndRelativeFileName for all the files in the
        given directory.

        NOTE: Result will be pointing to a resolved paths.
        """
        assert base_dir.is_absolute()
        assert base_dir.is_dir()

        base_dir = base_dir.resolve()

        result = []
        for filename in base_dir.glob('**/*'):
            if not filename.is_file():
                continue
            result.append(AbsoluteAndRelativeFileName(base_dir, filename))
        return result
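A short usage sketch for the class above, collecting files from a scratch directory. The directory contents are made up for illustration, and the `codesign.absolute_and_relative_filename` import path is the one used elsewhere in these scripts.

```python
# Sketch: collect AbsoluteAndRelativeFileName entries from a scratch directory.
import tempfile
from pathlib import Path

from codesign.absolute_and_relative_filename import AbsoluteAndRelativeFileName

with tempfile.TemporaryDirectory() as temp_dir_str:
    base_dir = Path(temp_dir_str)
    (base_dir / 'bin').mkdir()
    (base_dir / 'bin' / 'blender.exe').write_bytes(b'')

    files = AbsoluteAndRelativeFileName.recursively_from_directory(base_dir)
    for f in files:
        # relative_filepath is what ends up as the archive member name.
        print(f.relative_filepath)  # -> bin/blender.exe
```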
@@ -1,245 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####

# <pep8 compliant>

import dataclasses
import json
import os

from pathlib import Path
from typing import Optional

import codesign.util as util


class ArchiveStateError(Exception):
    message: str

    def __init__(self, message):
        self.message = message
        super().__init__(self.message)


@dataclasses.dataclass
class ArchiveState:
    """
    Additional information (state) of the archive

    Includes information like expected file size of the archive file in the case
    the archive file is expected to be successfully created.

    If the archive can not be created, this state will contain error message
    indicating details of error.
    """

    # Size in bytes of the corresponding archive.
    file_size: Optional[int] = None

    # Non-empty value indicates that error has happenned.
    error_message: str = ''

    def has_error(self) -> bool:
        """
        Check whether the archive is at error state
        """

        return self.error_message

    def serialize_to_string(self) -> str:
        payload = dataclasses.asdict(self)
        return json.dumps(payload, sort_keys=True, indent=4)

    def serialize_to_file(self, filepath: Path) -> None:
        string = self.serialize_to_string()
        filepath.write_text(string)

    @classmethod
    def deserialize_from_string(cls, string: str) -> 'ArchiveState':
        try:
            object_as_dict = json.loads(string)
        except json.decoder.JSONDecodeError:
            raise ArchiveStateError('Error parsing JSON')

        return cls(**object_as_dict)

    @classmethod
    def deserialize_from_file(cls, filepath: Path):
        string = filepath.read_text()
        return cls.deserialize_from_string(string)


class ArchiveWithIndicator:
    """
    The idea of this class is to wrap around logic which takes care of keeping
    track of a name of an archive and synchronization routines between buildbot
    worker and signing server.

    The synchronization is done based on creating a special file after the
    archive file is knowingly ready for access.
    """

    # Base directory where the archive is stored (basically, a basename() of
    # the absolute archive file name).
    #
    # For example, 'X:\\TEMP\\'.
    base_dir: Path

    # Absolute file name of the archive.
    #
    # For example, 'X:\\TEMP\\FOO.ZIP'.
    archive_filepath: Path

    # Absolute name of a file which acts as an indication of the fact that the
    # archive is ready and is available for access.
    #
    # This is how synchronization between buildbot worker and signing server is
    # done:
    # - First, the archive is created under archive_filepath name.
    # - Second, the indication file is created under ready_indicator_filepath
    #   name.
    # - Third, the colleague of whoever created the indicator name watches for
    #   the indication file to appear, and once it's there it access the
    #   archive.
    ready_indicator_filepath: Path

    def __init__(
            self, base_dir: Path, archive_name: str, ready_indicator_name: str):
        """
        Construct the object from given base directory and name of the archive
        file:
          ArchiveWithIndicator(Path('X:\\TEMP'), 'FOO.ZIP', 'INPUT_READY')
        """

        self.base_dir = base_dir
        self.archive_filepath = self.base_dir / archive_name
        self.ready_indicator_filepath = self.base_dir / ready_indicator_name

    def is_ready_unsafe(self) -> bool:
        """
        Check whether the archive is ready for access.

        No guarding about possible network failres is done here.
        """
        if not self.ready_indicator_filepath.exists():
            return False

        try:
            archive_state = ArchiveState.deserialize_from_file(
                self.ready_indicator_filepath)
        except ArchiveStateError as error:
            print(f'Error deserializing archive state: {error.message}')
            return False

        if archive_state.has_error():
            # If the error did happen during codesign procedure there will be no
            # corresponding archive file.
            # The caller code will deal with the error check further.
            return True

        # Sometimes on macOS indicator file appears prior to the actual archive
        # despite the order of creation and os.sync() used in tag_ready().
        # So consider archive not ready if there is an indicator without an
        # actual archive.
        if not self.archive_filepath.exists():
            print('Found indicator without actual archive, waiting for archive '
                  f'({self.archive_filepath}) to appear.')
            return False

        # Wait for until archive is fully stored.
        actual_archive_size = self.archive_filepath.stat().st_size
        if actual_archive_size != archive_state.file_size:
            print('Partial/invalid archive size (expected '
                  f'{archive_state.file_size} got {actual_archive_size})')
            return False

        return True

    def is_ready(self) -> bool:
        """
        Check whether the archive is ready for access.

        Will tolerate possible network failures: if there is a network failure
        or if there is still no proper permission on a file False is returned.
        """

        # There are some intermitten problem happening at a random which is
        # translates to "OSError : [WinError 59] An unexpected network error occurred".
        # Some reports suggests it might be due to lack of permissions to the file,
        # which might be applicable in our case since it's possible that file is
        # initially created with non-accessible permissions and gets chmod-ed
        # after initial creation.
        try:
            return self.is_ready_unsafe()
        except OSError as e:
            print(f'Exception checking archive: {e}')
            return False

    def tag_ready(self, error_message='') -> None:
        """
        Tag the archive as ready by creating the corresponding indication file.

        NOTE: It is expected that the archive was never tagged as ready before
              and that there are no subsequent tags of the same archive.
              If it is violated, an assert will fail.
        """
        assert not self.is_ready()

        # Try the best to make sure everything is synced to the file system,
        # to avoid any possibility of stamp appearing on a network share prior to
        # an actual file.
        if util.get_current_platform() != util.Platform.WINDOWS:
            os.sync()

        archive_size = -1
        if self.archive_filepath.exists():
            archive_size = self.archive_filepath.stat().st_size

        archive_info = ArchiveState(
            file_size=archive_size, error_message=error_message)

        self.ready_indicator_filepath.write_text(
            archive_info.serialize_to_string())

    def get_state(self) -> ArchiveState:
        """
        Get state object for this archive

        The state is read from the corresponding state file.
        """

        try:
            return ArchiveState.deserialize_from_file(self.ready_indicator_filepath)
        except ArchiveStateError as error:
            return ArchiveState(error_message=f'Error in information format: {error}')

    def clean(self) -> None:
        """
        Remove both archive and the ready indication file.
        """
        util.ensure_file_does_not_exist_or_die(self.ready_indicator_filepath)
        util.ensure_file_does_not_exist_or_die(self.archive_filepath)

    def is_fully_absent(self) -> bool:
        """
        Check whether both archive and its ready indicator are absent.
        Is used for a sanity check during code signing process by both
        buildbot worker and signing server.
        """
        return (not self.archive_filepath.exists() and
                not self.ready_indicator_filepath.exists())
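The READY-file handshake described in the comments above can be exercised in isolation. Below is a hedged sketch of the producer side; the base directory is made up, and the payload is not a real tar archive since only the handshake is being illustrated.

```python
# Sketch of the producer side of the READY-file handshake implemented above.
from pathlib import Path

from codesign.archive_with_indicator import ArchiveWithIndicator

base_dir = Path('/tmp/codesign-demo')  # made-up location for the example
base_dir.mkdir(parents=True, exist_ok=True)

archive = ArchiveWithIndicator(base_dir, 'request.tar', 'request.ready')

# The archive file is written first...
archive.archive_filepath.write_bytes(b'tar payload goes here')

# ...and only then the indicator is written, so a reader polling is_ready()
# never observes a half-written archive.
archive.tag_ready()

assert archive.is_ready()
print(archive.get_state().file_size)
```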
@@ -1,501 +0,0 @@
|
|||||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software Foundation,
|
|
||||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
#
|
|
||||||
# ##### END GPL LICENSE BLOCK #####
|
|
||||||
|
|
||||||
# <pep8 compliant>
|
|
||||||
|
|
||||||
# Signing process overview.
|
|
||||||
#
|
|
||||||
# From buildbot worker side:
|
|
||||||
# - Files which needs to be signed are collected from either a directory to
|
|
||||||
# sign all signable files in there, or by filename of a single file to sign.
|
|
||||||
# - Those files gets packed into an archive and stored in a location location
|
|
||||||
# which is watched by the signing server.
|
|
||||||
# - A marker READY file is created which indicates the archive is ready for
|
|
||||||
# access.
|
|
||||||
# - Wait for the server to provide an archive with signed files.
|
|
||||||
# This is done by watching for the READY file which corresponds to an archive
|
|
||||||
# coming from the signing server.
|
|
||||||
# - Unpack the signed signed files from the archives and replace original ones.
|
|
||||||
#
|
|
||||||
# From code sign server:
|
|
||||||
# - Watch special location for a READY file which indicates the there is an
|
|
||||||
# archive with files which are to be signed.
|
|
||||||
# - Unpack the archive to a temporary location.
|
|
||||||
# - Run codesign tool and make sure all the files are signed.
|
|
||||||
# - Pack the signed files and store them in a location which is watched by
|
|
||||||
# the buildbot worker.
|
|
||||||
# - Create a READY file which indicates that the archive with signed files is
|
|
||||||
# ready.
|
|
||||||
|
|
||||||
import abc
|
|
||||||
import logging
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
import time
|
|
||||||
import tarfile
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
from pathlib import Path
|
|
||||||
from tempfile import TemporaryDirectory
|
|
||||||
from typing import Iterable, List
|
|
||||||
|
|
||||||
import codesign.util as util
|
|
||||||
|
|
||||||
from codesign.absolute_and_relative_filename import AbsoluteAndRelativeFileName
|
|
||||||
from codesign.archive_with_indicator import ArchiveWithIndicator
|
|
||||||
from codesign.exception import CodeSignException
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
logger_builder = logger.getChild('builder')
|
|
||||||
logger_server = logger.getChild('server')
|
|
||||||
|
|
||||||
|
|
||||||
def pack_files(files: Iterable[AbsoluteAndRelativeFileName],
|
|
||||||
archive_filepath: Path) -> None:
|
|
||||||
"""
|
|
||||||
Create tar archive from given files for the signing pipeline.
|
|
||||||
Is used by buildbot worker to create an archive of files which are to be
|
|
||||||
signed, and by signing server to send signed files back to the worker.
|
|
||||||
"""
|
|
||||||
with tarfile.TarFile.open(archive_filepath, 'w') as tar_file_handle:
|
|
||||||
for file_info in files:
|
|
||||||
tar_file_handle.add(file_info.absolute_filepath,
|
|
||||||
arcname=file_info.relative_filepath)
|
|
||||||
|
|
||||||
|
|
||||||
def extract_files(archive_filepath: Path,
|
|
||||||
extraction_dir: Path) -> None:
|
|
||||||
"""
|
|
||||||
Extract all files form the given archive into the given direcotry.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# TODO(sergey): Verify files in the archive have relative path.
|
|
||||||
|
|
||||||
with tarfile.TarFile.open(archive_filepath, mode='r') as tar_file_handle:
|
|
||||||
tar_file_handle.extractall(path=extraction_dir)
|
|
||||||
|
|
||||||
|
|
||||||
class BaseCodeSigner(metaclass=abc.ABCMeta):
|
|
||||||
"""
|
|
||||||
Base class for a platform-specific signer of binaries.
|
|
||||||
|
|
||||||
Contains all the logic shared across platform-specific implementations, such
|
|
||||||
as synchronization and notification logic.
|
|
||||||
|
|
||||||
Platform specific bits (such as actual command for signing the binary) are
|
|
||||||
to be implemented as a subclass.
|
|
||||||
|
|
||||||
Provides utilities code signing as a whole, including functionality needed
|
|
||||||
by a signing server and a buildbot worker.
|
|
||||||
|
|
||||||
The signer and builder may run on separate machines, the only requirement is
|
|
||||||
that they have access to a directory which is shared between them. For the
|
|
||||||
security concerns this is to be done as a separate machine (or as a Shared
|
|
||||||
Folder configuration in VirtualBox configuration). This directory might be
|
|
||||||
mounted under different base paths, but its underlying storage is to be
|
|
||||||
the same.
|
|
||||||
|
|
||||||
The code signer is short-lived on a buildbot worker side, and is living
|
|
||||||
forever on a code signing server side.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# TODO(sergey): Find a neat way to have config annotated.
|
|
||||||
# config: Config
|
|
||||||
|
|
||||||
# Storage directory where builder puts files which are requested to be
|
|
||||||
# signed.
|
|
||||||
# Consider this an input of the code signing server.
|
|
||||||
unsigned_storage_dir: Path
|
|
||||||
|
|
||||||
# Storage where signed files are stored.
|
|
||||||
# Consider this an output of the code signer server.
|
|
||||||
signed_storage_dir: Path
|
|
||||||
|
|
||||||
# Platform the code is currently executing on.
|
|
||||||
platform: util.Platform
|
|
||||||
|
|
||||||
def __init__(self, config):
|
|
||||||
self.config = config
|
|
||||||
|
|
||||||
absolute_shared_storage_dir = config.SHARED_STORAGE_DIR.resolve()
|
|
||||||
|
|
||||||
# Unsigned (signing server input) configuration.
|
|
||||||
self.unsigned_storage_dir = absolute_shared_storage_dir / 'unsigned'
|
|
||||||
|
|
||||||
# Signed (signing server output) configuration.
|
|
||||||
self.signed_storage_dir = absolute_shared_storage_dir / 'signed'
|
|
||||||
|
|
||||||
self.platform = util.get_current_platform()
|
|
||||||
|
|
||||||
def cleanup_environment_for_builder(self) -> None:
|
|
||||||
# TODO(sergey): Revisit need of cleaning up the existing files.
|
|
||||||
# In practice it wasn't so helpful, and with multiple clients
|
|
||||||
# talking to the same server it becomes even more tricky.
|
|
||||||
pass
|
|
||||||
|
|
||||||
def cleanup_environment_for_signing_server(self) -> None:
|
|
||||||
# TODO(sergey): Revisit need of cleaning up the existing files.
|
|
||||||
# In practice it wasn't so helpful, and with multiple clients
|
|
||||||
# talking to the same server it becomes even more tricky.
|
|
||||||
pass
|
|
||||||
|
|
||||||
def generate_request_id(self) -> str:
|
|
||||||
"""
|
|
||||||
Generate an unique identifier for code signing request.
|
|
||||||
"""
|
|
||||||
return str(uuid.uuid4())
|
|
||||||
|
|
||||||
def archive_info_for_request_id(
|
|
||||||
self, path: Path, request_id: str) -> ArchiveWithIndicator:
|
|
||||||
return ArchiveWithIndicator(
|
|
||||||
path, f'{request_id}.tar', f'{request_id}.ready')
|
|
||||||
|
|
||||||
def signed_archive_info_for_request_id(
|
|
||||||
self, request_id: str) -> ArchiveWithIndicator:
|
|
||||||
return self.archive_info_for_request_id(
|
|
||||||
self.signed_storage_dir, request_id)
|
|
||||||
|
|
||||||
def unsigned_archive_info_for_request_id(
|
|
||||||
self, request_id: str) -> ArchiveWithIndicator:
|
|
||||||
return self.archive_info_for_request_id(
|
|
||||||
self.unsigned_storage_dir, request_id)
|
|
||||||
|
|
||||||
############################################################################
|
|
||||||
# Buildbot worker side helpers.
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def check_file_is_to_be_signed(
|
|
||||||
self, file: AbsoluteAndRelativeFileName) -> bool:
|
|
||||||
"""
|
|
||||||
Check whether file is to be signed.
|
|
||||||
|
|
||||||
Is used by both single file signing pipeline and recursive directory
|
|
||||||
signing pipeline.
|
|
||||||
|
|
||||||
This is where code signer is to check whether file is to be signed or
|
|
||||||
not. This check might be based on a simple extension test or on actual
|
|
||||||
test whether file have a digital signature already or not.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def collect_files_to_sign(self, path: Path) \
|
|
||||||
-> List[AbsoluteAndRelativeFileName]:
|
|
||||||
"""
|
|
||||||
Get all files which need to be signed from the given path.
|
|
||||||
|
|
||||||
NOTE: The path might either be a file or directory.
|
|
||||||
|
|
||||||
This function is run from the buildbot worker side.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# If there is a single file provided trust the buildbot worker that it
|
|
||||||
# is eligible for signing.
|
|
||||||
if path.is_file():
|
|
||||||
file = AbsoluteAndRelativeFileName.from_path(path)
|
|
||||||
if not self.check_file_is_to_be_signed(file):
|
|
||||||
return []
|
|
||||||
return [file]
|
|
||||||
|
|
||||||
all_files = AbsoluteAndRelativeFileName.recursively_from_directory(
|
|
||||||
path)
|
|
||||||
files_to_be_signed = [file for file in all_files
|
|
||||||
if self.check_file_is_to_be_signed(file)]
|
|
||||||
return files_to_be_signed
|
|
||||||
|
|
||||||
def wait_for_signed_archive_or_die(self, request_id) -> None:
|
|
||||||
"""
|
|
||||||
Wait until archive with signed files is available.
|
|
||||||
|
|
||||||
Will only return if the archive with signed files is available. If there
|
|
||||||
was an error during code sign procedure the SystemExit exception is
|
|
||||||
raised, with the message set to the error reported by the codesign
|
|
||||||
server.
|
|
||||||
|
|
||||||
Will only wait for the configured time. If that time exceeds and there
|
|
||||||
is still no responce from the signing server the application will exit
|
|
||||||
with a non-zero exit code.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
signed_archive_info = self.signed_archive_info_for_request_id(
|
|
||||||
request_id)
|
|
||||||
unsigned_archive_info = self.unsigned_archive_info_for_request_id(
|
|
||||||
request_id)
|
|
||||||
|
|
||||||
timeout_in_seconds = self.config.TIMEOUT_IN_SECONDS
|
|
||||||
time_start = time.monotonic()
|
|
||||||
while not signed_archive_info.is_ready():
|
|
||||||
time.sleep(1)
|
|
||||||
time_slept_in_seconds = time.monotonic() - time_start
|
|
||||||
if time_slept_in_seconds > timeout_in_seconds:
|
|
||||||
signed_archive_info.clean()
|
|
||||||
unsigned_archive_info.clean()
|
|
||||||
raise SystemExit("Signing server didn't finish signing in "
|
|
||||||
f'{timeout_in_seconds} seconds, dying :(')
|
|
||||||
|
|
||||||
archive_state = signed_archive_info.get_state()
|
|
||||||
if archive_state.has_error():
|
|
||||||
signed_archive_info.clean()
|
|
||||||
unsigned_archive_info.clean()
|
|
||||||
raise SystemExit(
|
|
||||||
f'Error happenned during codesign procedure: {archive_state.error_message}')
|
|
||||||
|
|
||||||
def copy_signed_files_to_directory(
|
|
||||||
self, signed_dir: Path, destination_dir: Path) -> None:
|
|
||||||
"""
|
|
||||||
Copy all files from signed_dir to destination_dir.
|
|
||||||
|
|
||||||
This function will overwrite any existing file. Permissions are copied
|
|
||||||
from the source files, but other metadata, such as timestamps, are not.
|
|
||||||
"""
|
|
||||||
for signed_filepath in signed_dir.glob('**/*'):
|
|
||||||
if not signed_filepath.is_file():
|
|
||||||
continue
|
|
||||||
|
|
||||||
relative_filepath = signed_filepath.relative_to(signed_dir)
|
|
||||||
destination_filepath = destination_dir / relative_filepath
|
|
||||||
destination_filepath.parent.mkdir(parents=True, exist_ok=True)
|
|
||||||
|
|
||||||
shutil.copy(signed_filepath, destination_filepath)
|
|
||||||
|
|
||||||
def run_buildbot_path_sign_pipeline(self, path: Path) -> None:
|
|
||||||
"""
|
|
||||||
Run all steps needed to make given path signed.
|
|
||||||
|
|
||||||
Path points to an unsigned file or a directory which contains unsigned
|
|
||||||
files.
|
|
||||||
|
|
||||||
If the path points to a single file then this file will be signed.
|
|
||||||
This is used to sign a final bundle such as .msi on Windows or .dmg on
|
|
||||||
macOS.
|
|
||||||
|
|
||||||
NOTE: The code signed implementation might actually reject signing the
|
|
||||||
file, in which case the file will be left unsigned. This isn't anything
|
|
||||||
to be considered a failure situation, just might happen when buildbot
|
|
||||||
worker can not detect whether signing is really required in a specific
|
|
||||||
case or not.
|
|
||||||
|
|
||||||
If the path points to a directory then code signer will sign all
|
|
||||||
signable files from it (finding them recursively).
|
|
||||||
"""
|
|
||||||
|
|
||||||
self.cleanup_environment_for_builder()
|
|
||||||
|
|
||||||
# Make sure storage directory exists.
|
|
||||||
self.unsigned_storage_dir.mkdir(parents=True, exist_ok=True)
|
|
||||||
|
|
||||||
# Collect all files which needs to be signed and pack them into a single
|
|
||||||
# archive which will be sent to the signing server.
|
|
||||||
logger_builder.info('Collecting files which are to be signed...')
|
|
||||||
files = self.collect_files_to_sign(path)
|
|
||||||
if not files:
|
|
||||||
logger_builder.info('No files to be signed, ignoring.')
|
|
||||||
return
|
|
||||||
logger_builder.info('Found %d files to sign.', len(files))
|
|
||||||
|
|
||||||
request_id = self.generate_request_id()
|
|
||||||
signed_archive_info = self.signed_archive_info_for_request_id(
|
|
||||||
request_id)
|
|
||||||
unsigned_archive_info = self.unsigned_archive_info_for_request_id(
|
|
||||||
request_id)
|
|
||||||
|
|
||||||
pack_files(files=files,
|
|
||||||
archive_filepath=unsigned_archive_info.archive_filepath)
|
|
||||||
unsigned_archive_info.tag_ready()
|
|
||||||
|
|
||||||
# Wait for the signing server to finish signing.
|
|
||||||
logger_builder.info('Waiting signing server to sign the files...')
|
|
||||||
self.wait_for_signed_archive_or_die(request_id)
|
|
||||||
|
|
||||||
# Extract signed files from archive and move files to final location.
|
|
||||||
with TemporaryDirectory(prefix='blender-buildbot-') as temp_dir_str:
|
|
||||||
unpacked_signed_files_dir = Path(temp_dir_str)
|
|
||||||
|
|
||||||
logger_builder.info('Extracting signed files from archive...')
|
|
||||||
extract_files(
|
|
||||||
archive_filepath=signed_archive_info.archive_filepath,
|
|
||||||
extraction_dir=unpacked_signed_files_dir)
|
|
||||||
|
|
||||||
destination_dir = path
|
|
||||||
if destination_dir.is_file():
|
|
||||||
destination_dir = destination_dir.parent
|
|
||||||
self.copy_signed_files_to_directory(
|
|
||||||
unpacked_signed_files_dir, destination_dir)
|
|
||||||
|
|
||||||
logger_builder.info('Removing archive with signed files...')
|
|
||||||
signed_archive_info.clean()
|
|
||||||
|
|
||||||
############################################################################
|
|
||||||
# Signing server side helpers.
|
|
||||||
|
|
||||||
def wait_for_sign_request(self) -> str:
|
|
||||||
"""
|
|
||||||
Wait for the buildbot to request signing of an archive.
|
|
||||||
|
|
||||||
Returns an identifier of signing request.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# TOOD(sergey): Support graceful shutdown on Ctrl-C.
|
|
||||||
|
|
||||||
logger_server.info(
|
|
||||||
f'Waiting for a request directory {self.unsigned_storage_dir} to appear.')
|
|
||||||
while not self.unsigned_storage_dir.exists():
|
|
||||||
time.sleep(1)
|
|
||||||
|
|
||||||
logger_server.info(
|
|
||||||
'Waiting for a READY indicator of any signing request.')
|
|
||||||
request_id = None
|
|
||||||
while request_id is None:
|
|
||||||
for file in self.unsigned_storage_dir.iterdir():
|
|
||||||
if file.suffix != '.ready':
|
|
||||||
continue
|
|
||||||
request_id = file.stem
|
|
||||||
logger_server.info(f'Found READY for request ID {request_id}.')
|
|
||||||
if request_id is None:
|
|
||||||
time.sleep(1)
|
|
||||||
|
|
||||||
unsigned_archive_info = self.unsigned_archive_info_for_request_id(
|
|
||||||
request_id)
|
|
||||||
while not unsigned_archive_info.is_ready():
|
|
||||||
time.sleep(1)
|
|
||||||
|
|
||||||
return request_id
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def sign_all_files(self, files: List[AbsoluteAndRelativeFileName]) -> None:
|
|
||||||
"""
|
|
||||||
Sign all files in the given directory.
|
|
||||||
|
|
||||||
NOTE: Signing should happen in-place.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def run_signing_pipeline(self, request_id: str):
|
|
||||||
"""
|
|
||||||
Run the full signing pipeline starting from the point when buildbot
|
|
||||||
worker have requested signing.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Make sure storage directory exists.
|
|
||||||
self.signed_storage_dir.mkdir(parents=True, exist_ok=True)
|
|
||||||
|
|
||||||
with TemporaryDirectory(prefix='blender-codesign-') as temp_dir_str:
|
|
||||||
temp_dir = Path(temp_dir_str)
|
|
||||||
|
|
||||||
signed_archive_info = self.signed_archive_info_for_request_id(
|
|
||||||
request_id)
|
|
||||||
unsigned_archive_info = self.unsigned_archive_info_for_request_id(
|
|
||||||
request_id)
|
|
||||||
|
|
||||||
logger_server.info('Extracting unsigned files from archive...')
|
|
||||||
extract_files(
|
|
||||||
archive_filepath=unsigned_archive_info.archive_filepath,
|
|
||||||
extraction_dir=temp_dir)
|
|
||||||
|
|
||||||
logger_server.info('Collecting all files which needs signing...')
|
|
||||||
files = AbsoluteAndRelativeFileName.recursively_from_directory(
|
|
||||||
temp_dir)
|
|
||||||
|
|
||||||
logger_server.info('Signing all requested files...')
|
|
||||||
try:
|
|
||||||
self.sign_all_files(files)
|
|
||||||
except CodeSignException as error:
|
|
||||||
signed_archive_info.tag_ready(error_message=error.message)
|
|
||||||
unsigned_archive_info.clean()
|
|
||||||
logger_server.info('Signing is complete with errors.')
|
|
||||||
return
|
|
||||||
|
|
||||||
logger_server.info('Packing signed files...')
|
|
||||||
pack_files(files=files,
|
|
||||||
archive_filepath=signed_archive_info.archive_filepath)
|
|
||||||
signed_archive_info.tag_ready()
|
|
||||||
|
|
||||||
logger_server.info('Removing signing request...')
|
|
||||||
unsigned_archive_info.clean()
|
|
||||||
|
|
||||||
logger_server.info('Signing is complete.')
|
|
||||||
|
|
||||||
def run_signing_server(self):
|
|
||||||
logger_server.info('Starting new code signing server...')
|
|
||||||
self.cleanup_environment_for_signing_server()
|
|
||||||
logger_server.info('Code signing server is ready')
|
|
||||||
while True:
|
|
||||||
logger_server.info('Waiting for the signing request in %s...',
|
|
||||||
self.unsigned_storage_dir)
|
|
||||||
request_id = self.wait_for_sign_request()
|
|
||||||
|
|
||||||
logger_server.info(
|
|
||||||
f'Beging signign procedure for request ID {request_id}.')
|
|
||||||
self.run_signing_pipeline(request_id)
|
|
||||||
|
|
||||||
############################################################################
|
|
||||||
# Command executing.
|
|
||||||
#
|
|
||||||
# Abstracted to a degree that allows to run commands from a foreign
|
|
||||||
# platform.
|
|
||||||
# The goal with this is to allow performing dry-run tests of code signer
|
|
||||||
# server from other platforms (for example, to test that macOS code signer
|
|
||||||
# does what it is supposed to after doing a refactor on Linux).
|
|
||||||
|
|
||||||
# TODO(sergey): What is the type annotation for the command?
|
|
||||||
def run_command_or_mock(self, command, platform: util.Platform) -> None:
|
|
||||||
"""
|
|
||||||
Run given command if current platform matches given one
|
|
||||||
|
|
||||||
If the platform is different then it will only be printed allowing
|
|
||||||
to verify logic of the code signing process.
|
|
||||||
"""
|
|
||||||
|
|
||||||
if platform != self.platform:
|
|
||||||
logger_server.info(
|
|
||||||
f'Will run command for {platform}: {command}')
|
|
||||||
return
|
|
||||||
|
|
||||||
logger_server.info(f'Running command: {command}')
|
|
||||||
subprocess.run(command)
|
|
||||||
|
|
||||||
# TODO(sergey): What is the type annotation for the command?
|
|
||||||
def check_output_or_mock(self, command,
|
|
||||||
platform: util.Platform,
|
|
||||||
allow_nonzero_exit_code=False) -> str:
|
|
||||||
"""
|
|
||||||
Run given command if current platform matches given one
|
|
||||||
|
|
||||||
If the platform is different then it will only be printed allowing
|
|
||||||
to verify logic of the code signing process.
|
|
||||||
|
|
||||||
If allow_nonzero_exit_code is truth then the output will be returned
|
|
||||||
even if application quit with non-zero exit code.
|
|
||||||
Otherwise an subprocess.CalledProcessError exception will be raised
|
|
||||||
in such case.
|
|
||||||
"""
|
|
||||||
|
|
||||||
if platform != self.platform:
|
|
||||||
logger_server.info(
|
|
||||||
f'Will run command for {platform}: {command}')
|
|
||||||
return
|
|
||||||
|
|
||||||
if allow_nonzero_exit_code:
|
|
||||||
process = subprocess.Popen(command,
|
|
||||||
stdout=subprocess.PIPE,
|
|
||||||
stderr=subprocess.STDOUT)
|
|
||||||
output = process.communicate()[0]
|
|
||||||
return output.decode()
|
|
||||||
|
|
||||||
logger_server.info(f'Running command: {command}')
|
|
||||||
return subprocess.check_output(
|
|
||||||
command, stderr=subprocess.STDOUT).decode()
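# Illustrative sketch (added for this review, not part of the original file):
# the mock-or-run behaviour above boils down to comparing the requested
# platform with the host platform. The helper below reuses the module's
# existing `subprocess` and `codesign.util` imports; the command in the
# usage note is hypothetical.
def _example_run_or_mock(command, target_platform: util.Platform) -> None:
    # Dry-run: only print the command when it targets a foreign platform.
    if target_platform != util.get_current_platform():
        print(f'Would run on {target_platform}: {command}')
        return
    subprocess.run(command)

# For example, on a Linux host this only prints the command instead of
# invoking the macOS codesign tool:
#   _example_run_or_mock(['codesign', '--verify', 'Blender.app'],
#                        util.Platform.MACOS)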
|
|
@@ -1,62 +0,0 @@
|
|||||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software Foundation,
|
|
||||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
#
|
|
||||||
# ##### END GPL LICENSE BLOCK #####
|
|
||||||
|
|
||||||
# <pep8 compliant>
|
|
||||||
|
|
||||||
# Configuration of a code signer which is specific to the code running from
|
|
||||||
# buildbot's worker.
|
|
||||||
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import codesign.util as util
|
|
||||||
|
|
||||||
from codesign.config_common import *
|
|
||||||
|
|
||||||
platform = util.get_current_platform()
|
|
||||||
if platform == util.Platform.LINUX:
|
|
||||||
SHARED_STORAGE_DIR = Path('/data/codesign')
|
|
||||||
elif platform == util.Platform.WINDOWS:
|
|
||||||
SHARED_STORAGE_DIR = Path('Z:\\codesign')
|
|
||||||
elif platform == util.Platform.MACOS:
|
|
||||||
SHARED_STORAGE_DIR = Path('/Volumes/codesign_macos/codesign')
|
|
||||||
|
|
||||||
# https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema
|
|
||||||
LOGGING = {
|
|
||||||
'version': 1,
|
|
||||||
'formatters': {
|
|
||||||
'default': {'format': '%(asctime)-15s %(levelname)8s %(name)s %(message)s'}
|
|
||||||
},
|
|
||||||
'handlers': {
|
|
||||||
'console': {
|
|
||||||
'class': 'logging.StreamHandler',
|
|
||||||
'formatter': 'default',
|
|
||||||
'stream': 'ext://sys.stderr',
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'loggers': {
|
|
||||||
'codesign': {'level': 'INFO'},
|
|
||||||
},
|
|
||||||
'root': {
|
|
||||||
'level': 'WARNING',
|
|
||||||
'handlers': [
|
|
||||||
'console',
|
|
||||||
],
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,36 +0,0 @@
|
|||||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software Foundation,
|
|
||||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
#
|
|
||||||
# ##### END GPL LICENSE BLOCK #####
|
|
||||||
|
|
||||||
# <pep8 compliant>
|
|
||||||
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
# Timeout in seconds for the signing process.
|
|
||||||
#
|
|
||||||
# This is how long the buildbot packing step will wait for the signing server
|
|
||||||
# perform signing.
|
|
||||||
#
|
|
||||||
# NOTE: Notarization could take a long time, hence the rather high value
|
|
||||||
# here. Might consider using different timeout for different platforms.
|
|
||||||
TIMEOUT_IN_SECONDS = 45 * 60 * 60
|
|
||||||
|
|
||||||
# Directory which is shared across buildbot worker and signing server.
|
|
||||||
#
|
|
||||||
# This is where worker puts files requested for signing as well as where
|
|
||||||
# server puts signed files.
|
|
||||||
SHARED_STORAGE_DIR: Path
|
|
@@ -1,101 +0,0 @@
|
|||||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software Foundation,
|
|
||||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
#
|
|
||||||
# ##### END GPL LICENSE BLOCK #####
|
|
||||||
|
|
||||||
# <pep8 compliant>
|
|
||||||
|
|
||||||
# Configuration of a code signer which is specific to the code signing server.
|
|
||||||
#
|
|
||||||
# NOTE: DO NOT put any sensitive information here, put it in an actual
|
|
||||||
# configuration on the signing machine.
|
|
||||||
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
from codesign.config_common import *
|
|
||||||
|
|
||||||
CODESIGN_DIRECTORY = Path(__file__).absolute().parent
|
|
||||||
BLENDER_GIT_ROOT_DIRECTORY = CODESIGN_DIRECTORY.parent.parent.parent
|
|
||||||
|
|
||||||
################################################################################
|
|
||||||
# Common configuration.
|
|
||||||
|
|
||||||
# Directory where folders for codesign requests and signed result are stored.
|
|
||||||
# For example, /data/codesign
|
|
||||||
SHARED_STORAGE_DIR: Path
|
|
||||||
|
|
||||||
################################################################################
|
|
||||||
# macOS-specific configuration.
|
|
||||||
|
|
||||||
MACOS_ENTITLEMENTS_FILE = \
|
|
||||||
BLENDER_GIT_ROOT_DIRECTORY / 'release' / 'darwin' / 'entitlements.plist'
|
|
||||||
|
|
||||||
# Identity of the Developer ID Application certificate which is to be used for
|
|
||||||
# codesign tool.
|
|
||||||
# Use `security find-identity -v -p codesigning` to find the identity.
|
|
||||||
#
|
|
||||||
# NOTE: This identity is just an example from release/darwin/README.txt.
|
|
||||||
MACOS_CODESIGN_IDENTITY = 'AE825E26F12D08B692F360133210AF46F4CF7B97'
|
|
||||||
|
|
||||||
# User name (Apple ID) which will be used to request notarization.
|
|
||||||
MACOS_XCRUN_USERNAME = 'me@example.com'
|
|
||||||
|
|
||||||
# One-time application password which will be used to request notarization.
|
|
||||||
MACOS_XCRUN_PASSWORD = '@keychain:altool-password'
|
|
||||||
|
|
||||||
# Timeout in seconds within which the notarial office is supposed to reply.
|
|
||||||
MACOS_NOTARIZE_TIMEOUT_IN_SECONDS = 60 * 60
|
|
||||||
|
|
||||||
################################################################################
|
|
||||||
# Windows-specific configuration.
|
|
||||||
|
|
||||||
# URL to the timestamping authority.
|
|
||||||
WIN_TIMESTAMP_AUTHORITY_URL = 'http://timestamp.digicert.com'
|
|
||||||
|
|
||||||
# Full path to the certificate used for signing.
|
|
||||||
#
|
|
||||||
# The path and expected file format might vary depending on a platform.
|
|
||||||
#
|
|
||||||
# On Windows it is usually a PKCS #12 key (.pfx), so the path will look
|
|
||||||
# like Path('C:\\Secret\\Blender.pfx').
|
|
||||||
WIN_CERTIFICATE_FILEPATH: Path
|
|
||||||
|
|
||||||
################################################################################
|
|
||||||
# Logging configuration, common for all platforms.
|
|
||||||
|
|
||||||
# https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema
|
|
||||||
LOGGING = {
|
|
||||||
'version': 1,
|
|
||||||
'formatters': {
|
|
||||||
'default': {'format': '%(asctime)-15s %(levelname)8s %(name)s %(message)s'}
|
|
||||||
},
|
|
||||||
'handlers': {
|
|
||||||
'console': {
|
|
||||||
'class': 'logging.StreamHandler',
|
|
||||||
'formatter': 'default',
|
|
||||||
'stream': 'ext://sys.stderr',
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'loggers': {
|
|
||||||
'codesign': {'level': 'INFO'},
|
|
||||||
},
|
|
||||||
'root': {
|
|
||||||
'level': 'WARNING',
|
|
||||||
'handlers': [
|
|
||||||
'console',
|
|
||||||
],
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,26 +0,0 @@
|
|||||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software Foundation,
|
|
||||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
#
|
|
||||||
# ##### END GPL LICENSE BLOCK #####
|
|
||||||
|
|
||||||
# <pep8 compliant>
|
|
||||||
|
|
||||||
class CodeSignException(Exception):
|
|
||||||
message: str
|
|
||||||
|
|
||||||
def __init__(self, message):
|
|
||||||
self.message = message
|
|
||||||
super().__init__(self.message)
|
|
@@ -1,72 +0,0 @@
|
|||||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software Foundation,
|
|
||||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
#
|
|
||||||
# ##### END GPL LICENSE BLOCK #####
|
|
||||||
|
|
||||||
# <pep8 compliant>
|
|
||||||
|
|
||||||
# NOTE: This is a no-op signer (since there isn't really a procedure to sign
|
|
||||||
# Linux binaries yet). Used to debug and verify the code signing routines on
|
|
||||||
# a Linux environment.
|
|
||||||
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import List
|
|
||||||
|
|
||||||
from codesign.absolute_and_relative_filename import AbsoluteAndRelativeFileName
|
|
||||||
from codesign.base_code_signer import BaseCodeSigner
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
logger_server = logger.getChild('server')
|
|
||||||
|
|
||||||
|
|
||||||
class LinuxCodeSigner(BaseCodeSigner):
|
|
||||||
def is_active(self) -> bool:
|
|
||||||
"""
|
|
||||||
Check whether this signer is active.
|
|
||||||
|
|
||||||
If it is inactive, no files will be signed.
|
|
||||||
|
|
||||||
This is used to debug the code signing pipeline on Linux, where there
|
|
||||||
is no code signing happening in the actual buildbot and release
|
|
||||||
environment.
|
|
||||||
"""
|
|
||||||
return False
|
|
||||||
|
|
||||||
def check_file_is_to_be_signed(
|
|
||||||
self, file: AbsoluteAndRelativeFileName) -> bool:
|
|
||||||
if file.relative_filepath == Path('blender'):
|
|
||||||
return True
|
|
||||||
if (file.relative_filepath.parts[-3:-1] == ('python', 'bin') and
|
|
||||||
file.relative_filepath.name.startswith('python')):
|
|
||||||
return True
|
|
||||||
if file.relative_filepath.suffix == '.so':
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
def collect_files_to_sign(self, path: Path) \
|
|
||||||
-> List[AbsoluteAndRelativeFileName]:
|
|
||||||
if not self.is_active():
|
|
||||||
return []
|
|
||||||
|
|
||||||
return super().collect_files_to_sign(path)
|
|
||||||
|
|
||||||
def sign_all_files(self, files: List[AbsoluteAndRelativeFileName]) -> None:
|
|
||||||
num_files = len(files)
|
|
||||||
for file_index, file in enumerate(files):
|
|
||||||
logger.info('Server: Signed file [%d/%d] %s',
|
|
||||||
file_index + 1, num_files, file.relative_filepath)
|
|
@@ -1,456 +0,0 @@
|
|||||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software Foundation,
|
|
||||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
#
|
|
||||||
# ##### END GPL LICENSE BLOCK #####
|
|
||||||
|
|
||||||
# <pep8 compliant>
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import re
|
|
||||||
import stat
|
|
||||||
import subprocess
|
|
||||||
import time
|
|
||||||
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import List
|
|
||||||
|
|
||||||
import codesign.util as util
|
|
||||||
|
|
||||||
from buildbot_utils import Builder
|
|
||||||
|
|
||||||
from codesign.absolute_and_relative_filename import AbsoluteAndRelativeFileName
|
|
||||||
from codesign.base_code_signer import BaseCodeSigner
|
|
||||||
from codesign.exception import CodeSignException
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
logger_server = logger.getChild('server')
|
|
||||||
|
|
||||||
# NOTE: Check is done as filename.endswith(), so keep the dot
|
|
||||||
EXTENSIONS_TO_BE_SIGNED = {'.dylib', '.so', '.dmg'}
|
|
||||||
|
|
||||||
# Prefixes of a file (not directory) name which are to be signed.
|
|
||||||
# Used to sign extra executable files in Contents/Resources.
|
|
||||||
NAME_PREFIXES_TO_BE_SIGNED = {'python'}
|
|
||||||
|
|
||||||
|
|
||||||
class NotarizationException(CodeSignException):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def is_file_from_bundle(file: AbsoluteAndRelativeFileName) -> bool:
|
|
||||||
"""
|
|
||||||
Check whether file is coming from an .app bundle
|
|
||||||
"""
|
|
||||||
parts = file.relative_filepath.parts
|
|
||||||
if not parts:
|
|
||||||
return False
|
|
||||||
if not parts[0].endswith('.app'):
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def get_bundle_from_file(
|
|
||||||
file: AbsoluteAndRelativeFileName) -> AbsoluteAndRelativeFileName:
|
|
||||||
"""
|
|
||||||
Get AbsoluteAndRelativeFileName descriptor of bundle
|
|
||||||
"""
|
|
||||||
assert is_file_from_bundle(file)
|
|
||||||
|
|
||||||
parts = file.relative_filepath.parts
|
|
||||||
bundle_name = parts[0]
|
|
||||||
|
|
||||||
base_dir = file.base_dir
|
|
||||||
bundle_filepath = file.base_dir / bundle_name
|
|
||||||
return AbsoluteAndRelativeFileName(base_dir, bundle_filepath)
|
|
||||||
|
|
||||||
|
|
||||||
def is_bundle_executable_file(file: AbsoluteAndRelativeFileName) -> bool:
|
|
||||||
"""
|
|
||||||
Check whether given file is an executable within an app bundle
|
|
||||||
"""
|
|
||||||
if not is_file_from_bundle(file):
|
|
||||||
return False
|
|
||||||
|
|
||||||
parts = file.relative_filepath.parts
|
|
||||||
num_parts = len(parts)
|
|
||||||
if num_parts < 3:
|
|
||||||
return False
|
|
||||||
|
|
||||||
if parts[1:3] != ('Contents', 'MacOS'):
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
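# Illustrative example (added for this review; the paths are hypothetical and
# it assumes AbsoluteAndRelativeFileName stores a base directory plus an
# absolute path and derives relative_filepath from them, as its usage in this
# file suggests):
def _example_bundle_helpers() -> None:
    base_dir = Path('/tmp/build')
    file = AbsoluteAndRelativeFileName(
        base_dir, base_dir / 'Blender.app' / 'Contents' / 'MacOS' / 'Blender')
    assert is_file_from_bundle(file)          # top-level folder ends with .app
    assert is_bundle_executable_file(file)    # sits in Contents/MacOS
    bundle = get_bundle_from_file(file)
    assert bundle.relative_filepath == Path('Blender.app')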
|
|
||||||
|
|
||||||
|
|
||||||
def xcrun_field_value_from_output(field: str, output: str) -> str:
|
|
||||||
"""
|
|
||||||
Get value of a given field from xcrun output.
|
|
||||||
|
|
||||||
If field is not found empty string is returned.
|
|
||||||
"""
|
|
||||||
|
|
||||||
field_prefix = field + ': '
|
|
||||||
for line in output.splitlines():
|
|
||||||
line = line.strip()
|
|
||||||
if line.startswith(field_prefix):
|
|
||||||
return line[len(field_prefix):]
|
|
||||||
return ''
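# Illustrative sketch (added for this review; the output below is made up but
# follows the "Field: value" layout that the notarization-info command prints):
def _example_xcrun_parsing() -> None:
    output = ('   RequestUUID: 00000000-0000-0000-0000-000000000000\n'
              '        Status: in progress\n'
              'Status Message: Package Approved\n')
    assert xcrun_field_value_from_output('Status', output) == 'in progress'
    assert xcrun_field_value_from_output('Status Message', output) == 'Package Approved'
    # Unknown fields fall back to an empty string.
    assert xcrun_field_value_from_output('LogFileURL', output) == ''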
|
|
||||||
|
|
||||||
|
|
||||||
class MacOSCodeSigner(BaseCodeSigner):
|
|
||||||
def check_file_is_to_be_signed(
|
|
||||||
self, file: AbsoluteAndRelativeFileName) -> bool:
|
|
||||||
if file.relative_filepath.name.startswith('.'):
|
|
||||||
return False
|
|
||||||
|
|
||||||
if is_bundle_executable_file(file):
|
|
||||||
return True
|
|
||||||
|
|
||||||
base_name = file.relative_filepath.name
|
|
||||||
if any(base_name.startswith(prefix)
|
|
||||||
for prefix in NAME_PREFIXES_TO_BE_SIGNED):
|
|
||||||
return True
|
|
||||||
|
|
||||||
mode = file.absolute_filepath.lstat().st_mode
|
|
||||||
if mode & stat.S_IXUSR != 0:
|
|
||||||
file_output = subprocess.check_output(
|
|
||||||
("file", file.absolute_filepath)).decode()
|
|
||||||
if "64-bit executable" in file_output:
|
|
||||||
return True
|
|
||||||
|
|
||||||
return file.relative_filepath.suffix in EXTENSIONS_TO_BE_SIGNED
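# Example classification (hypothetical file names, added for this review):
#   'Blender.app/Contents/MacOS/Blender'  -> signed (bundle executable)
#   'lib/libsomething.dylib'              -> signed ('.dylib' extension)
#   'python/bin/python3.7m'               -> signed ('python' name prefix)
#   '.DS_Store'                           -> skipped (hidden file)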
|
|
||||||
|
|
||||||
def collect_files_to_sign(self, path: Path) \
|
|
||||||
-> List[AbsoluteAndRelativeFileName]:
|
|
||||||
# Include all files when signing app or dmg bundle: all the files are
|
|
||||||
# needed to do a valid signature of the bundle.
|
|
||||||
if path.name.endswith('.app'):
|
|
||||||
return AbsoluteAndRelativeFileName.recursively_from_directory(path)
|
|
||||||
if path.is_dir():
|
|
||||||
files = []
|
|
||||||
for child in path.iterdir():
|
|
||||||
if child.name.endswith('.app'):
|
|
||||||
current_files = AbsoluteAndRelativeFileName.recursively_from_directory(
|
|
||||||
child)
|
|
||||||
else:
|
|
||||||
current_files = super().collect_files_to_sign(child)
|
|
||||||
for current_file in current_files:
|
|
||||||
files.append(AbsoluteAndRelativeFileName(
|
|
||||||
path, current_file.absolute_filepath))
|
|
||||||
return files
|
|
||||||
return super().collect_files_to_sign(path)
|
|
||||||
|
|
||||||
############################################################################
|
|
||||||
# Codesign.
|
|
||||||
|
|
||||||
def codesign_remove_signature(
|
|
||||||
self, file: AbsoluteAndRelativeFileName) -> None:
|
|
||||||
"""
|
|
||||||
Make sure given file does not have codesign signature
|
|
||||||
|
|
||||||
This is needed because codesigning is not possible for a file which has
|
|
||||||
a signature already.
|
|
||||||
"""
|
|
||||||
|
|
||||||
logger_server.info(
|
|
||||||
'Removing codesign signature from %s...', file.relative_filepath)
|
|
||||||
|
|
||||||
command = ['codesign', '--remove-signature', file.absolute_filepath]
|
|
||||||
self.run_command_or_mock(command, util.Platform.MACOS)
|
|
||||||
|
|
||||||
def codesign_file(
|
|
||||||
self, file: AbsoluteAndRelativeFileName) -> None:
|
|
||||||
"""
|
|
||||||
Sign given file
|
|
||||||
|
|
||||||
NOTE: File must not have any signatures.
|
|
||||||
"""
|
|
||||||
|
|
||||||
logger_server.info(
|
|
||||||
'Codesigning %s...', file.relative_filepath)
|
|
||||||
|
|
||||||
entitlements_file = self.config.MACOS_ENTITLEMENTS_FILE
|
|
||||||
command = ['codesign',
|
|
||||||
'--timestamp',
|
|
||||||
'--options', 'runtime',
|
|
||||||
f'--entitlements={entitlements_file}',
|
|
||||||
'--sign', self.config.MACOS_CODESIGN_IDENTITY,
|
|
||||||
file.absolute_filepath]
|
|
||||||
self.run_command_or_mock(command, util.Platform.MACOS)
|
|
||||||
|
|
||||||
def codesign_all_files(self, files: List[AbsoluteAndRelativeFileName]) -> None:
|
|
||||||
"""
|
|
||||||
Run codesign tool on all eligible files in the given list.
|
|
||||||
|
|
||||||
Will ignore all files which are not to be signed. For the rest it will
|
|
||||||
remove any existing signature and add a new one.
|
|
||||||
"""
|
|
||||||
|
|
||||||
num_files = len(files)
|
|
||||||
have_ignored_files = False
|
|
||||||
signed_files = []
|
|
||||||
for file_index, file in enumerate(files):
|
|
||||||
# Ignore file if it is not to be signed.
|
|
||||||
# This allows one to manually construct a ZIP of a bundle and get it signed.
|
|
||||||
if not self.check_file_is_to_be_signed(file):
|
|
||||||
logger_server.info(
|
|
||||||
'Ignoring file [%d/%d] %s',
|
|
||||||
file_index + 1, num_files, file.relative_filepath)
|
|
||||||
have_ignored_files = True
|
|
||||||
continue
|
|
||||||
|
|
||||||
logger_server.info(
|
|
||||||
'Running codesigning routines for file [%d/%d] %s...',
|
|
||||||
file_index + 1, num_files, file.relative_filepath)
|
|
||||||
|
|
||||||
self.codesign_remove_signature(file)
|
|
||||||
self.codesign_file(file)
|
|
||||||
|
|
||||||
signed_files.append(file)
|
|
||||||
|
|
||||||
if have_ignored_files:
|
|
||||||
logger_server.info('Signed %d files:', len(signed_files))
|
|
||||||
num_signed_files = len(signed_files)
|
|
||||||
for file_index, signed_file in enumerate(signed_files):
|
|
||||||
logger_server.info(
|
|
||||||
'- [%d/%d] %s',
|
|
||||||
file_index + 1, num_signed_files,
|
|
||||||
signed_file.relative_filepath)
|
|
||||||
|
|
||||||
def codesign_bundles(
|
|
||||||
self, files: List[AbsoluteAndRelativeFileName]) -> None:
|
|
||||||
"""
|
|
||||||
Codesign all .app bundles in the given list of files.
|
|
||||||
|
|
||||||
The bundle is deduced from the paths of the files, and every bundle is only
|
|
||||||
signed once.
|
|
||||||
"""
|
|
||||||
|
|
||||||
signed_bundles = set()
|
|
||||||
extra_files = []
|
|
||||||
|
|
||||||
for file in files:
|
|
||||||
if not is_file_from_bundle(file):
|
|
||||||
continue
|
|
||||||
bundle = get_bundle_from_file(file)
|
|
||||||
bundle_name = bundle.relative_filepath
|
|
||||||
if bundle_name in signed_bundles:
|
|
||||||
continue
|
|
||||||
|
|
||||||
logger_server.info('Running codesign routines on bundle %s',
|
|
||||||
bundle_name)
|
|
||||||
|
|
||||||
# It is not possible to remove signature from DMG.
|
|
||||||
if bundle.relative_filepath.name.endswith('.app'):
|
|
||||||
self.codesign_remove_signature(bundle)
|
|
||||||
self.codesign_file(bundle)
|
|
||||||
|
|
||||||
signed_bundles.add(bundle_name)
|
|
||||||
|
|
||||||
# Codesign on a bundle adds an extra folder with information.
|
|
||||||
# It needs to be copied to the source.
|
|
||||||
code_signature_directory = \
|
|
||||||
bundle.absolute_filepath / 'Contents' / '_CodeSignature'
|
|
||||||
code_signature_files = \
|
|
||||||
AbsoluteAndRelativeFileName.recursively_from_directory(
|
|
||||||
code_signature_directory)
|
|
||||||
for code_signature_file in code_signature_files:
|
|
||||||
bundle_relative_file = AbsoluteAndRelativeFileName(
|
|
||||||
bundle.base_dir,
|
|
||||||
code_signature_directory /
|
|
||||||
code_signature_file.relative_filepath)
|
|
||||||
extra_files.append(bundle_relative_file)
|
|
||||||
|
|
||||||
files.extend(extra_files)
|
|
||||||
|
|
||||||
############################################################################
|
|
||||||
# Notarization.
|
|
||||||
|
|
||||||
def notarize_get_bundle_id(self, file: AbsoluteAndRelativeFileName) -> str:
|
|
||||||
"""
|
|
||||||
Get bundle ID which will be used to notarize DMG
|
|
||||||
"""
|
|
||||||
name = file.relative_filepath.name
|
|
||||||
app_name = name.split('-', 2)[0].lower()
|
|
||||||
|
|
||||||
app_name_words = app_name.split()
|
|
||||||
if len(app_name_words) > 1:
|
|
||||||
app_name_id = ''.join(word.capitalize() for word in app_name_words)
|
|
||||||
else:
|
|
||||||
app_name_id = app_name_words[0]
|
|
||||||
|
|
||||||
# TODO(sergey): Consider using "alpha" for buildbot builds.
|
|
||||||
return f'org.blenderfoundation.{app_name_id}.release'
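# Illustrative walk-through (hypothetical DMG name, added for this review):
# for 'blender-2.90.0-abcdef123456-macos.dmg' the code above derives
#   app_name    -> 'blender'
#   app_name_id -> 'blender'
#   bundle ID   -> 'org.blenderfoundation.blender.release'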
|
|
||||||
|
|
||||||
def notarize_request(self, file: AbsoluteAndRelativeFileName) -> str:
|
|
||||||
"""
|
|
||||||
Request notarization of the given file.
|
|
||||||
|
|
||||||
Returns UUID of the notarization request. If an error occurred, None is
|
|
||||||
returned instead of UUID.
|
|
||||||
"""
|
|
||||||
|
|
||||||
bundle_id = self.notarize_get_bundle_id(file)
|
|
||||||
logger_server.info('Bundle ID: %s', bundle_id)
|
|
||||||
|
|
||||||
logger_server.info('Submitting file to the notarial office.')
|
|
||||||
command = [
|
|
||||||
'xcrun', 'altool', '--notarize-app', '--verbose',
|
|
||||||
'-f', file.absolute_filepath,
|
|
||||||
'--primary-bundle-id', bundle_id,
|
|
||||||
'--username', self.config.MACOS_XCRUN_USERNAME,
|
|
||||||
'--password', self.config.MACOS_XCRUN_PASSWORD]
|
|
||||||
|
|
||||||
output = self.check_output_or_mock(
|
|
||||||
command, util.Platform.MACOS, allow_nonzero_exit_code=True)
|
|
||||||
|
|
||||||
for line in output.splitlines():
|
|
||||||
line = line.strip()
|
|
||||||
if line.startswith('RequestUUID = '):
|
|
||||||
request_uuid = line[14:]
|
|
||||||
return request_uuid
|
|
||||||
|
|
||||||
# Check whether the package has already been submitted.
|
|
||||||
if 'The software asset has already been uploaded.' in line:
|
|
||||||
request_uuid = re.sub(
|
|
||||||
r'.*The upload ID is ([A-Fa-f0-9\-]+).*', '\\1', line)
|
|
||||||
logger_server.warning(
|
|
||||||
f'The package has already been submitted under UUID {request_uuid}')
|
|
||||||
return request_uuid
|
|
||||||
|
|
||||||
logger_server.error(output)
|
|
||||||
logger_server.error('xcrun command did not report RequestUUID')
|
|
||||||
return None
|
|
||||||
|
|
||||||
def notarize_review_status(self, xcrun_output: str) -> bool:
|
|
||||||
"""
|
|
||||||
Review status returned by xcrun's notarization info
|
|
||||||
|
|
||||||
Returns True if the notarization process has finished.
|
|
||||||
If there are errors during notarization, a NotarizationException()
|
|
||||||
exception is thrown with status message from the notarial office.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Parse status and message
|
|
||||||
status = xcrun_field_value_from_output('Status', xcrun_output)
|
|
||||||
status_message = xcrun_field_value_from_output(
|
|
||||||
'Status Message', xcrun_output)
|
|
||||||
|
|
||||||
if status == 'success':
|
|
||||||
logger_server.info(
|
|
||||||
'Package successfully notarized: %s', status_message)
|
|
||||||
return True
|
|
||||||
|
|
||||||
if status == 'invalid':
|
|
||||||
logger_server.error(xcrun_output)
|
|
||||||
logger_server.error(
|
|
||||||
'Package notarization has failed: %s', status_message)
|
|
||||||
raise NotarizationException(status_message)
|
|
||||||
|
|
||||||
if status == 'in progress':
|
|
||||||
return False
|
|
||||||
|
|
||||||
logger_server.info(
|
|
||||||
'Unknown notarization status %s (%s)', status, status_message)
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
def notarize_wait_result(self, request_uuid: str) -> bool:
|
|
||||||
"""
|
|
||||||
Wait until the notarial office has a reply.

Returns True once notarization has finished and False if no reply arrived
within the configured timeout.
|
|
||||||
"""
|
|
||||||
|
|
||||||
logger_server.info(
|
|
||||||
'Waiting for a result from the notarization office.')
|
|
||||||
|
|
||||||
command = ['xcrun', 'altool',
|
|
||||||
'--notarization-info', request_uuid,
|
|
||||||
'--username', self.config.MACOS_XCRUN_USERNAME,
|
|
||||||
'--password', self.config.MACOS_XCRUN_PASSWORD]
|
|
||||||
|
|
||||||
time_start = time.monotonic()
|
|
||||||
timeout_in_seconds = self.config.MACOS_NOTARIZE_TIMEOUT_IN_SECONDS
|
|
||||||
|
|
||||||
while True:
|
|
||||||
xcrun_output = self.check_output_or_mock(
|
|
||||||
command, util.Platform.MACOS, allow_nonzero_exit_code=True)
|
|
||||||
|
|
||||||
if self.notarize_review_status(xcrun_output):
|
|
||||||
return True
|
|
||||||
|
|
||||||
logger_server.info('Keep waiting for notarization office.')
|
|
||||||
time.sleep(30)
|
|
||||||
|
|
||||||
time_slept_in_seconds = time.monotonic() - time_start
|
|
||||||
if time_slept_in_seconds > timeout_in_seconds:
|
|
||||||
logger_server.error(
|
|
||||||
"Notarial office didn't reply in %f seconds.",
|
|
||||||
timeout_in_seconds)
return False
|
|
||||||
|
|
||||||
def notarize_staple(self, file: AbsoluteAndRelativeFileName) -> None:
|
|
||||||
"""
|
|
||||||
Staple notarial label on the file
|
|
||||||
"""
|
|
||||||
|
|
||||||
logger_server.info('Stapling notarial stamp.')
|
|
||||||
|
|
||||||
command = ['xcrun', 'stapler', 'staple', '-v', file.absolute_filepath]
|
|
||||||
self.check_output_or_mock(command, util.Platform.MACOS)
|
|
||||||
|
|
||||||
def notarize_dmg(self, file: AbsoluteAndRelativeFileName) -> bool:
|
|
||||||
"""
|
|
||||||
Run entire pipeline to get DMG notarized.
|
|
||||||
"""
|
|
||||||
logger_server.info('Begin notarization routines on %s',
|
|
||||||
file.relative_filepath)
|
|
||||||
|
|
||||||
# Submit file for notarization.
|
|
||||||
request_uuid = self.notarize_request(file)
|
|
||||||
if not request_uuid:
|
|
||||||
return False
|
|
||||||
logger_server.info('Received Request UUID: %s', request_uuid)
|
|
||||||
|
|
||||||
# Wait for the status from the notarization office.
|
|
||||||
if not self.notarize_wait_result(request_uuid):
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Staple.
|
|
||||||
self.notarize_staple(file)

return True
|
|
||||||
|
|
||||||
def notarize_all_dmg(
|
|
||||||
self, files: List[AbsoluteAndRelativeFileName]) -> None:
|
|
||||||
"""
|
|
||||||
Notarize all DMG images from the input.
|
|
||||||
|
|
||||||
Images are supposed to be codesigned already.
|
|
||||||
"""
|
|
||||||
for file in files:
|
|
||||||
if not file.relative_filepath.name.endswith('.dmg'):
|
|
||||||
continue
|
|
||||||
if not self.check_file_is_to_be_signed(file):
|
|
||||||
continue
|
|
||||||
|
|
||||||
self.notarize_dmg(file)
|
|
||||||
|
|
||||||
############################################################################
|
|
||||||
# Entry point.
|
|
||||||
|
|
||||||
def sign_all_files(self, files: List[AbsoluteAndRelativeFileName]) -> None:
|
|
||||||
# TODO(sergey): Handle errors somehow.
|
|
||||||
|
|
||||||
self.codesign_all_files(files)
|
|
||||||
self.codesign_bundles(files)
|
|
||||||
self.notarize_all_dmg(files)
|
|
@@ -1,52 +0,0 @@
|
|||||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software Foundation,
|
|
||||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
#
|
|
||||||
# ##### END GPL LICENSE BLOCK #####
|
|
||||||
|
|
||||||
# <pep8 compliant>
|
|
||||||
|
|
||||||
|
|
||||||
import logging.config
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
import codesign.config_builder
|
|
||||||
import codesign.util as util
|
|
||||||
from codesign.base_code_signer import BaseCodeSigner
|
|
||||||
|
|
||||||
|
|
||||||
class SimpleCodeSigner:
|
|
||||||
code_signer: Optional[BaseCodeSigner]
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
platform = util.get_current_platform()
|
|
||||||
if platform == util.Platform.LINUX:
|
|
||||||
from codesign.linux_code_signer import LinuxCodeSigner
|
|
||||||
self.code_signer = LinuxCodeSigner(codesign.config_builder)
|
|
||||||
elif platform == util.Platform.MACOS:
|
|
||||||
from codesign.macos_code_signer import MacOSCodeSigner
|
|
||||||
self.code_signer = MacOSCodeSigner(codesign.config_builder)
|
|
||||||
elif platform == util.Platform.WINDOWS:
|
|
||||||
from codesign.windows_code_signer import WindowsCodeSigner
|
|
||||||
self.code_signer = WindowsCodeSigner(codesign.config_builder)
|
|
||||||
else:
|
|
||||||
self.code_signer = None
|
|
||||||
|
|
||||||
def sign_file_or_directory(self, path: Path) -> None:
|
|
||||||
logging.config.dictConfig(codesign.config_builder.LOGGING)
|
|
||||||
self.code_signer.run_buildbot_path_sign_pipeline(path)
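# Illustrative usage sketch (added for this review; the path is hypothetical):
# this is how the buildbot worker side is expected to drive the signer.
# SimpleCodeSigner picks the platform-specific implementation, and the call
# presumably blocks until the signing server writes the signed archive back
# (see TIMEOUT_IN_SECONDS in codesign/config_common.py).
def _example_worker_usage() -> None:
    signer = SimpleCodeSigner()
    signer.sign_file_or_directory(Path('/tmp/build/blender-install'))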
|
|
@@ -1,54 +0,0 @@
|
|||||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software Foundation,
|
|
||||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
#
|
|
||||||
# ##### END GPL LICENSE BLOCK #####
|
|
||||||
|
|
||||||
# <pep8 compliant>
|
|
||||||
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from enum import Enum
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
|
|
||||||
class Platform(Enum):
|
|
||||||
LINUX = 1
|
|
||||||
MACOS = 2
|
|
||||||
WINDOWS = 3
|
|
||||||
|
|
||||||
|
|
||||||
def get_current_platform() -> Platform:
|
|
||||||
if sys.platform == 'linux':
|
|
||||||
return Platform.LINUX
|
|
||||||
elif sys.platform == 'darwin':
|
|
||||||
return Platform.MACOS
|
|
||||||
elif sys.platform == 'win32':
|
|
||||||
return Platform.WINDOWS
|
|
||||||
raise Exception(f'Unknown platform {sys.platform}')
|
|
||||||
|
|
||||||
|
|
||||||
def ensure_file_does_not_exist_or_die(filepath: Path) -> None:
|
|
||||||
"""
|
|
||||||
If the file exists, unlink it.
|
|
||||||
If the file path exists and is not a file, a SystemExit is raised.
|
|
||||||
If the file path does not exist, nothing happens.
|
|
||||||
"""
|
|
||||||
if not filepath.exists():
|
|
||||||
return
|
|
||||||
if not filepath.is_file():
|
|
||||||
# TODO(sergey): Provide information about what the filepath actually is.
|
|
||||||
raise SystemExit(f'{filepath} is expected to be a file, but is not')
|
|
||||||
filepath.unlink()
|
|
@@ -1,117 +0,0 @@
|
|||||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software Foundation,
|
|
||||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
#
|
|
||||||
# ##### END GPL LICENSE BLOCK #####
|
|
||||||
|
|
||||||
# <pep8 compliant>
|
|
||||||
|
|
||||||
import logging
import subprocess
|
|
||||||
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import List
|
|
||||||
|
|
||||||
import codesign.util as util
|
|
||||||
|
|
||||||
from buildbot_utils import Builder
|
|
||||||
|
|
||||||
from codesign.absolute_and_relative_filename import AbsoluteAndRelativeFileName
|
|
||||||
from codesign.base_code_signer import BaseCodeSigner
|
|
||||||
from codesign.exception import CodeSignException
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
logger_server = logger.getChild('server')
|
|
||||||
|
|
||||||
# NOTE: Check is done as filename.endswith(), so keep the dot
|
|
||||||
EXTENSIONS_TO_BE_SIGNED = {'.exe', '.dll', '.pyd', '.msi'}
|
|
||||||
|
|
||||||
BLACKLIST_FILE_PREFIXES = (
|
|
||||||
'api-ms-', 'concrt', 'msvcp', 'ucrtbase', 'vcomp', 'vcruntime')
|
|
||||||
|
|
||||||
|
|
||||||
class SigntoolException(CodeSignException):
|
|
||||||
pass
|
|
||||||
|
|
||||||
class WindowsCodeSigner(BaseCodeSigner):
|
|
||||||
def check_file_is_to_be_signed(
|
|
||||||
self, file: AbsoluteAndRelativeFileName) -> bool:
|
|
||||||
base_name = file.relative_filepath.name
|
|
||||||
if any(base_name.startswith(prefix)
|
|
||||||
for prefix in BLACKLIST_FILE_PREFIXES):
|
|
||||||
return False
|
|
||||||
|
|
||||||
return file.relative_filepath.suffix in EXTENSIONS_TO_BE_SIGNED
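# Example classification (hypothetical file names, added for this review):
#   'blender.exe'       -> signed ('.exe' extension)
#   'python39.dll'      -> signed ('.dll' extension)
#   'vcruntime140.dll'  -> skipped (blacklisted runtime prefix)
#   'blender.pdb'       -> skipped (extension not in EXTENSIONS_TO_BE_SIGNED)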
|
|
||||||
|
|
||||||
|
|
||||||
def get_sign_command_prefix(self) -> List[str]:
|
|
||||||
return [
|
|
||||||
'signtool', 'sign', '/v',
|
|
||||||
'/f', self.config.WIN_CERTIFICATE_FILEPATH,
|
|
||||||
'/tr', self.config.WIN_TIMESTAMP_AUTHORITY_URL]
|
|
||||||
|
|
||||||
|
|
||||||
def run_codesign_tool(self, filepath: Path) -> None:
|
|
||||||
command = self.get_sign_command_prefix() + [filepath]
|
|
||||||
|
|
||||||
try:
|
|
||||||
codesign_output = self.check_output_or_mock(command, util.Platform.WINDOWS)
|
|
||||||
except subprocess.CalledProcessError as e:
|
|
||||||
raise SigntoolException(f'Error running signtool {e}')
|
|
||||||
|
|
||||||
logger_server.info(f'signtool output:\n{codesign_output}')
|
|
||||||
|
|
||||||
got_number_of_success = False
|
|
||||||
|
|
||||||
for line in codesign_output.split('\n'):
|
|
||||||
line_clean = line.strip()
|
|
||||||
line_clean_lower = line_clean.lower()
|
|
||||||
|
|
||||||
if line_clean_lower.startswith('number of warnings') or \
|
|
||||||
line_clean_lower.startswith('number of errors'):
|
|
||||||
number = int(line_clean_lower.split(':')[1])
|
|
||||||
if number != 0:
|
|
||||||
raise SigntoolException('Non-clean success of signtool')
|
|
||||||
|
|
||||||
if line_clean_lower.startswith('number of files successfully signed'):
|
|
||||||
got_number_of_success = True
|
|
||||||
number = int(line_clean_lower.split(':')[1])
|
|
||||||
if number != 1:
|
|
||||||
raise SigntoolException('Signtool did not consider codesign a success')
|
|
||||||
|
|
||||||
if not got_number_of_success:
|
|
||||||
raise SigntoolException('Signtool did not report number of files signed')
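# Illustrative signtool output (made up, added for this review) which the
# checks above accept as a clean success:
#
#   Number of files successfully Signed: 1
#   Number of warnings: 0
#   Number of errors: 0
#
# Any non-zero warning or error count, or a missing "successfully signed"
# line, raises SigntoolException instead.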
|
|
||||||
|
|
||||||
|
|
||||||
def sign_all_files(self, files: List[AbsoluteAndRelativeFileName]) -> None:
|
|
||||||
# NOTE: Sign files one by one to avoid possible command line length
|
|
||||||
# overflow (which could happen if we ever decide to sign every binary
|
|
||||||
# in the install folder, for example).
|
|
||||||
#
|
|
||||||
# TODO(sergey): Consider doing batched signing of a handful of files in
|
|
||||||
# one go (but only if this is actually known to be much faster).
|
|
||||||
num_files = len(files)
|
|
||||||
for file_index, file in enumerate(files):
|
|
||||||
# Ignore file if it is not to be signed.
|
|
||||||
# This allows one to manually construct a ZIP of the package and get it signed.
|
|
||||||
if not self.check_file_is_to_be_signed(file):
|
|
||||||
logger_server.info(
|
|
||||||
'Ignoring file [%d/%d] %s',
|
|
||||||
file_index + 1, num_files, file.relative_filepath)
|
|
||||||
continue
|
|
||||||
|
|
||||||
logger_server.info(
|
|
||||||
'Running signtool command for file [%d/%d] %s...',
|
|
||||||
file_index + 1, num_files, file.relative_filepath)
|
|
||||||
self.run_codesign_tool(file.absolute_filepath)
|
|
@@ -1,37 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software Foundation,
|
|
||||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
#
|
|
||||||
# ##### END GPL LICENSE BLOCK #####
|
|
||||||
|
|
||||||
# <pep8 compliant>
|
|
||||||
|
|
||||||
# NOTE: This is a no-op signer (since there isn't really a procedure to sign
|
|
||||||
# Linux binaries yet). Used to debug and verify the code signing routines on
|
|
||||||
# a Linux environment.
|
|
||||||
|
|
||||||
import logging.config
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import List
|
|
||||||
|
|
||||||
from codesign.linux_code_signer import LinuxCodeSigner
|
|
||||||
import codesign.config_server
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
logging.config.dictConfig(codesign.config_server.LOGGING)
|
|
||||||
code_signer = LinuxCodeSigner(codesign.config_server)
|
|
||||||
code_signer.run_signing_server()
|
|
@@ -1,41 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software Foundation,
|
|
||||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
#
|
|
||||||
# ##### END GPL LICENSE BLOCK #####
|
|
||||||
|
|
||||||
# <pep8 compliant>
|
|
||||||
|
|
||||||
import logging.config
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import List
|
|
||||||
|
|
||||||
from codesign.macos_code_signer import MacOSCodeSigner
|
|
||||||
import codesign.config_server
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
entitlements_file = codesign.config_server.MACOS_ENTITLEMENTS_FILE
|
|
||||||
if not entitlements_file.exists():
|
|
||||||
raise SystemExit(
|
|
||||||
f'Entitlements file {entitlements_file} does not exist.')
|
|
||||||
if not entitlements_file.is_file():
|
|
||||||
raise SystemExit(
|
|
||||||
f'Entitlements file {entitlements_file} is not a file.')
|
|
||||||
|
|
||||||
logging.config.dictConfig(codesign.config_server.LOGGING)
|
|
||||||
code_signer = MacOSCodeSigner(codesign.config_server)
|
|
||||||
code_signer.run_signing_server()
|
|
@@ -1,11 +0,0 @@
|
|||||||
@echo off
|
|
||||||
|
|
||||||
rem This is an entry point of the codesign server for Windows.
|
|
||||||
rem It makes sure that signtool.exe is within the current PATH and can be
|
|
||||||
rem used by the Python script.
|
|
||||||
|
|
||||||
SETLOCAL
|
|
||||||
|
|
||||||
set PATH=C:\Program Files (x86)\Windows Kits\10\App Certification Kit;%PATH%
|
|
||||||
|
|
||||||
codesign_server_windows.py
|
|
@@ -1,54 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software Foundation,
|
|
||||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
#
|
|
||||||
# ##### END GPL LICENSE BLOCK #####
|
|
||||||
|
|
||||||
# <pep8 compliant>
|
|
||||||
|
|
||||||
# Implementation of codesign server for Windows.
|
|
||||||
#
|
|
||||||
# NOTE: If signtool.exe is not in the PATH use codesign_server_windows.bat
|
|
||||||
|
|
||||||
import logging.config
|
|
||||||
import shutil
|
|
||||||
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import List
|
|
||||||
|
|
||||||
import codesign.util as util
|
|
||||||
|
|
||||||
from codesign.windows_code_signer import WindowsCodeSigner
|
|
||||||
import codesign.config_server
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
logging.config.dictConfig(codesign.config_server.LOGGING)
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
logger_server = logger.getChild('server')
|
|
||||||
|
|
||||||
# TODO(sergey): Consider moving such sanity checks into
|
|
||||||
# CodeSigner.check_environment_or_die().
|
|
||||||
if not shutil.which('signtool.exe'):
|
|
||||||
if util.get_current_platform() == util.Platform.WINDOWS:
|
|
||||||
raise SystemExit("signtool.exe is not found in %PATH%")
|
|
||||||
logger_server.info(
|
|
||||||
'signtool.exe not found, '
|
|
||||||
'but will not be used on this foreign platform')
|
|
||||||
|
|
||||||
code_signer = WindowsCodeSigner(codesign.config_server)
|
|
||||||
code_signer.run_signing_server()
|
|
@@ -1,551 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software Foundation,
|
|
||||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
#
|
|
||||||
# ##### END GPL LICENSE BLOCK #####
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import re
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
import time
|
|
||||||
|
|
||||||
from pathlib import Path
|
|
||||||
from tempfile import TemporaryDirectory, NamedTemporaryFile
|
|
||||||
from typing import List
|
|
||||||
|
|
||||||
BUILDBOT_DIRECTORY = Path(__file__).absolute().parent
|
|
||||||
CODESIGN_SCRIPT = BUILDBOT_DIRECTORY / 'worker_codesign.py'
|
|
||||||
BLENDER_GIT_ROOT_DIRECTORY = BUILDBOT_DIRECTORY.parent.parent
|
|
||||||
DARWIN_DIRECTORY = BLENDER_GIT_ROOT_DIRECTORY / 'release' / 'darwin'
|
|
||||||
|
|
||||||
|
|
||||||
# Extra size which is added on top of the actual file size when estimating the
|
|
||||||
# size of the destination DMG.
|
|
||||||
EXTRA_DMG_SIZE_IN_BYTES = 800 * 1024 * 1024
|
|
||||||
|
|
||||||
################################################################################
|
|
||||||
# Common utilities
|
|
||||||
|
|
||||||
|
|
||||||
def get_directory_size(root_directory: Path) -> int:
|
|
||||||
"""
|
|
||||||
Get size of directory on disk
|
|
||||||
"""
|
|
||||||
|
|
||||||
total_size = 0
|
|
||||||
for file in root_directory.glob('**/*'):
|
|
||||||
total_size += file.lstat().st_size
|
|
||||||
return total_size
|
|
||||||
|
|
||||||
|
|
||||||
################################################################################
|
|
||||||
# DMG bundling specific logic
|
|
||||||
|
|
||||||
def create_argument_parser():
|
|
||||||
parser = argparse.ArgumentParser()
|
|
||||||
parser.add_argument(
|
|
||||||
'source_dir',
|
|
||||||
type=Path,
|
|
||||||
help='Source directory which points to either an existing .app bundle '
|
|
||||||
'or to a directory with .app bundles.')
|
|
||||||
parser.add_argument(
|
|
||||||
'--background-image',
|
|
||||||
type=Path,
|
|
||||||
help="Optional background picture which will be set on the DMG."
|
|
||||||
"If not provided default Blender's one is used.")
|
|
||||||
parser.add_argument(
|
|
||||||
'--volume-name',
|
|
||||||
type=str,
|
|
||||||
help='Optional name of a volume which will be used for DMG.')
|
|
||||||
parser.add_argument(
|
|
||||||
'--dmg',
|
|
||||||
type=Path,
|
|
||||||
help='Optional argument which points to a final DMG file name.')
|
|
||||||
parser.add_argument(
|
|
||||||
'--applescript',
|
|
||||||
type=Path,
|
|
||||||
help="Optional path to applescript to set up folder looks of DMG."
|
|
||||||
"If not provided default Blender's one is used.")
|
|
||||||
parser.add_argument(
|
|
||||||
'--codesign',
|
|
||||||
action="store_true",
|
|
||||||
help="Code sign and notarize DMG contents.")
|
|
||||||
return parser
|
|
||||||
|
|
||||||
|
|
||||||
def collect_app_bundles(source_dir: Path) -> List[Path]:
|
|
||||||
"""
|
|
||||||
Collect all app bundles which are to be put into DMG
|
|
||||||
|
|
||||||
If the source directory points to FOO.app it will be the only app bundle
|
|
||||||
packed.
|
|
||||||
|
|
||||||
Otherwise all .app bundles from the given directory are placed into a single
|
|
||||||
DMG.
|
|
||||||
"""
|
|
||||||
|
|
||||||
if source_dir.name.endswith('.app'):
|
|
||||||
return [source_dir]
|
|
||||||
|
|
||||||
app_bundles = []
|
|
||||||
for filename in source_dir.glob('*'):
|
|
||||||
if not filename.is_dir():
|
|
||||||
continue
|
|
||||||
if not filename.name.endswith('.app'):
|
|
||||||
continue
|
|
||||||
|
|
||||||
app_bundles.append(filename)
|
|
||||||
|
|
||||||
return app_bundles
|
|
||||||
|
|
||||||
|
|
||||||
def collect_and_log_app_bundles(source_dir: Path) -> List[Path]:
|
|
||||||
app_bundles = collect_app_bundles(source_dir)
|
|
||||||
|
|
||||||
if not app_bundles:
|
|
||||||
print('No app bundles found for packing')
|
|
||||||
return []
|
|
||||||
|
|
||||||
print(f'Found {len(app_bundles)} app bundles to pack:')
|
|
||||||
for app_bundle in app_bundles:
|
|
||||||
print(f'- {app_bundle}')
|
|
||||||
|
|
||||||
return app_bundles
|
|
||||||
|
|
||||||
|
|
||||||
def estimate_dmg_size(app_bundles: List[Path]) -> int:
|
|
||||||
"""
|
|
||||||
Estimate size of DMG to hold requested app bundles
|
|
||||||
|
|
||||||
The size is based on actual size of all files in all bundles plus some
|
|
||||||
space to compensate for different size-on-disk plus some space to hold
|
|
||||||
codesign signatures.
|
|
||||||
|
|
||||||
It is better to err on the high side since the empty space is compressed, but
|
|
||||||
lack of space might cause silent failures later on.
|
|
||||||
"""
|
|
||||||
|
|
||||||
app_bundles_size = 0
|
|
||||||
for app_bundle in app_bundles:
|
|
||||||
app_bundles_size += get_directory_size(app_bundle)
|
|
||||||
|
|
||||||
return app_bundles_size + EXTRA_DMG_SIZE_IN_BYTES
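# Worked example (hypothetical numbers, added for this review): a single
# 500 MiB Blender.app bundle yields
#   524288000 + EXTRA_DMG_SIZE_IN_BYTES (838860800) = 1363148800 bytes,
# i.e. roughly 1.27 GiB for the writable DMG.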
|
|
||||||
|
|
||||||
|
|
||||||
def copy_app_bundles_to_directory(app_bundles: List[Path],
|
|
||||||
directory: Path) -> None:
|
|
||||||
"""
|
|
||||||
Copy all bundles to a given directory
|
|
||||||
|
|
||||||
This directory is what the DMG will be created from.
|
|
||||||
"""
|
|
||||||
for app_bundle in app_bundles:
|
|
||||||
print(f'Copying {app_bundle.name}...')
|
|
||||||
shutil.copytree(app_bundle, directory / app_bundle.name)
|
|
||||||
|
|
||||||
|
|
||||||
def get_main_app_bundle(app_bundles: List[Path]) -> Path:
|
|
||||||
"""
|
|
||||||
Get the main application bundle for the installation
|
|
||||||
"""
|
|
||||||
return app_bundles[0]
|
|
||||||
|
|
||||||
|
|
||||||
def create_dmg_image(app_bundles: List[Path],
|
|
||||||
dmg_filepath: Path,
|
|
||||||
volume_name: str) -> None:
|
|
||||||
"""
|
|
||||||
Create DMG disk image and put app bundles in it
|
|
||||||
|
|
||||||
No DMG configuration or codesigning is happening here.
|
|
||||||
"""
|
|
||||||
|
|
||||||
if dmg_filepath.exists():
|
|
||||||
print(f'Removing existing writable DMG {dmg_filepath}...')
|
|
||||||
dmg_filepath.unlink()
|
|
||||||
|
|
||||||
print('Preparing directory with app bundles for the DMG...')
|
|
||||||
with TemporaryDirectory(prefix='blender-dmg-content-') as content_dir_str:
|
|
||||||
# Copy all bundles to a clean directory.
|
|
||||||
content_dir = Path(content_dir_str)
|
|
||||||
copy_app_bundles_to_directory(app_bundles, content_dir)
|
|
||||||
|
|
||||||
# Estimate size of the DMG.
|
|
||||||
dmg_size = estimate_dmg_size(app_bundles)
|
|
||||||
print(f'Estimated DMG size: {dmg_size:,} bytes.')
|
|
||||||
|
|
||||||
# Create the DMG.
|
|
||||||
print(f'Creating writable DMG {dmg_filepath}')
|
|
||||||
command = ('hdiutil',
|
|
||||||
'create',
|
|
||||||
'-size', str(dmg_size),
|
|
||||||
'-fs', 'HFS+',
|
|
||||||
'-srcfolder', content_dir,
|
|
||||||
'-volname', volume_name,
|
|
||||||
'-format', 'UDRW',
|
|
||||||
dmg_filepath)
|
|
||||||
subprocess.run(command)
|
|
||||||
|
|
||||||
|
|
||||||
def get_writable_dmg_filepath(dmg_filepath: Path):
|
|
||||||
"""
|
|
||||||
Get file path for writable DMG image
|
|
||||||
"""
|
|
||||||
parent = dmg_filepath.parent
|
|
||||||
return parent / (dmg_filepath.stem + '-temp.dmg')
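# Example (hypothetical path, added for this review): for a final target of
# Path('/tmp/blender-2.90.0.dmg') the writable image is created as
# Path('/tmp/blender-2.90.0-temp.dmg'); the final DMG is presumably produced
# from it later in this script.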
|
|
||||||
|
|
||||||
|
|
||||||
def mount_readwrite_dmg(dmg_filepath: Path) -> None:
|
|
||||||
"""
|
|
||||||
Mount writable DMG
|
|
||||||
|
|
||||||
Mounting point would be /Volumes/<volume name>
|
|
||||||
"""
|
|
||||||
|
|
||||||
print(f'Mounting read-write DMG {dmg_filepath}')
|
|
||||||
command = ('hdiutil',
|
|
||||||
'attach', '-readwrite',
|
|
||||||
'-noverify',
|
|
||||||
'-noautoopen',
|
|
||||||
dmg_filepath)
|
|
||||||
subprocess.run(command)
|
|
||||||
|
|
||||||
|
|
||||||
def get_mount_directory_for_volume_name(volume_name: str) -> Path:
|
|
||||||
"""
|
|
||||||
Get directory under which the volume will be mounted
|
|
||||||
"""
|
|
||||||
|
|
||||||
return Path('/Volumes') / volume_name
|
|
||||||
|
|
||||||
|
|
||||||
def eject_volume(volume_name: str) -> None:
|
|
||||||
"""
|
|
||||||
Eject given volume, if mounted
|
|
||||||
"""
|
|
||||||
mount_directory = get_mount_directory_for_volume_name(volume_name)
|
|
||||||
if not mount_directory.exists():
|
|
||||||
return
|
|
||||||
mount_directory_str = str(mount_directory)
|
|
||||||
|
|
||||||
print(f'Ejecting volume {volume_name}')
|
|
||||||
|
|
||||||
# Figure out which device to eject.
|
|
||||||
mount_output = subprocess.check_output(['mount']).decode()
|
|
||||||
device = ''
|
|
||||||
for line in mount_output.splitlines():
|
|
||||||
if f'on {mount_directory_str} (' not in line:
|
|
||||||
continue
|
|
||||||
tokens = line.split(' ', 3)
|
|
||||||
if len(tokens) < 3:
|
|
||||||
continue
|
|
||||||
if tokens[1] != 'on':
|
|
||||||
continue
|
|
||||||
if device:
|
|
||||||
raise Exception(
|
|
||||||
f'Multiple devices found for mounting point {mount_directory}')
|
|
||||||
device = tokens[0]
|
|
||||||
|
|
||||||
if not device:
|
|
||||||
raise Exception(
|
|
||||||
f'No device found for mounting point {mount_directory}')
|
|
||||||
|
|
||||||
print(f'{mount_directory} is mounted as device {device}, ejecting...')
|
|
||||||
subprocess.run(['diskutil', 'eject', device])
|
|
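# Illustrative sketch, not part of the original script; the device and volume
# below are hypothetical. For a `mount` output line such as
#
#   /dev/disk2s1 on /Volumes/Blender (hfs, local, nodev, nosuid, read-only)
#
# line.split(' ', 3) yields ['/dev/disk2s1', 'on', '/Volumes/Blender', '(hfs, ...)'],
# so tokens[0] is the device handed to `diskutil eject`.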
||||||
|
|
||||||
|
|
||||||
def copy_background_if_needed(background_image_filepath: Path,
|
|
||||||
mount_directory: Path) -> None:
|
|
||||||
"""
|
|
||||||
Copy background to the DMG
|
|
||||||
|
|
||||||
If the background image is not specified it will not be copied.
|
|
||||||
"""
|
|
||||||
|
|
||||||
if not background_image_filepath:
|
|
||||||
print('No background image provided.')
|
|
||||||
return
|
|
||||||
|
|
||||||
print(f'Copying background image {background_image_filepath}')
|
|
||||||
|
|
||||||
destination_dir = mount_directory / '.background'
|
|
||||||
destination_dir.mkdir(exist_ok=True)
|
|
||||||
|
|
||||||
destination_filepath = destination_dir / background_image_filepath.name
|
|
||||||
shutil.copy(background_image_filepath, destination_filepath)
|
|
||||||
|
|
||||||
|
|
||||||
def create_applications_link(mount_directory: Path) -> None:
|
|
||||||
"""
|
|
||||||
Create link to /Applications in the given location
|
|
||||||
"""
|
|
||||||
|
|
||||||
print('Creating link to /Applications')
|
|
||||||
|
|
||||||
command = ('ln', '-s', '/Applications', mount_directory / ' ')
|
|
||||||
subprocess.run(command)
|
|
||||||
|
|
||||||
|
|
||||||
def run_applescript(applescript: Path,
|
|
||||||
volume_name: str,
|
|
||||||
app_bundles: List[Path],
|
|
||||||
background_image_filepath: Path) -> None:
|
|
||||||
"""
|
|
||||||
Run given applescript to adjust look and feel of the DMG
|
|
||||||
"""
|
|
||||||
|
|
||||||
main_app_bundle = get_main_app_bundle(app_bundles)
|
|
||||||
|
|
||||||
with NamedTemporaryFile(
|
|
||||||
mode='w', suffix='.applescript') as temp_applescript:
|
|
||||||
print('Adjusting applescript for volume name...')
|
|
||||||
# Adjust script to the specific volume name.
|
|
||||||
with open(applescript, mode='r') as input:
|
|
||||||
for line in input.readlines():
|
|
||||||
stripped_line = line.strip()
|
|
||||||
if stripped_line.startswith('tell disk'):
|
|
||||||
line = re.sub('tell disk ".*"',
|
|
||||||
f'tell disk "{volume_name}"',
|
|
||||||
line)
|
|
||||||
elif stripped_line.startswith('set background picture'):
|
|
||||||
if not background_image_filepath:
|
|
||||||
continue
|
|
||||||
else:
|
|
||||||
background_image_short = \
|
|
||||||
'.background:' + background_image_filepath.name
|
|
||||||
line = re.sub('to file ".*"',
|
|
||||||
f'to file "{background_image_short}"',
|
|
||||||
line)
|
|
||||||
line = line.replace('blender.app', main_app_bundle.name)
|
|
||||||
temp_applescript.write(line)
|
|
||||||
|
|
||||||
temp_applescript.flush()
|
|
||||||
|
|
||||||
print('Running applescript...')
|
|
||||||
command = ('osascript', temp_applescript.name)
|
|
||||||
subprocess.run(command)
|
|
||||||
|
|
||||||
print('Waiting for applescript...')
|
|
||||||
|
|
||||||
# NOTE: This is copied from bundle.sh. The exact reason for the sleep
|
|
||||||
# remains a mystery.
|
|
||||||
time.sleep(5)
|
|
||||||
|
|
||||||
|
|
||||||
def codesign(subject: Path):
|
|
||||||
"""
|
|
||||||
Codesign file or directory
|
|
||||||
|
|
||||||
NOTE: For DMG it will also notarize.
|
|
||||||
"""
|
|
||||||
|
|
||||||
command = (CODESIGN_SCRIPT, subject)
|
|
||||||
subprocess.run(command)
|
|
||||||
|
|
||||||
|
|
||||||
def codesign_app_bundles_in_dmg(mount_directory: str) -> None:
|
|
||||||
"""
|
|
||||||
Code sign all binaries and bundles in the mounted directory
|
|
||||||
"""
|
|
||||||
|
|
||||||
print(f'Codesigning all app bundles in {mount_directory}')
|
|
||||||
codesign(mount_directory)
|
|
||||||
|
|
||||||
|
|
||||||
def codesign_and_notarize_dmg(dmg_filepath: Path) -> None:
|
|
||||||
"""
|
|
||||||
Run codesign and notarization pipeline on the DMG
|
|
||||||
"""
|
|
||||||
|
|
||||||
print(f'Codesigning and notarizing DMG {dmg_filepath}')
|
|
||||||
codesign(dmg_filepath)
|
|
||||||
|
|
||||||
|
|
||||||
def compress_dmg(writable_dmg_filepath: Path,
|
|
||||||
final_dmg_filepath: Path) -> None:
|
|
||||||
"""
|
|
||||||
Compress temporary read-write DMG
|
|
||||||
"""
|
|
||||||
command = ('hdiutil', 'convert',
|
|
||||||
writable_dmg_filepath,
|
|
||||||
'-format', 'UDZO',
|
|
||||||
'-o', final_dmg_filepath)
|
|
||||||
|
|
||||||
if final_dmg_filepath.exists():
|
|
||||||
print(f'Removing old compressed DMG {final_dmg_filepath}')
|
|
||||||
final_dmg_filepath.unlink()
|
|
||||||
|
|
||||||
print('Compressing disk image...')
|
|
||||||
subprocess.run(command)
|
|
||||||
|
|
||||||
|
|
||||||
def create_final_dmg(app_bundles: List[Path],
|
|
||||||
dmg_filepath: Path,
|
|
||||||
background_image_filepath: Path,
|
|
||||||
volume_name: str,
|
|
||||||
applescript: Path,
|
|
||||||
codesign: bool) -> None:
|
|
||||||
"""
|
|
||||||
Create DMG with all app bundles
|
|
||||||
|
|
||||||
Will take care of configuring the background, signing all binaries and app
|
|
||||||
bundles, and notarizing the DMG.
|
|
||||||
"""
|
|
||||||
|
|
||||||
print('Running all routines to create final DMG')
|
|
||||||
|
|
||||||
writable_dmg_filepath = get_writable_dmg_filepath(dmg_filepath)
|
|
||||||
mount_directory = get_mount_directory_for_volume_name(volume_name)
|
|
||||||
|
|
||||||
# Make sure volume is not mounted.
|
|
||||||
# If it is mounted it will prevent removing old DMG files and could make
|
|
||||||
# it so app bundles are copied to the wrong place.
|
|
||||||
eject_volume(volume_name)
|
|
||||||
|
|
||||||
create_dmg_image(app_bundles, writable_dmg_filepath, volume_name)
|
|
||||||
|
|
||||||
mount_readwrite_dmg(writable_dmg_filepath)
|
|
||||||
|
|
||||||
# Run codesign first, prior to copying anything else.
|
|
||||||
#
|
|
||||||
# This allows recursing into the contents of bundles without worrying about
|
|
||||||
# possible interference of the Applications symlink.
|
|
||||||
if codesign:
|
|
||||||
codesign_app_bundles_in_dmg(mount_directory)
|
|
||||||
|
|
||||||
copy_background_if_needed(background_image_filepath, mount_directory)
|
|
||||||
create_applications_link(mount_directory)
|
|
||||||
run_applescript(applescript, volume_name, app_bundles,
|
|
||||||
background_image_filepath)
|
|
||||||
|
|
||||||
print('Ejecting read-write DMG image...')
|
|
||||||
eject_volume(volume_name)
|
|
||||||
|
|
||||||
compress_dmg(writable_dmg_filepath, dmg_filepath)
|
|
||||||
writable_dmg_filepath.unlink()
|
|
||||||
|
|
||||||
if codesign:
|
|
||||||
codesign_and_notarize_dmg(dmg_filepath)
|
|
||||||
|
|
||||||
|
|
||||||
def ensure_dmg_extension(filepath: Path) -> Path:
|
|
||||||
"""
|
|
||||||
Make sure the given file has a .dmg extension
|
|
||||||
"""
|
|
||||||
|
|
||||||
if filepath.suffix != '.dmg':
|
|
||||||
return filepath.with_suffix(f'{filepath.suffix}.dmg')
|
|
||||||
return filepath
|
|
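# Illustrative sketch, not part of the original script, with hypothetical names.
# Using the file's own suffix preserves dotted version parts in the name:
#
#   ensure_dmg_extension(Path('blender-2.93.0'))      # -> Path('blender-2.93.0.dmg')
#   ensure_dmg_extension(Path('blender-2.93.0.dmg'))  # -> returned unchanged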
||||||
|
|
||||||
|
|
||||||
def get_dmg_filepath(requested_name: Path, app_bundles: List[Path]) -> Path:
|
|
||||||
"""
|
|
||||||
Get full file path for the final DMG image
|
|
||||||
|
|
||||||
Will use the provided one when possible, otherwise will deduce it from
|
|
||||||
the app bundles.
|
|
||||||
|
|
||||||
If the name is deduced, the DMG is stored in the current directory.
|
|
||||||
"""
|
|
||||||
|
|
||||||
if requested_name:
|
|
||||||
return ensure_dmg_extension(requested_name.absolute())
|
|
||||||
|
|
||||||
# TODO(sergey): This is not necessarily the main one.
|
|
||||||
main_bundle = app_bundles[0]
|
|
||||||
# Strip .app from the name
|
|
||||||
return Path(main_bundle.name[:-4] + '.dmg').absolute()
|
|
||||||
|
|
||||||
|
|
||||||
def get_background_image(requested_background_image: Path) -> Path:
|
|
||||||
"""
|
|
||||||
Get effective filepath for the background image
|
|
||||||
"""
|
|
||||||
|
|
||||||
if requested_background_image:
|
|
||||||
return requested_background_image.absolute()
|
|
||||||
|
|
||||||
return DARWIN_DIRECTORY / 'background.tif'
|
|
||||||
|
|
||||||
|
|
||||||
def get_applescript(requested_applescript: Path) -> Path:
|
|
||||||
"""
|
|
||||||
Get effective filepath for the applescript
|
|
||||||
"""
|
|
||||||
|
|
||||||
if requested_applescript:
|
|
||||||
return requested_applescript.absolute()
|
|
||||||
|
|
||||||
return DARWIN_DIRECTORY / 'blender.applescript'
|
|
||||||
|
|
||||||
|
|
||||||
def get_volume_name_from_dmg_filepath(dmg_filepath: Path) -> str:
|
|
||||||
"""
|
|
||||||
Deduce the volume name from the DMG path
|
|
||||||
|
|
||||||
Will use the first part of the DMG file name prior to the dash.
|
|
||||||
"""
|
|
||||||
|
|
||||||
tokens = dmg_filepath.stem.split('-')
|
|
||||||
words = tokens[0].split()
|
|
||||||
|
|
||||||
return ' '.join(word.capitalize() for word in words)
|
|
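# Illustrative sketch, not part of the original script; the DMG name below is
# hypothetical. The part before the first dash becomes the volume name:
#
#   get_volume_name_from_dmg_filepath(Path('blender-2.93.0-macOS.dmg'))
#   # stem 'blender-2.93.0-macOS' -> tokens[0] 'blender' -> 'Blender'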
||||||
|
|
||||||
|
|
||||||
def get_volume_name(requested_volume_name: str,
|
|
||||||
dmg_filepath: Path) -> str:
|
|
||||||
"""
|
|
||||||
Get effective name for DMG volume
|
|
||||||
"""
|
|
||||||
|
|
||||||
if requested_volume_name:
|
|
||||||
return requested_volume_name
|
|
||||||
|
|
||||||
return get_volume_name_from_dmg_filepath(dmg_filepath)
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
parser = create_argument_parser()
|
|
||||||
args = parser.parse_args()
|
|
||||||
|
|
||||||
# Get normalized input parameters.
|
|
||||||
source_dir = args.source_dir.absolute()
|
|
||||||
background_image_filepath = get_background_image(args.background_image)
|
|
||||||
applescript = get_applescript(args.applescript)
|
|
||||||
codesign = args.codesign
|
|
||||||
|
|
||||||
app_bundles = collect_and_log_app_bundles(source_dir)
|
|
||||||
if not app_bundles:
|
|
||||||
return
|
|
||||||
|
|
||||||
dmg_filepath = get_dmg_filepath(args.dmg, app_bundles)
|
|
||||||
volume_name = get_volume_name(args.volume_name, dmg_filepath)
|
|
||||||
|
|
||||||
print(f'Will produce DMG "{dmg_filepath.name}" (without quotes)')
|
|
||||||
|
|
||||||
create_final_dmg(app_bundles,
|
|
||||||
dmg_filepath,
|
|
||||||
background_image_filepath,
|
|
||||||
volume_name,
|
|
||||||
applescript,
|
|
||||||
codesign)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
@@ -1,44 +0,0 @@
|
|||||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software Foundation,
|
|
||||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
#
|
|
||||||
# ##### END GPL LICENSE BLOCK #####
|
|
||||||
|
|
||||||
# This is a script which is used as a POST-INSTALL step for CMake's regular
|
|
||||||
# INSTALL target.
|
|
||||||
# It is used by buildbot workers to sign every binary which is going into
|
|
||||||
# the final bundle.
|
|
||||||
|
|
||||||
# On Windows with Python 3 there is only python.exe, no python3.exe.
|
|
||||||
#
|
|
||||||
# On other platforms it is possible to have python2 and python3, and a
|
|
||||||
# symbolic link from python to either of them. So on those platforms use
|
|
||||||
# an explicit Python version.
|
|
||||||
if(WIN32)
|
|
||||||
set(PYTHON_EXECUTABLE python)
|
|
||||||
else()
|
|
||||||
set(PYTHON_EXECUTABLE python3)
|
|
||||||
endif()
|
|
||||||
|
|
||||||
execute_process(
|
|
||||||
COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_LIST_DIR}/worker_codesign.py"
|
|
||||||
"${CMAKE_INSTALL_PREFIX}"
|
|
||||||
WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}
|
|
||||||
RESULT_VARIABLE exit_code
|
|
||||||
)
|
|
||||||
|
|
||||||
if(NOT exit_code EQUAL "0")
|
|
||||||
message(FATAL_ERROR "Non-zero exit code of codesign tool")
|
|
||||||
endif()
|
|
@@ -1,74 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software Foundation,
|
|
||||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
#
|
|
||||||
# ##### END GPL LICENSE BLOCK #####
|
|
||||||
|
|
||||||
# Helper script which takes care of signing the provided location.
|
|
||||||
#
|
|
||||||
# The location can either be a directory (in which case all eligible binaries
|
|
||||||
# will be signed) or a single file (in which case a single file will be signed).
|
|
||||||
#
|
|
||||||
# This script takes care of all the complexity of communicating between the
|
|
||||||
# process which requests a file to be signed and the code signing server.
|
|
||||||
#
|
|
||||||
# NOTE: Signing happens in-place.
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
from codesign.simple_code_signer import SimpleCodeSigner
|
|
||||||
|
|
||||||
|
|
||||||
def create_argument_parser():
|
|
||||||
parser = argparse.ArgumentParser()
|
|
||||||
parser.add_argument('path_to_sign', type=Path)
|
|
||||||
return parser
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
parser = create_argument_parser()
|
|
||||||
args = parser.parse_args()
|
|
||||||
path_to_sign = args.path_to_sign.absolute()
|
|
||||||
|
|
||||||
if sys.platform == 'win32':
|
|
||||||
# When WIX packaging is used to generate the .msi on Windows, CPack will
|
|
||||||
# install two different projects and install them to different
|
|
||||||
# installation prefixes:
|
|
||||||
#
|
|
||||||
# - C:\b\build\_CPack_Packages\WIX\Blender
|
|
||||||
# - C:\b\build\_CPack_Packages\WIX\Unspecified
|
|
||||||
#
|
|
||||||
# The annoying part is: CMake's post-install script will only be run
|
|
||||||
# once, with the install prefix which corresponds to the project which
|
|
||||||
# was installed last. But we want to sign binaries from all projects.
|
|
||||||
# So in order to do that we detect that we are running for the CPack
|
|
||||||
# project used for WIX and force the parent directory (which includes both
|
|
||||||
# projects) to be signed.
|
|
||||||
#
|
|
||||||
# Here we force both projects to be signed.
|
|
||||||
if path_to_sign.name == 'Unspecified' and 'WIX' in str(path_to_sign):
|
|
||||||
path_to_sign = path_to_sign.parent
|
|
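# Illustrative note, not part of the original script: for a CPack prefix like
# C:\b\build\_CPack_Packages\WIX\Unspecified the check above rewrites
# path_to_sign to C:\b\build\_CPack_Packages\WIX, so both the Blender and
# Unspecified sub-prefixes get signed in one pass.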
||||||
|
|
||||||
code_signer = SimpleCodeSigner()
|
|
||||||
code_signer.sign_file_or_directory(path_to_sign)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
@@ -1,135 +0,0 @@
|
|||||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software Foundation,
|
|
||||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
#
|
|
||||||
# ##### END GPL LICENSE BLOCK #####
|
|
||||||
|
|
||||||
# <pep8 compliant>
|
|
||||||
|
|
||||||
import os
|
|
||||||
import shutil
|
|
||||||
|
|
||||||
import buildbot_utils
|
|
||||||
|
|
||||||
|
|
||||||
def get_cmake_options(builder):
|
|
||||||
codesign_script = os.path.join(
|
|
||||||
builder.blender_dir, 'build_files', 'buildbot', 'worker_codesign.cmake')
|
|
||||||
|
|
||||||
config_file = "build_files/cmake/config/blender_release.cmake"
|
|
||||||
options = ['-DCMAKE_BUILD_TYPE:STRING=Release',
|
|
||||||
'-DWITH_GTESTS=ON']
|
|
||||||
|
|
||||||
if builder.platform == 'mac':
|
|
||||||
options.append('-DCMAKE_OSX_ARCHITECTURES:STRING=x86_64')
|
|
||||||
options.append('-DCMAKE_OSX_DEPLOYMENT_TARGET=10.9')
|
|
||||||
elif builder.platform == 'win':
|
|
||||||
options.extend(['-G', 'Visual Studio 16 2019', '-A', 'x64'])
|
|
||||||
if builder.codesign:
|
|
||||||
options.extend(['-DPOSTINSTALL_SCRIPT:PATH=' + codesign_script])
|
|
||||||
elif builder.platform == 'linux':
|
|
||||||
config_file = "build_files/buildbot/config/blender_linux.cmake"
|
|
||||||
|
|
||||||
optix_sdk_dir = os.path.join(builder.blender_dir, '..', '..', 'NVIDIA-Optix-SDK-7.1')
|
|
||||||
options.append('-DOPTIX_ROOT_DIR:PATH=' + optix_sdk_dir)
|
|
||||||
|
|
||||||
# Workaround to build sm_30 kernels with CUDA 10, since CUDA 11 no longer supports that architecture
|
|
||||||
if builder.platform == 'win':
|
|
||||||
options.append('-DCUDA10_TOOLKIT_ROOT_DIR:PATH=C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v10.1')
|
|
||||||
options.append('-DCUDA10_NVCC_EXECUTABLE:FILEPATH=C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v10.1/bin/nvcc.exe')
|
|
||||||
options.append('-DCUDA11_TOOLKIT_ROOT_DIR:PATH=C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v11.1')
|
|
||||||
options.append('-DCUDA11_NVCC_EXECUTABLE:FILEPATH=C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v11.1/bin/nvcc.exe')
|
|
||||||
elif builder.platform == 'linux':
|
|
||||||
options.append('-DCUDA10_TOOLKIT_ROOT_DIR:PATH=/usr/local/cuda-10.1')
|
|
||||||
options.append('-DCUDA10_NVCC_EXECUTABLE:FILEPATH=/usr/local/cuda-10.1/bin/nvcc')
|
|
||||||
options.append('-DCUDA11_TOOLKIT_ROOT_DIR:PATH=/usr/local/cuda-11.1')
|
|
||||||
options.append('-DCUDA11_NVCC_EXECUTABLE:FILEPATH=/usr/local/cuda-11.1/bin/nvcc')
|
|
||||||
|
|
||||||
options.append("-C" + os.path.join(builder.blender_dir, config_file))
|
|
||||||
options.append("-DCMAKE_INSTALL_PREFIX=%s" % (builder.install_dir))
|
|
||||||
|
|
||||||
return options
|
|
||||||
|
|
||||||
|
|
||||||
def update_git(builder):
|
|
||||||
# Do extra git fetch because not all platform/git/buildbot combinations
|
|
||||||
# update the origin remote, causing buildinfo to detect local changes.
|
|
||||||
os.chdir(builder.blender_dir)
|
|
||||||
|
|
||||||
print("Fetching remotes")
|
|
||||||
command = ['git', 'fetch', '--all']
|
|
||||||
buildbot_utils.call(builder.command_prefix + command)
|
|
||||||
|
|
||||||
|
|
||||||
def clean_directories(builder):
|
|
||||||
# Make sure no garbage remained from the previous run
|
|
||||||
if os.path.isdir(builder.install_dir):
|
|
||||||
shutil.rmtree(builder.install_dir)
|
|
||||||
|
|
||||||
# Make sure build directory exists and enter it
|
|
||||||
os.makedirs(builder.build_dir, exist_ok=True)
|
|
||||||
|
|
||||||
# Remove buildinfo files to force buildbot to re-generate them.
|
|
||||||
for buildinfo in ('buildinfo.h', 'buildinfo.h.txt', ):
|
|
||||||
full_path = os.path.join(builder.build_dir, 'source', 'creator', buildinfo)
|
|
||||||
if os.path.exists(full_path):
|
|
||||||
print("Removing {}" . format(buildinfo))
|
|
||||||
os.remove(full_path)
|
|
||||||
|
|
||||||
|
|
||||||
def cmake_configure(builder):
|
|
||||||
# CMake configuration
|
|
||||||
os.chdir(builder.build_dir)
|
|
||||||
|
|
||||||
cmake_cache = os.path.join(builder.build_dir, 'CMakeCache.txt')
|
|
||||||
if os.path.exists(cmake_cache):
|
|
||||||
print("Removing CMake cache")
|
|
||||||
os.remove(cmake_cache)
|
|
||||||
|
|
||||||
print("CMake configure:")
|
|
||||||
cmake_options = get_cmake_options(builder)
|
|
||||||
command = ['cmake', builder.blender_dir] + cmake_options
|
|
||||||
buildbot_utils.call(builder.command_prefix + command)
|
|
||||||
|
|
||||||
|
|
||||||
def cmake_build(builder):
|
|
||||||
# CMake build
|
|
||||||
os.chdir(builder.build_dir)
|
|
||||||
|
|
||||||
# NOTE: CPack will build an INSTALL target, which would mean that code
|
|
||||||
# signing will happen twice when using `make install` and CPack.
|
|
||||||
# The tricky bit here is that it is not possible to know whether the INSTALL
|
|
||||||
# target is used by CPack or by the buildbot itself. An extra level on top of
|
|
||||||
# this is that on Windows it is required to build INSTALL target in order
|
|
||||||
# to have unit test binaries to run.
|
|
||||||
# So on the one hand we do an extra unneeded code sign on Windows, but on
|
|
||||||
# the positive side we don't add complexity and don't make the build process more
|
|
||||||
# fragile trying to avoid this. The signing process is way faster than just
|
|
||||||
# a clean build of buildbot, especially with regression tests enabled.
|
|
||||||
if builder.platform == 'win':
|
|
||||||
command = ['cmake', '--build', '.', '--target', 'install', '--config', 'Release']
|
|
||||||
else:
|
|
||||||
command = ['make', '-s', '-j16', 'install']
|
|
||||||
|
|
||||||
print("CMake build:")
|
|
||||||
buildbot_utils.call(builder.command_prefix + command)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
builder = buildbot_utils.create_builder_from_arguments()
|
|
||||||
update_git(builder)
|
|
||||||
clean_directories(builder)
|
|
||||||
cmake_configure(builder)
|
|
||||||
cmake_build(builder)
|
|
@@ -1,208 +0,0 @@
|
|||||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software Foundation,
|
|
||||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
#
|
|
||||||
# ##### END GPL LICENSE BLOCK #####
|
|
||||||
|
|
||||||
# <pep8 compliant>
|
|
||||||
|
|
||||||
# Runs on buildbot worker, creating a release package using the build
|
|
||||||
# system and zipping it into buildbot_upload.zip. This is then uploaded
|
|
||||||
# to the master in the next buildbot step.
|
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import buildbot_utils
|
|
||||||
|
|
||||||
|
|
||||||
def get_package_name(builder, platform=None):
|
|
||||||
info = buildbot_utils.VersionInfo(builder)
|
|
||||||
|
|
||||||
package_name = 'blender-' + info.full_version
|
|
||||||
if platform:
|
|
||||||
package_name += '-' + platform
|
|
||||||
if not (builder.branch == 'master' or builder.is_release_branch):
|
|
||||||
if info.is_development_build:
|
|
||||||
package_name = builder.branch + "-" + package_name
|
|
||||||
|
|
||||||
return package_name
|
|
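# Illustrative sketch, not part of the original script; the version, platform
# and branch values are hypothetical:
#
#   get_package_name(builder, 'linux64')
#   # master / release branch                -> 'blender-2.93.0-linux64'
#   # development build on another branch    -> '<branch>-blender-2.93.0-linux64'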
||||||
|
|
||||||
|
|
||||||
def sign_file_or_directory(path):
|
|
||||||
from codesign.simple_code_signer import SimpleCodeSigner
|
|
||||||
code_signer = SimpleCodeSigner()
|
|
||||||
code_signer.sign_file_or_directory(Path(path))
|
|
||||||
|
|
||||||
|
|
||||||
def create_buildbot_upload_zip(builder, package_files):
|
|
||||||
import zipfile
|
|
||||||
|
|
||||||
buildbot_upload_zip = os.path.join(builder.upload_dir, "buildbot_upload.zip")
|
|
||||||
if os.path.exists(buildbot_upload_zip):
|
|
||||||
os.remove(buildbot_upload_zip)
|
|
||||||
|
|
||||||
try:
|
|
||||||
z = zipfile.ZipFile(buildbot_upload_zip, "w", compression=zipfile.ZIP_STORED)
|
|
||||||
for filepath, filename in package_files:
|
|
||||||
print("Packaged", filename)
|
|
||||||
z.write(filepath, arcname=filename)
|
|
||||||
z.close()
|
|
||||||
except Exception as ex:
|
|
||||||
sys.stderr.write('Create buildbot_upload.zip failed: ' + str(ex) + '\n')
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
def create_tar_xz(src, dest, package_name):
|
|
||||||
# One extra character so the leading os.sep is stripped when building package_root
|
|
||||||
ln = len(src) + 1
|
|
||||||
flist = list()
|
|
||||||
|
|
||||||
# Create list of tuples containing file and archive name
|
|
||||||
for root, dirs, files in os.walk(src):
|
|
||||||
package_root = os.path.join(package_name, root[ln:])
|
|
||||||
flist.extend([(os.path.join(root, file), os.path.join(package_root, file)) for file in files])
|
|
||||||
|
|
||||||
import tarfile
|
|
||||||
|
|
||||||
# Set UID/GID of archived files to 0, otherwise they'd be owned by whatever
|
|
||||||
# user compiled the package. If root then unpacks it to /usr/local/, you get
|
|
||||||
# a security issue.
|
|
||||||
def _fakeroot(tarinfo):
|
|
||||||
tarinfo.gid = 0
|
|
||||||
tarinfo.gname = "root"
|
|
||||||
tarinfo.uid = 0
|
|
||||||
tarinfo.uname = "root"
|
|
||||||
return tarinfo
|
|
||||||
|
|
||||||
package = tarfile.open(dest, 'w:xz', preset=9)
|
|
||||||
for entry in flist:
|
|
||||||
package.add(entry[0], entry[1], recursive=False, filter=_fakeroot)
|
|
||||||
package.close()
|
|
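# Illustrative usage sketch, not part of the original script, with hypothetical
# paths and package name:
#
#   create_tar_xz('/build/install', '/build/blender-2.93.0-linux64.tar.xz',
#                 'blender-2.93.0-linux64')
#   # archives /build/install/* under blender-2.93.0-linux64/, owned by root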
||||||
|
|
||||||
|
|
||||||
def cleanup_files(dirpath, extension):
|
|
||||||
for f in os.listdir(dirpath):
|
|
||||||
filepath = os.path.join(dirpath, f)
|
|
||||||
if os.path.isfile(filepath) and f.endswith(extension):
|
|
||||||
os.remove(filepath)
|
|
||||||
|
|
||||||
|
|
||||||
def pack_mac(builder):
|
|
||||||
info = buildbot_utils.VersionInfo(builder)
|
|
||||||
|
|
||||||
os.chdir(builder.build_dir)
|
|
||||||
cleanup_files(builder.build_dir, '.dmg')
|
|
||||||
|
|
||||||
package_name = get_package_name(builder, 'macOS')
|
|
||||||
package_filename = package_name + '.dmg'
|
|
||||||
package_filepath = os.path.join(builder.build_dir, package_filename)
|
|
||||||
|
|
||||||
release_dir = os.path.join(builder.blender_dir, 'release', 'darwin')
|
|
||||||
buildbot_dir = os.path.join(builder.blender_dir, 'build_files', 'buildbot')
|
|
||||||
bundle_script = os.path.join(buildbot_dir, 'worker_bundle_dmg.py')
|
|
||||||
|
|
||||||
command = [bundle_script]
|
|
||||||
command += ['--dmg', package_filepath]
|
|
||||||
if info.is_development_build:
|
|
||||||
background_image = os.path.join(release_dir, 'buildbot', 'background.tif')
|
|
||||||
command += ['--background-image', background_image]
|
|
||||||
if builder.codesign:
|
|
||||||
command += ['--codesign']
|
|
||||||
command += [builder.install_dir]
|
|
||||||
buildbot_utils.call(command)
|
|
||||||
|
|
||||||
create_buildbot_upload_zip(builder, [(package_filepath, package_filename)])
|
|
||||||
|
|
||||||
|
|
||||||
def pack_win(builder):
|
|
||||||
info = buildbot_utils.VersionInfo(builder)
|
|
||||||
|
|
||||||
os.chdir(builder.build_dir)
|
|
||||||
cleanup_files(builder.build_dir, '.zip')
|
|
||||||
|
|
||||||
# CPack will add the platform name
|
|
||||||
cpack_name = get_package_name(builder, None)
|
|
||||||
package_name = get_package_name(builder, 'windows' + str(builder.bits))
|
|
||||||
|
|
||||||
command = ['cmake', '-DCPACK_OVERRIDE_PACKAGENAME:STRING=' + cpack_name, '.']
|
|
||||||
buildbot_utils.call(builder.command_prefix + command)
|
|
||||||
command = ['cpack', '-G', 'ZIP']
|
|
||||||
buildbot_utils.call(builder.command_prefix + command)
|
|
||||||
|
|
||||||
package_filename = package_name + '.zip'
|
|
||||||
package_filepath = os.path.join(builder.build_dir, package_filename)
|
|
||||||
package_files = [(package_filepath, package_filename)]
|
|
||||||
|
|
||||||
if info.version_cycle == 'release':
|
|
||||||
# Build the installer only for final release builds, otherwise users will get
|
|
||||||
# 'this product is already installed' messages.
|
|
||||||
command = ['cpack', '-G', 'WIX']
|
|
||||||
buildbot_utils.call(builder.command_prefix + command)
|
|
||||||
|
|
||||||
package_filename = package_name + '.msi'
|
|
||||||
package_filepath = os.path.join(builder.build_dir, package_filename)
|
|
||||||
if builder.codesign:
|
|
||||||
sign_file_or_directory(package_filepath)
|
|
||||||
|
|
||||||
package_files += [(package_filepath, package_filename)]
|
|
||||||
|
|
||||||
create_buildbot_upload_zip(builder, package_files)
|
|
||||||
|
|
||||||
|
|
||||||
def pack_linux(builder):
|
|
||||||
blender_executable = os.path.join(builder.install_dir, 'blender')
|
|
||||||
|
|
||||||
info = buildbot_utils.VersionInfo(builder)
|
|
||||||
|
|
||||||
# Strip all unused symbols from the binaries
|
|
||||||
print("Stripping binaries...")
|
|
||||||
buildbot_utils.call(builder.command_prefix + ['strip', '--strip-all', blender_executable])
|
|
||||||
|
|
||||||
print("Stripping python...")
|
|
||||||
py_target = os.path.join(builder.install_dir, info.short_version)
|
|
||||||
buildbot_utils.call(
|
|
||||||
builder.command_prefix + [
|
|
||||||
'find', py_target, '-iname', '*.so', '-exec', 'strip', '-s', '{}', ';',
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
# Construct package name
|
|
||||||
platform_name = 'linux64'
|
|
||||||
package_name = get_package_name(builder, platform_name)
|
|
||||||
package_filename = package_name + ".tar.xz"
|
|
||||||
|
|
||||||
print("Creating .tar.xz archive")
|
|
||||||
package_filepath = builder.install_dir + '.tar.xz'
|
|
||||||
create_tar_xz(builder.install_dir, package_filepath, package_name)
|
|
||||||
|
|
||||||
# Create buildbot_upload.zip
|
|
||||||
create_buildbot_upload_zip(builder, [(package_filepath, package_filename)])
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
builder = buildbot_utils.create_builder_from_arguments()
|
|
||||||
|
|
||||||
# Make sure install directory always exists
|
|
||||||
os.makedirs(builder.install_dir, exist_ok=True)
|
|
||||||
|
|
||||||
if builder.platform == 'mac':
|
|
||||||
pack_mac(builder)
|
|
||||||
elif builder.platform == 'win':
|
|
||||||
pack_win(builder)
|
|
||||||
elif builder.platform == 'linux':
|
|
||||||
pack_linux(builder)
|
|
@@ -1,42 +0,0 @@
|
|||||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software Foundation,
|
|
||||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
#
|
|
||||||
# ##### END GPL LICENSE BLOCK #####
|
|
||||||
|
|
||||||
# <pep8 compliant>
|
|
||||||
|
|
||||||
import buildbot_utils
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
|
|
||||||
def get_ctest_arguments(builder):
|
|
||||||
args = ['--output-on-failure']
|
|
||||||
if builder.platform == 'win':
|
|
||||||
args += ['-C', 'Release']
|
|
||||||
return args
|
|
||||||
|
|
||||||
|
|
||||||
def test(builder):
|
|
||||||
os.chdir(builder.build_dir)
|
|
||||||
|
|
||||||
command = builder.command_prefix + ['ctest'] + get_ctest_arguments(builder)
|
|
||||||
buildbot_utils.call(command)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
builder = buildbot_utils.create_builder_from_arguments()
|
|
||||||
test(builder)
|
|
@@ -1,31 +0,0 @@
|
|||||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or
|
|
||||||
# modify it under the terms of the GNU General Public License
|
|
||||||
# as published by the Free Software Foundation; either version 2
|
|
||||||
# of the License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program; if not, write to the Free Software Foundation,
|
|
||||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
||||||
#
|
|
||||||
# ##### END GPL LICENSE BLOCK #####
|
|
||||||
|
|
||||||
# <pep8 compliant>
|
|
||||||
|
|
||||||
import buildbot_utils
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
builder = buildbot_utils.create_builder_from_arguments()
|
|
||||||
os.chdir(builder.blender_dir)
|
|
||||||
|
|
||||||
# Run make update which handles all libraries and submodules.
|
|
||||||
make_update = os.path.join(builder.blender_dir, "build_files", "utils", "make_update.py")
|
|
||||||
buildbot_utils.call([sys.executable, make_update, '--no-blender', "--use-tests", "--use-centos-libraries"])
|
|
@@ -20,8 +20,24 @@ if(NOT CLANG_ROOT_DIR AND NOT $ENV{CLANG_ROOT_DIR} STREQUAL "")
|
|||||||
set(CLANG_ROOT_DIR $ENV{CLANG_ROOT_DIR})
|
set(CLANG_ROOT_DIR $ENV{CLANG_ROOT_DIR})
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
|
if(NOT LLVM_ROOT_DIR)
|
||||||
|
if(DEFINED LLVM_VERSION)
|
||||||
|
message(running llvm-config-${LLVM_VERSION})
|
||||||
|
find_program(LLVM_CONFIG llvm-config-${LLVM_VERSION})
|
||||||
|
endif()
|
||||||
|
if(NOT LLVM_CONFIG)
|
||||||
|
find_program(LLVM_CONFIG llvm-config)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
execute_process(COMMAND ${LLVM_CONFIG} --prefix
|
||||||
|
OUTPUT_VARIABLE LLVM_ROOT_DIR
|
||||||
|
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||||
|
set(LLVM_ROOT_DIR ${LLVM_ROOT_DIR} CACHE PATH "Path to the LLVM installation")
|
||||||
|
endif()
|
||||||
|
|
||||||
set(_CLANG_SEARCH_DIRS
|
set(_CLANG_SEARCH_DIRS
|
||||||
${CLANG_ROOT_DIR}
|
${CLANG_ROOT_DIR}
|
||||||
|
${LLVM_ROOT_DIR}
|
||||||
/opt/lib/clang
|
/opt/lib/clang
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@@ -472,8 +472,7 @@ if(NOT GFLAGS_FOUND)
|
|||||||
gflags_report_not_found(
|
gflags_report_not_found(
|
||||||
"Could not find gflags include directory, set GFLAGS_INCLUDE_DIR "
|
"Could not find gflags include directory, set GFLAGS_INCLUDE_DIR "
|
||||||
"to directory containing gflags/gflags.h")
|
"to directory containing gflags/gflags.h")
|
||||||
endif(NOT GFLAGS_INCLUDE_DIR OR
|
endif()
|
||||||
NOT EXISTS ${GFLAGS_INCLUDE_DIR})
|
|
||||||
|
|
||||||
find_library(GFLAGS_LIBRARY NAMES gflags
|
find_library(GFLAGS_LIBRARY NAMES gflags
|
||||||
PATHS ${GFLAGS_LIBRARY_DIR_HINTS}
|
PATHS ${GFLAGS_LIBRARY_DIR_HINTS}
|
||||||
@@ -484,8 +483,7 @@ if(NOT GFLAGS_FOUND)
|
|||||||
gflags_report_not_found(
|
gflags_report_not_found(
|
||||||
"Could not find gflags library, set GFLAGS_LIBRARY "
|
"Could not find gflags library, set GFLAGS_LIBRARY "
|
||||||
"to full path to libgflags.")
|
"to full path to libgflags.")
|
||||||
endif(NOT GFLAGS_LIBRARY OR
|
endif()
|
||||||
NOT EXISTS ${GFLAGS_LIBRARY})
|
|
||||||
|
|
||||||
# gflags typically requires a threading library (which is OS dependent), note
|
# gflags typically requires a threading library (which is OS dependent), note
|
||||||
# that this defines the CMAKE_THREAD_LIBS_INIT variable. If we are able to
|
# that this defines the CMAKE_THREAD_LIBS_INIT variable. If we are able to
|
||||||
@@ -560,8 +558,7 @@ if(NOT GFLAGS_FOUND)
|
|||||||
gflags_report_not_found(
|
gflags_report_not_found(
|
||||||
"Caller defined GFLAGS_INCLUDE_DIR:"
|
"Caller defined GFLAGS_INCLUDE_DIR:"
|
||||||
" ${GFLAGS_INCLUDE_DIR} does not contain gflags/gflags.h header.")
|
" ${GFLAGS_INCLUDE_DIR} does not contain gflags/gflags.h header.")
|
||||||
endif(GFLAGS_INCLUDE_DIR AND
|
endif()
|
||||||
NOT EXISTS ${GFLAGS_INCLUDE_DIR}/gflags/gflags.h)
|
|
||||||
# TODO: This regex for gflags library is pretty primitive, we use lowercase
|
# TODO: This regex for gflags library is pretty primitive, we use lowercase
|
||||||
# for comparison to handle Windows using CamelCase library names, could
|
# for comparison to handle Windows using CamelCase library names, could
|
||||||
# this check be better?
|
# this check be better?
|
||||||
@@ -571,8 +568,7 @@ if(NOT GFLAGS_FOUND)
|
|||||||
gflags_report_not_found(
|
gflags_report_not_found(
|
||||||
"Caller defined GFLAGS_LIBRARY: "
|
"Caller defined GFLAGS_LIBRARY: "
|
||||||
"${GFLAGS_LIBRARY} does not match gflags.")
|
"${GFLAGS_LIBRARY} does not match gflags.")
|
||||||
endif(GFLAGS_LIBRARY AND
|
endif()
|
||||||
NOT "${LOWERCASE_GFLAGS_LIBRARY}" MATCHES ".*gflags[^/]*")
|
|
||||||
|
|
||||||
gflags_reset_find_library_prefix()
|
gflags_reset_find_library_prefix()
|
||||||
|
|
||||||
|
@@ -40,7 +40,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(NanoVDB DEFAULT_MSG
|
|||||||
|
|
||||||
IF(NANOVDB_FOUND)
|
IF(NANOVDB_FOUND)
|
||||||
SET(NANOVDB_INCLUDE_DIRS ${NANOVDB_INCLUDE_DIR})
|
SET(NANOVDB_INCLUDE_DIRS ${NANOVDB_INCLUDE_DIR})
|
||||||
ENDIF(NANOVDB_FOUND)
|
ENDIF()
|
||||||
|
|
||||||
MARK_AS_ADVANCED(
|
MARK_AS_ADVANCED(
|
||||||
NANOVDB_INCLUDE_DIR
|
NANOVDB_INCLUDE_DIR
|
||||||
|
@@ -46,7 +46,7 @@ SET(_opencollada_FIND_COMPONENTS
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Fedora openCOLLADA package links these statically
|
# Fedora openCOLLADA package links these statically
|
||||||
# note that order is important here ot it wont link
|
# note that order is important here or it won't link
|
||||||
SET(_opencollada_FIND_STATIC_COMPONENTS
|
SET(_opencollada_FIND_STATIC_COMPONENTS
|
||||||
buffer
|
buffer
|
||||||
ftoa
|
ftoa
|
||||||
|
@@ -44,7 +44,7 @@ SET(PYTHON_LINKFLAGS "-Xlinker -export-dynamic" CACHE STRING "Linker flags for p
|
|||||||
MARK_AS_ADVANCED(PYTHON_LINKFLAGS)
|
MARK_AS_ADVANCED(PYTHON_LINKFLAGS)
|
||||||
|
|
||||||
|
|
||||||
# if the user passes these defines as args, we dont want to overwrite
|
# if the user passes these defines as args, we don't want to overwrite
|
||||||
SET(_IS_INC_DEF OFF)
|
SET(_IS_INC_DEF OFF)
|
||||||
SET(_IS_INC_CONF_DEF OFF)
|
SET(_IS_INC_CONF_DEF OFF)
|
||||||
SET(_IS_LIB_DEF OFF)
|
SET(_IS_LIB_DEF OFF)
|
||||||
@@ -143,7 +143,7 @@ IF((NOT _IS_INC_DEF) OR (NOT _IS_INC_CONF_DEF) OR (NOT _IS_LIB_DEF) OR (NOT _IS_
|
|||||||
SET(_PYTHON_ABI_FLAGS "${_CURRENT_ABI_FLAGS}")
|
SET(_PYTHON_ABI_FLAGS "${_CURRENT_ABI_FLAGS}")
|
||||||
break()
|
break()
|
||||||
ELSE()
|
ELSE()
|
||||||
# ensure we dont find values from 2 different ABI versions
|
# ensure we don't find values from 2 different ABI versions
|
||||||
IF(NOT _IS_INC_DEF)
|
IF(NOT _IS_INC_DEF)
|
||||||
UNSET(PYTHON_INCLUDE_DIR CACHE)
|
UNSET(PYTHON_INCLUDE_DIR CACHE)
|
||||||
ENDIF()
|
ENDIF()
|
||||||
|
@@ -40,7 +40,7 @@ FIND_PACKAGE_HANDLE_STANDARD_ARGS(sse2neon DEFAULT_MSG
|
|||||||
|
|
||||||
IF(SSE2NEON_FOUND)
|
IF(SSE2NEON_FOUND)
|
||||||
SET(SSE2NEON_INCLUDE_DIRS ${SSE2NEON_INCLUDE_DIR})
|
SET(SSE2NEON_INCLUDE_DIRS ${SSE2NEON_INCLUDE_DIR})
|
||||||
ENDIF(SSE2NEON_FOUND)
|
ENDIF()
|
||||||
|
|
||||||
MARK_AS_ADVANCED(
|
MARK_AS_ADVANCED(
|
||||||
SSE2NEON_INCLUDE_DIR
|
SSE2NEON_INCLUDE_DIR
|
||||||
|
@@ -79,7 +79,7 @@ if(EXISTS ${SOURCE_DIR}/.git)
|
|||||||
ERROR_QUIET)
|
ERROR_QUIET)
|
||||||
if(NOT _git_below_check STREQUAL "")
|
if(NOT _git_below_check STREQUAL "")
|
||||||
# If there're commits between HEAD and upstream this means
|
# If there're commits between HEAD and upstream this means
|
||||||
# that we're reset-ed to older revision. Use it's hash then.
|
# that we're reset-ed to older revision. Use its hash then.
|
||||||
execute_process(COMMAND git rev-parse --short=12 HEAD
|
execute_process(COMMAND git rev-parse --short=12 HEAD
|
||||||
WORKING_DIRECTORY ${SOURCE_DIR}
|
WORKING_DIRECTORY ${SOURCE_DIR}
|
||||||
OUTPUT_VARIABLE MY_WC_HASH
|
OUTPUT_VARIABLE MY_WC_HASH
|
||||||
|
@@ -305,7 +305,7 @@ def file_check_arg_sizes(tu):
|
|||||||
for i, node_child in enumerate(children):
|
for i, node_child in enumerate(children):
|
||||||
children = list(node_child.get_children())
|
children = list(node_child.get_children())
|
||||||
|
|
||||||
# skip if we dont have an index...
|
# skip if we don't have an index...
|
||||||
size_def = args_size_definition.get(i, -1)
|
size_def = args_size_definition.get(i, -1)
|
||||||
|
|
||||||
if size_def == -1:
|
if size_def == -1:
|
||||||
@@ -354,7 +354,7 @@ def file_check_arg_sizes(tu):
|
|||||||
filepath # always the same but useful when running threaded
|
filepath # always the same but useful when running threaded
|
||||||
))
|
))
|
||||||
|
|
||||||
# we dont really care what we are looking at, just scan entire file for
|
# we don't really care what we are looking at, just scan entire file for
|
||||||
# function calls.
|
# function calls.
|
||||||
|
|
||||||
def recursive_func_call_check(node):
|
def recursive_func_call_check(node):
|
||||||
|
@@ -8,6 +8,9 @@ IGNORE_SOURCE = (
|
|||||||
# specific source files
|
# specific source files
|
||||||
"extern/audaspace/",
|
"extern/audaspace/",
|
||||||
|
|
||||||
|
# Use for `WIN32` only.
|
||||||
|
"source/creator/blender_launcher_win32.c",
|
||||||
|
|
||||||
# specific source files
|
# specific source files
|
||||||
"extern/bullet2/src/BulletCollision/CollisionDispatch/btBox2dBox2dCollisionAlgorithm.cpp",
|
"extern/bullet2/src/BulletCollision/CollisionDispatch/btBox2dBox2dCollisionAlgorithm.cpp",
|
||||||
"extern/bullet2/src/BulletCollision/CollisionDispatch/btConvex2dConvex2dAlgorithm.cpp",
|
"extern/bullet2/src/BulletCollision/CollisionDispatch/btConvex2dConvex2dAlgorithm.cpp",
|
||||||
|
@@ -82,7 +82,7 @@ def create_nb_project_main():
|
|||||||
make_exe = cmake_cache_var("CMAKE_MAKE_PROGRAM")
|
make_exe = cmake_cache_var("CMAKE_MAKE_PROGRAM")
|
||||||
make_exe_basename = os.path.basename(make_exe)
|
make_exe_basename = os.path.basename(make_exe)
|
||||||
|
|
||||||
# --------------- NB specific
|
# --------------- NetBeans specific.
|
||||||
defines = [("%s=%s" % cdef) if cdef[1] else cdef[0] for cdef in defines]
|
defines = [("%s=%s" % cdef) if cdef[1] else cdef[0] for cdef in defines]
|
||||||
defines += [cdef.replace("#define", "").strip() for cdef in cmake_compiler_defines()]
|
defines += [cdef.replace("#define", "").strip() for cdef in cmake_compiler_defines()]
|
||||||
|
|
||||||
|
@@ -29,6 +29,7 @@ set(WITH_IMAGE_OPENEXR ON CACHE BOOL "" FORCE)
|
|||||||
set(WITH_IMAGE_OPENJPEG ON CACHE BOOL "" FORCE)
|
set(WITH_IMAGE_OPENJPEG ON CACHE BOOL "" FORCE)
|
||||||
set(WITH_IMAGE_TIFF ON CACHE BOOL "" FORCE)
|
set(WITH_IMAGE_TIFF ON CACHE BOOL "" FORCE)
|
||||||
set(WITH_INPUT_NDOF ON CACHE BOOL "" FORCE)
|
set(WITH_INPUT_NDOF ON CACHE BOOL "" FORCE)
|
||||||
|
set(WITH_INPUT_IME ON CACHE BOOL "" FORCE)
|
||||||
set(WITH_INTERNATIONAL ON CACHE BOOL "" FORCE)
|
set(WITH_INTERNATIONAL ON CACHE BOOL "" FORCE)
|
||||||
set(WITH_LIBMV ON CACHE BOOL "" FORCE)
|
set(WITH_LIBMV ON CACHE BOOL "" FORCE)
|
||||||
set(WITH_LIBMV_SCHUR_SPECIALIZATIONS ON CACHE BOOL "" FORCE)
|
set(WITH_LIBMV_SCHUR_SPECIALIZATIONS ON CACHE BOOL "" FORCE)
|
||||||
|
@@ -30,6 +30,7 @@ set(WITH_IMAGE_OPENEXR ON CACHE BOOL "" FORCE)
|
|||||||
set(WITH_IMAGE_OPENJPEG ON CACHE BOOL "" FORCE)
|
set(WITH_IMAGE_OPENJPEG ON CACHE BOOL "" FORCE)
|
||||||
set(WITH_IMAGE_TIFF ON CACHE BOOL "" FORCE)
|
set(WITH_IMAGE_TIFF ON CACHE BOOL "" FORCE)
|
||||||
set(WITH_INPUT_NDOF ON CACHE BOOL "" FORCE)
|
set(WITH_INPUT_NDOF ON CACHE BOOL "" FORCE)
|
||||||
|
set(WITH_INPUT_IME ON CACHE BOOL "" FORCE)
|
||||||
set(WITH_INTERNATIONAL ON CACHE BOOL "" FORCE)
|
set(WITH_INTERNATIONAL ON CACHE BOOL "" FORCE)
|
||||||
set(WITH_LIBMV ON CACHE BOOL "" FORCE)
|
set(WITH_LIBMV ON CACHE BOOL "" FORCE)
|
||||||
set(WITH_LIBMV_SCHUR_SPECIALIZATIONS ON CACHE BOOL "" FORCE)
|
set(WITH_LIBMV_SCHUR_SPECIALIZATIONS ON CACHE BOOL "" FORCE)
|
||||||
@@ -56,10 +57,6 @@ set(WITH_TBB ON CACHE BOOL "" FORCE)
|
|||||||
set(WITH_USD ON CACHE BOOL "" FORCE)
|
set(WITH_USD ON CACHE BOOL "" FORCE)
|
||||||
|
|
||||||
set(WITH_MEM_JEMALLOC ON CACHE BOOL "" FORCE)
|
set(WITH_MEM_JEMALLOC ON CACHE BOOL "" FORCE)
|
||||||
set(WITH_CYCLES_CUDA_BINARIES ON CACHE BOOL "" FORCE)
|
|
||||||
set(WITH_CYCLES_CUBIN_COMPILER OFF CACHE BOOL "" FORCE)
|
|
||||||
set(CYCLES_CUDA_BINARIES_ARCH sm_30;sm_35;sm_37;sm_50;sm_52;sm_60;sm_61;sm_70;sm_75;sm_86;compute_75 CACHE STRING "" FORCE)
|
|
||||||
set(WITH_CYCLES_DEVICE_OPTIX ON CACHE BOOL "" FORCE)
|
|
||||||
|
|
||||||
# platform dependent options
|
# platform dependent options
|
||||||
if(APPLE)
|
if(APPLE)
|
||||||
@@ -80,4 +77,8 @@ if(UNIX AND NOT APPLE)
|
|||||||
endif()
|
endif()
|
||||||
if(NOT APPLE)
|
if(NOT APPLE)
|
||||||
set(WITH_XR_OPENXR ON CACHE BOOL "" FORCE)
|
set(WITH_XR_OPENXR ON CACHE BOOL "" FORCE)
|
||||||
|
|
||||||
|
set(WITH_CYCLES_DEVICE_OPTIX ON CACHE BOOL "" FORCE)
|
||||||
|
set(WITH_CYCLES_CUDA_BINARIES ON CACHE BOOL "" FORCE)
|
||||||
|
set(WITH_CYCLES_CUBIN_COMPILER OFF CACHE BOOL "" FORCE)
|
||||||
endif()
|
endif()
|
||||||
|
@@ -596,14 +596,6 @@ function(SETUP_LIBDIRS)
|
|||||||
link_directories(${GMP_LIBPATH})
|
link_directories(${GMP_LIBPATH})
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
if(WITH_GHOST_WAYLAND)
|
|
||||||
link_directories(
|
|
||||||
${wayland-client_LIBRARY_DIRS}
|
|
||||||
${wayland-egl_LIBRARY_DIRS}
|
|
||||||
${xkbcommon_LIBRARY_DIRS}
|
|
||||||
${wayland-cursor_LIBRARY_DIRS})
|
|
||||||
endif()
|
|
||||||
|
|
||||||
if(WIN32 AND NOT UNIX)
|
if(WIN32 AND NOT UNIX)
|
||||||
link_directories(${PTHREADS_LIBPATH})
|
link_directories(${PTHREADS_LIBPATH})
|
||||||
endif()
|
endif()
|
||||||
@@ -702,7 +694,7 @@ macro(message_first_run)
|
|||||||
endmacro()
|
endmacro()
|
||||||
|
|
||||||
# when we have warnings as errors applied globally this
|
# when we have warnings as errors applied globally this
|
||||||
# needs to be removed for some external libs which we dont maintain.
|
# needs to be removed for some external libs which we don't maintain.
|
||||||
|
|
||||||
# utility macro
|
# utility macro
|
||||||
macro(remove_cc_flag
|
macro(remove_cc_flag
|
||||||
@@ -802,7 +794,7 @@ macro(remove_extra_strict_flags)
|
|||||||
endmacro()
|
endmacro()
|
||||||
|
|
||||||
# note, we can only append flags on a single file so we need to negate the options.
|
# note, we can only append flags on a single file so we need to negate the options.
|
||||||
# at the moment we cant shut up ffmpeg deprecations, so use this, but will
|
# at the moment we can't shut up ffmpeg deprecations, so use this, but will
|
||||||
# probably add more removals here.
|
# probably add more removals here.
|
||||||
macro(remove_strict_c_flags_file
|
macro(remove_strict_c_flags_file
|
||||||
filenames)
|
filenames)
|
||||||
@@ -971,14 +963,6 @@ macro(blender_project_hack_post)
|
|||||||
unset(_reset_standard_cflags_rel)
|
unset(_reset_standard_cflags_rel)
|
||||||
unset(_reset_standard_cxxflags_rel)
|
unset(_reset_standard_cxxflags_rel)
|
||||||
|
|
||||||
# ------------------------------------------------------------------
|
|
||||||
# workaround for omission in cmake 2.8.4's GNU.cmake, fixed in 2.8.5
|
|
||||||
if(CMAKE_COMPILER_IS_GNUCC)
|
|
||||||
if(NOT DARWIN)
|
|
||||||
set(CMAKE_INCLUDE_SYSTEM_FLAG_C "-isystem ")
|
|
||||||
endif()
|
|
||||||
endif()
|
|
||||||
|
|
||||||
endmacro()
|
endmacro()
|
||||||
|
|
||||||
# pair of macros to allow libraries to be specify files to install, but to
|
# pair of macros to allow libraries to be specify files to install, but to
|
||||||
|
@@ -104,8 +104,8 @@ if(WIN32)
|
|||||||
set(CPACK_WIX_LIGHT_EXTRA_FLAGS -dcl:medium)
|
set(CPACK_WIX_LIGHT_EXTRA_FLAGS -dcl:medium)
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
set(CPACK_PACKAGE_EXECUTABLES "blender" "blender")
|
set(CPACK_PACKAGE_EXECUTABLES "blender-launcher" "blender")
|
||||||
set(CPACK_CREATE_DESKTOP_LINKS "blender" "blender")
|
set(CPACK_CREATE_DESKTOP_LINKS "blender-launcher" "blender")
|
||||||
|
|
||||||
include(CPack)
|
include(CPack)
|
||||||
|
|
||||||
|
@@ -20,12 +20,6 @@
|
|||||||
|
|
||||||
# Libraries configuration for Apple.
|
# Libraries configuration for Apple.
|
||||||
|
|
||||||
if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
|
|
||||||
set(MACOSX_DEPLOYMENT_TARGET 11.00)
|
|
||||||
else()
|
|
||||||
set(MACOSX_DEPLOYMENT_TARGET 10.13)
|
|
||||||
endif()
|
|
||||||
|
|
||||||
macro(find_package_wrapper)
|
macro(find_package_wrapper)
|
||||||
# do nothing, just satisfy the macro
|
# do nothing, just satisfy the macro
|
||||||
endmacro()
|
endmacro()
|
||||||
@@ -394,6 +388,10 @@ endif()
|
|||||||
|
|
||||||
if(WITH_TBB)
|
if(WITH_TBB)
|
||||||
find_package(TBB)
|
find_package(TBB)
|
||||||
|
if(NOT TBB_FOUND)
|
||||||
|
message(WARNING "TBB not found, disabling WITH_TBB")
|
||||||
|
set(WITH_TBB OFF)
|
||||||
|
endif()
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
if(WITH_POTRACE)
|
if(WITH_POTRACE)
|
||||||
@@ -406,7 +404,7 @@ endif()
|
|||||||
|
|
||||||
# CMake FindOpenMP doesn't know about AppleClang before 3.12, so provide custom flags.
|
# CMake FindOpenMP doesn't know about AppleClang before 3.12, so provide custom flags.
|
||||||
if(WITH_OPENMP)
|
if(WITH_OPENMP)
|
||||||
if(CMAKE_C_COMPILER_ID MATCHES "Clang" AND CMAKE_C_COMPILER_VERSION VERSION_GREATER_EQUAL "7.0")
|
if(CMAKE_C_COMPILER_ID MATCHES "Clang")
|
||||||
# Use OpenMP from our precompiled libraries.
|
# Use OpenMP from our precompiled libraries.
|
||||||
message(STATUS "Using ${LIBDIR}/openmp for OpenMP")
|
message(STATUS "Using ${LIBDIR}/openmp for OpenMP")
|
||||||
set(OPENMP_CUSTOM ON)
|
set(OPENMP_CUSTOM ON)
|
||||||
@@ -482,10 +480,8 @@ else()
|
|||||||
set(CMAKE_CXX_FLAGS_RELEASE "-O2 -mdynamic-no-pic")
|
set(CMAKE_CXX_FLAGS_RELEASE "-O2 -mdynamic-no-pic")
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
if(${XCODE_VERSION} VERSION_EQUAL 5 OR ${XCODE_VERSION} VERSION_GREATER 5)
|
# Clang has too low template depth of 128 for libmv.
|
||||||
# Xcode 5 is always using CLANG, which has too low template depth of 128 for libmv
|
string(APPEND CMAKE_CXX_FLAGS " -ftemplate-depth=1024")
|
||||||
string(APPEND CMAKE_CXX_FLAGS " -ftemplate-depth=1024")
|
|
||||||
endif()
|
|
||||||
|
|
||||||
# Avoid conflicts with Luxrender, and other plug-ins that may use the same
|
# Avoid conflicts with Luxrender, and other plug-ins that may use the same
|
||||||
# libraries as Blender with a different version or build options.
|
# libraries as Blender with a different version or build options.
|
||||||
|
@@ -168,21 +168,15 @@ endif()
|
|||||||
unset(OSX_SDKROOT)
|
unset(OSX_SDKROOT)
|
||||||
|
|
||||||
|
|
||||||
# 10.13 is our min. target, if you use higher sdk, weak linking happens
|
|
||||||
if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
|
if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
|
||||||
|
# M1 chips run Big Sur onwards.
|
||||||
set(OSX_MIN_DEPLOYMENT_TARGET 11.00)
|
set(OSX_MIN_DEPLOYMENT_TARGET 11.00)
|
||||||
else()
|
else()
|
||||||
|
# 10.13 is our min. target, if you use higher sdk, weak linking happens
|
||||||
set(OSX_MIN_DEPLOYMENT_TARGET 10.13)
|
set(OSX_MIN_DEPLOYMENT_TARGET 10.13)
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
if(CMAKE_OSX_DEPLOYMENT_TARGET)
|
set(CMAKE_OSX_DEPLOYMENT_TARGET "${OSX_MIN_DEPLOYMENT_TARGET}" CACHE STRING "" FORCE)
|
||||||
if(${CMAKE_OSX_DEPLOYMENT_TARGET} VERSION_LESS ${OSX_MIN_DEPLOYMENT_TARGET})
|
|
||||||
message(STATUS "Setting deployment target to ${OSX_MIN_DEPLOYMENT_TARGET}, lower versions are not supported")
|
|
||||||
set(CMAKE_OSX_DEPLOYMENT_TARGET "${OSX_MIN_DEPLOYMENT_TARGET}" CACHE STRING "" FORCE)
|
|
||||||
endif()
|
|
||||||
else()
|
|
||||||
set(CMAKE_OSX_DEPLOYMENT_TARGET "${OSX_MIN_DEPLOYMENT_TARGET}" CACHE STRING "" FORCE)
|
|
||||||
endif()
|
|
||||||
|
|
||||||
if(NOT ${CMAKE_GENERATOR} MATCHES "Xcode")
|
if(NOT ${CMAKE_GENERATOR} MATCHES "Xcode")
|
||||||
# Force CMAKE_OSX_DEPLOYMENT_TARGET for makefiles, will not work else (CMake bug?)
|
# Force CMAKE_OSX_DEPLOYMENT_TARGET for makefiles, will not work else (CMake bug?)
|
||||||
|
@@ -457,6 +457,10 @@ endif()
|
|||||||
|
|
||||||
if(WITH_TBB)
|
if(WITH_TBB)
|
||||||
find_package_wrapper(TBB)
|
find_package_wrapper(TBB)
|
||||||
|
if(NOT TBB_FOUND)
|
||||||
|
message(WARNING "TBB not found, disabling WITH_TBB")
|
||||||
|
set(WITH_TBB OFF)
|
||||||
|
endif()
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
if(WITH_XR_OPENXR)
|
if(WITH_XR_OPENXR)
|
||||||
@@ -575,17 +579,17 @@ if(WITH_GHOST_WAYLAND)
|
|||||||
pkg_check_modules(wayland-scanner REQUIRED wayland-scanner)
|
pkg_check_modules(wayland-scanner REQUIRED wayland-scanner)
|
||||||
pkg_check_modules(xkbcommon REQUIRED xkbcommon)
|
pkg_check_modules(xkbcommon REQUIRED xkbcommon)
|
||||||
pkg_check_modules(wayland-cursor REQUIRED wayland-cursor)
|
pkg_check_modules(wayland-cursor REQUIRED wayland-cursor)
|
||||||
|
pkg_check_modules(dbus REQUIRED dbus-1)
|
||||||
|
|
||||||
set(WITH_GL_EGL ON)
|
set(WITH_GL_EGL ON)
|
||||||
|
|
||||||
if(WITH_GHOST_WAYLAND)
|
list(APPEND PLATFORM_LINKLIBS
|
||||||
list(APPEND PLATFORM_LINKLIBS
|
${wayland-client_LINK_LIBRARIES}
|
||||||
${wayland-client_LIBRARIES}
|
${wayland-egl_LINK_LIBRARIES}
|
||||||
${wayland-egl_LIBRARIES}
|
${xkbcommon_LINK_LIBRARIES}
|
||||||
${xkbcommon_LIBRARIES}
|
${wayland-cursor_LINK_LIBRARIES}
|
||||||
${wayland-cursor_LIBRARIES}
|
${dbus_LINK_LIBRARIES}
|
||||||
)
|
)
|
||||||
endif()
|
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
if(WITH_GHOST_X11)
|
if(WITH_GHOST_X11)
|
||||||
|
@@ -119,7 +119,7 @@ string(APPEND CMAKE_MODULE_LINKER_FLAGS " /SAFESEH:NO /ignore:4099")
|
|||||||
list(APPEND PLATFORM_LINKLIBS
|
list(APPEND PLATFORM_LINKLIBS
|
||||||
ws2_32 vfw32 winmm kernel32 user32 gdi32 comdlg32 Comctl32 version
|
ws2_32 vfw32 winmm kernel32 user32 gdi32 comdlg32 Comctl32 version
|
||||||
advapi32 shfolder shell32 ole32 oleaut32 uuid psapi Dbghelp Shlwapi
|
advapi32 shfolder shell32 ole32 oleaut32 uuid psapi Dbghelp Shlwapi
|
||||||
pathcch
|
pathcch Shcore
|
||||||
)
|
)
|
||||||
|
|
||||||
if(WITH_INPUT_IME)
|
if(WITH_INPUT_IME)
|
||||||
@@ -144,8 +144,8 @@ add_definitions(-D_ALLOW_KEYWORD_MACROS)
|
|||||||
# that both /GR and /GR- are specified.
|
# that both /GR and /GR- are specified.
|
||||||
remove_cc_flag("/GR")
|
remove_cc_flag("/GR")
|
||||||
|
|
||||||
# We want to support Windows 7 level ABI
|
# Make the Windows 8.1 API available for use.
|
||||||
add_definitions(-D_WIN32_WINNT=0x601)
|
add_definitions(-D_WIN32_WINNT=0x603)
|
||||||
include(build_files/cmake/platform/platform_win32_bundle_crt.cmake)
|
include(build_files/cmake/platform/platform_win32_bundle_crt.cmake)
|
||||||
remove_cc_flag("/MDd" "/MD" "/Zi")
|
remove_cc_flag("/MDd" "/MD" "/Zi")
|
||||||
|
|
||||||
@@ -261,8 +261,10 @@ if(NOT DEFINED LIBDIR)
|
|||||||
else()
|
else()
|
||||||
message(FATAL_ERROR "32 bit compiler detected, blender no longer provides pre-build libraries for 32 bit windows, please set the LIBDIR cmake variable to your own library folder")
|
message(FATAL_ERROR "32 bit compiler detected, blender no longer provides pre-build libraries for 32 bit windows, please set the LIBDIR cmake variable to your own library folder")
|
||||||
endif()
|
endif()
|
||||||
# Can be 1910..1912
|
if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 19.29.30130)
|
||||||
if(MSVC_VERSION GREATER 1919)
|
message(STATUS "Visual Studio 2022 detected.")
|
||||||
|
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_BASE}_vc15)
|
||||||
|
elseif(MSVC_VERSION GREATER 1919)
|
||||||
message(STATUS "Visual Studio 2019 detected.")
|
message(STATUS "Visual Studio 2019 detected.")
|
||||||
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_BASE}_vc15)
|
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_BASE}_vc15)
|
||||||
elseif(MSVC_VERSION GREATER 1909)
|
elseif(MSVC_VERSION GREATER 1909)
|
||||||
@@ -548,7 +550,6 @@ if(WITH_OPENIMAGEIO)
   set(OPENIMAGEIO_LIBRARIES ${OIIO_OPTIMIZED} ${OIIO_DEBUG})

   set(OPENIMAGEIO_DEFINITIONS "-DUSE_TBB=0")
-  set(OPENCOLORIO_DEFINITIONS "-DDOpenColorIO_SKIP_IMPORTS")
   set(OPENIMAGEIO_IDIFF "${OPENIMAGEIO}/bin/idiff.exe")
   add_definitions(-DOIIO_STATIC_DEFINE)
   add_definitions(-DOIIO_NO_SSE=1)
@@ -594,7 +595,7 @@ if(WITH_OPENCOLORIO)
     debug ${OPENCOLORIO_LIBPATH}/libexpatdMD.lib
     debug ${OPENCOLORIO_LIBPATH}/pystring_d.lib
   )
-  set(OPENCOLORIO_DEFINITIONS)
+  set(OPENCOLORIO_DEFINITIONS "-DOpenColorIO_SKIP_IMPORTS")
 endif()

 if(WITH_OPENVDB)
@@ -675,10 +676,11 @@ if(WITH_SYSTEM_AUDASPACE)
 endif()

 if(WITH_TBB)
-  set(TBB_LIBRARIES optimized ${LIBDIR}/tbb/lib/tbb.lib debug ${LIBDIR}/tbb/lib/debug/tbb_debug.lib)
+  set(TBB_LIBRARIES optimized ${LIBDIR}/tbb/lib/tbb.lib debug ${LIBDIR}/tbb/lib/tbb_debug.lib)
   set(TBB_INCLUDE_DIR ${LIBDIR}/tbb/include)
   set(TBB_INCLUDE_DIRS ${TBB_INCLUDE_DIR})
   if(WITH_TBB_MALLOC_PROXY)
+    set(TBB_MALLOC_LIBRARIES optimized ${LIBDIR}/tbb/lib/tbbmalloc.lib debug ${LIBDIR}/tbb/lib/tbbmalloc_debug.lib)
     add_definitions(-DWITH_TBB_MALLOC)
   endif()
 endif()
@@ -15,6 +15,15 @@ if(WITH_WINDOWS_BUNDLE_CRT)

   include(InstallRequiredSystemLibraries)

+  # ucrtbase(d).dll cannot be in the manifest, due to the way windows 10 handles
+  # redirects for this dll, for details see T88813.
+  foreach(lib ${CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS})
+    string(FIND ${lib} "ucrtbase" pos)
+    if(NOT pos EQUAL -1)
+      list(REMOVE_ITEM CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS ${lib})
+      install(FILES ${lib} DESTINATION . COMPONENT Libraries)
+    endif()
+  endforeach()
   # Install the CRT to the blender.crt Sub folder.
   install(FILES ${CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS} DESTINATION ./blender.crt COMPONENT Libraries)

build_files/config/README.md (new file, 8 lines)
@@ -0,0 +1,8 @@
+Pipeline Config
+===============
+
+This configuration file is used by buildbot new pipeline for the `update-code` step.
+
+It will soon be used by the ../utils/make_update.py script.
+
+Both buildbot and developers will eventually use the same configuration file.
build_files/config/pipeline_config.json (new file, 87 lines)
@@ -0,0 +1,87 @@
+{
+    "update-code":
+    {
+        "git" :
+        {
+            "submodules":
+            [
+                { "path": "release/scripts/addons", "branch": "master", "commit_id": "HEAD" },
+                { "path": "release/scripts/addons_contrib", "branch": "master", "commit_id": "HEAD" },
+                { "path": "release/datafiles/locale", "branch": "master", "commit_id": "HEAD" },
+                { "path": "source/tools", "branch": "master", "commit_id": "HEAD" }
+            ]
+        },
+        "svn":
+        {
+            "tests": { "path": "lib/tests", "branch": "trunk", "commit_id": "HEAD" },
+            "libraries":
+            {
+                "darwin-x86_64": { "path": "lib/darwin", "branch": "trunk", "commit_id": "HEAD" },
+                "darwin-arm64": { "path": "lib/darwin_arm64", "branch": "trunk", "commit_id": "HEAD" },
+                "linux-x86_64": { "path": "lib/linux_centos7_x86_64", "branch": "trunk", "commit_id": "HEAD" },
+                "windows-amd64": { "path": "lib/win64_vc15", "branch": "trunk", "commit_id": "HEAD" }
+            }
+        }
+    },
+    "buildbot":
+    {
+        "gcc":
+        {
+            "version": "9.0"
+        },
+        "sdks":
+        {
+            "optix":
+            {
+                "version": "7.1.0"
+            },
+            "cuda10":
+            {
+                "version": "10.1"
+            },
+            "cuda11":
+            {
+                "version": "11.3"
+            }
+        },
+        "cmake":
+        {
+            "default":
+            {
+                "version": "any",
+                "overrides":
+                {
+
+                }
+            },
+            "darwin-x86_64":
+            {
+                "overrides":
+                {
+
+                }
+            },
+            "darwin-arm64":
+            {
+                "overrides":
+                {
+
+                }
+            },
+            "linux-x86_64":
+            {
+                "overrides":
+                {
+
+                }
+            },
+            "windows-amd64":
+            {
+                "overrides":
+                {
+
+                }
+            }
+        }
+    }
+}
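For reference, a minimal sketch (not part of the diff) of how a consumer such as make_update.py might read the submodule list from the "update-code" section above; the helper name and default path are illustrative only.

    import json

    def read_submodules(path="build_files/config/pipeline_config.json"):
        # Hypothetical helper: yields (path, branch, commit_id) for each git
        # submodule declared in the pipeline config shown above.
        with open(path, "r", encoding="utf-8") as handle:
            config = json.load(handle)
        return [(sub["path"], sub["branch"], sub["commit_id"])
                for sub in config["update-code"]["git"]["submodules"]]

    for sub_path, branch, commit_id in read_submodules():
        print(f"{sub_path}: {branch}@{commit_id}")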
build_files/utils/README.md (new file, 5 lines)
@@ -0,0 +1,5 @@
+Make Utility Scripts
+====================
+
+Scripts used only by developers for now
+
@@ -1,9 +1,12 @@
 echo No explicit msvc version requested, autodetecting version.

+call "%~dp0\detect_msvc2019.cmd"
+if %ERRORLEVEL% EQU 0 goto DetectionComplete
+
 call "%~dp0\detect_msvc2017.cmd"
 if %ERRORLEVEL% EQU 0 goto DetectionComplete

-call "%~dp0\detect_msvc2019.cmd"
+call "%~dp0\detect_msvc2022.cmd"
 if %ERRORLEVEL% EQU 0 goto DetectionComplete

 echo Compiler Detection failed. Use verbose switch for more information.
@@ -1,5 +1,6 @@
 if "%BUILD_VS_YEAR%"=="2017" set BUILD_VS_LIBDIRPOST=vc15
 if "%BUILD_VS_YEAR%"=="2019" set BUILD_VS_LIBDIRPOST=vc15
+if "%BUILD_VS_YEAR%"=="2022" set BUILD_VS_LIBDIRPOST=vc15

 set BUILD_VS_SVNDIR=win64_%BUILD_VS_LIBDIRPOST%
 set BUILD_VS_LIBDIR="%BLENDER_DIR%..\lib\%BUILD_VS_SVNDIR%"
@@ -19,10 +19,10 @@ if "%WITH_PYDEBUG%"=="1" (
   set PYDEBUG_CMAKE_ARGS=-DWINDOWS_PYTHON_DEBUG=On
 )

-if "%BUILD_VS_YEAR%"=="2019" (
-  set BUILD_PLATFORM_SELECT=-A %MSBUILD_PLATFORM%
-) else (
+if "%BUILD_VS_YEAR%"=="2017" (
   set BUILD_GENERATOR_POST=%WINDOWS_ARCH%
+) else (
+  set BUILD_PLATFORM_SELECT=-A %MSBUILD_PLATFORM%
 )

 set BUILD_CMAKE_ARGS=%BUILD_CMAKE_ARGS% -G "Visual Studio %BUILD_VS_VER% %BUILD_VS_YEAR%%BUILD_GENERATOR_POST%" %BUILD_PLATFORM_SELECT% %TESTS_CMAKE_ARGS% %CLANG_CMAKE_ARGS% %ASAN_CMAKE_ARGS% %PYDEBUG_CMAKE_ARGS%
build_files/windows/detect_msvc2022.cmd (new file, 3 lines)
@@ -0,0 +1,3 @@
+set BUILD_VS_VER=17
+set BUILD_VS_YEAR=2022
+call "%~dp0\detect_msvc_vswhere.cmd"
@@ -66,6 +66,14 @@ if NOT "%1" == "" (
 ) else if "%1" == "2019b" (
   set BUILD_VS_YEAR=2019
   set VSWHERE_ARGS=-products Microsoft.VisualStudio.Product.BuildTools
+) else if "%1" == "2022" (
+  set BUILD_VS_YEAR=2022
+) else if "%1" == "2022pre" (
+  set BUILD_VS_YEAR=2022
+  set VSWHERE_ARGS=-prerelease
+) else if "%1" == "2022b" (
+  set BUILD_VS_YEAR=2022
+  set VSWHERE_ARGS=-products Microsoft.VisualStudio.Product.BuildTools
 ) else if "%1" == "packagename" (
   set BUILD_CMAKE_ARGS=%BUILD_CMAKE_ARGS% -DCPACK_OVERRIDE_PACKAGENAME="%2"
   shift /1
@@ -85,7 +85,7 @@ def openBlendFile(filename):
    '''
    handle = open(filename, 'rb')
    magic = ReadString(handle, 7)
-   if magic in ("BLENDER", "BULLETf"):
+   if magic in {"BLENDER", "BULLETf"}:
        log.debug("normal blendfile detected")
        handle.seek(0, os.SEEK_SET)
        return handle
@@ -137,7 +137,7 @@ class BlendFile:
        fileblock = BlendFileBlock(handle, self)
        found_dna_block = False
        while not found_dna_block:
-           if fileblock.Header.Code in ("DNA1", "SDNA"):
+           if fileblock.Header.Code in {"DNA1", "SDNA"}:
                self.Catalog = DNACatalog(self.Header, handle)
                found_dna_block = True
            else:
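The tuple-to-set change above is purely idiomatic; a small standalone illustration (not from the diff, names are made up):

    MAGIC_NUMBERS = {"BLENDER", "BULLETf"}  # literal set: reads as "one of these constants"

    def looks_like_blendfile(magic):
        # Behaves the same as `magic in ("BLENDER", "BULLETf")`; the set form signals a
        # membership test against fixed constants instead of a positional tuple scan.
        return magic in MAGIC_NUMBERS

    print(looks_like_blendfile("BLENDER"))   # True
    print(looks_like_blendfile("BULLETx"))   # False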
@@ -1,4 +1,4 @@
-# Doxyfile 1.8.16
+# Doxyfile 1.9.1

 # This file describes the settings to be used by the documentation system
 # doxygen (www.doxygen.org) for a project.
@@ -38,7 +38,7 @@ PROJECT_NAME = Blender
 # could be handy for archiving the generated documentation or if some version
 # control system is used.

-PROJECT_NUMBER = "V3.0"
+PROJECT_NUMBER = V3.0

 # Using the PROJECT_BRIEF tag one can provide an optional one line description
 # for a project that appears at the top of each page and should give viewer a
@@ -227,6 +227,14 @@ QT_AUTOBRIEF = NO

 MULTILINE_CPP_IS_BRIEF = NO

+# By default Python docstrings are displayed as preformatted text and doxygen's
+# special commands cannot be used. By setting PYTHON_DOCSTRING to NO the
+# doxygen's special commands can be used and the contents of the docstring
+# documentation blocks is shown as doxygen documentation.
+# The default value is: YES.
+
+PYTHON_DOCSTRING = YES
+
 # If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
 # documentation from any documented member that it re-implements.
 # The default value is: YES.
@@ -263,12 +271,6 @@ TAB_SIZE = 4

 ALIASES =

-# This tag can be used to specify a number of word-keyword mappings (TCL only).
-# A mapping has the form "name=value". For example adding "class=itcl::class"
-# will allow you to use the command class in the itcl::class meaning.
-
-TCL_SUBST =
-
 # Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
 # only. Doxygen will then generate output that is more tailored for C. For
 # instance, some of the names that are used will be different. The list of all
@@ -309,19 +311,22 @@ OPTIMIZE_OUTPUT_SLICE = NO
 # parses. With this tag you can assign which parser to use for a given
 # extension. Doxygen has a built-in mapping, but you can override or extend it
 # using this tag. The format is ext=language, where ext is a file extension, and
-# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
-# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice,
+# language is one of the parsers supported by doxygen: IDL, Java, JavaScript,
+# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice, VHDL,
 # Fortran (fixed format Fortran: FortranFixed, free formatted Fortran:
 # FortranFree, unknown formatted Fortran: Fortran. In the later case the parser
 # tries to guess whether the code is fixed or free formatted code, this is the
-# default for Fortran type files), VHDL, tcl. For instance to make doxygen treat
-# .inc files as Fortran files (default is PHP), and .f files as C (default is
-# Fortran), use: inc=Fortran f=C.
+# default for Fortran type files). For instance to make doxygen treat .inc files
+# as Fortran files (default is PHP), and .f files as C (default is Fortran),
+# use: inc=Fortran f=C.
 #
 # Note: For files without extension you can use no_extension as a placeholder.
 #
 # Note that for custom extensions you also need to set FILE_PATTERNS otherwise
-# the files are not read by doxygen.
+# the files are not read by doxygen. When specifying no_extension you should add
+# * to the FILE_PATTERNS.
+#
+# Note see also the list of default file extension mappings.

 EXTENSION_MAPPING =

@@ -455,6 +460,19 @@ TYPEDEF_HIDES_STRUCT = NO

 LOOKUP_CACHE_SIZE = 3

+# The NUM_PROC_THREADS specifies the number threads doxygen is allowed to use
+# during processing. When set to 0 doxygen will based this on the number of
+# cores available in the system. You can set it explicitly to a value larger
+# than 0 to get more control over the balance between CPU load and processing
+# speed. At this moment only the input processing can be done using multiple
+# threads. Since this is still an experimental feature the default is set to 1,
+# which efficively disables parallel processing. Please report any issues you
+# encounter. Generating dot graphs in parallel is controlled by the
+# DOT_NUM_THREADS setting.
+# Minimum value: 0, maximum value: 32, default value: 1.
+
+NUM_PROC_THREADS = 1
+
 #---------------------------------------------------------------------------
 # Build related configuration options
 #---------------------------------------------------------------------------
@@ -518,6 +536,13 @@ EXTRACT_LOCAL_METHODS = NO

 EXTRACT_ANON_NSPACES = NO

+# If this flag is set to YES, the name of an unnamed parameter in a declaration
+# will be determined by the corresponding definition. By default unnamed
+# parameters remain unnamed in the output.
+# The default value is: YES.
+
+RESOLVE_UNNAMED_PARAMS = YES
+
 # If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
 # undocumented members inside documented classes or files. If set to NO these
 # members will be included in the various overviews, but no documentation
@@ -535,8 +560,8 @@ HIDE_UNDOC_MEMBERS = NO
 HIDE_UNDOC_CLASSES = NO

 # If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
-# (class|struct|union) declarations. If set to NO, these declarations will be
-# included in the documentation.
+# declarations. If set to NO, these declarations will be included in the
+# documentation.
 # The default value is: NO.

 HIDE_FRIEND_COMPOUNDS = NO
@@ -555,11 +580,18 @@ HIDE_IN_BODY_DOCS = NO

 INTERNAL_DOCS = YES

-# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
-# names in lower-case letters. If set to YES, upper-case letters are also
-# allowed. This is useful if you have classes or files whose names only differ
-# in case and if your file system supports case sensitive file names. Windows
-# (including Cygwin) ands Mac users are advised to set this option to NO.
+# With the correct setting of option CASE_SENSE_NAMES doxygen will better be
+# able to match the capabilities of the underlying filesystem. In case the
+# filesystem is case sensitive (i.e. it supports files in the same directory
+# whose names only differ in casing), the option must be set to YES to properly
+# deal with such files in case they appear in the input. For filesystems that
+# are not case sensitive the option should be be set to NO to properly deal with
+# output files written for symbols that only differ in casing, such as for two
+# classes, one named CLASS and the other named Class, and to also support
+# references to files without having to specify the exact matching casing. On
+# Windows (including Cygwin) and MacOS, users should typically set this option
+# to NO, whereas on Linux or other Unix flavors it should typically be set to
+# YES.
 # The default value is: system dependent.

 CASE_SENSE_NAMES = YES
@@ -798,7 +830,10 @@ WARN_IF_DOC_ERROR = YES
 WARN_NO_PARAMDOC = NO

 # If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when
-# a warning is encountered.
+# a warning is encountered. If the WARN_AS_ERROR tag is set to FAIL_ON_WARNINGS
+# then doxygen will continue running as if WARN_AS_ERROR tag is set to NO, but
+# at the end of the doxygen process doxygen will return with a non-zero status.
+# Possible values are: NO, YES and FAIL_ON_WARNINGS.
 # The default value is: NO.

 WARN_AS_ERROR = NO
@@ -840,8 +875,8 @@ INPUT = doxygen.main.h \
 # This tag can be used to specify the character encoding of the source files
 # that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
 # libiconv (or the iconv built into libc) for the transcoding. See the libiconv
-# documentation (see: https://www.gnu.org/software/libiconv/) for the list of
-# possible encodings.
+# documentation (see:
+# https://www.gnu.org/software/libiconv/) for the list of possible encodings.
 # The default value is: UTF-8.

 INPUT_ENCODING = UTF-8
@@ -854,11 +889,15 @@ INPUT_ENCODING = UTF-8
 # need to set EXTENSION_MAPPING for the extension otherwise the files are not
 # read by doxygen.
 #
+# Note the list of default checked file patterns might differ from the list of
+# default file extension mappings.
+#
 # If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp,
 # *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h,
 # *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc,
-# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08,
-# *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf, *.qsf and *.ice.
+# *.m, *.markdown, *.md, *.mm, *.dox (to be provided as doxygen C comment),
+# *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, *.f18, *.f, *.for, *.vhd, *.vhdl,
+# *.ucf, *.qsf and *.ice.

 FILE_PATTERNS =

@@ -1086,13 +1125,6 @@ VERBATIM_HEADERS = YES

 ALPHABETICAL_INDEX = YES

-# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
-# which the alphabetical index list will be split.
-# Minimum value: 1, maximum value: 20, default value: 5.
-# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
-
-COLS_IN_ALPHA_INDEX = 5
-
 # In case all classes in a project start with a common prefix, all classes will
 # be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
 # can be used to specify a prefix (or a list of prefixes) that should be ignored
@@ -1231,9 +1263,9 @@ HTML_TIMESTAMP = YES

 # If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML
 # documentation will contain a main index with vertical navigation menus that
-# are dynamically created via Javascript. If disabled, the navigation index will
+# are dynamically created via JavaScript. If disabled, the navigation index will
 # consists of multiple levels of tabs that are statically embedded in every HTML
-# page. Disable this option to support browsers that do not have Javascript,
+# page. Disable this option to support browsers that do not have JavaScript,
 # like the Qt help browser.
 # The default value is: YES.
 # This tag requires that the tag GENERATE_HTML is set to YES.
@@ -1263,10 +1295,11 @@ HTML_INDEX_NUM_ENTRIES = 100

 # If the GENERATE_DOCSET tag is set to YES, additional index files will be
 # generated that can be used as input for Apple's Xcode 3 integrated development
-# environment (see: https://developer.apple.com/xcode/), introduced with OSX
-# 10.5 (Leopard). To create a documentation set, doxygen will generate a
-# Makefile in the HTML output directory. Running make will produce the docset in
-# that directory and running make install will install the docset in
+# environment (see:
+# https://developer.apple.com/xcode/), introduced with OSX 10.5 (Leopard). To
+# create a documentation set, doxygen will generate a Makefile in the HTML
+# output directory. Running make will produce the docset in that directory and
+# running make install will install the docset in
 # ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
 # startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy
 # genXcode/_index.html for more information.
@@ -1308,8 +1341,8 @@ DOCSET_PUBLISHER_NAME = Publisher
 # If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
 # additional HTML index files: index.hhp, index.hhc, and index.hhk. The
 # index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
-# (see: https://www.microsoft.com/en-us/download/details.aspx?id=21138) on
-# Windows.
+# (see:
+# https://www.microsoft.com/en-us/download/details.aspx?id=21138) on Windows.
 #
 # The HTML Help Workshop contains a compiler that can convert all HTML output
 # generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
@@ -1339,7 +1372,7 @@ CHM_FILE = blender.chm
 HHC_LOCATION = "C:/Program Files (x86)/HTML Help Workshop/hhc.exe"

 # The GENERATE_CHI flag controls if a separate .chi index file is generated
-# (YES) or that it should be included in the master .chm file (NO).
+# (YES) or that it should be included in the main .chm file (NO).
 # The default value is: NO.
 # This tag requires that the tag GENERATE_HTMLHELP is set to YES.

@@ -1384,7 +1417,8 @@ QCH_FILE =

 # The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
 # Project output. For more information please see Qt Help Project / Namespace
-# (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace).
+# (see:
+# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace).
 # The default value is: org.doxygen.Project.
 # This tag requires that the tag GENERATE_QHP is set to YES.

@@ -1392,8 +1426,8 @@ QHP_NAMESPACE = org.doxygen.Project

 # The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
 # Help Project output. For more information please see Qt Help Project / Virtual
-# Folders (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-
-# folders).
+# Folders (see:
+# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-folders).
 # The default value is: doc.
 # This tag requires that the tag GENERATE_QHP is set to YES.

@@ -1401,16 +1435,16 @@ QHP_VIRTUAL_FOLDER = doc

 # If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
 # filter to add. For more information please see Qt Help Project / Custom
-# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-
-# filters).
+# Filters (see:
+# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters).
 # This tag requires that the tag GENERATE_QHP is set to YES.

 QHP_CUST_FILTER_NAME =

 # The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
 # custom filter to add. For more information please see Qt Help Project / Custom
-# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-
-# filters).
+# Filters (see:
+# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters).
 # This tag requires that the tag GENERATE_QHP is set to YES.

 QHP_CUST_FILTER_ATTRS =
@@ -1422,9 +1456,9 @@ QHP_CUST_FILTER_ATTRS =

 QHP_SECT_FILTER_ATTRS =

-# The QHG_LOCATION tag can be used to specify the location of Qt's
-# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
-# generated .qhp file.
+# The QHG_LOCATION tag can be used to specify the location (absolute path
+# including file name) of Qt's qhelpgenerator. If non-empty doxygen will try to
+# run qhelpgenerator on the generated .qhp file.
 # This tag requires that the tag GENERATE_QHP is set to YES.

 QHG_LOCATION =
@@ -1501,6 +1535,17 @@ TREEVIEW_WIDTH = 246

 EXT_LINKS_IN_WINDOW = NO

+# If the HTML_FORMULA_FORMAT option is set to svg, doxygen will use the pdf2svg
+# tool (see https://github.com/dawbarton/pdf2svg) or inkscape (see
+# https://inkscape.org) to generate formulas as SVG images instead of PNGs for
+# the HTML output. These images will generally look nicer at scaled resolutions.
+# Possible values are: png (the default) and svg (looks nicer but requires the
+# pdf2svg or inkscape tool).
+# The default value is: png.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_FORMULA_FORMAT = png
+
 # Use this tag to change the font size of LaTeX formulas included as images in
 # the HTML documentation. When you change the font size after a successful
 # doxygen run you need to manually remove any form_*.png images from the HTML
@@ -1521,8 +1566,14 @@ FORMULA_FONTSIZE = 10

 FORMULA_TRANSPARENT = YES

+# The FORMULA_MACROFILE can contain LaTeX \newcommand and \renewcommand commands
+# to create new LaTeX commands to be used in formulas as building blocks. See
+# the section "Including formulas" for details.
+
+FORMULA_MACROFILE =
+
 # Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
-# https://www.mathjax.org) which uses client side Javascript for the rendering
+# https://www.mathjax.org) which uses client side JavaScript for the rendering
 # instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
 # installed or if you want to formulas look prettier in the HTML output. When
 # enabled you may also need to install MathJax separately and configure the path
@@ -1534,7 +1585,7 @@ USE_MATHJAX = NO

 # When MathJax is enabled you can set the default output format to be used for
 # the MathJax output. See the MathJax site (see:
-# http://docs.mathjax.org/en/latest/output.html) for more details.
+# http://docs.mathjax.org/en/v2.7-latest/output.html) for more details.
 # Possible values are: HTML-CSS (which is slower, but has the best
 # compatibility), NativeMML (i.e. MathML) and SVG.
 # The default value is: HTML-CSS.
@@ -1550,7 +1601,7 @@ MATHJAX_FORMAT = HTML-CSS
 # Content Delivery Network so you can quickly see the result without installing
 # MathJax. However, it is strongly recommended to install a local copy of
 # MathJax from https://www.mathjax.org before deployment.
-# The default value is: https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/.
+# The default value is: https://cdn.jsdelivr.net/npm/mathjax@2.
 # This tag requires that the tag USE_MATHJAX is set to YES.

 MATHJAX_RELPATH = http://www.mathjax.org/mathjax
@@ -1564,7 +1615,8 @@ MATHJAX_EXTENSIONS =

 # The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
 # of code that will be used on startup of the MathJax code. See the MathJax site
-# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
+# (see:
+# http://docs.mathjax.org/en/v2.7-latest/output.html) for more details. For an
 # example see the documentation.
 # This tag requires that the tag USE_MATHJAX is set to YES.

@@ -1592,7 +1644,7 @@ MATHJAX_CODEFILE =
 SEARCHENGINE = NO

 # When the SERVER_BASED_SEARCH tag is enabled the search engine will be
-# implemented using a web server instead of a web client using Javascript. There
+# implemented using a web server instead of a web client using JavaScript. There
 # are two flavors of web server based searching depending on the EXTERNAL_SEARCH
 # setting. When disabled, doxygen will generate a PHP script for searching and
 # an index file used by the script. When EXTERNAL_SEARCH is enabled the indexing
@@ -1611,7 +1663,8 @@ SERVER_BASED_SEARCH = NO
 #
 # Doxygen ships with an example indexer (doxyindexer) and search engine
 # (doxysearch.cgi) which are based on the open source search engine library
-# Xapian (see: https://xapian.org/).
+# Xapian (see:
+# https://xapian.org/).
 #
 # See the section "External Indexing and Searching" for details.
 # The default value is: NO.
@@ -1624,8 +1677,9 @@ EXTERNAL_SEARCH = NO
 #
 # Doxygen ships with an example indexer (doxyindexer) and search engine
 # (doxysearch.cgi) which are based on the open source search engine library
-# Xapian (see: https://xapian.org/). See the section "External Indexing and
-# Searching" for details.
+# Xapian (see:
+# https://xapian.org/). See the section "External Indexing and Searching" for
+# details.
 # This tag requires that the tag SEARCHENGINE is set to YES.

 SEARCHENGINE_URL =
@@ -1789,9 +1843,11 @@ LATEX_EXTRA_FILES =

 PDF_HYPERLINKS = NO

-# If the USE_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate
-# the PDF file directly from the LaTeX files. Set this option to YES, to get a
-# higher quality PDF documentation.
+# If the USE_PDFLATEX tag is set to YES, doxygen will use the engine as
+# specified with LATEX_CMD_NAME to generate the PDF file directly from the LaTeX
+# files. Set this option to YES, to get a higher quality PDF documentation.
+#
+# See also section LATEX_CMD_NAME for selecting the engine.
 # The default value is: YES.
 # This tag requires that the tag GENERATE_LATEX is set to YES.

@@ -2126,7 +2182,8 @@ INCLUDE_FILE_PATTERNS =
 # recursively expanded use the := operator instead of the = operator.
 # This tag requires that the tag ENABLE_PREPROCESSING is set to YES.

-PREDEFINED = BUILD_DATE
+PREDEFINED = BUILD_DATE \
+             DOXYGEN=1

 # If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
 # tag can be used to specify a list of macro names that should be expanded. The
@@ -2303,10 +2360,32 @@ UML_LOOK = YES
 # but if the number exceeds 15, the total amount of fields shown is limited to
 # 10.
 # Minimum value: 0, maximum value: 100, default value: 10.
-# This tag requires that the tag HAVE_DOT is set to YES.
+# This tag requires that the tag UML_LOOK is set to YES.

 UML_LIMIT_NUM_FIELDS = 10

+# If the DOT_UML_DETAILS tag is set to NO, doxygen will show attributes and
+# methods without types and arguments in the UML graphs. If the DOT_UML_DETAILS
+# tag is set to YES, doxygen will add type and arguments for attributes and
+# methods in the UML graphs. If the DOT_UML_DETAILS tag is set to NONE, doxygen
+# will not generate fields with class member information in the UML graphs. The
+# class diagrams will look similar to the default class diagrams but using UML
+# notation for the relationships.
+# Possible values are: NO, YES and NONE.
+# The default value is: NO.
+# This tag requires that the tag UML_LOOK is set to YES.
+
+DOT_UML_DETAILS = NO
+
+# The DOT_WRAP_THRESHOLD tag can be used to set the maximum number of characters
+# to display on a single line. If the actual line length exceeds this threshold
+# significantly it will wrapped across multiple lines. Some heuristics are apply
+# to avoid ugly line breaks.
+# Minimum value: 0, maximum value: 1000, default value: 17.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_WRAP_THRESHOLD = 17
+
 # If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and
 # collaboration graphs will show the relations between templates and their
 # instances.
@@ -2496,9 +2575,11 @@ DOT_MULTI_TARGETS = YES

 GENERATE_LEGEND = YES

-# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate dot
+# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate
 # files that are used to generate the various graphs.
+#
+# Note: This setting is not only used for dot files but also for msc and
+# plantuml temporary files.
 # The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.

 DOT_CLEANUP = YES
@@ -122,7 +122,7 @@ is a full-featured 3D application. It supports the entirety of the 3D pipeline -
 '''modeling, rigging, animation, simulation, rendering, compositing, motion tracking, and video editing.

 Use Blender to create 3D images and animations, films and commercials, content for games, '''
-r'''architectural and industrial visualizatons, and scientific visualizations.
+r'''architectural and industrial visualizations, and scientific visualizations.

 https://www.blender.org''')

@@ -12,6 +12,7 @@ such cases, lock the interface (Render → Lock Interface or
 Below is an example of a mesh that is altered from a handler:
 """

+
 def frame_change_pre(scene):
     # A triangle that shifts in the z direction
     zshift = scene.frame_current * 0.1
@@ -16,10 +16,12 @@ execution_queue = queue.Queue()
 def run_in_main_thread(function):
     execution_queue.put(function)

+
 def execute_queued_functions():
     while not execution_queue.empty():
         function = execution_queue.get()
         function()
     return 1.0

+
 bpy.app.timers.register(execute_queued_functions)
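A short usage sketch for the queue-based pattern above (assumes the example's run_in_main_thread(); the worker thread and printed value are made up for illustration):

    import functools
    import threading

    def worker():
        # Pretend this value was computed off the main thread; hand the follow-up
        # work back to the main thread through the queued-function timer above.
        value = 42
        run_in_main_thread(functools.partial(print, "computed:", value))

    threading.Thread(target=worker).start()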
@@ -31,11 +31,13 @@ owner = object()

 subscribe_to = bpy.context.object.location

+
 def msgbus_callback(*args):
     # This will print:
     # Something changed! (1, 2, 3)
     print("Something changed!", args)

+
 bpy.msgbus.subscribe_rna(
     key=subscribe_to,
     owner=owner,
@@ -44,7 +44,7 @@ class OBJECT_OT_object_to_curve(bpy.types.Operator):
         # Remove temporary curve.
         obj.to_curve_clear()
         # Invoke to_curve() with applying modifiers.
-        curve_with_modifiers = obj.to_curve(depsgraph, apply_modifiers = True)
+        curve_with_modifiers = obj.to_curve(depsgraph, apply_modifiers=True)
         self.report({'INFO'}, f"{len(curve_with_modifiers.splines)} splines in new curve with modifiers.")
         # Remove temporary curve.
         obj.to_curve_clear()
@@ -4,7 +4,9 @@ Simple Render Engine
 """

 import bpy
-import bgl
+import array
+import gpu
+from gpu_extras.presets import draw_texture_2d


 class CustomRenderEngine(bpy.types.RenderEngine):
@@ -100,8 +102,7 @@ class CustomRenderEngine(bpy.types.RenderEngine):
         dimensions = region.width, region.height

         # Bind shader that converts from scene linear to display space,
-        bgl.glEnable(bgl.GL_BLEND)
-        bgl.glBlendFunc(bgl.GL_ONE, bgl.GL_ONE_MINUS_SRC_ALPHA)
+        gpu.state.blend_set('ALPHA_PREMULT')
         self.bind_display_space_shader(scene)

         if not self.draw_data or self.draw_data.dimensions != dimensions:
@@ -110,7 +111,7 @@ class CustomRenderEngine(bpy.types.RenderEngine):
         self.draw_data.draw()

         self.unbind_display_space_shader()
-        bgl.glDisable(bgl.GL_BLEND)
+        gpu.state.blend_set('NONE')


 class CustomDrawData:
@@ -119,68 +120,21 @@ class CustomDrawData:
         self.dimensions = dimensions
         width, height = dimensions

-        pixels = [0.1, 0.2, 0.1, 1.0] * width * height
-        pixels = bgl.Buffer(bgl.GL_FLOAT, width * height * 4, pixels)
+        pixels = width * height * array.array('f', [0.1, 0.2, 0.1, 1.0])
+        pixels = gpu.types.Buffer('FLOAT', width * height * 4, pixels)

         # Generate texture
-        self.texture = bgl.Buffer(bgl.GL_INT, 1)
-        bgl.glGenTextures(1, self.texture)
-        bgl.glActiveTexture(bgl.GL_TEXTURE0)
-        bgl.glBindTexture(bgl.GL_TEXTURE_2D, self.texture[0])
-        bgl.glTexImage2D(bgl.GL_TEXTURE_2D, 0, bgl.GL_RGBA16F, width, height, 0, bgl.GL_RGBA, bgl.GL_FLOAT, pixels)
-        bgl.glTexParameteri(bgl.GL_TEXTURE_2D, bgl.GL_TEXTURE_MIN_FILTER, bgl.GL_LINEAR)
-        bgl.glTexParameteri(bgl.GL_TEXTURE_2D, bgl.GL_TEXTURE_MAG_FILTER, bgl.GL_LINEAR)
-        bgl.glBindTexture(bgl.GL_TEXTURE_2D, 0)
+        self.texture = gpu.types.GPUTexture((width, height), format='RGBA16F', data=pixels)

-        # Bind shader that converts from scene linear to display space,
-        # use the scene's color management settings.
-        shader_program = bgl.Buffer(bgl.GL_INT, 1)
-        bgl.glGetIntegerv(bgl.GL_CURRENT_PROGRAM, shader_program)
-
-        # Generate vertex array
-        self.vertex_array = bgl.Buffer(bgl.GL_INT, 1)
-        bgl.glGenVertexArrays(1, self.vertex_array)
-        bgl.glBindVertexArray(self.vertex_array[0])
-
-        texturecoord_location = bgl.glGetAttribLocation(shader_program[0], "texCoord")
-        position_location = bgl.glGetAttribLocation(shader_program[0], "pos")
-
-        bgl.glEnableVertexAttribArray(texturecoord_location)
-        bgl.glEnableVertexAttribArray(position_location)
-
-        # Generate geometry buffers for drawing textured quad
-        position = [0.0, 0.0, width, 0.0, width, height, 0.0, height]
-        position = bgl.Buffer(bgl.GL_FLOAT, len(position), position)
-        texcoord = [0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0]
-        texcoord = bgl.Buffer(bgl.GL_FLOAT, len(texcoord), texcoord)
-
-        self.vertex_buffer = bgl.Buffer(bgl.GL_INT, 2)
-
-        bgl.glGenBuffers(2, self.vertex_buffer)
-        bgl.glBindBuffer(bgl.GL_ARRAY_BUFFER, self.vertex_buffer[0])
-        bgl.glBufferData(bgl.GL_ARRAY_BUFFER, 32, position, bgl.GL_STATIC_DRAW)
-        bgl.glVertexAttribPointer(position_location, 2, bgl.GL_FLOAT, bgl.GL_FALSE, 0, None)
-
-        bgl.glBindBuffer(bgl.GL_ARRAY_BUFFER, self.vertex_buffer[1])
-        bgl.glBufferData(bgl.GL_ARRAY_BUFFER, 32, texcoord, bgl.GL_STATIC_DRAW)
-        bgl.glVertexAttribPointer(texturecoord_location, 2, bgl.GL_FLOAT, bgl.GL_FALSE, 0, None)
-
-        bgl.glBindBuffer(bgl.GL_ARRAY_BUFFER, 0)
-        bgl.glBindVertexArray(0)
+        # Note: This is just a didactic example.
+        # In this case it would be more convenient to fill the texture with:
+        # self.texture.clear('FLOAT', value=[0.1, 0.2, 0.1, 1.0])

     def __del__(self):
-        bgl.glDeleteBuffers(2, self.vertex_buffer)
-        bgl.glDeleteVertexArrays(1, self.vertex_array)
-        bgl.glBindTexture(bgl.GL_TEXTURE_2D, 0)
-        bgl.glDeleteTextures(1, self.texture)
+        del self.texture

     def draw(self):
-        bgl.glActiveTexture(bgl.GL_TEXTURE0)
-        bgl.glBindTexture(bgl.GL_TEXTURE_2D, self.texture[0])
-        bgl.glBindVertexArray(self.vertex_array[0])
-        bgl.glDrawArrays(bgl.GL_TRIANGLE_FAN, 0, 4)
-        bgl.glBindVertexArray(0)
-        bgl.glBindTexture(bgl.GL_TEXTURE_2D, 0)
+        draw_texture_2d(self.texture, (0, 0), self.texture.width, self.texture.height)


 # RenderEngines also need to tell UI Panels that they are compatible with.
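For reference, the gpu-module pattern the rewritten example relies on, reduced to a standalone sketch (sizes and pixel values are arbitrary; runs inside Blender only):

    import array
    import gpu
    from gpu_extras.presets import draw_texture_2d

    width, height = 256, 256
    pixels = width * height * array.array('f', [0.1, 0.2, 0.1, 1.0])
    buffer = gpu.types.Buffer('FLOAT', width * height * 4, pixels)
    # GPUTexture replaces the manual bgl texture setup removed above.
    texture = gpu.types.GPUTexture((width, height), format='RGBA16F', data=buffer)

    def draw():
        # draw_texture_2d replaces the hand-rolled textured-quad VAO/VBO code.
        draw_texture_2d(texture, (0, 0), texture.width, texture.height)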
@@ -4,7 +4,6 @@ Mesh with Random Vertex Colors
 """
 import bpy
 import gpu
-import bgl
 import numpy as np
 from random import random
 from gpu_extras.batch import batch_for_shader
@@ -31,9 +30,10 @@ batch = batch_for_shader(


 def draw():
-    bgl.glEnable(bgl.GL_DEPTH_TEST)
+    gpu.state.depth_test_set('LESS_EQUAL')
+    gpu.state.depth_mask_set(True)
     batch.draw(shader)
-    bgl.glDisable(bgl.GL_DEPTH_TEST)
+    gpu.state.depth_mask_set(False)


 bpy.types.SpaceView3D.draw_handler_add(draw, (), 'WINDOW', 'POST_VIEW')
@@ -6,11 +6,11 @@ To use this example you have to provide an image that should be displayed.
 """
 import bpy
 import gpu
-import bgl
 from gpu_extras.batch import batch_for_shader

 IMAGE_NAME = "Untitled"
 image = bpy.data.images[IMAGE_NAME]
+texture = gpu.texture.from_image(image)

 shader = gpu.shader.from_builtin('2D_IMAGE')
 batch = batch_for_shader(
@@ -21,16 +21,10 @@ batch = batch_for_shader(
     },
 )

-if image.gl_load():
-    raise Exception()
-

 def draw():
-    bgl.glActiveTexture(bgl.GL_TEXTURE0)
-    bgl.glBindTexture(bgl.GL_TEXTURE_2D, image.bindcode)
-
     shader.bind()
-    shader.uniform_int("image", 0)
+    shader.uniform_sampler("image", texture)
     batch.draw(shader)


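The same texture-based approach in isolation, assembled from the pieces shown in this diff (a sketch: it assumes an image named "Untitled" exists and runs inside Blender only; the handler registration line is illustrative):

    import bpy
    import gpu
    from gpu_extras.batch import batch_for_shader

    image = bpy.data.images["Untitled"]        # assumed to exist, as in the example
    texture = gpu.texture.from_image(image)    # replaces image.gl_load()/image.bindcode

    shader = gpu.shader.from_builtin('2D_IMAGE')
    batch = batch_for_shader(
        shader, 'TRI_FAN',
        {
            "pos": ((0, 0), (100, 0), (100, 100), (0, 100)),
            "texCoord": ((0, 0), (1, 0), (1, 1), (0, 1)),
        },
    )

    def draw():
        shader.bind()
        # The texture object is bound directly; no bgl.glActiveTexture/glBindTexture.
        shader.uniform_sampler("image", texture)
        batch.draw(shader)

    bpy.types.SpaceImageEditor.draw_handler_add(draw, (), 'WINDOW', 'POST_PIXEL')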
@@ -9,7 +9,6 @@ Generate a texture using Offscreen Rendering
 """
 import bpy
 import gpu
-import bgl
 from mathutils import Matrix
 from gpu_extras.batch import batch_for_shader
 from gpu_extras.presets import draw_circle_2d
@@ -20,8 +19,8 @@ from gpu_extras.presets import draw_circle_2d
 offscreen = gpu.types.GPUOffScreen(512, 512)

 with offscreen.bind():
-    bgl.glClearColor(0.0, 0.0, 0.0, 0.0)
-    bgl.glClear(bgl.GL_COLOR_BUFFER_BIT)
+    fb = gpu.state.active_framebuffer_get()
+    fb.clear(color=(0.0, 0.0, 0.0, 0.0))
     with gpu.matrix.push_pop():
         # reset matrices -> use normalized device coordinates [-1, 1]
         gpu.matrix.load_matrix(Matrix.Identity(4))
@@ -30,7 +29,7 @@ with offscreen.bind():
         amount = 10
         for i in range(-amount, amount + 1):
             x_pos = i / amount
-            draw_circle_2d((x_pos, 0.0), (1, 1, 1, 1), 0.5, 200)
+            draw_circle_2d((x_pos, 0.0), (1, 1, 1, 1), 0.5, segments=200)


 # Drawing the generated texture in 3D space
|
|||||||
|
|
||||||
|
|
||||||
def draw():
|
def draw():
|
||||||
bgl.glActiveTexture(bgl.GL_TEXTURE0)
|
|
||||||
bgl.glBindTexture(bgl.GL_TEXTURE_2D, offscreen.color_texture)
|
|
||||||
|
|
||||||
shader.bind()
|
shader.bind()
|
||||||
shader.uniform_float("modelMatrix", Matrix.Translation((1, 2, 3)) @ Matrix.Scale(3, 4))
|
shader.uniform_float("modelMatrix", Matrix.Translation((1, 2, 3)) @ Matrix.Scale(3, 4))
|
||||||
shader.uniform_float("viewProjectionMatrix", bpy.context.region_data.perspective_matrix)
|
shader.uniform_float("viewProjectionMatrix", bpy.context.region_data.perspective_matrix)
|
||||||
shader.uniform_float("image", 0)
|
shader.uniform_sampler("image", offscreen.texture_color)
|
||||||
batch.draw(shader)
|
batch.draw(shader)
|
||||||
|
|
||||||
|
|
||||||
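Note: the offscreen hunks above share one clearing pattern: inside offscreen.bind() the active framebuffer is the offscreen's own, so it can be cleared through the gpu API. A minimal sketch of just that pattern (sizes and values are illustrative):

import gpu
from mathutils import Matrix
from gpu_extras.presets import draw_circle_2d

offscreen = gpu.types.GPUOffScreen(512, 512)

with offscreen.bind():
    fb = gpu.state.active_framebuffer_get()        # the offscreen's framebuffer
    fb.clear(color=(0.0, 0.0, 0.0, 0.0))           # replaces glClearColor + glClear
    with gpu.matrix.push_pop():
        gpu.matrix.load_matrix(Matrix.Identity(4))             # draw in NDC [-1, 1]
        gpu.matrix.load_projection_matrix(Matrix.Identity(4))
        draw_circle_2d((0.0, 0.0), (1, 1, 1, 1), 0.5, segments=32)

# offscreen.texture_color now holds the result; call offscreen.free() when done.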
@@ -7,11 +7,10 @@ If it already exists, it will override the existing one.

 Currently almost all of the execution time is spent in the last line.
 In the future this will hopefully be solved by implementing the Python buffer protocol
-for :class:`bgl.Buffer` and :class:`bpy.types.Image.pixels` (aka ``bpy_prop_array``).
+for :class:`gpu.types.Buffer` and :class:`bpy.types.Image.pixels` (aka ``bpy_prop_array``).
 """
 import bpy
 import gpu
-import bgl
 import random
 from mathutils import Matrix
 from gpu_extras.presets import draw_circle_2d
@@ -25,8 +24,8 @@ RING_AMOUNT = 10
 offscreen = gpu.types.GPUOffScreen(WIDTH, HEIGHT)

 with offscreen.bind():
-bgl.glClearColor(0.0, 0.0, 0.0, 0.0)
-bgl.glClear(bgl.GL_COLOR_BUFFER_BIT)
+fb = gpu.state.active_framebuffer_get()
+fb.clear(color=(0.0, 0.0, 0.0, 0.0))
 with gpu.matrix.push_pop():
 # reset matrices -> use normalized device coordinates [-1, 1]
 gpu.matrix.load_matrix(Matrix.Identity(4))
@@ -35,11 +34,11 @@ with offscreen.bind():
 for i in range(RING_AMOUNT):
 draw_circle_2d(
 (random.uniform(-1, 1), random.uniform(-1, 1)),
-(1, 1, 1, 1), random.uniform(0.1, 1), 20)
+(1, 1, 1, 1), random.uniform(0.1, 1),
+segments=20,
+)

-buffer = bgl.Buffer(bgl.GL_BYTE, WIDTH * HEIGHT * 4)
-bgl.glReadBuffer(bgl.GL_BACK)
-bgl.glReadPixels(0, 0, WIDTH, HEIGHT, bgl.GL_RGBA, bgl.GL_UNSIGNED_BYTE, buffer)
+buffer = fb.read_color(0, 0, WIDTH, HEIGHT, 4, 0, 'UBYTE')

 offscreen.free()

@@ -48,4 +47,6 @@ if not IMAGE_NAME in bpy.data.images:
 bpy.data.images.new(IMAGE_NAME, WIDTH, HEIGHT)
 image = bpy.data.images[IMAGE_NAME]
 image.scale(WIDTH, HEIGHT)

+buffer.dimensions = WIDTH * HEIGHT * 4
 image.pixels = [v / 255 for v in buffer]
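Note: summarizing the read-back path above: fb.read_color() returns a gpu.types.Buffer of unsigned bytes, which is flattened via its dimensions attribute before being assigned to Image.pixels (still the slow step mentioned in the docstring). A compact sketch with illustrative sizes and image name:

import bpy
import gpu

WIDTH, HEIGHT = 256, 256
IMAGE_NAME = "Offscreen Readback"  # illustrative name

offscreen = gpu.types.GPUOffScreen(WIDTH, HEIGHT)
with offscreen.bind():
    fb = gpu.state.active_framebuffer_get()
    fb.clear(color=(0.2, 0.4, 0.8, 1.0))
    # 4 channels, slot 0, 'UBYTE' -> one byte per channel
    buffer = fb.read_color(0, 0, WIDTH, HEIGHT, 4, 0, 'UBYTE')
offscreen.free()

if IMAGE_NAME not in bpy.data.images:
    bpy.data.images.new(IMAGE_NAME, WIDTH, HEIGHT)
image = bpy.data.images[IMAGE_NAME]
image.scale(WIDTH, HEIGHT)

buffer.dimensions = WIDTH * HEIGHT * 4      # flatten the buffer
image.pixels = [v / 255 for v in buffer]    # bytes -> floats in [0, 1]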
@@ -7,7 +7,6 @@ You could also make this independent of a specific camera,
 but Blender does not expose good functions to create view and projection matrices yet.
 """
 import bpy
-import bgl
 import gpu
 from gpu_extras.presets import draw_texture_2d

@@ -34,8 +33,8 @@ def draw():
 view_matrix,
 projection_matrix)

-bgl.glDisable(bgl.GL_DEPTH_TEST)
-draw_texture_2d(offscreen.color_texture, (10, 10), WIDTH, HEIGHT)
+gpu.state.depth_mask_set(False)
+draw_texture_2d(offscreen.texture_color, (10, 10), WIDTH, HEIGHT)


 bpy.types.SpaceView3D.draw_handler_add(draw, (), 'WINDOW', 'POST_PIXEL')
doc/python_api/examples/mathutils.Matrix.LocRotScale.py | 5 | Normal file
@@ -0,0 +1,5 @@
+# Compute local object transformation matrix:
+if obj.rotation_mode == 'QUATERNION':
+matrix = mathutils.Matrix.LocRotScale(obj.location, obj.rotation_quaternion, obj.scale)
+else:
+matrix = mathutils.Matrix.LocRotScale(obj.location, obj.rotation_euler, obj.scale)
@@ -14,10 +14,14 @@ mat_rot = mathutils.Matrix.Rotation(math.radians(45.0), 4, 'X')
 mat_out = mat_loc @ mat_rot @ mat_sca
 print(mat_out)

-# extract components back out of the matrix
+# extract components back out of the matrix as two vectors and a quaternion
 loc, rot, sca = mat_out.decompose()
 print(loc, rot, sca)

+# recombine extracted components
+mat_out2 = mathutils.Matrix.LocRotScale(loc, rot, sca)
+print(mat_out2)
+
 # it can also be useful to access components of a matrix directly
 mat = mathutils.Matrix()
 mat[0][0], mat[1][0], mat[2][0] = 0.0, 1.0, 2.0
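Note: a short, self-contained sketch of the decompose()/LocRotScale() round trip these two example files document (values are arbitrary):

import math
import mathutils

mat = (
    mathutils.Matrix.Translation((1.0, 2.0, 3.0))
    @ mathutils.Matrix.Rotation(math.radians(30.0), 4, 'X')
    @ mathutils.Matrix.Scale(2.0, 4)
)

loc, rot, sca = mat.decompose()                        # Vector, Quaternion, Vector
rebuilt = mathutils.Matrix.LocRotScale(loc, rot, sca)

# The rotation argument also accepts an Euler (or None for no rotation).
rebuilt_from_euler = mathutils.Matrix.LocRotScale(loc, rot.to_euler(), sca)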
@@ -1,2 +1,13 @@
-Sphinx==3.5.3
+sphinx==3.5.4
+
+# Sphinx dependencies that are important
+Jinja2==2.11.3
+Pygments==2.9.0
+docutils==0.16
+snowballstemmer==2.1.0
+babel==2.9.1
+requests==2.25.1
+
+# Only needed to match the theme used for the official documentation.
+# Without this theme, the default theme will be used.
 sphinx_rtd_theme==0.5.2
@@ -545,6 +545,13 @@ def range_str(val):


 def example_extract_docstring(filepath):
+'''
+Return (text, line_no, line_no_has_content) where:
+- ``text`` is the doc-string text.
+- ``line_no`` is the line the doc-string text ends.
+- ``line_no_has_content`` when False, this file only contains a doc-string.
+There is no need to include the remainder.
+'''
 file = open(filepath, "r", encoding="utf-8")
 line = file.readline()
 line_no = 0
@@ -553,7 +560,7 @@ def example_extract_docstring(filepath):
 line_no += 1
 else:
 file.close()
-return "", 0, False
+return "", 0, True

 for line in file:
 line_no += 1
@@ -947,7 +954,7 @@ def pymodule2sphinx(basepath, module_name, module, title, module_all_extra):
 # constant, not much fun we can do here except to list it.
 # TODO, figure out some way to document these!
 fw(".. data:: %s\n\n" % attribute)
-write_indented_lines(" ", fw, "constant value %s" % repr(value), False)
+write_indented_lines(" ", fw, "Constant value %s" % repr(value), False)
 fw("\n")
 else:
 BPY_LOGGER.debug("\tnot documenting %s.%s of %r type" % (module_name, attribute, value_type.__name__))
@@ -1029,7 +1036,6 @@ def pymodule2sphinx(basepath, module_name, module, title, module_all_extra):
 context_type_map = {
 # context_member: (RNA type, is_collection)
 "active_annotation_layer": ("GPencilLayer", False),
-"active_base": ("ObjectBase", False),
 "active_bone": ("EditBone", False),
 "active_gpencil_frame": ("GreasePencilLayer", True),
 "active_gpencil_layer": ("GPencilLayer", True),
@@ -1041,6 +1047,7 @@ context_type_map = {
 "annotation_data": ("GreasePencil", False),
 "annotation_data_owner": ("ID", False),
 "armature": ("Armature", False),
+"asset_library": ("AssetLibraryReference", False),
 "bone": ("Bone", False),
 "brush": ("Brush", False),
 "camera": ("Camera", False),
@@ -1107,6 +1114,7 @@ context_type_map = {
 "texture_slot": ("MaterialTextureSlot", False),
 "texture_user": ("ID", False),
 "texture_user_property": ("Property", False),
+"ui_list": ("UIList", False),
 "vertex_paint_object": ("Object", False),
 "view_layer": ("ViewLayer", False),
 "visible_bones": ("EditBone", True),
@@ -1240,7 +1248,7 @@ def pyrna_enum2sphinx(prop, use_empty_descriptions=False):
 "%s.\n" % (
 identifier,
 # Account for multi-line enum descriptions, allowing this to be a block of text.
-indent(", ".join(escape_rst(val) for val in (name, description) if val) or "Undocumented", " "),
+indent(" -- ".join(escape_rst(val) for val in (name, description) if val) or "Undocumented", " "),
 )
 for identifier, name, description in prop.enum_items
 ])
@@ -1549,8 +1557,8 @@ def pyrna2sphinx(basepath):
 fw(".. hlist::\n")
 fw(" :columns: 2\n\n")

-# context does its own thing
-# "active_base": ("ObjectBase", False),
+# Context does its own thing.
+# "active_object": ("Object", False),
 for ref_attr, (ref_type, ref_is_seq) in sorted(context_type_map.items()):
 if ref_type == struct_id:
 fw(" * :mod:`bpy.context.%s`\n" % ref_attr)
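Note: the new docstring for example_extract_docstring() describes its (text, line_no, line_no_has_content) contract. Purely as a hypothetical illustration (not the script's implementation), the same contract could be expressed with the ast module:

import ast

def extract_docstring(filepath):
    """Return (text, line_no, line_no_has_content) for a Python example file."""
    with open(filepath, "r", encoding="utf-8") as fh:
        module = ast.parse(fh.read())
    text = ast.get_docstring(module, clean=False)
    if text is None:
        return "", 0, True
    line_no = module.body[0].end_lineno        # line where the doc-string ends
    has_content = len(module.body) > 1         # False: file is only a doc-string
    return text, line_no, has_content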
@@ -1,7 +1,8 @@
 /* T76453: Prevent Long enum lists */
-.field-list li {
+.field-list > dd p {
 max-height: 245px;
 overflow-y: auto !important;
+word-break: break-word;
 }

 /* Hide home icon in search area */
@@ -11,3 +12,15 @@
 .wy-nav-content {
 max-width: 1000px !important;
 }
+
+/* Fix long titles on mobile */
+h1, h2, h3, h4, h5, h6 {word-break: break-all}
+
+/* Temp fix for https://github.com/readthedocs/sphinx_rtd_theme/pull/1109 */
+.hlist tr {
+display: -ms-flexbox;
+display: flex;
+flex-flow: row wrap;
+}
+
+.hlist td {margin-right: auto}
@@ -265,6 +265,12 @@ protected:
 */
 void setSpecs(Specs specs);

+/**
+* Sets the audio output specification of the device.
+* \param specs The output specification.
+*/
+void setSpecs(DeviceSpecs specs);
+
 /**
 * Empty default constructor. To setup the device call the function create()
 * and to uninitialize call destroy().
extern/audaspace/include/respec/Mixer.h | 6 | vendored
@@ -87,6 +87,12 @@ public:
 */
 void setSpecs(Specs specs);

+/**
+* Sets the target specification for superposing.
+* \param specs The target specification.
+*/
+void setSpecs(DeviceSpecs specs);
+
 /**
 * Mixes a buffer.
 * \param buffer The buffer to superpose.
@@ -75,6 +75,7 @@ void FFMPEGWriter::encode()
 m_frame->nb_samples = m_input_samples;
 m_frame->format = m_codecCtx->sample_fmt;
 m_frame->channel_layout = m_codecCtx->channel_layout;
+m_frame->channels = m_specs.channels;

 if(avcodec_fill_audio_frame(m_frame, m_specs.channels, m_codecCtx->sample_fmt, reinterpret_cast<data_t*>(data), m_input_buffer.getSize(), 0) < 0)
 AUD_THROW(FileException, "File couldn't be written, filling the audio frame failed with ffmpeg.");
@@ -78,6 +78,7 @@ void PulseAudioDevice::runMixingThread()
 if(shouldStop())
 {
 AUD_pa_stream_cork(m_stream, 1, nullptr, nullptr);
+AUD_pa_stream_flush(m_stream, nullptr, nullptr);
 doStop();
 return;
 }
@@ -86,7 +87,10 @@ void PulseAudioDevice::runMixingThread()
 if(AUD_pa_stream_is_corked(m_stream))
 AUD_pa_stream_cork(m_stream, 0, nullptr, nullptr);

-AUD_pa_mainloop_iterate(m_mainloop, true, nullptr);
+// similar to AUD_pa_mainloop_iterate(m_mainloop, false, nullptr); except with a longer timeout
+AUD_pa_mainloop_prepare(m_mainloop, 1 << 14);
+AUD_pa_mainloop_poll(m_mainloop);
+AUD_pa_mainloop_dispatch(m_mainloop);
 }
 }

@@ -24,6 +24,7 @@ PULSEAUDIO_SYMBOL(pa_context_unref);
 PULSEAUDIO_SYMBOL(pa_stream_begin_write);
 PULSEAUDIO_SYMBOL(pa_stream_connect_playback);
 PULSEAUDIO_SYMBOL(pa_stream_cork);
+PULSEAUDIO_SYMBOL(pa_stream_flush);
 PULSEAUDIO_SYMBOL(pa_stream_is_corked);
 PULSEAUDIO_SYMBOL(pa_stream_new);
 PULSEAUDIO_SYMBOL(pa_stream_set_buffer_attr);
@@ -35,3 +36,6 @@ PULSEAUDIO_SYMBOL(pa_mainloop_free);
 PULSEAUDIO_SYMBOL(pa_mainloop_get_api);
 PULSEAUDIO_SYMBOL(pa_mainloop_new);
 PULSEAUDIO_SYMBOL(pa_mainloop_iterate);
+PULSEAUDIO_SYMBOL(pa_mainloop_prepare);
+PULSEAUDIO_SYMBOL(pa_mainloop_poll);
+PULSEAUDIO_SYMBOL(pa_mainloop_dispatch);
extern/audaspace/plugins/wasapi/WASAPIDevice.cpp | 199 | vendored
@@ -31,65 +31,81 @@ template <class T> void SafeRelease(T **ppT)
 }
 }

-void WASAPIDevice::runMixingThread()
+HRESULT WASAPIDevice::setupRenderClient(IAudioRenderClient*& render_client, UINT32& buffer_size)
 {
-UINT32 buffer_size;
+const IID IID_IAudioRenderClient = __uuidof(IAudioRenderClient);

 UINT32 padding;
 UINT32 length;
 data_t* buffer;

-IAudioRenderClient* render_client = nullptr;
+HRESULT result;

-{
-std::lock_guard<ILockable> lock(*this);
+if(FAILED(result = m_audio_client->GetBufferSize(&buffer_size)))
+return result;

-const IID IID_IAudioRenderClient = __uuidof(IAudioRenderClient);
+if(FAILED(result = m_audio_client->GetService(IID_IAudioRenderClient, reinterpret_cast<void**>(&render_client))))
+return result;

-if(FAILED(m_audio_client->GetBufferSize(&buffer_size)))
-goto init_error;
+if(FAILED(result = m_audio_client->GetCurrentPadding(&padding)))
+return result;

-if(FAILED(m_audio_client->GetService(IID_IAudioRenderClient, reinterpret_cast<void**>(&render_client))))
-goto init_error;
+length = buffer_size - padding;

-if(FAILED(m_audio_client->GetCurrentPadding(&padding)))
-goto init_error;
+if(FAILED(result = render_client->GetBuffer(length, &buffer)))
+return result;

-length = buffer_size - padding;
+mix((data_t*)buffer, length);

-if(FAILED(render_client->GetBuffer(length, &buffer)))
-goto init_error;
+if(FAILED(result = render_client->ReleaseBuffer(length, 0)))
+return result;

-mix((data_t*)buffer, length);
-
-if(FAILED(render_client->ReleaseBuffer(length, 0)))
-{
-init_error:
-SafeRelease(&render_client);
-doStop();
-return;
-}
-}

 m_audio_client->Start();

-auto sleepDuration = std::chrono::milliseconds(buffer_size * 1000 / int(m_specs.rate) / 2);
+return result;
+}
+
+void WASAPIDevice::runMixingThread()
+{
+UINT32 buffer_size;
+
+IAudioRenderClient* render_client = nullptr;
+
+std::chrono::milliseconds sleep_duration;
+
+bool run_init = true;
+
 for(;;)
 {
+HRESULT result = S_OK;
+
 {
+UINT32 padding;
+UINT32 length;
+data_t* buffer;
 std::lock_guard<ILockable> lock(*this);

-if(FAILED(m_audio_client->GetCurrentPadding(&padding)))
+if(run_init)
+{
+result = setupRenderClient(render_client, buffer_size);
+
+if(FAILED(result))
+goto stop_thread;
+
+sleep_duration = std::chrono::milliseconds(buffer_size * 1000 / int(m_specs.rate) / 2);
+}
+
+if(FAILED(result = m_audio_client->GetCurrentPadding(&padding)))
 goto stop_thread;

 length = buffer_size - padding;

-if(FAILED(render_client->GetBuffer(length, &buffer)))
+if(FAILED(result = render_client->GetBuffer(length, &buffer)))
 goto stop_thread;

 mix((data_t*)buffer, length);

-if(FAILED(render_client->ReleaseBuffer(length, 0)))
+if(FAILED(result = render_client->ReleaseBuffer(length, 0)))
 goto stop_thread;

 // stop thread
@@ -98,53 +114,51 @@ void WASAPIDevice::runMixingThread()
 stop_thread:
 m_audio_client->Stop();
 SafeRelease(&render_client);
-doStop();
-return;
+
+if(result == AUDCLNT_E_DEVICE_INVALIDATED)
+{
+DeviceSpecs specs = m_specs;
+if(!setupDevice(specs))
+result = S_FALSE;
+else
+{
+setSpecs(specs);
+
+run_init = true;
+}
+}
+
+if(result != AUDCLNT_E_DEVICE_INVALIDATED)
+{
+doStop();
+return;
+}
 }
 }

-std::this_thread::sleep_for(sleepDuration);
+std::this_thread::sleep_for(sleep_duration);
 }
 }

-WASAPIDevice::WASAPIDevice(DeviceSpecs specs, int buffersize) :
-m_imm_device_enumerator(nullptr),
-m_imm_device(nullptr),
-m_audio_client(nullptr),
-
-m_wave_format_extensible({})
+bool WASAPIDevice::setupDevice(DeviceSpecs &specs)
 {
-// initialize COM if it hasn't happened yet
-CoInitializeEx(nullptr, COINIT_MULTITHREADED);
+SafeRelease(&m_audio_client);
+SafeRelease(&m_imm_device);

-const CLSID CLSID_MMDeviceEnumerator = __uuidof(MMDeviceEnumerator);
-const IID IID_IMMDeviceEnumerator = __uuidof(IMMDeviceEnumerator);
 const IID IID_IAudioClient = __uuidof(IAudioClient);

+if(FAILED(m_imm_device_enumerator->GetDefaultAudioEndpoint(eRender, eMultimedia, &m_imm_device)))
+return false;
+
+if(FAILED(m_imm_device->Activate(IID_IAudioClient, CLSCTX_ALL, nullptr, reinterpret_cast<void**>(&m_audio_client))))
+return false;
+
 WAVEFORMATEXTENSIBLE wave_format_extensible_closest_match;
 WAVEFORMATEXTENSIBLE* closest_match_pointer = &wave_format_extensible_closest_match;

-HRESULT result;
-
 REFERENCE_TIME minimum_time = 0;
 REFERENCE_TIME buffer_duration;

-if(FAILED(CoCreateInstance(CLSID_MMDeviceEnumerator, nullptr, CLSCTX_ALL, IID_IMMDeviceEnumerator, reinterpret_cast<void**>(&m_imm_device_enumerator))))
-goto error;
-
-if(FAILED(m_imm_device_enumerator->GetDefaultAudioEndpoint(eRender, eMultimedia, &m_imm_device)))
-goto error;
-
-if(FAILED(m_imm_device->Activate(IID_IAudioClient, CLSCTX_ALL, nullptr, reinterpret_cast<void**>(&m_audio_client))))
-goto error;
-
-if(specs.channels == CHANNELS_INVALID)
-specs.channels = CHANNELS_STEREO;
-if(specs.format == FORMAT_INVALID)
-specs.format = FORMAT_FLOAT32;
-if(specs.rate == RATE_INVALID)
-specs.rate = RATE_48000;
-
 switch(specs.format)
 {
 case FORMAT_U8:
@@ -203,12 +217,14 @@ WASAPIDevice::WASAPIDevice(DeviceSpecs specs, int buffersize) :
 m_wave_format_extensible.Format.cbSize = 22;
 m_wave_format_extensible.Samples.wValidBitsPerSample = m_wave_format_extensible.Format.wBitsPerSample;

-result = m_audio_client->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED, reinterpret_cast<const WAVEFORMATEX*>(&m_wave_format_extensible), reinterpret_cast<WAVEFORMATEX**>(&closest_match_pointer));
+HRESULT result = m_audio_client->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED, reinterpret_cast<const WAVEFORMATEX*>(&m_wave_format_extensible), reinterpret_cast<WAVEFORMATEX**>(&closest_match_pointer));

 if(result == S_FALSE)
 {
+bool errored = false;
+
 if(closest_match_pointer->Format.wFormatTag != WAVE_FORMAT_EXTENSIBLE)
-goto error;
+goto closest_match_error;

 specs.channels = Channels(closest_match_pointer->Format.nChannels);
 specs.rate = closest_match_pointer->Format.nSamplesPerSec;
@@ -220,7 +236,7 @@ WASAPIDevice::WASAPIDevice(DeviceSpecs specs, int buffersize) :
 else if(closest_match_pointer->Format.wBitsPerSample == 64)
 specs.format = FORMAT_FLOAT64;
 else
-goto error;
+goto closest_match_error;
 }
 else if(closest_match_pointer->SubFormat == KSDATAFORMAT_SUBTYPE_PCM)
 {
@@ -239,44 +255,81 @@ WASAPIDevice::WASAPIDevice(DeviceSpecs specs, int buffersize) :
 specs.format = FORMAT_S32;
 break;
 default:
-goto error;
+goto closest_match_error;
 break;
 }
 }
 else
-goto error;
+goto closest_match_error;

 m_wave_format_extensible = *closest_match_pointer;

+if(false)
+{
+closest_match_error:
+errored = true;
+}
+
 if(closest_match_pointer != &wave_format_extensible_closest_match)
 {
 CoTaskMemFree(closest_match_pointer);
 closest_match_pointer = &wave_format_extensible_closest_match;
 }
+
+if(errored)
+return false;
 }
 else if(FAILED(result))
-goto error;
+return false;

 if(FAILED(m_audio_client->GetDevicePeriod(nullptr, &minimum_time)))
-goto error;
+return false;

-buffer_duration = REFERENCE_TIME(buffersize) * REFERENCE_TIME(10000000) / REFERENCE_TIME(specs.rate);
+buffer_duration = REFERENCE_TIME(m_buffersize) * REFERENCE_TIME(10000000) / REFERENCE_TIME(specs.rate);

 if(minimum_time > buffer_duration)
 buffer_duration = minimum_time;

-m_specs = specs;
-
 if(FAILED(m_audio_client->Initialize(AUDCLNT_SHAREMODE_SHARED, 0, buffer_duration, 0, reinterpret_cast<WAVEFORMATEX*>(&m_wave_format_extensible), nullptr)))
+return false;
+
+return true;
+}
+
+WASAPIDevice::WASAPIDevice(DeviceSpecs specs, int buffersize) :
+m_buffersize(buffersize),
+m_imm_device_enumerator(nullptr),
+m_imm_device(nullptr),
+m_audio_client(nullptr),
+
+m_wave_format_extensible({})
+{
+// initialize COM if it hasn't happened yet
+CoInitializeEx(nullptr, COINIT_MULTITHREADED);
+
+const CLSID CLSID_MMDeviceEnumerator = __uuidof(MMDeviceEnumerator);
+const IID IID_IMMDeviceEnumerator = __uuidof(IMMDeviceEnumerator);
+
+if(specs.channels == CHANNELS_INVALID)
+specs.channels = CHANNELS_STEREO;
+if(specs.format == FORMAT_INVALID)
+specs.format = FORMAT_FLOAT32;
+if(specs.rate == RATE_INVALID)
+specs.rate = RATE_48000;
+
+if(FAILED(CoCreateInstance(CLSID_MMDeviceEnumerator, nullptr, CLSCTX_ALL, IID_IMMDeviceEnumerator, reinterpret_cast<void**>(&m_imm_device_enumerator))))
 goto error;

+if(!setupDevice(specs))
+goto error;
+
+m_specs = specs;
+
 create();

 return;

 error:
-if(closest_match_pointer != &wave_format_extensible_closest_match)
-CoTaskMemFree(closest_match_pointer);
 SafeRelease(&m_imm_device);
 SafeRelease(&m_imm_device_enumerator);
 SafeRelease(&m_audio_client);
@@ -43,16 +43,21 @@ AUD_NAMESPACE_BEGIN
 class AUD_PLUGIN_API WASAPIDevice : public ThreadedDevice
 {
 private:
+int m_buffersize;
 IMMDeviceEnumerator* m_imm_device_enumerator;
 IMMDevice* m_imm_device;
 IAudioClient* m_audio_client;
 WAVEFORMATEXTENSIBLE m_wave_format_extensible;

+AUD_LOCAL HRESULT setupRenderClient(IAudioRenderClient*& render_client, UINT32& buffer_size);
+
 /**
 * Streaming thread main function.
 */
 AUD_LOCAL void runMixingThread();

+AUD_LOCAL bool setupDevice(DeviceSpecs& specs);
+
 // delete copy constructor and operator=
 WASAPIDevice(const WASAPIDevice&) = delete;
 WASAPIDevice& operator=(const WASAPIDevice&) = delete;
extern/audaspace/src/devices/SoftwareDevice.cpp | 22 | vendored
@@ -756,6 +756,7 @@ void SoftwareDevice::mix(data_t* buffer, int length)
 // get the buffer from the source
 pos = 0;
 len = length;
+eos = false;

 // update 3D Info
 sound->update();
@@ -842,6 +843,27 @@ void SoftwareDevice::setSpecs(Specs specs)
 {
 sound->setSpecs(specs);
 }
+
+for(auto& sound : m_pausedSounds)
+{
+sound->setSpecs(specs);
+}
+}
+
+void SoftwareDevice::setSpecs(DeviceSpecs specs)
+{
+m_specs = specs;
+m_mixer->setSpecs(specs);
+
+for(auto& sound : m_playingSounds)
+{
+sound->setSpecs(specs.specs);
+}
+
+for(auto& sound : m_pausedSounds)
+{
+sound->setSpecs(specs.specs);
+}
 }

 SoftwareDevice::SoftwareDevice()
extern/audaspace/src/respec/Mixer.cpp | 30 | vendored
@@ -21,9 +21,25 @@

 AUD_NAMESPACE_BEGIN

-Mixer::Mixer(DeviceSpecs specs) :
-m_specs(specs)
+Mixer::Mixer(DeviceSpecs specs)
 {
+setSpecs(specs);
+}
+
+DeviceSpecs Mixer::getSpecs() const
+{
+return m_specs;
+}
+
+void Mixer::setSpecs(Specs specs)
+{
+m_specs.specs = specs;
+}
+
+void Mixer::setSpecs(DeviceSpecs specs)
+{
+m_specs = specs;
+
 switch(m_specs.format)
 {
 case FORMAT_U8:
@@ -54,16 +70,6 @@ Mixer::Mixer(DeviceSpecs specs) :
 }
 }

-DeviceSpecs Mixer::getSpecs() const
-{
-return m_specs;
-}
-
-void Mixer::setSpecs(Specs specs)
-{
-m_specs.specs = specs;
-}
-
 void Mixer::clear(int length)
 {
 m_buffer.assureSize(length * AUD_SAMPLE_SIZE(m_specs));
extern/glog/README.blender | 1 | vendored
@@ -7,3 +7,4 @@ Local modifications:
 checks for functions and so are needed.
 * Added special definitions of HAVE_SNPRINTF and HAVE_LIB_GFLAGS
 in Windows' specific config.h.
+* Silenced syscall deprecation warnings on macOS >= 10.12.
extern/glog/src/raw_logging.cc | 2 | vendored
@@ -59,7 +59,7 @@
 # include <unistd.h>
 #endif

-#if defined(HAVE_SYSCALL_H) || defined(HAVE_SYS_SYSCALL_H)
+#if (defined(HAVE_SYSCALL_H) || defined(HAVE_SYS_SYSCALL_H)) && (!(defined OS_MACOSX))
 # define safe_write(fd, s, len) syscall(SYS_write, fd, s, len)
 #else
 // Not so safe, but what can you do?
extern/glog/src/utilities.cc | 6 | vendored
@@ -259,7 +259,13 @@ pid_t GetTID() {
 #endif
 static bool lacks_gettid = false;
 if (!lacks_gettid) {
+#ifdef OS_MACOSX
+uint64_t tid64;
+const int error = pthread_threadid_np(NULL, &tid64);
+pid_t tid = error ? -1 : (pid_t)tid64;
+#else
 pid_t tid = syscall(__NR_gettid);
+#endif
 if (tid != -1) {
 return tid;
 }
extern/mantaflow/preprocessed/fileio/iovdb.cpp | 13 | vendored
@@ -29,10 +29,10 @@

 #if OPENVDB == 1
 # include "openvdb/openvdb.h"
-# include <openvdb/points/PointConversion.h>
-# include <openvdb/points/PointCount.h>
-# include <openvdb/tools/Clip.h>
-# include <openvdb/tools/Dense.h>
+# include "openvdb/points/PointConversion.h"
+# include "openvdb/points/PointCount.h"
+# include "openvdb/tools/Clip.h"
+# include "openvdb/tools/Dense.h"
 #endif

 #define POSITION_NAME "P"
@@ -519,7 +519,7 @@ int writeObjectsVDB(const string &filename,
 }
 }

-// Write only if the is at least one grid, optionally write with compression.
+// Write only if there is at least one grid, optionally write with compression.
 if (gridsVDB.size()) {
 int vdb_flags = openvdb::io::COMPRESS_ACTIVE_MASK;
 switch (compression) {
@@ -534,7 +534,8 @@ int writeObjectsVDB(const string &filename,
 }
 case COMPRESSION_BLOSC: {
 # if OPENVDB_BLOSC == 1
-vdb_flags |= openvdb::io::COMPRESS_BLOSC;
+// Cannot use |= here, causes segfault with blosc 1.5.0 (== recommended version)
+vdb_flags = openvdb::io::COMPRESS_BLOSC;
 # else
 debMsg("OpenVDB was built without Blosc support, using Zip compression instead", 1);
 vdb_flags |= openvdb::io::COMPRESS_ZIP;
extern/mantaflow/preprocessed/fluidsolver.h | 1 | vendored
@@ -384,6 +384,7 @@ class FluidSolver : public PbClass {
 GridStorage<Real> mGrids4dReal;
 GridStorage<Vec3> mGrids4dVec;
 GridStorage<Vec4> mGrids4dVec4;
+
 public:
 PbArgs _args;
 }
Some files were not shown because too many files have changed in this diff.