Compare commits

1147 commits: `last-py27` ... `wip-commen`
| SHA1 | Author | Date | |
|---|---|---|---|
| d0e12401c0 | |||
| 411a6f75c5 | |||
| 07821c7f97 | |||
| 64b4ce3ba9 | |||
| 72417a9abb | |||
| 6ae9a5ddeb | |||
| a897e201ba | |||
| 3985a00c6f | |||
| 119291f817 | |||
| 801cda88bf | |||
| fc99713732 | |||
| 1d909faf49 | |||
| ed35c54361 | |||
| 411b15b1a0 | |||
| 9b85a938f3 | |||
| 989a40a7f7 | |||
| 64cc4dc9bf | |||
| 9182188647 | |||
| 5896f4cfdd | |||
| f9a407054d | |||
| 1c46e4c96b | |||
| 2990738b5d | |||
| e2432f6e9f | |||
| aa63389b4f | |||
| 5075cd5bd0 | |||
| ceef04455c | |||
| c8e62e3610 | |||
| ce7cf52d70 | |||
| dc2105fbb8 | |||
| 71185af880 | |||
| 041f8914b2 | |||
| b4ee5b59bd | |||
| 314ce40e71 | |||
| 7e941e2299 | |||
| 53811363ce | |||
| 51057e4d63 | |||
| a1a48c1941 | |||
| 19fdc75e60 | |||
| 879bcffc2b | |||
| 6ad12d0098 | |||
| a738cdcad8 | |||
| 199f37c5d7 | |||
| 4cf93f00f6 | |||
| eaf9235fa9 | |||
| 24ecf36896 | |||
| 86aa494aed | |||
| 5a5b97d362 | |||
| 831858a336 | |||
| e9d247fe97 | |||
| 1ddd8525c7 | |||
| c43941807c | |||
| bbad8eb5c5 | |||
| 04f00cdd4f | |||
| 66d9fd0908 | |||
| 516ef2ddc7 | |||
| 35fb07ee64 | |||
| f1d67894dc | |||
| aef2cf8c2d | |||
| d347ddac2c | |||
| 186ba167f1 | |||
| 847e97fe8c | |||
| 7ace5f4292 | |||
| 6cb85b06dc | |||
| 5c019e8d1c | |||
| 7796179021 | |||
| 26aca917c8 | |||
| e262a5c240 | |||
| e079ac4da1 | |||
| 83097cf473 | |||
| f4ade9cda7 | |||
| 31244a89e5 | |||
| 749c3dbd58 | |||
| b1d97e723f | |||
| 46bdd4f51c | |||
| 93720e226c | |||
| 9a0da126e6 | |||
| 45672565e9 | |||
| 3e1273d56c | |||
| fe86f76617 | |||
| 008d9b8880 | |||
| 13b606df45 | |||
| 57f5836829 | |||
| e40ba69872 | |||
| 0aeae2cabd | |||
| 601b94e23a | |||
| 00c4ec8741 | |||
| caee114d48 | |||
| 7fccf02e68 | |||
| 1c42e8fd07 | |||
| 77f855be3e | |||
| cede3e75db | |||
| 02a7014bf4 | |||
| 04e51a9d3f | |||
| d4fd6b5cda | |||
| 2935b442d8 | |||
| 567247f3fd | |||
| def52944bf | |||
| 8753a12dee | |||
| 77e3c476f0 | |||
| 842ddaeab0 | |||
| 85e5cb4f71 | |||
| 6648f8d074 | |||
| a5bc36b1cf | |||
| e56b3ec61f | |||
| 9624f6bd76 | |||
| 4e5a53a19b | |||
| fbc7c0fce7 | |||
| bb483e72aa | |||
| baf27fa560 | |||
| 845ba953cb | |||
| e5b7905a5c | |||
| 88c0ef0e7c | |||
| f8d992400e | |||
| 263d68071e | |||
| 0f7f7d5a66 | |||
| 6b29c70212 | |||
| 07670dce96 | |||
| fe288b1cc2 | |||
| 2e9555e160 | |||
| b0311af6b5 | |||
| 35a22cab4b | |||
| 0055633732 | |||
| 78b186c8e4 | |||
| 232321cc2c | |||
| a6d662b690 | |||
| 32c7ffbc99 | |||
| cfcc629b61 | |||
| 8ea0310956 | |||
| c1958d2da7 | |||
| 030c5494a8 | |||
| 462f31406a | |||
| 1a1f67cf00 | |||
| 8d5bdf04aa | |||
| 9a9d15ce47 | |||
| c795015a3c | |||
| afda0062f5 | |||
| a97c8ffc93 | |||
| c5fa6b9535 | |||
| 2be41a7145 | |||
| e8fb77c39b | |||
| 40933d51cf | |||
| 9a9ca1bf8b | |||
| 0983474e76 | |||
| 6bcce87bb9 | |||
| 1401a6168f | |||
| 85eab0c6cb | |||
| a753637e70 | |||
| f87c7a25df | |||
| 3ae16d7750 | |||
| c546dd2881 | |||
| 48df0583ab | |||
| 094d15116e | |||
| 534d06ca8f | |||
| df078b395d | |||
| 5df92ca4cf | |||
| ecace8c55b | |||
| bcacdfb7ea | |||
| d7fd90ded1 | |||
| b9268337c3 | |||
| 9b62daec74 | |||
| 5cc5698477 | |||
| 00ba98d279 | |||
| e818c92d4e | |||
| 612862c048 | |||
| 6b3f025e16 | |||
| 8a90cd00e9 | |||
| 17a69b973e | |||
| 8380270128 | |||
| 35225a189d | |||
| be98a95fc0 | |||
| 95c1f913c6 | |||
| 9bcd6cec89 | |||
| 4532c1ea39 | |||
| e19dd27099 | |||
| f54e56bad8 | |||
| eb851ce6e1 | |||
| 586d9c0d3b | |||
| ac23c7b00b | |||
| 811edc5a2a | |||
| cb95bf989a | |||
| e4fa32b8e4 | |||
| 08bf63c2ee | |||
| 0baf5b38c3 | |||
| 858a75af8d | |||
| 6b1a5e24e8 | |||
| 1500e20291 | |||
| d347534fea | |||
| 4546469d37 | |||
| b0d8da821f | |||
| 1821bb6b7d | |||
| 278eebd235 | |||
| 2777c37085 | |||
| 5e07cfb9b2 | |||
| bc16bb6e56 | |||
| 0fcafddbd1 | |||
| f29e01c78e | |||
| 2698be3e12 | |||
| 9c2ded79dd | |||
| b4acfb89fa | |||
| 3f8e0396cf | |||
| 05c488c484 | |||
| 33bd2c5880 | |||
| 76338b4568 | |||
| 7405e198eb | |||
| 2332bc0960 | |||
| ac3a599bb6 | |||
| 814275fc95 | |||
| 40c19a3cb0 | |||
| a67527d6af | |||
| 791906521f | |||
| 2ad5b20880 | |||
| f6fd9228e5 | |||
| e9f303f330 | |||
| 00a7406a1e | |||
| 82aa521b5f | |||
| f7220924bc | |||
| 46b0d6d663 | |||
| 595bb48741 | |||
| 1c430044b9 | |||
| 73bc084417 | |||
| 37ca803162 | |||
| 939bb97f13 | |||
| 2c40665271 | |||
| e8123b7839 | |||
| 6d6a40b8c0 | |||
| efd345ec46 | |||
| d655d2b749 | |||
| a58e616769 | |||
| a8a7166e78 | |||
| 1649591d75 | |||
| 9389fef8ba | |||
| 6737aa1123 | |||
| 40f79af49d | |||
| 84608500b9 | |||
| 819300f954 | |||
| b569829343 | |||
| c35fb6202b | |||
| d0ff519980 | |||
| 6ff4ee8fa1 | |||
| b5535a8773 | |||
| 2ded541955 | |||
| 3965061bde | |||
| 5238e2c26d | |||
| 469f24d113 | |||
| 8a0f582a80 | |||
| 559e212c55 | |||
| 61278730c6 | |||
| 0fdcbc3947 | |||
| 8dc3296bd5 | |||
| a699138fd6 | |||
| 466adabbb0 | |||
| 7da741f354 | |||
| 41369d134c | |||
| 61ed083218 | |||
| 46777f7f8c | |||
| ef94c68177 | |||
| aaf452e18b | |||
| c607eaf23d | |||
| baa77a7de5 | |||
| 5fb40eb32b | |||
| c83a1a21b8 | |||
| 549cf0a3e8 | |||
| 9f380751f5 | |||
| 49075cbc60 | |||
| 81848c2c44 | |||
| 9ee7b742ab | |||
| 58c33074c3 | |||
| 756427b34e | |||
| 7e06212cd5 | |||
| ef3912b647 | |||
| 151484dee3 | |||
| bec1f209ba | |||
| 0e14bdd09f | |||
| ce6df542cc | |||
| 530302b74f | |||
| 1bfb6cd2f6 | |||
| 53b6210531 | |||
| aeaa03ed80 | |||
| 3319a578b9 | |||
| 24d47f0848 | |||
| 505e3c3a6d | |||
| e5259bb56c | |||
| 8c0c22d801 | |||
| 157eed8321 | |||
| 9ed526510f | |||
| ec2e4dee46 | |||
| c9789f46db | |||
| 289dc39e50 | |||
| 22b6673346 | |||
| 3e7722a567 | |||
| 1ba1da49c3 | |||
| a71de3a727 | |||
| 67e8e7c082 | |||
| cbb5d546ef | |||
| a86920fc73 | |||
| 14b31174dc | |||
| 1cb3a24e2f | |||
| a052e754f9 | |||
| 3b452d14ce | |||
| f4e0b9185b | |||
| 0841d52dd1 | |||
| f32630237a | |||
| 9ee816d366 | |||
| d10bdea6c5 | |||
| 5b061af3a5 | |||
| e69f991aa6 | |||
| fc9c518c2a | |||
| dcde2a4551 | |||
| fe7e078f8b | |||
| 8288455468 | |||
| 5eb464a1f3 | |||
| ab6b277293 | |||
| a4e415f1e3 | |||
| ebfd3d542c | |||
| 8f227076fd | |||
| a7cb3b9658 | |||
| 641f29ab30 | |||
| 17792df85e | |||
| bca8fac4cd | |||
| d3ff88e5cf | |||
| f22dc4d92a | |||
| 540dd28861 | |||
| 218c3f0dca | |||
| dfaac59e20 | |||
| 08ce84fe31 | |||
| d2a0a5ae26 | |||
| bf498b829c | |||
| 195edf679c | |||
| d24715a224 | |||
| dee0b18429 | |||
| de8bff51b5 | |||
| 318ccb2d95 | |||
| 12272750c3 | |||
| 0cf45c0d78 | |||
| e4f229cc70 | |||
| f8ccb8aaaa | |||
| fb2852acdc | |||
| e6edd00e46 | |||
| 479a435ec5 | |||
| d30a11c8f7 | |||
| 67a24e9d4e | |||
| 2bf0bf1064 | |||
| 678f72766e | |||
| 66e4229b9b | |||
| 99e0eb7a7a | |||
| 6a0e0721e9 | |||
| 97091457a8 | |||
| 6f69fe5b8a | |||
| 7292c534ed | |||
| df6297d40f | |||
| 257793dcd5 | |||
| 6e1d255dfc | |||
| f236845374 | |||
| 450dde56b7 | |||
| 854bc7cfaf | |||
| 0c7abdb99a | |||
| b10369a867 | |||
| 05187cacea | |||
| f79642d69f | |||
| 1f2fb774b4 | |||
| de801e41e3 | |||
| cd42ce6cba | |||
| eb18e5b933 | |||
| 350cf85cd2 | |||
| f2888069db | |||
| d0520484bb | |||
| d114b5631a | |||
| ce33ce994f | |||
| 05d5882c68 | |||
| 0c238284b0 | |||
| d85c45f10f | |||
| 06b2adf923 | |||
| 1ca2f336c4 | |||
| 284873ddd4 | |||
| d86c215c34 | |||
| 1b57b333df | |||
| 08a814525b | |||
| 27153bd74a | |||
| 20d80dee61 | |||
| ca7d528c85 | |||
| f8ff30fb4d | |||
| 7d1b08bf58 | |||
| 60abf6d4a9 | |||
| 7c384d1f45 | |||
| f18d5580c1 | |||
| 9177f77e69 | |||
| 4b5a961e14 | |||
| ed1e348d67 | |||
| 660b7a3811 | |||
| e5fb156224 | |||
| de1eab4596 | |||
| de1c227ccd | |||
| 230b2c669c | |||
| 2e2314c16b | |||
| 75e2402420 | |||
| fb121a9601 | |||
| f8c3408f18 | |||
| 89ca0516a9 | |||
| 5ae98507e3 | |||
| 66ac8c6587 | |||
| fd95135f66 | |||
| 987d6d03a6 | |||
| 9b3a836c83 | |||
| 741cdf6e12 | |||
| 0744aeb42f | |||
| ae7489d8e7 | |||
| 666da0adda | |||
| 889b5dc1c5 | |||
| b3a36f2833 | |||
| dd8d19178b | |||
| 840e8ba29b | |||
| 6a17949fdf | |||
| 0a0c47205f | |||
| fd3e795824 | |||
| 270bb21646 | |||
| d3f97358d9 | |||
| c44f0489bc | |||
| 9bd41ed5d7 | |||
| 0eca0c706f | |||
| 4da7a84c86 | |||
| 2b2910a1ac | |||
| 90c62664a6 | |||
| 18fe240b93 | |||
| bdff391440 | |||
| 46beaece75 | |||
| 15ce143356 | |||
| 7245dac1ca | |||
| 6748fd0006 | |||
| b2bd01117e | |||
| 31ca4f3d23 | |||
| 8326d8e7fe | |||
| 3e5ccaf8fd | |||
| 68b6e43649 | |||
| ca3d99c52c | |||
| 55ccd39960 | |||
| 61673ef273 | |||
| 82a2e9a523 | |||
| 36da289746 | |||
| 7f892601f4 | |||
| 1d08f6850b | |||
| 408db5e060 | |||
| 740e088cc5 | |||
| 4fdcd2a343 | |||
| 6e40b9a44a | |||
| c20aa41b5c | |||
| d96be99d1d | |||
| 6a9c27f8bf | |||
| 284d822a8a | |||
| 9d39995d0f | |||
| 36aad45b26 | |||
| 83a38ff50e | |||
| 67851752fa | |||
| 33c051bf28 | |||
| b6f7958dfe | |||
| acd7a40fe6 | |||
| 999c1a3fa6 | |||
| a574a75610 | |||
| af69b4fa58 | |||
| 01d8ad5ca2 | |||
| 57ce554feb | |||
| 72c01cc743 | |||
| 75a6de18b2 | |||
| fdab66a500 | |||
| dcd67b6114 | |||
| f6cf8d29f0 | |||
| 0b6969bf0c | |||
| b4a5cdec55 | |||
| 11b5be9d8e | |||
| 90883eddb9 | |||
| d240a979ba | |||
| 8f6966978f | |||
| 7f33826d1d | |||
| b09e4463bd | |||
| 1bfda7769a | |||
| bbdb731043 | |||
| 4381ed6671 | |||
| 2ca960a73f | |||
| 2433a1b981 | |||
| 2ed2aaf58f | |||
| 24d38fe52e | |||
| de8c6a8b63 | |||
| 96428d3c73 | |||
| 520f327f5a | |||
| f1b3409052 | |||
| 91660fefe4 | |||
| fdb9970792 | |||
| 1c6599fc30 | |||
| a938342611 | |||
| 656a878c6a | |||
| ef2cc44ceb | |||
| c7ba775048 | |||
| 85d6f76000 | |||
| ebe524ded3 | |||
| f4625cfe06 | |||
| 99131374cd | |||
| 04684c5f65 | |||
| d726e15ed8 | |||
| d73146ff62 | |||
| 8f73dab36e | |||
| 46612a9f68 | |||
| 766e766f50 | |||
| f47a45f9a3 | |||
| 8fb22931f5 | |||
| 8f9d21cdd8 | |||
| 054eced7de | |||
| 8ca6b4cdb0 | |||
| 01f81ce4d5 | |||
| ef1609efc2 | |||
| b7bf29c06e | |||
| dab8fbae6d | |||
| c545053b85 | |||
| 05ad824dcb | |||
| 92fe39ddac | |||
| 10732f9a10 | |||
| 7c6425ff4d | |||
| e0604fc217 | |||
| a7693aa78d | |||
| 20ca3f8ee4 | |||
| 6d37046933 | |||
| ae8c6e92fc | |||
| 1d1e588d57 | |||
| 8206186426 | |||
| d38f7fda3e | |||
| 88939ba51d | |||
| c15fffa11f | |||
| b77527e9a2 | |||
| 3bdd5197f5 | |||
| 199c6b1f77 | |||
| 3ea2504e8c | |||
| 8eee0d57b6 | |||
| 8a400c5c0f | |||
| fccd3e306e | |||
| d467f000a7 | |||
| 533544117b | |||
| 3b21027d6f | |||
| b7773e69c7 | |||
| 821f11393c | |||
| ca25078b30 | |||
| 785145e1c1 | |||
| e1646adff6 | |||
| d20f3d5668 | |||
| dfc224d8a9 | |||
| 5c7f37a100 | |||
| fc25ca9c03 | |||
| 6c4dd8ae02 | |||
| 9fdcfff4fc | |||
| 2bcc26860f | |||
| 1e012f860b | |||
| 87fe1887e8 | |||
| c8221ea0e4 | |||
| 517b283893 | |||
| b6a93452cd | |||
| 1cba014948 | |||
| b0d6f724ef | |||
| 0b218eb656 | |||
| 8ba4cc5c0c | |||
| 2ad65b0485 | |||
| ce7754ffe4 | |||
| 9cd3d97c75 | |||
| 7252055e4a | |||
| c086cff36e | |||
| eeba87d333 | |||
| cb7a23bc69 | |||
| 1bda98228c | |||
| 8b789d408e | |||
| d355e58f2f | |||
| b03e8d5bd7 | |||
| 76bb68dcc8 | |||
| 41eb5e256f | |||
| d2a8f2a47f | |||
| 43fa8f1a45 | |||
| fcf19de786 | |||
| 2233d015f3 | |||
| 8b25024f6f | |||
| 235a88d613 | |||
| 49a6a6a758 | |||
| 5a9a2c4268 | |||
| 491c5e1b8c | |||
| 3466320d98 | |||
| a8849ec823 | |||
| 0cea8d622d | |||
| a2df79d614 | |||
| 15d2f1ce18 | |||
| fee242ad07 | |||
| cde86db44e | |||
| 7d5785da62 | |||
| ac9aa59924 | |||
| cb0272fe60 | |||
| 8ef89d0b53 | |||
| e01f915abf | |||
| c6a138ff43 | |||
| 22d65f1e9c | |||
| 4f282c1587 | |||
| d651791f22 | |||
| 0cf57a633c | |||
| 5d6e0af605 | |||
| 1fe88819f4 | |||
| 8187a8a0dd | |||
| b6af919fa9 | |||
| 390d687f61 | |||
| 33d3ff07db | |||
| d0f10779f9 | |||
| 8427f03df4 | |||
| d66bfe6166 | |||
| 04f7869e8e | |||
| 545165c97f | |||
| 021c9e03bb | |||
| 53aabc8c84 | |||
| 3202b3f0b5 | |||
| e41fd36952 | |||
| 3636db1793 | |||
| 5732b1a938 | |||
| 3585443508 | |||
| 47a1db07dc | |||
| 99ed8fff5d | |||
| a6ab7dda97 | |||
| 6564fa000d | |||
| bb33ddd9fb | |||
| 5fbe62105a | |||
| 771b93b169 | |||
| f13310d71b | |||
| 243442694c | |||
| a4addbfd22 | |||
| e983d0756d | |||
| 52cd30b947 | |||
| ed55a73d04 | |||
| 4f3fc91c0a | |||
| 5c3524706f | |||
| 2290e019e6 | |||
| 6fe6345b13 | |||
| 53fe047bca | |||
| d9c3705c47 | |||
| 88ffd64706 | |||
| a897282400 | |||
| cfbb05530a | |||
| 72f440f509 | |||
| 9c3667b51f | |||
| 6ffbd52a36 | |||
| 49feaa8281 | |||
| 72507d7bb2 | |||
| 3bcf4eaebd | |||
| d637e322f7 | |||
| 43cb7a5f65 | |||
| 7fd7fcc16c | |||
| e01197a5d5 | |||
| be4ce024f4 | |||
| 98527c72f4 | |||
| 053e681d00 | |||
| 6d8870ba25 | |||
| 01dab4188c | |||
| eca5f48d41 | |||
| 73b50556be | |||
| e724c9c2ad | |||
| f42334453c | |||
| 2603c4f44f | |||
| bd93625119 | |||
| 8fe6b472e4 | |||
| 68c7a88fed | |||
| f9e10976b8 | |||
| f17453ba10 | |||
| c57aefd48b | |||
| 389413ab8a | |||
| a2ffb8a342 | |||
| 6f0a03ce60 | |||
| 053848607d | |||
| 94a0868ba5 | |||
| c6d8a4c46f | |||
| 58a34d6cdb | |||
| b0c7997128 | |||
| 1bf2a87726 | |||
| d3cd6a884e | |||
| 9870979c54 | |||
| bb067f77c3 | |||
| da7be50c61 | |||
| e4c5743852 | |||
| e9233ff7c0 | |||
| 2d01cd8761 | |||
| 45a44d08eb | |||
| ddc52b969e | |||
| 8ad2ee8729 | |||
| d160999535 | |||
| e4fff8df81 | |||
| 923cbe62d1 | |||
| 0612bd1a21 | |||
| a1fd48752e | |||
| 0e48c18579 | |||
| db1e3239e8 | |||
| 34353f773b | |||
| ad0253a461 | |||
| 6a541e0662 | |||
| ec42d033b3 | |||
| 8fd577c733 | |||
| 4eeccb6107 | |||
| 6688ae66fa | |||
| 38e960eb3f | |||
| dbde681aff | |||
| 76a5555ff4 | |||
| 7d740c74e3 | |||
| 58b8cea757 | |||
| dc75980eba | |||
| f311dbbe30 | |||
| b5f0c59511 | |||
| bc5a8fba61 | |||
| 1e6180be48 | |||
| ebe3118f79 | |||
| 714455a4eb | |||
| 0089a8a98e | |||
| 9fa4da284b | |||
| e3f1f101e2 | |||
| 386571157a | |||
| 5012d19180 | |||
| 54b5e8e3d4 | |||
| 5e0e71aa1c | |||
| bd976e6c2e | |||
| 9cce441f6c | |||
| 6af0cd2be8 | |||
| 82a1e0796c | |||
| 8d0c0894cb | |||
| b98771f067 | |||
| b2cfe46438 | |||
| 53ac29cfd1 | |||
| 4e153413d9 | |||
| 7d48c02fa3 | |||
| 5df68c4ead | |||
| 1563f4142d | |||
| 1177f516ba | |||
| 2d18057c6e | |||
| 970376ed56 | |||
| e2ea8d491b | |||
| 62954ac157 | |||
| 1c70d80b99 | |||
| dc50d6e941 | |||
| 8377dc63c0 | |||
| 54bb506e10 | |||
| d4facbf2e3 | |||
| ddc8fc0f5e | |||
| 82d2921424 | |||
| 5d137ac997 | |||
| b06e17acf0 | |||
| c272b5d4bb | |||
| eba28b4eb4 | |||
| 44f473221f | |||
| 3be47056a0 | |||
| be6746f7ab | |||
| 8cb506731f | |||
| e09649586f | |||
| 230c15d51c | |||
| c4a765e73b | |||
| 18eb84fa9d | |||
| 1a505bb0a2 | |||
| 86caa3a044 | |||
| 3263f0a551 | |||
| 8aa6bb61dd | |||
| f89bec5089 | |||
| 896784a351 | |||
| 6488f4677e | |||
| f650835c07 | |||
| 33feaa81ca | |||
| 16bf193b0e | |||
| ce59bc3335 | |||
| 46d834f5aa | |||
| 4be05c8b57 | |||
| 5ce02bbbfe | |||
| d01b498ad5 | |||
| b1d69b2304 | |||
| 9ac870e0a5 | |||
| a8511c9db5 | |||
| ab7d623d27 | |||
| 901fea3361 | |||
| 3329788be8 | |||
| 303a33c3bf | |||
| b769cc6c3c | |||
| 4e5ce71a52 | |||
| 7dbd0c7896 | |||
| d5a55f71a7 | |||
| ebcd280886 | |||
| 216b9278af | |||
| eb467474f8 | |||
| e5b80297ba | |||
| 6b32c4ede8 | |||
| 2db0ee11db | |||
| 146bf97432 | |||
| fc5177d58b | |||
| 97564022c3 | |||
| 941fd8dd9c | |||
| b6b7aad046 | |||
| a8e912fcb1 | |||
| 903bb6cfe9 | |||
| 94efdcd9b5 | |||
| b8153b5c9a | |||
| 923b4bd9d6 | |||
| 4c25248e5f | |||
| 06dd24bcf9 | |||
| 941ec94313 | |||
| adcbebd7b6 | |||
| 4a68821dee | |||
| 4b1bb6283e | |||
| 3a3b3c3269 | |||
| fe64a0c70e | |||
| ea9af92bd4 | |||
| dd3cfe80ef | |||
| 314f0b8dbe | |||
| 6a4f571b05 | |||
| 30b3d6abaf | |||
| 01c8f6cdae | |||
| 8318d4b1f6 | |||
| d6dd0d69d0 | |||
| 2d3b54b80b | |||
| 89f24ac4e6 | |||
| 7890cd2622 | |||
| b6bd40f956 | |||
| c0b380f215 | |||
| 811236cff4 | |||
| 62542f0329 | |||
| 6825b8bf74 | |||
| bdd603fb17 | |||
| 86e76aaa5f | |||
| 88af86ae61 | |||
| a6f56a4811 | |||
| c7c867f1c7 | |||
| add2538655 | |||
| ff1b14d980 | |||
| a12838032f | |||
| 6edd0e2f8d | |||
| 398bbbc316 | |||
| 41a82c44c5 | |||
| cecf81a07d | |||
| 45275c3831 | |||
| 99866542a1 | |||
| 6b3e523036 | |||
| 6e9a539d61 | |||
| c9b2eb25b2 | |||
| 23b856b073 | |||
| e0520e265d | |||
| 9b9e0488d3 | |||
| c827dc4ed2 | |||
| d48a308cc6 | |||
| b9ae4396e5 | |||
| 95dc799692 | |||
| be12bd7d99 | |||
| 0445c3bd86 | |||
| 694e04cc50 | |||
| 598b59c0c6 | |||
| 1e1bd83baf | |||
| d41e2bbce4 | |||
| 5f607fa2cf | |||
| cd417bb9db | |||
| 65518f2208 | |||
| 30902bc9cd | |||
| 37b1a6e0c1 | |||
| f1edb901d1 | |||
| a5d11ec31b | |||
| 2bf95223b7 | |||
| 08294e2f14 | |||
| 1c9f425a40 | |||
| 4116357447 | |||
| e9cb235640 | |||
| 64eab850c5 | |||
| c6eebc4eae | |||
| 1bd6e07fe2 | |||
| 1ad13d048f | |||
| cfde720b1d | |||
| 5d17d892a4 | |||
| 40172bf8b5 | |||
| 72404d0fd9 | |||
| b53d485960 | |||
| cf51d1a280 | |||
| efc1890871 | |||
| 93d534fe94 | |||
| 87afbc52f6 | |||
| 15de24214a | |||
| 2b09711eb0 | |||
| 566f2a4835 | |||
| 575a7ed1a7 | |||
| 566a23d3b6 | |||
| 6473ad3de7 | |||
| 6285e81883 | |||
| 4c896ae6b7 | |||
| b3aee6c8bc | |||
| e18ed79c7b | |||
| 9aa73c5306 | |||
| e430b2b9a1 | |||
| 502e494083 | |||
| e752a5dc87 | |||
| 5ec76f8801 | |||
| 7f336cb47c | |||
| bd13d89817 | |||
| 8a8f654657 | |||
| b88594958d | |||
| 3d1757476a | |||
| c9af6fe44f | |||
| 5c21443f9b | |||
| 758df5bc26 | |||
| 4c273671e4 | |||
| f3e79bcfb5 | |||
| b04abef20f | |||
| 73d4a77881 | |||
| c974b388b6 | |||
| 66ebfc669a | |||
| e061d6c29d | |||
| 08cb2b8438 | |||
| a19ad751a8 | |||
| fc4ab9d6ba | |||
| 2482381999 | |||
| 6e6ea6082d | |||
| 50108201cf | |||
| 964526924d | |||
| 05f6fb6016 | |||
| 7ed053b6c1 | |||
| 8e02de32ab | |||
| 8d94901bab | |||
| 13b67702b4 | |||
| cdb148fc0d | |||
| 4fd193ba2b | |||
| efa2321ac3 | |||
| 94d12c6b66 | |||
| 73c5032a48 | |||
| 5955b71459 | |||
| b091044bc2 | |||
| 3a500f3ea3 | |||
| de96e8e189 | |||
| 10e14e8c24 | |||
| 6f7aa86e8b | |||
| 1b6fbb940b | |||
| df40560c5a | |||
| 3713f1eee2 | |||
| 07ca1ad16e | |||
| de5557a68f | |||
| 93087f7fa9 | |||
| 41bc4b6a6f | |||
| 36a5190349 | |||
| 293961097f | |||
| 740df09b9d | |||
| 263c274774 | |||
| a9c506c290 | |||
| 85f2c6093d | |||
| 91807ad022 | |||
| 5ce78a2fb3 | |||
| 155ddf243a | |||
| 31b71eb244 | |||
| 1ce4654673 | |||
| 72cbd2ce2b | |||
| 3d273d4284 | |||
| d920d9e9fe | |||
| c2bc52718a | |||
| c3ea8228a1 | |||
| 5047803e3c | |||
| 1c566c6259 | |||
| 2ad8c5186c | |||
| d6506b6402 | |||
| 5bde262ad7 | |||
| 27ad3459c1 | |||
| 6f16e20bf6 | |||
| 3e67db50f0 | |||
| c7e225e81b | |||
| 878bf22695 | |||
| d0c30cfeca | |||
| 5af54237b9 | |||
| e7d268bde6 | |||
| ed4ee5228a | |||
| f152521041 | |||
| 2b36b4c514 | |||
| 5f2153ae5a | |||
| 22301a0e9a | |||
| 96ffee49cf | |||
| 8c38861c2f | |||
| 80a0643747 | |||
| 92536aa7ac | |||
| 7ac9203753 | |||
| d67f65019e | |||
| a806f294b2 | |||
| bfbcdee926 | |||
| 34b9be4efa | |||
| 2c78697e80 | |||
| f953f1e51b | |||
| 207dacf816 | |||
| add1c8b9b3 | |||
| 85922f4493 | |||
| a7d3ba24b4 | |||
| 10c584daab | |||
| fe56b04f7d | |||
| 43d4a36a0d | |||
| 12a8a34bdc | |||
| 85b6ff2d7f | |||
| 4edbcd6a98 | |||
| 2ba52e2467 | |||
| b7bccfeee3 | |||
| 43a04880e0 | |||
| 0cf96e47e8 | |||
| 7fbe648d99 | |||
| 1ce13b71a3 | |||
| 4e268510f2 | |||
| 1f2dd34683 | |||
| c50f745744 | |||
| 5e721c61b9 | |||
| 8c1dbf984b | |||
| 38df6e873b | |||
| ef2d8d14a0 | |||
| 1f0a855510 | |||
| 50d62f17b8 | |||
| c12b646b09 | |||
| cbe182a298 | |||
| 59a95450e5 | |||
| e4f221ab13 | |||
| 4ad82a1eb3 | |||
| 47b81055fd | |||
| 19d9684a67 | |||
| 091c70e735 | |||
| abcb0c27a0 | |||
| 71403e6f28 | |||
| 9a10c86329 | |||
| fdb9154b85 | |||
| 2703617179 | |||
| 9f752e2584 | |||
| 82437724cc | |||
| 080d98f57c | |||
| ad9a981cda | |||
| 7c5aef033d | |||
| d2f548faf9 | |||
| 203c6418fd | |||
| 736686390f | |||
| c66a6e67c8 | |||
| a139e8c41a | |||
| ee7af393a0 | |||
| a6617cae68 | |||
| 319f815985 | |||
| c77a6b9d21 | |||
| c854ccbb4b | |||
| fdaf4af31a | |||
| 69d7c5c5ce | |||
| 095f1cda0c | |||
| c3eb97e24c | |||
| b1b91a7b29 | |||
| 870800e8d2 | |||
| 379d40837b | |||
| 10a40ddabd | |||
| 118de12712 | |||
| cfa31ab542 | |||
| 47ba5e18a3 | |||
| 1a54b723aa | |||
| d0557445cd | |||
| 1ad3e7910c | |||
| 49895805e3 | |||
| bd3f8d597a | |||
| c711a04e6c | |||
| 1cb7a92e40 | |||
| d8640df115 | |||
| 4c704c8cda | |||
| 6f9feea8a9 | |||
| dde5526022 | |||
| 34a6fb064c | |||
| cecc9bc7fb | |||
| 9ccf4474bc | |||
| 3622fad9c2 | |||
| c846ee9823 | |||
| fd541d2243 | |||
| fcaa4fb936 | |||
| ddfb69edb2 | |||
| fac56943ee | |||
| 8330488c0a | |||
| 6001f1936c | |||
| 46c019a758 | |||
| b2ed441bf7 | |||
| 373be03c80 | |||
| ce5e27814a | |||
| 24468159e7 | |||
| 7153c8adde | |||
| 41414319a5 | |||
| a3513aa45c | |||
| 1ed31d26e5 | |||
| b36dc63335 | |||
| c02c6d89b0 | |||
| 563bb2c244 | |||
| 6b526f6b53 | |||
| cce388049d | |||
| fdfdd38c38 | |||
| 56b631d4a2 | |||
| 6eadc09c10 | |||
| 1f3d699a0c | |||
| 0eb2f17624 | |||
| 47eba6786a | |||
| ec1db0a725 | |||
| 04a235ca12 | |||
| 5b59d6b6b1 | |||
| 0e6bdf9467 | |||
| 6d1f81b26a | |||
| a000176db9 | |||
| 9f49140230 | |||
| 8934eb8b8d | |||
| 26f8a3fec7 | |||
| 49500570d4 | |||
| b2b3e8acf1 | |||
| 67bce48de8 | |||
| 3d1c8625a3 | |||
| 41600f208e | |||
| ef6e76f153 | |||
| ae5009c9ef | |||
| dcdcd99393 | |||
| 2e41c074b5 | |||
| c2206e6b27 | |||
| c58d616bfc | |||
| fb25e3e03f | |||
| 16b2b119fd | |||
| 4e138d38a1 | |||
| bced6cae68 | |||
| e0c525389f | |||
| 663627358f | |||
| 10b3318419 | |||
| c2c19bd6f3 | |||
| 1266d4b5d9 | |||
| e6fb64621f | |||
| fde50f6525 | |||
| f19be0ae17 | |||
| b70bc07a75 | |||
| 508a28aeae | |||
| ba5923044a | |||
| c52bfd2236 | |||
| bfb5f4f44e | |||
| 1eb1cd7b64 | |||
| 9abdd1ee90 | |||
| db98c681a2 | |||
| a7cd515fdb | |||
| 7967b80ab3 | |||
| 4e36ea5aae | |||
| 47ca614ea3 | |||
| 662f1276d2 | |||
| a0a8257df0 | |||
| 3fe9472d27 | |||
| a9e40ccf10 | |||
| 6fb58a3f26 | |||
| 86b13557fb | |||
| 60c608d095 | |||
| 72b002491a | |||
| 82157af84b | |||
| b454b011b0 | |||
| b65dd49aa6 | |||
| 7c055b5f56 | |||
| 2d6bdd350f | |||
| eadb91abc9 | |||
| 3e7152bb93 | |||
| 647ae0f3d6 | |||
| e5b4ce0890 | |||
| 27df603299 | |||
| 4d6bf65a99 | |||
| c06533db5b | |||
| b3b9c68486 | |||
| aecab0561e | |||

**.gitignore** (4 changes, vendored; the per-line add/remove markers did not survive extraction)

```
@@ -12,6 +12,7 @@ config_local.py

/build
/.cache
/.pytest_cache/
/*.egg-info/
profile.stats
/dump/
@@ -22,8 +23,11 @@ profile.stats
*.css.map
*.js.map

/translations/*/LC_MESSAGES/*.mo

pillar/web/static/assets/css/*.css
pillar/web/static/assets/js/*.min.js
pillar/web/static/assets/js/vendor/video.min.js
pillar/web/static/storage/
pillar/web/static/uploads/
pillar/web/templates/
```
**README.md** (new file, +84 lines)

````md
Pillar
======

This is the latest iteration on the Attract project. We are building a unified
framework called Pillar. Pillar will combine Blender Cloud and Attract. You
can see Pillar in action on the [Blender Cloud](https://cloud.blender.org).

## Custom fonts

The icons on the website are drawn using a custom font, stored in
[pillar/web/static/font](pillar/web/static/font).
This font is generated via [Fontello](http://fontello.com/) by uploading
[pillar/web/static/font/config.json](pillar/web/static/font/config.json).

Note that we only use the WOFF and WOFF2 formats, and discard the others
supplied by Fontello.

After replacing the font files & `config.json`, edit the Fontello-supplied
`font.css` to remove all font formats except `woff` and `woff2`. Then upload
it to [css2sass](http://css2sass.herokuapp.com/) to convert it to SASS, and
place it in [src/styles/font-pillar.sass](src/styles/font-pillar.sass).

Don't forget to Gulp!


## Installation

Make sure your /data directory exists and is writable by the current user.
Alternatively, provide a `pillar/config_local.py` that changes the relevant
settings.

```
git clone git@git.blender.org:pillar-python-sdk.git ../pillar-python-sdk
pip install -e ../pillar-python-sdk
pip install -U -r requirements.txt
pip install -e .
```

## HDRi viewer

The HDRi viewer uses [Google VRView](https://github.com/googlevr/vrview). To upgrade,
get these files:

* [three.min.js](https://raw.githubusercontent.com/googlevr/vrview/master/build/three.min.js)
* [embed.min.js](https://raw.githubusercontent.com/googlevr/vrview/master/build/embed.min.js)
* [loading.gif](https://raw.githubusercontent.com/googlevr/vrview/master/images/loading.gif)

and place them in `pillar/web/static/assets/vrview`. Replace `images/loading.gif` in
`embed.min.js` with `static/pillar/assets/vrview/loading.gif`.

You may also want to compare their
[index.html](https://raw.githubusercontent.com/googlevr/vrview/master/index.html) to our
`src/templates/vrview.pug`.

When on an HDRi page with the viewer embedded, use this JavaScript code to find the current
yaw: `vrview_window.contentWindow.yaw()`. This can be passed as the `default_yaw` parameter to
the iframe.

## Celery

Pillar requires [Celery](http://www.celeryproject.org/) for background task processing. This in
turn requires a backend and a broker, for which the default Pillar configuration uses Redis and
RabbitMQ.

You can run the Celery Worker using `manage.py celery worker`.

Find other Celery operations with the `manage.py celery` command.

## Elasticsearch

Pillar uses [Elasticsearch](https://www.elastic.co/products/elasticsearch) to power the search engine.
You will need to run the `manage.py elastic reset_index` command to initialize the indexing.
If you need to reindex your documents in Elastic, run the `manage.py elastic reindex` command.

## Translations

If the language you want to support doesn't exist, you need to run: `translations init es_AR`.

Every time a new string is marked for translation you need to update the entire catalog: `translations update`

And once more strings are translated, you need to compile the translations: `translations compile`

*To mark strings for translation in Python scripts you need to
wrap them with the `flask_babel.gettext` function.
For .pug templates wrap them with `_()`.*
````
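The yaw round-trip described in the HDRi viewer section can be scripted. A minimal sketch, assuming `vrview_window` holds the viewer's `<iframe>` element; only `yaw()` and the `default_yaw` parameter come from the README, while the selector and embed path below are hypothetical:

```js
// Sketch only: read the current yaw from the embedded VRView viewer and
// build a URL that would restore it via the `default_yaw` parameter.
// The iframe selector and embed path are hypothetical.
let vrview_window = document.querySelector('iframe.vrview');
let currentYaw = vrview_window.contentWindow.yaw();  // per the README

let embedUrl = '/static/pillar/assets/vrview/embed.html' +
    '?default_yaw=' + encodeURIComponent(currentYaw);
console.log('re-embed with', embedUrl);
```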
**(file header missing; MongoDB dump script)**

```diff
@@ -1,3 +1,3 @@
 #!/bin/bash -ex
 
-mongodump -h localhost:27018 -d cloud --out dump/$(date +'%Y-%m-%d-%H%M') --excludeCollection tokens
+mongodump -h localhost:27018 -d cloud --out dump/$(date +'%Y-%m-%d-%H%M') --excludeCollection tokens --excludeCollection flamenco_task_logs
```
**(file header missing; deleted dummy deploy script)**

```diff
@@ -1,8 +0,0 @@
-#!/bin/bash
-
-echo
-echo "==========================================================================="
-echo "Dummy deploy script for people with a 'git pp' alias to push to production."
-echo "Run deploy script on your server project."
-echo "When done, press [ENTER] to stop this script."
-read dummy
```
**gulpfile.js** (221 changes)

```diff
@@ -1,29 +1,50 @@
-var argv         = require('minimist')(process.argv.slice(2));
-var autoprefixer = require('gulp-autoprefixer');
-var chmod        = require('gulp-chmod');
-var concat       = require('gulp-concat');
-var gulp         = require('gulp');
-var gulpif       = require('gulp-if');
-var jade         = require('gulp-jade');
-var livereload   = require('gulp-livereload');
-var plumber      = require('gulp-plumber');
-var rename       = require('gulp-rename');
-var sass         = require('gulp-sass');
-var sourcemaps   = require('gulp-sourcemaps');
-var uglify       = require('gulp-uglify');
-var cache        = require('gulp-cached');
+let argv         = require('minimist')(process.argv.slice(2));
+let autoprefixer = require('gulp-autoprefixer');
+let cache        = require('gulp-cached');
+let chmod        = require('gulp-chmod');
+let concat       = require('gulp-concat');
+let git          = require('gulp-git');
+let gulpif       = require('gulp-if');
+let gulp         = require('gulp');
+let livereload   = require('gulp-livereload');
+let plumber      = require('gulp-plumber');
+let pug          = require('gulp-pug');
+let rename       = require('gulp-rename');
+let sass         = require('gulp-sass');
+let sourcemaps   = require('gulp-sourcemaps');
+let uglify       = require('gulp-uglify-es').default;
+let browserify   = require('browserify');
+let babelify     = require('babelify');
+let sourceStream = require('vinyl-source-stream');
+let glob         = require('glob');
+let es           = require('event-stream');
+let path         = require('path');
+let buffer = require('vinyl-buffer');
 
-var enabled = {
+let enabled = {
     uglify: argv.production,
-    maps: argv.production,
-    failCheck: argv.production,
+    maps: !argv.production,
+    failCheck: !argv.production,
     prettyPug: !argv.production,
-    liveReload: !argv.production
+    cachify: !argv.production,
+    cleanup: argv.production,
+    chmod: argv.production,
 };
 
+let destination = {
+    css: 'pillar/web/static/assets/css',
+    pug: 'pillar/web/templates',
+    js: 'pillar/web/static/assets/js',
+}
+
+let source = {
+    bootstrap: 'node_modules/bootstrap/',
+    jquery: 'node_modules/jquery/',
+    popper: 'node_modules/popper.js/'
+}
+
-/* CSS */
-gulp.task('styles', function() {
+/* Stylesheets */
+gulp.task('styles', function(done) {
     gulp.src('src/styles/**/*.sass')
         .pipe(gulpif(enabled.failCheck, plumber()))
         .pipe(gulpif(enabled.maps, sourcemaps.init()))
@@ -32,80 +53,164 @@ gulp.task('styles', function() {
             ))
         .pipe(autoprefixer("last 3 versions"))
         .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
-        .pipe(gulp.dest('pillar/web/static/assets/css'))
-        .pipe(gulpif(enabled.liveReload, livereload()));
+        .pipe(gulp.dest(destination.css))
+        .pipe(gulpif(argv.livereload, livereload()));
+    done();
 });
 
 
-/* Templates - Jade */
-gulp.task('templates', function() {
-    gulp.src('src/templates/**/*.jade')
+/* Templates */
+gulp.task('templates', function(done) {
+    gulp.src('src/templates/**/*.pug')
         .pipe(gulpif(enabled.failCheck, plumber()))
-        .pipe(cache('templating'))
-        .pipe(jade({
+        .pipe(gulpif(enabled.cachify, cache('templating')))
+        .pipe(pug({
             pretty: enabled.prettyPug
         }))
-        .pipe(gulp.dest('pillar/web/templates/'))
-        .pipe(gulpif(enabled.liveReload, livereload()));
+        .pipe(gulp.dest(destination.pug))
+        .pipe(gulpif(argv.livereload, livereload()));
+    done();
 });
 
 
 /* Individual Uglified Scripts */
-gulp.task('scripts', function() {
+gulp.task('scripts', function(done) {
     gulp.src('src/scripts/*.js')
         .pipe(gulpif(enabled.failCheck, plumber()))
-        .pipe(cache('scripting'))
+        .pipe(gulpif(enabled.cachify, cache('scripting')))
        .pipe(gulpif(enabled.maps, sourcemaps.init()))
         .pipe(gulpif(enabled.uglify, uglify()))
         .pipe(rename({suffix: '.min'}))
         .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
-        .pipe(chmod(644))
-        .pipe(gulp.dest('pillar/web/static/assets/js/'))
-        .pipe(gulpif(enabled.liveReload, livereload()));
+        .pipe(gulpif(enabled.chmod, chmod(0o644)))
+        .pipe(gulp.dest(destination.js))
+        .pipe(gulpif(argv.livereload, livereload()));
+    done();
 });
 
+function browserify_base(entry) {
+    let pathSplited = path.dirname(entry).split(path.sep);
+    let moduleName = pathSplited[pathSplited.length - 1];
+    return browserify({
+        entries: [entry],
+        standalone: 'pillar.' + moduleName,
+    })
+    .transform(babelify, { "presets": ["@babel/preset-env"] })
+    .bundle()
+    .pipe(gulpif(enabled.failCheck, plumber()))
+    .pipe(sourceStream(path.basename(entry)))
+    .pipe(buffer())
+    .pipe(rename({
+        basename: moduleName,
+        extname: '.min.js'
+    }));
+}
+
+function browserify_common() {
+    return glob.sync('src/scripts/js/es6/common/**/init.js').map(browserify_base);
+}
+
+gulp.task('scripts_browserify', function(done) {
+    glob('src/scripts/js/es6/individual/**/init.js', function(err, files) {
+        if(err) done(err);
+
+        var tasks = files.map(function(entry) {
+            return browserify_base(entry)
+            .pipe(gulpif(enabled.maps, sourcemaps.init()))
+            .pipe(gulpif(enabled.uglify, uglify()))
+            .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
+            .pipe(gulp.dest(destination.js));
+        });
+
+        es.merge(tasks).on('end', done);
+    })
+});
 
 
-/* Collection of scripts in src/scripts/tutti/ to merge into tutti.min.js */
-/* Since it's always loaded, it's only for functions that we want site-wide */
-gulp.task('scripts_concat_tutti', function() {
-    gulp.src('src/scripts/tutti/**/*.js')
+/* Collection of scripts in src/scripts/tutti/ to merge into tutti.min.js
+ * Since it's always loaded, it's only for functions that we want site-wide.
+ * It also includes jQuery and Bootstrap (and its dependency popper), since
+ * the site doesn't work without it anyway.*/
+gulp.task('scripts_concat_tutti', function(done) {
+
+    let toUglify = [
+        source.jquery    + 'dist/jquery.min.js',
+        source.popper    + 'dist/umd/popper.min.js',
+        source.bootstrap + 'js/dist/index.js',
+        source.bootstrap + 'js/dist/util.js',
+        source.bootstrap + 'js/dist/alert.js',
+        source.bootstrap + 'js/dist/collapse.js',
+        source.bootstrap + 'js/dist/dropdown.js',
+        source.bootstrap + 'js/dist/tooltip.js',
+        'src/scripts/tutti/**/*.js'
+    ];
+
+    es.merge(gulp.src(toUglify), ...browserify_common())
         .pipe(gulpif(enabled.failCheck, plumber()))
         .pipe(gulpif(enabled.maps, sourcemaps.init()))
         .pipe(concat("tutti.min.js"))
         .pipe(gulpif(enabled.uglify, uglify()))
         .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
-        .pipe(chmod(644))
-        .pipe(gulp.dest('pillar/web/static/assets/js/'))
-        .pipe(gulpif(enabled.liveReload, livereload()));
+        .pipe(gulpif(enabled.chmod, chmod(0o644)))
+        .pipe(gulp.dest(destination.js))
+        .pipe(gulpif(argv.livereload, livereload()));
+    done();
 });
 
-gulp.task('scripts_concat_markdown', function() {
-    gulp.src('src/scripts/markdown/**/*.js')
-        .pipe(gulpif(enabled.failCheck, plumber()))
-        .pipe(gulpif(enabled.maps, sourcemaps.init()))
-        .pipe(concat("markdown.min.js"))
-        .pipe(gulpif(enabled.uglify, uglify()))
-        .pipe(gulpif(enabled.maps, sourcemaps.write(".")))
-        .pipe(chmod(644))
-        .pipe(gulp.dest('pillar/web/static/assets/js/'))
-        .pipe(gulpif(enabled.liveReload, livereload()));
+
+/* Simply move these vendor scripts from node_modules. */
+gulp.task('scripts_move_vendor', function(done) {
+
+    let toMove = [
+    'node_modules/video.js/dist/video.min.js',
+    ];
+
+    gulp.src(toMove)
+        .pipe(gulp.dest(destination.js + '/vendor/'));
+    done();
 });
 
 
 // While developing, run 'gulp watch'
-gulp.task('watch',function() {
+gulp.task('watch',function(done) {
     // Only listen for live reloads if ran with --livereload
     if (argv.livereload){
         livereload.listen();
     }
 
-    gulp.watch('src/styles/**/*.sass',['styles']);
-    gulp.watch('src/templates/**/*.jade',['templates']);
-    gulp.watch('src/scripts/*.js',['scripts']);
-    gulp.watch('src/scripts/tutti/**/*.js',['scripts_concat_tutti']);
-    gulp.watch('src/scripts/markdown/**/*.js',['scripts_concat_markdown']);
+    gulp.watch('src/styles/**/*.sass',gulp.series('styles'));
+    gulp.watch('src/templates/**/*.pug',gulp.series('templates'));
+    gulp.watch('src/scripts/*.js',gulp.series('scripts'));
+    gulp.watch('src/scripts/tutti/**/*.js',gulp.series('scripts_concat_tutti'));
+    gulp.watch('src/scripts/js/**/*.js',gulp.series(['scripts_browserify', 'scripts_concat_tutti']));
+    done();
 });
 
 
+// Erases all generated files in output directories.
+gulp.task('cleanup', function(done) {
+    let paths = [];
+    for (attr in destination) {
+        paths.push(destination[attr]);
+    }
+
+    git.clean({ args: '-f -X ' + paths.join(' ') }, function (err) {
+        if(err) throw err;
+    });
+    done();
+});
+
+
 // Run 'gulp' to build everything at once
-gulp.task('default', ['styles', 'templates', 'scripts', 'scripts_concat_tutti', 'scripts_concat_markdown']);
+let tasks = [];
+if (enabled.cleanup) tasks.push('cleanup');
+// gulp.task('default', gulp.parallel('styles', 'templates', 'scripts', 'scripts_tutti'));
+
+gulp.task('default', gulp.parallel(tasks.concat([
+    'styles',
+    'templates',
+    'scripts',
+    'scripts_concat_tutti',
+    'scripts_move_vendor',
+    'scripts_browserify',
+])));
```
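Since `browserify_base` sets `standalone: 'pillar.' + moduleName`, each bundled `init.js` is exposed as a browser global under `pillar.*`. A minimal sketch of that contract, using a hypothetical `notifications` module directory; only the `standalone` naming scheme comes from the gulpfile above:

```js
// Hypothetical source file: src/scripts/js/es6/common/notifications/init.js
// babelify transpiles the ES6 module syntax; browserify's `standalone`
// option then exposes the exports as the global `pillar.notifications`.
export function show(message) {
    console.log('notification:', message);
}

// In a page script, once the bundle (e.g. tutti.min.js) has loaded:
//   pillar.notifications.show('Job done');
```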
**jest.config.js** (new file, +180 lines)

```js
// For a detailed explanation regarding each configuration property, visit:
// https://jestjs.io/docs/en/configuration.html

module.exports = {
  // All imported modules in your tests should be mocked automatically
  // automock: false,

  // Stop running tests after the first failure
  // bail: false,

  // Respect "browser" field in package.json when resolving modules
  // browser: false,

  // The directory where Jest should store its cached dependency information
  // cacheDirectory: "/tmp/jest_rs",

  // Automatically clear mock calls and instances between every test
  clearMocks: true,

  // Indicates whether the coverage information should be collected while executing the test
  // collectCoverage: false,

  // An array of glob patterns indicating a set of files for which coverage information should be collected
  // collectCoverageFrom: null,

  // The directory where Jest should output its coverage files
  // coverageDirectory: null,

  // An array of regexp pattern strings used to skip coverage collection
  // coveragePathIgnorePatterns: [
  //   "/node_modules/"
  // ],

  // A list of reporter names that Jest uses when writing coverage reports
  // coverageReporters: [
  //   "json",
  //   "text",
  //   "lcov",
  //   "clover"
  // ],

  // An object that configures minimum threshold enforcement for coverage results
  // coverageThreshold: null,

  // Make calling deprecated APIs throw helpful error messages
  // errorOnDeprecated: false,

  // Force coverage collection from ignored files using an array of glob patterns
  // forceCoverageMatch: [],

  // A path to a module which exports an async function that is triggered once before all test suites
  // globalSetup: null,

  // A path to a module which exports an async function that is triggered once after all test suites
  // globalTeardown: null,

  // A set of global variables that need to be available in all test environments
  // globals: {},

  // An array of directory names to be searched recursively up from the requiring module's location
  // moduleDirectories: [
  //   "node_modules"
  // ],

  // An array of file extensions your modules use
  // moduleFileExtensions: [
  //   "js",
  //   "json",
  //   "jsx",
  //   "node"
  // ],

  // A map from regular expressions to module names that allow to stub out resources with a single module
  // moduleNameMapper: {},

  // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
  // modulePathIgnorePatterns: [],

  // Activates notifications for test results
  // notify: false,

  // An enum that specifies notification mode. Requires { notify: true }
  // notifyMode: "always",

  // A preset that is used as a base for Jest's configuration
  // preset: null,

  // Run tests from one or more projects
  // projects: null,

  // Use this configuration option to add custom reporters to Jest
  // reporters: undefined,

  // Automatically reset mock state between every test
  // resetMocks: false,

  // Reset the module registry before running each individual test
  // resetModules: false,

  // A path to a custom resolver
  // resolver: null,

  // Automatically restore mock state between every test
  // restoreMocks: false,

  // The root directory that Jest should scan for tests and modules within
  // rootDir: null,

  // A list of paths to directories that Jest should use to search for files in
  // roots: [
  //   "<rootDir>"
  // ],

  // Allows you to use a custom runner instead of Jest's default test runner
  // runner: "jest-runner",

  // The paths to modules that run some code to configure or set up the testing environment before each test
  setupFiles: ["<rootDir>/src/scripts/js/es6/test_config/test-env.js"],

  // The path to a module that runs some code to configure or set up the testing framework before each test
  // setupTestFrameworkScriptFile: null,

  // A list of paths to snapshot serializer modules Jest should use for snapshot testing
  // snapshotSerializers: [],

  // The test environment that will be used for testing
  testEnvironment: "jsdom",

  // Options that will be passed to the testEnvironment
  // testEnvironmentOptions: {},

  // Adds a location field to test results
  // testLocationInResults: false,

  // The glob patterns Jest uses to detect test files
  // testMatch: [
  //   "**/__tests__/**/*.js?(x)",
  //   "**/?(*.)+(spec|test).js?(x)"
  // ],

  // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
  // testPathIgnorePatterns: [
  //   "/node_modules/"
  // ],

  // The regexp pattern Jest uses to detect test files
  // testRegex: "",

  // This option allows the use of a custom results processor
  // testResultsProcessor: null,

  // This option allows use of a custom test runner
  // testRunner: "jasmine2",

  // This option sets the URL for the jsdom environment. It is reflected in properties such as location.href
  // testURL: "http://localhost",

  // Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout"
  // timers: "real",

  // A map from regular expressions to paths to transformers
  // transform: null,

  // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
  // transformIgnorePatterns: [
  //   "/node_modules/"
  // ],

  // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
  // unmockedModulePathPatterns: undefined,

  // Indicates whether each individual test should be reported during the run
  // verbose: null,

  // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
  // watchPathIgnorePatterns: [],

  // Whether to use watchman for file crawling
  // watchman: true,
};
```
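With `testMatch` left at its commented-out default, any `*.test.js` file under the source tree is picked up, run in the `jsdom` environment, after `test-env.js` has been loaded via `setupFiles`. A minimal sketch of a test this configuration would run; the file path and `capitalize` helper are hypothetical:

```js
// Hypothetical file: src/scripts/js/es6/common/capitalize.test.js
function capitalize(word) {
    return word.charAt(0).toUpperCase() + word.slice(1);
}

test('capitalize uppercases the first letter', () => {
    // `test` and `expect` are Jest globals; no imports are needed.
    expect(capitalize('pillar')).toBe('Pillar');
});
```

Run it with `npm test`, which maps to `jest` via the `scripts` entry in package.json below.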
**package-lock.json** (generated, new file, 12797 lines — diff suppressed because it is too large)
**package.json** (55 changes)

```diff
@@ -1,25 +1,46 @@
 {
   "name": "pillar",
+  "license": "GPL-2.0+",
+  "author": "Blender Institute",
   "repository": {
     "type": "git",
-    "url": "https://github.com/armadillica/pillar.git"
+    "url": "git://git.blender.org/pillar.git"
   },
-  "author": "Blender Institute",
-  "license": "GPL",
   "devDependencies": {
-    "gulp": "~3.9.1",
-    "gulp-autoprefixer": "~2.3.1",
-    "gulp-cached": "~1.1.0",
-    "gulp-chmod": "~1.3.0",
-    "gulp-concat": "~2.6.0",
-    "gulp-if": "^2.0.1",
-    "gulp-jade": "~1.1.0",
-    "gulp-livereload": "~3.8.1",
-    "gulp-plumber": "~1.1.0",
-    "gulp-rename": "~1.2.2",
-    "gulp-sass": "~2.3.1",
-    "gulp-sourcemaps": "~1.6.0",
-    "gulp-uglify": "~1.5.3",
-    "minimist": "^1.2.0"
+    "@babel/core": "7.1.6",
+    "@babel/preset-env": "7.1.6",
+    "acorn": "5.7.3",
+    "babel-core": "7.0.0-bridge.0",
+    "babelify": "10.0.0",
+    "browserify": "16.2.3",
+    "gulp": "4.0.0",
+    "gulp-autoprefixer": "6.0.0",
+    "gulp-babel": "8.0.0",
+    "gulp-cached": "1.1.1",
+    "gulp-chmod": "2.0.0",
+    "gulp-concat": "2.6.1",
+    "gulp-git": "2.8.0",
+    "gulp-if": "2.0.2",
+    "gulp-livereload": "4.0.0",
+    "gulp-plumber": "1.2.0",
+    "gulp-pug": "4.0.1",
+    "gulp-rename": "1.4.0",
+    "gulp-sass": "4.0.1",
+    "gulp-sourcemaps": "2.6.4",
+    "gulp-uglify-es": "1.0.4",
+    "jest": "23.6.0",
+    "minimist": "1.2.0",
+    "vinyl-buffer": "1.0.1",
+    "vinyl-source-stream": "2.0.0"
   },
+  "dependencies": {
+    "bootstrap": "4.1.3",
+    "glob": "7.1.3",
+    "jquery": "3.3.1",
+    "popper.js": "1.14.4",
+    "video.js": "7.2.2"
+  },
+  "scripts": {
+    "test": "jest"
+  }
 }
```
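These runtime dependencies (jquery, popper.js, bootstrap) are exactly what `scripts_concat_tutti` folds into `tutti.min.js`, so page code can assume them site-wide. A minimal sketch of standard Bootstrap 4 usage under that assumption (not code from this repository):

```js
// Assumes tutti.min.js has loaded, so jQuery, Popper and Bootstrap's
// tooltip plugin are available as globals.
$(function () {
    // Activate every element that opts in via data-toggle="tooltip".
    $('[data-toggle="tooltip"]').tooltip();
});
```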
@@ -1,20 +1,37 @@
 | 
			
		||||
"""Pillar server."""
 | 
			
		||||
 | 
			
		||||
import collections
 | 
			
		||||
import contextlib
 | 
			
		||||
import copy
 | 
			
		||||
import json
 | 
			
		||||
import logging
 | 
			
		||||
import logging.config
 | 
			
		||||
import subprocess
 | 
			
		||||
import tempfile
 | 
			
		||||
import typing
 | 
			
		||||
import os
 | 
			
		||||
import os.path
 | 
			
		||||
import pathlib
 | 
			
		||||
 | 
			
		||||
import jinja2
 | 
			
		||||
from eve import Eve
 | 
			
		||||
import flask
 | 
			
		||||
from flask import render_template, request
 | 
			
		||||
from flask import g, render_template, request
 | 
			
		||||
from flask_babel import Babel, gettext as _
 | 
			
		||||
from flask.templating import TemplateNotFound
 | 
			
		||||
import pymongo.database
 | 
			
		||||
from werkzeug.local import LocalProxy
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Declare pillar.current_app before importing other Pillar modules.
 | 
			
		||||
def _get_current_app():
 | 
			
		||||
    """Returns the current application."""
 | 
			
		||||
 | 
			
		||||
    return flask.current_app
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
current_app: 'PillarServer' = LocalProxy(_get_current_app)
 | 
			
		||||
"""the current app, annotated as PillarServer"""
 | 
			
		||||
 | 
			
		||||
from pillar.api import custom_field_validation
 | 
			
		||||
from pillar.api.utils import authentication
 | 
			
		||||
@@ -23,6 +40,8 @@ import pillar.web.jinja
 | 
			
		||||
from . import api
 | 
			
		||||
from . import web
 | 
			
		||||
from . import auth
 | 
			
		||||
from . import sentry_extra
 | 
			
		||||
import pillar.api.organizations
 | 
			
		||||
 | 
			
		||||
empty_settings = {
 | 
			
		||||
    # Use a random URL prefix when booting Eve, to ensure that any
 | 
			
		||||
@@ -33,15 +52,49 @@ empty_settings = {
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class PillarServer(Eve):
 | 
			
		||||
class ConfigurationMissingError(SystemExit):
 | 
			
		||||
    """Raised when a vital configuration key is missing.
 | 
			
		||||
 | 
			
		||||
    Causes Python to exit.
 | 
			
		||||
    """
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class BlinkerCompatibleEve(Eve):
 | 
			
		||||
    """Workaround for https://github.com/pyeve/eve/issues/1087"""
 | 
			
		||||
 | 
			
		||||
    def __getattr__(self, name):
 | 
			
		||||
        if name in {"im_self", "im_func"}:
 | 
			
		||||
            raise AttributeError("type object '%s' has no attribute '%s'" %
 | 
			
		||||
                                 (self.__class__.__name__, name))
 | 
			
		||||
        return super().__getattr__(name)
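    # Why the guard is needed (a sketch, inferred from the linked Eve issue):
    # Eve's catch-all __getattr__ resolves almost any attribute name, so
    # hasattr(app, 'im_func') would be True. Blinker probes for the Python-2
    # bound-method attributes 'im_self'/'im_func' when identifying signal
    # senders and receivers, and the false positive makes it treat the app
    # object as a bound method. Raising AttributeError for just these two
    # names restores the behaviour blinker expects.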


class PillarServer(BlinkerCompatibleEve):
    def __init__(self, app_root, **kwargs):
        from .extension import PillarExtension
        from celery import Celery
        from flask_wtf.csrf import CSRFProtect

        kwargs.setdefault('validator', custom_field_validation.ValidateCustomFields)
        super(PillarServer, self).__init__(settings=empty_settings, **kwargs)

        # mapping from extension name to extension object.
        self.pillar_extensions = collections.OrderedDict()
        map_type = typing.MutableMapping[str, PillarExtension]
        self.pillar_extensions: map_type = collections.OrderedDict()
        self.pillar_extensions_template_paths = []  # list of paths

        # The default roles Pillar uses. Will probably all move to extensions at some point.
        self._user_roles: typing.Set[str] = {
            'demo', 'admin', 'subscriber', 'homeproject',
            'protected', 'org-subscriber', 'video-encoder',
            'service', 'badger', 'svner',
        }
        self._user_roles_indexable: typing.Set[str] = {'demo', 'admin', 'subscriber'}

        # Mapping from role name to capabilities given to that role.
        self._user_caps: typing.MutableMapping[str, typing.FrozenSet[str]] = \
            collections.defaultdict(frozenset)

        self.app_root = os.path.abspath(app_root)
        self._load_flask_config()
        self._config_logging()
@@ -49,9 +102,13 @@ class PillarServer(Eve):
        self.log = logging.getLogger('%s.%s' % (__name__, self.__class__.__name__))
        self.log.info('Creating new instance from %r', self.app_root)

        self._config_url_map()
        self._config_auth_token_hmac_key()
        self._config_tempdirs()
        self._config_git()
        self._config_bugsnag()

        self.sentry: typing.Optional[sentry_extra.PillarSentry] = None
        self._config_sentry()
        self._config_google_cloud_storage()

        self.algolia_index_users = None
@@ -69,14 +126,34 @@ class PillarServer(Eve):
                                         'api', 'eve_settings.py')
        # self.settings = self.config['EVE_SETTINGS_PATH']
        self.load_config()
        self._validate_config()

        # Configure authentication
        self.login_manager = auth.config_login_manager(self)
        self.oauth_blender_id = auth.config_oauth_login(self)

        self._config_caching()

        self.before_first_request(self.setup_db_indices)
        self._config_translations()

        # Celery itself is configured after all extensions have loaded.
        self.celery: Celery = None

        self.org_manager = pillar.api.organizations.OrgManager()

        # Make CSRF protection available to the application. By default it is
        # disabled on all endpoints. More info at WTF_CSRF_CHECK_DEFAULT in config.py
        self.csrf = CSRFProtect(self)

    def _validate_config(self):
        if not self.config.get('SECRET_KEY'):
            raise ConfigurationMissingError('SECRET_KEY configuration key is missing')

        server_name = self.config.get('SERVER_NAME')
        if not server_name:
            raise ConfigurationMissingError('SERVER_NAME configuration key is missing, should be a '
                                            'FQDN with TLD')
        if server_name != 'localhost' and '.' not in server_name:
            raise ConfigurationMissingError('SERVER_NAME should contain a FQDN with TLD')

    def _load_flask_config(self):
        # Load configuration from different sources, to make it easy to override
@@ -98,6 +175,30 @@ class PillarServer(Eve):
        if self.config['DEBUG']:
            log.info('Pillar starting, debug=%s', self.config['DEBUG'])

    def _config_url_map(self):
        """Extend Flask url_map with our own converters."""
        import secrets, re
        from . import flask_extra

        if not self.config.get('STATIC_FILE_HASH'):
            self.log.warning('STATIC_FILE_HASH is empty, generating random one')
            h = re.sub(r'[_.~-]', '', secrets.token_urlsafe())[:8]
            self.config['STATIC_FILE_HASH'] = h

        self.url_map.converters['hashed_path'] = flask_extra.HashedPathConverter
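        # Presumably the hash ends up in URLs matched by the hashed_path
        # converter, so a fresh random value effectively busts browser caches
        # whenever the app restarts without a pinned STATIC_FILE_HASH.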

    def _config_auth_token_hmac_key(self):
        """Load AUTH_TOKEN_HMAC_KEY, falling back to SECRET_KEY."""

        hmac_key = self.config.get('AUTH_TOKEN_HMAC_KEY')
        if not hmac_key:
            self.log.warning('AUTH_TOKEN_HMAC_KEY not set, falling back to SECRET_KEY')
            hmac_key = self.config['AUTH_TOKEN_HMAC_KEY'] = self.config['SECRET_KEY']

        if isinstance(hmac_key, str):
            self.log.warning('Converting AUTH_TOKEN_HMAC_KEY to bytes')
            self.config['AUTH_TOKEN_HMAC_KEY'] = hmac_key.encode('utf8')
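    # Illustrative helper (hypothetical, not part of Pillar): a minimal sketch
    # of how the bytes HMAC key prepared above would typically be consumed to
    # hash an authentication token.
    def _example_hash_auth_token(self, token: str) -> str:
        import base64
        import hashlib
        import hmac

        digest = hmac.new(self.config['AUTH_TOKEN_HMAC_KEY'],
                          token.encode('utf8'), hashlib.sha256).digest()
        return base64.urlsafe_b64encode(digest).decode('ascii')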

    def _config_tempdirs(self):
        storage_dir = self.config['STORAGE_DIR']
        if not os.path.exists(storage_dir):
@@ -123,25 +224,18 @@ class PillarServer(Eve):
            self.config['GIT_REVISION'] = 'unknown'
        self.log.info('Git revision %r', self.config['GIT_REVISION'])

    def _config_bugsnag(self):
        # Configure Bugsnag
        if self.config.get('TESTING') or not self.config.get('BUGSNAG_API_KEY'):
            self.log.info('Bugsnag NOT configured.')
    def _config_sentry(self):
        # TODO(Sybren): keep Sentry unconfigured when running CLI commands.
        sentry_dsn = self.config.get('SENTRY_CONFIG', {}).get('dsn')
        if self.config.get('TESTING') or sentry_dsn in {'', '-set-in-config-local-'}:
            self.log.warning('Sentry NOT configured.')
            self.sentry = None
            return

        import bugsnag
        from bugsnag.flask import handle_exceptions
        from bugsnag.handlers import BugsnagHandler

        bugsnag.configure(
            api_key=self.config['BUGSNAG_API_KEY'],
            project_root="/data/git/pillar/pillar",
        )
        handle_exceptions(self)

        bs_handler = BugsnagHandler()
        bs_handler.setLevel(logging.ERROR)
        self.log.addHandler(bs_handler)
        self.sentry = sentry_extra.PillarSentry(
            self, logging=True, level=logging.WARNING,
            logging_exclusions=('werkzeug',))
        self.log.debug('Sentry setup complete')

    def _config_google_cloud_storage(self):
        # Google Cloud project
@@ -149,17 +243,17 @@ class PillarServer(Eve):
            os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = \
                self.config['GCLOUD_APP_CREDENTIALS']
        except KeyError:
            raise SystemExit('GCLOUD_APP_CREDENTIALS configuration is missing')
            raise ConfigurationMissingError('GCLOUD_APP_CREDENTIALS configuration is missing')

        # Storage backend (GCS)
        try:
            os.environ['GCLOUD_PROJECT'] = self.config['GCLOUD_PROJECT']
        except KeyError:
            raise SystemExit('GCLOUD_PROJECT configuration value is missing')
            raise ConfigurationMissingError('GCLOUD_PROJECT configuration value is missing')

    def _config_algolia(self):
        # Algolia search
        if self.config['SEARCH_BACKEND'] != 'algolia':
        if 'algolia' not in self.config['SEARCH_BACKENDS']:
            return

        from algoliasearch import algoliasearch
@@ -173,15 +267,88 @@ class PillarServer(Eve):
    def _config_encoding_backend(self):
        # Encoding backend
        if self.config['ENCODING_BACKEND'] != 'zencoder':
            self.log.warning('Encoding backend %r not supported, no video encoding possible!',
                             self.config['ENCODING_BACKEND'])
            return

        self.log.info('Setting up video encoding backend %r',
                      self.config['ENCODING_BACKEND'])

        from zencoder import Zencoder
        self.encoding_service_client = Zencoder(self.config['ZENCODER_API_KEY'])

    def _config_caching(self):
        from flask_cache import Cache
        from flask_caching import Cache
        self.cache = Cache(self)

    def set_languages(self, translations_folder: pathlib.Path):
        """Set the supported languages based on translations folders

        English is an optional language included by default, since we will
        never have a translations folder for it.
        """
        self.default_locale = self.config['DEFAULT_LOCALE']
        self.config['BABEL_DEFAULT_LOCALE'] = self.default_locale

        # Determine available languages.
        languages = list()

        # The available languages will be determined based on available
        # translations in the //translations/ folder. The exception is (American) English
        # since all the text is originally in English already.
        # That said, on rare occasions we may want to never show
        # the site in English.

        if self.config['SUPPORT_ENGLISH']:
            languages.append('en_US')

        base_path = pathlib.Path(self.app_root) / 'translations'

        if not base_path.is_dir():
            self.log.debug('Project has no translations folder: %s', base_path)
        else:
            languages.extend(i.name for i in base_path.iterdir() if i.is_dir())

        # Use set for quicker lookup
        self.languages = set(languages)

        self.log.info('Available languages: %s' % ', '.join(self.languages))

    def _config_translations(self):
        """
        Initialize translations variable.

        The BABEL_TRANSLATION_DIRECTORIES has the folder for the compiled
        translations files. It uses ; separation for the extension folders.
        """
        self.log.info('Configure translations')
        translations_path = pathlib.Path(__file__).parents[1].joinpath('translations')

        self.config['BABEL_TRANSLATION_DIRECTORIES'] = str(translations_path)
        babel = Babel(self)

        self.set_languages(translations_path)

        # get_locale() is registered as a callback for locale selection.
        # That prevents the function from being garbage collected.
        @babel.localeselector
        def get_locale() -> str:
            """
            Callback runs before each request to give us a chance to choose the
            language to use when producing its response.

            We set g.locale to be able to access it from the template pages.
            We still need to return it explicitly, since this function is
            called as part of the babel translation framework.

            We are using the 'Accept-Languages' header to match the available
            translations with the user supported languages.
            """
            locale = request.accept_languages.best_match(
                self.languages, self.default_locale)
            g.locale = locale
            return locale

    def load_extension(self, pillar_extension, url_prefix):
        from .extension import PillarExtension

@@ -200,12 +367,15 @@ class PillarServer(Eve):
        self.pillar_extensions[pillar_extension.name] = pillar_extension

        # Load extension Flask configuration
        for key, value in pillar_extension.flask_config():
        for key, value in pillar_extension.flask_config().items():
            self.config.setdefault(key, value)

        # Load extension blueprint(s)
        for blueprint in pillar_extension.blueprints():
            if blueprint.url_prefix:
                if not url_prefix:
                    # If we registered the extension with url_prefix=None
                    url_prefix = ''
                blueprint_prefix = url_prefix + blueprint.url_prefix
            else:
                blueprint_prefix = url_prefix
@@ -238,12 +408,25 @@ class PillarServer(Eve):

            self.config['DOMAIN'].update(eve_settings['DOMAIN'])

        # Configure the extension translations
        trpath = pillar_extension.translations_path
        if not trpath:
            self.log.debug('Extension %s does not have a translations folder',
                           pillar_extension.name)
            return

        self.log.info('Extension %s: adding translations path %s',
                      pillar_extension.name, trpath)

        # Babel requires semi-colon string separation
        self.config['BABEL_TRANSLATION_DIRECTORIES'] += ';' + str(trpath)
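        # Illustrative value (paths are made up): after two extensions register
        # their translations this setting could read
        # '/app/pillar/translations;/ext_a/translations;/ext_b/translations'.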

    def _config_jinja_env(self):
        # Start with the extensions...
        paths_list = [
            jinja2.FileSystemLoader(path)
            for path in reversed(self.pillar_extensions_template_paths)
            ]
        ]

        # ...then load Pillar paths.
        pillar_dir = os.path.dirname(os.path.realpath(__file__))
@@ -260,7 +443,15 @@ class PillarServer(Eve):
        custom_jinja_loader = jinja2.ChoiceLoader(paths_list)
        self.jinja_loader = custom_jinja_loader

        pillar.web.jinja.setup_jinja_env(self.jinja_env)
        pillar.web.jinja.setup_jinja_env(self.jinja_env, self.config)

        # Register context processors from extensions
        for ext in self.pillar_extensions.values():
            if not ext.has_context_processor:
                continue

            self.log.debug('Registering context processor for %s', ext.name)
            self.context_processor(ext.context_processor)

    def _config_static_dirs(self):
        # Setup static folder for the instanced app
@@ -279,13 +470,88 @@ class PillarServer(Eve):
                                               'static_%s' % name,
                                               ext.static_path)

    def _config_celery(self):
        from celery import Celery

        self.log.info('Configuring Celery')

        # Pillar-defined Celery task modules:
        celery_task_modules = [
            'pillar.celery.badges',
            'pillar.celery.email_tasks',
            'pillar.celery.file_link_tasks',
            'pillar.celery.search_index_tasks',
            'pillar.celery.tasks',
        ]

        # Allow Pillar extensions to define their own Celery tasks.
        for extension in self.pillar_extensions.values():
            celery_task_modules.extend(extension.celery_task_modules)

        self.celery = Celery(
            'pillar.celery',
            backend=self.config['CELERY_BACKEND'],
            broker=self.config['CELERY_BROKER'],
            include=celery_task_modules,
            task_track_started=True,
            result_expires=3600,
        )

        # This configures the Celery task scheduler in such a way that we don't
        # have to import the pillar.celery.XXX modules. Remember to run
        # 'manage.py celery beat' too, otherwise those will never run.
        beat_schedule = self.config.get('CELERY_BEAT_SCHEDULE')
        if beat_schedule:
            self.celery.conf.beat_schedule = beat_schedule

        self.log.info('Pinging Celery workers')
        self.log.info('Response: %s', self.celery.control.ping())
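        # Illustrative CELERY_BEAT_SCHEDULE entry (the task name is
        # hypothetical; only the dict shape follows Celery's beat format):
        #
        #   CELERY_BEAT_SCHEDULE = {
        #       'refresh-file-links': {
        #           'task': 'pillar.celery.file_link_tasks.some_periodic_task',
        #           'schedule': 600,  # seconds
        #       },
        #   }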

    def _config_user_roles(self):
        """Gathers all user roles from extensions.

        The union of all user roles can be obtained from self.user_roles.
        """

        for extension in self.pillar_extensions.values():
            indexed_but_not_defined = extension.user_roles_indexable - extension.user_roles
            if indexed_but_not_defined:
                raise ValueError('Extension %s has roles %s indexable but not in user_roles'
                                 % (extension.name, indexed_but_not_defined))

            self._user_roles.update(extension.user_roles)
            self._user_roles_indexable.update(extension.user_roles_indexable)

        self.log.info('Loaded %i user roles from extensions, %i of which are indexable',
                      len(self._user_roles), len(self._user_roles_indexable))

    def _config_user_caps(self):
        """Merges all capability settings from app config and extensions."""

        app_caps = collections.defaultdict(frozenset, **self.config['USER_CAPABILITIES'])

        for extension in self.pillar_extensions.values():
            ext_caps = extension.user_caps

            for role, caps in ext_caps.items():
                union_caps = frozenset(app_caps[role] | caps)
                app_caps[role] = union_caps

        self._user_caps = app_caps

        if self.log.isEnabledFor(logging.DEBUG):
            import pprint
            self.log.debug('Configured user capabilities: %s', pprint.pformat(self._user_caps))
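        # Worked example (capability names are made up): if the app config maps
        # 'subscriber' -> {'subscribe'} and an extension maps
        # 'subscriber' -> {'view-training'}, the merged mapping becomes
        # 'subscriber' -> frozenset({'subscribe', 'view-training'}).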

    def register_static_file_endpoint(self, url_prefix, endpoint_name, static_folder):
        from pillar.web.static import PillarStaticFile
        from pillar.web.staticfile import PillarStaticFile

        view_func = PillarStaticFile.as_view(endpoint_name, static_folder=static_folder)
        self.add_url_rule('%s/<path:filename>' % url_prefix, view_func=view_func)
        self.add_url_rule(f'{url_prefix}/<hashed_path:filename>', view_func=view_func)

    def process_extensions(self):
        """This is about Eve extensions, not Pillar extensions."""

        # Re-initialise Eve after we allowed Pillar submodules to be loaded.
        # EVIL STARTS HERE. It just copies part of the Eve.__init__() method.
        self.set_defaults()
@@ -360,12 +626,12 @@ class PillarServer(Eve):
                if node_type:
                    node_type = node_type.replace('_', ' ').title()
                    if doc_name:
                        description = u'%s "%s" was deleted.' % (node_type, doc_name)
                        description = '%s "%s" was deleted.' % (node_type, doc_name)
                    else:
                        description = u'This %s was deleted.' % (node_type, )
                        description = 'This %s was deleted.' % (node_type,)
                else:
                    if doc_name:
                        description = u'"%s" was deleted.' % doc_name
                        description = '"%s" was deleted.' % doc_name
                    else:
                        description = None

@@ -384,7 +650,7 @@ class PillarServer(Eve):
        self.log.info('Forwarding ResourceInvalid exception to client: %s', error, exc_info=True)

        # Raising a Werkzeug 422 exception doesn't work, as Flask turns it into a 500.
        return 'The submitted data could not be validated.', 422
        return _('The submitted data could not be validated.'), 422

    def handle_sdk_method_not_allowed(self, error):
        """Forwards 405 Method Not Allowed to the client.
@@ -437,18 +703,30 @@ class PillarServer(Eve):
    def finish_startup(self):
        self.log.info('Using MongoDB database %r', self.config['MONGO_DBNAME'])

        with self.app_context():
            self.setup_db_indices()
        self._config_celery()

        api.setup_app(self)
        web.setup_app(self)

        authentication.setup_app(self)

        for ext in self.pillar_extensions.itervalues():
        # Register Flask Debug Toolbar (disabled by default).
        from flask_debugtoolbar import DebugToolbarExtension
        DebugToolbarExtension(self)

        for ext in self.pillar_extensions.values():
            self.log.info('Setting up extension %s', ext.name)
            ext.setup_app(self)

        self._config_jinja_env()
        self._config_static_dirs()
        self._config_user_roles()
        self._config_user_caps()

        # Only enable this when debugging.
        # TODO(fsiddi): Consider removing this in favor of the routes tab in Flask Debug Toolbar.
        # self._list_routes()

    def setup_db_indices(self):
@@ -468,6 +746,7 @@ class PillarServer(Eve):
        coll = db['tokens']
        coll.create_index([('user', pymongo.ASCENDING)])
        coll.create_index([('token', pymongo.ASCENDING)])
        coll.create_index([('token_hashed', pymongo.ASCENDING)])

        coll = db['notifications']
        coll.create_index([('user', pymongo.ASCENDING)])
@@ -483,6 +762,22 @@ class PillarServer(Eve):
        coll.create_index([('parent', pymongo.ASCENDING)])
        coll.create_index([('short_code', pymongo.ASCENDING)],
                          sparse=True, unique=True)
        # Used for latest assets & comments
        coll.create_index([('properties.status', pymongo.ASCENDING),
                           ('node_type', pymongo.ASCENDING),
                           ('_created', pymongo.DESCENDING)])
        # Used for asset tags
        coll.create_index([('properties.tags', pymongo.ASCENDING)])

        coll = db['projects']
        # This index is used for statistics, and for fetching public projects.
        coll.create_index([('is_private', pymongo.ASCENDING)])
        coll.create_index([('category', pymongo.ASCENDING)])

        coll = db['organizations']
        coll.create_index([('ip_ranges.start', pymongo.ASCENDING)])
        coll.create_index([('ip_ranges.end', pymongo.ASCENDING)])
        self.log.debug('Created database indices')

    def register_api_blueprint(self, blueprint, url_prefix):
        # TODO: use Eve config variable instead of hard-coded '/api'
@@ -494,32 +789,49 @@ class PillarServer(Eve):

        return 'basic ' + base64.b64encode(('%s:%s' % (username, subclient_id)).encode()).decode()

    def post_internal(self, resource, payl=None, skip_validation=False):
    def post_internal(self, resource: str, payl=None, skip_validation=False):
        """Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
        from eve.methods.post import post_internal

        with self.test_request_context(method='POST', path='%s/%s' % (self.api_prefix, resource)):
            return post_internal(resource, payl=payl, skip_validation=skip_validation)
        url = self.config['URLS'][resource]
        path = '%s/%s' % (self.api_prefix, url)
        with self.__fake_request_url_rule('POST', path):
            return post_internal(resource, payl=payl, skip_validation=skip_validation)[:4]

    def put_internal(self, resource, payload=None, concurrency_check=False,
    def put_internal(self, resource: str, payload=None, concurrency_check=False,
                     skip_validation=False, **lookup):
        """Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
        from eve.methods.put import put_internal

        path = '%s/%s/%s' % (self.api_prefix, resource, lookup['_id'])
        with self.test_request_context(method='PUT', path=path):
        url = self.config['URLS'][resource]
        path = '%s/%s/%s' % (self.api_prefix, url, lookup['_id'])
        with self.__fake_request_url_rule('PUT', path):
            return put_internal(resource, payload=payload, concurrency_check=concurrency_check,
                                skip_validation=skip_validation, **lookup)
                                skip_validation=skip_validation, **lookup)[:4]

    def patch_internal(self, resource, payload=None, concurrency_check=False,
    def patch_internal(self, resource: str, payload=None, concurrency_check=False,
                       skip_validation=False, **lookup):
        """Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
        from eve.methods.patch import patch_internal

        path = '%s/%s/%s' % (self.api_prefix, resource, lookup['_id'])
        with self.test_request_context(method='PATCH', path=path):
        url = self.config['URLS'][resource]
        path = '%s/%s/%s' % (self.api_prefix, url, lookup['_id'])
        with self.__fake_request_url_rule('PATCH', path):
            return patch_internal(resource, payload=payload, concurrency_check=concurrency_check,
                                  skip_validation=skip_validation, **lookup)
                                  skip_validation=skip_validation, **lookup)[:4]

    def delete_internal(self, resource: str, concurrency_check=False,
                        suppress_callbacks=False, **lookup):
        """Workaround for Eve issue https://github.com/nicolaiarocci/eve/issues/810"""
        from eve.methods.delete import deleteitem_internal

        url = self.config['URLS'][resource]
        path = '%s/%s/%s' % (self.api_prefix, url, lookup['_id'])
        with self.__fake_request_url_rule('DELETE', path):
            return deleteitem_internal(resource,
                                       concurrency_check=concurrency_check,
                                       suppress_callbacks=suppress_callbacks,
                                       **lookup)[:4]
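        # Note on the [:4] slices above: they presumably trim the tuple that
        # newer Eve versions return (which gains a trailing response-headers
        # element) back to the (document, last_modified, etag, status) shape
        # that callers in this codebase expect.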

    def _list_routes(self):
        from pprint import pprint
@@ -537,18 +849,22 @@ class PillarServer(Eve):
                # and rules that require parameters
                if "GET" in rule.methods and has_no_empty_params(rule):
                    url = url_for(rule.endpoint, **(rule.defaults or {}))
                    links.append((url, rule.endpoint))
                    links.append((url, rule.endpoint, rule.methods))
                if "PATCH" in rule.methods:
                    args = {arg: arg for arg in rule.arguments}
                    url = url_for(rule.endpoint, **args)
                    links.append((url, rule.endpoint, rule.methods))

        links.sort(key=lambda t: len(t[0]) + 100 * ('/api/' in t[0]))
        links.sort(key=lambda t: (('/api/' in t[0]), len(t[0])))
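        # The new sort key groups non-API routes first (False sorts before
        # True), then orders by URL length within each group.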

        pprint(links)
        pprint(links, width=300)

    def db(self):
        """Returns the MongoDB database.

        :rtype: flask_pymongo.PyMongo
        """
    def db(self, collection_name: str = None) \
            -> typing.Union[pymongo.collection.Collection, pymongo.database.Database]:
        """Returns the MongoDB database, or the collection (if given)"""

        if collection_name:
            return self.data.driver.db[collection_name]
        return self.data.driver.db
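        # Usage sketch: current_app.db() yields the pymongo Database, while
        # current_app.db('users') yields that Collection directly, e.g.:
        #
        #   users_coll = current_app.db('users')
        #   db_user = users_coll.find_one({'_id': some_object_id})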

    def extension_sidebar_links(self, project):
@@ -562,3 +878,52 @@ class PillarServer(Eve):

        return jinja2.Markup(''.join(ext.sidebar_links(project)
                                     for ext in self.pillar_extensions.values()))

    @contextlib.contextmanager
    def __fake_request_url_rule(self, method: str, url_path: str):
        """Tries to force-set the request URL rule.

        This is required by Eve (since 0.70) to be able to construct a
        Location HTTP header that points to the resource item.

        See post_internal, put_internal and patch_internal.
        """

        import werkzeug.exceptions as wz_exceptions

        with self.test_request_context(method=method, path=url_path) as ctx:
            try:
                rule, _ = ctx.url_adapter.match(url_path, method=method, return_rule=True)
            except (wz_exceptions.MethodNotAllowed, wz_exceptions.NotFound):
                # We're POSTing things that we haven't told Eve are POSTable. Try again using the
                # GET method.
                rule, _ = ctx.url_adapter.match(url_path, method='GET', return_rule=True)
            current_request = request._get_current_object()
            current_request.url_rule = rule

            yield ctx

    def validator_for_resource(self, resource_name: str) -> custom_field_validation.ValidateCustomFields:
        schema = self.config['DOMAIN'][resource_name]['schema']
        validator = self.validator(schema, resource_name)
        return validator

    @property
    def user_roles(self) -> typing.FrozenSet[str]:
        return frozenset(self._user_roles)

    @property
    def user_roles_indexable(self) -> typing.FrozenSet[str]:
        return frozenset(self._user_roles_indexable)

    @property
    def user_caps(self) -> typing.Mapping[str, typing.FrozenSet[str]]:
        return self._user_caps

    @property
    def real_app(self) -> 'PillarServer':
        """The real application object.

        Can be used to obtain the real app object from a LocalProxy.
        """
        return self

@@ -1,15 +1,20 @@
def setup_app(app):
    from . import encoding, blender_id, projects, local_auth, file_storage
    from . import users, nodes, latest, blender_cloud, service, activities
    from . import users, nodes, latest, blender_cloud, service, activities, timeline
    from . import organizations
    from . import search

    encoding.setup_app(app, url_prefix='/encoding')
    blender_id.setup_app(app, url_prefix='/blender_id')
    search.setup_app(app, url_prefix='/newsearch')
    projects.setup_app(app, api_prefix='/p')
    local_auth.setup_app(app, url_prefix='/auth')
    file_storage.setup_app(app, url_prefix='/storage')
    latest.setup_app(app, url_prefix='/latest')
    timeline.setup_app(app, url_prefix='/timeline')
    blender_cloud.setup_app(app, url_prefix='/bcloud')
    users.setup_app(app, api_prefix='/users')
    service.setup_app(app, api_prefix='/service')
    nodes.setup_app(app, url_prefix='/nodes')
    activities.setup_app(app)
    organizations.setup_app(app)

@@ -1,7 +1,8 @@
import logging

from flask import g, request, current_app
from flask import request, current_app
from pillar.api.utils import gravatar
from pillar.auth import current_user

log = logging.getLogger(__name__)

@@ -30,7 +31,7 @@ def notification_parse(notification):
    object_name = ''
    object_id = activity['object']

    if node['parent']['user'] == g.current_user['user_id']:
    if node['parent']['user'] == current_user.user_id:
        owner = "your {0}".format(node['parent']['node_type'])
    else:
        parent_comment_user = users_collection.find_one(
@@ -52,7 +53,7 @@ def notification_parse(notification):
        action = activity['verb']

    lookup = {
        'user': g.current_user['user_id'],
        'user': current_user.user_id,
        'context_object_type': 'node',
        'context_object': context_object_id,
    }

@@ -24,7 +24,8 @@ def blender_cloud_addon_version():


def setup_app(app, url_prefix):
    from . import texture_libs, home_project
    from . import texture_libs, home_project, subscription

    texture_libs.setup_app(app, url_prefix=url_prefix)
    home_project.setup_app(app, url_prefix=url_prefix)
    subscription.setup_app(app, url_prefix=url_prefix)

@@ -1,12 +1,11 @@
import copy
import logging

import datetime
from bson import ObjectId, tz_util
from bson import ObjectId
from eve.methods.get import get
from flask import Blueprint, g, current_app, request
from flask import Blueprint, current_app, request
from pillar.api import utils
from pillar.api.utils import authentication, authorization
from pillar.api.utils import authentication, authorization, utcnow
from werkzeug import exceptions as wz_exceptions

from pillar.api.projects import utils as proj_utils
@@ -18,7 +17,7 @@ log = logging.getLogger(__name__)
HOME_PROJECT_USERS = set()

# Users with any of these roles will get full write access to their home project.
HOME_PROJECT_WRITABLE_USERS = {u'subscriber', u'demo'}
HOME_PROJECT_WRITABLE_USERS = {'subscriber', 'demo'}

HOME_PROJECT_DESCRIPTION = ('# Your home project\n\n'
                            'This is your home project. It allows synchronisation '
@@ -30,7 +29,7 @@ HOME_PROJECT_SUMMARY = 'This is your home project. Here you can sync your Blende
#                             'as a pastebin for text, images and other assets, and '
#                             'allows synchronisation of your Blender settings.')
# HOME_PROJECT_SUMMARY = 'This is your home project. Pastebin and Blender settings sync in one!'
SYNC_GROUP_NODE_NAME = u'Blender Sync'
SYNC_GROUP_NODE_NAME = 'Blender Sync'
SYNC_GROUP_NODE_DESC = ('The [Blender Cloud Addon](https://cloud.blender.org/services'
                        '#blender-addon) will synchronize your Blender settings here.')

@@ -113,7 +112,7 @@ def create_home_project(user_id, write_access):

    # Re-validate the authentication token, so that the put_internal call sees the
    # new group created for the project.
    authentication.validate_token()
    authentication.validate_token(force=True)

    # There are a few things in the on_insert_projects hook we need to adjust.

@@ -135,8 +134,8 @@ def create_home_project(user_id, write_access):
    # This allows people to comment on shared images and see comments.
    node_type_comment = assign_permissions(
        node_type_comment,
        subscriber_methods=[u'GET', u'POST'],
        world_methods=[u'GET'])
        subscriber_methods=['GET', 'POST'],
        world_methods=['GET'])

    project['node_types'] = [
        node_type_group,
@@ -201,8 +200,10 @@ def home_project():
    Eve projections are supported, but at least the following fields must be present:
        'permissions', 'category', 'user'
    """
    user_id = g.current_user['user_id']
    roles = g.current_user.get('roles', ())
    from pillar.auth import current_user

    user_id = current_user.user_id
    roles = current_user.roles

    log.debug('Possibly creating home project for user %s with roles %s', user_id, roles)
    if HOME_PROJECT_USERS and not HOME_PROJECT_USERS.intersection(roles):
@@ -215,7 +216,7 @@ def home_project():
        write_access = write_access_with_roles(roles)
        create_home_project(user_id, write_access)

    resp, _, _, status, _ = get('projects', category=u'home', user=user_id)
    resp, _, _, status, _ = get('projects', category='home', user=user_id)
    if status != 200:
        return utils.jsonify(resp), status

@@ -248,8 +249,8 @@ def home_project_permissions(write_access):
    """

    if write_access:
        return [u'GET', u'PUT', u'POST', u'DELETE']
    return [u'GET']
        return ['GET', 'PUT', 'POST', 'DELETE']
    return ['GET']


def has_home_project(user_id):
@@ -280,7 +281,7 @@ def is_home_project(project_id, user_id):
def mark_node_updated(node_id):
    """Uses pymongo to set the node's _updated to "now"."""

    now = datetime.datetime.now(tz=tz_util.utc)
    now = utcnow()
    nodes_coll = current_app.data.driver.db['nodes']

    return nodes_coll.update_one({'_id': node_id},
 
pillar/api/blender_cloud/subscription.py (new file, 180 lines)
@@ -0,0 +1,180 @@
import logging
import typing

import blinker
from flask import Blueprint, Response
import requests
from requests.adapters import HTTPAdapter

from pillar import auth, current_app
from pillar.api import blender_id
from pillar.api.utils import authorization, jsonify
from pillar.auth import current_user

log = logging.getLogger(__name__)
blueprint = Blueprint('blender_cloud.subscription', __name__)

# Mapping from roles on Blender ID to roles here in Pillar.
# Roles not mentioned here will not be synced from Blender ID.
ROLES_BID_TO_PILLAR = {
    'cloud_subscriber': 'subscriber',
    'cloud_demo': 'demo',
    'cloud_has_subscription': 'has_subscription',
}

user_subscription_updated = blinker.NamedSignal(
    'user_subscription_updated',
    'The sender is a UserClass instance, kwargs includes "revoke_roles" and "grant_roles".')


@blueprint.route('/update-subscription')
@authorization.require_login()
def update_subscription() -> typing.Tuple[str, int]:
    """Updates the subscription status of the current user.

    Returns an empty HTTP response.
    """

    my_log: logging.Logger = log.getChild('update_subscription')
    real_current_user = auth.get_current_user()  # multiple accesses, just get unproxied.

    try:
        bid_user = blender_id.fetch_blenderid_user()
    except blender_id.LogoutUser:
        auth.logout_user()
        return '', 204

    if not bid_user:
        my_log.warning('Logged in user %s has no BlenderID account! '
                       'Unable to update subscription status.', real_current_user.user_id)
        return '', 204

    do_update_subscription(real_current_user, bid_user)
    return '', 204
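# Example request (illustrative): with this blueprint mounted under '/bcloud'
# and the hard-coded '/api' prefix from register_api_blueprint, a logged-in
# GET /api/bcloud/update-subscription returns 204 with an empty body.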


@blueprint.route('/update-subscription-for/<user_id>', methods=['POST'])
@authorization.require_login(require_cap='admin')
def update_subscription_for(user_id: str):
    """Updates the user based on their info at Blender ID."""

    from urllib.parse import urljoin

    from pillar.api.utils import str2id

    my_log = log.getChild('update_subscription_for')

    bid_session = requests.Session()
    bid_session.mount('https://', HTTPAdapter(max_retries=5))
    bid_session.mount('http://', HTTPAdapter(max_retries=5))

    users_coll = current_app.db('users')
    db_user = users_coll.find_one({'_id': str2id(user_id)})
    if not db_user:
        my_log.warning('User %s not found in database', user_id)
        return Response(f'User {user_id} not found in our database', status=404)

    log.info('Updating user %s from Blender ID on behalf of %s',
             db_user['email'], current_user.email)

    bid_user_id = blender_id.get_user_blenderid(db_user)
    if not bid_user_id:
        my_log.info('User %s has no Blender ID', user_id)
        return Response('User has no Blender ID', status=404)

    # Get the user info from Blender ID, and handle errors.
    api_url = current_app.config['BLENDER_ID_USER_INFO_API']
    api_token = current_app.config['BLENDER_ID_USER_INFO_TOKEN']
    url = urljoin(api_url, bid_user_id)
    resp = bid_session.get(url, headers={'Authorization': f'Bearer {api_token}'})
    if resp.status_code == 404:
        my_log.info('User %s has a Blender ID %s but Blender ID itself does not find it',
                    user_id, bid_user_id)
        return Response(f'User {bid_user_id} does not exist at Blender ID', status=404)
    if resp.status_code != 200:
        my_log.info('Error code %s getting user %s from Blender ID (resp = %s)',
                    resp.status_code, user_id, resp.text)
        return Response(f'Error code {resp.status_code} from Blender ID', status=resp.status_code)

    # Update the user in our database.
    local_user = auth.UserClass.construct('', db_user)
    bid_user = resp.json()
    do_update_subscription(local_user, bid_user)

    return '', 204


def do_update_subscription(local_user: auth.UserClass, bid_user: dict):
    """Updates the subscription status of the user given the Blender ID user info.

    Uses the badger service to update the user's roles from Blender ID.

    bid_user should be a dict like:
    {'id': 1234,
     'full_name': 'मूंगफली मक्खन प्रेमी',
     'email': 'here@example.com',
     'roles': {'cloud_demo': True}}

    The 'roles' key can also be an iterable of role names instead of a dict.
    """

    from pillar.api import service

    my_log: logging.Logger = log.getChild('do_update_subscription')

    try:
        email = bid_user['email']
    except KeyError:
        email = '-missing email-'

    # Transform the BID roles from a dict to a set.
    bidr = bid_user.get('roles', set())
    if isinstance(bidr, dict):
        bid_roles = {role
                     for role, has_role in bid_user.get('roles', {}).items()
                     if has_role}
    else:
        bid_roles = set(bidr)

    # Handle the role changes via the badger service functionality.
    plr_roles = set(local_user.roles)

    grant_roles = set()
    revoke_roles = set()
    for bid_role, plr_role in ROLES_BID_TO_PILLAR.items():
        if bid_role in bid_roles and plr_role not in plr_roles:
            grant_roles.add(plr_role)
            continue
        if bid_role not in bid_roles and plr_role in plr_roles:
            revoke_roles.add(plr_role)
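    # Worked example: with bid_roles = {'cloud_subscriber'} and
    # plr_roles = {'demo', 'subscriber'}, grant_roles stays empty and
    # revoke_roles becomes {'demo'}: the user keeps 'subscriber', loses
    # 'demo', and 'has_subscription' is untouched since neither side has it.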

    user_id = local_user.user_id

    if grant_roles:
        if my_log.isEnabledFor(logging.INFO):
            my_log.info('granting roles to user %s (Blender ID %s): %s',
                        user_id, email, ', '.join(sorted(grant_roles)))
        service.do_badger('grant', roles=grant_roles, user_id=user_id)

    if revoke_roles:
        if my_log.isEnabledFor(logging.INFO):
            my_log.info('revoking roles from user %s (Blender ID %s): %s',
 | 
			
		||||
                        user_id, email, ', '.join(sorted(revoke_roles)))
 | 
			
		||||
        service.do_badger('revoke', roles=revoke_roles, user_id=user_id)
 | 
			
		||||
 | 
			
		||||
    # Let the world know this user's subscription was updated.
 | 
			
		||||
    final_roles = (plr_roles - revoke_roles).union(grant_roles)
 | 
			
		||||
    local_user.roles = list(final_roles)
 | 
			
		||||
    local_user.collect_capabilities()
 | 
			
		||||
    user_subscription_updated.send(local_user,
 | 
			
		||||
                                   grant_roles=grant_roles,
 | 
			
		||||
                                   revoke_roles=revoke_roles)
 | 
			
		||||
 | 
			
		||||
    # Re-index the user in the search database.
 | 
			
		||||
    from pillar.api.users import hooks
 | 
			
		||||
    hooks.push_updated_user_to_search({'_id': user_id}, {})
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def setup_app(app, url_prefix):
 | 
			
		||||
    log.info('Registering blueprint at %s', url_prefix)
 | 
			
		||||
    app.register_api_blueprint(blueprint, url_prefix=url_prefix)
 | 
			
		||||
@@ -3,12 +3,14 @@ import logging

from eve.methods.get import get
from eve.utils import config as eve_config
from flask import Blueprint, request, current_app, g
from flask import Blueprint, request, current_app
from werkzeug.datastructures import MultiDict
from werkzeug.exceptions import InternalServerError

from pillar.api import utils
from pillar.api.utils.authentication import current_user_id
from pillar.api.utils.authorization import require_login
from werkzeug.datastructures import MultiDict
from werkzeug.exceptions import InternalServerError
from pillar.auth import current_user

FIRST_ADDON_VERSION_WITH_HDRI = (1, 4, 0)
TL_PROJECTION = utils.dumps({'name': 1, 'url': 1, 'permissions': 1,})
@@ -25,8 +27,8 @@ log = logging.getLogger(__name__)


def keep_fetching_texture_libraries(proj_filter):
    groups = g.current_user['groups']
    user_id = g.current_user['user_id']
    groups = current_user.group_ids
    user_id = current_user.user_id

    page = 1
    max_page = float('inf')
@@ -74,7 +76,7 @@ def texture_libraries():
    # of the Blender Cloud Addon. If the addon version is None, we're dealing
    # with a version of the BCA that's so old it doesn't send its version along.
    addon_version = blender_cloud_addon_version()
    return_hdri = addon_version >= FIRST_ADDON_VERSION_WITH_HDRI
    return_hdri = addon_version is not None and addon_version >= FIRST_ADDON_VERSION_WITH_HDRI
    log.debug('User %s has Blender Cloud Addon version %s; return_hdri=%s',
              current_user_id(), addon_version, return_hdri)

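The 'is not None' guard added above matters because Python 3 refuses to order None against a tuple. A small self-contained illustration (version values invented):

FIRST_ADDON_VERSION_WITH_HDRI = (1, 4, 0)

for addon_version in ((1, 5, 0), (1, 3, 9), None):
    return_hdri = addon_version is not None and addon_version >= FIRST_ADDON_VERSION_WITH_HDRI
    print(addon_version, return_hdri)
# (1, 5, 0) True, (1, 3, 9) False, None False; without the guard the None case
# would raise TypeError: '>=' not supported between 'NoneType' and 'tuple'.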
@@ -4,20 +4,31 @@ Also contains functionality for other parts of Pillar to perform communication
with Blender ID.
"""

import logging

import datetime
import logging
from urllib.parse import urljoin

import requests
from bson import tz_util
from flask import Blueprint, request, current_app, jsonify
from pillar.api.utils import authentication, remove_private_keys
from rauth import OAuth2Session
from flask import Blueprint, request, jsonify, session
from requests.adapters import HTTPAdapter
from werkzeug import exceptions as wz_exceptions

from pillar import current_app
from pillar.api.utils import authentication, utcnow
from pillar.api.utils.authentication import find_user_in_db, upsert_user

blender_id = Blueprint('blender_id', __name__)
log = logging.getLogger(__name__)


class LogoutUser(Exception):
    """Raised when Blender ID tells us the current user token is invalid.

    This indicates the user should be immediately logged out.
    """


@blender_id.route('/store_scst', methods=['POST'])
def store_subclient_token():
    """Verifies & stores a user's subclient-specific token."""
@@ -37,13 +48,6 @@ def store_subclient_token():
                    'subclient_user_id': str(db_user['_id'])}), status


def blender_id_endpoint():
    """Gets the endpoint for the authentication API. If the env variable
    is defined, it's possible to override the (default) production address.
    """
    return current_app.config['BLENDER_ID_ENDPOINT'].rstrip('/')


def validate_create_user(blender_id_user_id, token, oauth_subclient_id):
    """Validates a user against Blender ID, creating the user in our database.

@@ -64,78 +68,23 @@ def validate_create_user(blender_id_user_id, token, oauth_subclient_id):
    # Blender ID can be queried without user ID, and will always include the
    # correct user ID in its response.
    log.debug('Obtained user info from Blender ID: %s', user_info)
    blender_id_user_id = user_info['id']

    # Store the user info in MongoDB.
    db_user = find_user_in_db(blender_id_user_id, user_info)
    db_id, status = upsert_user(db_user, blender_id_user_id)
    db_user = find_user_in_db(user_info)
    db_id, status = upsert_user(db_user)

    # Store the token in MongoDB.
    authentication.store_token(db_id, token, token_expiry, oauth_subclient_id)
    ip_based_roles = current_app.org_manager.roles_for_request()
    authentication.store_token(db_id, token, token_expiry, oauth_subclient_id,
                               org_roles=ip_based_roles)

    if current_app.org_manager is not None:
        roles = current_app.org_manager.refresh_roles(db_id)
        db_user['roles'] = list(roles)

    return db_user, status


def upsert_user(db_user, blender_id_user_id):
    """Inserts/updates the user in MongoDB.

    Retries a few times when there are uniqueness issues in the username.

    :returns: the user's database ID and the status of the PUT/POST.
        The status is 201 on insert, and 200 on update.
    :type: (ObjectId, int)
    """

    if u'subscriber' in db_user.get('groups', []):
        log.error('Non-ObjectID string found in user.groups: %s', db_user)
        raise wz_exceptions.InternalServerError('Non-ObjectID string found in user.groups: %s' % db_user)

    r = {}
    for retry in range(5):
        if '_id' in db_user:
            # Update the existing user
            attempted_eve_method = 'PUT'
            db_id = db_user['_id']
            r, _, _, status = current_app.put_internal('users', remove_private_keys(db_user),
                                                       _id=db_id)
            if status == 422:
                log.error('Status %i trying to PUT user %s with values %s, should not happen! %s',
                          status, db_id, remove_private_keys(db_user), r)
        else:
            # Create a new user, retry for non-unique usernames.
            attempted_eve_method = 'POST'
            r, _, _, status = current_app.post_internal('users', db_user)

            if status not in {200, 201}:
                log.error('Status %i trying to create user for BlenderID %s with values %s: %s',
                          status, blender_id_user_id, db_user, r)
                raise wz_exceptions.InternalServerError()

            db_id = r['_id']
            db_user.update(r)  # update with database/eve-generated fields.

        if status == 422:
            # Probably non-unique username, so retry a few times with different usernames.
            log.info('Error creating new user: %s', r)
            username_issue = r.get('_issues', {}).get(u'username', '')
            if u'not unique' in username_issue:
                # Retry
                db_user['username'] = authentication.make_unique_username(db_user['email'])
                continue

        # Saving was successful, or at least didn't break on a non-unique username.
        break
    else:
        log.error('Unable to create new user %s: %s', db_user, r)
        raise wz_exceptions.InternalServerError()

    if status not in (200, 201):
        log.error('internal response from %s to Eve: %r %r', attempted_eve_method, status, r)
        raise wz_exceptions.InternalServerError()

    return db_id, status


def validate_token(user_id, token, oauth_subclient_id):
    """Verifies a subclient token with Blender ID.

@@ -159,23 +108,39 @@ def validate_token(user_id, token, oauth_subclient_id):
    payload = {'user_id': user_id,
               'token': token}
    if oauth_subclient_id:
        # If the subclient ID is set, the token belongs to another OAuth Client,
        # in which case we do not set the client_id field.
        payload['subclient_id'] = oauth_subclient_id
    else:
        # We only want to accept Blender Cloud tokens.
        payload['client_id'] = current_app.config['OAUTH_CREDENTIALS']['blender-id']['id']

    url = '{0}/u/validate_token'.format(blender_id_endpoint())
    blender_id_endpoint = current_app.config['BLENDER_ID_ENDPOINT']
    url = urljoin(blender_id_endpoint, 'u/validate_token')
    log.debug('POSTing to %r', url)

    # Retry a few times when POSTing to BlenderID fails.
    # Source: http://stackoverflow.com/a/15431343/875379
    s = requests.Session()
    s.mount(blender_id_endpoint(), HTTPAdapter(max_retries=5))
    s.mount(blender_id_endpoint, HTTPAdapter(max_retries=5))

    # POST to Blender ID, handling errors as negative verification results.
    try:
        r = s.post(url, data=payload, timeout=5,
                   verify=current_app.config['TLS_CERT_FILE'])
    except requests.exceptions.ConnectionError as e:
    except requests.exceptions.ConnectionError:
        log.error('Connection error trying to POST to %s, handling as invalid token.', url)
        return None, None
    except requests.exceptions.ReadTimeout:
        log.error('Read timeout trying to POST to %s, handling as invalid token.', url)
        return None, None
    except requests.exceptions.RequestException as ex:
        log.error('Requests error "%s" trying to POST to %s, handling as invalid token.', ex, url)
        return None, None
    except IOError as ex:
        log.error('Unknown I/O error "%s" trying to POST to %s, handling as invalid token.',
                  ex, url)
        return None, None

    if r.status_code != 200:
        log.debug('Token %s invalid, HTTP status %i returned', token, r.status_code)
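As context for the retry setup above: requests applies an adapter's max_retries to every URL under the mounted prefix, which is why the session is mounted on the Blender ID endpoint itself. A standalone sketch (endpoint URL invented; note that an int max_retries does not retry HTTP error responses):

import requests
from requests.adapters import HTTPAdapter

sess = requests.Session()
# Requests to anything under this prefix get up to 5 retries.
sess.mount('https://id.example.com/', HTTPAdapter(max_retries=5))
resp = sess.post('https://id.example.com/u/validate_token',
                 data={'user_id': 42, 'token': 'secret'}, timeout=5)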
@@ -199,43 +164,108 @@ def _compute_token_expiry(token_expires_string):
    the token.
    """

    date_format = current_app.config['RFC1123_DATE_FORMAT']
    blid_expiry = datetime.datetime.strptime(token_expires_string, date_format)
    blid_expiry = blid_expiry.replace(tzinfo=tz_util.utc)
    our_expiry = datetime.datetime.now(tz=tz_util.utc) + datetime.timedelta(hours=1)
    # requirement is called python-dateutil, so PyCharm doesn't find it.
    # noinspection PyPackageRequirements
    from dateutil import parser

    blid_expiry = parser.parse(token_expires_string)
    blid_expiry = blid_expiry.astimezone(tz_util.utc)
    our_expiry = utcnow() + datetime.timedelta(hours=1)

    return min(blid_expiry, our_expiry)
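The net effect of _compute_token_expiry() is a clamp: however far in the future Blender ID says the token expires, the locally stored expiry is never more than one hour away. A minimal sketch of the same dateutil-based computation (timestamp invented):

import datetime
from bson import tz_util
from dateutil import parser

blid_expiry = parser.parse('Fri, 31 Dec 2027 23:59:59 GMT').astimezone(tz_util.utc)
our_expiry = datetime.datetime.now(tz=tz_util.utc) + datetime.timedelta(hours=1)
token_expiry = min(blid_expiry, our_expiry)  # here: one hour from now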
def find_user_in_db(blender_id_user_id, user_info):
    """Find the user in our database, creating/updating the returned document where needed.
def get_user_blenderid(db_user: dict) -> str:
    """Returns the Blender ID user ID for this Pillar user.

    Does NOT update the user in the database.
    Takes the string from 'auth.*.user_id' for the '*' where 'provider'
    is 'blender-id'.

    :returns: the user ID, or the empty string when the user has none.
    """
 | 
			
		||||
 | 
			
		||||
    users = current_app.data.driver.db['users']
 | 
			
		||||
    bid_user_ids = [auth['user_id']
 | 
			
		||||
                    for auth in db_user['auth']
 | 
			
		||||
                    if auth['provider'] == 'blender-id']
 | 
			
		||||
    try:
 | 
			
		||||
        return bid_user_ids[0]
 | 
			
		||||
    except IndexError:
 | 
			
		||||
        return ''
 | 
			
		||||
 | 
			
		||||
    query = {'auth': {'$elemMatch': {'user_id': str(blender_id_user_id),
 | 
			
		||||
                                     'provider': 'blender-id'}}}
 | 
			
		||||
    log.debug('Querying: %s', query)
 | 
			
		||||
    db_user = users.find_one(query)
 | 
			
		||||
 | 
			
		||||
    if db_user:
 | 
			
		||||
        log.debug('User blender_id_user_id=%r already in our database, '
 | 
			
		||||
                  'updating with info from Blender ID.', blender_id_user_id)
 | 
			
		||||
        db_user['email'] = user_info['email']
 | 
			
		||||
    else:
 | 
			
		||||
        log.debug('User %r not yet in our database, create a new one.', blender_id_user_id)
 | 
			
		||||
        db_user = authentication.create_new_user_document(
 | 
			
		||||
            email=user_info['email'],
 | 
			
		||||
            user_id=blender_id_user_id,
 | 
			
		||||
            username=user_info['full_name'])
 | 
			
		||||
        db_user['username'] = authentication.make_unique_username(user_info['email'])
 | 
			
		||||
        if not db_user['full_name']:
 | 
			
		||||
            db_user['full_name'] = db_user['username']
 | 
			
		||||
def fetch_blenderid_user() -> dict:
 | 
			
		||||
    """Returns the user info of the currently logged in user from BlenderID.
 | 
			
		||||
 | 
			
		||||
    return db_user
 | 
			
		||||
    Returns an empty dict if communication fails.
 | 
			
		||||
 | 
			
		||||
    Example dict:
 | 
			
		||||
    {
 | 
			
		||||
         "email": "some@email.example.com",
 | 
			
		||||
         "full_name": "dr. Sybren A. St\u00fcvel",
 | 
			
		||||
         "id": 5555,
 | 
			
		||||
         "roles": {
 | 
			
		||||
           "admin": true,
 | 
			
		||||
           "bfct_trainer": false,
 | 
			
		||||
           "cloud_has_subscription": true,
 | 
			
		||||
           "cloud_subscriber": true,
 | 
			
		||||
           "conference_speaker": true,
 | 
			
		||||
           "network_member": true
 | 
			
		||||
         }
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    :raises LogoutUser: when Blender ID tells us the current token is
 | 
			
		||||
        invalid, and the user should be logged out.
 | 
			
		||||
    """
 | 
			
		||||
    import httplib2  # used by the oauth2 package
 | 
			
		||||
 | 
			
		||||
    my_log = log.getChild('fetch_blenderid_user')
 | 
			
		||||
 | 
			
		||||
    bid_url = urljoin(current_app.config['BLENDER_ID_ENDPOINT'], 'api/user')
 | 
			
		||||
    my_log.debug('Fetching user info from %s', bid_url)
 | 
			
		||||
 | 
			
		||||
    credentials = current_app.config['OAUTH_CREDENTIALS']['blender-id']
 | 
			
		||||
    oauth_token = session.get('blender_id_oauth_token')
 | 
			
		||||
    if not oauth_token:
 | 
			
		||||
        my_log.warning('no Blender ID oauth token found in user session')
 | 
			
		||||
        return {}
 | 
			
		||||
 | 
			
		||||
    assert isinstance(oauth_token, str), f'oauth token must be str, not {type(oauth_token)}'
 | 
			
		||||
 | 
			
		||||
    oauth_session = OAuth2Session(
 | 
			
		||||
        credentials['id'], credentials['secret'],
 | 
			
		||||
        access_token=oauth_token)
 | 
			
		||||
 | 
			
		||||
    try:
 | 
			
		||||
        bid_resp = oauth_session.get(bid_url)
 | 
			
		||||
    except httplib2.HttpLib2Error:
 | 
			
		||||
        my_log.exception('Error getting %s from BlenderID', bid_url)
 | 
			
		||||
        return {}
 | 
			
		||||
 | 
			
		||||
    if bid_resp.status_code == 403:
 | 
			
		||||
        my_log.warning('Error %i from BlenderID %s, logging out user', bid_resp.status_code, bid_url)
 | 
			
		||||
        raise LogoutUser()
 | 
			
		||||
 | 
			
		||||
    if bid_resp.status_code != 200:
 | 
			
		||||
        my_log.warning('Error %i from BlenderID %s: %s', bid_resp.status_code, bid_url, bid_resp.text)
 | 
			
		||||
        return {}
 | 
			
		||||
 | 
			
		||||
    payload = bid_resp.json()
 | 
			
		||||
    if not payload:
 | 
			
		||||
        my_log.warning('Empty data returned from BlenderID %s', bid_url)
 | 
			
		||||
        return {}
 | 
			
		||||
 | 
			
		||||
    my_log.debug('BlenderID returned %s', payload)
 | 
			
		||||
    return payload
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def setup_app(app, url_prefix):
 | 
			
		||||
    app.register_api_blueprint(blender_id, url_prefix=url_prefix)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def switch_user_url(next_url: str) -> str:
 | 
			
		||||
    from urllib.parse import quote
 | 
			
		||||
 | 
			
		||||
    base_url = urljoin(current_app.config['BLENDER_ID_ENDPOINT'], 'switch')
 | 
			
		||||
    if next_url:
 | 
			
		||||
        return '%s?next=%s' % (base_url, quote(next_url))
 | 
			
		||||
    return base_url
 | 
			
		||||
 
 | 
			
		||||
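For reference, switch_user_url() just percent-encodes the next URL into the query string; a quick illustration with an invented endpoint:

from urllib.parse import quote, urljoin

base_url = urljoin('https://id.example.com/', 'switch')
print('%s?next=%s' % (base_url, quote('/settings/profile?tab=2')))
# -> https://id.example.com/switch?next=/settings/profile%3Ftab%3D2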
@@ -1,14 +1,42 @@
import copy
from datetime import datetime
import logging

from bson import ObjectId, tz_util
from datetime import datetime, tzinfo
from eve.io.mongo import Validator
from flask import current_app

import pillar.markdown

log = logging.getLogger(__name__)


class ValidateCustomFields(Validator):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # Will be a reference to the actual document being validated, so that we can
        # modify it during validation.
        self.__real_document = None

    def validate(self, document, *args, **kwargs):
        # Keep a reference to the actual document, because Cerberus validates copies.
        self.__real_document = document
        result = super().validate(document, *args, **kwargs)

        # Store the in-place modified document as self.document, so that Eve's post_internal
        # can actually pick it up as the validated document. We need to make a copy so that
        # further modifications (like setting '_etag' etc.) aren't done in-place.
        self.document = copy.deepcopy(document)

        return result

    def _get_child_validator(self, *args, **kwargs):
        child = super()._get_child_validator(*args, **kwargs)
        # Pass along our reference to the actual document.
        child.__real_document = self.__real_document
        return child

    # TODO: split this into a convert_property(property, schema) and call that from this function.
    def convert_properties(self, properties, node_schema):
        """Converts datetime strings and ObjectId strings to actual Python objects."""
@@ -61,15 +89,20 @@ class ValidateCustomFields(Validator):
        Only validates the dict values, not the keys. Modifies the given dict in-place.
        """

        assert dict_valueschema[u'type'] == u'dict'
        assert dict_valueschema['type'] == 'dict'
        assert isinstance(dict_property, dict)

        for key, val in dict_property.items():
            item_schema = {u'item': dict_valueschema}
            item_prop = {u'item': val}
            dict_property[key] = self.convert_properties(item_prop, item_schema)[u'item']
            item_schema = {'item': dict_valueschema}
            item_prop = {'item': val}
            dict_property[key] = self.convert_properties(item_prop, item_schema)['item']

    def _validate_valid_properties(self, valid_properties, field, value):
        """Fake property that triggers node dynamic property validation.

        The rule's arguments are validated against this schema:
        {'type': 'boolean'}
        """
        from pillar.api.utils import project_get_node_type

        projects_collection = current_app.data.driver.db['projects']
@@ -102,6 +135,10 @@ class ValidateCustomFields(Validator):
        val = v.validate(value)

        if val:
            # This ensures the modifications made by v's coercion rules are
            # visible to this validator's output.
            # TODO(fsiddi): this no longer works due to Cerberus internal changes.
            # self.current[field] = v.current
            return True

        log.warning('Error validating properties for node %s: %s', self.document, v.errors)
@@ -112,6 +149,9 @@ class ValidateCustomFields(Validator):

        Combine "required_after_creation=True" with "required=False" to allow
        pre-insert hooks to set default values.

        The rule's arguments are validated against this schema:
        {'type': 'boolean'}
        """

        if not required_after_creation:
@@ -119,9 +159,81 @@ class ValidateCustomFields(Validator):
            # validator at all.
            return

        if self._id is None:
        if self.document_id is None:
            # This is a creation call, in which case this validator shouldn't run.
            return

        if not value:
            self._error(field, "Value is required once the document was created")

    def _validator_iprange(self, field_name: str, value: str):
        """Ensure the field contains a valid IP address.

        Supports both IPv6 and IPv4 ranges. Requires the IPy module.
        """

        from IPy import IP

        try:
            ip = IP(value, make_net=True)
        except ValueError as ex:
            self._error(field_name, str(ex))
            return

        if ip.prefixlen() == 0:
            self._error(field_name, 'Zero-length prefix is not allowed')

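    # For context, IPy's make_net=True is what lets _validator_iprange() accept a
    # range even when host bits are set; a short sketch of the calls used above:
    #
    #     from IPy import IP
    #     ip = IP('192.168.0.1/16', make_net=True)  # normalised to the enclosing network
    #     assert str(ip) == '192.168.0.0/16'
    #     assert ip.prefixlen() == 16
    #     assert IP('::/0', make_net=True).prefixlen() == 0  # rejected above: zero-length prefix
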
    def _validator_markdown(self, field, value):
        """Convert MarkDown.
        """
        my_log = log.getChild('_validator_markdown')

        # Find this field inside the original document
        my_subdoc = self._subdoc_in_real_document()
        if my_subdoc is None:
            # If self.update==True we are validating an update document, which
            # may not contain all fields, so then a missing field is fine.
            if not self.update:
                self._error(field, f'validator_markdown: unable to find sub-document '
                                   f'for path {self.document_path}')
            return

        my_log.debug('validating field %r with value %r', field, value)
        save_to = pillar.markdown.cache_field_name(field)
        html = pillar.markdown.markdown(value)
        my_log.debug('saving result to %r in doc with id %s', save_to, id(my_subdoc))
        my_subdoc[save_to] = html

    def _subdoc_in_real_document(self):
        """Return a reference to the current sub-document inside the real document.

        This allows modification of the document being validated.
        """
        my_subdoc = getattr(self, 'persisted_document') or self.__real_document
        for item in self.document_path:
            my_subdoc = my_subdoc[item]
        return my_subdoc


if __name__ == '__main__':
    from pprint import pprint

    v = ValidateCustomFields()
    v.schema = {
        'foo': {'type': 'string', 'validator': 'markdown'},
        'foo_html': {'type': 'string'},
        'nested': {
            'type': 'dict',
            'schema': {
                'bar': {'type': 'string', 'validator': 'markdown'},
                'bar_html': {'type': 'string'},
            }
        }
    }
    print('Valid   :', v.validate({
        'foo': '# Title\n\nHeyyyy',
        'nested': {'bar': 'bhahaha'},
    }))
    print('Document:')
    pprint(v.document)
    print('Errors  :', v.errors)

@@ -1,15 +1,16 @@
import logging

import datetime
import json
import logging
import os
from bson import ObjectId, tz_util

from bson import ObjectId
from flask import Blueprint
from flask import abort
from flask import current_app
from flask import request

from pillar.api import utils
from pillar.api.utils.gcs import GoogleCloudStorageBucket
from pillar.api.utils import skip_when_testing
from pillar.api.file_storage_backends import Bucket

encoding = Blueprint('encoding', __name__)
log = logging.getLogger(__name__)
@@ -32,6 +33,7 @@ def size_descriptor(width, height):
        1280: '720p',
        1920: '1080p',
        2048: '2k',
        3840: 'UHD',
        4096: '4k',
    }

@@ -42,13 +44,6 @@ def size_descriptor(width, height):
    return '%ip' % height

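To make the table above concrete: assuming the elided lookup keys on width, as the dict suggests, the behaviour is equivalent to this condensed re-statement (illustration only):

def size_descriptor_sketch(width, height):
    names = {1280: '720p', 1920: '1080p', 2048: '2k', 3840: 'UHD', 4096: '4k'}
    return names.get(width) or '%ip' % height  # unknown widths fall back to e.g. '700p'

assert size_descriptor_sketch(3840, 2160) == 'UHD'
assert size_descriptor_sketch(1234, 700) == '700p'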
@skip_when_testing
def rename_on_gcs(bucket_name, from_path, to_path):
    gcs = GoogleCloudStorageBucket(str(bucket_name))
    blob = gcs.bucket.blob(from_path)
    gcs.bucket.rename_blob(blob, to_path)


@encoding.route('/zencoder/notifications', methods=['POST'])
def zencoder_notifications():
    """
@@ -102,25 +97,24 @@ def zencoder_notifications():
    file_doc['processing']['status'] = job_state

    if job_state == 'failed':
        log.warning('Zencoder job %i for file %s failed.', zencoder_job_id, file_id)
        # Log what Zencoder told us went wrong.
        for output in data['outputs']:
            if not any('error' in key for key in output):
                continue
            log.warning('Errors for output %s:', output['url'])
            for key in output:
                if 'error' in key:
                    log.info('    %s: %s', key, output[key])
        log.warning('Zencoder job %s for file %s failed: %s', zencoder_job_id, file_id,
                    json.dumps(data, sort_keys=True, indent=4))

        file_doc['status'] = 'failed'
        current_app.put_internal('files', file_doc, _id=file_id)

        # This is 'okay' because we handled the Zencoder notification properly.
        return "You failed, but that's okay.", 200

    log.info('Zencoder job %s for file %s completed with status %s.', zencoder_job_id, file_id,
             job_state)

    # For every variation encoded, try to update the file object
    root, _ = os.path.splitext(file_doc['file_path'])
    storage_name, _ = os.path.splitext(file_doc['file_path'])
    nice_name, _ = os.path.splitext(file_doc['filename'])

    bucket_class = Bucket.for_backend(file_doc['backend'])
    bucket = bucket_class(str(file_doc['project']))

    for output in data['outputs']:
        video_format = output['format']
@@ -141,16 +135,16 @@ def zencoder_notifications():

        # Rename the file to include the now-known size descriptor.
        size = size_descriptor(output['width'], output['height'])
        new_fname = '{}-{}.{}'.format(root, size, video_format)
        new_fname = f'{storage_name}-{size}.{video_format}'

        # Rename on Google Cloud Storage
        # Rename the file on the storage.
        blob = bucket.blob(variation['file_path'])
        try:
            rename_on_gcs(file_doc['project'],
                          '_/' + variation['file_path'],
                          '_/' + new_fname)
            new_blob = bucket.rename_blob(blob, new_fname)
            new_blob.update_filename(f'{nice_name}-{size}.{video_format}')
        except Exception:
            log.warning('Unable to rename GCS blob %r to %r. Keeping old name.',
                        variation['file_path'], new_fname, exc_info=True)
            log.warning('Unable to rename blob %r to %r. Keeping old name.',
                        blob, new_fname, exc_info=True)
        else:
            variation['file_path'] = new_fname

@@ -167,9 +161,12 @@ def zencoder_notifications():
    file_doc['status'] = 'complete'

    # Force an update of the links on the next load of the file.
    file_doc['link_expires'] = datetime.datetime.now(tz=tz_util.utc) - datetime.timedelta(days=1)
    file_doc['link_expires'] = utils.utcnow() - datetime.timedelta(days=1)

    current_app.put_internal('files', file_doc, _id=file_id)
    r, _, _, status = current_app.put_internal('files', file_doc, _id=file_id)
    if status != 200:
        log.error('unable to save file %s after Zencoder notification: %s', file_id, r)
        return json.dumps(r), 500

    return '', 204

@@ -88,8 +88,8 @@ users_schema = {
        }
    },
    'auth': {
        # Storage of authentication credentials (one will be able to auth with
        # multiple providers on the same account)
        # Storage of authentication credentials (one will be able to auth with multiple providers on
        # the same account)
        'type': 'list',
        'required': True,
        'schema': {
@@ -97,13 +97,12 @@ users_schema = {
            'schema': {
                'provider': {
                    'type': 'string',
                    'allowed': ["blender-id", "local"],
                    'allowed': ['local', 'blender-id', 'facebook', 'google'],
                },
                'user_id': {
                    'type': 'string'
                },
                # A token is considered a "password" in case the provider is
                # "local".
                # A token is considered a "password" in case the provider is "local".
                'token': {
                    'type': 'string'
                }
@@ -122,13 +121,59 @@ users_schema = {
    'service': {
        'type': 'dict',
        'allow_unknown': True,
    },

    # Node-specific information for this user.
    'nodes': {
        'type': 'dict',
        'schema': {
            'badger': {
                'type': 'list',
                'schema': {'type': 'string'}
            }
        }
    }
            # Per watched video info about where the user left off, both in time and in percent.
            'view_progress': {
                'type': 'dict',
                # Keyed by Node ID of the video asset. MongoDB doesn't support using
                # ObjectIds as key, so we cast them to string instead.
                'keyschema': {'type': 'string'},
                'valueschema': {
                    'type': 'dict',
                    'schema': {
                        'progress_in_sec': {'type': 'float', 'min': 0},
                        'progress_in_percent': {'type': 'integer', 'min': 0, 'max': 100},

                        # When the progress was last updated, so we can limit this history to
                        # the last-watched N videos if we want, or show stuff in chrono order.
                        'last_watched': {'type': 'datetime'},

                        # True means progress_in_percent = 100, for easy querying
                        'done': {'type': 'boolean', 'default': False},
                    },
                },
            },

        },
    },

    'badges': {
        'type': 'dict',
        'schema': {
            'html': {'type': 'string'},  # HTML fetched from Blender ID.
            'expires': {'type': 'datetime'},  # When we should fetch it again.
        },
    },

    # Properties defined by extensions. Extensions should use their name (see the
    # PillarExtension.name property) as the key, and are free to use whatever they want as value,
    # but we suggest a dict for future extendability.
    # Properties can be of two types:
    # - public: they will be visible to the world (for example as part of the User.find() query)
    # - private: visible only to their user
    'extension_props_public': {
        'type': 'dict',
        'required': False,
    },
    'extension_props_private': {
        'type': 'dict',
        'required': False,
    },
}
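To make the new 'view_progress' part of users_schema concrete, here is a hypothetical user-document fragment that would validate against it (node ID and numbers invented):

import datetime
from bson import tz_util

user_doc_fragment = {
    'nodes': {
        'view_progress': {
            # keyed by str(ObjectId) of the video node, since MongoDB keys must be strings
            '5a0e8d6b4f0c4d2a9b3c1d2e': {
                'progress_in_sec': 322.5,
                'progress_in_percent': 80,
                'last_watched': datetime.datetime(2018, 3, 1, 12, 0, tzinfo=tz_util.utc),
                'done': False,
            },
        },
    },
}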

organizations_schema = {
@@ -138,19 +183,12 @@ organizations_schema = {
        'maxlength': 128,
        'required': True
    },
    'email': {
        'type': 'string'
    },
    'url': {
        'type': 'string',
        'minlength': 1,
        'maxlength': 128,
        'required': True
    },
    'description': {
        'type': 'string',
        'maxlength': 256,
        'validator': 'markdown',
    },
    '_description_html': {'type': 'string'},
    'website': {
        'type': 'string',
        'maxlength': 256,
@@ -162,7 +200,15 @@ organizations_schema = {
    'picture': dict(
        nullable=True,
        **_file_embedded_schema),
    'users': {
    'admin_uid': {
        'type': 'objectid',
        'data_relation': {
            'resource': 'users',
            'field': '_id',
        },
        'required': True,
    },
    'members': {
        'type': 'list',
        'default': [],
        'schema': {
@@ -170,51 +216,52 @@ organizations_schema = {
            'data_relation': {
                'resource': 'users',
                'field': '_id',
                'embeddable': True
            }
        }
    },
    'teams': {
    'unknown_members': {
        'type': 'list',  # of email addresses of yet-to-register users.
        'default': [],
        'schema': {
            'type': 'string',
        },
    },

    # Maximum size of the organization, i.e. len(members) + len(unknown_members) may
    # not exceed this.
    'seat_count': {
        'type': 'integer',
        'required': True,
    },

    # Roles that the members of this organization automatically get.
    'org_roles': {
        'type': 'list',
        'default': [],
        'schema': {
            'type': 'string',
        },
    },

    # Identification of the subscription that pays for this organisation
    # in an external subscription/payment management system.
    'payment_subscription_id': {
        'type': 'string',
    },

    'ip_ranges': {
        'type': 'list',
        'schema': {
            'type': 'dict',
            'schema': {
                # Team name
                'name': {
                    'type': 'string',
                    'minlength': 1,
                    'maxlength': 128,
                    'required': True
                },
                # List of user ids for the team
                'users': {
                    'type': 'list',
                    'default': [],
                    'schema': {
                        'type': 'objectid',
                        'data_relation': {
                            'resource': 'users',
                            'field': '_id',
                        }
                    }
                },
                # List of groups assigned to the team (this will automatically
                # update the groups property of each user in the team)
                'groups': {
                    'type': 'list',
                    'default': [],
                    'schema': {
                        'type': 'objectid',
                        'data_relation': {
                            'resource': 'groups',
                            'field': '_id',
                        }
                    }
                }
                # see _validate_type_{typename} in ValidateCustomFields:
                'start': {'type': 'binary', 'required': True},
                'end': {'type': 'binary', 'required': True},
                'prefix': {'type': 'integer', 'required': True},
                'human': {'type': 'string', 'required': True, 'validator': 'iprange'},
            }
        }
    }
        },
    },
}

permissions_embedded_schema = {
@@ -276,7 +323,9 @@ nodes_schema = {
    },
    'description': {
        'type': 'string',
        'validator': 'markdown',
    },
    '_description_html': {'type': 'string'},
    'picture': _file_embedded_schema,
    'order': {
        'type': 'integer',
@@ -309,7 +358,7 @@ nodes_schema = {
    'properties': {
        'type': 'dict',
        'valid_properties': True,
        'required': True,
        'required': True
    },
    'permissions': {
        'type': 'dict',
@@ -329,6 +378,10 @@ tokens_schema = {
        'type': 'string',
        'required': True,
    },
    'token_hashed': {
        'type': 'string',
        'required': False,
    },
    'expire_time': {
        'type': 'datetime',
        'required': True,
@@ -336,6 +389,22 @@ tokens_schema = {
    'is_subclient_token': {
        'type': 'boolean',
        'required': False,
    },

    # Roles this user gets while this token is valid.
    'org_roles': {
        'type': 'list',
        'default': [],
        'schema': {
            'type': 'string',
        },
    },

    # OAuth scopes granted to this token.
    'oauth_scopes': {
        'type': 'list',
        'default': [],
        'schema': {'type': 'string'},
    }
}
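Similarly, a hypothetical token document using the fields added to tokens_schema above (values invented; the string field preceding 'token_hashed' is assumed to be 'token', and fields not shown in this hunk are omitted). Note that org_roles and oauth_scopes simply default to empty lists when absent:

import datetime
from bson import tz_util

token_doc_fragment = {
    'token': 'xyzzy-invented-token',
    'expire_time': datetime.datetime(2018, 6, 1, tzinfo=tz_util.utc),
    'is_subclient_token': False,
    'org_roles': ['org-subscriber'],  # granted only while this token is valid
    'oauth_scopes': [],               # the default
}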

@@ -394,7 +463,7 @@ files_schema = {
    'backend': {
        'type': 'string',
        'required': True,
        'allowed': ["attract-web", "pillar", "cdnsun", "gcs", "unittest"]
        'allowed': ["local", "pillar", "cdnsun", "gcs", "unittest"]
    },

    # Where the file is in the backend storage itself. In the case of GCS,
@@ -508,7 +577,9 @@ projects_schema = {
    },
    'description': {
        'type': 'string',
        'validator': 'markdown',
    },
    '_description_html': {'type': 'string'},
    # Short summary for the project
    'summary': {
        'type': 'string',
@@ -534,8 +605,9 @@ projects_schema = {
    'category': {
        'type': 'string',
        'allowed': [
            'training',
            'course',
            'film',
            'workshop',
            'assets',
            'software',
            'game',
@@ -718,13 +790,9 @@ users = {
    'cache_expires': 10,

    'resource_methods': ['GET'],
    'item_methods': ['GET', 'PUT', 'PATCH'],
    'item_methods': ['GET', 'PUT'],
    'public_item_methods': ['GET'],

    # By default don't include the 'auth' field. It can still be obtained
    # using projections, though, so we block that in hooks.
    'datasource': {'projection': {u'auth': 0}},

    'schema': users_schema
}

@@ -738,11 +806,12 @@ tokens = {
}

files = {
    'schema': files_schema,
    'resource_methods': ['GET', 'POST'],
    'item_methods': ['GET', 'PATCH'],
    'public_methods': ['GET'],
    'public_item_methods': ['GET'],
    'schema': files_schema
    'soft_delete': True,
}

groups = {
@@ -754,8 +823,11 @@ groups = {

organizations = {
    'schema': organizations_schema,
    'public_item_methods': ['GET'],
    'public_methods': ['GET']
    'resource_methods': ['GET', 'POST'],
    'item_methods': ['GET'],
    'public_item_methods': [],
    'public_methods': [],
    'soft_delete': True,
}

projects = {
@@ -799,4 +871,9 @@ UPSET_ON_PUT = False  # do not create new document on PUT of non-existent URL.
X_DOMAINS = '*'
 | 
			
		||||
X_ALLOW_CREDENTIALS = True
 | 
			
		||||
X_HEADERS = 'Authorization'
 | 
			
		||||
XML = False
 | 
			
		||||
RENDERERS = ['eve.render.JSONRenderer']
 | 
			
		||||
 | 
			
		||||
# TODO(Sybren): this is a quick workaround to make /p/{url}/jstree work again.
 | 
			
		||||
# Apparently Eve is now stricter in checking against MONGO_QUERY_BLACKLIST, and
 | 
			
		||||
# blocks our use of $regex.
 | 
			
		||||
MONGO_QUERY_BLACKLIST = ['$where']
 | 
			
		||||
 
 | 
			
		||||
@@ -1,32 +1,37 @@
 | 
			
		||||
import datetime
 | 
			
		||||
import io
 | 
			
		||||
import logging
 | 
			
		||||
import mimetypes
 | 
			
		||||
import os
 | 
			
		||||
import pathlib
 | 
			
		||||
import tempfile
 | 
			
		||||
import typing
 | 
			
		||||
import uuid
 | 
			
		||||
from hashlib import md5
 | 
			
		||||
import os
 | 
			
		||||
import requests
 | 
			
		||||
import bson.tz_util
 | 
			
		||||
import datetime
 | 
			
		||||
 | 
			
		||||
import eve.utils
 | 
			
		||||
import pymongo
 | 
			
		||||
import werkzeug.exceptions as wz_exceptions
 | 
			
		||||
import werkzeug.datastructures
 | 
			
		||||
 | 
			
		||||
from bson import ObjectId
 | 
			
		||||
from flask import Blueprint
 | 
			
		||||
from flask import current_app
 | 
			
		||||
from flask import g
 | 
			
		||||
from flask import jsonify
 | 
			
		||||
from flask import request
 | 
			
		||||
from flask import send_from_directory
 | 
			
		||||
from flask import url_for, helpers
 | 
			
		||||
 | 
			
		||||
from pillar.api import utils
 | 
			
		||||
from pillar.api.utils.imaging import generate_local_thumbnails
 | 
			
		||||
from pillar.api.utils import remove_private_keys, authentication
 | 
			
		||||
from pillar.api.utils.authorization import require_login, user_has_role, \
 | 
			
		||||
from pillar.api.file_storage_backends.gcs import GoogleCloudStorageBucket, \
 | 
			
		||||
    GoogleCloudStorageBlob
 | 
			
		||||
from pillar.api.utils import remove_private_keys, imaging
 | 
			
		||||
from pillar.api.utils.authorization import require_login, \
 | 
			
		||||
    user_matches_roles
 | 
			
		||||
from pillar.api.utils.cdn import hash_file_path
 | 
			
		||||
from pillar.api.utils.encoding import Encoder
 | 
			
		||||
from pillar.api.utils.gcs import GoogleCloudStorageBucket
 | 
			
		||||
from pillar.api.file_storage_backends import default_storage_backend, Bucket
 | 
			
		||||
from pillar.auth import current_user
 | 
			
		||||
 | 
			
		||||
log = logging.getLogger(__name__)
 | 
			
		||||
 | 
			
		||||
@@ -45,31 +50,6 @@ mimetypes.add_type('application/x-radiance-hdr', '.hdr')
 | 
			
		||||
mimetypes.add_type('application/x-exr', '.exr')
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@file_storage.route('/gcs/<bucket_name>/<subdir>/')
 | 
			
		||||
@file_storage.route('/gcs/<bucket_name>/<subdir>/<path:file_path>')
 | 
			
		||||
def browse_gcs(bucket_name, subdir, file_path=None):
 | 
			
		||||
    """Browse the content of a Google Cloud Storage bucket"""
 | 
			
		||||
 | 
			
		||||
    # Initialize storage client
 | 
			
		||||
    storage = GoogleCloudStorageBucket(bucket_name, subdir=subdir)
 | 
			
		||||
    if file_path:
 | 
			
		||||
        # If we provided a file_path, we try to fetch it
 | 
			
		||||
        file_object = storage.Get(file_path)
 | 
			
		||||
        if file_object:
 | 
			
		||||
            # If it exists, return file properties in a dictionary
 | 
			
		||||
            return jsonify(file_object)
 | 
			
		||||
        else:
 | 
			
		||||
            listing = storage.List(file_path)
 | 
			
		||||
            return jsonify(listing)
 | 
			
		||||
            # We always return an empty listing even if the directory does not
 | 
			
		||||
            # exist. This can be changed later.
 | 
			
		||||
            # return abort(404)
 | 
			
		||||
 | 
			
		||||
    else:
 | 
			
		||||
        listing = storage.List('')
 | 
			
		||||
        return jsonify(listing)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@file_storage.route('/file', methods=['POST'])
 | 
			
		||||
@file_storage.route('/file/<path:file_name>', methods=['GET', 'POST'])
 | 
			
		||||
def index(file_name=None):
 | 
			
		||||
@@ -103,7 +83,10 @@ def index(file_name=None):
 | 
			
		||||
    return jsonify({'url': url_for('file_storage.index', file_name=file_name)})
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _process_image(gcs, file_id, local_file, src_file):
 | 
			
		||||
def _process_image(bucket: Bucket,
 | 
			
		||||
                   file_id: ObjectId,
 | 
			
		||||
                   local_file: tempfile._TemporaryFileWrapper,
 | 
			
		||||
                   src_file: dict):
 | 
			
		||||
    from PIL import Image
 | 
			
		||||
 | 
			
		||||
    im = Image.open(local_file)
 | 
			
		||||
@@ -113,8 +96,9 @@ def _process_image(gcs, file_id, local_file, src_file):
 | 
			
		||||
 | 
			
		||||
    # Generate previews
 | 
			
		||||
    log.info('Generating thumbnails for file %s', file_id)
 | 
			
		||||
    src_file['variations'] = generate_local_thumbnails(src_file['name'],
 | 
			
		||||
                                                       local_file.name)
 | 
			
		||||
    local_path = pathlib.Path(local_file.name)
 | 
			
		||||
    name_base = pathlib.Path(src_file['name']).stem
 | 
			
		||||
    src_file['variations'] = imaging.generate_local_thumbnails(name_base, local_path)
 | 
			
		||||
 | 
			
		||||
    # Send those previews to Google Cloud Storage.
 | 
			
		||||
    log.info('Uploading %i thumbnails for file %s to Google Cloud Storage '
 | 
			
		||||
@@ -124,12 +108,12 @@ def _process_image(gcs, file_id, local_file, src_file):
 | 
			
		||||
    for variation in src_file['variations']:
 | 
			
		||||
        fname = variation['file_path']
 | 
			
		||||
        if current_app.config['TESTING']:
 | 
			
		||||
            log.warning('  - NOT sending thumbnail %s to GCS', fname)
 | 
			
		||||
            log.warning('  - NOT sending thumbnail %s to %s', fname, bucket)
 | 
			
		||||
        else:
 | 
			
		||||
            log.debug('  - Sending thumbnail %s to GCS', fname)
 | 
			
		||||
            blob = gcs.bucket.blob('_/' + fname, chunk_size=256 * 1024 * 2)
 | 
			
		||||
            blob.upload_from_filename(variation['local_path'],
 | 
			
		||||
                                      content_type=variation['content_type'])
 | 
			
		||||
            blob = bucket.blob(fname)
 | 
			
		||||
            log.debug('  - Sending thumbnail %s to %s', fname, blob)
 | 
			
		||||
            blob.upload_from_path(pathlib.Path(variation['local_path']),
 | 
			
		||||
                                  content_type=variation['content_type'])
 | 
			
		||||
 | 
			
		||||
            if variation.get('size') == 't':
 | 
			
		||||
                blob.make_public()
 | 
			
		||||
@@ -146,11 +130,162 @@ def _process_image(gcs, file_id, local_file, src_file):
 | 
			
		||||
    src_file['status'] = 'complete'


def _process_video(gcs, file_id, local_file, src_file):
    """Video is processed by Zencoder; the file isn't even stored locally."""
def _video_duration_seconds(filename: pathlib.Path) -> typing.Optional[int]:
    """Get the duration of a video file using ffprobe
    https://superuser.com/questions/650291/how-to-get-video-duration-in-seconds

    :param filename: file path to video
    :return: video duration in seconds
    """
    import subprocess

    def run(cli_args):
        if log.isEnabledFor(logging.INFO):
            import shlex
            cmd = ' '.join(shlex.quote(s) for s in cli_args)
            log.info('Calling %s', cmd)

        ffprobe = subprocess.run(
            cli_args,
            stdin=subprocess.DEVNULL,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            timeout=10,  # seconds
        )

        if ffprobe.returncode:
            import shlex
            cmd = ' '.join(shlex.quote(s) for s in cli_args)
            log.error('Error running %s: stopped with return code %i',
                      cmd, ffprobe.returncode)
            log.error('Output was: %s', ffprobe.stdout)
            return None

        try:
            return int(float(ffprobe.stdout))
        except ValueError as e:
            log.exception('ffprobe produced invalid number: %s', ffprobe.stdout)
            return None

    ffprobe_from_container_args = [
        current_app.config['BIN_FFPROBE'],
        '-v', 'error',
        '-show_entries', 'format=duration',
        '-of', 'default=noprint_wrappers=1:nokey=1',
        str(filename),
    ]

    ffprobe_from_stream_args = [
        current_app.config['BIN_FFPROBE'],
        '-v', 'error',
        '-hide_banner',
        '-select_streams', 'v:0',  # we only care about the first video stream
        '-show_entries', 'stream=duration',
        '-of', 'default=noprint_wrappers=1:nokey=1',
        str(filename),
    ]

    duration = run(ffprobe_from_stream_args) or\
               run(ffprobe_from_container_args) or\
               None
    return duration
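
One subtlety in the new helper: run() returns None both when ffprobe fails and when its output can't be parsed, and a literal 0-second duration is also falsy, so the or-chain falls through from the per-stream query to the container-level query and finally to None. A hypothetical call (file name and result are illustrative only):

    >>> _video_duration_seconds(pathlib.Path('/tmp/clip.mp4'))
    42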


def _video_size_pixels(filename: pathlib.Path) -> typing.Tuple[int, int]:
    """Figures out the size (in pixels) of the video file.

    Returns (0, 0) if there was any error detecting the size.
    """

    import json
    import subprocess

    cli_args = [
        current_app.config['BIN_FFPROBE'],
        '-loglevel', 'error',
        '-hide_banner',
        '-print_format', 'json',
        '-select_streams', 'v:0',  # we only care about the first video stream
        '-show_streams',
        str(filename),
    ]

    if log.isEnabledFor(logging.INFO):
        import shlex
        cmd = ' '.join(shlex.quote(s) for s in cli_args)
        log.info('Calling %s', cmd)

    ffprobe = subprocess.run(
        cli_args,
        stdin=subprocess.DEVNULL,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        timeout=10,  # seconds
    )

    if ffprobe.returncode:
        import shlex
        cmd = ' '.join(shlex.quote(s) for s in cli_args)
        log.error('Error running %s: stopped with return code %i',
                  cmd, ffprobe.returncode)
        log.error('Output was: %s', ffprobe.stdout)
        return 0, 0

    try:
        ffprobe_info = json.loads(ffprobe.stdout)
    except json.JSONDecodeError:
        log.exception('ffprobe produced invalid JSON: %s', ffprobe.stdout)
        return 0, 0

    try:
        stream_info = ffprobe_info['streams'][0]
        return stream_info['width'], stream_info['height']
    except (KeyError, IndexError):
        log.exception('ffprobe produced unexpected JSON: %s', ffprobe.stdout)
        return 0, 0
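
For reference, ffprobe's -print_format json -show_streams output is a JSON object with a 'streams' list, which is why the parser indexes ['streams'][0] and then reads 'width' and 'height'. A trimmed, illustrative sample (field values are made up):

    {"streams": [{"index": 0,
                  "codec_type": "video",
                  "width": 1920,
                  "height": 1080}]}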


def _video_cap_at_1080(width: int, height: int) -> typing.Tuple[int, int]:
    """Returns an appropriate width/height for a video capped at 1920x1080.

    Takes into account that h264 has limitations:
        - the width must be a multiple of 16
        - the height must be a multiple of 8
    """

    if width > 1920:
        # The height must be a multiple of 8
        new_height = height / width * 1920
        height = new_height - (new_height % 8)
        width = 1920

    if height > 1080:
        # The width must be a multiple of 16
        new_width = width / height * 1080
        width = new_width - (new_width % 16)
        height = 1080

    return int(width), int(height)
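
As a worked example: a 4096x2160 source is wider than 1920, so the height is scaled to 2160 / 4096 * 1920 = 1012.5 and rounded down to the nearest multiple of 8, giving 1920x1008; the second branch then doesn't fire because 1008 <= 1080.

    >>> _video_cap_at_1080(4096, 2160)
    (1920, 1008)
    >>> _video_cap_at_1080(1280, 720)  # already within bounds, returned as-is
    (1280, 720)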


def _process_video(gcs,
                   file_id: ObjectId,
                   local_file: tempfile._TemporaryFileWrapper,
                   src_file: dict):
    """Video is processed by Zencoder."""

    log.info('Processing video for file %s', file_id)

    # Use ffprobe to find the size (in pixels) of the video.
    # Even though Zencoder can do resizing to a maximum resolution without upscaling,
    # by determining the video size here we already have this information in the file
    # document before Zencoder calls our notification URL. It also opens up possibilities
    # for other encoding backends that don't support this functionality.
    video_path = pathlib.Path(local_file.name)
    video_width, video_height = _video_size_pixels(video_path)
    capped_video_width, capped_video_height = _video_cap_at_1080(video_width, video_height)
    video_duration = _video_duration_seconds(video_path)

    # Create variations
    root, _ = os.path.splitext(src_file['file_path'])
    src_file['variations'] = []
@@ -162,12 +297,13 @@ def _process_video(gcs, file_id, local_file, src_file):
        content_type='video/{}'.format(v),
        file_path='{}-{}.{}'.format(root, v, v),
        size='',
        duration=0,
        width=0,
        height=0,
        width=capped_video_width,
        height=capped_video_height,
        length=0,
        md5='',
    )
    if video_duration:
        file_variation['duration'] = video_duration
    # Append file variation. Originally mp4 and webm were the available options,
    # that's why we build a list.
    src_file['variations'].append(file_variation)
@@ -175,8 +311,8 @@ def _process_video(gcs, file_id, local_file, src_file):
    if current_app.config['TESTING']:
        log.warning('_process_video: NOT sending out encoding job due to '
                    'TESTING=%r', current_app.config['TESTING'])
        j = type('EncoderJob', (), {'process_id': 'fake-process-id',
                                    'backend': 'fake'})
        j = {'process_id': 'fake-process-id',
             'backend': 'fake'}
    else:
        j = Encoder.job_create(src_file)
        if j is None:
@@ -194,14 +330,14 @@ def _process_video(gcs, file_id, local_file, src_file):
        'backend': j['backend']}


def process_file(gcs, file_id, local_file):
def process_file(bucket: Bucket,
                 file_id: typing.Union[str, ObjectId],
                 local_file: tempfile._TemporaryFileWrapper):
    """Process the file by creating thumbnails, sending to Zencoder, etc.

    :param file_id: '_id' key of the file
    :type file_id: ObjectId or str
    :param local_file: locally stored file, or None if no local processing is
    needed.
    :type local_file: file
    """

    file_id = ObjectId(file_id)
@@ -218,8 +354,8 @@ def process_file(gcs, file_id, local_file):
    # TODO: overrule the content type based on file extension & magic numbers.
    mime_category, src_file['format'] = src_file['content_type'].split('/', 1)

    # Prevent video handling for non-admins.
    if not user_has_role(u'admin') and mime_category == 'video':
    # Only allow video encoding when the user has the correct capability.
    if not current_user.has_cap('encode-video') and mime_category == 'video':
        if src_file['format'].startswith('x-'):
            xified = src_file['format']
        else:
@@ -227,10 +363,10 @@ def process_file(gcs, file_id, local_file):

        src_file['content_type'] = 'application/%s' % xified
        mime_category = 'application'
        log.info('Not processing video file %s for non-admin user', file_id)
        log.info('Not processing video file %s for non-video-encoding user', file_id)

    # Run the required processor, based on the MIME category.
    processors = {
    processors: typing.Mapping[str, typing.Callable] = {
        'image': _process_image,
        'video': _process_video,
    }
@@ -249,7 +385,7 @@ def process_file(gcs, file_id, local_file):
        update_file_doc(file_id, status='processing')

        try:
            processor(gcs, file_id, local_file, src_file)
            processor(bucket, file_id, local_file, src_file)
        except Exception:
            log.warning('process_file(%s): error when processing file, '
                        'resetting status to '
@@ -265,65 +401,41 @@ def process_file(gcs, file_id, local_file):
                    file_id, status, r)


def delete_file(file_item):
    def process_file_delete(file_item):
        """Given a file item, delete the actual file from the storage backend.
        This function can probably be made self-calling."""
        if file_item['backend'] == 'gcs':
            storage = GoogleCloudStorageBucket(str(file_item['project']))
            storage.Delete(file_item['file_path'])
            # Delete any file variation found in the file_item document
            if 'variations' in file_item:
                for v in file_item['variations']:
                    storage.Delete(v['file_path'])
            return True
        elif file_item['backend'] == 'pillar':
            pass
        elif file_item['backend'] == 'cdnsun':
            pass
        else:
            pass

    files_collection = current_app.data.driver.db['files']
    # Collect children (variations) of the original file
    children = files_collection.find({'parent': file_item['_id']})
    for child in children:
        process_file_delete(child)
    # Finally remove the original file
    process_file_delete(file_item)


def generate_link(backend, file_path, project_id=None, is_public=False):
def generate_link(backend, file_path: str, project_id: str=None, is_public=False) -> str:
    """Hook to check the backend of a file resource, to build an appropriate link
    that can be used by the client to retrieve the actual file.
    """

    if backend == 'gcs':
        if current_app.config['TESTING']:
            log.info('Skipping GCS link generation, and returning a fake link '
                     'instead.')
            return '/path/to/testing/gcs/%s' % file_path
    # TODO: replace config['TESTING'] with mocking GCS.
    if backend == 'gcs' and current_app.config['TESTING']:
        log.info('Skipping GCS link generation, and returning a fake link '
                 'instead.')
        return '/path/to/testing/gcs/%s' % file_path

    if backend in {'gcs', 'local'}:
        from ..file_storage_backends import Bucket

        bucket_cls = Bucket.for_backend(backend)
        storage = bucket_cls(project_id)
        blob = storage.get_blob(file_path)

        storage = GoogleCloudStorageBucket(project_id)
        blob = storage.Get(file_path)
        if blob is None:
            log.warning('generate_link(%r, %r): unable to find blob for file path,'
                        ' returning empty link.', backend, file_path)
            log.warning('generate_link(%r, %r): unable to find blob for file'
                        ' path, returning empty link.', backend, file_path)
            return ''

        if is_public:
            return blob['public_url']
        return blob['signed_url']
        return blob.get_url(is_public=is_public)

    if backend == 'pillar':
    if backend == 'pillar':  # obsolete, replace with local.
        return url_for('file_storage.index', file_name=file_path,
                       _external=True, _scheme=current_app.config['SCHEME'])
    if backend == 'cdnsun':
        return hash_file_path(file_path, None)
    if backend == 'unittest':
        return 'https://unit.test/%s' % md5(file_path).hexdigest()
        return 'https://unit.test/%s' % md5(file_path.encode()).hexdigest()

    log.warning('generate_link(): Unknown backend %r, returning empty string as new link.',
    log.warning('generate_link(): Unknown backend %r, returning empty string '
                'as new link.',
                backend)
    return ''
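
With this change, 'gcs' and 'local' share one code path: the backend name selects a Bucket subclass, the project ID names the bucket, and the blob decides for itself how to build a public or signed URL. A minimal sketch of that lookup (the project ID and file path are hypothetical):

    from pillar.api.file_storage_backends import Bucket

    bucket_cls = Bucket.for_backend('local')  # the subclass registered under 'local'
    bucket = bucket_cls('5672beecc0261b2005ed1a33')  # bucket named after the project
    blob = bucket.get_blob('some-file.jpg')
    if blob is not None:
        link = blob.get_url(is_public=False)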

@@ -338,12 +450,8 @@ def before_returning_file(response):

def strip_link_and_variations(response):
    # Check the access level of the user.
    if g.current_user is None:
        has_full_access = False
    else:
        user_roles = g.current_user['roles']
        access_roles = current_app.config['FULL_FILE_ACCESS_ROLES']
        has_full_access = bool(user_roles.intersection(access_roles))
    capability = current_app.config['FULL_FILE_ACCESS_CAP']
    has_full_access = current_user.has_cap(capability)

    # Strip all file variations (unless image) and link to the actual file.
    if not has_full_access:
@@ -369,7 +477,7 @@ def ensure_valid_link(response):
    # log.debug('Inspecting link for file %s', response['_id'])

    # Check link expiry.
    now = datetime.datetime.now(tz=bson.tz_util.utc)
    now = utils.utcnow()
    if 'link_expires' in response:
        link_expires = response['link_expires']
        if now < link_expires:
@@ -397,7 +505,14 @@ def generate_all_links(response, now):
        response['project']) if 'project' in response else None
    # TODO: add project id to all files
    backend = response['backend']
    response['link'] = generate_link(backend, response['file_path'], project_id)

    if 'file_path' in response:
        response['link'] = generate_link(backend, response['file_path'], project_id)
    else:
        import pprint
        log.error('File without file_path property, unable to generate links: %s',
                  pprint.pformat(response))
        return

    variations = response.get('variations')
    if variations:
@@ -410,6 +525,12 @@ def generate_all_links(response, now):
    response['link_expires'] = now + datetime.timedelta(seconds=validity_secs)

    patch_info = remove_private_keys(response)

    # The project could have been soft-deleted, in which case it's fine to
    # update the links to the file. However, Eve/Cerberus doesn't allow this;
    # removing the 'project' key from the PATCH works around this.
    patch_info.pop('project', None)

    file_id = ObjectId(response['_id'])
    (patch_resp, _, _, _) = current_app.patch_internal('files', patch_info,
                                                       _id=file_id)
@@ -427,22 +548,28 @@ def generate_all_links(response, now):
    response['_etag'] = etag_doc['_etag']


def before_deleting_file(item):
    delete_file(item)


def on_pre_get_files(_, lookup):
    # Override the HTTP header, we always want to fetch the document from
    # MongoDB.
    parsed_req = eve.utils.parse_request('files')
    parsed_req.if_modified_since = None

    # If there is no lookup, we would refresh *all* file documents,
    # which is far too heavy to do in one client HTTP request.
    if not lookup:
        return

    # Only fetch it if the date got expired.
    now = datetime.datetime.now(tz=bson.tz_util.utc)
    now = utils.utcnow()
    lookup_expired = lookup.copy()
    lookup_expired['link_expires'] = {'$lte': now}

    cursor = current_app.data.find('files', parsed_req, lookup_expired)
    if cursor.count() == 0:
        return

    log.debug('Updating expired links for %d files that matched lookup %s',
              cursor.count(), lookup_expired)
    for file_doc in cursor:
        # log.debug('Updating expired links for file %r.', file_doc['_id'])
        generate_all_links(file_doc, now)
@@ -458,7 +585,7 @@ def refresh_links_for_project(project_uuid, chunk_size, expiry_seconds):
    # Retrieve expired links.
    files_collection = current_app.data.driver.db['files']

    now = datetime.datetime.now(tz=bson.tz_util.utc)
    now = utils.utcnow()
    expire_before = now + datetime.timedelta(seconds=expiry_seconds)
    log.info('Limiting to links that expire before %s', expire_before)

@@ -481,33 +608,43 @@ def refresh_links_for_project(project_uuid, chunk_size, expiry_seconds):
def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
    import gcloud.exceptions

    my_log = log.getChild(f'refresh_links_for_backend.{backend_name}')

    # Retrieve expired links.
    files_collection = current_app.data.driver.db['files']
    proj_coll = current_app.data.driver.db['projects']

    now = datetime.datetime.now(tz=bson.tz_util.utc)
    now = utils.utcnow()
    expire_before = now + datetime.timedelta(seconds=expiry_seconds)
    log.info('Limiting to links that expire before %s', expire_before)
    my_log.info('Limiting to links that expire before %s', expire_before)

    base_query = {'backend': backend_name, '_deleted': {'$ne': True}}
    to_refresh = files_collection.find(
        {'$or': [{'backend': backend_name, 'link_expires': None},
                 {'backend': backend_name, 'link_expires': {
                     '$lt': expire_before}},
                 {'backend': backend_name, 'link': None}]
        {'$or': [{'link_expires': None, **base_query},
                 {'link_expires': {'$lt': expire_before}, **base_query},
                 {'link': None, **base_query}]
         }).sort([('link_expires', pymongo.ASCENDING)]).limit(
        chunk_size).batch_size(5)
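
The rewritten query uses PEP 448 dict unpacking to merge base_query into each $or branch, so the backend and _deleted filters cannot drift out of sync between the three alternatives. The merge semantics in isolation:

    >>> base_query = {'backend': 'gcs', '_deleted': {'$ne': True}}
    >>> {'link': None, **base_query}
    {'link': None, 'backend': 'gcs', '_deleted': {'$ne': True}}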

    if to_refresh.count() == 0:
        log.info('No links to refresh.')
    document_count = to_refresh.count()
    if document_count == 0:
        my_log.info('No links to refresh.')
        return

    if 0 < chunk_size == document_count:
        my_log.info('Found %d documents to refresh, probably limited by the chunk size.',
                    document_count)
    else:
        my_log.info('Found %d documents to refresh.', document_count)

    refreshed = 0
    report_chunks = min(max(5, document_count // 25), 100)
    for file_doc in to_refresh:
        try:
            file_id = file_doc['_id']
            project_id = file_doc.get('project')
            if project_id is None:
                log.debug('Skipping file %s, it has no project.', file_id)
                my_log.debug('Skipping file %s, it has no project.', file_id)
                continue

            count = proj_coll.count({'_id': project_id, '$or': [
@@ -516,46 +653,50 @@ def refresh_links_for_backend(backend_name, chunk_size, expiry_seconds):
            ]})

            if count == 0:
                log.debug('Skipping file %s, project %s does not exist.',
                          file_id, project_id)
                my_log.debug('Skipping file %s, project %s does not exist.',
                             file_id, project_id)
                continue

            if 'file_path' not in file_doc:
                log.warning("Skipping file %s, missing 'file_path' property.",
                            file_id)
                my_log.warning("Skipping file %s, missing 'file_path' property.",
                               file_id)
                continue

            log.debug('Refreshing links for file %s', file_id)
            my_log.debug('Refreshing links for file %s', file_id)

            try:
                generate_all_links(file_doc, now)
            except gcloud.exceptions.Forbidden:
                log.warning('Skipping file %s, GCS forbids us access to '
                            'project %s bucket.', file_id, project_id)
                my_log.warning('Skipping file %s, GCS forbids us access to '
                               'project %s bucket.', file_id, project_id)
                continue
            refreshed += 1

            if refreshed % report_chunks == 0:
                my_log.info('Refreshed %i links', refreshed)
        except KeyboardInterrupt:
            log.warning('Aborting due to KeyboardInterrupt after refreshing %i '
                        'links', refreshed)
            my_log.warning('Aborting due to KeyboardInterrupt after refreshing %i '
                           'links', refreshed)
            return

    log.info('Refreshed %i links', refreshed)
    my_log.info('Refreshed %i links', refreshed)


@require_login()
def create_file_doc(name, filename, content_type, length, project,
                    backend='gcs', **extra_fields):
                    backend=None, **extra_fields):
    """Creates a minimal File document for storage in MongoDB.

    Doesn't save it to MongoDB yet.
    """

    current_user = g.get('current_user')
    if backend is None:
        backend = current_app.config['STORAGE_BACKEND']

    file_doc = {'name': name,
                'filename': filename,
                'file_path': '',
                'user': current_user['user_id'],
                'user': current_user.user_id,
                'backend': backend,
                'md5': '',
                'content_type': content_type,
@@ -601,10 +742,10 @@ def override_content_type(uploaded_file):
        del uploaded_file._parsed_content_type


def assert_file_size_allowed(file_size):
def assert_file_size_allowed(file_size: int):
    """Asserts that the current user is allowed to upload a file of the given size.

    :raises
    :raises wz_exceptions.RequestEntityTooLarge:
    """

    roles = current_app.config['ROLES_FOR_UNLIMITED_UPLOADS']
@@ -618,7 +759,7 @@ def assert_file_size_allowed(file_size):
    filesize_limit_mb = filesize_limit / 2.0 ** 20
    log.info('User %s tried to upload a %.3f MiB file, but is only allowed '
             '%.3f MiB.',
             authentication.current_user_id(), file_size / 2.0 ** 20,
             current_user.user_id, file_size / 2.0 ** 20,
             filesize_limit_mb)
    raise wz_exceptions.RequestEntityTooLarge(
        'To upload files larger than %i MiB, subscribe to Blender Cloud' %
@@ -627,7 +768,7 @@ def assert_file_size_allowed(file_size):

@file_storage.route('/stream/<string:project_id>', methods=['POST', 'OPTIONS'])
@require_login()
def stream_to_storage(project_id):
def stream_to_storage(project_id: str):
    project_oid = utils.str2id(project_id)

    projects = current_app.data.driver.db['projects']
@@ -637,14 +778,14 @@ def stream_to_storage(project_id):
        raise wz_exceptions.NotFound('Project %s does not exist' % project_id)

    log.info('Streaming file to bucket for project=%s user_id=%s', project_id,
             authentication.current_user_id())
             current_user.user_id)
    log.info('request.headers[Origin] = %r', request.headers.get('Origin'))
    log.info('request.content_length = %r', request.content_length)

    # Try a check for the content length before we access request.files[]. This allows us
    # to abort the upload early. The entire body content length is always a bit larger than
    # the actual file size, so if we accept here, we're sure it'll be accepted in subsequent
    # checks as well.
    # Try a check for the content length before we access request.files[].
    # This allows us to abort the upload early. The entire body content length
    # is always a bit larger than the actual file size, so if we accept here,
    # we're sure it'll be accepted in subsequent checks as well.
    if request.content_length:
        assert_file_size_allowed(request.content_length)

@@ -659,50 +800,56 @@ def stream_to_storage(project_id):

    override_content_type(uploaded_file)
    if not uploaded_file.content_type:
        log.warning('File uploaded to project %s without content type.', project_oid)
        log.warning('File uploaded to project %s without content type.',
                    project_oid)
        raise wz_exceptions.BadRequest('Missing content type.')

    if uploaded_file.content_type.startswith('image/'):
        # We need to do local thumbnailing, so we have to write the stream
    if uploaded_file.content_type.startswith('image/') or uploaded_file.content_type.startswith(
            'video/'):
        # We need to do local thumbnailing and ffprobe, so we have to write the stream
        # both to Google Cloud Storage and to local storage.
        local_file = tempfile.NamedTemporaryFile(dir=current_app.config['STORAGE_DIR'])
        local_file = tempfile.NamedTemporaryFile(
            dir=current_app.config['STORAGE_DIR'])
        uploaded_file.save(local_file)
        local_file.seek(0)  # Make sure that a re-read starts from the beginning.
        stream_for_gcs = local_file
        local_file.seek(0)  # Make sure that re-read starts from the beginning.
    else:
        local_file = None
        stream_for_gcs = uploaded_file.stream
        local_file = uploaded_file.stream

    result = upload_and_process(local_file, uploaded_file, project_id)
    resp = jsonify(result)
    resp.status_code = result['status_code']
    add_access_control_headers(resp)
    return resp


def upload_and_process(local_file: typing.Union[io.BytesIO, typing.BinaryIO],
                       uploaded_file: werkzeug.datastructures.FileStorage,
                       project_id: str):
    # Figure out the file size, as we need to pass this in explicitly to GCloud.
    # Otherwise it always uses os.fstat(file_obj.fileno()).st_size, which isn't
    # supported by a BytesIO object (even though it does have a fileno
    # attribute).
    if isinstance(stream_for_gcs, io.BytesIO):
        file_size = len(stream_for_gcs.getvalue())
    if isinstance(local_file, io.BytesIO):
        file_size = len(local_file.getvalue())
    else:
        file_size = os.fstat(stream_for_gcs.fileno()).st_size
        file_size = os.fstat(local_file.fileno()).st_size

    # Check the file size again, now that we know its size for sure.
    assert_file_size_allowed(file_size)

    # Create file document in MongoDB.
    file_id, internal_fname, status = create_file_doc_for_upload(project_oid,
                                                                 uploaded_file)
    file_id, internal_fname, status = create_file_doc_for_upload(project_id, uploaded_file)

    if current_app.config['TESTING']:
        log.warning('NOT streaming to GCS because TESTING=%r',
                    current_app.config['TESTING'])
        # Fake a Blob object.
        gcs = None
        blob = type('Blob', (), {'size': file_size})
    else:
        blob, gcs = stream_to_gcs(file_id, file_size, internal_fname,
                                  project_id, stream_for_gcs,
                                  uploaded_file.mimetype)
    # Copy the file into storage.
    bucket = default_storage_backend(project_id)
    blob = bucket.blob(internal_fname)
    blob.create_from_file(local_file,
                          file_size=file_size,
                          content_type=uploaded_file.mimetype)

    log.debug('Marking uploaded file id=%s, fname=%s, '
              'size=%i as "queued_for_processing"',
              file_id, internal_fname, blob.size)
              file_id, internal_fname, file_size)
    update_file_doc(file_id,
                    status='queued_for_processing',
                    file_path=internal_fname,
@@ -711,7 +858,7 @@ def stream_to_storage(project_id):

    log.debug('Processing uploaded file id=%s, fname=%s, size=%i', file_id,
              internal_fname, blob.size)
    process_file(gcs, file_id, local_file)
    process_file(bucket, file_id, local_file)

    # Local processing is done, we can close the local file so it is removed.
    if local_file is not None:
@@ -723,26 +870,20 @@ def stream_to_storage(project_id):
    # Status is 200 if the file already existed, and 201 if it was newly
    # created.
    # TODO: add a link to a thumbnail in the response.
    resp = jsonify(status='ok', file_id=str(file_id))
    resp.status_code = status
    add_access_control_headers(resp)
    return resp
    return dict(status='ok', file_id=str(file_id), status_code=status)


def stream_to_gcs(file_id, file_size, internal_fname, project_id,
                  stream_for_gcs, content_type):
from ..file_storage_backends.abstract import FileType


def stream_to_gcs(file_id: ObjectId, file_size: int, internal_fname: str, project_id: ObjectId,
                  stream_for_gcs: FileType, content_type: str) \
        -> typing.Tuple[GoogleCloudStorageBlob, GoogleCloudStorageBucket]:
    # Upload the file to GCS.
    from gcloud.streaming import transfer
    log.debug('Streaming file to GCS bucket; id=%s, fname=%s, size=%i',
              file_id, internal_fname, file_size)
    # Files larger than this many bytes will be streamed directly from disk,
    # smaller ones will be read into memory and then uploaded.
    transfer.RESUMABLE_UPLOAD_THRESHOLD = 102400
    try:
        gcs = GoogleCloudStorageBucket(project_id)
        blob = gcs.bucket.blob('_/' + internal_fname, chunk_size=256 * 1024 * 2)
        blob.upload_from_file(stream_for_gcs, size=file_size,
                              content_type=content_type)
        bucket = GoogleCloudStorageBucket(str(project_id))
        blob = bucket.blob(internal_fname)
        blob.create_from_file(stream_for_gcs, file_size=file_size, content_type=content_type)
    except Exception:
        log.exception('Error uploading file to Google Cloud Storage (GCS),'
                      ' aborting handling of uploaded file (id=%s).', file_id)
@@ -750,9 +891,7 @@ def stream_to_gcs(file_id, file_size, internal_fname, project_id,
        raise wz_exceptions.InternalServerError(
            'Unable to stream file to Google Cloud Storage')

    # Reload the blob to get the file size according to Google.
    blob.reload()
    return blob, gcs
    return blob, bucket
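
A hedged sketch of a call to this helper, mirroring the call in copy_file_to_backend() further down (the file name, size and stream are illustrative):

    with open('/tmp/upload.bin', 'rb') as stream:
        blob, bucket = stream_to_gcs(file_id, 1024,
                                     internal_fname='c0ffee.bin',
                                     project_id=project_oid,
                                     stream_for_gcs=stream,
                                     content_type='application/octet-stream')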


def add_access_control_headers(resp):
@@ -766,15 +905,6 @@ def add_access_control_headers(resp):
    return resp


def update_file_doc(file_id, **updates):
    files = current_app.data.driver.db['files']
    res = files.update_one({'_id': ObjectId(file_id)},
                           {'$set': updates})
    log.debug('update_file_doc(%s, %s): %i matched, %i updated.',
              file_id, updates, res.matched_count, res.modified_count)
    return res


def create_file_doc_for_upload(project_id, uploaded_file):
    """Creates a secure filename and a document in MongoDB for the file.

@@ -852,10 +982,17 @@ def setup_app(app, url_prefix):
    app.on_fetched_item_files += before_returning_file
    app.on_fetched_resource_files += before_returning_files

    app.on_delete_item_files += before_deleting_file

    app.on_update_files += compute_aggregate_length
    app.on_replace_files += compute_aggregate_length
    app.on_insert_files += compute_aggregate_length_items

    app.register_api_blueprint(file_storage, url_prefix=url_prefix)


def update_file_doc(file_id, **updates):
    files = current_app.data.driver.db['files']
    res = files.update_one({'_id': ObjectId(file_id)},
                           {'$set': updates})
    log.debug('update_file_doc(%s, %s): %i matched, %i updated.',
              file_id, updates, res.matched_count, res.modified_count)
    return res

@@ -1,20 +1,18 @@
"""Code for moving files between backends."""

import datetime
import logging
import os
import tempfile

from bson import ObjectId
import bson.tz_util
from flask import current_app
import requests
import requests.exceptions
from bson import ObjectId
from flask import current_app

from pillar.api import utils
from . import stream_to_gcs, generate_all_links, ensure_valid_link
import pillar.api.utils.gcs

__all__ = ['PrerequisiteNotMetError', 'change_file_storage_backend']
__all__ = ['PrerequisiteNotMetError', 'change_file_storage_backend', 'move_to_bucket']

log = logging.getLogger(__name__)

@@ -29,7 +27,7 @@ def change_file_storage_backend(file_id, dest_backend):
    Files on the original backend are not deleted automatically.
    """

    dest_backend = unicode(dest_backend)
    dest_backend = str(dest_backend)
    file_id = ObjectId(file_id)

    # Fetch file document
@@ -75,8 +73,7 @@ def change_file_storage_backend(file_id, dest_backend):
    # Generate new links for the file & all variations. This also saves
    # the new backend we set here.
    f['backend'] = dest_backend
    now = datetime.datetime.now(tz=bson.tz_util.utc)
    generate_all_links(f, now)
    generate_all_links(f, utils.utcnow())


def copy_file_to_backend(file_id, project_id, file_or_var, src_backend, dest_backend):
@@ -90,22 +87,23 @@ def copy_file_to_backend(file_id, project_id, file_or_var, src_backend, dest_bac
    else:
        local_finfo = fetch_file_from_link(file_or_var['link'])

    # Upload to GCS
    if dest_backend != 'gcs':
        raise ValueError('Only dest_backend="gcs" is supported now.')
    try:
        # Upload to GCS
        if dest_backend != 'gcs':
            raise ValueError('Only dest_backend="gcs" is supported now.')

    if current_app.config['TESTING']:
        log.warning('Skipping actual upload to GCS due to TESTING')
    else:
        # TODO check for name collisions
        stream_to_gcs(file_id, local_finfo['file_size'],
                      internal_fname=internal_fname,
                      project_id=str(project_id),
                      stream_for_gcs=local_finfo['local_file'],
                      content_type=local_finfo['content_type'])

    # No longer needed, so it can be closed & disposed of.
    local_finfo['local_file'].close()
        if current_app.config['TESTING']:
            log.warning('Skipping actual upload to GCS due to TESTING')
        else:
            # TODO check for name collisions
            stream_to_gcs(file_id, local_finfo['file_size'],
                          internal_fname=internal_fname,
                          project_id=project_id,
                          stream_for_gcs=local_finfo['local_file'],
                          content_type=local_finfo['content_type'])
    finally:
        # No longer needed, so it can be closed & disposed of.
        local_finfo['local_file'].close()


def fetch_file_from_link(link):
@@ -152,29 +150,39 @@ def fetch_file_from_local(file_doc):
    return local_finfo


def gcs_move_to_bucket(file_id, dest_project_id, skip_gcs=False):
    """Moves a file from its own bucket to the new project_id bucket."""
def move_to_bucket(file_id: ObjectId, dest_project_id: ObjectId, *, skip_storage=False):
    """Move a file + variations from its own bucket to the new project_id bucket.

    files_coll = current_app.db()['files']
    :param file_id: ID of the file to move.
    :param dest_project_id: Project to move to.
    :param skip_storage: If True, the storage bucket will not be touched.
        Only use this when you know what you're doing.
    """

    files_coll = current_app.db('files')
    f = files_coll.find_one(file_id)
    if f is None:
        raise ValueError('File with _id: {} not found'.format(file_id))

    # Check that new backend differs from current one
    if f['backend'] != 'gcs':
        raise ValueError('Only Google Cloud Storage is supported for now.')
        raise ValueError(f'File with _id: {file_id} not found')

    # Move file and variations to the new bucket.
    if skip_gcs:
        log.warning('NOT ACTUALLY MOVING file %s on GCS, just updating MongoDB', file_id)
    if skip_storage:
        log.warning('NOT ACTUALLY MOVING file %s on storage, just updating MongoDB', file_id)
    else:
        src_project = f['project']
        pillar.api.utils.gcs.copy_to_bucket(f['file_path'], src_project, dest_project_id)
        from pillar.api.file_storage_backends import Bucket
        bucket_class = Bucket.for_backend(f['backend'])
        src_bucket = bucket_class(str(f['project']))
        dst_bucket = bucket_class(str(dest_project_id))

        src_blob = src_bucket.get_blob(f['file_path'])
        src_bucket.copy_blob(src_blob, dst_bucket)

        for var in f.get('variations', []):
            pillar.api.utils.gcs.copy_to_bucket(var['file_path'], src_project, dest_project_id)
            src_blob = src_bucket.get_blob(var['file_path'])
            src_bucket.copy_blob(src_blob, dst_bucket)

    # Update the file document after moving was successful.
    # No need to update _etag or _updated, since that'll be done when
    # the links are regenerated at the end of this function.
    log.info('Switching file %s to project %s', file_id, dest_project_id)
    update_result = files_coll.update_one({'_id': file_id},
                                          {'$set': {'project': dest_project_id}})
@@ -188,4 +196,4 @@ def gcs_move_to_bucket(file_id, dest_project_id, skip_gcs=False):

    # Regenerate the links for this file
    f['project'] = dest_project_id
    generate_all_links(f, now=datetime.datetime.now(tz=bson.tz_util.utc))
    generate_all_links(f, now=utils.utcnow())
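
A hypothetical invocation of the renamed helper, assuming this module is importable as pillar.api.file_storage.moving (the ObjectIds are illustrative):

    from bson import ObjectId
    from pillar.api.file_storage.moving import move_to_bucket

    move_to_bucket(ObjectId('59a1b2c3d4e5f60708090a0b'),
                   ObjectId('59a1b2c3d4e5f60708090a0c'))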

pillar/api/file_storage_backends/__init__.py (new file, 29 lines)
@@ -0,0 +1,29 @@
"""Storage backends.
 | 
			
		||||
 | 
			
		||||
To obtain a storage backend, use either of the two forms:
 | 
			
		||||
 | 
			
		||||
>>> bucket = default_storage_backend('bucket_name')
 | 
			
		||||
 | 
			
		||||
>>> BucketClass = Bucket.for_backend('backend_name')
 | 
			
		||||
>>> bucket = BucketClass('bucket_name')
 | 
			
		||||
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
from .abstract import Bucket
 | 
			
		||||
 | 
			
		||||
# Import the other backends so that they register.
 | 
			
		||||
from . import local
 | 
			
		||||
from . import gcs
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def default_storage_backend(name: str) -> Bucket:
 | 
			
		||||
    """Returns an instance of a Bucket, based on the default backend.
 | 
			
		||||
 | 
			
		||||
    Depending on the backend this may actually create the bucket.
 | 
			
		||||
    """
 | 
			
		||||
    from flask import current_app
 | 
			
		||||
 | 
			
		||||
    backend_name = current_app.config['STORAGE_BACKEND']
 | 
			
		||||
    backend_cls = Bucket.for_backend(backend_name)
 | 
			
		||||
 | 
			
		||||
    return backend_cls(name)
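
Note that the late 'from flask import current_app' import keeps this module importable without an application context; the backend is then chosen by a single config key. A hypothetical config line (the 'local' and 'gcs' names are the ones the imported backend modules appear to register):

    STORAGE_BACKEND = 'local'  # or 'gcs'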

pillar/api/file_storage_backends/abstract.py (new file, 161 lines)
@@ -0,0 +1,161 @@
import abc
 | 
			
		||||
import io
 | 
			
		||||
import logging
 | 
			
		||||
import typing
 | 
			
		||||
 | 
			
		||||
import pathlib
 | 
			
		||||
from bson import ObjectId
 | 
			
		||||
 | 
			
		||||
__all__ = ['Bucket', 'Blob', 'Path', 'FileType']
 | 
			
		||||
 | 
			
		||||
# Shorthand for the type of path we use.
 | 
			
		||||
Path = pathlib.PurePosixPath
 | 
			
		||||
 | 
			
		||||
# This is a mess: typing.IO keeps mypy-0.501 happy, but not in all cases,
 | 
			
		||||
# and io.FileIO + io.BytesIO keeps PyCharm-2017.1 happy.
 | 
			
		||||
FileType = typing.Union[typing.IO, io.FileIO, io.BytesIO]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Bucket(metaclass=abc.ABCMeta):
 | 
			
		||||
    """Can be a GCS bucket or simply a project folder in Pillar
 | 
			
		||||
 | 
			
		||||
    :type name: string
 | 
			
		||||
    :param name: Name of the bucket. As a convention, we use the ID of
 | 
			
		||||
    the project to name the bucket.
 | 
			
		||||
 | 
			
		||||
    """
 | 
			
		||||
 | 
			
		||||
    # Mapping from backend name to Bucket class
 | 
			
		||||
    backends: typing.Dict[str, typing.Type['Bucket']] = {}
 | 
			
		||||
 | 
			
		||||
    backend_name: str = None  # define in subclass.
 | 
			
		||||
 | 
			
		||||
    def __init__(self, name: str) -> None:
 | 
			
		||||
        self.name = str(name)
 | 
			
		||||
 | 
			
		||||
    def __init_subclass__(cls):
 | 
			
		||||
        assert cls.backend_name, '%s.backend_name must be non-empty string' % cls
 | 
			
		||||
        cls.backends[cls.backend_name] = cls
 | 
			
		||||
 | 
			
		||||
    def __repr__(self):
 | 
			
		||||
        return f'<{self.__class__.__name__} name={self.name!r}>'
 | 
			
		||||
 | 
			
		||||
    @classmethod
 | 
			
		||||
    def for_backend(cls, backend_name: str) -> typing.Type['Bucket']:
 | 
			
		||||
        """Returns the Bucket subclass for the given backend."""
 | 
			
		||||
        return cls.backends[backend_name]
 | 
			
		||||
 | 
			
		||||
    @abc.abstractmethod
 | 
			
		||||
    def blob(self, blob_name: str) -> 'Blob':
        """Factory constructor for blob object.

        :param blob_name: The path of the blob to be instantiated.
        """

    @abc.abstractmethod
    def get_blob(self, blob_name: str) -> typing.Optional['Blob']:
        """Get a blob object by name.

        If the blob exists return the object, otherwise None.
        """

    @abc.abstractmethod
    def copy_blob(self, blob: 'Blob', to_bucket: 'Bucket'):
        """Copies a blob from the current bucket to the other bucket.

        Implementations only need to support copying between buckets of the
        same storage backend.
        """

    @abc.abstractmethod
    def rename_blob(self, blob: 'Blob', new_name: str) -> 'Blob':
        """Rename the blob, returning the new Blob."""

    @classmethod
    def copy_to_bucket(cls, blob_name, src_project_id: ObjectId, dest_project_id: ObjectId):
        """Copies a file from one bucket to the other."""

        src_storage = cls(str(src_project_id))
        dest_storage = cls(str(dest_project_id))

        blob = src_storage.get_blob(blob_name)
        src_storage.copy_blob(blob, dest_storage)


Bu = typing.TypeVar('Bu', bound=Bucket)


class Blob(metaclass=abc.ABCMeta):
    """A wrapper for file or blob objects."""

    def __init__(self, name: str, bucket: Bucket) -> None:
        self.name = name
        self.bucket = bucket
        self._size_in_bytes: typing.Optional[int] = None

        self.filename: typing.Optional[str] = None
        """Name of the file for the Content-Disposition header when downloading it."""

        self._log = logging.getLogger(f'{__name__}.Blob')

    def __repr__(self):
        return f'<{self.__class__.__name__} bucket={self.bucket.name!r} name={self.name!r}>'

    @property
    def size(self) -> typing.Optional[int]:
        """Size of the object, in bytes.

        :returns: The size of the blob, or ``None`` if the property
                  is not set locally.
        """

        size = self._size_in_bytes
        if size is None:
            return None
        return int(size)

    @abc.abstractmethod
    def create_from_file(self, file_obj: FileType, *,
                         content_type: str,
                         file_size: int = -1):
        """Copies the file object to the storage.

        :param file_obj: The file object to send to storage.
        :param content_type: The content type of the file.
        :param file_size: The size of the file in bytes, or -1 if unknown.
        """

    def upload_from_path(self, path: pathlib.Path, content_type: str):
        file_size = path.stat().st_size

        with path.open('rb') as infile:
            self.create_from_file(infile, content_type=content_type,
                                  file_size=file_size)

    @abc.abstractmethod
    def update_filename(self, filename: str):
        """Sets the filename which is used when downloading the file.

        Not all storage backends support this; those that don't fall back
        to the on-disk filename instead.
        """

    @abc.abstractmethod
    def get_url(self, *, is_public: bool) -> str:
        """Returns the URL to access this blob.

        Note that this may involve API calls to generate a signed URL.
        """

    @abc.abstractmethod
    def make_public(self):
        """Makes the blob publicly available.

        Only performs an actual action on backends that support temporary links.
        """

    @abc.abstractmethod
    def exists(self) -> bool:
        """Returns True iff the file exists on the storage backend."""


Bl = typing.TypeVar('Bl', bound=Blob)
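
For a sense of how calling code drives this abstract API, a minimal sketch (not part of the changeset; it assumes a Flask app context and uses the LocalBucket subclass introduced below, with invented names and an invented project ID):

    import pathlib

    from pillar.api.file_storage_backends.local import LocalBucket

    bucket = LocalBucket('5672beecc0261b2005ed1a33')  # one bucket per project
    blob = bucket.blob('scene.blend')
    blob.upload_from_path(pathlib.Path('/tmp/scene.blend'),
                          content_type='application/x-blender')
    print(blob.get_url(is_public=False))  # served via the file_storage.index view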

pillar/api/file_storage_backends/gcs.py (new file, 263 lines)
@@ -0,0 +1,263 @@
import os
import datetime
import logging
import typing

from bson import ObjectId
from gcloud.storage.client import Client
import gcloud.storage.blob
import gcloud.exceptions as gcloud_exc
from flask import current_app, g
from werkzeug.local import LocalProxy

from pillar.api import utils
from .abstract import Bucket, Blob, FileType

log = logging.getLogger(__name__)


def get_client() -> Client:
    """Stores the GCS client on the global Flask object.

    The GCS client is not user-specific anyway.
    """

    _gcs = getattr(g, '_gcs_client', None)
    if _gcs is None:
        _gcs = g._gcs_client = Client()
    return _gcs


# This hides the specifics of how/where we store the GCS client,
# and allows the rest of the code to use 'gcs' as a simple variable
# that does the right thing.
gcs: Client = LocalProxy(get_client)


class GoogleCloudStorageBucket(Bucket):
    """Cloud Storage bucket interface. We create a bucket for every project.

    In the bucket we create first-level subdirs as follows:
    - '_' (will contain hashed assets, and stays on top of default listing)
    - 'svn' (svn checkout mirror)
    - 'shared' (any additional folder of static files that is accessed via a
      node of 'storage' node_type)

    :type name: string
    :param name: Name of the bucket.

    :type subdir: string
    :param subdir: The local entry point to browse the bucket.
    """

    backend_name = 'gcs'

    def __init__(self, name: str, subdir='_') -> None:
        super().__init__(name=name)

        self._log = logging.getLogger(f'{__name__}.GoogleCloudStorageBucket')

        try:
            self._gcs_bucket = gcs.get_bucket(name)
        except gcloud_exc.NotFound:
            self._gcs_bucket = gcs.bucket(name)
            # Hardcode the bucket location to EU
            self._gcs_bucket.location = 'EU'
            # Optionally enable CORS from * (currently only used for vrview)
            # self.gcs_bucket.cors = [
            #     {
            #       "origin": ["*"],
            #       "responseHeader": ["Content-Type"],
            #       "method": ["GET", "HEAD", "DELETE"],
            #       "maxAgeSeconds": 3600
            #     }
            # ]
            self._gcs_bucket.create()
            log.info('Created GCS instance for project %s', name)

        self.subdir = subdir

    def blob(self, blob_name: str) -> 'GoogleCloudStorageBlob':
        return GoogleCloudStorageBlob(name=blob_name, bucket=self)

    def get_blob(self, internal_fname: str) -> typing.Optional['GoogleCloudStorageBlob']:
        blob = self.blob(internal_fname)
        if not blob.gblob.exists():
            return None
        return blob

    def _gcs_get(self, path: str, *, chunk_size=None) -> gcloud.storage.Blob:
        """Get selected file info if the path matches.

        :param path: The path to the file, relative to the bucket's subdir.
        """
        path = os.path.join(self.subdir, path)
        blob = self._gcs_bucket.blob(path, chunk_size=chunk_size)
        return blob

    def _gcs_post(self, full_path, *, path=None) -> typing.Optional[gcloud.storage.Blob]:
        """Create a new blob and upload data to it."""
        path = path if path else os.path.join(self.subdir, os.path.basename(full_path))
        gblob = self._gcs_bucket.blob(path)
        if gblob.exists():
            self._log.error(f'Trying to upload to {path}, but that blob already exists. '
                            f'Not uploading.')
            return None

        gblob.upload_from_filename(full_path)
        return gblob
        # return self.blob_to_dict(blob)  # Has issues with threading

    def delete_blob(self, path: str) -> bool:
        """Deletes the blob (when removing an asset or replacing a preview)."""

        # We want to get the actual blob to delete
        gblob = self._gcs_get(path)
        try:
            gblob.delete()
            return True
        except gcloud_exc.NotFound:
            return False

    def copy_blob(self, blob: Blob, to_bucket: Bucket):
        """Copies the given blob from this bucket to the other bucket.

        Returns the new blob.
        """

        assert isinstance(blob, GoogleCloudStorageBlob)
        assert isinstance(to_bucket, GoogleCloudStorageBucket)

        self._log.info('Copying %s to bucket %s', blob, to_bucket)

        return self._gcs_bucket.copy_blob(blob.gblob, to_bucket._gcs_bucket)

    def rename_blob(self, blob: 'GoogleCloudStorageBlob', new_name: str) \
            -> 'GoogleCloudStorageBlob':
        """Rename the blob, returning the new Blob."""

        assert isinstance(blob, GoogleCloudStorageBlob)

        new_name = os.path.join(self.subdir, new_name)

        self._log.info('Renaming %s to %r', blob, new_name)
        new_gblob = self._gcs_bucket.rename_blob(blob.gblob, new_name)
        return GoogleCloudStorageBlob(new_gblob.name, self, gblob=new_gblob)


class GoogleCloudStorageBlob(Blob):
    """GCS blob interface."""

    def __init__(self, name: str, bucket: GoogleCloudStorageBucket,
                 *, gblob: gcloud.storage.blob.Blob = None) -> None:
        super().__init__(name, bucket)

        self._log = logging.getLogger(f'{__name__}.GoogleCloudStorageBlob')
        self.gblob = gblob or bucket._gcs_get(name, chunk_size=256 * 1024 * 2)

    def create_from_file(self, file_obj: FileType, *,
                         content_type: str,
                         file_size: int = -1) -> None:
        from gcloud.streaming import transfer

        self._log.debug('Streaming file to GCS bucket %r, size=%i', self, file_size)

        # Files larger than this many bytes will be streamed directly from disk,
        # smaller ones will be read into memory and then uploaded.
        transfer.RESUMABLE_UPLOAD_THRESHOLD = 102400
        self.gblob.upload_from_file(file_obj,
                                    size=file_size,
                                    content_type=content_type)

        # Reload the blob to get the file size according to Google.
        self.gblob.reload()
        self._size_in_bytes = self.gblob.size

    def update_filename(self, filename: str):
        """Set the Content-Disposition metadata so that when a file is
        downloaded it has a human-readable name.
        """

        if '"' in filename:
            raise ValueError(f'Filename is not allowed to have double quote in it: {filename!r}')

        self.gblob.content_disposition = f'attachment; filename="{filename}"'
        self.gblob.patch()

    def get_url(self, *, is_public: bool) -> str:
        if is_public:
            return self.gblob.public_url

        expiration = utils.utcnow() + datetime.timedelta(days=1)
        return self.gblob.generate_signed_url(expiration)

    def make_public(self):
        self.gblob.make_public()

    def exists(self) -> bool:
        # Reload to get the actual file properties from Google.
        try:
            self.gblob.reload()
        except gcloud_exc.NotFound:
            return False
        return self.gblob.exists()


def update_file_name(node):
    """Assign to the GCS blob the same name as the asset node. This way when
    downloading an asset we get a human-readable name.
    """

    # Process only files that are not processing
    if node['properties'].get('status', '') == 'processing':
        return

    def _format_name(name, override_ext, size=None, map_type=''):
        root, _ = os.path.splitext(name)
        size = '-{}'.format(size) if size else ''
        map_type = '-{}'.format(map_type) if map_type else ''
        return '{}{}{}{}'.format(root, size, map_type, override_ext)

    def _update_name(file_id, file_props):
        files_collection = current_app.data.driver.db['files']
        file_doc = files_collection.find_one({'_id': ObjectId(file_id)})

        if file_doc is None or file_doc.get('backend') != 'gcs':
            return

        # For textures -- the map type should be part of the name.
        map_type = file_props.get('map_type', '')

        storage = GoogleCloudStorageBucket(str(node['project']))
        blob = storage.get_blob(file_doc['file_path'])
        if blob is None:
            log.warning('Unable to find blob for file %s in project %s',
                        file_doc['file_path'], file_doc['project'])
            return

        # Pick the file extension from the original filename
        _, ext = os.path.splitext(file_doc['filename'])
        name = _format_name(node['name'], ext, map_type=map_type)
        blob.update_filename(name)

        # Assign the same name to variations
        for v in file_doc.get('variations', []):
            _, override_ext = os.path.splitext(v['file_path'])
            name = _format_name(node['name'], override_ext, v['size'], map_type=map_type)
            blob = storage.get_blob(v['file_path'])
            if blob is None:
                log.info('Unable to find blob for file %s in project %s. This can happen if the '
                         'video encoding is still processing.', v['file_path'], node['project'])
                continue
            blob.update_filename(name)

    # Currently we search for 'file' and 'files' keys in the object properties.
    # This could become a bit more flexible and rely on a true reference of the
    # file object type from the schema.
    if 'file' in node['properties']:
        _update_name(node['properties']['file'], {})

    if 'files' in node['properties']:
        for file_props in node['properties']['files']:
            _update_name(file_props['file'], file_props)
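
To make the naming scheme concrete, this is what the nested _format_name() helper yields (illustrative only; the file names are invented):

    _format_name('Suzanne.blend', '.png')                    # -> 'Suzanne.png'
    _format_name('Suzanne.blend', '.png', size='1024')       # -> 'Suzanne-1024.png'
    _format_name('Suzanne.blend', '.png', size='1024',
                 map_type='color')                           # -> 'Suzanne-1024-color.png'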

pillar/api/file_storage_backends/local.py (new file, 131 lines)
@@ -0,0 +1,131 @@
import logging
import pathlib
import typing

from flask import current_app

__all__ = ['LocalBucket', 'LocalBlob']

from .abstract import Bucket, Blob, FileType, Path


class LocalBucket(Bucket):
    backend_name = 'local'

    def __init__(self, name: str) -> None:
        super().__init__(name)

        self._log = logging.getLogger(f'{__name__}.LocalBucket')

        # For local storage, the name is actually a partial path, relative
        # to the local storage root.
        self.root = pathlib.Path(current_app.config['STORAGE_DIR'])
        self.bucket_path = pathlib.PurePosixPath(self.name[:2]) / self.name
        self.abspath = self.root / self.bucket_path

    def blob(self, blob_name: str) -> 'LocalBlob':
        return LocalBlob(name=blob_name, bucket=self)

    def get_blob(self, blob_name: str) -> typing.Optional['LocalBlob']:
        # TODO: Check if file exists, otherwise None
        return self.blob(blob_name)

    def copy_blob(self, blob: Blob, to_bucket: Bucket):
        """Copies a blob from the current bucket to the other bucket.

        Implementations only need to support copying between buckets of the
        same storage backend.
        """

        assert isinstance(blob, LocalBlob)
        assert isinstance(to_bucket, LocalBucket)

        self._log.info('Copying %s to bucket %s', blob, to_bucket)

        dest_blob = to_bucket.blob(blob.name)

        # TODO: implement content type handling for local storage.
        self._log.warning('Unable to set correct file content type for %s', dest_blob)

        fpath = blob.abspath()
        if not fpath.exists():
            if not fpath.parent.exists():
                raise FileNotFoundError(f'File {fpath} does not exist, and neither does its parent,'
                                        f' unable to copy to {to_bucket}')
            raise FileNotFoundError(f'File {fpath} does not exist, unable to copy to {to_bucket}')

        with open(fpath, 'rb') as src_file:
            dest_blob.create_from_file(src_file, content_type='application/x-octet-stream')

    def rename_blob(self, blob: 'LocalBlob', new_name: str) -> 'LocalBlob':
        """Rename the blob, returning the new Blob."""

        assert isinstance(blob, LocalBlob)

        self._log.info('Renaming %s to %r', blob, new_name)
        new_blob = LocalBlob(new_name, self)

        old_path = blob.abspath()
        new_path = new_blob.abspath()
        new_path.parent.mkdir(parents=True, exist_ok=True)
        old_path.rename(new_path)

        return new_blob


class LocalBlob(Blob):
    """Blob representing a local file on the filesystem."""

    bucket: LocalBucket

    def __init__(self, name: str, bucket: LocalBucket) -> None:
        super().__init__(name, bucket)

        self._log = logging.getLogger(f'{__name__}.LocalBlob')
        self.partial_path = Path(name[:2]) / name

    def abspath(self) -> pathlib.Path:
        """Returns a concrete, absolute path to the local file."""

        return pathlib.Path(self.bucket.abspath / self.partial_path)

    def get_url(self, *, is_public: bool) -> str:
        from flask import url_for

        path = self.bucket.bucket_path / self.partial_path
        url = url_for('file_storage.index', file_name=str(path), _external=True,
                      _scheme=current_app.config['SCHEME'])
        return url

    def create_from_file(self, file_obj: FileType, *,
                         content_type: str,
                         file_size: int = -1):
        assert hasattr(file_obj, 'read')

        import shutil

        # Ensure path exists before saving
        my_path = self.abspath()
        my_path.parent.mkdir(exist_ok=True, parents=True)

        with my_path.open('wb') as outfile:
            shutil.copyfileobj(typing.cast(typing.IO, file_obj), outfile)

        self._size_in_bytes = file_size

    def update_filename(self, filename: str):
        # TODO: implement this for local storage.
        self._log.info('update_filename(%r) not supported', filename)

    def make_public(self):
        # No-op on this storage backend.
        pass

    def exists(self) -> bool:
        return self.abspath().exists()

    def touch(self):
        """Touch the file, creating parent directories if needed."""
        path = self.abspath()
        path.parent.mkdir(parents=True, exist_ok=True)
        path.touch(exist_ok=True)
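
A quick sketch of the local backend in action (assumes a Flask app context with STORAGE_DIR configured; the bucket and blob names are invented):

    from pillar.api.file_storage_backends.local import LocalBucket

    src = LocalBucket('aaaa')    # in practice the bucket name is a project ID
    dest = LocalBucket('bbbb')

    blob = src.blob('testfile.txt')
    blob.touch()                 # creates STORAGE_DIR/aa/aaaa/te/testfile.txt
    assert blob.exists()

    src.copy_blob(blob, dest)    # the content type is lost, see the TODO above
    assert dest.get_blob('testfile.txt').exists()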

@@ -1,5 +1,6 @@
-import itertools
+import typing
 
+import bson
 import pymongo
 from flask import Blueprint, current_app
 
@@ -8,103 +9,85 @@ from pillar.api.utils import jsonify
 blueprint = Blueprint('latest', __name__)
 
 
-def keep_fetching(collection, db_filter, projection, sort, py_filter,
-                  batch_size=12):
-    """Yields results for which py_filter returns True"""
-
-    projection['_deleted'] = 1
-    curs = collection.find(db_filter, projection).sort(sort)
-    curs.batch_size(batch_size)
-
-    for doc in curs:
-        if doc.get('_deleted'):
-            continue
-        doc.pop('_deleted', None)
-        if py_filter(doc):
-            yield doc
+def _public_project_ids() -> typing.List[bson.ObjectId]:
+    """Returns a list of ObjectIDs of public projects.
+
+    Memoized in setup_app().
+    """
+
+    proj_coll = current_app.db('projects')
+    result = proj_coll.find({'is_private': False}, {'_id': 1})
+    return [p['_id'] for p in result]
 
 
-def latest_nodes(db_filter, projection, py_filter, limit):
-    nodes = current_app.data.driver.db['nodes']
+def latest_nodes(db_filter, projection, limit):
+    """Returns the latest nodes, of a certain type, of public projects.
+
+    Also includes information about the project and the user of each node.
+    """
 
     proj = {
         '_created': 1,
         '_updated': 1,
+        'project._id': 1,
+        'project.url': 1,
+        'project.name': 1,
         'name': 1,
         'node_type': 1,
         'parent': 1,
+        **projection,
     }
-    proj.update(projection)
 
-    latest = keep_fetching(nodes, db_filter, proj,
-                           [('_created', pymongo.DESCENDING)],
-                           py_filter, limit)
+    nodes_coll = current_app.db('nodes')
+    pipeline = [
+        {'$match': {'_deleted': {'$ne': True}}},
+        {'$match': db_filter},
+        {'$match': {'project': {'$in': _public_project_ids()}}},
+        {'$sort': {'_created': pymongo.DESCENDING}},
+        {'$limit': limit},
+        {'$lookup': {"from": "users",
+                     "localField": "user",
+                     "foreignField": "_id",
+                     "as": "user"}},
+        {'$unwind': {'path': "$user"}},
+        {'$lookup': {"from": "projects",
+                     "localField": "project",
+                     "foreignField": "_id",
+                     "as": "project"}},
+        {'$unwind': {'path': "$project"}},
+        {'$project': proj},
+    ]
 
-    result = list(itertools.islice(latest, limit))
-    return result
-
-
-def has_public_project(node_doc):
-    """Returns True iff the project the node belongs to is public."""
-
-    project_id = node_doc.get('project')
-    return is_project_public(project_id)
-
-
-# TODO: cache result, for a limited amt. of time, or for this HTTP request.
-def is_project_public(project_id):
-    """Returns True iff the project is public."""
-
-    project = current_app.data.driver.db['projects'].find_one(project_id)
-    if not project:
-        return False
-
-    return not project.get('is_private')
+    latest = nodes_coll.aggregate(pipeline)
+    return list(latest)
 
 
 @blueprint.route('/assets')
 def latest_assets():
     latest = latest_nodes({'node_type': 'asset',
                            'properties.status': 'published'},
-                          {'name': 1, 'project': 1, 'user': 1, 'node_type': 1,
+                          {'name': 1, 'node_type': 1,
                            'parent': 1, 'picture': 1, 'properties.status': 1,
                            'properties.content_type': 1,
+                           'properties.duration_seconds': 1,
                            'permissions.world': 1},
-                          has_public_project, 12)
-
-    embed_user(latest)
-    embed_project(latest)
+                          12)
 
     return jsonify({'_items': latest})
 
 
-def embed_user(latest):
-    users = current_app.data.driver.db['users']
-
-    for comment in latest:
-        user_id = comment['user']
-        comment['user'] = users.find_one(user_id, {
-            'auth': 0, 'groups': 0, 'roles': 0, 'settings': 0, 'email': 0,
-            '_created': 0, '_updated': 0, '_etag': 0})
-
-
-def embed_project(latest):
-    projects = current_app.data.driver.db['projects']
-
-    for comment in latest:
-        project_id = comment['project']
-        comment['project'] = projects.find_one(project_id, {'_id': 1, 'name': 1,
-                                                            'url': 1})
-
-
 @blueprint.route('/comments')
 def latest_comments():
     latest = latest_nodes({'node_type': 'comment',
                            'properties.status': 'published'},
-                          {'project': 1, 'parent': 1, 'user': 1,
+                          {'parent': 1, 'user.full_name': 1,
                            'properties.content': 1, 'node_type': 1,
                            'properties.status': 1,
                            'properties.is_reply': 1},
-                          has_public_project, 10)
+                          10)
 
     # Embed the comments' parents.
     # TODO: move to aggregation pipeline.
     nodes = current_app.data.driver.db['nodes']
     parents = {}
     for comment in latest:
@@ -118,11 +101,12 @@ def latest_comments():
         parents[parent_id] = parent
         comment['parent'] = parent
 
-    embed_project(latest)
-    embed_user(latest)
-
     return jsonify({'_items': latest})
 
 
 def setup_app(app, url_prefix):
+    global _public_project_ids
+
     app.register_api_blueprint(blueprint, url_prefix=url_prefix)
+    cached = app.cache.cached(timeout=3600)
+    _public_project_ids = cached(_public_project_ids)
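
The upshot of this rewrite: the per-document py_filter callback disappears, and project visibility is enforced inside MongoDB itself. A standalone sketch of the equivalent query (not part of the changeset; assumes a bare pymongo database handle `db`):

    import pymongo

    def latest_published_assets(db, limit=12):
        public_ids = [p['_id']
                      for p in db.projects.find({'is_private': False}, {'_id': 1})]
        return list(db.nodes.aggregate([
            {'$match': {'node_type': 'asset',
                        'properties.status': 'published',
                        '_deleted': {'$ne': True},
                        'project': {'$in': public_ids}}},
            {'$sort': {'_created': pymongo.DESCENDING}},
            {'$limit': limit},
        ]))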

@@ -1,15 +1,16 @@
 import base64
+import datetime
 import hashlib
 import logging
+import typing
 
 import bcrypt
-import datetime
-import rsa.randnum
-from bson import tz_util
 
 from flask import abort, Blueprint, current_app, jsonify, request
 from pillar.api.utils.authentication import create_new_user_document
 from pillar.api.utils.authentication import make_unique_username
 from pillar.api.utils.authentication import store_token
+from pillar.api.utils import utcnow
 
 blueprint = Blueprint('authentication', __name__)
+log = logging.getLogger(__name__)
@@ -37,17 +38,7 @@ def create_local_user(email, password):
     return r['_id']
 
 
-@blueprint.route('/make-token', methods=['POST'])
-def make_token():
-    """Direct login for a user, without OAuth, using local database. Generates
-    a token that is passed back to Pillar Web and used in subsequent
-    transactions.
-
-    :return: a token string
-    """
-    username = request.form['username']
-    password = request.form['password']
-
+def get_local_user(username, password):
     # Look up user in db
     users_collection = current_app.data.driver.db['users']
     user = users_collection.find_one({'username': username})
@@ -62,35 +53,63 @@ def make_token():
     hashed_password = hash_password(password, salt)
     if hashed_password != credentials['token']:
         return abort(403)
+    return user
+
+
+@blueprint.route('/make-token', methods=['POST'])
+def make_token():
+    """Direct login for a user, without OAuth, using local database. Generates
+    a token that is passed back to Pillar Web and used in subsequent
+    transactions.
+
+    :return: a token string
+    """
+    username = request.form['username']
+    password = request.form['password']
+
+    user = get_local_user(username, password)
 
     token = generate_and_store_token(user['_id'])
     return jsonify(token=token['token'])
 
 
-def generate_and_store_token(user_id, days=15, prefix=''):
+def generate_and_store_token(user_id, days=15, prefix=b'') -> dict:
     """Generates token based on random bits.
 
+    NOTE: the returned document includes the plain-text token.
+    DO NOT STORE OR LOG THIS unless there is a good reason to.
+
    :param user_id: ObjectId of the owning user.
    :param days: token will expire in this many days.
-    :param prefix: the token will be prefixed by this string, for easy identification.
-    :return: the token document.
+    :param prefix: the token will be prefixed by these bytes, for easy identification.
+    :return: the token document with the token in plain text as well as hashed.
     """
 
-    random_bits = rsa.randnum.read_random_bits(256)
+    if not isinstance(prefix, bytes):
+        raise TypeError('prefix must be bytes, not %s' % type(prefix))
+
+    import secrets
+
+    random_bits = secrets.token_bytes(32)
 
     # Use 'xy' as altchars to prevent + and / characters from appearing.
     # We never have to b64decode the string anyway.
-    token = prefix + base64.b64encode(random_bits, altchars='xy').strip('=')
+    token = prefix + base64.b64encode(random_bits, altchars=b'xy').strip(b'=')
 
-    token_expiry = datetime.datetime.now(tz=tz_util.utc) + datetime.timedelta(days=days)
-    return store_token(user_id, token, token_expiry)
+    token_expiry = utcnow() + datetime.timedelta(days=days)
+    return store_token(user_id, token.decode('ascii'), token_expiry)
 
 
-def hash_password(password, salt):
-    if isinstance(salt, unicode):
+def hash_password(password: str, salt: typing.Union[str, bytes]) -> str:
+    password = password.encode()
+
+    if isinstance(salt, str):
         salt = salt.encode('utf-8')
-    encoded_password = base64.b64encode(hashlib.sha256(password).digest())
-    return bcrypt.hashpw(encoded_password, salt)
+
+    hash = hashlib.sha256(password).digest()
+    encoded_password = base64.b64encode(hash)
+    hashed_password = bcrypt.hashpw(encoded_password, salt)
+    return hashed_password.decode('ascii')
 
 
 def setup_app(app, url_prefix):
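
Two properties of the helpers above are worth spelling out (a sketch, not part of the changeset; hash_password() is assumed importable and the values are illustrative):

    import base64
    import secrets

    import bcrypt

    token = base64.b64encode(secrets.token_bytes(32), altchars=b'xy').strip(b'=')
    # 43 bytes drawn from [A-Za-z0-9xy]: URL-safe, and never needs decoding.

    # bcrypt embeds the salt in its output, so hash_password() doubles as its
    # own verifier: re-hashing with the stored hash as the salt must reproduce
    # the stored hash.
    stored = hash_password('s3cret', bcrypt.gensalt())
    assert hash_password('s3cret', stored) == stored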

@@ -7,12 +7,12 @@ _file_embedded_schema = {
     }
 }
 
-ATTACHMENT_SLUG_REGEX = '[a-zA-Z0-9_ ]+'
+ATTACHMENT_SLUG_REGEX = r'[a-zA-Z0-9_\-]+'
 
-_attachments_embedded_schema = {
+attachments_embedded_schema = {
     'type': 'dict',
-    # TODO: will be renamed to 'keyschema' in Cerberus 1.0
-    'propertyschema': {
+    'keyschema': {
         'type': 'string',
         'regex': '^%s$' % ATTACHMENT_SLUG_REGEX,
     },
@@ -40,6 +40,51 @@ _attachments_embedded_schema = {
     },
 }
 
+# TODO (fsiddi) reference this schema in all node_types that allow ratings
+ratings_embedded_schema = {
+    'type': 'dict',
+    # Total count of positive ratings (updated at every rating action)
+    'schema': {
+        'positive': {
+            'type': 'integer',
+        },
+        # Total count of negative ratings (updated at every rating action)
+        'negative': {
+            'type': 'integer',
+        },
+        # Collection of ratings, keyed by user
+        'ratings': {
+            'type': 'list',
+            'schema': {
+                'type': 'dict',
+                'schema': {
+                    'user': {
+                        'type': 'objectid',
+                        'data_relation': {
+                            'resource': 'users',
+                            'field': '_id',
+                            'embeddable': False
+                        }
+                    },
+                    'is_positive': {
+                        'type': 'boolean'
+                    },
+                    # Weight of the rating based on user rep and the context.
+                    # Currently we have the following weights:
+                    # - 1 auto null
+                    # - 2 manual null
+                    # - 3 auto valid
+                    # - 4 manual valid
+                    'weight': {
+                        'type': 'integer'
+                    }
+                }
+            }
+        },
+        'hot': {'type': 'float'},
+    },
+}
+
+# Import after defining the common embedded schemas, to prevent dependency cycles.
 from pillar.api.node_types.asset import node_type_asset
 from pillar.api.node_types.blog import node_type_blog
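
An invented node fragment that would validate against this ratings schema:

    from bson import ObjectId

    ratings = {
        'positive': 3,
        'negative': 1,
        'ratings': [
            {'user': ObjectId('5672beecc0261b2005ed1a33'),  # invented ID
             'is_positive': True,
             'weight': 4},  # manual valid
        ],
        'hot': 12.5,
    }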

@@ -1,4 +1,4 @@
-from pillar.api.node_types import _file_embedded_schema, _attachments_embedded_schema
+from pillar.api.node_types import _file_embedded_schema, attachments_embedded_schema
 
 node_type_asset = {
     'name': 'asset',
@@ -24,10 +24,14 @@ node_type_asset = {
         'content_type': {
             'type': 'string'
         },
+        # The duration of a video asset in seconds.
+        'duration_seconds': {
+            'type': 'integer'
+        },
         # We point to the original file (and use it to extract any relevant
         # variation useful for our scope).
         'file': _file_embedded_schema,
-        'attachments': _attachments_embedded_schema,
+        'attachments': attachments_embedded_schema,
         # Tags for search
         'tags': {
             'type': 'list',
@@ -58,6 +62,7 @@ node_type_asset = {
     },
     'form_schema': {
         'content_type': {'visible': False},
+        'duration_seconds': {'visible': False},
         'order': {'visible': False},
         'tags': {'visible': False},
         'categories': {'visible': False},

@@ -2,10 +2,6 @@ node_type_blog = {
     'name': 'blog',
     'description': 'Container for node_type post.',
     'dyn_schema': {
-        # Path for a custom template to be used for rendering the posts
-        'template': {
-            'type': 'string',
-        },
         'categories': {
             'type': 'list',
             'schema': {
@@ -17,5 +13,5 @@ node_type_blog = {
         'categories': {},
         'template': {},
     },
-    'parent': ['project',],
+    'parent': ['project', ],
 }

@@ -2,16 +2,14 @@ node_type_comment = {
     'name': 'comment',
     'description': 'Comments for asset nodes, pages, etc.',
     'dyn_schema': {
-        # The actual comment content (initially Markdown format)
+        # The actual comment content
         'content': {
             'type': 'string',
             'minlength': 5,
             'required': True,
+            'validator': 'markdown',
         },
-        # The converted-to-HTML content.
-        'content_html': {
-            'type': 'string',
-        },
+        '_content_html': {'type': 'string'},
         'status': {
             'type': 'string',
             'allowed': [

@@ -3,7 +3,7 @@ node_type_group = {
     'description': 'Folder node type',
     'parent': ['group', 'project'],
     'dyn_schema': {
         # Used for sorting within the context of a group
-
         'order': {
             'type': 'integer'
         },
@@ -20,7 +20,8 @@ node_type_group = {
         'notes': {
             'type': 'string',
             'maxlength': 256,
-        },
+        }
+
     },
     'form_schema': {
         'url': {'visible': False},

@@ -7,6 +7,11 @@ node_type_hdri = {
     'description': 'HDR Image',
     'parent': ['group_hdri'],
     'dyn_schema': {
+        # Default yaw angle in degrees.
+        'default_yaw': {
+            'type': 'float',
+            'default': 0.0
+        },
         'status': {
             'type': 'string',
             'allowed': [

@@ -1,16 +1,9 @@
-from pillar.api.node_types import _attachments_embedded_schema
+from pillar.api.node_types import attachments_embedded_schema
 
 node_type_page = {
     'name': 'page',
     'description': 'A single page',
     'dyn_schema': {
-        # The page content (Markdown format)
-        'content': {
-            'type': 'string',
-            'minlength': 5,
-            'maxlength': 90000,
-            'required': True
-        },
         'status': {
             'type': 'string',
             'allowed': [
@@ -22,7 +15,7 @@ node_type_page = {
         'url': {
             'type': 'string'
         },
-        'attachments': _attachments_embedded_schema,
+        'attachments': attachments_embedded_schema,
     },
     'form_schema': {
         'attachments': {'visible': False},

@@ -1,16 +1,17 @@
-from pillar.api.node_types import _attachments_embedded_schema
+from pillar.api.node_types import attachments_embedded_schema
 
 node_type_post = {
     'name': 'post',
     'description': 'A blog post, for any project',
     'dyn_schema': {
-        # The blogpost content (Markdown format)
         'content': {
             'type': 'string',
             'minlength': 5,
             'maxlength': 90000,
-            'required': True
+            'required': True,
+            'validator': 'markdown',
         },
+        '_content_html': {'type': 'string'},
         'status': {
             'type': 'string',
             'allowed': [
@@ -26,7 +27,7 @@ node_type_post = {
         'url': {
             'type': 'string'
         },
-        'attachments': _attachments_embedded_schema,
+        'attachments': attachments_embedded_schema,
     },
     'form_schema': {
         'attachments': {'visible': False},

@@ -16,7 +16,7 @@ node_type_storage = {
         'subdir': {
             'type': 'string',
         },
-        # Which backend is used to store the files (gcs, pillar, bam, cdnsun)
+        # Which backend is used to store the files (gcs, local)
         'backend': {
             'type': 'string',
         },

@@ -27,13 +27,19 @@ node_type_texture = {
                     'map_type': {
                         'type': 'string',
                         'allowed': [
-                            'color',
-                            'specular',
-                            'bump',
-                            'normal',
-                            'translucency',
-                            'emission',
-                            'alpha'
+                            "alpha",
+                            "ambient occlusion",
+                            "bump",
+                            "color",
+                            "displacement",
+                            "emission",
+                            "glossiness",
+                            "id",
+                            "mask",
+                            "normal",
+                            "roughness",
+                            "specular",
+                            "translucency",
                         ]}
                 }
             }
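
Note what the new ATTACHMENT_SLUG_REGEX (shared by the asset, page and post types above) changes for attachment slugs: hyphens become legal while spaces stop being legal. A quick check (sketch):

    import re

    slug_re = re.compile('^%s$' % r'[a-zA-Z0-9_\-]+')

    assert slug_re.match('header-image')      # hyphens now allowed
    assert not slug_re.match('header image')  # spaces no longer are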
@@ -1,60 +1,19 @@
 | 
			
		||||
import base64
 | 
			
		||||
import functools
 | 
			
		||||
import datetime
 | 
			
		||||
import logging
 | 
			
		||||
import urlparse
 | 
			
		||||
 | 
			
		||||
import pymongo.errors
 | 
			
		||||
import rsa.randnum
 | 
			
		||||
import werkzeug.exceptions as wz_exceptions
 | 
			
		||||
from bson import ObjectId
 | 
			
		||||
from flask import current_app, g, Blueprint, request
 | 
			
		||||
from flask import current_app, Blueprint, request
 | 
			
		||||
 | 
			
		||||
import pillar.markdown
 | 
			
		||||
from pillar.api.node_types import PILLAR_NAMED_NODE_TYPES
 | 
			
		||||
from pillar.api.activities import activity_subscribe, activity_object_add
 | 
			
		||||
from pillar.api.utils.algolia import algolia_index_node_delete
 | 
			
		||||
from pillar.api.utils.algolia import algolia_index_node_save
 | 
			
		||||
from pillar.api.nodes import eve_hooks
 | 
			
		||||
from pillar.api.utils import str2id, jsonify
 | 
			
		||||
from pillar.api.utils.authorization import check_permissions, require_login
 | 
			
		||||
from pillar.api.utils.gcs import update_file_name
 | 
			
		||||
from pillar.web.utils import pretty_date
 | 
			
		||||
 | 
			
		||||
log = logging.getLogger(__name__)
 | 
			
		||||
blueprint = Blueprint('nodes_api', __name__)
 | 
			
		||||
ROLES_FOR_SHARING = {u'subscriber', u'demo'}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def only_for_node_type_decorator(*required_node_type_names):
 | 
			
		||||
    """Returns a decorator that checks its first argument's node type.
 | 
			
		||||
 | 
			
		||||
    If the node type is not of the required node type, returns None,
 | 
			
		||||
    otherwise calls the wrapped function.
 | 
			
		||||
 | 
			
		||||
    >>> deco = only_for_node_type_decorator('comment')
 | 
			
		||||
    >>> @deco
 | 
			
		||||
    ... def handle_comment(node): pass
 | 
			
		||||
 | 
			
		||||
    >>> deco = only_for_node_type_decorator('comment', 'post')
 | 
			
		||||
    >>> @deco
 | 
			
		||||
    ... def handle_comment_or_post(node): pass
 | 
			
		||||
 | 
			
		||||
    """
 | 
			
		||||
 | 
			
		||||
    # Convert to a set for efficient 'x in required_node_type_names' queries.
 | 
			
		||||
    required_node_type_names = set(required_node_type_names)
 | 
			
		||||
 | 
			
		||||
    def only_for_node_type(wrapped):
 | 
			
		||||
        @functools.wraps(wrapped)
 | 
			
		||||
        def wrapper(node, *args, **kwargs):
 | 
			
		||||
            if node.get('node_type') not in required_node_type_names:
 | 
			
		||||
                return
 | 
			
		||||
 | 
			
		||||
            return wrapped(node, *args, **kwargs)
 | 
			
		||||
 | 
			
		||||
        return wrapper
 | 
			
		||||
 | 
			
		||||
    only_for_node_type.__doc__ = "Decorator, immediately returns when " \
 | 
			
		||||
                                 "the first argument is not of type %s." % required_node_type_names
 | 
			
		||||
    return only_for_node_type
 | 
			
		||||
ROLES_FOR_SHARING = {'subscriber', 'demo'}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@blueprint.route('/<node_id>/share', methods=['GET', 'POST'])
 | 
			
		||||
@@ -89,7 +48,80 @@ def share_node(node_id):
 | 
			
		||||
        else:
 | 
			
		||||
            return '', 204
 | 
			
		||||
 | 
			
		||||
    return jsonify(short_link_info(short_code), status=status)
 | 
			
		||||
    return jsonify(eve_hooks.short_link_info(short_code), status=status)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@blueprint.route('/tagged/')
 | 
			
		||||
@blueprint.route('/tagged/<tag>')
 | 
			
		||||
def tagged(tag=''):
 | 
			
		||||
    """Return all tagged nodes of public projects as JSON."""
 | 
			
		||||
    from pillar.auth import current_user
 | 
			
		||||
 | 
			
		||||
    # We explicitly register the tagless endpoint to raise a 404, otherwise the PATCH
 | 
			
		||||
    # handler on /api/nodes/<node_id> will return a 405 Method Not Allowed.
 | 
			
		||||
    if not tag:
 | 
			
		||||
        raise wz_exceptions.NotFound()
 | 
			
		||||
 | 
			
		||||
    # Build the (cached) list of tagged nodes
 | 
			
		||||
    agg_list = _tagged(tag)
 | 
			
		||||
 | 
			
		||||
    for node in agg_list:
 | 
			
		||||
        if node['properties'].get('duration_seconds'):
 | 
			
		||||
            node['properties']['duration'] = datetime.timedelta(seconds=node['properties']['duration_seconds'])
 | 
			
		||||
 | 
			
		||||
        if node.get('_created') is not None:
 | 
			
		||||
            node['pretty_created'] = pretty_date(node['_created'])
 | 
			
		||||
 | 
			
		||||
    # If the user is anonymous, no more information is needed and we return
 | 
			
		||||
    if current_user.is_anonymous:
 | 
			
		||||
        return jsonify(agg_list)
 | 
			
		||||
 | 
			
		||||
    # If the user is authenticated, attach view_progress for video assets
 | 
			
		||||
    view_progress = current_user.nodes['view_progress']
 | 
			
		||||
    for node in agg_list:
 | 
			
		||||
        node_id = str(node['_id'])
 | 
			
		||||
        # View progress should be added only for nodes of type 'asset' and
 | 
			
		||||
        # with content_type 'video', only if the video was already in the watched
 | 
			
		||||
        # list for the current user.
 | 
			
		||||
        if node_id in view_progress:
 | 
			
		||||
            node['view_progress'] = view_progress[node_id]
 | 
			
		||||
 | 
			
		||||
    return jsonify(agg_list)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _tagged(tag: str):
 | 
			
		||||
    """Fetch all public nodes with the given tag.
 | 
			
		||||
 | 
			
		||||
    This function is cached, see setup_app().
 | 
			
		||||
    """
 | 
			
		||||
    nodes_coll = current_app.db('nodes')
 | 
			
		||||
    agg = nodes_coll.aggregate([
 | 
			
		||||
        {'$match': {'properties.tags': tag,
 | 
			
		||||
                    '_deleted': {'$ne': True}}},
 | 
			
		||||
 | 
			
		||||
        # Only get nodes from public projects. This is done after matching the
 | 
			
		||||
        # tagged nodes, because most likely nobody else will be able to tag
 | 
			
		||||
        # nodes anyway.
 | 
			
		||||
        {'$lookup': {
 | 
			
		||||
            'from': 'projects',
 | 
			
		||||
            'localField': 'project',
 | 
			
		||||
            'foreignField': '_id',
 | 
			
		||||
            'as': '_project',
 | 
			
		||||
        }},
 | 
			
		||||
        {'$unwind': '$_project'},
 | 
			
		||||
        {'$match': {'_project.is_private': False}},
 | 
			
		||||
        {'$addFields': {
 | 
			
		||||
            'project._id': '$_project._id',
 | 
			
		||||
            'project.name': '$_project.name',
 | 
			
		||||
            'project.url': '$_project.url',
 | 
			
		||||
        }},
 | 
			
		||||
 | 
			
		||||
        # Don't return the entire project/file for each node.
 | 
			
		||||
        {'$project': {'_project': False}},
 | 
			
		||||
        {'$sort': {'_created': -1}}
 | 
			
		||||
    ])
 | 
			
		||||
 | 
			
		||||
    return list(agg)


def generate_and_store_short_code(node):
@@ -138,7 +170,7 @@ def make_world_gettable(node):
    log.debug('Ensuring the world can read node %s', node_id)

    world_perms = set(node.get('permissions', {}).get('world', []))
    world_perms.add(u'GET')
    world_perms.add('GET')
    world_perms = list(world_perms)

    result = nodes_coll.update_one({'_id': node_id},
@@ -150,279 +182,52 @@ def make_world_gettable(node):
                                                node_id)


def create_short_code(node):
def create_short_code(node) -> str:
    """Generates a new 'short code' for the node."""

    import secrets

    length = current_app.config['SHORT_CODE_LENGTH']
    bits = rsa.randnum.read_random_bits(32)
    short_code = base64.b64encode(bits, altchars='xy').rstrip('=')
    short_code = short_code[:length]

    # Base64 encoding will expand it a bit, so we'll cut that off later.
    # It's a good idea to start with enough bytes, though.
    bits = secrets.token_bytes(length)

    short_code = base64.b64encode(bits, altchars=b'xy').rstrip(b'=')
    short_code = short_code[:length].decode('ascii')

    return short_code
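Base64 emits four output characters per three input bytes, so token_bytes(length) always yields at least `length` characters before the slice trims the surplus. A quick arithmetic check (a sketch, not part of the module; the SHORT_CODE_LENGTH value is hypothetical):

    import base64
    import secrets

    length = 6  # hypothetical SHORT_CODE_LENGTH
    encoded = base64.b64encode(secrets.token_bytes(length), altchars=b'xy').rstrip(b'=')
    assert len(encoded) >= length  # 6 bytes encode to ceil(6/3)*4 == 8 chars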


def short_link_info(short_code):
    """Returns the short link info in a dict."""

    short_link = urlparse.urljoin(current_app.config['SHORT_LINK_BASE_URL'], short_code)

    return {
        'short_code': short_code,
        'short_link': short_link,
    }
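Note that urljoin() only appends the code when the base URL ends in a slash; otherwise it replaces the last path segment, so SHORT_LINK_BASE_URL should carry a trailing slash. An illustration (base URLs hypothetical):

    from urllib.parse import urljoin

    urljoin('https://example.com/r/', 'Xy3k')  # -> 'https://example.com/r/Xy3k'
    urljoin('https://example.com/r', 'Xy3k')   # -> 'https://example.com/Xy3k'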


def before_replacing_node(item, original):
    check_permissions('nodes', original, 'PUT')
    update_file_name(item)


def after_replacing_node(item, original):
    """Push an update to the Algolia index when a node item is updated. If the
    project is private, prevent public indexing.
    """

    projects_collection = current_app.data.driver.db['projects']
    project = projects_collection.find_one({'_id': item['project']})
    if project.get('is_private', False):
        # Skip index updating and return
        return

    from algoliasearch.client import AlgoliaException
    status = item['properties'].get('status', 'unpublished')

    if status == 'published':
        try:
            algolia_index_node_save(item)
        except AlgoliaException as ex:
            log.warning('Unable to push node info to Algolia for node %s; %s',
                        item.get('_id'), ex)
    else:
        try:
            algolia_index_node_delete(item)
        except AlgoliaException as ex:
            log.warning('Unable to delete node info from Algolia for node %s; %s',
                        item.get('_id'), ex)


def before_inserting_nodes(items):
    """Before inserting a node in the collection we check if the user is allowed
    and we append the project id to it.
    """
    nodes_collection = current_app.data.driver.db['nodes']

    def find_parent_project(node):
        """Recursive function that finds the ultimate parent of a node."""
        if node and 'parent' in node:
            parent = nodes_collection.find_one({'_id': node['parent']})
            return find_parent_project(parent)
        if node:
            return node
        else:
            return None

    for item in items:
        check_permissions('nodes', item, 'POST')
        if 'parent' in item and 'project' not in item:
            parent = nodes_collection.find_one({'_id': item['parent']})
            project = find_parent_project(parent)
            if project:
                item['project'] = project['_id']

        # Default the 'user' property to the current user.
        item.setdefault('user', g.current_user['user_id'])
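find_parent_project() issues one query per ancestor and recurses with no depth limit; an equivalent iterative walk (a sketch under the same collection API) would avoid deep call stacks on long parent chains:

    def find_parent_project_iter(node):
        # Follow 'parent' references until the top-most node is reached.
        while node and 'parent' in node:
            node = nodes_collection.find_one({'_id': node['parent']})
        return node or None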


def after_inserting_nodes(items):
    for item in items:
        # Skip subscriptions for first level items (since the context is not a
        # node, but a project).
        # TODO: support should be added for mixed context
        if 'parent' not in item:
            continue
        context_object_id = item['parent']
        if item['node_type'] == 'comment':
            nodes_collection = current_app.data.driver.db['nodes']
            parent = nodes_collection.find_one({'_id': item['parent']})
            # Always subscribe to the parent node
            activity_subscribe(item['user'], 'node', item['parent'])
            if parent['node_type'] == 'comment':
                # If the parent is a comment, we provide its own parent as
                # context. We do this in order to point the user to an asset
                # or group when viewing the notification.
                verb = 'replied'
                context_object_id = parent['parent']
                # Subscribe to the parent of the parent comment (post or group)
                activity_subscribe(item['user'], 'node', parent['parent'])
            else:
                activity_subscribe(item['user'], 'node', item['_id'])
                verb = 'commented'
        elif item['node_type'] in PILLAR_NAMED_NODE_TYPES:
            verb = 'posted'
            activity_subscribe(item['user'], 'node', item['_id'])
        else:
            # Don't automatically create activities for non-Pillar node types,
            # as we don't know what would be a suitable verb (among other things).
            continue

        activity_object_add(
            item['user'],
            verb,
            'node',
            item['_id'],
            'node',
            context_object_id
        )


def deduct_content_type(node_doc, original=None):
    """Deduct the content type from the attached file, if any."""

    if node_doc['node_type'] != 'asset':
        log.debug('deduct_content_type: called on node type %r, ignoring', node_doc['node_type'])
        return

    node_id = node_doc.get('_id')
    try:
        file_id = ObjectId(node_doc['properties']['file'])
    except KeyError:
        if node_id is None:
            # Creation of a file-less node is allowed, but updates aren't.
            return
        log.warning('deduct_content_type: Asset without properties.file, rejecting.')
        raise wz_exceptions.UnprocessableEntity('Missing file property for asset node')

    files = current_app.data.driver.db['files']
    file_doc = files.find_one({'_id': file_id},
                              {'content_type': 1})
    if not file_doc:
        log.warning('deduct_content_type: Node %s refers to non-existing file %s, rejecting.',
                    node_id, file_id)
        raise wz_exceptions.UnprocessableEntity('File property refers to non-existing file')

    # Guess the node content type from the file content type
    file_type = file_doc['content_type']
    if file_type.startswith('video/'):
        content_type = 'video'
    elif file_type.startswith('image/'):
        content_type = 'image'
    else:
        content_type = 'file'

    node_doc['properties']['content_type'] = content_type


def nodes_deduct_content_type(nodes):
    for node in nodes:
        deduct_content_type(node)


def before_returning_node(node):
    # Run validation process, since GET on nodes entry point is public
    check_permissions('nodes', node, 'GET', append_allowed_methods=True)

    # Embed short_link_info if the node has a short_code.
    short_code = node.get('short_code')
    if short_code:
        node['short_link'] = short_link_info(short_code)['short_link']


def before_returning_nodes(nodes):
    for node in nodes['_items']:
        before_returning_node(node)


def node_set_default_picture(node, original=None):
    """Uses the image of an image asset or colour map of texture node as picture."""

    if node.get('picture'):
        log.debug('Node %s already has a picture, not overriding', node.get('_id'))
        return

    node_type = node.get('node_type')
    props = node.get('properties', {})
    content = props.get('content_type')

    if node_type == 'asset' and content == 'image':
        image_file_id = props.get('file')
    elif node_type == 'texture':
        # Find the colour map, defaulting to the first image map available.
        image_file_id = None
        for image in props.get('files', []):
            if image_file_id is None or image.get('map_type') == u'color':
                image_file_id = image.get('file')
    else:
        log.debug('Not setting default picture on node type %s content type %s',
                  node_type, content)
        return

    if image_file_id is None:
        log.debug('Nothing to set the picture to.')
        return

    log.debug('Setting default picture for node %s to %s', node.get('_id'), image_file_id)
    node['picture'] = image_file_id


def nodes_set_default_picture(nodes):
    for node in nodes:
        node_set_default_picture(node)


def after_deleting_node(item):
    from algoliasearch.client import AlgoliaException
    try:
        algolia_index_node_delete(item)
    except AlgoliaException as ex:
        log.warning('Unable to delete node info from Algolia for node %s; %s',
                    item.get('_id'), ex)


only_for_comments = only_for_node_type_decorator('comment')


@only_for_comments
def convert_markdown(node, original=None):
    """Converts comments from Markdown to HTML.

    Always does this on save, even when the original Markdown hasn't changed,
    because our Markdown -> HTML conversion rules might have.
    """

    try:
        content = node['properties']['content']
    except KeyError:
        node['properties']['content_html'] = ''
    else:
        node['properties']['content_html'] = pillar.markdown.markdown(content)


def nodes_convert_markdown(nodes):
    for node in nodes:
        convert_markdown(node)


def setup_app(app, url_prefix):
    global _tagged

    cached = app.cache.memoize(timeout=300)
    _tagged = cached(_tagged)

    from . import patch
    patch.setup_app(app, url_prefix=url_prefix)

    app.on_fetched_item_nodes += before_returning_node
    app.on_fetched_resource_nodes += before_returning_nodes
    app.on_fetched_item_nodes += eve_hooks.before_returning_node
    app.on_fetched_resource_nodes += eve_hooks.before_returning_nodes

    app.on_replace_nodes += before_replacing_node
    app.on_replace_nodes += convert_markdown
    app.on_replace_nodes += deduct_content_type
    app.on_replace_nodes += node_set_default_picture
    app.on_replaced_nodes += after_replacing_node
    app.on_replace_nodes += eve_hooks.before_replacing_node
    app.on_replace_nodes += eve_hooks.parse_markdown
    app.on_replace_nodes += eve_hooks.texture_sort_files
    app.on_replace_nodes += eve_hooks.deduct_content_type_and_duration
    app.on_replace_nodes += eve_hooks.node_set_default_picture
    app.on_replaced_nodes += eve_hooks.after_replacing_node

    app.on_insert_nodes += before_inserting_nodes
    app.on_insert_nodes += nodes_deduct_content_type
    app.on_insert_nodes += nodes_set_default_picture
    app.on_insert_nodes += nodes_convert_markdown
    app.on_inserted_nodes += after_inserting_nodes
    app.on_insert_nodes += eve_hooks.before_inserting_nodes
    app.on_insert_nodes += eve_hooks.parse_markdowns
    app.on_insert_nodes += eve_hooks.nodes_deduct_content_type_and_duration
    app.on_insert_nodes += eve_hooks.nodes_set_default_picture
    app.on_insert_nodes += eve_hooks.textures_sort_files
    app.on_inserted_nodes += eve_hooks.after_inserting_nodes

    app.on_update_nodes += convert_markdown
    app.on_update_nodes += eve_hooks.texture_sort_files

    app.on_deleted_item_nodes += after_deleting_node
    app.on_delete_item_nodes += eve_hooks.before_deleting_node
    app.on_deleted_item_nodes += eve_hooks.after_deleting_node

    app.register_api_blueprint(blueprint, url_prefix=url_prefix)
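app.cache.memoize() keys the cache on the call arguments, which is why rebinding the module-level _tagged keeps one entry per tag. The same pattern in isolation (Flask-Caching memoize, timeout in seconds):

    cached = app.cache.memoize(timeout=300)

    def expensive(tag: str) -> list:
        ...  # hits the database

    expensive = cached(expensive)  # same effect as decorating with @app.cache.memoize(timeout=300)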

@@ -2,31 +2,37 @@

import logging

from eve.methods.patch import patch_internal
from flask import current_app
import werkzeug.exceptions as wz_exceptions

from pillar.api.utils import authorization, authentication, jsonify
from pillar.api.utils.rating import confidence

from . import register_patch_handler

log = logging.getLogger(__name__)
ROLES_FOR_COMMENT_VOTING = {u'subscriber', u'demo'}
COMMENT_VOTING_OPS = {u'upvote', u'downvote', u'revoke'}
VALID_COMMENT_OPERATIONS = COMMENT_VOTING_OPS.union({u'edit'})
COMMENT_VOTING_OPS = {'upvote', 'downvote', 'revoke'}
VALID_COMMENT_OPERATIONS = COMMENT_VOTING_OPS.union({'edit'})


@register_patch_handler(u'comment')
@register_patch_handler('comment')
def patch_comment(node_id, patch):
    assert_is_valid_patch(node_id, patch)
    user_id = authentication.current_user_id()

    if patch[u'op'] in COMMENT_VOTING_OPS:
    if patch['op'] in COMMENT_VOTING_OPS:
        result, node = vote_comment(user_id, node_id, patch)
    else:
        assert patch[u'op'] == u'edit', 'Invalid patch operation %s' % patch[u'op']
        assert patch['op'] == 'edit', 'Invalid patch operation %s' % patch['op']
        result, node = edit_comment(user_id, node_id, patch)

    # Calculate and update confidence.
    rating_confidence = confidence(
        node['properties']['rating_positive'], node['properties']['rating_negative'])
    current_app.data.driver.db['nodes'].update_one(
        {'_id': node_id},
        {'$set': {'properties.confidence': rating_confidence}})

    return jsonify({'_status': 'OK',
                    'result': result,
                    'properties': node['properties']
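confidence() collapses the up/down counts into a single sortable score. Assuming it follows the common Wilson score lower bound (an assumption; the actual formula lives in pillar/api/utils/rating.py), it behaves roughly like this hypothetical stand-in:

    import math

    def wilson_lower_bound(pos: int, neg: int, z: float = 1.96) -> float:
        # Hypothetical stand-in: lower bound of the 95% confidence
        # interval on the fraction of positive ratings.
        n = pos + neg
        if n == 0:
            return 0.0
        phat = pos / n
        return (phat + z * z / (2 * n)
                - z * math.sqrt((phat * (1 - phat) + z * z / (4 * n)) / n)) / (1 + z * z / n)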
@@ -43,11 +49,15 @@ def vote_comment(user_id, node_id, patch):
                  '$or': [{'properties.ratings.$.user': {'$exists': False}},
                          {'properties.ratings.$.user': user_id}]}
    node = nodes_coll.find_one(node_query,
                               projection={'properties': 1})
                               projection={'properties': 1, 'user': 1})
    if node is None:
        log.warning('User %s wanted to patch non-existing node %s' % (user_id, node_id))
        raise wz_exceptions.NotFound('Node %s not found' % node_id)

    # We don't allow the user to down/upvote their own nodes.
    if user_id == node['user']:
        raise wz_exceptions.Forbidden('You cannot vote on your own node')

    props = node['properties']

    # Find the current rating (if any)
@@ -95,9 +105,9 @@ def vote_comment(user_id, node_id, patch):
        return update

    actions = {
        u'upvote': upvote,
        u'downvote': downvote,
        u'revoke': revoke,
        'upvote': upvote,
        'downvote': downvote,
        'revoke': revoke,
    }
    action = actions[patch['op']]
    mongo_update = action()
@@ -141,17 +151,17 @@ def edit_comment(user_id, node_id, patch):
        log.warning('User %s wanted to patch non-existing node %s' % (user_id, node_id))
        raise wz_exceptions.NotFound('Node %s not found' % node_id)

    if node['user'] != user_id and not authorization.user_has_role(u'admin'):
    if node['user'] != user_id and not authorization.user_has_role('admin'):
        raise wz_exceptions.Forbidden('You can only edit your own comments.')

    # Use Eve to PATCH this node, as that also updates the etag.
    r, _, _, status = patch_internal('nodes',
                                     {'properties.content': patch['content'],
                                      'project': node['project'],
                                      'user': node['user'],
                                      'node_type': node['node_type']},
                                     concurrency_check=False,
                                     _id=node_id)
    r, _, _, status = current_app.patch_internal('nodes',
                                                 {'properties.content': patch['content'],
                                                  'project': node['project'],
                                                  'user': node['user'],
                                                  'node_type': node['node_type']},
                                                 concurrency_check=False,
                                                 _id=node_id)
    if status != 200:
        log.error('Error %i editing comment %s for user %s: %s',
                  status, node_id, user_id, r)
@@ -160,7 +170,10 @@ def edit_comment(user_id, node_id, patch):
        log.info('User %s edited comment %s', user_id, node_id)

    # Fetch the new content, so the client can show these without querying again.
    node = nodes_coll.find_one(node_id, projection={'properties.content_html': 1})
    node = nodes_coll.find_one(node_id, projection={
        'properties.content': 1,
        'properties._content_html': 1,
    })
    return status, node
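Seen from the client, the handler accepts a JSON body whose 'op' field selects the action (URL and node ID hypothetical):

    # Voting:
    #   PATCH /api/nodes/5a7d1e4c9837a512d0b3e2a1
    #   {"op": "upvote"}
    #
    # Editing (comment author or admin only):
    #   PATCH /api/nodes/5a7d1e4c9837a512d0b3e2a1
    #   {"op": "edit", "content": "Updated *Markdown* body"}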


@@ -173,15 +186,15 @@ def assert_is_valid_patch(node_id, patch):
        raise wz_exceptions.BadRequest("PATCH should have a key 'op' indicating the operation.")

    if op not in VALID_COMMENT_OPERATIONS:
        raise wz_exceptions.BadRequest(u'Operation should be one of %s',
                                       u', '.join(VALID_COMMENT_OPERATIONS))
        raise wz_exceptions.BadRequest('Operation should be one of %s' %
                                       ', '.join(VALID_COMMENT_OPERATIONS))

    if op not in COMMENT_VOTING_OPS:
        # We can't check here, we need the node owner for that.
        return

    # See whether the user is allowed to patch
    if authorization.user_matches_roles(ROLES_FOR_COMMENT_VOTING):
    if authorization.user_matches_roles(current_app.config['ROLES_FOR_COMMENT_VOTING']):
        log.debug('User is allowed to upvote/downvote comment')
        return

pillar/api/nodes/eve_hooks.py (new file, 374 lines)
							@@ -0,0 +1,374 @@
import collections
import functools
import logging
import urllib.parse

from bson import ObjectId
from werkzeug import exceptions as wz_exceptions

from pillar import current_app
import pillar.markdown
from pillar.api.activities import activity_subscribe, activity_object_add
from pillar.api.file_storage_backends.gcs import update_file_name
from pillar.api.node_types import PILLAR_NAMED_NODE_TYPES
from pillar.api.utils import random_etag
from pillar.api.utils.authorization import check_permissions

log = logging.getLogger(__name__)


def before_returning_node(node):
    # Run validation process, since GET on nodes entry point is public
    check_permissions('nodes', node, 'GET', append_allowed_methods=True)

    # Embed short_link_info if the node has a short_code.
    short_code = node.get('short_code')
    if short_code:
        node['short_link'] = short_link_info(short_code)['short_link']


def before_returning_nodes(nodes):
    for node in nodes['_items']:
        before_returning_node(node)


def only_for_node_type_decorator(*required_node_type_names):
    """Returns a decorator that checks its first argument's node type.

    If the node type is not of the required node type, returns None,
    otherwise calls the wrapped function.

    >>> deco = only_for_node_type_decorator('comment')
    >>> @deco
    ... def handle_comment(node): pass

    >>> deco = only_for_node_type_decorator('comment', 'post')
    >>> @deco
    ... def handle_comment_or_post(node): pass

    """

    # Convert to a set for efficient 'x in required_node_type_names' queries.
    required_node_type_names = set(required_node_type_names)

    def only_for_node_type(wrapped):
        @functools.wraps(wrapped)
        def wrapper(node, *args, **kwargs):
            if node.get('node_type') not in required_node_type_names:
                return

            return wrapped(node, *args, **kwargs)

        return wrapper

    only_for_node_type.__doc__ = "Decorator, immediately returns when " \
                                 "the first argument is not of type %s." % required_node_type_names
    return only_for_node_type


def before_replacing_node(item, original):
    check_permissions('nodes', original, 'PUT')
    update_file_name(item)


def after_replacing_node(item, original):
    """Push an update to the Algolia index when a node item is updated. If the
    project is private, prevent public indexing.
    """

    from pillar.celery import search_index_tasks as index

    projects_collection = current_app.data.driver.db['projects']
    project = projects_collection.find_one({'_id': item['project']})
    if project.get('is_private', False):
        # Skip index updating and return
        return

    status = item['properties'].get('status', 'unpublished')
    node_id = str(item['_id'])

    if status == 'published':
        index.node_save.delay(node_id)
    else:
        index.node_delete.delay(node_id)
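Indexing is now queued instead of done in-request: .delay() enqueues only the node ID string. A sketch of the assumed consuming side (the real tasks live in pillar.celery.search_index_tasks; the app import below is hypothetical):

    from pillar.celery import celery_app  # hypothetical import path

    @celery_app.task(ignore_result=True)
    def node_save(node_id: str):
        # Re-fetch the node by ID inside the worker; queueing just the ID
        # keeps the broker message small and avoids stale payloads.
        ...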


def before_inserting_nodes(items):
    """Before inserting a node in the collection we check if the user is allowed
    and we append the project id to it.
    """
    from pillar.auth import current_user

    nodes_collection = current_app.data.driver.db['nodes']

    def find_parent_project(node):
        """Recursive function that finds the ultimate parent of a node."""
        if node and 'parent' in node:
            parent = nodes_collection.find_one({'_id': node['parent']})
            return find_parent_project(parent)
        if node:
            return node
        else:
            return None

    for item in items:
        check_permissions('nodes', item, 'POST')
        if 'parent' in item and 'project' not in item:
            parent = nodes_collection.find_one({'_id': item['parent']})
            project = find_parent_project(parent)
            if project:
                item['project'] = project['_id']

        # Default the 'user' property to the current user.
        item.setdefault('user', current_user.user_id)


def after_inserting_nodes(items):
    for item in items:
        # Skip subscriptions for first level items (since the context is not a
        # node, but a project).
        # TODO: support should be added for mixed context
        if 'parent' not in item:
            continue
        context_object_id = item['parent']
        if item['node_type'] == 'comment':
            nodes_collection = current_app.data.driver.db['nodes']
            parent = nodes_collection.find_one({'_id': item['parent']})
            # Always subscribe to the parent node
            activity_subscribe(item['user'], 'node', item['parent'])
            if parent['node_type'] == 'comment':
                # If the parent is a comment, we provide its own parent as
                # context. We do this in order to point the user to an asset
                # or group when viewing the notification.
                verb = 'replied'
                context_object_id = parent['parent']
                # Subscribe to the parent of the parent comment (post or group)
                activity_subscribe(item['user'], 'node', parent['parent'])
            else:
                activity_subscribe(item['user'], 'node', item['_id'])
                verb = 'commented'
        elif item['node_type'] in PILLAR_NAMED_NODE_TYPES:
            verb = 'posted'
            activity_subscribe(item['user'], 'node', item['_id'])
        else:
            # Don't automatically create activities for non-Pillar node types,
            # as we don't know what would be a suitable verb (among other things).
            continue

        activity_object_add(
            item['user'],
            verb,
            'node',
            item['_id'],
            'node',
            context_object_id
        )


def deduct_content_type_and_duration(node_doc, original=None):
    """Deduct the content type and duration from the attached file, if any."""

    if node_doc['node_type'] != 'asset':
        log.debug('deduct_content_type: called on node type %r, ignoring', node_doc['node_type'])
        return

    node_id = node_doc.get('_id')
    try:
        file_id = ObjectId(node_doc['properties']['file'])
    except KeyError:
        if node_id is None:
            # Creation of a file-less node is allowed, but updates aren't.
            return
        log.warning('deduct_content_type: Asset without properties.file, rejecting.')
        raise wz_exceptions.UnprocessableEntity('Missing file property for asset node')

    files = current_app.data.driver.db['files']
    file_doc = files.find_one({'_id': file_id},
                              {'content_type': 1,
                               'variations': 1})
    if not file_doc:
        log.warning('deduct_content_type: Node %s refers to non-existing file %s, rejecting.',
                    node_id, file_id)
        raise wz_exceptions.UnprocessableEntity('File property refers to non-existing file')

    # Guess the node content type from the file content type
    file_type = file_doc['content_type']
    if file_type.startswith('video/'):
        content_type = 'video'
    elif file_type.startswith('image/'):
        content_type = 'image'
    else:
        content_type = 'file'

    node_doc['properties']['content_type'] = content_type

    if content_type == 'video':
        duration = file_doc['variations'][0].get('duration')
        if duration:
            node_doc['properties']['duration_seconds'] = duration
        else:
            log.warning('Video file %s has no duration', file_id)
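For a concrete picture of the mapping: a file document like the following (values hypothetical) makes the node a 'video' and copies the first variation's duration.

    file_doc = {'content_type': 'video/mp4',
                'variations': [{'duration': 124}]}
    # -> node_doc['properties']['content_type'] == 'video'
    # -> node_doc['properties']['duration_seconds'] == 124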


def nodes_deduct_content_type_and_duration(nodes):
    for node in nodes:
        deduct_content_type_and_duration(node)


def node_set_default_picture(node, original=None):
    """Uses the image of an image asset or colour map of texture node as picture."""

    if node.get('picture'):
        log.debug('Node %s already has a picture, not overriding', node.get('_id'))
        return

    node_type = node.get('node_type')
    props = node.get('properties', {})
    content = props.get('content_type')

    if node_type == 'asset' and content == 'image':
        image_file_id = props.get('file')
    elif node_type == 'texture':
        # Find the colour map, defaulting to the first image map available.
        image_file_id = None
        for image in props.get('files', []):
            if image_file_id is None or image.get('map_type') == 'color':
                image_file_id = image.get('file')
    else:
        log.debug('Not setting default picture on node type %s content type %s',
                  node_type, content)
        return

    if image_file_id is None:
        log.debug('Nothing to set the picture to.')
        return

    log.debug('Setting default picture for node %s to %s', node.get('_id'), image_file_id)
    node['picture'] = image_file_id
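In the texture branch, a later 'color' entry wins over whatever was seen first (file IDs hypothetical):

    props = {'files': [{'map_type': 'bump', 'file': 'file-a'},
                       {'map_type': 'color', 'file': 'file-b'}]}
    # Loop result: image_file_id == 'file-b' (the colour map),
    # even though 'file-a' was encountered first.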


def nodes_set_default_picture(nodes):
    for node in nodes:
        node_set_default_picture(node)


def before_deleting_node(node: dict):
    check_permissions('nodes', node, 'DELETE')
    remove_project_references(node)


def remove_project_references(node):
    project_id = node.get('project')
    if not project_id:
        return

    node_id = node['_id']
    log.info('Removing references to node %s from project %s', node_id, project_id)

    projects_col = current_app.db('projects')
    project = projects_col.find_one({'_id': project_id})
    updates = collections.defaultdict(dict)

    if project.get('header_node') == node_id:
        updates['$unset']['header_node'] = node_id

    project_reference_lists = ('nodes_blog', 'nodes_featured', 'nodes_latest')
    for list_name in project_reference_lists:
        references = project.get(list_name)
        if not references:
            continue
        try:
            references.remove(node_id)
        except ValueError:
            continue

        updates['$set'][list_name] = references

    if not updates:
        return

    updates['$set']['_etag'] = random_etag()
    result = projects_col.update_one({'_id': project_id}, updates)
    if result.modified_count != 1:
        log.warning('Removing references to node %s from project %s resulted in %d modified documents (expected 1)',
                    node_id, project_id, result.modified_count)
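Thanks to the defaultdict, the accumulated update document ends up in plain MongoDB operator shape; for a node that was both the project header and featured (IDs hypothetical):

    updates == {'$unset': {'header_node': node_id},
                '$set': {'nodes_featured': [other_node_id],
                         '_etag': '...'}}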


def after_deleting_node(item):
    from pillar.celery import search_index_tasks as index
    index.node_delete.delay(str(item['_id']))


only_for_textures = only_for_node_type_decorator('texture')


@only_for_textures
def texture_sort_files(node, original=None):
    """Sort files alphabetically by map type, with colour map first."""

    try:
        files = node['properties']['files']
    except KeyError:
        return

    # Sort the map types alphabetically, ensuring 'color' comes first.
    as_dict = {f['map_type']: f for f in files}
    types = sorted(as_dict.keys(), key=lambda k: '\0' if k == 'color' else k)
    node['properties']['files'] = [as_dict[map_type] for map_type in types]
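Mapping 'color' to '\0' works because NUL sorts before every printable character, pinning the colour map to the front while the rest stays alphabetical:

    sorted(['normal', 'color', 'bump'],
           key=lambda k: '\0' if k == 'color' else k)
    # -> ['color', 'bump', 'normal']

Note the dict comprehension also de-duplicates: when two files share a map_type, only the last one survives.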


def textures_sort_files(nodes):
    for node in nodes:
        texture_sort_files(node)


def parse_markdown(node, original=None):
    import copy

    projects_collection = current_app.data.driver.db['projects']
    project = projects_collection.find_one({'_id': node['project']}, {'node_types': 1})
    # Query node type directly using the key
    node_type = next(nt for nt in project['node_types']
                     if nt['name'] == node['node_type'])

    # Create a copy to not overwrite the actual schema.
    schema = copy.deepcopy(current_app.config['DOMAIN']['nodes']['schema'])
    schema['properties'] = node_type['dyn_schema']

    def find_markdown_fields(schema, node):
        """Find and process all markdown-validated fields."""
        for k, v in schema.items():
            if not isinstance(v, dict):
                continue

            if v.get('validator') == 'markdown':
                # If there is a match with the validator: markdown pair, assign the sibling
                # property (following the naming convention _<property>_html)
                # the processed value.
                if k in node:
                    html = pillar.markdown.markdown(node[k])
                    field_name = pillar.markdown.cache_field_name(k)
                    node[field_name] = html
            if isinstance(node, dict) and k in node:
                find_markdown_fields(v, node[k])

    find_markdown_fields(schema, node)

    return 'ok'
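Per the _<property>_html convention mentioned in the comment, cache_field_name() presumably derives the cached-HTML sibling from the field name; on a comment node that would look like this (assuming cache_field_name('content') == '_content_html'):

    node = {'properties': {'content': 'Hello *world*'}}
    # After parse_markdown:
    # node['properties']['_content_html'] == '<p>Hello <em>world</em></p>'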


def parse_markdowns(items):
    for item in items:
        parse_markdown(item)


def short_link_info(short_code):
    """Returns the short link info in a dict."""

    short_link = urllib.parse.urljoin(
        current_app.config['SHORT_LINK_BASE_URL'], short_code)

    return {
        'short_code': short_code,
        'short_link': short_link,
    }
@@ -1,7 +1,7 @@
"""Code for moving around nodes."""

import attr
import flask_pymongo.wrappers
import pymongo.database
from bson import ObjectId

from pillar import attrs_extra
@@ -10,7 +10,7 @@ import pillar.api.file_storage.moving

@attr.s
class NodeMover(object):
    db = attr.ib(validator=attr.validators.instance_of(flask_pymongo.wrappers.Database))
    db = attr.ib(validator=attr.validators.instance_of(pymongo.database.Database))
    skip_gcs = attr.ib(default=False, validator=attr.validators.instance_of(bool))
    _log = attrs_extra.log('%s.NodeMover' % __name__)
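Switching the validator to pymongo.database.Database loosens the check rather than changing it: flask_pymongo's wrapper subclasses the pymongo class, so existing callers keep passing while plain pymongo databases become acceptable too.

    import flask_pymongo.wrappers
    import pymongo.database

    assert issubclass(flask_pymongo.wrappers.Database, pymongo.database.Database)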

@@ -61,8 +61,8 @@ class NodeMover(object):
        """Moves a single file to another project"""

        self._log.info('Moving file %s to project %s', file_id, dest_proj['_id'])
        pillar.api.file_storage.moving.gcs_move_to_bucket(file_id, dest_proj['_id'],
                                                          skip_gcs=self.skip_gcs)
        pillar.api.file_storage.moving.move_to_bucket(file_id, dest_proj['_id'],
                                                      skip_storage=self.skip_gcs)

    def _files(self, file_ref, *properties):
        """Yields file ObjectIDs."""

pillar/api/organizations/__init__.py (new file, 444 lines)
							@@ -0,0 +1,444 @@
"""Organization management.

Assumes role names that are given to users by organization membership
start with the string "org-".
"""

import logging
import typing

import attr
import bson
import flask
import werkzeug.exceptions as wz_exceptions

from pillar import attrs_extra, current_app
from pillar.api.utils import remove_private_keys, utcnow


class OrganizationError(Exception):
    """Superclass for all Organization-related errors."""


@attr.s
class NotEnoughSeats(OrganizationError):
    """Thrown when trying to add too many members to the organization."""

    org_id = attr.ib(validator=attr.validators.instance_of(bson.ObjectId))
    seat_count = attr.ib(validator=attr.validators.instance_of(int))
    attempted_seat_count = attr.ib(validator=attr.validators.instance_of(int))


@attr.s
class OrgManager:
    """Organization manager.

    Performs actions on an Organization. Does *NOT* test user permissions -- the caller
    is responsible for that.
    """

    _log = attrs_extra.log('%s.OrgManager' % __name__)

    def create_new_org(self,
                       name: str,
                       admin_uid: bson.ObjectId,
                       seat_count: int,
                       *,
                       org_roles: typing.Iterable[str] = None) -> dict:
        """Creates a new Organization.

        Returns the new organization document.
        """

        assert isinstance(admin_uid, bson.ObjectId)

        org_doc = {
            'name': name,
            'admin_uid': admin_uid,
            'seat_count': seat_count,
        }

        if org_roles:
            org_doc['org_roles'] = list(org_roles)

        r, _, _, status = current_app.post_internal('organizations', org_doc)
        if status != 201:
            self._log.error('Error creating organization; status should be 201, not %i: %s',
                            status, r)
            raise ValueError(f'Unable to create organization, status code {status}')

        org_doc.update(r)
        return org_doc
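A usage sketch (names and IDs hypothetical); later member additions can raise NotEnoughSeats, whose attrs fields carry the numbers:

    org = OrgManager().create_new_org('Hypothetical Studio', admin_uid, seat_count=5)
    try:
        OrgManager().assign_users(org['_id'], ['artist@example.com'])
    except NotEnoughSeats as ex:
        print(f'Only {ex.seat_count} seats, wanted {ex.attempted_seat_count}')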

    def assign_users(self,
                     org_id: bson.ObjectId,
                     emails: typing.List[str]) -> dict:
        """Assigns users to the organization.

        Checks the seat count and throws a NotEnoughSeats exception when the
        seat count is not sufficient to assign the requested users.

        Users are looked up by email address, and known users are
        automatically mapped.

        :returns: the new organization document.
        """

        self._log.info('Adding %i new members to organization %s', len(emails), org_id)

        users_coll = current_app.db('users')
        existing_user_docs = list(users_coll.find({'email': {'$in': emails}},
                                                  projection={'_id': 1, 'email': 1}))
        unknown_users = set(emails) - {user['email'] for user in existing_user_docs}
        existing_users = {user['_id'] for user in existing_user_docs}

        return self._assign_users(org_id, unknown_users, existing_users)

    def assign_single_user(self, org_id: bson.ObjectId, *, user_id: bson.ObjectId) -> dict:
        """Assigns a single, known user to the organization.

        :returns: the new organization document.
        """

        self._log.info('Adding new member %s to organization %s', user_id, org_id)
        return self._assign_users(org_id, set(), {user_id})

    def _assign_users(self, org_id: bson.ObjectId,
                      unknown_users: typing.Set[str],
                      existing_users: typing.Set[bson.ObjectId]) -> dict:

        if self._log.isEnabledFor(logging.INFO):
            self._log.info('  - found users: %s', ', '.join(str(uid) for uid in existing_users))
            self._log.info('  - unknown users: %s', ', '.join(unknown_users))

        org_doc = self._get_org(org_id)

        # Compute the new members.
        members = set(org_doc.get('members') or []) | existing_users
        unknown_members = set(org_doc.get('unknown_members') or []) | unknown_users

        # Make sure we don't exceed the current seat count.
        new_seat_count = len(members) + len(unknown_members)
        if new_seat_count > org_doc['seat_count']:
            self._log.warning('assign_users(%s, ...): Trying to increase seats to %i, '
                              'but org only has %i seats.',
                              org_id, new_seat_count, org_doc['seat_count'])
            raise NotEnoughSeats(org_id, org_doc['seat_count'], new_seat_count)

        # Update the organization.
        org_doc['members'] = list(members)
        org_doc['unknown_members'] = list(unknown_members)

        r, _, _, status = current_app.put_internal('organizations',
                                                   remove_private_keys(org_doc),
                                                   _id=org_id)
        if status != 200:
            self._log.error('Error updating organization; status should be 200, not %i: %s',
                            status, r)
            raise ValueError(f'Unable to update organization, status code {status}')
        org_doc.update(r)

        # Update the roles for the affected members
        for uid in existing_users:
            self.refresh_roles(uid)

        return org_doc
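The seat check counts known and unknown members together, so pending e-mail invitations consume seats as well. Numerically (values hypothetical):

    members = {uid1, uid2, uid3}            # 3 known members
    unknown_members = {'a@example.com',
                       'b@example.com'}     # 2 pending invitations
    # new_seat_count == 5; with seat_count == 4 this raises
    # NotEnoughSeats(org_id, 4, 5)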

    def assign_admin(self, org_id: bson.ObjectId, *, user_id: bson.ObjectId):
        """Assigns a user as admin user for this organization."""

        assert isinstance(org_id, bson.ObjectId)
        assert isinstance(user_id, bson.ObjectId)

        org_coll = current_app.db('organizations')
        users_coll = current_app.db('users')

        if users_coll.count({'_id': user_id}) == 0:
            raise ValueError('User not found')

        self._log.info('Updating organization %s, setting admin user to %s', org_id, user_id)
        org_coll.update_one({'_id': org_id},
                            {'$set': {'admin_uid': user_id}})

    def remove_user(self,
                    org_id: bson.ObjectId,
                    *,
                    user_id: bson.ObjectId = None,
                    email: str = None) -> dict:
        """Removes a user from the organization.

        The user can be identified by either user ID or email.

        Returns the new organization document.
        """

        users_coll = current_app.db('users')

        assert user_id or email

        # Collect the email address if not given. This ensures removal also works
        # when the email address happens to be in the unknown_members list.
        if email is None:
            user_doc = users_coll.find_one(user_id, projection={'email': 1})
            if user_doc is not None:
                email = user_doc['email']

        # See if we know this user.
        if user_id is None:
            user_doc = users_coll.find_one({'email': email}, projection={'_id': 1})
            if user_doc is not None:
                user_id = user_doc['_id']

        if user_id and not users_coll.count({'_id': user_id}):
            raise wz_exceptions.UnprocessableEntity('User does not exist')

        self._log.info('Removing user %s / %s from organization %s', user_id, email, org_id)

        org_doc = self._get_org(org_id)

        # Compute the new members.
        if user_id:
            members = set(org_doc.get('members') or []) - {user_id}
            org_doc['members'] = list(members)

        if email:
            unknown_members = set(org_doc.get('unknown_members') or []) - {email}
            org_doc['unknown_members'] = list(unknown_members)

        r, _, _, status = current_app.put_internal('organizations',
                                                   remove_private_keys(org_doc),
                                                   _id=org_id)
        if status != 200:
            self._log.error('Error updating organization; status should be 200, not %i: %s',
                            status, r)
            raise ValueError(f'Unable to update organization, status code {status}')
        org_doc.update(r)

        # Update the roles for the affected member.
        if user_id:
            self.refresh_roles(user_id)

        return org_doc

    def _get_org(self, org_id: bson.ObjectId, *, projection=None):
        """Returns the organization, or raises a ValueError."""

        assert isinstance(org_id, bson.ObjectId)

        org_coll = current_app.db('organizations')
        org = org_coll.find_one(org_id, projection=projection)
        if org is None:
            raise ValueError(f'Organization {org_id} not found')
        return org

    def refresh_all_user_roles(self, org_id: bson.ObjectId):
        """Refreshes the roles of all members."""

        assert isinstance(org_id, bson.ObjectId)

        org = self._get_org(org_id, projection={'members': 1})
        members = org.get('members')
        if not members:
            self._log.info('Organization %s has no members, nothing to refresh.', org_id)
            return

        for uid in members:
            self.refresh_roles(uid)

    def refresh_roles(self, user_id: bson.ObjectId) -> typing.Set[str]:
        """Refreshes the user's roles to own roles + organizations' roles.

        :returns: the applied set of roles.
        """

        assert isinstance(user_id, bson.ObjectId)

        from pillar.api.service import do_badger

        self._log.info('Refreshing roles for user %s', user_id)

        org_coll = current_app.db('organizations')
        tokens_coll = current_app.db('tokens')

        def aggr_roles(coll, match: dict) -> typing.Set[str]:
            query = coll.aggregate([
                {'$match': match},
                {'$project': {'org_roles': 1}},
                {'$unwind': {'path': '$org_roles'}},
                {'$group': {
                    '_id': None,
                    'org_roles': {'$addToSet': '$org_roles'},
                }}])

            # If the user has no organizations/tokens at all, the query will have no results.
            try:
                org_roles_doc = query.next()
            except StopIteration:
                return set()
            return set(org_roles_doc['org_roles'])

        # Join all organization-given roles and roles from the tokens collection.
        org_roles = aggr_roles(org_coll, {'members': user_id})
        self._log.debug('Organization-given roles for user %s: %s', user_id, org_roles)
        token_roles = aggr_roles(tokens_coll, {
            'user': user_id,
            'expire_time': {"$gt": utcnow()},
        })
        self._log.debug('Token-given roles for user %s: %s', user_id, token_roles)
        org_roles.update(token_roles)

        users_coll = current_app.db('users')
        user_doc = users_coll.find_one(user_id, projection={'roles': 1})
        if not user_doc:
            self._log.warning('Trying to refresh roles of non-existing user %s, ignoring', user_id)
            return set()

        all_user_roles = set(user_doc.get('roles') or [])
        existing_org_roles = {role for role in all_user_roles
                              if role.startswith('org-')}

        grant_roles = org_roles - all_user_roles
        revoke_roles = existing_org_roles - org_roles

        if grant_roles:
            do_badger('grant', roles=grant_roles, user_id=user_id)
        if revoke_roles:
            do_badger('revoke', roles=revoke_roles, user_id=user_id)

        return all_user_roles.union(grant_roles) - revoke_roles
 | 
			
		||||
 | 
			
		||||
    def user_is_admin(self, org_id: bson.ObjectId) -> bool:
        """Returns whether the currently logged in user is the admin of the organization."""

        from pillar.api.utils.authentication import current_user_id

        uid = current_user_id()
        if uid is None:
            return False

        org = self._get_org(org_id, projection={'admin_uid': 1})
        return org.get('admin_uid') == uid
    def unknown_member_roles(self, member_email: str) -> typing.Set[str]:
        """Returns the set of organization roles for this user.

        Assumes the user is not yet known, i.e. part of the unknown_members list.
        """

        org_coll = current_app.db('organizations')

        # Aggregate all org-given roles for this user.
        query = org_coll.aggregate([
            {'$match': {'unknown_members': member_email}},
            {'$project': {'org_roles': 1}},
            {'$unwind': {'path': '$org_roles'}},
            {'$group': {
                '_id': None,
                'org_roles': {'$addToSet': '$org_roles'},
            }}])

        # If the user has no organizations at all, the query will have no results.
        try:
            org_roles_doc = query.next()
        except StopIteration:
            return set()

        return set(org_roles_doc['org_roles'])
    def make_member_known(self, member_uid: bson.ObjectId, member_email: str):
        """Moves the given member from the unknown_members list to the members list."""

        # This uses a direct PyMongo query rather than using Eve's put_internal,
        # to prevent simultaneous updates from dropping users.
        org_coll = current_app.db('organizations')
        for org in org_coll.find({'unknown_members': member_email}):
            self._log.info('Updating organization %s, marking member %s/%s as known',
                           org['_id'], member_uid, member_email)
            org_coll.update_one({'_id': org['_id']},
                                {'$addToSet': {'members': member_uid},
                                 '$pull': {'unknown_members': member_email}
                                 })
    def org_members(self, member_string_ids: typing.Iterable[str]) -> typing.List[dict]:
        """Returns the user documents of the organization members.

        This is a workaround to provide membership information for
        organizations without giving 'mortal' users access to /api/users.
        """
        from pillar.api.utils import str2id

        if not member_string_ids:
            return []

        member_ids = [str2id(uid) for uid in member_string_ids]
        users_coll = current_app.db('users')
        users = users_coll.find({'_id': {'$in': member_ids}},
                                projection={'_id': 1, 'full_name': 1, 'email': 1})
        return list(users)
    def user_has_organizations(self, user_id: bson.ObjectId) -> bool:
        """Returns True iff the user has anything to do with organizations.

        That is, if the user is admin for and/or member of any organization.
        """

        org_coll = current_app.db('organizations')

        org_count = org_coll.count({'$or': [
            {'admin_uid': user_id},
            {'members': user_id}
        ]})

        return bool(org_count)
    def user_is_unknown_member(self, member_email: str) -> bool:
        """Return True iff the email is an unknown member of some org."""

        org_coll = current_app.db('organizations')
        org_count = org_coll.count({'unknown_members': member_email})
        return bool(org_count)
    def roles_for_ip_address(self, remote_addr: str) -> typing.Set[str]:
        """Find the roles given to the user via org IP range definitions."""

        from . import ip_ranges

        org_coll = current_app.db('organizations')
        try:
            q = ip_ranges.query(remote_addr)
        except ValueError as ex:
            self._log.warning('Invalid remote address %s, ignoring IP-based roles: %s',
                              remote_addr, ex)
            return set()

        orgs = org_coll.find(
            {'ip_ranges': q},
            projection={'org_roles': True},
        )
        return {role
                for org in orgs
                for role in org.get('org_roles', [])}
    def roles_for_request(self) -> typing.Set[str]:
        """Find roles for user via the request's remote IP address."""

        try:
            remote_addr = flask.request.access_route[0]
        except IndexError:
            return set()

        if not remote_addr:
            return set()

        roles = self.roles_for_ip_address(remote_addr)
        self._log.debug('Roles for IP address %s: %s', remote_addr, roles)

        return roles
def setup_app(app):
    from . import patch, hooks

    hooks.setup_app(app)
    patch.setup_app(app)
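
# A minimal sketch of the grant/revoke set arithmetic in refresh_roles() above,
# using hypothetical role names (not from any real database):
all_user_roles = {'subscriber', 'org-acme'}    # roles currently on the user
org_roles = {'org-acme', 'org-emea'}           # roles the organizations should give
existing_org_roles = {role for role in all_user_roles if role.startswith('org-')}

grant_roles = org_roles - all_user_roles       # {'org-emea'}
revoke_roles = existing_org_roles - org_roles  # set(); 'org-acme' is still wanted
assert all_user_roles.union(grant_roles) - revoke_roles == {'subscriber', 'org-acme', 'org-emea'}
# Only 'org-' prefixed roles are ever revoked, so a manually granted
# 'subscriber' role survives the refresh.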
							
								
								
									
pillar/api/organizations/hooks.py (new file, 48 lines)
@@ -0,0 +1,48 @@
import werkzeug.exceptions as wz_exceptions

from pillar.api.utils.authentication import current_user


def pre_get_organizations(request, lookup):
    user = current_user()
    if user.is_anonymous:
        raise wz_exceptions.Forbidden()

    if user.has_cap('admin'):
        # Allow all lookups to admins.
        return

    # Only allow users to see their own organizations.
    lookup['$or'] = [{'admin_uid': user.user_id}, {'members': user.user_id}]


def on_fetched_item_organizations(org_doc: dict):
    """Filter out binary data.

    Eve cannot return binary data, at least not until we upgrade to a version
    that depends on Cerberus >= 1.0.
    """

    for ipr in org_doc.get('ip_ranges') or []:
        ipr.pop('start', None)
        ipr.pop('end', None)
        ipr.pop('prefix', None)  # not binary, but useless without the other fields.


def on_fetched_resource_organizations(response: dict):
    for org_doc in response.get('_items', []):
        on_fetched_item_organizations(org_doc)


def pre_post_organizations(request):
    user = current_user()
    if not user.has_cap('create-organization'):
        raise wz_exceptions.Forbidden()


def setup_app(app):
    app.on_pre_GET_organizations += pre_get_organizations
    app.on_pre_POST_organizations += pre_post_organizations

    app.on_fetched_item_organizations += on_fetched_item_organizations
    app.on_fetched_resource_organizations += on_fetched_resource_organizations
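
# Sketch of the effective filter produced by pre_get_organizations() for a
# non-admin user; the ObjectId below is hypothetical:
import bson

lookup = {}  # what Eve passes in for an unfiltered listing
uid = bson.ObjectId('59371b734ba2e76c1f4e8a1e')
lookup['$or'] = [{'admin_uid': uid}, {'members': uid}]
# MongoDB now only returns organizations this user administers or is a member of.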
							
								
								
									
pillar/api/organizations/ip_ranges.py (new file, 75 lines)
@@ -0,0 +1,75 @@
"""IP range support for Organizations."""

from IPy import IP

# 128 bits all set to 1
ONES_128 = 2 ** 128 - 1


def doc(iprange: str, min_prefixlen6: int = 0, min_prefixlen4: int = 0) -> dict:
    """Convert a human-readable string like '1.2.3.4/24' to a Mongo document.

    This converts the address to IPv6 and computes the start/end addresses
    of the range. The address, its prefix size, and start and end address,
    are returned as a dict.

    Addresses are stored as big-endian binary data because MongoDB doesn't
    support 128-bit integers.

    :param iprange: the IP address and mask size, can be IPv6 or IPv4.
    :param min_prefixlen6: if given, causes a ValueError when the mask size
                           is too low. Note that the mask size is only
                           evaluated for IPv6 addresses.
    :param min_prefixlen4: if given, causes a ValueError when the mask size
                           is too low. Note that the mask size is only
                           evaluated for IPv4 addresses.
    :returns: a dict like: {
        'start': b'xxxxx' with the lowest IP address in the range.
        'end': b'yyyyy' with the highest IP address in the range.
        'human': 'aaaa:bbbb::cc00/120' with the human-readable representation.
        'prefix': 120, the prefix length of the netmask in bits.
    }
    """

    ip = IP(iprange, make_net=True)
    prefixlen = ip.prefixlen()
    if ip.version() == 4:
        if prefixlen < min_prefixlen4:
            raise ValueError(f'Prefix length {prefixlen} smaller than allowed {min_prefixlen4}')
        ip = ip.v46map()
    else:
        if prefixlen < min_prefixlen6:
            raise ValueError(f'Prefix length {prefixlen} smaller than allowed {min_prefixlen6}')

    addr = ip.int()

    # Set all address bits to 1 where the mask is 0 to obtain the largest address.
    end = addr | (ONES_128 % ip.netmask().int())

    # This ensures that even a single host is represented as /128 in the human-readable form.
    ip.NoPrefixForSingleIp = False

    return {
        'start': addr.to_bytes(16, 'big'),
        'end': end.to_bytes(16, 'big'),
        'human': ip.strCompressed(),
        'prefix': ip.prefixlen(),
    }
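
# Worked example for doc(), hedged: the exact values assume IPy's v46map()
# maps IPv4 into ::ffff:0:0/96, which adds 96 to the prefix length.
#
#   doc('192.168.3.0/24', min_prefixlen4=8)
#   -> {'start': bytes.fromhex('00000000000000000000ffffc0a80300'),  # ::ffff:192.168.3.0
#       'end':   bytes.fromhex('00000000000000000000ffffc0a803ff'),  # ::ffff:192.168.3.255
#       'human': '::ffff:192.168.3.0/120',  # exact rendering depends on the IPy version
#       'prefix': 120}
#
# The modulo in `ONES_128 % ip.netmask().int()` works because for any prefix
# length >= 1 the netmask is more than half of ONES_128, so the remainder is
# ONES_128 - netmask: exactly the host bits of the range.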
def query(address: str) -> dict:
    """Return a dict usable for querying all organizations whose IP range matches the given one.

    :returns: a dict like:
        {$elemMatch: {'start': {$lte: b'xxxxx'}, 'end': {$gte: b'xxxxx'}}}
    """

    ip = IP(address)
    if ip.version() == 4:
        ip = ip.v46map()
    for_mongo = ip.ip.to_bytes(16, 'big')

    return {'$elemMatch': {
        'start': {'$lte': for_mongo},
        'end': {'$gte': for_mongo},
    }}
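
# Hedged usage sketch: combining query() with the organizations collection,
# matching how roles_for_ip_address() uses it elsewhere in this changeset:
from pillar import current_app
from pillar.api.organizations.ip_ranges import query

org_coll = current_app.db('organizations')
q = query('192.168.3.14')  # matches any organization whose stored range contains this address
for org in org_coll.find({'ip_ranges': q}, projection={'org_roles': 1}):
    print(org['_id'], org.get('org_roles', []))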
							
								
								
									
pillar/api/organizations/patch.py (new file, 228 lines)
@@ -0,0 +1,228 @@
"""Organization patching support."""

import logging

import bson
from flask import Blueprint
import werkzeug.exceptions as wz_exceptions

from pillar.api.utils.authentication import current_user
from pillar.api.utils import authorization, str2id, jsonify
from pillar.api import patch_handler
from pillar import current_app

log = logging.getLogger(__name__)
patch_api_blueprint = Blueprint('pillar.api.organizations.patch', __name__)
class OrganizationPatchHandler(patch_handler.AbstractPatchHandler):
    item_name = 'organization'

    @authorization.require_login()
    def patch_assign_users(self, org_id: bson.ObjectId, patch: dict):
        """Assigns users to an organization.

        The calling user must be admin of the organization.
        """
        from . import NotEnoughSeats

        self._assert_is_admin(org_id)

        # Do some basic validation.
        try:
            emails = patch['emails']
        except KeyError:
            raise wz_exceptions.BadRequest('No key "emails" in patch.')

        # Skip empty emails.
        emails = [stripped
                  for stripped in (email.strip() for email in emails)
                  if stripped]

        log.info('User %s uses PATCH to add users to organization %s',
                 current_user().user_id, org_id)
        try:
            org_doc = current_app.org_manager.assign_users(org_id, emails)
        except NotEnoughSeats:
            resp = jsonify({'_message': f'Not enough seats to assign {len(emails)} users'})
            resp.status_code = 422
            return resp

        return jsonify(org_doc)
    @authorization.require_login()
    def patch_assign_user(self, org_id: bson.ObjectId, patch: dict):
        """Assigns a single user by User ID to an organization.

        The calling user must be admin of the organization.
        """
        from . import NotEnoughSeats
        self._assert_is_admin(org_id)

        # Do some basic validation.
        try:
            user_id = patch['user_id']
        except KeyError:
            raise wz_exceptions.BadRequest('No key "user_id" in patch.')

        user_oid = str2id(user_id)
        log.info('User %s uses PATCH to add user %s to organization %s',
                 current_user().user_id, user_oid, org_id)
        try:
            org_doc = current_app.org_manager.assign_single_user(org_id, user_id=user_oid)
        except NotEnoughSeats:
            resp = jsonify({'_message': 'Not enough seats to assign this user'})
            resp.status_code = 422
            return resp

        return jsonify(org_doc)
    @authorization.require_login()
    def patch_assign_admin(self, org_id: bson.ObjectId, patch: dict):
        """Assigns a single user by User ID as admin of the organization.

        The calling user must be admin of the organization.
        """

        self._assert_is_admin(org_id)

        # Do some basic validation.
        try:
            user_id = patch['user_id']
        except KeyError:
            raise wz_exceptions.BadRequest('No key "user_id" in patch.')

        user_oid = str2id(user_id)
        log.info('User %s uses PATCH to set user %s as admin for organization %s',
                 current_user().user_id, user_oid, org_id)
        current_app.org_manager.assign_admin(org_id, user_id=user_oid)
    @authorization.require_login()
    def patch_remove_user(self, org_id: bson.ObjectId, patch: dict):
        """Removes a user from an organization.

        The calling user must be admin of the organization.
        """

        # Do some basic validation.
        email = patch.get('email') or None
        user_id = patch.get('user_id')
        user_oid = str2id(user_id) if user_id else None

        # Users require admin rights on the org, except when removing themselves.
        current_user_id = current_user().user_id
        if user_oid is None or user_oid != current_user_id:
            self._assert_is_admin(org_id)

        log.info('User %s uses PATCH to remove user %s from organization %s',
                 current_user_id, user_oid, org_id)

        org_doc = current_app.org_manager.remove_user(org_id, user_id=user_oid, email=email)
        return jsonify(org_doc)
    def _assert_is_admin(self, org_id):
        om = current_app.org_manager

        if current_user().has_cap('admin'):
            # Always allow admins to edit every organization.
            return

        if not om.user_is_admin(org_id):
            log.warning('User %s uses PATCH to edit organization %s, '
                        'but is not admin of that Organization. Request denied.',
                        current_user().user_id, org_id)
            raise wz_exceptions.Forbidden()
    @authorization.require_login()
    def patch_edit_from_web(self, org_id: bson.ObjectId, patch: dict):
        """Updates Organization fields from the web.

        The PATCH command supports the following payload. The 'name' field must
        be set, all other fields are optional. When an optional field is
        omitted it will be handled as an instruction to clear that field.
            {'name': str,
             'description': str,
             'website': str,
             'location': str,
             'ip_ranges': list of human-readable IP ranges}
        """

        from pymongo.results import UpdateResult
        from . import ip_ranges

        self._assert_is_admin(org_id)
        user = current_user()
        current_user_id = user.user_id

        # Only take known fields from the patch, don't just copy everything.
        update = {
            'name': patch['name'].strip(),
            'description': patch.get('description', '').strip(),
            'website': patch.get('website', '').strip(),
            'location': patch.get('location', '').strip(),
        }
        unset = {}

        # Special transformation for IP ranges
        iprs = patch.get('ip_ranges')
        if iprs:
            ipr_docs = []
            for r in iprs:
                try:
                    doc = ip_ranges.doc(r, min_prefixlen6=48, min_prefixlen4=8)
                except ValueError as ex:
                    raise wz_exceptions.UnprocessableEntity(f'Invalid IP range {r!r}: {ex}')
                ipr_docs.append(doc)
            update['ip_ranges'] = ipr_docs
        else:
            unset['ip_ranges'] = True

        refresh_user_roles = False
        if user.has_cap('admin'):
            if 'seat_count' in patch:
                update['seat_count'] = int(patch['seat_count'])
            if 'org_roles' in patch:
                org_roles = [stripped for stripped in (role.strip() for role in patch['org_roles'])
                             if stripped]
                if not all(role.startswith('org-') for role in org_roles):
                    raise wz_exceptions.UnprocessableEntity(
                        'Invalid role given, all roles must start with "org-"')

                update['org_roles'] = org_roles
                refresh_user_roles = True

        self.log.info('User %s edits Organization %s: %s', current_user_id, org_id, update)

        validator = current_app.validator_for_resource('organizations')
        if not validator.validate_update(update, org_id):
            resp = jsonify({
                '_errors': validator.errors,
                '_message': ', '.join(f'{field}: {error}'
                                      for field, error in validator.errors.items()),
            })
            resp.status_code = 422
            return resp

        # Figure out what to set and what to unset
        for_mongo = {'$set': update}
        if unset:
            for_mongo['$unset'] = unset

        organizations_coll = current_app.db('organizations')
        result: UpdateResult = organizations_coll.update_one({'_id': org_id}, for_mongo)

        if result.matched_count != 1:
            self.log.warning('User %s edits Organization %s but update matched %i items',
                             current_user_id, org_id, result.matched_count)
            raise wz_exceptions.BadRequest()

        if refresh_user_roles:
            self.log.info('Organization roles set for org %s, refreshing users', org_id)
            current_app.org_manager.refresh_all_user_roles(org_id)

        return '', 204
def setup_app(app):
    OrganizationPatchHandler(patch_api_blueprint)
    app.register_api_blueprint(patch_api_blueprint, url_prefix='/organizations')
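
# Hedged examples of PATCH bodies this handler accepts; the URL and IDs are
# hypothetical. Operation names map onto methods via AbstractPatchHandler,
# e.g. 'assign-users' -> patch_assign_users:
#
#   PATCH /api/organizations/5a0cf7dcd1f3e2e1c0ffee00
#   {"op": "assign-users", "emails": ["mortal@example.com"]}
#   {"op": "assign-user", "user_id": "5a0cf7dcd1f3e2e1c0ffee01"}
#   {"op": "assign-admin", "user_id": "5a0cf7dcd1f3e2e1c0ffee01"}
#   {"op": "remove-user", "user_id": "5a0cf7dcd1f3e2e1c0ffee01"}
#   {"op": "edit-from-web", "name": "ACME", "ip_ranges": ["192.168.3.0/24"]}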
							
								
								
									
pillar/api/patch_handler.py (new file, 92 lines)
@@ -0,0 +1,92 @@
"""Handler for PATCH requests.

This supports PATCH request in the sense described by William Durand:
http://williamdurand.fr/2014/02/14/please-do-not-patch-like-an-idiot/

Each PATCH should be a JSON dict with at least a key 'op' with the
name of the operation to perform.
"""

import logging

import flask

from pillar.api.utils import authorization

log = logging.getLogger(__name__)


class AbstractPatchHandler:
    """Abstract PATCH handler supporting multiple operations.

    Each operation, i.e. possible value of the 'op' key in the PATCH body,
    should be matched to a similarly named "patch_xxx" function in a subclass.
    For example, the operation "set-owner" is mapped to "patch_set_owner".

    :cvar route: the Flask/Werkzeug route to attach this handler to.
        For most handlers, the default will be fine.
    :cvar item_name: the name of the things to patch, like "job", "task" etc.
        Only used for logging.
    """

    route: str = '/<object_id>'
    item_name: str = None

    def __init_subclass__(cls, **kwargs):
        if not cls.route:
            raise ValueError('Subclass must set route')
        if not cls.item_name:
            raise ValueError('Subclass must set item_name')

    def __init__(self, blueprint: flask.Blueprint):
        self.log: logging.Logger = log.getChild(self.__class__.__name__)
        self.patch_handlers = {
            name[6:].replace('_', '-'): getattr(self, name)
            for name in dir(self)
            if name.startswith('patch_') and callable(getattr(self, name))
        }

        if self.log.isEnabledFor(logging.INFO):
            self.log.info('Creating PATCH handler %s.%s%s for operations: %s',
                          blueprint.name, self.patch.__name__, self.route,
                          sorted(self.patch_handlers.keys()))

        blueprint.add_url_rule(self.route,
                               self.patch.__name__,
                               self.patch,
                               methods=['PATCH'])

    @authorization.require_login()
    def patch(self, object_id: str):
        from flask import request
        import werkzeug.exceptions as wz_exceptions
        from pillar.api.utils import str2id, authentication

        # Parse the request
        real_object_id = str2id(object_id)
        patch = request.get_json()
        if not patch:
            self.log.info('Bad PATCH request, did not contain JSON')
            raise wz_exceptions.BadRequest('Patch must contain JSON')

        try:
            patch_op = patch['op']
        except KeyError:
            self.log.info("Bad PATCH request, did not contain 'op' key")
            raise wz_exceptions.BadRequest("PATCH should contain 'op' key to denote operation.")

        log.debug('User %s wants to PATCH "%s" %s %s',
                  authentication.current_user_id(), patch_op, self.item_name, real_object_id)

        # Find the PATCH handler for the operation.
        try:
            handler = self.patch_handlers[patch_op]
        except KeyError:
            log.warning('No %s PATCH handler for operation %r', self.item_name, patch_op)
            raise wz_exceptions.BadRequest('Operation %r not supported' % patch_op)

        # Let the PATCH handler do its thing.
        response = handler(real_object_id, patch)
        if response is None:
            return '', 204
        return response
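
# A minimal sketch of a subclass, with a hypothetical blueprint and operation
# (not part of this changeset); the op name 'set-description' is mapped to
# patch_set_description by AbstractPatchHandler.__init__:
import bson
import flask

from pillar.api.patch_handler import AbstractPatchHandler

example_blueprint = flask.Blueprint('example.patch', __name__)


class ExamplePatchHandler(AbstractPatchHandler):
    item_name = 'example'

    def patch_set_description(self, object_id: bson.ObjectId, patch: dict):
        # A real handler would validate and persist patch['description'] here.
        return flask.jsonify({'_id': str(object_id),
                              'description': patch.get('description', '')})


def setup_app(app):
    ExamplePatchHandler(example_blueprint)
    app.register_api_blueprint(example_blueprint, url_prefix='/examples')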
pillar/api/projects/__init__.py
@@ -3,13 +3,20 @@ from .routes import blueprint_api
 
 
 def setup_app(app, api_prefix):
+    from . import patch
+    patch.setup_app(app)
+
     app.on_replace_projects += hooks.override_is_private_field
     app.on_replace_projects += hooks.before_edit_check_permissions
     app.on_replace_projects += hooks.protect_sensitive_fields
 
     app.on_update_projects += hooks.override_is_private_field
     app.on_update_projects += hooks.before_edit_check_permissions
     app.on_update_projects += hooks.protect_sensitive_fields
 
+    app.on_delete_item_projects += hooks.before_delete_project
+    app.on_deleted_item_projects += hooks.after_delete_project
+
     app.on_insert_projects += hooks.before_inserting_override_is_private_field
     app.on_insert_projects += hooks.before_inserting_projects
     app.on_inserted_projects += hooks.after_inserting_projects
pillar/api/projects/hooks.py
@@ -1,17 +1,19 @@
 import copy
 import logging
 
-from flask import request, abort, current_app
-from gcloud import exceptions as gcs_exceptions
+from flask import request, abort
 
+from pillar import current_app
 from pillar.api.node_types.asset import node_type_asset
 from pillar.api.node_types.comment import node_type_comment
 from pillar.api.node_types.group import node_type_group
 from pillar.api.node_types.group_texture import node_type_group_texture
 from pillar.api.node_types.texture import node_type_texture
-from pillar.api.utils.gcs import GoogleCloudStorageBucket
+from pillar.api.file_storage_backends import default_storage_backend
 from pillar.api.utils import authorization, authentication
 from pillar.api.utils import remove_private_keys
 from pillar.api.utils.authorization import user_has_role, check_permissions
+from pillar.auth import current_user
 from .utils import abort_with_error
 
 log = logging.getLogger(__name__)
@@ -28,7 +30,7 @@ def before_inserting_projects(items):
     """
 
     # Allow admin users to do whatever they want.
-    if user_has_role(u'admin'):
+    if user_has_role('admin'):
         return
 
     for item in items:
@@ -64,13 +66,32 @@ def before_delete_project(document):
     """Checks permissions before we allow deletion"""
 
     check_permissions('projects', document, request.method)
+    log.info('Deleting project %s on behalf of user %s', document['_id'], current_user)
+
+
+def after_delete_project(project: dict):
+    """Perform delete on the project's files too."""
+    from werkzeug.exceptions import NotFound
+    from eve.methods.delete import delete
+
+    pid = project['_id']
+    log.info('Project %s was deleted, also deleting its files.', pid)
+
+    try:
+        r, _, _, status = delete('files', {'project': pid})
+    except NotFound:
+        # There were no files, and that's fine.
+        return
+    if status != 204:
+        # Will never happen because bloody Eve always returns 204 or raises an exception.
+        log.warning('Unable to delete files of project %s: %s', pid, r)
 
 
 def protect_sensitive_fields(document, original):
     """When not logged in as admin, prevents update to certain fields."""
 
     # Allow admin users to do whatever they want.
-    if user_has_role(u'admin'):
+    if user_has_role('admin'):
         return
 
     def revert(name):
@@ -108,6 +129,8 @@ def after_inserting_projects(projects):
 
 
 def after_inserting_project(project, db_user):
+    from pillar.auth import UserClass
+
     project_id = project['_id']
     user_id = db_user['_id']
 
@@ -133,7 +156,8 @@ def after_inserting_project(project, db_user):
     log.debug('Made user %s member of group %s', user_id, admin_group_id)
 
     # Assign the group to the project with admin rights
-    is_admin = authorization.is_admin(db_user)
+    owner_user = UserClass.construct('', db_user)
+    is_admin = authorization.is_admin(owner_user)
     world_permissions = ['GET'] if is_admin else []
     permissions = {
         'world': world_permissions,
@@ -166,18 +190,8 @@ def after_inserting_project(project, db_user):
         else:
             project['url'] = "p-{!s}".format(project_id)
 
-    # Initialize storage page (defaults to GCS)
-    if current_app.config.get('TESTING'):
-        log.warning('Not creating Google Cloud Storage bucket while running unit tests!')
-    else:
-        try:
-            gcs_storage = GoogleCloudStorageBucket(str(project_id))
-            if gcs_storage.bucket.exists():
-                log.info('Created GCS instance for project %s', project_id)
-            else:
-                log.warning('Unable to create GCS instance for project %s', project_id)
-        except gcs_exceptions.Forbidden as ex:
-            log.warning('GCS forbids me to create CGS instance for project %s: %s', project_id, ex)
+    # Initialize storage using the default specified in STORAGE_BACKEND
+    default_storage_backend(str(project_id))
 
     # Commit the changes directly to the MongoDB; a PUT is not allowed yet,
     # as the project doesn't have a valid permission structure.
@@ -232,5 +246,3 @@ def project_node_type_has_method(response):
 def projects_node_type_has_method(response):
     for project in response['_items']:
         project_node_type_has_method(project)
-
-
							
								
								
									
pillar/api/projects/merging.py (new file, 44 lines)
@@ -0,0 +1,44 @@
"""Code for merging projects."""
import logging

from bson import ObjectId

from pillar import current_app
from pillar.api.file_storage.moving import move_to_bucket
from pillar.api.utils import random_etag, utcnow

log = logging.getLogger(__name__)


def merge_project(pid_from: ObjectId, pid_to: ObjectId):
    """Move nodes and files from one project to another.

    Note that this may invalidate the nodes, as their node type definition
    may differ between projects.
    """
    log.info('Moving project contents from %s to %s', pid_from, pid_to)
    assert isinstance(pid_from, ObjectId)
    assert isinstance(pid_to, ObjectId)

    files_coll = current_app.db('files')
    nodes_coll = current_app.db('nodes')

    # Move the files first. Since this requires API calls to an external
    # service, this is more likely to go wrong than moving the nodes.
    to_move = files_coll.find({'project': pid_from}, projection={'_id': 1})
    log.info('Moving %d files to project %s', to_move.count(), pid_to)
    for file_doc in to_move:
        fid = file_doc['_id']
        log.debug('moving file %s to project %s', fid, pid_to)
        move_to_bucket(fid, pid_to)

    # Mass-move the nodes.
    etag = random_etag()
    result = nodes_coll.update_many(
        {'project': pid_from},
        {'$set': {'project': pid_to,
                  '_etag': etag,
                  '_updated': utcnow(),
                  }}
    )
    log.info('Moved %d nodes to project %s', result.modified_count, pid_to)
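
# Hedged usage sketch (project IDs hypothetical); merge_project() is meant to
# run inside the application context, e.g. from a maintenance script:
from pillar import current_app
from pillar.api.projects.merging import merge_project
from pillar.api.utils import str2id

with current_app.app_context():
    merge_project(str2id('5a0cf7dcd1f3e2e1c0ffee00'),
                  str2id('5a0cf7dcd1f3e2e1c0ffee01'))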
							
								
								
									
pillar/api/projects/patch.py (new file, 85 lines)
@@ -0,0 +1,85 @@
"""Project patching support."""

import logging

import flask
from flask import Blueprint, request
import werkzeug.exceptions as wz_exceptions

from pillar import current_app
from pillar.auth import current_user
from pillar.api.utils import random_etag, str2id, utcnow
from pillar.api.utils import authorization

log = logging.getLogger(__name__)
blueprint = Blueprint('projects.patch', __name__)


@blueprint.route('/<project_id>', methods=['PATCH'])
@authorization.require_login()
def patch_project(project_id: str):
    """Undelete a project.

    This is done via a custom PATCH due to the lack of transactions in MongoDB;
    we cannot undelete both project-referenced files and file-referenced
    projects in one atomic operation.
    """

    # Parse the request
    pid = str2id(project_id)
    patch = request.get_json()
    if not patch:
        raise wz_exceptions.BadRequest('Expected JSON body')

    log.debug('User %s wants to PATCH project %s: %s', current_user, pid, patch)

    # 'undelete' is the only operation we support now, so no fancy handler registration.
    op = patch.get('op', '')
    if op != 'undelete':
        log.warning('User %s sent unsupported PATCH op %r to project %s: %s',
                    current_user, op, pid, patch)
        raise wz_exceptions.BadRequest(f'unsupported operation {op!r}')

    # Get the project to find the user's permissions.
    proj_coll = current_app.db('projects')
    proj = proj_coll.find_one({'_id': pid})
    if not proj:
        raise wz_exceptions.NotFound(f'project {pid} not found')
    allowed = authorization.compute_allowed_methods('projects', proj)
    if 'PUT' not in allowed:
        log.warning('User %s tried to undelete project %s but only has permissions %r',
                    current_user, pid, allowed)
        raise wz_exceptions.Forbidden(f'no PUT access to project {pid}')

    if not proj.get('_deleted', False):
        raise wz_exceptions.BadRequest(f'project {pid} was not deleted, unable to undelete')

    # Undelete the files. We cannot do this via Eve, as it doesn't support
    # PATCHing collections, so direct MongoDB modification is used to set
    # _deleted=False and provide new _etag and _updated values.
    new_etag = random_etag()

    log.debug('undeleting files before undeleting project %s', pid)
    files_coll = current_app.db('files')
    update_result = files_coll.update_many(
        {'project': pid},
        {'$set': {'_deleted': False,
                  '_etag': new_etag,
                  '_updated': utcnow()}})
    log.info('undeleted %d of %d file documents of project %s',
             update_result.modified_count, update_result.matched_count, pid)

    log.info('undeleting project %s on behalf of user %s', pid, current_user)
    update_result = proj_coll.update_one({'_id': pid},
                                         {'$set': {'_deleted': False}})
    log.info('undeleted %d project document %s', update_result.modified_count, pid)

    resp = flask.Response('', status=204)
    resp.location = flask.url_for('projects.view', project_url=proj['url'])
    return resp


def setup_app(app):
    # This needs to be on the same URL prefix as Eve uses for the collection,
    # and not /p as used for the other Projects API calls.
    app.register_api_blueprint(blueprint, url_prefix='/projects')
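
# Hedged usage sketch of the undelete operation with the requests library;
# the URL, project ID, and token are hypothetical:
import requests

resp = requests.patch(
    'http://localhost:5000/api/projects/5a0cf7dcd1f3e2e1c0ffee00',
    json={'op': 'undelete'},
    auth=('my-auth-token', ''),  # assuming token-as-username Basic auth
)
assert resp.status_code == 204
print('undeleted; project is at', resp.headers['Location'])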
pillar/api/projects/routes.py
@@ -2,11 +2,13 @@ import json
 import logging
 
 from bson import ObjectId
-from flask import Blueprint, g, request, current_app, make_response, url_for
+from flask import Blueprint, request, current_app, make_response, url_for
+from werkzeug import exceptions as wz_exceptions
 
 from pillar.api.utils import authorization, jsonify, str2id
 from pillar.api.utils import mongo
 from pillar.api.utils.authorization import require_login, check_permissions
-from werkzeug import exceptions as wz_exceptions
+from pillar.auth import current_user
 
 from . import utils
@@ -16,7 +18,7 @@ blueprint_api = Blueprint('projects_api', __name__)
 
 
 @blueprint_api.route('/create', methods=['POST'])
-@authorization.require_login(require_roles={u'admin', u'subscriber', u'demo'})
+@authorization.require_login(require_cap='subscriber')
 def create_project(overrides=None):
     """Creates a new project."""
 
@@ -24,7 +26,7 @@ def create_project(overrides=None):
         project_name = request.json['name']
     else:
         project_name = request.form['project_name']
-    user_id = g.current_user['user_id']
+    user_id = current_user.user_id
 
     project = utils.create_new_project(project_name, user_id, overrides)
 
@@ -41,6 +43,8 @@ def project_manage_users():
     No changes are done on the project itself.
     """
 
+    from pillar.api.utils import str2id
+
     projects_collection = current_app.data.driver.db['projects']
     users_collection = current_app.data.driver.db['users']
 
@@ -57,17 +61,19 @@ def project_manage_users():
 
     # The request is not a form, since it comes from the API sdk
     data = json.loads(request.data)
-    project_id = ObjectId(data['project_id'])
-    target_user_id = ObjectId(data['user_id'])
+    project_id = str2id(data['project_id'])
+    target_user_id = str2id(data['user_id'])
     action = data['action']
-    current_user_id = g.current_user['user_id']
+    current_user_id = current_user.user_id
 
     project = projects_collection.find_one({'_id': project_id})
 
     # Check if the current_user is owner of the project, or removing themselves.
-    if not authorization.user_has_role(u'admin'):
+    if not authorization.user_has_role('admin'):
         remove_self = target_user_id == current_user_id and action == 'remove'
         if project['user'] != current_user_id and not remove_self:
             log.warning('User %s tries to %s %s to/from project %s, but is not allowed',
                         current_user_id, action, target_user_id, project_id)
             utils.abort_with_error(403)
 
     admin_group = utils.get_admin_group(project)
 
pillar/api/projects/utils.py
@@ -1,10 +1,13 @@
 import logging
+import typing
 
 from bson import ObjectId
-from flask import current_app
 from werkzeug import exceptions as wz_exceptions
 from werkzeug.exceptions import abort
 
+from pillar import current_app
+from pillar.auth import current_user
+
 log = logging.getLogger(__name__)
@@ -27,12 +30,30 @@ def project_total_file_size(project_id):
         return 0
 
 
-def get_admin_group(project):
+def get_admin_group_id(project_id: ObjectId) -> ObjectId:
+    assert isinstance(project_id, ObjectId)
+
+    project = current_app.db('projects').find_one({'_id': project_id},
+                                                  {'permissions': 1})
+    if not project:
+        raise ValueError(f'Project {project_id} does not exist.')
+
+    # TODO: search through all groups to find the one with the project ID as its name,
+    # or identify "the admin group" in a different way (for example the group with DELETE rights).
+    try:
+        admin_group_id = ObjectId(project['permissions']['groups'][0]['group'])
+    except KeyError:
+        raise ValueError(f'Project {project_id} does not seem to have an admin group')
+
+    return admin_group_id
+
+
+def get_admin_group(project: dict) -> dict:
     """Returns the admin group for the project."""
 
     groups_collection = current_app.data.driver.db['groups']
 
-    # TODO: search through all groups to find the one with the project ID as its name.
+    # TODO: see get_admin_group_id
     admin_group_id = ObjectId(project['permissions']['groups'][0]['group'])
     group = groups_collection.find_one({'_id': admin_group_id})
@@ -40,11 +61,27 @@ def get_admin_group(project):
         raise ValueError('Unable to handle project without admin group.')
 
     if group['name'] != str(project['_id']):
+        log.error('User %s tries to get admin group for project %s, '
+                  'but that does not have the project ID as group name: %s',
+                  current_user.user_id, project.get('_id', '-unknown-'), group)
         return abort_with_error(403)
 
     return group
 
 
+def user_rights_in_project(project_id: ObjectId) -> frozenset:
+    """Returns the set of HTTP methods allowed on the given project for the current user."""
+
+    from pillar.api.utils import authorization
+
+    assert isinstance(project_id, ObjectId)
+
+    proj_coll = current_app.db().projects
+    proj = proj_coll.find_one({'_id': project_id})
+
+    return frozenset(authorization.compute_allowed_methods('projects', proj))
+
+
 def abort_with_error(status):
     """Aborts with the given status, or 500 if the status doesn't indicate an error.
@@ -97,3 +134,56 @@ def get_node_type(project, node_type_name):
 | 
			
		||||
 | 
			
		||||
    return next((nt for nt in project['node_types']
 | 
			
		||||
                 if nt['name'] == node_type_name), None)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def node_type_dict(project: dict) -> typing.Dict[str, dict]:
 | 
			
		||||
    """Return the node types of the project as dictionary.
 | 
			
		||||
 | 
			
		||||
    The returned dictionary will be keyed by the node type name.
 | 
			
		||||
    """
 | 
			
		||||
    return {nt['name']: nt for nt in project['node_types']}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def project_id(project_url: str) -> ObjectId:
 | 
			
		||||
    """Returns the object ID, or raises a ValueError when not found."""
 | 
			
		||||
 | 
			
		||||
    proj_coll = current_app.db('projects')
 | 
			
		||||
    proj = proj_coll.find_one({'url': project_url}, projection={'_id': True})
 | 
			
		||||
 | 
			
		||||
    if not proj:
 | 
			
		||||
        raise ValueError(f'project with url={project_url!r} not found')
 | 
			
		||||
    return proj['_id']
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_project(project_url: str) -> dict:
 | 
			
		||||
    """Find a project in the database, raises ValueError if not found.
 | 
			
		||||
 | 
			
		||||
    :param project_url: URL of the project
 | 
			
		||||
    """
 | 
			
		||||
 | 
			
		||||
    proj_coll = current_app.db('projects')
 | 
			
		||||
    project = proj_coll.find_one({'url': project_url, '_deleted': {'$ne': True}})
 | 
			
		||||
    if not project:
 | 
			
		||||
        raise ValueError(f'project url={project_url!r} does not exist')
 | 
			
		||||
 | 
			
		||||
    return project
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def put_project(project: dict):
 | 
			
		||||
    """Puts a project into the database via Eve.
 | 
			
		||||
 | 
			
		||||
    :param project: the project data, should be the entire project document
 | 
			
		||||
    :raises ValueError: if the project cannot be saved.
 | 
			
		||||
    """
 | 
			
		||||
 | 
			
		||||
    from pillar.api.utils import remove_private_keys
 | 
			
		||||
    from pillarsdk.utils import remove_none_attributes
 | 
			
		||||
 | 
			
		||||
    pid = ObjectId(project['_id'])
 | 
			
		||||
    proj_no_priv = remove_private_keys(project)
 | 
			
		||||
    proj_no_none = remove_none_attributes(proj_no_priv)
 | 
			
		||||
    result, _, _, status_code = current_app.put_internal('projects', proj_no_none, _id=pid)
 | 
			
		||||
 | 
			
		||||
    if status_code != 200:
 | 
			
		||||
        raise ValueError(f"Can't update project {pid}, "
 | 
			
		||||
                         f"status {status_code} with issues: {result}")

9  pillar/api/search/__init__.py  Normal file
@@ -0,0 +1,9 @@
from .routes import blueprint_search
from . import queries


def setup_app(app, url_prefix: str = None):
    app.register_api_blueprint(
        blueprint_search, url_prefix=url_prefix)

    queries.setup_app(app)

40  pillar/api/search/algolia_indexing.py  Normal file
@@ -0,0 +1,40 @@
import logging

from algoliasearch.helpers import AlgoliaException

log = logging.getLogger(__name__)


def push_updated_user(user_to_index: dict):
    """Push an update to the index when a user document is updated."""

    from pillar.api.utils.algolia import index_user_save

    try:
        index_user_save(user_to_index)
    except AlgoliaException as ex:
        log.warning(
            'Unable to push user info to Algolia for user "%s", id=%s; %s',  # noqa
            user_to_index.get('username'),
            user_to_index.get('objectID'), ex)


def index_node_save(node_to_index: dict):
    """Save a parsed node document to the index."""
    from pillar.api.utils import algolia

    try:
        algolia.index_node_save(node_to_index)
    except AlgoliaException as ex:
        log.warning(
            'Unable to push node info to Algolia for node %s; %s', node_to_index, ex)  # noqa


def index_node_delete(delete_id: str):
    """Delete a node from the index by its ID."""
    from pillar.api.utils import algolia

    try:
        algolia.index_node_delete(delete_id)
    except AlgoliaException as ex:
        log.warning('Unable to delete node info from Algolia for node %s; %s', delete_id, ex)  # noqa
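
These wrappers are deliberately fire-and-forget: an AlgoliaException is logged as a warning and swallowed, so indexing problems never break the request that triggered them. A minimal caller sketch, assuming only the fields the log statement reads (the ID is made up):

# Hypothetical caller sketch; not part of the diff.
from pillar.api.search import algolia_indexing

algolia_indexing.push_updated_user({
    'objectID': '563aca02c379cf0005e8e17d',  # made-up user ID
    'username': 'alice',
})
# If Algolia is unreachable, this logs a warning and simply returns.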

193  pillar/api/search/documents.py  Normal file
@@ -0,0 +1,193 @@
"""
 | 
			
		||||
Define elasticsearch document mapping.
 | 
			
		||||
 | 
			
		||||
Elasticsearch consist of two parts:
 | 
			
		||||
 | 
			
		||||
- Part 1: Define the documents in which you define who fields will be indexed.
 | 
			
		||||
- Part 2: Building elasticsearch json queries.
 | 
			
		||||
 | 
			
		||||
BOTH of these parts are equally importand to havea search API that returns
 | 
			
		||||
relevant results.
 | 
			
		||||
"""
 | 
			
		||||
import logging
 | 
			
		||||
import typing
 | 
			
		||||
 | 
			
		||||
import elasticsearch_dsl as es
 | 
			
		||||
from elasticsearch_dsl import analysis
 | 
			
		||||
 | 
			
		||||
log = logging.getLogger(__name__)
 | 
			
		||||
 | 
			
		||||
edge_ngram_filter = analysis.token_filter(
 | 
			
		||||
    'edge_ngram_filter',
 | 
			
		||||
    type='edge_ngram',
 | 
			
		||||
    min_gram=1,
 | 
			
		||||
    max_gram=15
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
autocomplete = es.analyzer(
 | 
			
		||||
    'autocomplete',
 | 
			
		||||
    tokenizer='standard',
 | 
			
		||||
    filter=['standard', 'asciifolding', 'lowercase', edge_ngram_filter]
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class User(es.DocType):
 | 
			
		||||
    """Elastic document describing user."""
 | 
			
		||||
 | 
			
		||||
    objectID = es.Keyword()
 | 
			
		||||
 | 
			
		||||
    username = es.Text(fielddata=True, analyzer=autocomplete)
 | 
			
		||||
    username_exact = es.Keyword()
 | 
			
		||||
    full_name = es.Text(fielddata=True, analyzer=autocomplete)
 | 
			
		||||
 | 
			
		||||
    roles = es.Keyword(multi=True)
 | 
			
		||||
    groups = es.Keyword(multi=True)
 | 
			
		||||
 | 
			
		||||
    email = es.Text(fielddata=True, analyzer=autocomplete)
 | 
			
		||||
    email_exact = es.Keyword()
 | 
			
		||||
 | 
			
		||||
    class Meta:
 | 
			
		||||
        index = 'users'
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Node(es.DocType):
 | 
			
		||||
    """
 | 
			
		||||
    Elastic document describing user
 | 
			
		||||
    """
 | 
			
		||||
 | 
			
		||||
    node_type = es.Keyword()
 | 
			
		||||
 | 
			
		||||
    objectID = es.Keyword()
 | 
			
		||||
 | 
			
		||||
    name = es.Text(
 | 
			
		||||
        fielddata=True,
 | 
			
		||||
        analyzer=autocomplete
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
    user = es.Object(
 | 
			
		||||
        fields={
 | 
			
		||||
            'id': es.Keyword(),
 | 
			
		||||
            'name': es.Text(
 | 
			
		||||
                fielddata=True,
 | 
			
		||||
                analyzer=autocomplete)
 | 
			
		||||
        }
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
    description = es.Text()
 | 
			
		||||
 | 
			
		||||
    is_free = es.Boolean()
 | 
			
		||||
 | 
			
		||||
    project = es.Object(
 | 
			
		||||
        fields={
 | 
			
		||||
            'id': es.Keyword(),
 | 
			
		||||
            'name': es.Keyword(),
 | 
			
		||||
            'url': es.Keyword(),
 | 
			
		||||
        }
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
    media = es.Keyword()
 | 
			
		||||
 | 
			
		||||
    picture = es.Keyword()
 | 
			
		||||
 | 
			
		||||
    tags = es.Keyword(multi=True)
 | 
			
		||||
    license_notes = es.Text()
 | 
			
		||||
 | 
			
		||||
    created_at = es.Date()
 | 
			
		||||
    updated_at = es.Date()
 | 
			
		||||
 | 
			
		||||
    class Meta:
 | 
			
		||||
        index = 'nodes'
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def create_doc_from_user_data(user_to_index: dict) -> typing.Optional[User]:
 | 
			
		||||
    """
 | 
			
		||||
    Create the document to store in a search engine for this user.
 | 
			
		||||
 | 
			
		||||
    See pillar.celery.search_index_task
 | 
			
		||||
 | 
			
		||||
    :returns: an ElasticSearch document or None if user_to_index has no data.
 | 
			
		||||
    """
 | 
			
		||||
 | 
			
		||||
    if not user_to_index:
 | 
			
		||||
        return None
 | 
			
		||||
 | 
			
		||||
    doc_id = str(user_to_index.get('objectID', ''))
 | 
			
		||||
 | 
			
		||||
    if not doc_id:
 | 
			
		||||
        log.error('USER ID is missing %s', user_to_index)
 | 
			
		||||
        raise KeyError('Trying to create document without id')
 | 
			
		||||
 | 
			
		||||
    doc = User(_id=doc_id)
 | 
			
		||||
    doc.objectID = str(user_to_index['objectID'])
 | 
			
		||||
    doc.username = user_to_index['username']
 | 
			
		||||
    doc.username_exact = user_to_index['username']
 | 
			
		||||
    doc.full_name = user_to_index['full_name']
 | 
			
		||||
    doc.roles = list(map(str, user_to_index['roles']))
 | 
			
		||||
    doc.groups = list(map(str, user_to_index['groups']))
 | 
			
		||||
    doc.email = user_to_index['email']
 | 
			
		||||
    doc.email_exact = user_to_index['email']
 | 
			
		||||
 | 
			
		||||
    return doc
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def create_doc_from_node_data(node_to_index: dict) -> typing.Optional[Node]:
 | 
			
		||||
    """
 | 
			
		||||
    Create the document to store in a search engine for this node.
 | 
			
		||||
 | 
			
		||||
    See pillar.celery.search_index_task
 | 
			
		||||
 | 
			
		||||
    :returns: an ElasticSearch document or None if node_to_index has no data.
 | 
			
		||||
    """
 | 
			
		||||
 | 
			
		||||
    if not node_to_index:
 | 
			
		||||
        return None
 | 
			
		||||
 | 
			
		||||
    # node stuff
 | 
			
		||||
    doc_id = str(node_to_index.get('objectID', ''))
 | 
			
		||||
 | 
			
		||||
    if not doc_id:
 | 
			
		||||
        log.error('ID missing %s', node_to_index)
 | 
			
		||||
        return None
 | 
			
		||||
 | 
			
		||||
    doc = Node(_id=doc_id)
 | 
			
		||||
 | 
			
		||||
    doc.objectID = str(node_to_index['objectID'])
 | 
			
		||||
    doc.node_type = node_to_index['node_type']
 | 
			
		||||
    doc.name = node_to_index['name']
 | 
			
		||||
    doc.description = node_to_index.get('description')
 | 
			
		||||
    doc.user.id = str(node_to_index['user']['_id'])
 | 
			
		||||
    doc.user.name = node_to_index['user']['full_name']
 | 
			
		||||
    doc.project.id = str(node_to_index['project']['_id'])
 | 
			
		||||
    doc.project.name = node_to_index['project']['name']
 | 
			
		||||
    doc.project.url = node_to_index['project']['url']
 | 
			
		||||
 | 
			
		||||
    if node_to_index['node_type'] == 'asset':
 | 
			
		||||
        doc.media = node_to_index['media']
 | 
			
		||||
 | 
			
		||||
    doc.picture = str(node_to_index.get('picture'))
 | 
			
		||||
 | 
			
		||||
    doc.tags = node_to_index.get('tags')
 | 
			
		||||
    doc.license_notes = node_to_index.get('license_notes')
 | 
			
		||||
    doc.is_free = node_to_index.get('is_free')
 | 
			
		||||
 | 
			
		||||
    doc.created_at = node_to_index['created']
 | 
			
		||||
    doc.updated_at = node_to_index['updated']
 | 
			
		||||
 | 
			
		||||
    return doc
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def create_doc_from_user(user_to_index: dict) -> User:
 | 
			
		||||
    """
 | 
			
		||||
    Create a user document from user
 | 
			
		||||
    """
 | 
			
		||||
 | 
			
		||||
    doc_id = str(user_to_index['objectID'])
 | 
			
		||||
    doc = User(_id=doc_id)
 | 
			
		||||
    doc.objectID = str(user_to_index['objectID'])
 | 
			
		||||
    doc.full_name = user_to_index['full_name']
 | 
			
		||||
    doc.username = user_to_index['username']
 | 
			
		||||
    doc.roles = user_to_index['roles']
 | 
			
		||||
    doc.groups = user_to_index['groups']
 | 
			
		||||
    doc.email = user_to_index['email']
 | 
			
		||||
 | 
			
		||||
    return doc
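
For orientation, a sketch of how these factory functions are meant to be used; in practice the index name comes from the ELASTIC_INDICES config, so 'users' here is an assumption, and the field values are made up:

# Hypothetical indexing sketch; assumes a default elasticsearch_dsl
# connection has been created (see elastic_indexing.py below).
from pillar.api.search import documents

user = {
    'objectID': '563aca02c379cf0005e8e17d',  # made-up ID
    'username': 'alice',
    'full_name': 'Alice Avenue',
    'roles': ['subscriber'],
    'groups': [],
    'email': 'alice@example.com',
}
doc = documents.create_doc_from_user_data(user)
if doc is not None:
    doc.save(index='users')  # 'users' stands in for ELASTIC_INDICES['USER']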

65  pillar/api/search/elastic_indexing.py  Normal file
@@ -0,0 +1,65 @@
import logging

from elasticsearch_dsl.connections import connections
from elasticsearch.exceptions import NotFoundError

from pillar import current_app
from . import documents

log = logging.getLogger(__name__)

elk_hosts = current_app.config['ELASTIC_SEARCH_HOSTS']

connections.create_connection(
    hosts=elk_hosts,
    sniff_on_start=False,
    timeout=20)


def push_updated_user(user_to_index: dict):
    """
    Push an update to the Elastic index when a user item is updated.
    """
    if not user_to_index:
        return

    doc = documents.create_doc_from_user_data(user_to_index)

    if not doc:
        return

    index = current_app.config['ELASTIC_INDICES']['USER']
    log.debug('Index %r update user doc %s in ElasticSearch.', index, doc._id)
    doc.save(index=index)


def index_node_save(node_to_index: dict):
    """
    Push an update to the Elastic index when a node item is saved.
    """
    if not node_to_index:
        return

    doc = documents.create_doc_from_node_data(node_to_index)

    if not doc:
        return

    index = current_app.config['ELASTIC_INDICES']['NODE']
    log.debug('Index %r update node doc %s in ElasticSearch.', index, doc._id)
    doc.save(index=index)


def index_node_delete(delete_id: str):
    """
    Delete a node document from the Elastic index using a node ID.
    """
    index = current_app.config['ELASTIC_INDICES']['NODE']
    log.debug('Index %r node doc delete %s', index, delete_id)

    try:
        doc: documents.Node = documents.Node.get(id=delete_id)
        doc.delete(index=index)
    except NotFoundError:
        # seems to be gone already..
        pass

64  pillar/api/search/index.py  Normal file
@@ -0,0 +1,64 @@
import logging
from typing import List

from elasticsearch.exceptions import NotFoundError
from elasticsearch_dsl.connections import connections
import elasticsearch_dsl as es

from pillar import current_app

from . import documents

log = logging.getLogger(__name__)


class ResetIndexTask(object):
    """ Clear and build index / mapping """

    # Key into the ELASTIC_INDICES dict in the app config.
    index_key: str = ''

    # List of elastic document types
    doc_types: List = []
    name = 'remove index'

    def __init__(self):
        if not self.index_key:
            raise ValueError("No index specified")

        if not self.doc_types:
            raise ValueError("No doc_types specified")

        connections.create_connection(
            hosts=current_app.config['ELASTIC_SEARCH_HOSTS'],
            # sniff_on_start=True,
            retry_on_timeout=True,
        )

    def execute(self):
        index = current_app.config['ELASTIC_INDICES'][self.index_key]
        idx = es.Index(index)

        try:
            idx.delete(ignore=404)
        except NotFoundError:
            log.warning("Could not delete index '%s', ignoring", index)
        else:
            log.info("Deleted index %s", index)

        # create doc types
        for dt in self.doc_types:
            idx.doc_type(dt)

        # create index
        idx.create()


class ResetNodeIndex(ResetIndexTask):
    index_key = 'NODE'
    doc_types = [documents.Node]


class ResetUserIndex(ResetIndexTask):
    index_key = 'USER'
    doc_types = [documents.User]
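
Resetting an index is then a one-liner; a sketch, assuming an application context so that current_app and its config are available:

# Hypothetical usage sketch: drop and rebuild the node index with its mapping.
from pillar.api.search.index import ResetNodeIndex

ResetNodeIndex().execute()  # delete the index, register documents.Node, recreate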

215  pillar/api/search/queries.py  Normal file
@@ -0,0 +1,215 @@
import json
import logging
import typing

from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search, Q, MultiSearch
from elasticsearch_dsl.query import Query

from pillar import current_app

log = logging.getLogger(__name__)

BOOLEAN_TERMS = ['is_free']
NODE_AGG_TERMS = ['node_type', 'media', 'tags', *BOOLEAN_TERMS]
USER_AGG_TERMS = ['roles', ]
ITEMS_PER_PAGE = 10
USER_SOURCE_INCLUDE = ['full_name', 'objectID', 'username']

# Will be set in setup_app()
client: Elasticsearch = None


def add_aggs_to_search(search, agg_terms):
    """
    Add facets / aggregations to the search result.
    """

    for term in agg_terms:
        search.aggs.bucket(term, 'terms', field=term)


def make_filter(must: list, terms: dict) -> list:
    """ Given term parameters, append term queries to the must list. """

    for field, value in terms.items():
        if value not in (None, ''):
            must.append({'term': {field: value}})

    return must


def nested_bool(filters: list, should: list, terms: dict, *, index_alias: str) -> Search:
    """
    Create a nested bool, where the aggregation selection is a must.

    :param index_alias: 'USER' or 'NODE', see ELASTIC_INDICES config.
    """
    filters = make_filter(filters, terms)
    bool_query = Q('bool', should=should)
    bool_query = Q('bool', must=bool_query, filter=filters)

    index = current_app.config['ELASTIC_INDICES'][index_alias]
    search = Search(using=client, index=index)
    search.query = bool_query

    return search


def do_multi_node_search(queries: typing.List[dict]) -> typing.List[dict]:
    """
    Given user query input and term refinements,
    search for public published nodes.
    """
    search = create_multi_node_search(queries)
    return _execute_multi(search)


def do_node_search(query: str, terms: dict, page: int, project_id: str='') -> dict:
    """
    Given user query input and term refinements,
    search for public published nodes.
    """
    search = create_node_search(query, terms, page, project_id)
    return _execute(search)


def create_multi_node_search(queries: typing.List[dict]) -> MultiSearch:
    search = MultiSearch(using=client)
    for q in queries:
        search = search.add(create_node_search(**q))

    return search


def create_node_search(query: str, terms: dict, page: int, project_id: str='') -> Search:
    terms = _transform_terms(terms)
    should = [
        Q('match', name=query),

        {"match": {"project.name": query}},
        {"match": {"user.name": query}},

        Q('match', description=query),
        Q('term', media=query),
        Q('term', tags=query),
    ]
    filters = []
    if project_id:
        filters.append({'term': {'project.id': project_id}})
    if not query:
        should = []
    search = nested_bool(filters, should, terms, index_alias='NODE')
    if not query:
        search = search.sort('-created_at')
    add_aggs_to_search(search, NODE_AGG_TERMS)
    search = paginate(search, page)
    if log.isEnabledFor(logging.DEBUG):
        log.debug(json.dumps(search.to_dict(), indent=4))
    return search


def do_user_search(query: str, terms: dict, page: int) -> dict:
    """ Return user objects represented in an elasticsearch result dict. """

    search = create_user_search(query, terms, page)
    return _execute(search)


def _common_user_search(query: str) -> (typing.List[Query], typing.List[Query]):
    """Construct (filter, should) for regular + admin user search."""
    if not query:
        return [], []

    should = []

    if '@' in query:
        should.append({'term': {'email_exact': {'value': query, 'boost': 50}}})
        email_boost = 25
    else:
        email_boost = 1

    should.extend([
        Q('match', username=query),
        Q('match', full_name=query),
        {'match': {'email': {'query': query, 'boost': email_boost}}},
        {'term': {'username_exact': {'value': query, 'boost': 50}}},
    ])

    return [], should


def do_user_search_admin(query: str, terms: dict, page: int) -> dict:
    """
    Return a user search result dict;
    search all user fields and provide aggregation information.
    """

    search = create_user_admin_search(query, terms, page)
    return _execute(search)


def _execute(search: Search) -> dict:
    if log.isEnabledFor(logging.DEBUG):
        log.debug(json.dumps(search.to_dict(), indent=4))
    resp = search.execute()
    if log.isEnabledFor(logging.DEBUG):
        log.debug(json.dumps(resp.to_dict(), indent=4))
    return resp.to_dict()


def _execute_multi(search: typing.List[Search]) -> typing.List[dict]:
    if log.isEnabledFor(logging.DEBUG):
        log.debug(json.dumps(search.to_dict(), indent=4))
    resp = search.execute()
    if log.isEnabledFor(logging.DEBUG):
        log.debug(json.dumps(resp.to_dict(), indent=4))
    return [r.to_dict() for r in resp]


def create_user_admin_search(query: str, terms: dict, page: int) -> Search:
    terms = _transform_terms(terms)
    filters, should = _common_user_search(query)
    if query:
        # We most likely got an ID field; we should find it.
        if len(query) == len('563aca02c379cf0005e8e17d'):
            should.append({'term': {
                'objectID': {
                    'value': query,  # the thing we're looking for
                    'boost': 100,  # how much more it counts for the score
                }
            }})
    search = nested_bool(filters, should, terms, index_alias='USER')
    add_aggs_to_search(search, USER_AGG_TERMS)
    search = paginate(search, page)
    return search


def create_user_search(query: str, terms: dict, page: int) -> Search:
    search = create_user_admin_search(query, terms, page)
    return search.source(include=USER_SOURCE_INCLUDE)


def paginate(search: Search, page_idx: int) -> Search:
    return search[page_idx * ITEMS_PER_PAGE:(page_idx + 1) * ITEMS_PER_PAGE]


def _transform_terms(terms: dict) -> dict:
    """
    Ugly hack! Elastic uses 1/0 for boolean values in its aggregate response,
    but expects true/false in queries.
    """
    transformed = terms.copy()
    for t in BOOLEAN_TERMS:
        orig = transformed.get(t)
        if orig in ('1', '0'):
            transformed[t] = bool(int(orig))
    return transformed


def setup_app(app):
    global client

    hosts = app.config['ELASTIC_SEARCH_HOSTS']
    log.getChild('setup_app').info('Creating ElasticSearch client for %s', hosts)
    client = Elasticsearch(hosts)
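
A sketch of a faceted node search as the routes below would issue it; setup_app() must have run so the module-level client exists, and the project ID is made up. Note how _transform_terms turns the string '1' into True before the term filter is built:

# Hypothetical query sketch; not part of the diff.
from pillar.api.search import queries

terms = {'node_type': 'asset', 'media': '', 'tags': '',
         'is_free': '1', 'projectname': '', 'roles': ''}
result = queries.do_node_search('rig', terms, page=0,
                                project_id='563aca02c379cf0005e8e17d')
# result is the raw Elasticsearch response dict, including the
# node_type/media/tags/is_free aggregations added by add_aggs_to_search().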

107  pillar/api/search/routes.py  Normal file
@@ -0,0 +1,107 @@
import logging

from flask import Blueprint, request
import elasticsearch.exceptions as elk_ex
from werkzeug import exceptions as wz_exceptions
from pillar.api.utils import authorization, jsonify

from . import queries

log = logging.getLogger(__name__)

blueprint_search = Blueprint('elksearch', __name__)

TERMS = [
    'node_type', 'media',
    'tags', 'is_free', 'projectname',
    'roles',
]


def _term_filters(args) -> dict:
    """
    Check whether the frontend wants to filter on specific fields, AKA facets.

    Returns a mapping from term field name to the user-provided term value.
    """
    return {term: args.get(term, '') for term in TERMS}


def _page_index(page) -> int:
    """Return the page index from the query string."""
    try:
        page_idx = int(page)
    except (TypeError, ValueError):
        log.info('invalid page number %r received', request.args.get('page'))
        raise wz_exceptions.BadRequest()
    return page_idx


@blueprint_search.route('/', methods=['GET'])
def search_nodes():
    searchword = request.args.get('q', '')
    project_id = request.args.get('project', '')
    terms = _term_filters(request.args)
    page_idx = _page_index(request.args.get('page', 0))

    result = queries.do_node_search(searchword, terms, page_idx, project_id)
    return jsonify(result)


@blueprint_search.route('/multisearch', methods=['GET'])
def multi_search_nodes():
    import json
    if len(request.args) != 1:
        log.info(f'Expected 1 argument, received {len(request.args)}')

    json_obj = json.loads([a for a in request.args][0])
    q = []
    for row in json_obj:
        q.append({
            'query': row.get('q', ''),
            'project_id': row.get('project', ''),
            'terms': _term_filters(row),
            'page': _page_index(row.get('page', 0))
        })

    result = queries.do_multi_node_search(q)
    return jsonify(result)


@blueprint_search.route('/user')
def search_user():
    searchword = request.args.get('q', '')
    terms = _term_filters(request.args)
    page_idx = _page_index(request.args.get('page', 0))
    # result is the raw elasticsearch output.
    # we need to filter fields in case of user objects.

    try:
        result = queries.do_user_search(searchword, terms, page_idx)
    except elk_ex.ElasticsearchException as ex:
        resp = jsonify({'_message': str(ex)})
        resp.status_code = 500
        return resp

    return jsonify(result)


@blueprint_search.route('/admin/user')
@authorization.require_login(require_cap='admin')
def search_user_admin():
    """
    User search over all fields.
    """

    searchword = request.args.get('q', '')
    terms = _term_filters(request.args)
    page_idx = _page_index(request.args.get('page', 0))

    try:
        result = queries.do_user_search_admin(searchword, terms, page_idx)
    except elk_ex.ElasticsearchException as ex:
        resp = jsonify({'_message': str(ex)})
        resp.status_code = 500
        return resp

    return jsonify(result)
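
Worth noting: /multisearch expects its whole payload as the single key of the query string, JSON-encoded, which is what the `[a for a in request.args][0]` above unpacks. A client-side sketch (the /api/newsearch prefix is an assumption; it depends on the url_prefix passed to setup_app):

# Hypothetical client sketch for the /multisearch endpoint.
import json
import urllib.parse

payload = json.dumps([
    {'q': 'rig', 'project': '563aca02c379cf0005e8e17d', 'page': 0},
    {'q': 'walk cycle', 'project': '', 'page': 0},
])
url = '/api/newsearch/multisearch?' + urllib.parse.quote(payload)
# GET this URL: the view json-loads the single query-string key and runs
# one node search per row, returning a list of result dicts.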

pillar/api/service.py
@@ -1,24 +1,31 @@
 """Service accounts."""

 import logging
+import typing

 import blinker
+import bson
 from flask import Blueprint, current_app, request
-from pillar.api import local_auth
-from pillar.api.utils import mongo
-from pillar.api.utils import authorization, authentication, str2id, jsonify
 from werkzeug import exceptions as wz_exceptions

+from pillar.api import local_auth
+from pillar.api.utils import authorization, authentication
+
 blueprint = Blueprint('service', __name__)
 log = logging.getLogger(__name__)
 signal_user_changed_role = blinker.NamedSignal('badger:user_changed_role')

-ROLES_WITH_GROUPS = {u'admin', u'demo', u'subscriber'}
+ROLES_WITH_GROUPS = {'admin', 'demo', 'subscriber'}

 # Map of role name to group ID, for the above groups.
 role_to_group_id = {}


+class ServiceAccountCreationError(Exception):
+    """Raised when a service account cannot be created."""
+
+
 @blueprint.before_app_first_request
 def fetch_role_to_group_id_map():
     """Fills the _role_to_group_id mapping upon application startup."""
@@ -38,7 +45,7 @@ def fetch_role_to_group_id_map():

 @blueprint.route('/badger', methods=['POST'])
-@authorization.require_login(require_roles={u'service', u'badger'}, require_all=True)
+@authorization.require_login(require_roles={'service', 'badger'}, require_all=True)
 def badger():
     if request.mimetype != 'application/json':
         log.debug('Received %s instead of application/json', request.mimetype)
@@ -70,42 +77,76 @@ def badger():
                     action, user_email, role, action, role)
         return 'Role not allowed', 403

-    return do_badger(action, user_email, role)
+    return do_badger(action, role=role, user_email=user_email)


-def do_badger(action, user_email, role):
-    """Performs a badger action, returning a HTTP response."""
+def do_badger(action: str, *,
+              role: str=None, roles: typing.Iterable[str]=None,
+              user_email: str = '', user_id: bson.ObjectId = None):
+    """Performs a badger action, returning a HTTP response.
+
+    Either role or roles must be given.
+    Either user_email or user_id must be given.
+    """

     if action not in {'grant', 'revoke'}:
+        log.error('do_badger(%r, %r, %r, %r): action %r not supported.',
+                  action, role, user_email, user_id, action)
         raise wz_exceptions.BadRequest('Action %r not supported' % action)

-    if not user_email:
+    if not user_email and user_id is None:
+        log.error('do_badger(%r, %r, %r, %r): neither email nor user_id given.',
+                  action, role, user_email, user_id)
         raise wz_exceptions.BadRequest('User email not given')

-    if not role:
-        raise wz_exceptions.BadRequest('Role not given')
+    if bool(role) == bool(roles):
+        log.error('do_badger(%r, role=%r, roles=%r, %r, %r): '
+                  'either "role" or "roles" must be given.',
+                  action, role, roles, user_email, user_id)
+        raise wz_exceptions.BadRequest('Invalid role(s) given')
+
+    # If only a single role was given, handle it as a set of one role.
+    if not roles:
+        roles = {role}
+    del role

     users_coll = current_app.data.driver.db['users']

     # Fetch the user
-    db_user = users_coll.find_one({'email': user_email}, projection={'roles': 1, 'groups': 1})
+    if user_email:
+        query = {'email': user_email}
+    else:
+        query = user_id
+    db_user = users_coll.find_one(query, projection={'roles': 1, 'groups': 1})
     if db_user is None:
-        log.warning('badger(%s, %s, %s): user not found', action, user_email, role)
+        log.warning('badger(%s, roles=%s, user_email=%s, user_id=%s): user not found',
+                    action, roles, user_email, user_id)
         return 'User not found', 404

     # Apply the action
-    roles = set(db_user.get('roles') or [])
+    user_roles = set(db_user.get('roles') or [])
     if action == 'grant':
-        roles.add(role)
+        user_roles |= roles
     else:
-        roles.discard(role)
+        user_roles -= roles

-    groups = manage_user_group_membership(db_user, role, action)
+    groups = None
+    for role in roles:
+        groups = manage_user_group_membership(db_user, role, action)

-    updates = {'roles': list(roles)}
+        if groups is None:
+            # No change for this role
+            continue
+
+        # Also update db_user for the next iteration.
+        db_user['groups'] = groups
+
+    updates = {'roles': list(user_roles)}
     if groups is not None:
         updates['groups'] = list(groups)

+    log.debug('badger(%s, %s, user_email=%s, user_id=%s): applying updates %r',
+              action, role, user_email, user_id, updates)
     users_coll.update_one({'_id': db_user['_id']},
                           {'$set': updates})

@@ -116,19 +157,6 @@ def do_badger(action, user_email, role):
     return '', 204


-@blueprint.route('/urler/<project_id>', methods=['GET'])
-@authorization.require_login(require_roles={u'service', u'urler'}, require_all=True)
-def urler(project_id):
-    """Returns the URL of any project."""
-
-    project_id = str2id(project_id)
-    project = mongo.find_one_or_404('projects', project_id,
-                                    projection={'url': 1})
-    return jsonify({
-        '_id': project_id,
-        'url': project['url']})


 def manage_user_group_membership(db_user, role, action):
     """Some roles have associated groups; this function maintains group & role membership.

@@ -162,69 +190,52 @@ def manage_user_group_membership(db_user, role, action):
     return user_groups


-def create_service_account(email, roles, service, update_existing=None):
+def create_service_account(email: str, roles: typing.Iterable, service: dict,
+                           *, full_name: str=None):
     """Creates a service account with the given roles + the role 'service'.

-    :param email: email address associated with the account
-    :type email: str
+    :param email: optional email address associated with the account.
     :param roles: iterable of role names
     :param service: dict of the 'service' key in the user.
-    :type service: dict
-    :param update_existing: callback function that receives an existing user to update
-        for this service, in case the email address is already in use by someone.
-        If not given or None, updating existing users is disallowed, and a ValueError
-        exception is thrown instead.
+    :param full_name: Full name of the service account. If None, will be set to
+        something reasonable.

     :return: tuple (user doc, token doc)
     """

-    from pillar.api.utils import remove_private_keys
+    # Create a user with the correct roles.
+    roles = sorted(set(roles).union({'service'}))
+    user_id = bson.ObjectId()

-    # Find existing
-    users_coll = current_app.db()['users']
-    user = users_coll.find_one({'email': email})
-    if user:
-        # Check whether updating is allowed at all.
-        if update_existing is None:
-            raise ValueError('User %s already exists' % email)
+    log.info('Creating service account %s with roles %s', user_id, roles)
+    user = {'_id': user_id,
+            'username': f'SRV-{user_id}',
+            'groups': [],
+            'roles': roles,
+            'settings': {'email_communications': 0},
+            'auth': [],
+            'full_name': full_name or f'SRV-{user_id}',
+            'service': service}
+    if email:
+        user['email'] = email
+    result, _, _, status = current_app.post_internal('users', user)

-        # Compute the new roles, and assign.
-        roles = list(set(roles).union({u'service'}).union(user['roles']))
-        user['roles'] = list(roles)
-
-        # Let the caller perform any required updates.
-        log.info('Updating existing user %s to become service account for %s',
-                 email, roles)
-        update_existing(user['service'])
-
-        # Try to store the updated user.
-        result, _, _, status = current_app.put_internal('users',
-                                                        remove_private_keys(user),
-                                                        _id=user['_id'])
-        expected_status = 200
-    else:
-        # Create a user with the correct roles.
-        roles = list(set(roles).union({u'service'}))
-        user = {'username': email,
-                'groups': [],
-                'roles': roles,
-                'settings': {'email_communications': 0},
-                'auth': [],
-                'full_name': email,
-                'email': email,
-                'service': service}
-        result, _, _, status = current_app.post_internal('users', user)
-        expected_status = 201
-
-    if status != expected_status:
-        raise SystemExit('Error creating user {}: {}'.format(email, result))
+    if status != 201:
+        raise ServiceAccountCreationError('Error creating user {}: {}'.format(user_id, result))
     user.update(result)

     # Create an authentication token that won't expire for a long time.
-    token = local_auth.generate_and_store_token(user['_id'], days=36500, prefix='SRV')
+    token = generate_auth_token(user['_id'])

     return user, token


+def generate_auth_token(service_account_id) -> dict:
+    """Generates an authentication token for a service account."""
+
+    token_info = local_auth.generate_and_store_token(service_account_id, days=36500, prefix=b'SRV')
+    return token_info
+
+
 def setup_app(app, api_prefix):
     app.register_api_blueprint(blueprint, url_prefix=api_prefix)
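
Putting the new creation flow together, a sketch; the shape of the service dict is caller-defined, so the 'badger' payload here is illustrative only, and an application context is assumed for post_internal:

# Hypothetical usage sketch: create a badger service account with a token.
from pillar.api.service import create_service_account

account, token_info = create_service_account(
    email='',                    # email is optional now
    roles={'badger'},            # the 'service' role is added automatically
    service={'badger': {}},      # illustrative service payload
    full_name='Badger service',
)
# account['_id'] is the generated ObjectId; token_info holds the
# long-lived 'SRV'-prefixed authentication token.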

373  pillar/api/timeline.py  Normal file
@@ -0,0 +1,373 @@
import itertools
 | 
			
		||||
import typing
 | 
			
		||||
from datetime import datetime
 | 
			
		||||
from operator import itemgetter
 | 
			
		||||
 | 
			
		||||
import attr
 | 
			
		||||
import bson
 | 
			
		||||
import pymongo
 | 
			
		||||
from flask import Blueprint, current_app, request, url_for
 | 
			
		||||
 | 
			
		||||
import pillar
 | 
			
		||||
from pillar import shortcodes
 | 
			
		||||
from pillar.api.utils import jsonify, pretty_duration, str2id
 | 
			
		||||
 | 
			
		||||
blueprint = Blueprint('timeline', __name__)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@attr.s(auto_attribs=True)
 | 
			
		||||
class TimelineDO:
 | 
			
		||||
    groups: typing.List['GroupDO'] = []
 | 
			
		||||
    continue_from: typing.Optional[float] = None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@attr.s(auto_attribs=True)
 | 
			
		||||
class GroupDO:
 | 
			
		||||
    label: typing.Optional[str] = None
 | 
			
		||||
    url: typing.Optional[str] = None
 | 
			
		||||
    items: typing.Dict = {}
 | 
			
		||||
    groups: typing.Iterable['GroupDO'] = []
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class SearchHelper:
 | 
			
		||||
    def __init__(self, nbr_of_weeks: int, continue_from: typing.Optional[datetime],
 | 
			
		||||
                 project_ids: typing.List[bson.ObjectId], sort_direction: str):
 | 
			
		||||
        self._nbr_of_weeks = nbr_of_weeks
 | 
			
		||||
        self._continue_from = continue_from
 | 
			
		||||
        self._project_ids = project_ids
 | 
			
		||||
        self.sort_direction = sort_direction
 | 
			
		||||
 | 
			
		||||
    def _match(self, continue_from: typing.Optional[datetime]) -> dict:
 | 
			
		||||
        created = {}
 | 
			
		||||
        if continue_from:
 | 
			
		||||
            if self.sort_direction == 'desc':
 | 
			
		||||
                created = {'_created': {'$lt': continue_from}}
 | 
			
		||||
            else:
 | 
			
		||||
                created = {'_created': {'$gt': continue_from}}
 | 
			
		||||
        return {'_deleted': {'$ne': True},
 | 
			
		||||
                'node_type': {'$in': ['asset', 'post']},
 | 
			
		||||
                'project': {'$in': self._project_ids},
 | 
			
		||||
                **created,
 | 
			
		||||
                }
 | 
			
		||||
 | 
			
		||||
    def raw_weeks_from_mongo(self) -> pymongo.collection.Collection:
 | 
			
		||||
        direction = pymongo.DESCENDING if self.sort_direction == 'desc' else pymongo.ASCENDING
 | 
			
		||||
        nodes_coll = current_app.db('nodes')
 | 
			
		||||
        return nodes_coll.aggregate([
 | 
			
		||||
            {'$match': self._match(self._continue_from)},
 | 
			
		||||
            {'$lookup': {"from": "projects",
 | 
			
		||||
                         "localField": "project",
 | 
			
		||||
                         "foreignField": "_id",
 | 
			
		||||
                         "as": "project"}},
 | 
			
		||||
            {'$unwind': {'path': "$project"}},
 | 
			
		||||
            {'$lookup': {"from": "users",
 | 
			
		||||
                         "localField": "user",
 | 
			
		||||
                         "foreignField": "_id",
 | 
			
		||||
                         "as": "user"}},
 | 
			
		||||
            {'$unwind': {'path': "$user"}},
 | 
			
		||||
            {'$project': {
 | 
			
		||||
                '_created': 1,
 | 
			
		||||
                'project._id': 1,
 | 
			
		||||
                'project.url': 1,
 | 
			
		||||
                'project.name': 1,
 | 
			
		||||
                'user._id': 1,
 | 
			
		||||
                'user.full_name': 1,
 | 
			
		||||
                'name': 1,
 | 
			
		||||
                'node_type': 1,
 | 
			
		||||
                'picture': 1,
 | 
			
		||||
                'properties': 1,
 | 
			
		||||
                'permissions': 1,
 | 
			
		||||
            }},
 | 
			
		||||
            {'$group': {
 | 
			
		||||
                '_id': {'year': {'$isoWeekYear': '$_created'},
 | 
			
		||||
                        'week': {'$isoWeek': '$_created'}},
 | 
			
		||||
                'nodes': {'$push': '$$ROOT'}
 | 
			
		||||
            }},
 | 
			
		||||
            {'$sort': {'_id.year': direction,
 | 
			
		||||
                       '_id.week': direction}},
 | 
			
		||||
            {'$limit': self._nbr_of_weeks}
 | 
			
		||||
        ])
 | 
			
		||||
 | 
			
		||||
    def has_more(self, continue_from: datetime) -> bool:
 | 
			
		||||
        nodes_coll = current_app.db('nodes')
 | 
			
		||||
        result = nodes_coll.count(self._match(continue_from))
 | 
			
		||||
        return bool(result)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Grouper:
 | 
			
		||||
    @classmethod
 | 
			
		||||
    def label(cls, node):
 | 
			
		||||
        return None
 | 
			
		||||
 | 
			
		||||
    @classmethod
 | 
			
		||||
    def url(cls, node):
 | 
			
		||||
        return None
 | 
			
		||||
 | 
			
		||||
    @classmethod
 | 
			
		||||
    def group_key(cls) -> typing.Callable[[dict], typing.Any]:
 | 
			
		||||
        raise NotImplemented()
 | 
			
		||||
 | 
			
		||||
    @classmethod
 | 
			
		||||
    def sort_key(cls) -> typing.Callable[[dict], typing.Any]:
 | 
			
		||||
        raise NotImplemented()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class ProjectGrouper(Grouper):
 | 
			
		||||
    @classmethod
 | 
			
		||||
    def label(cls, project: dict):
 | 
			
		||||
        return project['name']
 | 
			
		||||
 | 
			
		||||
    @classmethod
 | 
			
		||||
    def url(cls, project: dict):
 | 
			
		||||
        return url_for('projects.view', project_url=project['url'])
 | 
			
		||||
 | 
			
		||||
    @classmethod
 | 
			
		||||
    def group_key(cls) -> typing.Callable[[dict], typing.Any]:
 | 
			
		||||
        return itemgetter('project')
 | 
			
		||||
 | 
			
		||||
    @classmethod
 | 
			
		||||
    def sort_key(cls) -> typing.Callable[[dict], typing.Any]:
 | 
			
		||||
        return lambda node: node['project']['_id']
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class UserGrouper(Grouper):
 | 
			
		||||
    @classmethod
 | 
			
		||||
    def label(cls, user):
 | 
			
		||||
        return user['full_name']
 | 
			
		||||
 | 
			
		||||
    @classmethod
 | 
			
		||||
    def group_key(cls) -> typing.Callable[[dict], typing.Any]:
 | 
			
		||||
        return itemgetter('user')
 | 
			
		||||
 | 
			
		||||
    @classmethod
 | 
			
		||||
    def sort_key(cls) -> typing.Callable[[dict], typing.Any]:
 | 
			
		||||
        return lambda node: node['user']['_id']
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class TimeLineBuilder:
 | 
			
		||||
    def __init__(self, search_helper: SearchHelper, grouper: typing.Type[Grouper]):
 | 
			
		||||
        self.search_helper = search_helper
 | 
			
		||||
        self.grouper = grouper
 | 
			
		||||
        self.continue_from = None
 | 
			
		||||
 | 
			
		||||
    def build(self) -> TimelineDO:
 | 
			
		||||
        raw_weeks = self.search_helper.raw_weeks_from_mongo()
 | 
			
		||||
        clean_weeks = (self.create_week_group(week) for week in raw_weeks)
 | 
			
		||||
 | 
			
		||||
        return TimelineDO(
 | 
			
		||||
            groups=list(clean_weeks),
 | 
			
		||||
            continue_from=self.continue_from.timestamp() if self.search_helper.has_more(self.continue_from) else None
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
    def create_week_group(self, week: dict) -> GroupDO:
 | 
			
		||||
        nodes = week['nodes']
 | 
			
		||||
        nodes.sort(key=itemgetter('_created'), reverse=True)
 | 
			
		||||
        self.update_continue_from(nodes)
 | 
			
		||||
        groups = self.create_groups(nodes)
 | 
			
		||||
 | 
			
		||||
        return GroupDO(
 | 
			
		||||
            label=f'Week {week["_id"]["week"]}, {week["_id"]["year"]}',
 | 
			
		||||
            groups=groups
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
    def create_groups(self, nodes: typing.List[dict]) -> typing.List[GroupDO]:
 | 
			
		||||
        self.sort_nodes(nodes)  # groupby assumes that the list is sorted
 | 
			
		||||
        nodes_grouped = itertools.groupby(nodes, self.grouper.group_key())
 | 
			
		||||
        groups = (self.clean_group(grouped_by, group) for grouped_by, group in nodes_grouped)
 | 
			
		||||
        groups_sorted = sorted(groups, key=self.group_row_sorter, reverse=True)
 | 
			
		||||
        return groups_sorted
 | 
			
		||||
 | 
			
		||||
    def sort_nodes(self, nodes: typing.List[dict]):
 | 
			
		||||
        nodes.sort(key=itemgetter('node_type'))
 | 
			
		||||
        nodes.sort(key=self.grouper.sort_key())
 | 
			
		||||
 | 
			
		||||
    def update_continue_from(self, sorted_nodes: typing.List[dict]):
 | 
			
		||||
        if self.search_helper.sort_direction == 'desc':
 | 
			
		||||
            first_created = sorted_nodes[-1]['_created']
 | 
			
		||||
            candidate = self.continue_from or first_created
 | 
			
		||||
            self.continue_from = min(candidate, first_created)
 | 
			
		||||
        else:
 | 
			
		||||
            last_created = sorted_nodes[0]['_created']
 | 
			
		||||
            candidate = self.continue_from or last_created
 | 
			
		||||
            self.continue_from = max(candidate, last_created)
 | 
			
		||||
 | 
			
		||||
    def clean_group(self, grouped_by: typing.Any, group: typing.Iterable[dict]) -> GroupDO:
        items = self.create_items(group)
        return GroupDO(
            label=self.grouper.label(grouped_by),
            url=self.grouper.url(grouped_by),
            items=items
        )

    def create_items(self, group) -> typing.Dict[str, typing.List[dict]]:
        by_node_type = itertools.groupby(group, key=itemgetter('node_type'))
        items = {}
        for node_type, nodes in by_node_type:
            items[node_type] = [self.node_prettyfy(n) for n in nodes]
        return items

    @classmethod
    def node_prettyfy(cls, node: dict) -> dict:
        duration_seconds = node['properties'].get('duration_seconds')
        if duration_seconds is not None:
            node['properties']['duration'] = pretty_duration(duration_seconds)
        if node['node_type'] == 'post':
            html = _get_markdowned_html(node['properties'], 'content')
            html = shortcodes.render_commented(html, context=node['properties'])
            node['properties']['pretty_content'] = html
        return node

    @classmethod
    def group_row_sorter(cls, row: GroupDO) -> typing.Tuple[datetime, datetime]:
        '''
        Groups that contain posts are more interesting, and are therefore put higher up.

        :param row:
        :return: tuple with newest post date and newest asset date
        '''
        def newest_created(nodes: typing.List[dict]) -> datetime:
            if nodes:
                return nodes[0]['_created']
            return datetime.fromtimestamp(0, tz=bson.tz_util.utc)

        newest_post_date = newest_created(row.items.get('post'))
        newest_asset_date = newest_created(row.items.get('asset'))
        return newest_post_date, newest_asset_date

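Because group_row_sorter() returns a (newest_post_date, newest_asset_date) tuple and tuples compare element-wise, sorting with reverse=True ranks groups with recent posts above groups that only have recent assets. A small illustration with made-up dates:

    from datetime import datetime

    rows = [(datetime(2018, 1, 1), datetime(2018, 6, 1)),   # old post, new asset
            (datetime(2018, 5, 1), datetime(2018, 2, 1))]   # newer post
    sorted(rows, reverse=True)[0]
    # -> (datetime(2018, 5, 1), ...): the row with the newest post wins;
    #    the asset date is only consulted on a tie.
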
def _public_project_ids() -> typing.List[bson.ObjectId]:
    """Returns a list of ObjectIDs of public projects.

    Memoized in setup_app().
    """

    proj_coll = current_app.db('projects')
    result = proj_coll.find({'is_private': False}, {'_id': 1})
    return [p['_id'] for p in result]


def _get_markdowned_html(document: dict, field_name: str) -> str:
    cache_field_name = pillar.markdown.cache_field_name(field_name)
    html = document.get(cache_field_name)
    if html is None:
        markdown_src = document.get(field_name) or ''
        html = pillar.markdown.markdown(markdown_src)
    return html


@blueprint.route('/', methods=['GET'])
def global_timeline():
    continue_from_str = request.args.get('from')
    continue_from = parse_continue_from(continue_from_str)
    nbr_of_weeks_str = request.args.get('weeksToLoad')
    nbr_of_weeks = parse_nbr_of_weeks(nbr_of_weeks_str)
    sort_direction = request.args.get('dir', 'desc')
    return _global_timeline(continue_from, nbr_of_weeks, sort_direction)


@blueprint.route('/p/<string(length=24):pid_path>', methods=['GET'])
def project_timeline(pid_path: str):
    continue_from_str = request.args.get('from')
    continue_from = parse_continue_from(continue_from_str)
    nbr_of_weeks_str = request.args.get('weeksToLoad')
    nbr_of_weeks = parse_nbr_of_weeks(nbr_of_weeks_str)
    sort_direction = request.args.get('dir', 'desc')
    pid = str2id(pid_path)
    return _project_timeline(continue_from, nbr_of_weeks, sort_direction, pid)


def parse_continue_from(from_arg) -> typing.Optional[datetime]:
    try:
        from_float = float(from_arg)
    except (TypeError, ValueError):
        return None
    return datetime.fromtimestamp(from_float, tz=bson.tz_util.utc)


def parse_nbr_of_weeks(weeks_to_load: str) -> int:
    try:
        return int(weeks_to_load)
    except (TypeError, ValueError):
        return 3
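
The 'from'/'weeksToLoad'/'dir' parameters combine into a simple paging protocol: each response carries a continue_from timestamp that the client feeds back verbatim. A hypothetical client sketch (the base URL and mount point are assumptions, not taken from this change):

    import requests

    base = 'https://example.com/api/timeline'
    page = requests.get(base, params={'weeksToLoad': 3}).json()
    while page.get('continue_from'):
        # Feed the returned timestamp back via ?from= to fetch the next weeks.
        page = requests.get(base, params={'from': page['continue_from'],
                                          'weeksToLoad': 3, 'dir': 'desc'}).json()
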


def _global_timeline(continue_from: typing.Optional[datetime], nbr_of_weeks: int, sort_direction: str):
    """Returns an aggregated view of what has happened on the site.

    Memoized in setup_app().

    :param continue_from: Python utc timestamp where to begin aggregation

    :param nbr_of_weeks: Number of weeks to return

    Example output:
    {
    groups: [{
        label: 'Week 32',
        groups: [{
            label: 'Spring',
            url: '/p/spring',
            items:{
                post: [blogPostDoc, blogPostDoc],
                asset: [assetDoc, assetDoc]
            },
            groups: ...
            }]
        }],
        continue_from: 123456.2 // python timestamp
    }
    """
    builder = TimeLineBuilder(
        SearchHelper(nbr_of_weeks, continue_from, _public_project_ids(), sort_direction),
        ProjectGrouper
    )
    return jsonify_timeline(builder.build())


def jsonify_timeline(timeline: TimelineDO):
    return jsonify(
        attr.asdict(timeline,
                    recurse=True,
                    filter=lambda att, value: value is not None)
    )
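
The attr.asdict() filter above is what keeps optional fields, such as a group's url or the final continue_from, out of the JSON payload when they are None. A self-contained illustration with a hypothetical attrs class (not the real TimelineDO):

    import attr

    @attr.s(auto_attribs=True)
    class Group:
        label: str
        url: str = None

    attr.asdict(Group('Week 32'), filter=lambda att, value: value is not None)
    # -> {'label': 'Week 32'}; the unset 'url' is omitted entirely.
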


def _project_timeline(continue_from: typing.Optional[datetime], nbr_of_weeks: int, sort_direction, pid: bson.ObjectId):
    """Returns an aggregated view of what has happened in the project.

    Memoized in setup_app().

    :param continue_from: Python utc timestamp where to begin aggregation

    :param nbr_of_weeks: Number of weeks to return

    Example output:
    {
    groups: [{
        label: 'Week 32',
        groups: [{
            label: 'Tobias Johansson',
            items:{
                post: [blogPostDoc, blogPostDoc],
                asset: [assetDoc, assetDoc]
            },
            groups: ...
            }]
        }],
        continue_from: 123456.2 // python timestamp
    }
    """
    builder = TimeLineBuilder(
        SearchHelper(nbr_of_weeks, continue_from, [pid], sort_direction),
        UserGrouper
    )
    return jsonify_timeline(builder.build())


def setup_app(app, url_prefix):
    global _public_project_ids
    global _global_timeline
    global _project_timeline

    app.register_api_blueprint(blueprint, url_prefix=url_prefix)
    cached = app.cache.cached(timeout=3600)
    _public_project_ids = cached(_public_project_ids)
    memoize = app.cache.memoize(timeout=60)
    _global_timeline = memoize(_global_timeline)
    _project_timeline = memoize(_project_timeline)
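
The rebinding through `global` above is deliberate: the cache decorators need a live app object, so they cannot be applied at import time. A minimal sketch of the same pattern, assuming a Flask-Caching style app.cache:

    def _expensive_query():
        ...

    def setup_app(app):
        global _expensive_query
        # Rebind the module-level name once the app (and its cache) exists;
        # every caller that looks the name up at call time gets the memoized version.
        _expensive_query = app.cache.memoize(timeout=60)(_expensive_query)
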
@@ -1,15 +1,79 @@
import logging

import bson
from flask import current_app

from . import hooks
from .routes import blueprint_api

log = logging.getLogger(__name__)


def remove_user_from_group(user_id: bson.ObjectId, group_id: bson.ObjectId):
    """Removes the user from the given group.

    Directly uses MongoDB, so that it doesn't require any special permissions.
    """

    log.info('Removing user %s from group %s', user_id, group_id)
    user_group_action(user_id, group_id, '$pull')


def add_user_to_group(user_id: bson.ObjectId, group_id: bson.ObjectId):
    """Makes the user member of the given group.

    Directly uses MongoDB, so that it doesn't require any special permissions.
    """

    log.info('Adding user %s to group %s', user_id, group_id)
    user_group_action(user_id, group_id, '$addToSet')


def user_group_action(user_id: bson.ObjectId, group_id: bson.ObjectId, action: str):
    """Performs a group action (add/remove).

    :param user_id: the user's ObjectID.
    :param group_id: the group's ObjectID.
    :param action: either '$pull' to remove from a group, or '$addToSet' to add to a group.
    """

    from pymongo.results import UpdateResult

    assert isinstance(user_id, bson.ObjectId)
    assert isinstance(group_id, bson.ObjectId)
    assert action in {'$pull', '$addToSet'}

    users_coll = current_app.db('users')
    result: UpdateResult = users_coll.update_one(
        {'_id': user_id},
        {action: {'groups': group_id}},
    )

    if result.matched_count == 0:
        raise ValueError(f'Unable to {action} user {user_id} membership of group {group_id}; '
                         f'user not found.')
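
Both operators make user_group_action() idempotent: '$addToSet' only appends group_id when it is not yet in the user's 'groups' array, and '$pull' removes every occurrence. A standalone sketch (the local MongoDB and database name are assumptions for the demo):

    import bson
    from pymongo import MongoClient

    users_coll = MongoClient().pillar_demo.users
    uid, gid = bson.ObjectId(), bson.ObjectId()
    users_coll.insert_one({'_id': uid, 'groups': []})

    users_coll.update_one({'_id': uid}, {'$addToSet': {'groups': gid}})
    users_coll.update_one({'_id': uid}, {'$addToSet': {'groups': gid}})  # no duplicate
    users_coll.update_one({'_id': uid}, {'$pull': {'groups': gid}})      # gone again
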


def _update_search_user_changed_role(sender, user: dict):
    log.debug('Sending updated user %s to Algolia due to role change', user['_id'])
    hooks.push_updated_user_to_search(user, original=None)


def setup_app(app, api_prefix):
    from pillar.api import service

    app.on_pre_GET_users += hooks.check_user_access
    app.on_post_GET_users += hooks.post_GET_user
    app.on_pre_PUT_users += hooks.check_put_access
    app.on_pre_PUT_users += hooks.before_replacing_user
-    app.on_replaced_users += hooks.push_updated_user_to_algolia
+    app.on_replaced_users += hooks.push_updated_user_to_search
    app.on_replaced_users += hooks.send_blinker_signal_roles_changed
    app.on_fetched_item_users += hooks.after_fetching_user
    app.on_fetched_resource_users += hooks.after_fetching_user_resource

    app.on_insert_users += hooks.before_inserting_users
    app.on_inserted_users += hooks.after_inserting_users

    app.register_api_blueprint(blueprint_api, url_prefix=api_prefix)

    service.signal_user_changed_role.connect(_update_search_user_changed_role)
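
The signal hookup above uses blinker's connect/send pair; any number of listeners can subscribe to signal_user_changed_role. A toy example of the mechanism (names are illustrative):

    from blinker import signal

    user_changed_role = signal('service.user-changed-role')

    def on_role_change(sender, user):
        print('re-indexing user', user['_id'])

    user_changed_role.connect(on_role_change)
    user_changed_role.send(None, user={'_id': 42})  # -> listener fires
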
@@ -1,88 +1,129 @@
import copy
import json

import bson
from eve.utils import parse_request
from flask import current_app, g
from werkzeug import exceptions as wz_exceptions

from pillar import current_app
from pillar.api.users.routes import log
from pillar.api.utils.authorization import user_has_role
from werkzeug.exceptions import Forbidden
import pillar.auth

USER_EDITABLE_FIELDS = {'full_name', 'username', 'email', 'settings'}

# These fields nobody is allowed to touch directly, not even admins.
USER_ALWAYS_RESTORE_FIELDS = {'auth'}

def before_replacing_user(request, lookup):
-    """Loads the auth field from the database, preventing any changes."""
+    """Prevents changes to any field of the user doc, except USER_EDITABLE_FIELDS."""

    # Find the user that is being replaced
    req = parse_request('users')
-    req.projection = json.dumps({'auth': 1})
+    req.projection = json.dumps({key: 0 for key in USER_EDITABLE_FIELDS})
    original = current_app.data.find_one('users', req, **lookup)

-    # Make sure that the replacement has a valid auth field.
-    updates = request.get_json()
-    assert updates is request.get_json()  # We should get a ref to the cached JSON, and not a copy.
+    put_data = request.get_json()
+    if put_data is None:
+        raise wz_exceptions.BadRequest('No JSON data received')

-    if 'auth' in original:
-        updates['auth'] = copy.deepcopy(original['auth'])
-    else:
-        updates.pop('auth', None)
+    # We should get a ref to the cached JSON, and not a copy. This will allow us to
+    # modify the cached JSON so that Eve sees our modifications.
+    assert put_data is request.get_json()

    # Reset fields that shouldn't be edited to their original values. This is only
    # needed when users are editing themselves; admins are allowed to edit much more.
    if not pillar.auth.current_user.has_cap('admin'):
        for db_key, db_value in original.items():
            if db_key[0] == '_' or db_key in USER_EDITABLE_FIELDS:
                continue

            if db_key in original:
                put_data[db_key] = copy.deepcopy(original[db_key])

        # Remove fields added by this PUT request, except when they are user-editable.
        for put_key in list(put_data.keys()):
            if put_key[0] == '_' or put_key in USER_EDITABLE_FIELDS:
                continue

            if put_key not in original:
                del put_data[put_key]

    # Always restore those fields
    for db_key in USER_ALWAYS_RESTORE_FIELDS:
        if db_key in original:
            put_data[db_key] = copy.deepcopy(original[db_key])
        else:
            put_data.pop(db_key, None)  # the request may not contain this field at all

    # Regular users should always have an email address
    if 'service' not in put_data.get('roles', ()):
        if not put_data.get('email'):
            raise wz_exceptions.UnprocessableEntity(
                'email field must be given')
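
Net effect for non-admins: protected fields in the PUT body are silently reset rather than rejected. A worked example of that restore loop (the constant mirrors the one above):

    USER_EDITABLE_FIELDS = {'full_name', 'username', 'email', 'settings'}

    original = {'full_name': 'Alice', 'roles': ['subscriber']}
    put_data = {'full_name': 'Alice Cooper', 'roles': ['admin']}   # attempted escalation

    for db_key, db_value in original.items():
        if db_key[0] == '_' or db_key in USER_EDITABLE_FIELDS:
            continue
        put_data[db_key] = db_value

    # put_data -> {'full_name': 'Alice Cooper', 'roles': ['subscriber']}
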


-def push_updated_user_to_algolia(user, original):
-    """Push an update to the Algolia index when a user item is updated"""
+def push_updated_user_to_search(user, original):
+    """
+    Push an update to the Search index when a user
+    item is updated
+    """

-    from algoliasearch.client import AlgoliaException
-    from pillar.api.utils.algolia import algolia_index_user_save
+    from pillar.celery import search_index_tasks as searchindex

-    try:
-        algolia_index_user_save(user)
-    except AlgoliaException as ex:
-        log.warning('Unable to push user info to Algolia for user "%s", id=%s; %s',
-                    user.get('username'), user.get('_id'), ex)
+    searchindex.updated_user.delay(str(user['_id']))

def send_blinker_signal_roles_changed(user, original):
-    """Sends a Blinker signal that the user roles were changed, so others can respond."""
+    """
+    Sends a Blinker signal that the user roles were
+    changed, so others can respond.
+    """

-    if user.get('roles') == original.get('roles'):
+    current_roles = set(user.get('roles', []))
+    original_roles = set(original.get('roles', []))
+
+    if current_roles == original_roles:
        return

    from pillar.api.service import signal_user_changed_role

    log.info('User %s changed roles to %s, sending Blinker signal',
-             user.get('_id'), user.get('roles'))
+             user.get('_id'), current_roles)
    signal_user_changed_role.send(current_app, user=user)


def check_user_access(request, lookup):
    """Modifies the lookup dict to limit returned user info."""

    # No access when not logged in.
-    current_user = g.get('current_user')
-    current_user_id = current_user['user_id'] if current_user else None
+    user = pillar.auth.get_current_user()

    # Admins can do anything and get everything, except the 'auth' block.
-    if user_has_role(u'admin'):
+    if user.has_cap('admin'):
        return

-    if not lookup and not current_user:
-        raise Forbidden()
+    if not lookup and user.is_anonymous:
+        raise wz_exceptions.Forbidden()

    # Add a filter to only return the current user.
    if '_id' not in lookup:
-        lookup['_id'] = current_user['user_id']
+        lookup['_id'] = user.user_id


def check_put_access(request, lookup):
    """Only allow PUT to the current user, or all users if admin."""

-    if user_has_role(u'admin'):
+    user = pillar.auth.get_current_user()
+    if user.has_cap('admin'):
        return

-    current_user = g.get('current_user')
-    if not current_user:
-        raise Forbidden()
+    if user.is_anonymous:
+        raise wz_exceptions.Forbidden()

-    if str(lookup['_id']) != str(current_user['user_id']):
-        raise Forbidden()
+    if str(lookup['_id']) != str(user.user_id):
+        raise wz_exceptions.Forbidden()


def after_fetching_user(user):
@@ -90,19 +131,18 @@ def after_fetching_user(user):
    # custom end-points.
    user.pop('auth', None)

-    current_user = g.get('current_user')
-    current_user_id = current_user['user_id'] if current_user else None
+    current_user = pillar.auth.get_current_user()

    # Admins can do anything and get everything, except the 'auth' block.
-    if user_has_role(u'admin'):
+    if current_user.has_cap('admin'):
        return

    # Only allow full access to the current user.
-    if str(user['_id']) == str(current_user_id):
+    if current_user.is_authenticated and str(user['_id']) == str(current_user.user_id):
        return

    # Remove all fields except public ones.
-    public_fields = {'full_name', 'username', 'email'}
+    public_fields = {'full_name', 'username', 'email', 'extension_props_public', 'badges'}
    for field in list(user.keys()):
        if field not in public_fields:
            del user[field]
@@ -121,3 +161,46 @@ def post_GET_user(request, payload):
    # json_data['computed_permissions'] = \
    #     compute_permissions(json_data['_id'], app.data.driver)
    payload.data = json.dumps(json_data)


def grant_org_roles(user_doc):
    """Handle any organization this user may be part of."""

    email = user_doc.get('email')
    if not email:
        log.info('Unable to check new user for organization membership, no email address: %r',
                 user_doc)
        return

    org_roles = current_app.org_manager.unknown_member_roles(email)
    if not org_roles:
        log.debug('No organization roles for user %r', email)
        return

    log.info('Granting organization roles %r to user %r', org_roles, email)
    new_roles = set(user_doc.get('roles') or []) | org_roles
    user_doc['roles'] = list(new_roles)


def before_inserting_users(user_docs):
    """Grants organization roles to the created users."""

    for user_doc in user_docs:
        grant_org_roles(user_doc)


def after_inserting_users(user_docs):
    """Moves the users from the unknown_members to the members list of their organizations."""

    om = current_app.org_manager
    for user_doc in user_docs:
        user_id = user_doc.get('_id')
        user_email = user_doc.get('email')

        if not user_id or not user_email:
            # Missing emails can happen when creating a service account, it's fine.
            log.info('User created with _id=%r and email=%r, unable to check organizations',
                     user_id, user_email)
            continue

        om.make_member_known(user_id, user_email)

@@ -1,9 +1,13 @@
import logging

from eve.methods.get import get
-from flask import g, Blueprint
-from pillar.api.utils import jsonify
+from flask import Blueprint, request
+import werkzeug.exceptions as wz_exceptions

+from pillar import current_app
+from pillar.api import utils
from pillar.api.utils.authorization import require_login
+from pillar.auth import current_user

log = logging.getLogger(__name__)
blueprint_api = Blueprint('users_api', __name__)
@@ -12,8 +16,129 @@ blueprint_api = Blueprint('users_api', __name__)
@blueprint_api.route('/me')
@require_login()
def my_info():
-    eve_resp, _, _, status, _ = get('users', {'_id': g.current_user['user_id']})
-    resp = jsonify(eve_resp['_items'][0], status=status)
+    eve_resp, _, _, status, _ = get('users', {'_id': current_user.user_id})
+    resp = utils.jsonify(eve_resp['_items'][0], status=status)
    return resp


@blueprint_api.route('/video/<video_id>/progress')
@require_login()
def get_video_progress(video_id: str):
    """Return video progress information.

    Either a `204 No Content` is returned (no information stored),
    or a `200 Ok` with JSON from Eve's 'users' schema, from the key
    video.view_progress.<video_id>.
    """

    # Validation of the video ID; raises a BadRequest when it's not an ObjectID.
    # This isn't strictly necessary, but it makes this function behave symmetrically
    # to the set_video_progress() function.
    utils.str2id(video_id)

    users_coll = current_app.db('users')
    user_doc = users_coll.find_one(current_user.user_id, projection={'nodes.view_progress': True})
    try:
        progress = user_doc['nodes']['view_progress'][video_id]
    except KeyError:
        return '', 204
    if not progress:
        return '', 204

    return utils.jsonify(progress)


@blueprint_api.route('/video/<video_id>/progress', methods=['POST'])
@require_login()
def set_video_progress(video_id: str):
    """Save progress information about a certain video.

    Expected parameters:
    - progress_in_sec: float number of seconds
    - progress_in_perc: integer percentage of video watched (interval [0-100])
    """
    my_log = log.getChild('set_video_progress')
    my_log.debug('Setting video progress for user %r video %r', current_user.user_id, video_id)

    # Constructing this response requires an active app, and thus can't be done on module load.
    no_video_response = utils.jsonify({'_message': 'No such video'}, status=404)

    try:
        progress_in_sec = float(request.form['progress_in_sec'])
        progress_in_perc = int(request.form['progress_in_perc'])
    except KeyError as ex:
        my_log.debug('Missing POST field in request: %s', ex)
        raise wz_exceptions.BadRequest('missing a form field')
    except ValueError as ex:
        my_log.debug('Invalid value for POST field in request: %s', ex)
        raise wz_exceptions.BadRequest(f'Invalid value for field: {ex}')

    users_coll = current_app.db('users')
    nodes_coll = current_app.db('nodes')

    # First check whether this is actually an existing video
    video_oid = utils.str2id(video_id)
    video_doc = nodes_coll.find_one(video_oid, projection={
        'node_type': True,
        'properties.content_type': True,
        'properties.file': True,
    })
    if not video_doc:
        my_log.debug('Node %r not found, unable to set progress for user %r',
                     video_oid, current_user.user_id)
        return no_video_response

    try:
        is_video = (video_doc['node_type'] == 'asset'
                    and video_doc['properties']['content_type'] == 'video')
    except KeyError:
        is_video = False

    if not is_video:
        my_log.info('Node %r is not a video, unable to set progress for user %r',
                    video_oid, current_user.user_id)
        # There is no video found at this URL, so act as if it doesn't even exist.
        return no_video_response

    # Compute the progress
    percent = min(100, max(0, progress_in_perc))
    progress = {
        'progress_in_sec': progress_in_sec,
        'progress_in_percent': percent,
        'last_watched': utils.utcnow(),
    }

    # After watching a certain percentage of the video, we consider it 'done'
    #
    #                   Total     Credit start  Total  Credit  Percent
    #                   HH:MM:SS  HH:MM:SS      sec    sec     of duration
    # Sintel            00:14:48  00:12:24      888    744     83.78%
    # Tears of Steel    00:12:14  00:09:49      734    589     80.25%
    # Cosmos Laundro    00:12:10  00:10:05      730    605     82.88%
    # Agent 327         00:03:51  00:03:26      231    206     89.18%
    # Caminandes 3      00:02:30  00:02:18      150    138     92.00%
    # Glass Half        00:03:13  00:02:52      193    172     89.12%
    # Big Buck Bunny    00:09:56  00:08:11      596    491     82.38%
    # Elephant’s Drea   00:10:54  00:09:25      654    565     86.39%
    #
    #                                      Median              85.09%
    #                                      Average             85.75%
    #
    # For training videos, marking as done at 85% of the video may be a bit
    # early, since those probably won't have (long) credits. This is why we
    # stick to 90% here.
    if percent >= 90:
        progress['done'] = True

    # Setting each property individually prevents us from overwriting any
    # existing {done: true} fields.
    updates = {f'nodes.view_progress.{video_id}.{k}': v
               for k, v in progress.items()}
    result = users_coll.update_one({'_id': current_user.user_id},
                                   {'$set': updates})

    if result.matched_count == 0:
        my_log.error('Current user %r could not be updated', current_user.user_id)
        raise wz_exceptions.InternalServerError('Unable to find logged-in user')

    return '', 204

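A hypothetical client round trip against these two endpoints (base URL, mount prefix and IDs are assumptions, not taken from this change):

    import requests

    base = 'http://localhost:5000/api/users'          # assumed mount point
    vid = '5812bf5dc2b4ef0008e2a3d7'                  # made-up video node ID
    s = requests.Session()                            # assumed to carry valid auth

    s.post(f'{base}/video/{vid}/progress',
           data={'progress_in_sec': 540.0, 'progress_in_perc': 91})   # -> 204
    print(s.get(f'{base}/video/{vid}/progress').json())
    # -> {'progress_in_sec': 540.0, 'progress_in_percent': 91, 'done': True, ...}
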
@@ -1,13 +1,16 @@
+import base64
import copy
-import hashlib
-import json
-import urllib
-
import datetime
import functools
+import hashlib
+import json
import logging
+import random
+import typing
+import urllib.request, urllib.parse, urllib.error

import bson.objectid
import bson.tz_util
from eve import RFC1123_DATE_FORMAT
from flask import current_app
from werkzeug import exceptions as wz_exceptions
@@ -54,6 +57,18 @@ def remove_private_keys(document):
    return doc_copy


def pretty_duration(seconds):
    if seconds is None:
        return ''
    seconds = round(seconds)
    hours, seconds = divmod(seconds, 3600)
    minutes, seconds = divmod(seconds, 60)
    if hours > 0:
        return f'{hours:02}:{minutes:02}:{seconds:02}'
    else:
        return f'{minutes:02}:{seconds:02}'
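
Spot-checking pretty_duration() (the values follow directly from the divmod arithmetic above):

    pretty_duration(75)      # -> '01:15'
    pretty_duration(3671.4)  # -> '01:01:11' (rounds to 3671 s first)
    pretty_duration(None)    # -> ''
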

class PillarJSONEncoder(json.JSONEncoder):
    """JSON encoder with support for Pillar resources."""

@@ -61,6 +76,9 @@ class PillarJSONEncoder(json.JSONEncoder):
        if isinstance(obj, datetime.datetime):
            return obj.strftime(RFC1123_DATE_FORMAT)

        if isinstance(obj, datetime.timedelta):
            return pretty_duration(obj.total_seconds())

        if isinstance(obj, bson.ObjectId):
            return str(obj)

@@ -78,7 +96,7 @@ def dumps(mongo_doc, **kwargs):

def jsonify(mongo_doc, status=200, headers=None):
    """JSonifies a Mongo document into a Flask response object."""

    return current_app.response_class(dumps(mongo_doc),
                                      mimetype='application/json',
                                      status=status,
@@ -103,10 +121,11 @@ def skip_when_testing(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if current_app.config['TESTING']:
-            log.debug('Skipping call to %s(...) due to TESTING', func.func_name)
+            log.debug('Skipping call to %s(...) due to TESTING', func.__name__)
            return None

        return func(*args, **kwargs)

    return wrapper


@@ -122,11 +141,9 @@ def project_get_node_type(project_document, node_type_node_name):
                 if node_type['name'] == node_type_node_name), None)


-def str2id(document_id):
+def str2id(document_id: str) -> bson.ObjectId:
    """Returns the document ID as ObjectID, or raises a BadRequest exception.

-    :type document_id: str
-    :rtype: bson.ObjectId
    :raises: wz_exceptions.BadRequest
    """

@@ -136,36 +153,41 @@ def str2id(document_id):

    try:
        return bson.ObjectId(document_id)
-    except bson.objectid.InvalidId:
+    except (bson.objectid.InvalidId, TypeError):
        log.debug('str2id(%r): Invalid Object ID', document_id)
        raise wz_exceptions.BadRequest('Invalid object ID %r' % document_id)


-def gravatar(email, size=64):
+def gravatar(email: str, size=64) -> typing.Optional[str]:
    if email is None:
        return None

    parameters = {'s': str(size), 'd': 'mm'}
    return "https://www.gravatar.com/avatar/" + \
-           hashlib.md5(str(email)).hexdigest() + \
-           "?" + urllib.urlencode(parameters)
+           hashlib.md5(email.encode()).hexdigest() + \
+           "?" + urllib.parse.urlencode(parameters)
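
The resulting URL shape, for reference ('s' is the pixel size, 'd=mm' the "mystery man" fallback):

    gravatar('jane@example.com', size=128)
    # -> 'https://www.gravatar.com/avatar/<32-char md5 hex of the email>?s=128&d=mm'
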


class MetaFalsey(type):
-    def __nonzero__(cls):
+    def __bool__(cls):
        return False
-    __bool__ = __nonzero__  # for Python 3


-class DoesNotExist(object):
+class DoesNotExistMeta(MetaFalsey):
+    def __repr__(cls) -> str:
+        return 'DoesNotExist'
+
+
+class DoesNotExist(object, metaclass=DoesNotExistMeta):
    """Returned as value by doc_diff if a value does not exist."""
-    __metaclass__ = MetaFalsey


-def doc_diff(doc1, doc2, falsey_is_equal=True):
+def doc_diff(doc1, doc2, *, falsey_is_equal=True, superkey: str = None):
    """Generator, yields differences between documents.

    Yields changes as (key, value in doc1, value in doc2) tuples, where
    the value can also be the DoesNotExist class. Does not report changed
-    private keys (i.e. starting with underscores).
+    private keys (i.e. the standard Eve keys starting with underscores).

    Sub-documents (i.e. dicts) are recursed, and dot notation is used
    for the keys if changes are found.
@@ -174,22 +196,74 @@ def doc_diff(doc1, doc2, falsey_is_equal=True):
    function won't report differences between DoesNotExist, False, '', and 0.
    """

-    for key in set(doc1.keys()).union(set(doc2.keys())):
-        if isinstance(key, basestring) and key[0] == u'_':
-            continue
+    private_keys = {'_id', '_etag', '_deleted', '_updated', '_created'}

-        val1 = doc1.get(key, DoesNotExist)
-        val2 = doc2.get(key, DoesNotExist)
+    def combine_key(some_key):
+        """Combine this key with the superkey.

-        # Only recurse if both values are dicts
-        if isinstance(val1, dict) and isinstance(val2, dict):
-            for subkey, subval1, subval2 in doc_diff(val1, val2):
-                yield '%s.%s' % (key, subkey), subval1, subval2
-            continue
+        Keep the key type the same, unless we have to combine with a superkey.
+        """
+        if not superkey:
+            return some_key
+        if isinstance(some_key, str) and some_key[0] == '[':
+            return f'{superkey}{some_key}'
+        return f'{superkey}.{some_key}'

-        if val1 == val2:
-            continue
-        if falsey_is_equal and bool(val1) == bool(val2) == False:
-            continue
+    if doc1 is doc2:
+        return

-        yield key, val1, val2
+    if falsey_is_equal and not bool(doc1) and not bool(doc2):
+        return

+    if isinstance(doc1, dict) and isinstance(doc2, dict):
+        for key in set(doc1.keys()).union(set(doc2.keys())):
+            if key in private_keys:
+                continue
+
+            val1 = doc1.get(key, DoesNotExist)
+            val2 = doc2.get(key, DoesNotExist)
+
+            yield from doc_diff(val1, val2,
+                                falsey_is_equal=falsey_is_equal,
+                                superkey=combine_key(key))
+        return
+
+    if isinstance(doc1, list) and isinstance(doc2, list):
+        for idx in range(max(len(doc1), len(doc2))):
+            try:
+                item1 = doc1[idx]
+            except IndexError:
+                item1 = DoesNotExist
+            try:
+                item2 = doc2[idx]
+            except IndexError:
+                item2 = DoesNotExist
+
+            subkey = f'[{idx}]'
+            if item1 is DoesNotExist or item2 is DoesNotExist:
+                yield combine_key(subkey), item1, item2
+            else:
+                yield from doc_diff(item1, item2,
+                                    falsey_is_equal=falsey_is_equal,
+                                    superkey=combine_key(subkey))
+        return
+
+    if doc1 != doc2:
+        yield superkey, doc1, doc2
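
Behaviour sketch of the rewritten doc_diff() with made-up documents (key order of the dict diff is set-based, hence the sorted() for a stable result):

    doc1 = {'name': 'Suzanne', 'tags': ['a', 'b'], 'props': {'status': 'published'}}
    doc2 = {'name': 'Suzanne', 'tags': ['a'], 'props': {'status': 'draft'}}

    sorted(doc_diff(doc1, doc2))
    # -> [('props.status', 'published', 'draft'),
    #     ('tags[1]', 'b', DoesNotExist)]
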

def random_etag() -> str:
    """Random string usable as etag."""

    randbytes = random.getrandbits(256).to_bytes(32, 'big')
    return base64.b64encode(randbytes)[:-1].decode()


def utcnow() -> datetime.datetime:
    """Construct timezone-aware 'now' in UTC with millisecond precision."""
    now = datetime.datetime.now(tz=bson.tz_util.utc)

    # MongoDB stores in millisecond precision, so truncate the microseconds.
    # This way the returned datetime can be round-tripped via MongoDB and stay the same.
    trunc_now = now.replace(microsecond=now.microsecond - (now.microsecond % 1000))
    return trunc_now
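
The truncation keeps whole milliseconds, e.g. microsecond=123456 becomes 123456 - (123456 % 1000) = 123000. A quick check:

    import datetime
    import bson.tz_util

    now = datetime.datetime(2018, 5, 4, 12, 0, 0, 123456, tzinfo=bson.tz_util.utc)
    trunc = now.replace(microsecond=now.microsecond - (now.microsecond % 1000))
    assert trunc.microsecond == 123000   # 123.456 ms truncated to 123 ms
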
@@ -1,101 +1,33 @@
import logging

-from bson import ObjectId
-from flask import current_app
-
-from pillar.api.file_storage import generate_link
+from pillar import current_app
from . import skip_when_testing

log = logging.getLogger(__name__)

-INDEX_ALLOWED_USER_ROLES = {'admin', 'subscriber', 'demo'}
-INDEX_ALLOWED_NODE_TYPES = {'asset', 'texture', 'group', 'hdri'}


@skip_when_testing
-def algolia_index_user_save(user):
-    if current_app.algolia_index_users is None:
+def index_user_save(to_index_user: dict):
+    index_users = current_app.algolia_index_users
+    if not index_users:
+        log.debug('No Algolia index defined, so nothing to do.')
        return
-    # Strip unneeded roles
-    if 'roles' in user:
-        roles = set(user['roles']).intersection(INDEX_ALLOWED_USER_ROLES)
-    else:
-        roles = set()
-    if current_app.algolia_index_users:
-        # Create or update Algolia index for the user
-        current_app.algolia_index_users.save_object({
-            'objectID': user['_id'],
-            'full_name': user['full_name'],
-            'username': user['username'],
-            'roles': list(roles),
-            'groups': user['groups'],
-            'email': user['email']
-        })

+    # Create or update Algolia index for the user
+    index_users.save_object(to_index_user)


@skip_when_testing
-def algolia_index_node_save(node):
+def index_node_save(node_to_index):
    if not current_app.algolia_index_nodes:
        return
-    if node['node_type'] not in INDEX_ALLOWED_NODE_TYPES:
-        return
-    # If a nodes does not have status published, do not index
-    if node['properties'].get('status') != 'published':
-        return
-
-    projects_collection = current_app.data.driver.db['projects']
-    project = projects_collection.find_one({'_id': ObjectId(node['project'])})
-
-    users_collection = current_app.data.driver.db['users']
-    user = users_collection.find_one({'_id': ObjectId(node['user'])})
-
-    node_ob = {
-        'objectID': node['_id'],
-        'name': node['name'],
-        'project': {
-            '_id': project['_id'],
-            'name': project['name']
-        },
-        'created': node['_created'],
-        'updated': node['_updated'],
-        'node_type': node['node_type'],
-        'user': {
-            '_id': user['_id'],
-            'full_name': user['full_name']
-        },
-    }
-    if 'description' in node and node['description']:
-        node_ob['description'] = node['description']
-    if 'picture' in node and node['picture']:
-        files_collection = current_app.data.driver.db['files']
-        lookup = {'_id': ObjectId(node['picture'])}
-        picture = files_collection.find_one(lookup)
-        if picture['backend'] == 'gcs':
-            variation_t = next((item for item in picture['variations'] \
-                                if item['size'] == 't'), None)
-            if variation_t:
-                node_ob['picture'] = generate_link(picture['backend'],
-                                                   variation_t['file_path'], project_id=str(picture['project']),
-                                                   is_public=True)
-    # If the node has world permissions, compute the Free permission
-    if 'permissions' in node and 'world' in node['permissions']:
-        if 'GET' in node['permissions']['world']:
-            node_ob['is_free'] = True
-
-    # Append the media key if the node is of node_type 'asset'
-    if node['node_type'] == 'asset':
-        node_ob['media'] = node['properties']['content_type']
-
-    # Add extra properties
-    for prop in ('tags', 'license_notes'):
-        if prop in node['properties']:
-            node_ob[prop] = node['properties'][prop]
-
-    current_app.algolia_index_nodes.save_object(node_ob)
+    current_app.algolia_index_nodes.save_object(node_to_index)


@skip_when_testing
-def algolia_index_node_delete(node):
+def index_node_delete(delete_id):
    if current_app.algolia_index_nodes is None:
        return
-    current_app.algolia_index_nodes.delete_object(node['_id'])
+    current_app.algolia_index_nodes.delete_object(delete_id)

@@ -5,21 +5,27 @@ unique usernames from emails. Calls out to the pillar_server.modules.blender_id
module for Blender ID communication.
"""

-import logging
+import base64
import datetime
+import hmac
+import hashlib
+import logging
+import typing

-from bson import tz_util
-from flask import g
+import bson
+from flask import g, current_app, session
from flask import request
-from flask import current_app
+from werkzeug import exceptions as wz_exceptions

+from pillar.api.utils import remove_private_keys, utcnow

log = logging.getLogger(__name__)

-CLI_USER = {
-    'user_id': 'CLI',
-    'groups': [],
-    'roles': {'admin'},
-}
+# Construction is done when requested, since constructing a UserClass instance
+# requires an application context to look up capabilities. We set the initial
+# value to a not-None singleton to be able to differentiate between
+# g.current_user set to "not logged in" or "uninitialised CLI_USER".
+CLI_USER = ...

def force_cli_user():
@@ -28,11 +34,76 @@ def force_cli_user():
    This is used as a marker to avoid authorization checks and just allow everything.
    """

-    log.warning('Logging in as CLI_USER, circumventing authentication.')
+    global CLI_USER
+
+    from pillar.auth import UserClass
+
+    if CLI_USER is ...:
+        CLI_USER = UserClass.construct('CLI', {
+            '_id': 'CLI',
+            'groups': [],
+            'roles': {'admin'},
+            'email': 'local@nowhere',
+            'username': 'CLI',
+        })
+        log.info('CONSTRUCTED CLI USER %s of type %s', id(CLI_USER), id(type(CLI_USER)))
+
+    log.info('Logging in as CLI_USER (%s) of type %s, circumventing authentication.',
+             id(CLI_USER), id(type(CLI_USER)))
    g.current_user = CLI_USER


-def validate_token():
+def find_user_in_db(user_info: dict, provider='blender-id') -> dict:
+    """Find the user in our database, creating/updating the returned document where needed.
+
+    First, search for the user using its id from the provider, then try to look the user up via the
+    email address.
+
+    Does NOT update the user in the database.
+
+    :param user_info: Information (id, email and full_name) from the auth provider
+    :param provider: One of the supported providers
+    """
+
+    users = current_app.data.driver.db['users']
+
+    user_id = user_info['id']
+    query = {'$or': [
+        {'auth': {'$elemMatch': {
+            'user_id': str(user_id),
+            'provider': provider}}},
+        {'email': user_info['email']},
+    ]}
+    log.debug('Querying: %s', query)
+    db_user = users.find_one(query)
+
+    if db_user:
+        log.debug('User with %s id %s already in our database, updating with info from %s',
+                  provider, user_id, provider)
+        db_user['email'] = user_info['email']
+
+        # Find out if an auth entry for the current provider already exists
+        provider_entry = [element for element in db_user['auth'] if element['provider'] == provider]
+        if not provider_entry:
+            db_user['auth'].append({
+                'provider': provider,
+                'user_id': str(user_id),
+                'token': ''})
+    else:
+        log.debug('User %r not yet in our database, create a new one.', user_id)
+        db_user = create_new_user_document(
+            email=user_info['email'],
+            user_id=user_id,
+            username=user_info['full_name'],
+            provider=provider)
+        db_user['username'] = make_unique_username(user_info['email'])
+        if not db_user['full_name']:
+            db_user['full_name'] = db_user['username']
+
+    return db_user


def validate_token(*, force=False) -> bool:
    """Validate the token provided in the request and populate the current_user
    flask.g object, so that permissions and access to a resource can be defined
    from it.

@@ -40,26 +111,44 @@ def validate_token():
    When the token is successfully validated, sets `g.current_user` to contain
    the user information, otherwise it is set to None.

    @returns True iff the user is logged in with a valid Blender ID token.
    :param force: don't trust g.current_user and force a re-check.
    :returns: True iff the user is logged in with a valid Blender ID token.
    """

    import pillar.auth

    # Trust a pre-existing g.current_user
    if not force:
        cur = getattr(g, 'current_user', None)
        if cur is not None and cur.is_authenticated:
            log.debug('skipping token check because current user is already set to %s', cur)
            return True

    auth_header = request.headers.get('Authorization') or ''
    if request.authorization:
        token = request.authorization.username
        oauth_subclient = request.authorization.password
    elif auth_header.startswith('Bearer '):
        token = auth_header[7:].strip()
        oauth_subclient = ''
    else:
        # Check the session, the user might be logged in through Flask-Login.
        from pillar import auth

        token = auth.get_blender_id_oauth_token()
        if token and isinstance(token, (tuple, list)):
            token = token[0]
        # The user has a logged-in session; trust only if this request passes a CSRF check.
        # FIXME(Sybren): we should stop saving the token as 'user_id' in the session.
        token = session.get('user_id')
        if token:
            log.debug('skipping token check because current user already has a session')
            current_app.csrf.protect()
        else:
            token = pillar.auth.get_blender_id_oauth_token()
        oauth_subclient = None

    if not token:
        # If no authorization headers are provided, we are getting a request
        # from a non logged in user. Proceed accordingly.
        log.debug('No authentication headers, so not logged in.')
        g.current_user = None
        g.current_user = pillar.auth.AnonymousUser()
        return False

    return validate_this_token(token, oauth_subclient) is not None
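A small usage sketch of the new `force` parameter; the endpoint below is hypothetical and not part of this changeset:

# Hypothetical endpoint that mutates the user's roles and then re-validates,
# since g.current_user would otherwise be trusted as-is:
def grant_role_and_refresh():
    ...  # mutate the user document here
    validate_token(force=True)  # ignore the cached g.current_user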
@@ -72,13 +161,15 @@ def validate_this_token(token, oauth_subclient=None):
    :rtype: dict
    """

    from pillar.auth import UserClass, AnonymousUser, user_authenticated

    g.current_user = None
    _delete_expired_tokens()

    # Check the users to see if there is one with this Blender ID token.
    db_token = find_token(token, oauth_subclient)
    if not db_token:
        log.debug('Token %s not found in our local database.', token)
        log.debug('Token %r not found in our local database.', token)

        # If no valid token is found in our local database, we issue a new
        # request to the Blender ID server to verify the validity of the token
@@ -94,37 +185,70 @@ def validate_this_token(token, oauth_subclient=None):

    if db_user is None:
        log.debug('Validation failed, user not logged in')
        g.current_user = AnonymousUser()
        return None

    g.current_user = {'user_id': db_user['_id'],
                      'groups': db_user['groups'],
                      'roles': set(db_user.get('roles', []))}
    g.current_user = UserClass.construct(token, db_user)
    user_authenticated.send(g.current_user)

    return db_user


def remove_token(token: str):
    """Removes the token from the database."""

    tokens_coll = current_app.db('tokens')
    token_hashed = hash_auth_token(token)

    # TODO: remove matching on hashed tokens once all hashed tokens have expired.
    lookup = {'$or': [{'token': token}, {'token_hashed': token_hashed}]}
    del_res = tokens_coll.delete_many(lookup)
    log.debug('Removed token %r, matched %d documents', token, del_res.deleted_count)


def find_token(token, is_subclient_token=False, **extra_filters):
    """Returns the token document, or None if it doesn't exist (or is expired)."""

    tokens_collection = current_app.data.driver.db['tokens']
    tokens_coll = current_app.db('tokens')
    token_hashed = hash_auth_token(token)

    # TODO: remove expired tokens from collection.
    lookup = {'token': token,
    # TODO: remove matching on hashed tokens once all hashed tokens have expired.
    lookup = {'$or': [{'token': token}, {'token_hashed': token_hashed}],
              'is_subclient_token': True if is_subclient_token else {'$in': [False, None]},
              'expire_time': {"$gt": datetime.datetime.now(tz=tz_util.utc)}}
              'expire_time': {"$gt": utcnow()}}
    lookup.update(extra_filters)

    db_token = tokens_collection.find_one(lookup)
    db_token = tokens_coll.find_one(lookup)
    return db_token
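Since `find_token` forwards its keyword arguments into the MongoDB lookup, callers can narrow the match; the extra filter below is an illustrative assumption, not from this changeset:

# Look up a non-subclient token, additionally constrained to one user:
db_token = find_token(token, False, user=user_oid)  # user_oid: a bson.ObjectId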
def store_token(user_id, token, token_expiry, oauth_subclient_id=False):
def hash_auth_token(token: str) -> str:
    """Returns the hashed authentication token.

    The token is hashed using HMAC and then base64-encoded.
    """

    hmac_key = current_app.config['AUTH_TOKEN_HMAC_KEY']
    token_hmac = hmac.new(hmac_key, msg=token.encode('utf8'), digestmod=hashlib.sha256)
    digest = token_hmac.digest()

    return base64.b64encode(digest).decode('ascii')

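A minimal standalone sketch of the same scheme (HMAC-SHA256, then base64); the key value here is a made-up stand-in for AUTH_TOKEN_HMAC_KEY:

import base64
import hashlib
import hmac

hmac_key = b'example-key'  # assumption: the configured key is bytes
digest = hmac.new(hmac_key, msg='my-token'.encode('utf8'),
                  digestmod=hashlib.sha256).digest()
print(base64.b64encode(digest).decode('ascii'))  # deterministic for a given key+token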
def store_token(user_id,
                token: str,
                token_expiry,
                oauth_subclient_id=False,
                *,
                org_roles: typing.Set[str] = frozenset(),
                oauth_scopes: typing.Optional[typing.List[str]] = None,
                ):
    """Stores an authentication token.

    :returns: the token document from MongoDB
    """

    assert isinstance(token, (str, unicode)), 'token must be string type, not %r' % type(token)
    assert isinstance(token, str), 'token must be string type, not %r' % type(token)

    token_data = {
        'user': user_id,
@@ -133,6 +257,10 @@ def store_token(user_id, token, token_expiry, oauth_subclient_id=False):
    }
    if oauth_subclient_id:
        token_data['is_subclient_token'] = True
    if org_roles:
        token_data['org_roles'] = sorted(org_roles)
    if oauth_scopes:
        token_data['oauth_scopes'] = oauth_scopes

    r, _, _, status = current_app.post_internal('tokens', token_data)
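A hedged usage sketch of the extended signature; the role and scope names are illustrative:

import datetime

expiry = utcnow() + datetime.timedelta(days=30)  # utcnow() as used above
token_doc = store_token(user_id, token, expiry,
                        org_roles={'org-subscriber'},  # illustrative role name
                        oauth_scopes=['email'])        # illustrative scope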
@@ -160,13 +288,13 @@ def create_new_user(email, username, user_id):


def create_new_user_document(email, user_id, username, provider='blender-id',
                             token=''):
                             token='', *, full_name=''):
    """Creates a new user document, without storing it in MongoDB. The token
    parameter is a password in case provider is "local".
    """

    user_data = {
        'full_name': username,
        'full_name': full_name or username,
        'username': username,
        'email': email,
        'auth': [{
@@ -219,22 +347,99 @@ def _delete_expired_tokens():

    token_coll = current_app.data.driver.db['tokens']

    now = datetime.datetime.now(tz_util.utc)
    expiry_date = now - datetime.timedelta(days=7)

    expiry_date = utcnow() - datetime.timedelta(days=7)
    result = token_coll.delete_many({'expire_time': {"$lt": expiry_date}})
    # log.debug('Deleted %i expired authentication tokens', result.deleted_count)


def current_user_id():
def current_user_id() -> typing.Optional[bson.ObjectId]:
    """None-safe fetching of user ID. Can return None itself, though."""

    current_user = g.get('current_user') or {}
    return current_user.get('user_id')
    user = current_user()
    return user.user_id


def current_user():
    """Returns the current user, or an AnonymousUser if not logged in.

    :rtype: pillar.auth.UserClass
    """

    import pillar.auth

    user: pillar.auth.UserClass = g.get('current_user')
    if user is None:
        return pillar.auth.AnonymousUser()

    return user

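A minimal sketch of how a view can use these helpers; the view function itself is hypothetical:

def view_profile():  # hypothetical view
    user = current_user()  # always a UserClass or AnonymousUser instance
    if user.is_anonymous:
        return 'not logged in', 403
    return f'hello {user.user_id}'  # same value current_user_id() returns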
def setup_app(app):
    @app.before_request
    def validate_token_at_each_request():
        validate_token()
        return None


def upsert_user(db_user):
    """Inserts/updates the user in MongoDB.

    Retries a few times when there are uniqueness issues in the username.

    :returns: the user's database ID and the status of the PUT/POST.
        The status is 201 on insert, and 200 on update.
    :type: (ObjectId, int)
    """

    if 'subscriber' in db_user.get('groups', []):
        log.error('Non-ObjectID string found in user.groups: %s', db_user)
        raise wz_exceptions.InternalServerError(
            'Non-ObjectID string found in user.groups: %s' % db_user)

    if not db_user['full_name']:
        # Blender ID doesn't need a full name, but we do.
        db_user['full_name'] = db_user['username']

    r = {}
    for retry in range(5):
        if '_id' in db_user:
            # Update the existing user
            attempted_eve_method = 'PUT'
            db_id = db_user['_id']
            r, _, _, status = current_app.put_internal('users', remove_private_keys(db_user),
                                                       _id=db_id)
            if status == 422:
                log.error('Status %i trying to PUT user %s with values %s, should not happen! %s',
                          status, db_id, remove_private_keys(db_user), r)
        else:
            # Create a new user, retry for non-unique usernames.
            attempted_eve_method = 'POST'
            r, _, _, status = current_app.post_internal('users', db_user)

            if status not in {200, 201}:
                log.error('Status %i trying to create user with values %s: %s',
                          status, db_user, r)
                raise wz_exceptions.InternalServerError()

            db_id = r['_id']
            db_user.update(r)  # update with database/eve-generated fields.

        if status == 422:
            # Probably non-unique username, so retry a few times with different usernames.
            log.info('Error creating new user: %s', r)
            username_issue = r.get('_issues', {}).get('username', '')
            if 'not unique' in username_issue:
                # Retry
                db_user['username'] = make_unique_username(db_user['email'])
                continue

        # Saving was successful, or at least didn't break on a non-unique username.
        break
    else:
        log.error('Unable to create new user %s: %s', db_user, r)
        raise wz_exceptions.InternalServerError()

    if status not in (200, 201):
        log.error('internal response from %s to Eve: %r %r', attempted_eve_method, status, r)
        raise wz_exceptions.InternalServerError()

    return db_id, status
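A usage sketch; the document fields below are made-up fixtures:

db_user = create_new_user_document(email='jane@example.com', user_id=bid_user_id,
                                   username='jane')  # bid_user_id from Blender ID
db_id, status = upsert_user(db_user)  # 201 on insert, 200 on update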
 
@@ -1,5 +1,6 @@
import logging
import functools
import typing

from bson import ObjectId
from flask import g
@@ -12,8 +13,9 @@ CHECK_PERMISSIONS_IMPLEMENTED_FOR = {'projects', 'nodes', 'flamenco_jobs'}
log = logging.getLogger(__name__)


def check_permissions(collection_name, resource, method, append_allowed_methods=False,
                      check_node_type=None):
def check_permissions(collection_name: str, resource: dict, method: str,
                      append_allowed_methods=False,
                      check_node_type: typing.Optional[str] = None):
    """Check user permissions to access a node. We look up node permissions from
    world to groups to users and match them with the computed user permissions.
    If there is no match, we raise 403.
@@ -27,6 +29,12 @@ def check_permissions(collection_name, resource, method, append_allowed_methods=
    :param check_node_type: node type to check. Only valid when collection_name='projects'.
    :type check_node_type: str
    """
    from pillar.auth import get_current_user
    from .authentication import CLI_USER

    if get_current_user() is CLI_USER:
        log.debug('Short-circuiting check_permissions() for CLI user')
        return

    if not has_permissions(collection_name, resource, method, append_allowed_methods,
                           check_node_type):
@@ -45,6 +53,8 @@ def compute_allowed_methods(collection_name, resource, check_node_type=None):
    :rtype: set
    """

    import pillar.auth

    # Check some input values.
    if collection_name not in CHECK_PERMISSIONS_IMPLEMENTED_FOR:
        raise ValueError('compute_allowed_methods only implemented for %s, not for %s',
@@ -62,18 +72,18 @@ def compute_allowed_methods(collection_name, resource, check_node_type=None):

    # Accumulate allowed methods from the user, group and world level.
    allowed_methods = set()
    current_user = getattr(g, 'current_user', None)
    user = pillar.auth.get_current_user()

    if current_user:
        user_is_admin = is_admin(current_user)
    if user.is_authenticated:
        user_is_admin = is_admin(user)

        # If the user is authenticated, proceed to compare the group permissions
        for permission in computed_permissions.get('groups', ()):
            if user_is_admin or permission['group'] in current_user['groups']:
            if user_is_admin or permission['group'] in user.group_ids:
                allowed_methods.update(permission['methods'])

        for permission in computed_permissions.get('users', ()):
            if user_is_admin or current_user['user_id'] == permission['user']:
            if user_is_admin or user.user_id == permission['user']:
                allowed_methods.update(permission['methods'])

    # Check if the node is public or private. This must be set for non logged
@@ -85,8 +95,9 @@ def compute_allowed_methods(collection_name, resource, check_node_type=None):
    return allowed_methods


def has_permissions(collection_name, resource, method, append_allowed_methods=False,
                    check_node_type=None):
def has_permissions(collection_name: str, resource: dict, method: str,
                    append_allowed_methods=False,
                    check_node_type: typing.Optional[str] = None):
    """Check user permissions to access a node. We look up node permissions from
    world to groups to users and match them with the computed user permissions.

@@ -214,6 +225,8 @@ def merge_permissions(*args):
    :returns: combined list of permissions.
    """

    from pillar.auth import current_user

    if not args:
        return {}

@@ -235,25 +248,35 @@ def merge_permissions(*args):
        from0 = args[0].get(plural_name, [])
        from1 = args[1].get(plural_name, [])

        asdict0 = {permission[field_name]: permission['methods'] for permission in from0}
        asdict1 = {permission[field_name]: permission['methods'] for permission in from1}
        try:
            asdict0 = {permission[field_name]: permission['methods'] for permission in from0}
        except KeyError:
            log.exception('KeyError creating asdict0 for %r permissions; user=%s; args[0]=%r',
                          field_name, current_user.user_id, args[0])
            asdict0 = {}
        try:
            asdict1 = {permission[field_name]: permission['methods'] for permission in from1}
        except KeyError:
            log.exception('KeyError creating asdict1 for %r permissions; user=%s; args[1]=%r',
                          field_name, current_user.user_id, args[1])
            asdict1 = {}

        keys = set(asdict0.keys() + asdict1.keys())
        keys = set(asdict0.keys()).union(set(asdict1.keys()))
        for key in maybe_sorted(keys):
            methods0 = asdict0.get(key, [])
            methods1 = asdict1.get(key, [])
            methods = maybe_sorted(set(methods0).union(set(methods1)))
            effective.setdefault(plural_name, []).append({field_name: key, u'methods': methods})
            effective.setdefault(plural_name, []).append({field_name: key, 'methods': methods})

    merge(u'user')
    merge(u'group')
    merge('user')
    merge('group')

    # Gather permissions for world
    world0 = args[0].get('world', [])
    world1 = args[1].get('world', [])
    world_methods = set(world0).union(set(world1))
    if world_methods:
        effective[u'world'] = maybe_sorted(world_methods)
        effective['world'] = maybe_sorted(world_methods)

    # Recurse for longer merges
    if len(args) > 2:
@@ -262,39 +285,83 @@ def merge_permissions(*args):
    return effective

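A hedged, worked sketch of the merge semantics; the permission dicts here are made-up fixtures, not real project data:

a = {'users': [{'user': 'u1', 'methods': ['GET']}], 'world': ['GET']}
b = {'users': [{'user': 'u1', 'methods': ['PUT']}], 'world': []}
merged = merge_permissions(a, b)
# Expected shape: u1's methods become the union {'GET', 'PUT'}, and
# 'world' stays ['GET'] since the union of world methods is non-empty.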
def require_login(require_roles=set(),
                  require_all=False):
def require_login(*, require_roles=set(),
                  require_cap='',
                  require_all=False,
                  redirect_to_login=False,
                  error_view=None):
    """Decorator that enforces users to authenticate.

    Optionally only allows access to users with a certain role.
    Optionally only allows access to users with a certain role and/or capability.

    Either check on roles or on a capability, but never on both. There is no
    require_all check for capabilities; if you need to check for multiple
    capabilities at once, it's a sign that you need to add another capability
    and give it to everybody that needs it.

    :param require_roles: set of roles.
    :param require_cap: a capability.
    :param require_all:
        When False (the default): if the user's roles have a
        non-empty intersection with the given roles, access is granted.
        When True: require the user to have all given roles before access is
        granted.
    :param redirect_to_login: Determines the behaviour when the user is not
        logged in. When False (the default), a 403 Forbidden response is
        returned; this is suitable for API calls. When True, the user is
        redirected to the login page; this is suitable for user-facing web
        requests, and mimics the flask_login behaviour.
    :param error_view: Callable that returns a Flask response object. This is
        sent back to the client instead of the default 403 Forbidden.
    """

    from flask import request, redirect, url_for, Response

    if not isinstance(require_roles, set):
        raise TypeError('require_roles param should be a set, but is a %r' % type(require_roles))
        raise TypeError(f'require_roles param should be a set, but is {type(require_roles)!r}')

    if not isinstance(require_cap, str):
        raise TypeError(f'require_cap param should be a str, but is {type(require_cap)!r}')

    if require_roles and require_cap:
        raise ValueError('either use require_roles or require_cap, but not both')

    if require_all and not require_roles:
        raise ValueError('require_login(require_all=True) cannot be used with empty require_roles.')

    def render_error() -> Response:
        if error_view is None:
            abort(403)
        resp: Response = error_view()
        resp.status_code = 403
        return resp

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            if not user_matches_roles(require_roles, require_all):
                if g.current_user is None:
                    # We don't need to log at a higher level, as this is very common.
                    # Many browsers first try to see whether authentication is needed
                    # at all, before sending the password.
                    log.debug('Unauthenticated acces to %s attempted.', func)
                else:
                    log.warning('User %s is authenticated, but does not have required roles %s to '
                                'access %s', g.current_user['user_id'], require_roles, func)
                abort(403)
            import pillar.auth

            current_user = pillar.auth.get_current_user()
            if current_user.is_anonymous:
                # We don't need to log at a higher level, as this is very common.
                # Many browsers first try to see whether authentication is needed
                # at all, before sending the password.
                log.debug('Unauthenticated access to %s attempted.', func)
                if redirect_to_login:
                    # Redirect using a 303 See Other, since even a POST
                    # request should cause a GET on the login page.
                    return redirect(url_for('users.login', next=request.url), 303)
                return render_error()

            if require_roles and not current_user.matches_roles(require_roles, require_all):
                log.info('User %s is authenticated, but does not have required roles %s to '
                         'access %s', current_user.user_id, require_roles, func)
                return render_error()

            if require_cap and not current_user.has_cap(require_cap):
                log.info('User %s is authenticated, but does not have required capability %s to '
                         'access %s', current_user.user_id, require_cap, func)
                return render_error()

            return func(*args, **kwargs)

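A minimal usage sketch of the decorator, assuming a Flask blueprint named 'example' and a hypothetical 'subscriber' capability, neither of which is taken from this changeset:

@example.route('/premium')
@require_login(require_cap='subscriber', redirect_to_login=True)
def premium_page():
    return 'only for subscribers'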
@@ -337,14 +404,36 @@ def ab_testing(require_roles=set(),
def user_has_role(role, user=None):
    """Returns True iff the user is logged in and has the given role."""

    if user is None:
        user = g.get('current_user')
    import pillar.auth

    if user is None:
        user = pillar.auth.get_current_user()
        if user is not None and not isinstance(user, pillar.auth.UserClass):
            raise TypeError(f'pillar.auth.current_user should be instance of UserClass, '
                            f'not {type(user)}')
    elif not isinstance(user, pillar.auth.UserClass):
        raise TypeError(f'user should be instance of UserClass, not {type(user)}')

    if user.is_anonymous:
        return False

    roles = user.get('roles') or ()
    return role in roles
    return user.has_role(role)


def user_has_cap(capability: str, user=None) -> bool:
    """Returns True iff the user is logged in and has the given capability."""

    import pillar.auth

    assert capability

    if user is None:
        user = pillar.auth.get_current_user()

    if not isinstance(user, pillar.auth.UserClass):
        raise TypeError(f'user should be instance of UserClass, not {type(user)}')

    return user.has_cap(capability)

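A quick sketch of the two helpers checking the current user; the role and capability names are illustrative:

if user_has_role('demo'):   # checks the logged-in user by default
    ...
if user_has_cap('admin'):   # capability check, as used by is_admin() below
    ...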
def user_matches_roles(require_roles=set(),
@@ -359,25 +448,16 @@ def user_matches_roles(require_roles=set(),
        returning True.
    """

    if not isinstance(require_roles, set):
        raise TypeError('require_roles param should be a set, but is a %r' % type(require_roles))
    import pillar.auth

    if require_all and not require_roles:
        raise ValueError('require_login(require_all=True) cannot be used with empty require_roles.')
    user = pillar.auth.get_current_user()
    if not isinstance(user, pillar.auth.UserClass):
        raise TypeError(f'user should be instance of UserClass, not {type(user)}')

    current_user = g.get('current_user')

    if current_user is None:
        return False

    intersection = require_roles.intersection(current_user['roles'])
    if require_all:
        return len(intersection) == len(require_roles)

    return not bool(require_roles) or bool(intersection)
    return user.matches_roles(require_roles, require_all)


def is_admin(user):
    """Returns True iff the given user has the admin role."""
    """Returns True iff the given user has the admin capability."""

    return user_has_role(u'admin', user)
    return user_has_cap('admin', user)

@@ -1,5 +1,7 @@
import datetime
from hashlib import md5
import base64

from flask import current_app


@@ -17,19 +19,20 @@ def hash_file_path(file_path, expiry_timestamp=None):
    if current_app.config['CDN_USE_URL_SIGNING']:

        url_signing_key = current_app.config['CDN_URL_SIGNING_KEY']
        hash_string = domain_subfolder + file_path + url_signing_key
        to_hash = domain_subfolder + file_path + url_signing_key

        if not expiry_timestamp:
            expiry_timestamp = datetime.datetime.now() + datetime.timedelta(hours=24)
            expiry_timestamp = expiry_timestamp.strftime('%s')

        hash_string = expiry_timestamp + hash_string
        to_hash = expiry_timestamp + to_hash
        if isinstance(to_hash, str):
            to_hash = to_hash.encode()

        expiry_timestamp = "," + str(expiry_timestamp)

        hashed_file_path = md5(hash_string).digest().encode('base64')[:-1]
        hashed_file_path = hashed_file_path.replace('+', '-')
        hashed_file_path = hashed_file_path.replace('/', '_')
        hashed_file_path = base64.b64encode(md5(to_hash).digest())[:-1].decode()
        hashed_file_path = hashed_file_path.replace('+', '-').replace('/', '_')

        asset_url = asset_url + \
                    '?secure=' + \
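A standalone sketch of the Python-3 signing path shown above; the key and path values are made up, and the exact URL suffix is truncated in this hunk:

import base64
from hashlib import md5

to_hash = ('1700000000' + '/subfolder' + '/path/to/file.mp4' + 'secret-key').encode()
token = base64.b64encode(md5(to_hash).digest())[:-1].decode()
token = token.replace('+', '-').replace('/', '_')  # URL-safe variant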
 
@@ -31,7 +31,10 @@ class Encoder:
        options = dict(notifications=current_app.config['ZENCODER_NOTIFICATIONS_URL'])

        outputs = [{'format': v['format'],
                    'url': os.path.join(storage_base, v['file_path'])}
                    'url': os.path.join(storage_base, v['file_path']),
                    'upscale': False,
                    'size': '{width}x{height}'.format(**v),
                    }
                   for v in src_file['variations']]
        r = current_app.encoding_service_client.job.create(file_input,
                                                           outputs=outputs,
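For one variation, the resulting output entry now carries explicit sizing; the values below are illustrative, not from this changeset:

example_output = {'format': 'mp4',
                  'url': '/storage/base/file-1280x720.mp4',
                  'upscale': False,     # never upscale small sources
                  'size': '1280x720'}   # '{width}x{height}'.format(**variation)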
 
@@ -1,251 +0,0 @@
import os
import time
import datetime
import logging

from bson import ObjectId
from gcloud.storage.client import Client
from gcloud.exceptions import NotFound
from flask import current_app, g
from werkzeug.local import LocalProxy

log = logging.getLogger(__name__)


def get_client():
    """Stores the GCS client on the global Flask object.

    The GCS client is not user-specific anyway.

    :rtype: Client
    """

    _gcs = getattr(g, '_gcs_client', None)
    if _gcs is None:
        _gcs = g._gcs_client = Client()
    return _gcs


# This hides the specifics of how/where we store the GCS client,
# and allows the rest of the code to use 'gcs' as a simple variable
# that does the right thing.
gcs = LocalProxy(get_client)


class GoogleCloudStorageBucket(object):
    """Cloud Storage bucket interface. We create a bucket for every project. In
    the bucket we create first level subdirs as follows:
    - '_' (will contain hashed assets, and stays on top of default listing)
    - 'svn' (svn checkout mirror)
    - 'shared' (any additional folder of static folder that is accessed via a
      node of 'storage' node_type)

    :type bucket_name: string
    :param bucket_name: Name of the bucket.

    :type subdir: string
    :param subdir: The local entry point to browse the bucket.

    """

    def __init__(self, bucket_name, subdir='_/'):
        try:
            self.bucket = gcs.get_bucket(bucket_name)
        except NotFound:
            self.bucket = gcs.bucket(bucket_name)
            # Hardcode the bucket location to EU
            self.bucket.location = 'EU'
            # Optionally enable CORS from * (currently only used for vrview)
            # self.bucket.cors = [
            #     {
            #       "origin": ["*"],
            #       "responseHeader": ["Content-Type"],
            #       "method": ["GET", "HEAD", "DELETE"],
            #       "maxAgeSeconds": 3600
            #     }
            # ]
            self.bucket.create()

        self.subdir = subdir

    def List(self, path=None):
        """Display the content of a subdir in the project bucket. If the path
        points to a file the listing is simply empty.

        :type path: string
        :param path: The relative path to the directory or asset.
        """
        if path and not path.endswith('/'):
            path += '/'
        prefix = os.path.join(self.subdir, path)

        fields_to_return = 'nextPageToken,items(name,size,contentType),prefixes'
        req = self.bucket.list_blobs(fields=fields_to_return, prefix=prefix,
                                     delimiter='/')

        files = []
        for f in req:
            filename = os.path.basename(f.name)
            if filename != '':  # Skip own folder name
                files.append(dict(
                    path=os.path.relpath(f.name, self.subdir),
                    text=filename,
                    type=f.content_type))

        directories = []
        for dir_path in req.prefixes:
            directory_name = os.path.basename(os.path.normpath(dir_path))
            directories.append(dict(
                text=directory_name,
                path=os.path.relpath(dir_path, self.subdir),
                type='group_storage',
                children=True))
            # print os.path.basename(os.path.normpath(path))

        list_dict = dict(
            name=os.path.basename(os.path.normpath(path)),
            type='group_storage',
            children=files + directories
        )

        return list_dict

    def blob_to_dict(self, blob):
        blob.reload()
        expiration = datetime.datetime.now() + datetime.timedelta(days=1)
        expiration = int(time.mktime(expiration.timetuple()))
        return dict(
            updated=blob.updated,
            name=os.path.basename(blob.name),
            size=blob.size,
            content_type=blob.content_type,
            signed_url=blob.generate_signed_url(expiration),
            public_url=blob.public_url)

    def Get(self, path, to_dict=True):
        """Get selected file info if the path matches.

        :type path: string
        :param path: The relative path to the file.
        :type to_dict: bool
        :param to_dict: Return the object as a dictionary.
        """
        path = os.path.join(self.subdir, path)
        blob = self.bucket.blob(path)
        if blob.exists():
            if to_dict:
                return self.blob_to_dict(blob)
            else:
                return blob
        else:
            return None

    def Post(self, full_path, path=None):
        """Create new blob and upload data to it.
        """
        path = path if path else os.path.join('_', os.path.basename(full_path))
        blob = self.bucket.blob(path)
        if blob.exists():
            return None
        blob.upload_from_filename(full_path)
        return blob
        # return self.blob_to_dict(blob) # Has issues with threading

    def Delete(self, path):
        """Delete blob (when removing an asset or replacing a preview)"""

        # We want to get the actual blob to delete
        blob = self.Get(path, to_dict=False)
        try:
            blob.delete()
            return True
        except NotFound:
            return None

    def update_name(self, blob, name):
        """Set the ContentDisposition metadata so that when a file is downloaded
        it has a human-readable name.
        """
        blob.content_disposition = u'attachment; filename="{0}"'.format(name)
        blob.patch()

    def copy_blob(self, blob, to_bucket):
        """Copies the given blob from this bucket to the other bucket.

        Returns the new blob.
        """

        assert isinstance(to_bucket, GoogleCloudStorageBucket)
        return self.bucket.copy_blob(blob, to_bucket.bucket)


def update_file_name(node):
    """Assign to the GCS blob the same name as the asset node. This way when
    downloading an asset we get a human-readable name.
    """

    # Process only files that are not processing
    if node['properties'].get('status', '') == 'processing':
        return

    def _format_name(name, override_ext, size=None, map_type=u''):
        root, _ = os.path.splitext(name)
        size = u'-{}'.format(size) if size else u''
        map_type = u'-{}'.format(map_type) if map_type else u''
        return u'{}{}{}{}'.format(root, size, map_type, override_ext)

    def _update_name(file_id, file_props):
        files_collection = current_app.data.driver.db['files']
        file_doc = files_collection.find_one({'_id': ObjectId(file_id)})

        if file_doc is None or file_doc.get('backend') != 'gcs':
            return

        # For textures -- the map type should be part of the name.
        map_type = file_props.get('map_type', u'')

        storage = GoogleCloudStorageBucket(str(node['project']))
        blob = storage.Get(file_doc['file_path'], to_dict=False)
        if blob is None:
            log.warning('Unable to find blob for file %s in project %s',
                        file_doc['file_path'], file_doc['project'])
            return

        # Pick file extension from original filename
        _, ext = os.path.splitext(file_doc['filename'])
        name = _format_name(node['name'], ext, map_type=map_type)
        storage.update_name(blob, name)

        # Assign the same name to variations
        for v in file_doc.get('variations', []):
            _, override_ext = os.path.splitext(v['file_path'])
            name = _format_name(node['name'], override_ext, v['size'], map_type=map_type)
            blob = storage.Get(v['file_path'], to_dict=False)
            if blob is None:
                log.info('Unable to find blob for file %s in project %s. This can happen if the '
                         'video encoding is still processing.', v['file_path'], node['project'])
                continue
            storage.update_name(blob, name)

    # Currently we search for 'file' and 'files' keys in the object properties.
    # This could become a bit more flexible and rely on a true reference of the
    # file object type from the schema.
    if 'file' in node['properties']:
        _update_name(node['properties']['file'], {})

    if 'files' in node['properties']:
        for file_props in node['properties']['files']:
            _update_name(file_props['file'], file_props)


def copy_to_bucket(file_path, src_project_id, dest_project_id):
    """Copies a file from one bucket to the other."""

    log.info('Copying %s from project bucket %s to %s',
             file_path, src_project_id, dest_project_id)

    src_storage = GoogleCloudStorageBucket(str(src_project_id))
    dest_storage = GoogleCloudStorageBucket(str(dest_project_id))

    blob = src_storage.Get(file_path, to_dict=False)
    src_storage.copy_blob(blob, dest_storage)
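For reference, a sketch of how this removed helper was typically driven; the project ID and paths are illustrative:

storage = GoogleCloudStorageBucket('5872385e1f2a...')  # project ObjectId as string
blob = storage.Get('some/asset.png', to_dict=False)
if blob is not None:
    storage.update_name(blob, 'My Asset.png')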
@@ -1,47 +1,61 @@
import os
import json
import typing

import os
import pathlib
import subprocess

from PIL import Image
from flask import current_app

# Images with these modes will be thumbed to PNG, others to JPEG.
MODES_FOR_PNG = {'RGBA', 'LA'}

def generate_local_thumbnails(name_base, src):

def generate_local_thumbnails(fp_base: str, src: pathlib.Path):
    """Given a source image, use Pillow to generate thumbnails according to the
    application settings.

    :param name_base: the thumbnail will get a field 'name': '{basename}-{thumbsize}.jpg'
    :type name_base: str
    :param fp_base: the thumbnail will get a field
        'file_path': '{fp_base}-{thumbsize}.{ext}'
    :param src: the path of the image to be thumbnailed
    :type src: str
    """

    thumbnail_settings = current_app.config['UPLOADS_LOCAL_STORAGE_THUMBNAILS']
    thumbnails = []

    save_to_base, _ = os.path.splitext(src)
    name_base, _ = os.path.splitext(name_base)
    for size, settings in thumbnail_settings.items():
        im = Image.open(src)
        extra_args = {}

    for size, settings in thumbnail_settings.iteritems():
        dst = '{0}-{1}{2}'.format(save_to_base, size, '.jpg')
        name = '{0}-{1}{2}'.format(name_base, size, '.jpg')
        # If the source image has transparency, save as PNG
        if im.mode in MODES_FOR_PNG:
            suffix = '.png'
            imformat = 'PNG'
        else:
            suffix = '.jpg'
            imformat = 'JPEG'
            extra_args = {'quality': 95}
        dst = src.with_name(f'{src.stem}-{size}{suffix}')

        if settings['crop']:
            resize_and_crop(src, dst, settings['size'])
            width, height = settings['size']
            im = resize_and_crop(im, settings['size'])
        else:
            im = Image.open(src).convert('RGB')
            im.thumbnail(settings['size'])
            im.save(dst, "JPEG")
            width, height = im.size
            im.thumbnail(settings['size'], resample=Image.LANCZOS)
        width, height = im.size

        if imformat == 'JPEG':
            im = im.convert('RGB')
        im.save(dst, format=imformat, optimize=True, **extra_args)

        thumb_info = {'size': size,
                      'file_path': name,
                      'local_path': dst,
                      'length': os.stat(dst).st_size,
                      'file_path': f'{fp_base}-{size}{suffix}',
                      'local_path': str(dst),
                      'length': dst.stat().st_size,
                      'width': width,
                      'height': height,
                      'md5': '',
                      'content_type': 'image/jpeg'}
                      'content_type': f'image/{imformat.lower()}'}

        if size == 't':
            thumb_info['is_public'] = True
@@ -51,63 +65,40 @@ def generate_local_thumbnails(name_base, src):
    return thumbnails

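A hedged sketch of the expected settings shape; the keys are inferred from the loop above, while the concrete sizes are made up (actual defaults live in the application config):

UPLOADS_LOCAL_STORAGE_THUMBNAILS = {
    't': {'size': (90, 90), 'crop': True},    # 't' thumbnails are marked public
    'm': {'size': (320, 240), 'crop': False},
}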
def resize_and_crop(img_path, modified_path, size, crop_type='middle'):
    """
    Resize and crop an image to fit the specified size. Thanks to:
    https://gist.github.com/sigilioso/2957026
def resize_and_crop(img: Image, size: typing.Tuple[int, int]) -> Image:
    """Resize and crop an image to fit the specified size.

    args:
    img_path: path for the image to resize.
    modified_path: path to store the modified image.
    size: `(width, height)` tuple.
    crop_type: can be 'top', 'middle' or 'bottom', depending on this
    value, the image will be cropped getting the 'top/left', 'middle' or
    'bottom/right' of the image to fit the size.
    raises:
    Exception: if it cannot open the file in img_path or there are problems
    saving the image.
    ValueError: if an invalid `crop_type` is provided.
    Thanks to: https://gist.github.com/sigilioso/2957026

    :param img: opened PIL.Image to work on
    :param size: `(width, height)` tuple.
    """
    # If height is higher we resize vertically, if not we resize horizontally
    img = Image.open(img_path).convert('RGB')
    # Get current and desired ratio for the images
    img_ratio = img.size[0] / float(img.size[1])
    ratio = size[0] / float(size[1])
    cur_w, cur_h = img.size  # current
    img_ratio = cur_w / cur_h

    w, h = size  # desired
    ratio = w / h

    # The image is scaled/cropped vertically or horizontally depending on the ratio
    if ratio > img_ratio:
        img = img.resize((size[0], int(round(size[0] * img.size[1] / img.size[0]))),
                         Image.ANTIALIAS)
        # Crop in the top, middle or bottom
        if crop_type == 'top':
            box = (0, 0, img.size[0], size[1])
        elif crop_type == 'middle':
            box = (0, int(round((img.size[1] - size[1]) / 2)), img.size[0],
                   int(round((img.size[1] + size[1]) / 2)))
        elif crop_type == 'bottom':
            box = (0, img.size[1] - size[1], img.size[0], img.size[1])
        else:
            raise ValueError('ERROR: invalid value for crop_type')
        uncropped_h = (w * cur_h) // cur_w
        img = img.resize((w, uncropped_h), Image.ANTIALIAS)
        box = (0, (uncropped_h - h) // 2,
               w, (uncropped_h + h) // 2)
        img = img.crop(box)
    elif ratio < img_ratio:
        img = img.resize((int(round(size[1] * img.size[0] / img.size[1])), size[1]),
                         Image.ANTIALIAS)
        # Crop in the top, middle or bottom
        if crop_type == 'top':
            box = (0, 0, size[0], img.size[1])
        elif crop_type == 'middle':
            box = (int(round((img.size[0] - size[0]) / 2)), 0,
                   int(round((img.size[0] + size[0]) / 2)), img.size[1])
        elif crop_type == 'bottom':
            box = (img.size[0] - size[0], 0, img.size[0], img.size[1])
        else:
            raise ValueError('ERROR: invalid value for crop_type')
        uncropped_w = (h * cur_w) // cur_h
        img = img.resize((uncropped_w, h), Image.ANTIALIAS)
        box = ((uncropped_w - w) // 2, 0,
               (uncropped_w + w) // 2, h)
        img = img.crop(box)
    else:
        img = img.resize((size[0], size[1]),
                         Image.ANTIALIAS)
        img = img.resize((w, h), Image.ANTIALIAS)

    # If the scale is the same, we do not need to crop
    img.save(modified_path, "JPEG")
    return img

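A minimal usage sketch of the new signature; the file names are illustrative:

from PIL import Image

im = Image.open('input.png')
thumb = resize_and_crop(im, (160, 160))  # returns the cropped PIL.Image
thumb.convert('RGB').save('thumb.jpg', 'JPEG')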
def get_video_data(filepath):
@@ -143,7 +134,7 @@ def get_video_data(filepath):
            res_y=video_stream['height'],
        )
        if video_stream['sample_aspect_ratio'] != '1:1':
            print '[warning] Pixel aspect ratio is not square!'
            print('[warning] Pixel aspect ratio is not square!')

    return outdata

@@ -190,14 +181,14 @@ def ffmpeg_encode(src, format, res_y=720):
    dst = os.path.splitext(src)
    dst = "{0}-{1}p.{2}".format(dst[0], res_y, format)
    args.append(dst)
    print "Encoding {0} to {1}".format(src, format)
    print("Encoding {0} to {1}".format(src, format))
    returncode = subprocess.call([current_app.config['BIN_FFMPEG']] + args)
    if returncode == 0:
        print "Successfully encoded {0}".format(dst)
        print("Successfully encoded {0}".format(dst))
    else:
        print "Error during encode"
        print "Code:    {0}".format(returncode)
        print "Command: {0}".format(current_app.config['BIN_FFMPEG'] + " " + " ".join(args))
        print("Error during encode")
        print("Code:    {0}".format(returncode))
        print("Command: {0}".format(current_app.config['BIN_FFMPEG'] + " " + " ".join(args)))
        dst = None
    # return path of the encoded video
    return dst

@@ -27,9 +27,11 @@ def assign_permissions(project, node_types, permission_callback):
        permissions = {}

        for key in ('users', 'groups'):
            perms = proj_perms[key]
            singular = key.rstrip('s')
            perms = proj_perms.get(key)
            if not perms:
                continue

            singular = key.rstrip('s')
            for perm in perms:
                assert isinstance(perm, dict), 'perm should be dict, but is %r' % perm
                ident = perm[singular]  # group or user ID.
 
pillar/api/utils/rating.py (new file, +87)
@@ -0,0 +1,87 @@
# These functions come from Reddit
# https://github.com/reddit/reddit/blob/master/r2/r2/lib/db/_sorts.pyx

# Additional resources
# http://www.redditblog.com/2009/10/reddits-new-comment-sorting-system.html
# http://www.evanmiller.org/how-not-to-sort-by-average-rating.html
# http://amix.dk/blog/post/19588

from datetime import datetime, timezone
from math import log
from math import sqrt

epoch = datetime(1970, 1, 1, 0, 0, 0, 0, timezone.utc)


def epoch_seconds(date):
    """Returns the number of seconds from the epoch to date."""
    td = date - epoch
    return td.days * 86400 + td.seconds + (float(td.microseconds) / 1000000)


def score(ups, downs):
    return ups - downs


def hot(ups, downs, date):
    """The hot formula. Reddit's hot ranking uses the logarithm function to
    weight the first votes higher than the rest.
    The first 10 upvotes have the same weight as the next 100 upvotes which
    have the same weight as the next 1000, etc.

    Dillo authors: we modified the formula to give more weight to negative
    votes when an entry is controversial.

    TODO: make this function more dynamic so that different defaults can be
    specified depending on the item that is being rated.
    """

    s = score(ups, downs)
    order = log(max(abs(s), 1), 10)
    sign = 1 if s > 0 else -1 if s < 0 else 0
    seconds = epoch_seconds(date) - 1134028003
    base_hot = round(sign * order + seconds / 45000, 7)

    if downs > 1:
        rating_delta = 100 * (downs - ups) / downs
        if rating_delta < 25:
            # The post is controversial
            return base_hot
        base_hot = base_hot - (downs * 6)

    return base_hot

def _confidence(ups, downs):
 | 
			
		||||
    n = ups + downs
 | 
			
		||||
 | 
			
		||||
    if n == 0:
 | 
			
		||||
        return 0
 | 
			
		||||
 | 
			
		||||
    z = 1.0 #1.0 = 85%, 1.6 = 95%
 | 
			
		||||
    phat = float(ups) / n
 | 
			
		||||
    return sqrt(phat+z*z/(2*n)-z*((phat*(1-phat)+z*z/(4*n))/n))/(1+z*z/n)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def confidence(ups, downs):
 | 
			
		||||
    if ups + downs == 0:
 | 
			
		||||
        return 0
 | 
			
		||||
    else:
 | 
			
		||||
        return _confidence(ups, downs)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def update_hot(document):
 | 
			
		||||
    """Update the hotness of a document given its current ratings.
 | 
			
		||||
 | 
			
		||||
    We expect the document to implement the ratings_embedded_schema in
 | 
			
		||||
    a 'ratings' property.
 | 
			
		||||
    """
 | 
			
		||||
 | 
			
		||||
    dt = document['_created']
 | 
			
		||||
    dt = dt.replace(tzinfo=timezone.utc)
 | 
			
		||||
 | 
			
		||||
    document['properties']['ratings']['hot'] = hot(
 | 
			
		||||
        document['properties']['ratings']['positive'],
 | 
			
		||||
        document['properties']['ratings']['negative'],
 | 
			
		||||
        dt,
 | 
			
		||||
    )
 | 
			
		||||
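A quick worked example of the ranking helpers above; the vote counts and timestamp are invented:

    from datetime import datetime, timezone

    post_date = datetime(2017, 6, 1, 12, 0, tzinfo=timezone.utc)

    # The logarithm weights early votes: 10 net votes add 1 point of 'order',
    # 100 net votes add 2, and the post's age (via epoch_seconds) dominates over time.
    print(hot(ups=15, downs=5, date=post_date))

    # confidence() ignores time; it approximates a Wilson-score lower bound, so a
    # small sample scores lower than a large one at the same up/down ratio.
    print(confidence(ups=15, downs=5))
    print(confidence(ups=3, downs=1))
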
@@ -1,83 +1 @@
-import subprocess
-
-import os
-from flask import current_app
-from pillar.api.utils.gcs import GoogleCloudStorageBucket
-
-
-def get_sizedata(filepath):
-    outdata = dict(
-        size=int(os.stat(filepath).st_size)
-    )
-    return outdata
-
-
-def rsync(path, remote_dir=''):
-    BIN_SSH = current_app.config['BIN_SSH']
-    BIN_RSYNC = current_app.config['BIN_RSYNC']
-
-    DRY_RUN = False
-    arguments = ['--verbose', '--ignore-existing', '--recursive', '--human-readable']
-    logs_path = current_app.config['CDN_SYNC_LOGS']
-    storage_address = current_app.config['CDN_STORAGE_ADDRESS']
-    user = current_app.config['CDN_STORAGE_USER']
-    rsa_key_path = current_app.config['CDN_RSA_KEY']
-    known_hosts_path = current_app.config['CDN_KNOWN_HOSTS']
-
-    if DRY_RUN:
-        arguments.append('--dry-run')
-    folder_arguments = list(arguments)
-    if rsa_key_path:
-        folder_arguments.append(
-            '-e ' + BIN_SSH + ' -i ' + rsa_key_path + ' -o "StrictHostKeyChecking=no"')
-    # if known_hosts_path:
-    #     folder_arguments.append("-o UserKnownHostsFile " + known_hosts_path)
-    folder_arguments.append("--log-file=" + logs_path + "/rsync.log")
-    folder_arguments.append(path)
-    folder_arguments.append(user + "@" + storage_address + ":/public/" + remote_dir)
-    # print (folder_arguments)
-    devnull = open(os.devnull, 'wb')
-    # DEBUG CONFIG
-    # print folder_arguments
-    # proc = subprocess.Popen(['rsync'] + folder_arguments)
-    # stdout, stderr = proc.communicate()
-    subprocess.Popen(['nohup', BIN_RSYNC] + folder_arguments, stdout=devnull, stderr=devnull)
-
-
-def remote_storage_sync(path):  # can be both folder and file
-    if os.path.isfile(path):
-        filename = os.path.split(path)[1]
-        rsync(path, filename[:2] + '/')
-    else:
-        if os.path.exists(path):
-            rsync(path)
-        else:
-            raise IOError('ERROR: path not found')
-
-
-def push_to_storage(project_id, full_path, backend='cgs'):
-    """Move a file from temporary/processing local storage to a storage endpoint.
-    By default we store items in a Google Cloud Storage bucket named after the
-    project id.
-    """
-
-    def push_single_file(project_id, full_path, backend):
-        if backend == 'cgs':
-            storage = GoogleCloudStorageBucket(project_id, subdir='_')
-            blob = storage.Post(full_path)
-            # XXX Make public on the fly if it's an image and small preview.
-            # This should happen by reading the database (push to storage
-            # should change to accomodate it).
-            if blob is not None and full_path.endswith('-t.jpg'):
-                blob.make_public()
-            os.remove(full_path)
-
-    if os.path.isfile(full_path):
-        push_single_file(project_id, full_path, backend)
-    else:
-        if os.path.exists(full_path):
-            for root, dirs, files in os.walk(full_path):
-                for name in files:
-                    push_single_file(project_id, os.path.join(root, name), backend)
-        else:
-            raise IOError('ERROR: path not found')
+"""Utility for managing storage backends and files."""
@@ -1,9 +1,12 @@
 """Extra functionality for attrs."""

+import functools
 import logging

 import attr

+string = functools.partial(attr.ib, validator=attr.validators.instance_of(str))
+

 def log(name):
     """Returns a logger attr.ib
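A sketch of how the string helper above is used in practice; the example class is invented:

    import attr

    @attr.s
    class Server:
        hostname = string()           # attr.ib with an instance_of(str) validator
        region = string(default='')   # other attr.ib keyword arguments pass through

    Server(hostname='files.local')    # validates fine
    # Server(hostname=123) raises a TypeError from the validator.
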
@@ -1,27 +1,116 @@
 """Authentication code common to the web and api modules."""

+import collections
 import logging
+import typing

-from flask import current_app, session
+import blinker
+import bson
+from flask import session, g
 import flask_login
-import flask_oauthlib.client
 from werkzeug.local import LocalProxy

-from ..api import utils, blender_id
-from ..api.utils import authentication
+from pillar import current_app
+
+# The sender is the user that was just authenticated.
+user_authenticated = blinker.Signal('Sent whenever a user was authenticated')
+user_logged_in = blinker.Signal('Sent whenever a user logged in on the web')

 log = logging.getLogger(__name__)

+# Mapping from user role to capabilities obtained by users with that role.
+CAPABILITIES = collections.defaultdict(**{
+    'subscriber': {'subscriber', 'home-project'},
+    'demo': {'subscriber', 'home-project'},
+    'admin': {'video-encoding', 'admin',
+              'view-pending-nodes', 'edit-project-node-types'},
+}, default_factory=frozenset)
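To make the mapping concrete: collect_capabilities() further below unions the per-role sets, so, hand-evaluated from the table above:

    # roles == ['demo']                -> capabilities {'subscriber', 'home-project'}
    # roles == ['subscriber', 'admin'] -> capabilities {'subscriber', 'home-project',
    #                                     'video-encoding', 'admin', 'view-pending-nodes',
    #                                     'edit-project-node-types'}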


 class UserClass(flask_login.UserMixin):
-    def __init__(self, token):
+    def __init__(self, token: typing.Optional[str]):
         # We store the Token instead of ID
         self.id = token
-        self.username = None
-        self.full_name = None
-        self.objectid = None
-        self.gravatar = None
-        self.email = None
-        self.roles = []
+        self.username: str = None
+        self.full_name: str = None
+        self.user_id: bson.ObjectId = None
+        self.objectid: str = None
+        self.gravatar: str = None
+        self.email: str = None
+        self.roles: typing.List[str] = []
+        self.groups: typing.List[str] = []  # NOTE: these are stringified object IDs.
+        self.group_ids: typing.List[bson.ObjectId] = []
+        self.capabilities: typing.Set[str] = set()
+        self.nodes: dict = {}  # see the 'nodes' key in eve_settings.py::user_schema.
+        self.badges_html: str = ''
+
+        # Lazily evaluated
+        self._has_organizations: typing.Optional[bool] = None
+
+    @classmethod
+    def construct(cls, token: str, db_user: dict) -> 'UserClass':
+        """Constructs a new UserClass instance from a Mongo user document."""
+
+        from ..api import utils
+
+        user = cls(token)
+
+        user.user_id = db_user.get('_id')
+        user.roles = db_user.get('roles') or []
+        user.group_ids = db_user.get('groups') or []
+        user.email = db_user.get('email') or ''
+        user.username = db_user.get('username') or ''
+        user.full_name = db_user.get('full_name') or ''
+        user.badges_html = db_user.get('badges', {}).get('html') or ''
+
+        # Be a little more specific than just db_user['nodes']
+        user.nodes = {
+            'view_progress': db_user.get('nodes', {}).get('view_progress', {}),
+        }
+
+        # Derived properties
+        user.objectid = str(user.user_id or '')
+        user.gravatar = utils.gravatar(user.email)
+        user.groups = [str(g) for g in user.group_ids]
+        user.collect_capabilities()
+
+        return user
+
+    def __repr__(self):
+        return f'UserClass(user_id={self.user_id})'
+
+    def __str__(self):
+        return f'{self.__class__.__name__}(id={self.user_id}, email={self.email!r}'
+
+    def __getitem__(self, item):
+        """Compatibility layer with old dict-based g.current_user object."""
+
+        if item == 'user_id':
+            return self.user_id
+        if item == 'groups':
+            return self.group_ids
+        if item == 'roles':
+            return set(self.roles)
+
+        raise KeyError(f'No such key {item!r}')
+
+    def get(self, key, default=None):
+        """Compatibility layer with old dict-based g.current_user object."""
+
+        try:
+            return self[key]
+        except KeyError:
+            return default
+
+    def collect_capabilities(self):
+        """Constructs the capabilities set given the user's current roles.
+
+        Requires an application context to be active.
+        """
+
+        app_caps = current_app.user_caps
+
+        self.capabilities = set().union(*(app_caps[role] for role in self.roles))
+
     def has_role(self, *roles):
         """Returns True iff the user has one or more of the given roles."""
@@ -31,38 +120,84 @@ class UserClass(flask_login.UserMixin):

         return bool(set(self.roles).intersection(set(roles)))

+    def has_cap(self, *capabilities: typing.Iterable[str]) -> bool:
+        """Returns True iff the user has one or more of the given capabilities."""

-class AnonymousUser(flask_login.AnonymousUserMixin):
-    @property
-    def objectid(self):
-        """Anonymous user has no settable objectid."""
-        return None
+        if not self.capabilities:
+            return False
+
+        return bool(set(self.capabilities).intersection(set(capabilities)))
+
+    def matches_roles(self,
+                      require_roles=set(),
+                      require_all=False) -> bool:
+        """Returns True iff the user's roles match the query.
+
+        :param require_roles: set of roles.
+        :param require_all:
+            When False (the default): if the user's roles have a
+            non-empty intersection with the given roles, returns True.
+            When True: require the user to have all given roles before
+            returning True.
+        """
+
+        if not isinstance(require_roles, set):
+            raise TypeError(f'require_roles param should be a set, but is {type(require_roles)!r}')
+
+        if require_all and not require_roles:
+            raise ValueError('require_login(require_all=True) cannot be used with '
+                             'empty require_roles.')
+
+        intersection = require_roles.intersection(self.roles)
+        if require_all:
+            return len(intersection) == len(require_roles)
+
+        return not bool(require_roles) or bool(intersection)
+
+    def has_organizations(self) -> bool:
+        """Returns True iff this user administers or is member of any organization."""
+
+        if self._has_organizations is None:
+            assert self.user_id
+            self._has_organizations = current_app.org_manager.user_has_organizations(self.user_id)
+
+        return bool(self._has_organizations)
+
+
+class AnonymousUser(flask_login.AnonymousUserMixin, UserClass):
+    def __init__(self):
+        super().__init__(token=None)

     def has_role(self, *roles):
         return False

     def has_cap(self, *capabilities):
         return False

-def _load_user(token):
+    def has_organizations(self) -> bool:
+        return False
+
+
+def _load_user(token) -> typing.Union[UserClass, AnonymousUser]:
     """Loads a user by their token.

     :returns: returns a UserClass instance if logged in, or an AnonymousUser() if not.
-    :rtype: UserClass
     """

+    from ..api.utils import authentication
+
+    if not token:
+        return AnonymousUser()
+
     db_user = authentication.validate_this_token(token)
     if not db_user:
+        # There is a token, but it's not valid. We should reset the user's session.
+        session.clear()
         return AnonymousUser()

-    login_user = UserClass(token)
-    login_user.email = db_user['email']
-    login_user.objectid = unicode(db_user['_id'])
-    login_user.username = db_user['username']
-    login_user.gravatar = utils.gravatar(db_user['email'])
-    login_user.roles = db_user.get('roles', [])
-    login_user.groups = [unicode(g) for g in db_user['groups'] or ()]
-    login_user.full_name = db_user.get('full_name', '')
+    user = UserClass.construct(token, db_user)

-    return login_user
+    return user


 def config_login_manager(app):
@@ -71,6 +206,7 @@ def config_login_manager(app):
     login_manager = flask_login.LoginManager()
     login_manager.init_app(app)
     login_manager.login_view = "users.login"
     login_manager.login_message = ''
+    login_manager.anonymous_user = AnonymousUser
     # noinspection PyTypeChecker
     login_manager.user_loader(_load_user)
@@ -78,39 +214,78 @@ def config_login_manager(app):
     return login_manager


-def login_user(oauth_token):
+def login_user(oauth_token: str, *, load_from_db=False):
     """Log in the user identified by the given token."""

-    user = UserClass(oauth_token)
-    flask_login.login_user(user)
+    if load_from_db:
+        user = _load_user(oauth_token)
+    else:
+        user = UserClass(oauth_token)
+    login_user_object(user)


-def get_blender_id_oauth_token():
-    """Returns a tuple (token, ''), for use with flask_oauthlib."""
-    return session.get('blender_id_oauth_token')
+def login_user_object(user: UserClass):
+    """Log in the given user."""
+    flask_login.login_user(user, remember=True)
+    g.current_user = user
+    user_authenticated.send(user)
+    user_logged_in.send(user)


-def config_oauth_login(app):
-    config = app.config
-    if not config.get('SOCIAL_BLENDER_ID'):
-        log.info('OAuth Blender-ID login not setup.')
-        return None
+def logout_user():
+    """Forces a logout of the current user."""

-    oauth = flask_oauthlib.client.OAuth(app)
-    social_blender_id = config.get('SOCIAL_BLENDER_ID')
+    from ..api.utils import authentication

-    oauth_blender_id = oauth.remote_app(
-        'blender_id',
-        consumer_key=social_blender_id['app_id'],
-        consumer_secret=social_blender_id['app_secret'],
-        request_token_params={'scope': 'email'},
-        base_url=config['BLENDER_ID_OAUTH_URL'],
-        request_token_url=None,
-        access_token_url=config['BLENDER_ID_BASE_ACCESS_TOKEN_URL'],
-        authorize_url=config['BLENDER_ID_AUTHORIZE_URL']
-    )
+    token = get_blender_id_oauth_token()
+    if token:
+        authentication.remove_token(token)

-    oauth_blender_id.tokengetter(get_blender_id_oauth_token)
-    log.info('OAuth Blender-ID login setup as %s', social_blender_id['app_id'])
+    session.clear()
+    flask_login.logout_user()
+    g.current_user = AnonymousUser()

-    return oauth_blender_id
+
+def get_blender_id_oauth_token() -> str:
+    """Returns the Blender ID auth token, or an empty string if there is none."""
+
+    from flask import request
+
+    token = session.get('blender_id_oauth_token')
+    if token:
+        if isinstance(token, (tuple, list)):
+            # In a past version of Pillar we accidentally stored tuples in the session.
+            # Such sessions should be actively fixed.
+            # TODO(anyone, after 2017-12-01): refactor this if-block so that it just converts
+            # the token value to a string and use that instead.
+            token = token[0]
+            session['blender_id_oauth_token'] = token
+        return token
+
+    if request.authorization and request.authorization.username:
+        return request.authorization.username
+
+    if current_user.is_authenticated and current_user.id:
+        return current_user.id
+
+    return ''
+
+
+def get_current_user() -> UserClass:
+    """Returns the current user as a UserClass instance.
+
+    Never returns None; returns an AnonymousUser() instance instead.
+
+    This function is intended to be used when pillar.auth.current_user is
+    accessed many times in the same scope. Calling this function is then
+    more efficient, since it doesn't have to resolve the LocalProxy for
+    each access to the returned object.
+    """
+
+    from ..api.utils.authentication import current_user
+
+    return current_user()
+
+
+current_user: UserClass = LocalProxy(get_current_user)
+"""The current user."""
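A sketch of how view code typically consumes these helpers; the view body is invented:

    from pillar.auth import current_user, get_current_user

    def some_view():
        # The LocalProxy resolves the user on every attribute access...
        if current_user.has_cap('view-pending-nodes'):
            pass
        # ...so when it is accessed many times, resolve it once:
        user = get_current_user()
        if user.matches_roles({'admin', 'subscriber'}):
            pass
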
pillar/auth/oauth.py (new file, 228 lines)
@@ -0,0 +1,228 @@
import abc
import json
import logging
import typing

import attr
from rauth import OAuth2Service
from flask import current_app, url_for, request, redirect, session, Response


@attr.s
class OAuthUserResponse:
    """Represents user information requested from an OAuth provider after
    authenticating.
    """

    id = attr.ib(validator=attr.validators.instance_of(str))
    email = attr.ib(validator=attr.validators.instance_of(str))
    access_token = attr.ib(validator=attr.validators.instance_of(str))
    scopes: typing.List[str] = attr.ib(validator=attr.validators.instance_of(list))


class OAuthError(Exception):
    """Superclass of all exceptions raised by this module."""


class ProviderConfigurationMissing(OAuthError):
    """Raised when an OAuth provider is used but not configured."""


class ProviderNotImplemented(OAuthError):
    """Raised when a provider is requested that does not exist."""


class OAuthCodeNotProvided(OAuthError):
    """Raised when the 'code' arg is not provided in the OAuth callback."""


class ProviderNotConfigured:
    """Dummy class that indicates a provider isn't configured."""


class OAuthSignIn(metaclass=abc.ABCMeta):
    provider_name: str = None  # set in each subclass.

    _providers = None  # initialized in get_provider()
    _log = logging.getLogger(f'{__name__}.OAuthSignIn')

    def __init__(self):
        credentials = current_app.config['OAUTH_CREDENTIALS'].get(self.provider_name)
        if not credentials:
            raise ProviderConfigurationMissing(
                f'Missing OAuth credentials for {self.provider_name}')

        self.consumer_id = credentials['id']
        self.consumer_secret = credentials['secret']

        # Set in a subclass
        self.service: OAuth2Service = None

    @abc.abstractmethod
    def authorize(self) -> Response:
        """Redirect to the correct authorization endpoint for the current provider.

        Depending on the provider, we sometimes have to specify a different
        'scope'.
        """
        pass

    @abc.abstractmethod
    def callback(self) -> OAuthUserResponse:
        """Callback performed after authorizing the user.

        This is usually a request to a protected /me endpoint to query for
        user information, such as user id and email address.
        """
        pass

    def get_callback_url(self):
        return url_for('users.oauth_callback', provider=self.provider_name,
                       _external=True, _scheme=current_app.config['SCHEME'])

    @staticmethod
    def auth_code_from_request() -> str:
        try:
            return request.args['code']
        except KeyError:
            raise OAuthCodeNotProvided('A code argument was not provided in the request')

    @staticmethod
    def decode_json(payload):
        return json.loads(payload.decode('utf-8'))

    def make_oauth_session(self):
        return self.service.get_auth_session(
            data={'code': self.auth_code_from_request(),
                  'grant_type': 'authorization_code',
                  'redirect_uri': self.get_callback_url()},
            decoder=self.decode_json
        )

    @classmethod
    def get_provider(cls, provider_name) -> 'OAuthSignIn':
        if cls._providers is None:
            cls._init_providers()

        try:
            provider = cls._providers[provider_name]
        except KeyError:
            raise ProviderNotImplemented(f'No such OAuth provider {provider_name}')

        if provider is ProviderNotConfigured:
            raise ProviderConfigurationMissing(f'OAuth provider {provider_name} not configured')

        return provider

    @classmethod
    def _init_providers(cls):
        cls._providers = {}

        for provider_class in cls.__subclasses__():
            try:
                provider = provider_class()
            except ProviderConfigurationMissing:
                cls._log.info('OAuth provider %s not configured',
                              provider_class.provider_name)
                provider = ProviderNotConfigured
            cls._providers[provider_class.provider_name] = provider
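Presumably the registry is driven from the login views along these lines; the blueprint and route paths are assumptions, only the users.oauth_callback endpoint name is implied by get_callback_url() above:

    @blueprint.route('/oauth/<provider>/authorize')
    def oauth_authorize(provider: str):
        return OAuthSignIn.get_provider(provider).authorize()

    @blueprint.route('/oauth/<provider>/callback')
    def oauth_callback(provider: str):
        oauth_resp = OAuthSignIn.get_provider(provider).callback()
        # oauth_resp.id and oauth_resp.email identify the remote account;
        # oauth_resp.access_token is only filled in by BlenderIdSignIn below.
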

class BlenderIdSignIn(OAuthSignIn):
    provider_name = 'blender-id'
    scopes = ['email', 'badge']

    def __init__(self):
        from urllib.parse import urljoin
        super().__init__()

        base_url = current_app.config['BLENDER_ID_ENDPOINT']

        self.service = OAuth2Service(
            name='blender-id',
            client_id=self.consumer_id,
            client_secret=self.consumer_secret,
            authorize_url=urljoin(base_url, 'oauth/authorize'),
            access_token_url=urljoin(base_url, 'oauth/token'),
            base_url=urljoin(base_url, 'api/'),
        )

    def authorize(self):
        return redirect(self.service.get_authorize_url(
            scope=' '.join(self.scopes),
            response_type='code',
            redirect_uri=self.get_callback_url())
        )

    def callback(self):
        oauth_session = self.make_oauth_session()

        # TODO handle exception for failed oauth or not authorized
        access_token = oauth_session.access_token
        assert isinstance(access_token, str), f'oauth token must be str, not {type(access_token)}'

        session['blender_id_oauth_token'] = access_token
        me = oauth_session.get('user').json()

        # Blender ID doesn't tell us which scopes were granted by the user, so
        # for now assume we got all the scopes we requested.
        # (see https://github.com/jazzband/django-oauth-toolkit/issues/644)
        return OAuthUserResponse(str(me['id']), me['email'], access_token, self.scopes)


class FacebookSignIn(OAuthSignIn):
    provider_name = 'facebook'

    def __init__(self):
        super().__init__()
        self.service = OAuth2Service(
            name='facebook',
            client_id=self.consumer_id,
            client_secret=self.consumer_secret,
            authorize_url='https://graph.facebook.com/oauth/authorize',
            access_token_url='https://graph.facebook.com/oauth/access_token',
            base_url='https://graph.facebook.com/'
        )

    def authorize(self):
        return redirect(self.service.get_authorize_url(
            scope='email',
            response_type='code',
            redirect_uri=self.get_callback_url())
        )

    def callback(self):
        oauth_session = self.make_oauth_session()

        me = oauth_session.get('me?fields=id,email').json()
        # TODO handle case when user chooses not to disclose an email
        # see https://developers.facebook.com/docs/graph-api/reference/user/
        return OAuthUserResponse(me['id'], me.get('email'), '', [])


class GoogleSignIn(OAuthSignIn):
    provider_name = 'google'

    def __init__(self):
        super().__init__()
        self.service = OAuth2Service(
            name='google',
            client_id=self.consumer_id,
            client_secret=self.consumer_secret,
            authorize_url='https://accounts.google.com/o/oauth2/auth',
            access_token_url='https://accounts.google.com/o/oauth2/token',
            base_url='https://www.googleapis.com/oauth2/v1/'
        )

    def authorize(self):
        return redirect(self.service.get_authorize_url(
            scope='https://www.googleapis.com/auth/userinfo.email',
            response_type='code',
            redirect_uri=self.get_callback_url())
        )

    def callback(self):
        oauth_session = self.make_oauth_session()

        me = oauth_session.get('userinfo').json()
        return OAuthUserResponse(str(me['id']), me['email'], '', [])
@@ -1,51 +0,0 @@
-"""Cloud subscription info.
-
-Connects to the external subscription server to obtain user info.
-"""
-
-import logging
-
-from flask import current_app
-import requests
-from requests.adapters import HTTPAdapter
-
-log = logging.getLogger(__name__)
-
-
-def fetch_user(email):
-    """Returns the user info dict from the external subscriptions management server.
-
-    :returns: the store user info, or None if the user can't be found or there
-        was an error communicating. A dict like this is returned:
-        {
-            "shop_id": 700,
-            "cloud_access": 1,
-            "paid_balance": 314.75,
-            "balance_currency": "EUR",
-            "start_date": "2014-08-25 17:05:46",
-            "expiration_date": "2016-08-24 13:38:45",
-            "subscription_status": "wc-active",
-            "expiration_date_approximate": true
-        }
-    :rtype: dict
-    """
-
-    external_subscriptions_server = current_app.config['EXTERNAL_SUBSCRIPTIONS_MANAGEMENT_SERVER']
-
-    log.debug('Connecting to store at %s?blenderid=%s', external_subscriptions_server, email)
-
-    # Retry a few times when contacting the store.
-    s = requests.Session()
-    s.mount(external_subscriptions_server, HTTPAdapter(max_retries=5))
-    r = s.get(external_subscriptions_server, params={'blenderid': email},
-              verify=current_app.config['TLS_CERT_FILE'])
-
-    if r.status_code != 200:
-        log.warning("Error communicating with %s, code=%i, unable to check "
-                    "subscription status of user %s",
-                    external_subscriptions_server, r.status_code, email)
-        return None
-
-    store_user = r.json()
-    return store_user
pillar/badge_sync.py (new file, 266 lines)
@@ -0,0 +1,266 @@
import collections
import datetime
import logging
import typing
from urllib.parse import urljoin

import bson
import requests

from pillar import current_app, auth
from pillar.api.utils import utcnow

SyncUser = collections.namedtuple('SyncUser', 'user_id token bid_user_id')
BadgeHTML = collections.namedtuple('BadgeHTML', 'html expires')
log = logging.getLogger(__name__)


class StopRefreshing(Exception):
    """Indicates that Blender ID is having problems.

    Further badge refreshes should be put on hold to avoid bludgeoning
    a suffering Blender ID.
    """


def find_user_to_sync(user_id: bson.ObjectId) -> typing.Optional[SyncUser]:
    """Return user information for syncing badges for a specific user.

    Returns None if the user cannot be synced (no 'badge' scope on a token,
    or no Blender ID user_id known).
    """
    my_log = log.getChild('refresh_single_user')

    now = utcnow()
    tokens_coll = current_app.db('tokens')
    users_coll = current_app.db('users')

    token_info = tokens_coll.find_one({
        'user': user_id,
        'token': {'$exists': True},
        'oauth_scopes': 'badge',
        'expire_time': {'$gt': now},
    })
    if not token_info:
        my_log.debug('No token with scope "badge" for user %s', user_id)
        return None

    user_info = users_coll.find_one({'_id': user_id})
    # TODO(Sybren): do this filtering in the MongoDB query:
    bid_user_ids = [auth_info.get('user_id')
                    for auth_info in user_info.get('auth', [])
                    if auth_info.get('provider', '') == 'blender-id' and auth_info.get('user_id')]
    if not bid_user_ids:
        my_log.debug('No Blender ID user_id for user %s', user_id)
        return None

    bid_user_id = bid_user_ids[0]
    return SyncUser(user_id=user_id, token=token_info['token'], bid_user_id=bid_user_id)


def find_users_to_sync() -> typing.Iterable[SyncUser]:
    """Return user information of syncable users with badges."""

    now = utcnow()
    tokens_coll = current_app.db('tokens')
    cursor = tokens_coll.aggregate([
        # Find all users who have a 'badge' scope in their OAuth token.
        {'$match': {
            'token': {'$exists': True},
            'oauth_scopes': 'badge',
            'expire_time': {'$gt': now},
            # TODO(Sybren): save real token expiry time but keep checking tokens hourly when they are used!
        }},
        {'$lookup': {
            'from': 'users',
            'localField': 'user',
            'foreignField': '_id',
            'as': 'user'
        }},

        # Prevent 'user' from being an array.
        {'$unwind': {'path': '$user'}},

        # Get the Blender ID user ID only.
        {'$unwind': {'path': '$user.auth'}},
        {'$match': {'user.auth.provider': 'blender-id'}},

        # Only select those users whose badge doesn't exist or has expired.
        {'$match': {
            'user.badges.expires': {'$not': {'$gt': now}}
        }},

        # Make sure that the badges that expire last are also refreshed last.
        {'$sort': {'user.badges.expires': 1}},

        # Reduce the document to the info we're after.
        {'$project': {
            'token': True,
            'user._id': True,
            'user.auth.user_id': True,
        }},
    ])

    log.debug('Aggregating tokens and users')
    for user_info in cursor:
        log.debug('User %s has badges %s',
                  user_info['user']['_id'], user_info['user'].get('badges'))
        yield SyncUser(
            user_id=user_info['user']['_id'],
            token=user_info['token'],
            bid_user_id=user_info['user']['auth']['user_id'])
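For reference, each document this aggregation yields to the loop above has roughly this shape; the values are invented:

    # {
    #     '_id': ObjectId('...'),          # token document id
    #     'token': 'abc123',               # the OAuth token itself
    #     'user': {
    #         '_id': ObjectId('...'),      # Pillar user id
    #         'auth': {'user_id': '983'},  # Blender ID user id (single dict after $unwind)
    #     },
    # }
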
def fetch_badge_html(session: requests.Session, user: SyncUser, size: str) \
        -> str:
    """Fetch a Blender ID badge for this user.

    :param session:
    :param user:
    :param size: Size indication for the badge images, see the Blender ID
        documentation/code. As of this writing valid sizes are {'s', 'm', 'l'}.
    """
    my_log = log.getChild('fetch_badge_html')

    blender_id_endpoint = current_app.config['BLENDER_ID_ENDPOINT']
    url = urljoin(blender_id_endpoint, f'api/badges/{user.bid_user_id}/html/{size}')

    my_log.debug('Fetching badge HTML at %s for user %s', url, user.user_id)
    try:
        resp = session.get(url, headers={'Authorization': f'Bearer {user.token}'})
    except requests.ConnectionError as ex:
        my_log.warning('Unable to connect to Blender ID at %s: %s', url, ex)
        raise StopRefreshing()

    if resp.status_code == 204:
        my_log.debug('No badges for user %s', user.user_id)
        return ''
    if resp.status_code == 403:
        # TODO(Sybren): this indicates the token is invalid, so we could just as well delete it.
        my_log.warning('Tried fetching %s for user %s but received a 403: %s',
                       url, user.user_id, resp.text)
        return ''
    if resp.status_code == 400:
        my_log.warning('Blender ID did not accept our GET request at %s for user %s: %s',
                       url, user.user_id, resp.text)
        return ''
    if resp.status_code == 500:
        my_log.warning('Blender ID returned an internal server error on %s for user %s, '
                       'aborting all badge refreshes: %s', url, user.user_id, resp.text)
        raise StopRefreshing()
    if resp.status_code == 404:
        my_log.warning('Blender ID has no user %s for our user %s', user.bid_user_id, user.user_id)
        return ''
    resp.raise_for_status()
    return resp.text


def refresh_all_badges(only_user_id: typing.Optional[bson.ObjectId] = None, *,
                       dry_run=False,
                       timelimit: datetime.timedelta):
    """Re-fetch all badges for all users, except when already refreshed recently.

    :param only_user_id: Only refresh this user. This is expected to be used
        sparingly during manual maintenance / debugging sessions only. It does
        fetch all users to refresh, and in Python code skips all except the
        given one.
    :param dry_run: if True the changes are described in the log, but not performed.
    :param timelimit: Refreshing will stop after this time. This allows for cron(-like)
        jobs to run without overlapping, even when the number of badges to refresh
        becomes larger than possible within the period of the cron job.
    """
    my_log = log.getChild('refresh_all_badges')

    # Test the config before we start looping over the world.
    badge_expiry = badge_expiry_config()
    if not badge_expiry or not isinstance(badge_expiry, datetime.timedelta):
        raise ValueError('BLENDER_ID_BADGE_EXPIRY not configured properly, should be a timedelta')

    session = _get_requests_session()
    deadline = utcnow() + timelimit

    num_updates = 0
    for user_info in find_users_to_sync():
        if utcnow() > deadline:
            my_log.info('Stopping badge refresh because the timelimit %s (H:MM:SS) was hit.',
                        timelimit)
            break

        if only_user_id and user_info.user_id != only_user_id:
            my_log.debug('Skipping user %s', user_info.user_id)
            continue
        try:
            badge_html = fetch_badge_html(session, user_info, 's')
        except StopRefreshing:
            my_log.error('Blender ID has internal problems, stopping badge refreshing at user %s',
                         user_info)
            break

        num_updates += 1
        update_badges(user_info, badge_html, badge_expiry, dry_run=dry_run)
    my_log.info('Updated badges of %d users%s', num_updates, ' (dry-run)' if dry_run else '')


def _get_requests_session() -> requests.Session:
    from requests.adapters import HTTPAdapter
    session = requests.Session()
    session.mount('https://', HTTPAdapter(max_retries=5))
    return session


def refresh_single_user(user_id: bson.ObjectId):
    """Refresh badges for a single user."""
    my_log = log.getChild('refresh_single_user')

    badge_expiry = badge_expiry_config()
    if not badge_expiry:
        my_log.warning('Skipping badge fetching, BLENDER_ID_BADGE_EXPIRY not configured')

    my_log.debug('Fetching badges for user %s', user_id)
    session = _get_requests_session()
    user_info = find_user_to_sync(user_id)
    if not user_info:
        return

    try:
        badge_html = fetch_badge_html(session, user_info, 's')
    except StopRefreshing:
        my_log.error('Blender ID has internal problems, stopping badge refreshing at user %s',
                     user_info)
        return

    update_badges(user_info, badge_html, badge_expiry, dry_run=False)
    my_log.info('Updated badges of user %s', user_id)


def update_badges(user_info: SyncUser, badge_html: str, badge_expiry: datetime.timedelta,
                  *, dry_run: bool):
    my_log = log.getChild('update_badges')
    users_coll = current_app.db('users')

    update = {'badges': {
        'html': badge_html,
        'expires': utcnow() + badge_expiry,
    }}
    my_log.info('Updating badges HTML for Blender ID %s, user %s',
                user_info.bid_user_id, user_info.user_id)

    if dry_run:
        return

    result = users_coll.update_one({'_id': user_info.user_id},
                                   {'$set': update})
    if result.matched_count != 1:
        my_log.warning('Unable to update badges for user %s', user_info.user_id)


def badge_expiry_config() -> datetime.timedelta:
    return current_app.config.get('BLENDER_ID_BADGE_EXPIRY')
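badge_expiry_config() simply reads a timedelta from the application config; a plausible setting, where the four-hour value is an assumption:

    # In the Pillar config:
    import datetime
    BLENDER_ID_BADGE_EXPIRY = datetime.timedelta(hours=4)
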

@auth.user_logged_in.connect
def sync_badge_upon_login(sender: auth.UserClass, **kwargs):
    """Auto-sync badges when a user logs in."""

    log.info('Refreshing badge of %s because they logged in', sender.user_id)
    refresh_single_user(sender.user_id)
pillar/bugsnag_extra.py (new file, 52 lines)
@@ -0,0 +1,52 @@
# Keys in the user's session dictionary that are removed before sending to Bugsnag.
SESSION_KEYS_TO_REMOVE = ('blender_id_oauth_token', 'user_id')


def add_pillar_request_to_notification(notification):
    """Adds request metadata to the Bugsnag notifications.

    This basically copies bugsnag.flask.add_flask_request_to_notification,
    but is altered to include Pillar-specific metadata.
    """
    from flask import request, session
    from bugsnag.wsgi import request_path
    import pillar.auth

    if not request:
        return

    notification.context = "%s %s" % (request.method,
                                      request_path(request.environ))

    if 'id' not in notification.user:
        user: pillar.auth.UserClass = pillar.auth.current_user._get_current_object()
        notification.set_user(id=user.user_id,
                              email=user.email,
                              name=user.username)
        notification.user['roles'] = sorted(user.roles)
        notification.user['capabilities'] = sorted(user.capabilities)

    session_dict = dict(session)
    for key in SESSION_KEYS_TO_REMOVE:
        try:
            del session_dict[key]
        except KeyError:
            pass
    notification.add_tab("session", session_dict)
    notification.add_tab("environment", dict(request.environ))

    remote_addr = request.remote_addr
    forwarded_for = request.headers.get('X-Forwarded-For')
    if forwarded_for:
        remote_addr = f'{forwarded_for} (proxied via {remote_addr})'

    notification.add_tab("request", {
        "method": request.method,
        "url": request.base_url,
        "headers": dict(request.headers),
        "params": dict(request.form),
        "data": {'request.data': request.data,
                 'request.json': request.get_json()},
        "endpoint": request.endpoint,
        "remote_addr": remote_addr,
    })
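This function matches the callback signature of Bugsnag's notification filters, so it would presumably be registered at application start-up along these lines:

    import bugsnag
    from pillar.bugsnag_extra import add_pillar_request_to_notification

    bugsnag.before_notify(add_pillar_request_to_notification)
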
pillar/celery/__init__.py (new file, 6 lines)
@@ -0,0 +1,6 @@
"""Tasks to be run by the Celery worker.

If you create a new submodule/subpackage, be sure to add it to
PillarServer._config_celery() too.

"""
							
								
								
									
										20
									
								
								pillar/celery/badges.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										20
									
								
								pillar/celery/badges.py
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,20 @@
 | 
			
		||||
"""Badge HTML synchronisation.
 | 
			
		||||
 | 
			
		||||
Note that this module can only be imported when an application context is
 | 
			
		||||
active. Best to late-import this in the functions where it's needed.
 | 
			
		||||
"""
 | 
			
		||||
import datetime
 | 
			
		||||
import logging
 | 
			
		||||
 | 
			
		||||
from pillar import current_app, badge_sync
 | 
			
		||||
 | 
			
		||||
log = logging.getLogger(__name__)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@current_app.celery.task(ignore_result=True)
 | 
			
		||||
def sync_badges_for_users(timelimit_seconds: int):
 | 
			
		||||
    """Synchronises Blender ID badges for the most-urgent users."""
 | 
			
		||||
 | 
			
		||||
    timelimit = datetime.timedelta(seconds=timelimit_seconds)
 | 
			
		||||
    log.info('Refreshing badges, timelimit is %s (H:MM:SS)', timelimit)
 | 
			
		||||
    badge_sync.refresh_all_badges(timelimit=timelimit)
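
Tasks declared with @current_app.celery.task are queued like any other Celery task. A usage sketch, assuming an active application context and a running worker; the nine-minute limit is illustrative:

    from pillar.celery import badges

    # Ask the worker to refresh badges for at most 540 seconds (9 minutes).
    badges.sync_badges_for_users.delay(timelimit_seconds=540)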

pillar/celery/email_tasks.py (new file, 50 lines)
@@ -0,0 +1,50 @@
"""Deferred email support.

Note that this module can only be imported when an application context is
active. Best to late-import this in the functions where it's needed.
"""
from email.message import EmailMessage
from email.headerregistry import Address
import logging
import smtplib

import celery

from pillar import current_app

log = logging.getLogger(__name__)


@current_app.celery.task(bind=True, ignore_result=True, acks_late=True)
def send_email(self: celery.Task, to_name: str, to_addr: str, subject: str, text: str, html: str):
    """Send an email to a single address."""
    # WARNING: when changing the signature of this function, also change the
    # self.retry() call below.
    cfg = current_app.config

    # Construct the message
    msg = EmailMessage()
    msg['Subject'] = subject
    msg['From'] = Address(cfg['MAIL_DEFAULT_FROM_NAME'], addr_spec=cfg['MAIL_DEFAULT_FROM_ADDR'])
    msg['To'] = (Address(to_name, addr_spec=to_addr),)
    msg.set_content(text)
    msg.add_alternative(html, subtype='html')

    # Refuse to send mail when we're testing.
    if cfg['TESTING']:
        log.warning('not sending mail to %s <%s> because we are TESTING', to_name, to_addr)
        return
    log.info('sending email to %s <%s>', to_name, to_addr)

    # Send the message via local SMTP server.
    try:
        with smtplib.SMTP(cfg['SMTP_HOST'], cfg['SMTP_PORT'], timeout=cfg['SMTP_TIMEOUT']) as smtp:
            if cfg.get('SMTP_USERNAME') and cfg.get('SMTP_PASSWORD'):
                smtp.login(cfg['SMTP_USERNAME'], cfg['SMTP_PASSWORD'])
            smtp.send_message(msg)
    except (IOError, OSError) as ex:
        log.exception('error sending email to %s <%s>, will retry later: %s',
                      to_name, to_addr, ex)
        self.retry((to_name, to_addr, subject, text, html), countdown=cfg['MAIL_RETRY'])
    else:
        log.info('mail to %s <%s> successfully sent', to_name, to_addr)
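
Because the task is bound (bind=True), Celery injects self, so callers pass only the five remaining arguments. A usage sketch with made-up recipient data:

    from pillar.celery import email_tasks

    # Queues the mail on the worker; SMTP/socket errors trigger the
    # self.retry() call above after cfg['MAIL_RETRY'] seconds.
    email_tasks.send_email.delay(
        'Example User',       # to_name
        'user@example.com',   # to_addr
        'Welcome to Pillar',  # subject
        'Plain-text body',    # text
        '<p>HTML body</p>',   # html
    )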

pillar/celery/file_link_tasks.py (new file, 19 lines)
@@ -0,0 +1,19 @@
from pillar import current_app


@current_app.celery.task(ignore_result=True)
def regenerate_all_expired_links(backend_name: str, chunk_size: int):
    """Regenerate all expired links for all non-deleted file documents.

    Probably only works on Google Cloud Storage ('gcs') backends at
    the moment, since those are the only links that actually expire.

    :param backend_name: name of the backend to refresh for.
    :param chunk_size: the maximum number of files to refresh in this run.
    """
    from pillar.api import file_storage

    # Refresh all files that already have expired or will expire in the next
    # two hours. Since this task is intended to run every hour, this should
    # result in all regular file requests having a valid link.
    file_storage.refresh_links_for_backend(backend_name, chunk_size, expiry_seconds=7200)
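
Given the two-hour expiry window above, the task is meant to be triggered hourly by a periodic scheduler. A sketch of queueing a single run by hand; the backend name and chunk size are illustrative:

    from pillar.celery import file_link_tasks

    # Refresh up to 100 'gcs' links that are expired or expire within two hours.
    file_link_tasks.regenerate_all_expired_links.delay('gcs', 100)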

pillar/celery/search_index_tasks.py (new file, 177 lines)
@@ -0,0 +1,177 @@
import logging

import bleach
from bson import ObjectId

from pillar import current_app
from pillar.api.file_storage import generate_link
from pillar.api.search import elastic_indexing
from pillar.api.search import algolia_indexing


log = logging.getLogger(__name__)


INDEX_ALLOWED_NODE_TYPES = {'asset', 'texture', 'group', 'hdri', 'post'}


SEARCH_BACKENDS = {
    'algolia': algolia_indexing,
    'elastic': elastic_indexing
}


def _get_node_from_id(node_id: str):
    node_oid = ObjectId(node_id)

    nodes_coll = current_app.db('nodes')
    node = nodes_coll.find_one({'_id': node_oid})

    return node


def prepare_node_data(node_id: str, node: dict=None) -> dict:
    """Given a node id or a node document, return an indexable version of it.

    Returns an empty dict when the node shouldn't be indexed.
    """

    if node_id and node:
        raise ValueError("Do not provide node and node_id together")

    if node_id:
        node = _get_node_from_id(node_id)

    if node is None:
        log.warning('Unable to find node %s, not updating.', node_id)
        return {}

    if node['node_type'] not in INDEX_ALLOWED_NODE_TYPES:
        log.debug('Node of type %s is not indexable by Pillar', node['node_type'])
        return {}
    # If a node does not have status published, do not index it.
    if node['properties'].get('status') != 'published':
        log.debug('Node %s does not have published status', node_id)
        return {}

    projects_collection = current_app.data.driver.db['projects']
    project = projects_collection.find_one({'_id': ObjectId(node['project'])})

    users_collection = current_app.data.driver.db['users']
    user = users_collection.find_one({'_id': ObjectId(node['user'])})

    clean_description = bleach.clean(node.get('_description_html') or '', strip=True)
    if not clean_description and node['node_type'] == 'post':
        clean_description = bleach.clean(node['properties'].get('_content_html') or '', strip=True)

    to_index = {
        'objectID': node['_id'],
        'name': node['name'],
        'project': {
            '_id': project['_id'],
            'name': project['name'],
            'url': project['url'],
        },
        'created': node['_created'],
        'updated': node['_updated'],
        'node_type': node['node_type'],
        'picture': node.get('picture') or '',
        'user': {
            '_id': user['_id'],
            'full_name': user['full_name']
        },
        'description': clean_description or None,
        'is_free': False
    }

    # If the node has world permissions, compute the Free permission
    if 'world' in node.get('permissions', {}):
        if 'GET' in node['permissions']['world']:
            to_index['is_free'] = True

    # Append the media key if the node is of node_type 'asset'
    if node['node_type'] == 'asset':
        to_index['media'] = node['properties']['content_type']

    # Add extra properties
    for prop in ('tags', 'license_notes'):
        if prop in node['properties']:
            to_index[prop] = node['properties'][prop]

    return to_index


def prepare_user_data(user_id: str, user=None) -> dict:
    """
    Prepare data to index for user node.

    Returns an empty dict if the user should not be indexed.
    """

    if not user:
        user_oid = ObjectId(user_id)
        log.info('Retrieving user %s', user_oid)
        users_coll = current_app.db('users')
        user = users_coll.find_one({'_id': user_oid})

    if user is None:
        log.warning('Unable to find user %s, not updating search index.', user_id)
        return {}

    user_roles = set(user.get('roles', ()))

    if 'service' in user_roles:
        return {}

    # Strip unneeded roles
    index_roles = user_roles.intersection(current_app.user_roles_indexable)

    log.debug('Push user %r to search index', user['_id'])

    user_to_index = {
        'objectID': user['_id'],
        'full_name': user['full_name'],
        'username': user['username'],
        'roles': list(index_roles),
        'groups': user['groups'],
        'email': user['email']
    }

    return user_to_index


@current_app.celery.task(ignore_result=True)
def updated_user(user_id: str):
    """Push an update to the index when a user item is updated"""

    user_to_index = prepare_user_data(user_id)

    for searchoption in current_app.config['SEARCH_BACKENDS']:
        searchmodule = SEARCH_BACKENDS[searchoption]
        searchmodule.push_updated_user(user_to_index)


@current_app.celery.task(ignore_result=True)
def node_save(node_id: str):

    to_index = prepare_node_data(node_id)

    if not to_index:
        log.debug('Node %s will not be indexed', node_id)
        return

    for searchoption in current_app.config['SEARCH_BACKENDS']:
        searchmodule = SEARCH_BACKENDS[searchoption]
        searchmodule.index_node_save(to_index)


@current_app.celery.task(ignore_result=True)
def node_delete(node_id: str):

    # Deleting a node takes nothing more than the ID anyway.
    # No need to fetch anything from Mongo.
    delete_id = ObjectId(node_id)

    for searchoption in current_app.config['SEARCH_BACKENDS']:
        searchmodule = SEARCH_BACKENDS[searchoption]
        searchmodule.index_node_delete(delete_id)
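
The SEARCH_BACKENDS mapping lets each task fan out to every backend named in the SEARCH_BACKENDS config entry, so enabling Algolia, Elastic, or both is purely configuration. A sketch of triggering the tasks for a single node; the ObjectId string is made up:

    from pillar.celery import search_index_tasks

    # Arguments are plain strings; the tasks convert them to ObjectId themselves.
    search_index_tasks.node_save.delay('5a0c7f8e1f3a2b0012345678')
    search_index_tasks.node_delete.delay('5a0c7f8e1f3a2b0012345678')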

pillar/celery/tasks.py (new file, 20 lines)
@@ -0,0 +1,20 @@
import logging
import typing

from pillar import current_app

log = logging.getLogger(__name__)


@current_app.celery.task(track_started=True)
def long_task(numbers: typing.List[int]):
    _log = log.getChild('long_task')
    _log.info('Computing sum of %i items', len(numbers))

    import time
    time.sleep(6)
    thesum = sum(numbers)

    _log.info('Computed sum of %i items', len(numbers))

    return thesum
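
With track_started=True (and no ignore_result) the task reports a STARTED state and stores its return value, so callers can poll it. A usage sketch, assuming a Celery result backend is configured:

    from pillar.celery import tasks

    result = tasks.long_task.delay([1, 2, 3])
    print(result.state)            # 'PENDING', then 'STARTED' during the sleep
    print(result.get(timeout=30))  # blocks until the worker returns 6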

pillar/cli.py (deleted, 795 lines)
@@ -1,795 +0,0 @@
"""Commandline interface.

Run commands with 'flask <command>'
"""

from __future__ import print_function, division

import copy
import logging

from bson.objectid import ObjectId, InvalidId
from eve.methods.put import put_internal
from eve.methods.post import post_internal

from flask import current_app
from flask_script import Manager

log = logging.getLogger(__name__)
manager = Manager(current_app)

manager_maintenance = Manager(
    current_app, usage="Maintenance scripts, to update user groups")
manager_setup = Manager(
    current_app, usage="Setup utilities, like setup_db() or create_blog()")
manager_operations = Manager(
    current_app, usage="Backend operations, like moving nodes across projects")


@manager_setup.command
def setup_db(admin_email):
    """Set up the database
    - Create admin, subscriber and demo Group collection
    - Create admin user (must use valid blender-id credentials)
    - Create one project
    """

    # Create default groups
    groups_list = []
    for group in ['admin', 'subscriber', 'demo']:
        g = {'name': group}
        g = current_app.post_internal('groups', g)
        groups_list.append(g[0]['_id'])
        print("Creating group {0}".format(group))

    # Create admin user
    user = {'username': admin_email,
            'groups': groups_list,
            'roles': ['admin', 'subscriber', 'demo'],
            'settings': {'email_communications': 1},
            'auth': [],
            'full_name': admin_email,
            'email': admin_email}
    result, _, _, status = current_app.post_internal('users', user)
    if status != 201:
        raise SystemExit('Error creating user {}: {}'.format(admin_email, result))
    user.update(result)
    print("Created user {0}".format(user['_id']))

    # Create a default project by faking a POST request.
    with current_app.test_request_context(data={'project_name': u'Default Project'}):
        from flask import g
        from pillar.api.projects import routes as proj_routes

        g.current_user = {'user_id': user['_id'],
                          'groups': user['groups'],
                          'roles': set(user['roles'])}

        proj_routes.create_project(overrides={'url': 'default-project',
                                              'is_private': False})


@manager_maintenance.command
def find_duplicate_users():
    """Finds users that have the same BlenderID user_id."""

    from collections import defaultdict

    users_coll = current_app.data.driver.db['users']
    nodes_coll = current_app.data.driver.db['nodes']
    projects_coll = current_app.data.driver.db['projects']

    found_users = defaultdict(list)

    for user in users_coll.find():
        blender_ids = [auth['user_id'] for auth in user['auth']
                       if auth['provider'] == 'blender-id']
        if not blender_ids:
            continue
        blender_id = blender_ids[0]
        found_users[blender_id].append(user)

    for blender_id, users in found_users.iteritems():
        if len(users) == 1:
            continue

        usernames = ', '.join(user['username'] for user in users)
        print('Blender ID: %5s has %i users: %s' % (
            blender_id, len(users), usernames))

        for user in users:
            print('  %s owns %i nodes and %i projects' % (
                user['username'],
                nodes_coll.count({'user': user['_id']}),
                projects_coll.count({'user': user['_id']}),
            ))


@manager_maintenance.command
def sync_role_groups(do_revoke_groups):
    """For each user, synchronizes roles and group membership.

    This ensures that everybody with the 'subscriber' role is also member of the 'subscriber'
    group, and people without the 'subscriber' role are not member of that group. Same for
    admin and demo groups.

    When do_revoke_groups=False (the default), people are only added to groups.
    When do_revoke_groups=True, people are also removed from groups.
    """

    from pillar.api import service

    if do_revoke_groups not in {'true', 'false'}:
        print('Use either "true" or "false" as first argument.')
        print('When passing "false", people are only added to groups.')
        print('When passing "true", people are also removed from groups.')
        raise SystemExit()
    do_revoke_groups = do_revoke_groups == 'true'

    service.fetch_role_to_group_id_map()

    users_coll = current_app.data.driver.db['users']
    groups_coll = current_app.data.driver.db['groups']

    group_names = {}

    def gname(gid):
        try:
            return group_names[gid]
        except KeyError:
            name = groups_coll.find_one(gid, projection={'name': 1})['name']
            name = str(name)
            group_names[gid] = name
            return name

    ok_users = bad_users = 0
    for user in users_coll.find():
        grant_groups = set()
        revoke_groups = set()
        current_groups = set(user.get('groups', []))
        user_roles = user.get('roles', set())

        for role in service.ROLES_WITH_GROUPS:
            action = 'grant' if role in user_roles else 'revoke'
            groups = service.manage_user_group_membership(user, role, action)

            if groups is None:
                # No changes required
                continue

            if groups == current_groups:
                continue

            grant_groups.update(groups.difference(current_groups))
            revoke_groups.update(current_groups.difference(groups))

        if grant_groups or revoke_groups:
            bad_users += 1

            expected_groups = current_groups.union(grant_groups).difference(revoke_groups)

            print('Discrepancy for user %s/%s:' % (user['_id'], user['full_name'].encode('utf8')))
            print('    - actual groups  :', sorted(gname(gid) for gid in user.get('groups')))
            print('    - expected groups:', sorted(gname(gid) for gid in expected_groups))
            print('    - will grant     :', sorted(gname(gid) for gid in grant_groups))

            if do_revoke_groups:
                label = 'WILL REVOKE '
            else:
                label = 'could revoke'
            print('    - %s   :' % label, sorted(gname(gid) for gid in revoke_groups))

            if grant_groups and revoke_groups:
                print('        ------ CAREFUL this one has BOTH grant AND revoke -----')

            # Determine which changes we'll apply
            final_groups = current_groups.union(grant_groups)
            if do_revoke_groups:
                final_groups.difference_update(revoke_groups)
            print('    - final groups   :', sorted(gname(gid) for gid in final_groups))

            # Perform the actual update
            users_coll.update_one({'_id': user['_id']},
                                  {'$set': {'groups': list(final_groups)}})
        else:
            ok_users += 1

    print('%i bad and %i ok users seen.' % (bad_users, ok_users))


@manager_maintenance.command
def sync_project_groups(user_email, fix):
    """Gives the user access to their self-created projects."""

    if fix.lower() not in {'true', 'false'}:
        print('Use either "true" or "false" as second argument.')
        print('When passing "false", only a report is produced.')
        print('When passing "true", group membership is fixed.')
        raise SystemExit()
    fix = fix.lower() == 'true'

    users_coll = current_app.data.driver.db['users']
    proj_coll = current_app.data.driver.db['projects']
    groups_coll = current_app.data.driver.db['groups']

    # Find by email or by user ID
    if '@' in user_email:
        where = {'email': user_email}
    else:
        try:
            where = {'_id': ObjectId(user_email)}
        except InvalidId:
            log.warning('Invalid ObjectID: %s', user_email)
            return

    user = users_coll.find_one(where, projection={'_id': 1, 'groups': 1})
    if user is None:
        log.error('User %s not found', where)
        raise SystemExit()

    user_groups = set(user['groups'])
    user_id = user['_id']
    log.info('Updating projects for user %s', user_id)

    ok_groups = missing_groups = 0
    for proj in proj_coll.find({'user': user_id}):
        project_id = proj['_id']
        log.info('Investigating project %s (%s)', project_id, proj['name'])

        # Find the admin group
        admin_group = groups_coll.find_one({'name': str(project_id)}, projection={'_id': 1})
        if admin_group is None:
            log.warning('No admin group for project %s', project_id)
            continue
        group_id = admin_group['_id']

        # Check membership
        if group_id not in user_groups:
            log.info('Missing group membership')
            missing_groups += 1
            user_groups.add(group_id)
        else:
            ok_groups += 1

    log.info('User %s was missing %i group memberships; %i projects were ok.',
             user_id, missing_groups, ok_groups)

    if missing_groups > 0 and fix:
        log.info('Updating database.')
        result = users_coll.update_one({'_id': user_id},
                                       {'$set': {'groups': list(user_groups)}})
        log.info('Updated %i user.', result.modified_count)


@manager_maintenance.command
def check_home_project_groups():
    """Checks all users' group membership of their home project admin group."""

    users_coll = current_app.data.driver.db['users']
    proj_coll = current_app.data.driver.db['projects']

    good = bad = 0
    for proj in proj_coll.find({'category': 'home'}):
        try:
            admin_group_perms = proj['permissions']['groups'][0]
        except IndexError:
            log.error('Project %s has no admin group', proj['_id'])
            return 255
        except KeyError:
            log.error('Project %s has no group permissions at all', proj['_id'])
            return 255

        user = users_coll.find_one({'_id': proj['user']},
                                   projection={'groups': 1})
        if user is None:
            log.error('Project %s has non-existing owner %s', proj['_id'], proj['user'])
            return 255

        user_groups = set(user['groups'])
        admin_group_id = admin_group_perms['group']
        if admin_group_id in user_groups:
            # All is fine!
            good += 1
            continue

        log.warning('User %s has no admin rights to home project %s -- needs group %s',
                    proj['user'], proj['_id'], admin_group_id)
        bad += 1

    log.info('%i projects OK, %i projects in error', good, bad)
    return bad


@manager_setup.command
def badger(action, user_email, role):
    from pillar.api import service

    with current_app.app_context():
        service.fetch_role_to_group_id_map()
        response, status = service.do_badger(action, user_email, role)

    if status == 204:
        log.info('Done.')
    else:
        log.info('Response: %s', response)
        log.info('Status  : %i', status)


def create_service_account(email, service_roles, service_definition, update_existing=None):
    from pillar.api import service
    from pillar.api.utils import dumps

    account, token = service.create_service_account(
        email,
        service_roles,
        service_definition,
        update_existing=update_existing
    )

    print('Service account information:')
    print(dumps(account, indent=4, sort_keys=True))
    print()
    print('Access token: %s' % token['token'])
    print('  expires on: %s' % token['expire_time'])
    return account, token


@manager_setup.command
def create_badger_account(email, badges):
    """
    Creates a new service account that can give badges (i.e. roles).

    :param email: email address associated with the account
    :param badges: single space-separated argument containing the roles
        this account can assign and revoke.
    """

    create_service_account(email, [u'badger'], {'badger': badges.strip().split()})


@manager_setup.command
def create_urler_account(email):
    """Creates a new service account that can fetch all project URLs."""

    create_service_account(email, [u'urler'], {})


@manager_setup.command
def create_local_user_account(email, password):
    from pillar.api.local_auth import create_local_user
    create_local_user(email, password)


@manager_maintenance.command
@manager_maintenance.option('-c', '--chunk', dest='chunk_size', default=50,
                help='Number of links to update, use 0 to update all.')
@manager_maintenance.option('-q', '--quiet', dest='quiet', action='store_true', default=False)
@manager_maintenance.option('-w', '--window', dest='window', default=12,
                help='Refresh links that expire in this many hours.')
def refresh_backend_links(backend_name, chunk_size=50, quiet=False, window=12):
    """Refreshes all file links that are using a certain storage backend.

    Use `--chunk 0` to refresh all links.
    """

    # CLI parameters are passed as strings.
    chunk_size = int(chunk_size)
    window = int(window)

    loglevel = logging.WARNING if quiet else logging.DEBUG
    logging.getLogger('pillar.api.file_storage').setLevel(loglevel)

    from pillar.api import file_storage

    file_storage.refresh_links_for_backend(backend_name, chunk_size, window * 3600)


@manager_maintenance.command
def expire_all_project_links(project_uuid):
    """Expires all file links for a certain project without refreshing.

    This is just for testing.
    """

    import datetime
    import bson.tz_util

    files_collection = current_app.data.driver.db['files']

    now = datetime.datetime.now(tz=bson.tz_util.utc)
    expires = now - datetime.timedelta(days=1)

    result = files_collection.update_many(
        {'project': ObjectId(project_uuid)},
        {'$set': {'link_expires': expires}}
    )

    print('Expired %i links' % result.matched_count)


@manager_operations.command
def file_change_backend(file_id, dest_backend='gcs'):
    """Given a file document, move it to the specified backend (if not already
    there) and update the document to reflect that.
    Files on the original backend are not deleted automatically.
    """

    from pillar.api.file_storage.moving import change_file_storage_backend
    change_file_storage_backend(file_id, dest_backend)


@manager_operations.command
def mass_copy_between_backends(src_backend='cdnsun', dest_backend='gcs'):
    """Copies all files from one backend to the other, updating them in Mongo.

    Files on the original backend are not deleted.
    """

    import requests.exceptions

    from pillar.api.file_storage import moving

    logging.getLogger('pillar').setLevel(logging.INFO)
    log.info('Mass-moving all files from backend %r to %r',
             src_backend, dest_backend)

    files_coll = current_app.data.driver.db['files']

    fdocs = files_coll.find({'backend': src_backend},
                            projection={'_id': True})
    copied_ok = 0
    copy_errs = 0
    try:
        for fdoc in fdocs:
            try:
                moving.change_file_storage_backend(fdoc['_id'], dest_backend)
            except moving.PrerequisiteNotMetError as ex:
                log.error('Error copying %s: %s', fdoc['_id'], ex)
                copy_errs += 1
            except requests.exceptions.HTTPError as ex:
                log.error('Error copying %s (%s): %s',
                          fdoc['_id'], ex.response.url, ex)
                copy_errs += 1
            except Exception:
                log.exception('Unexpected exception handling file %s', fdoc['_id'])
                copy_errs += 1
            else:
                copied_ok += 1
    except KeyboardInterrupt:
        log.error('Stopping due to keyboard interrupt')

    log.info('%i files copied ok', copied_ok)
    log.info('%i files could not be copied', copy_errs)


@manager_operations.command
@manager_operations.option('-p', '--project', dest='dest_proj_url',
                help='Destination project URL')
@manager_operations.option('-f', '--force', dest='force', action='store_true', default=False,
                help='Move even when already at the given project.')
@manager_operations.option('-s', '--skip-gcs', dest='skip_gcs', action='store_true', default=False,
                help='Skip file handling on GCS, just update the database.')
def move_group_node_project(node_uuid, dest_proj_url, force=False, skip_gcs=False):
    """Copies all files from one project to the other, then moves the nodes.

    The node and all its children are moved recursively.
    """

    from pillar.api.nodes import moving
    from pillar.api.utils import str2id

    logging.getLogger('pillar').setLevel(logging.INFO)

    db = current_app.db()
    nodes_coll = db['nodes']
    projs_coll = db['projects']

    # Parse CLI args and get the node, source and destination projects.
    node_uuid = str2id(node_uuid)
    node = nodes_coll.find_one({'_id': node_uuid})
    if node is None:
        log.error("Node %s can't be found!", node_uuid)
        return 1

    if node.get('parent', None):
        log.error('Node cannot have a parent, it must be top-level.')
        return 4

    src_proj = projs_coll.find_one({'_id': node['project']})
    dest_proj = projs_coll.find_one({'url': dest_proj_url})

    if src_proj is None:
        log.warning("Node's source project %s doesn't exist!", node['project'])
    if dest_proj is None:
        log.error("Destination project url='%s' doesn't exist.", dest_proj_url)
        return 2
    if src_proj['_id'] == dest_proj['_id']:
        if force:
            log.warning("Node is already at project url='%s'!", dest_proj_url)
        else:
            log.error("Node is already at project url='%s'!", dest_proj_url)
            return 3

    log.info("Mass-moving %s (%s) and children from project '%s' (%s) to '%s' (%s)",
             node_uuid, node['name'], src_proj['url'], src_proj['_id'], dest_proj['url'],
             dest_proj['_id'])

    mover = moving.NodeMover(db=db, skip_gcs=skip_gcs)
    mover.change_project(node, dest_proj)

    log.info('Done moving.')


@manager_maintenance.command
@manager_maintenance.option('-p', '--project', dest='proj_url', nargs='?',
                help='Project URL')
@manager_maintenance.option('-a', '--all', dest='all_projects', action='store_true', default=False,
                help='Replace on all projects.')
def replace_pillar_node_type_schemas(proj_url=None, all_projects=False):
    """Replaces the project's node type schemas with the standard Pillar ones.

    Non-standard node types are left alone.
    """

    if bool(proj_url) == all_projects:
        log.error('Use either --project or --all.')
        return 1

    from pillar.api.utils.authentication import force_cli_user
    force_cli_user()

    from pillar.api.node_types import PILLAR_NAMED_NODE_TYPES
    from pillar.api.utils import remove_private_keys

    projects_collection = current_app.db()['projects']

    def handle_project(project):
        log.info('Handling project %s', project['url'])
        is_public_proj = not project.get('is_private', True)

        for proj_nt in project['node_types']:
            nt_name = proj_nt['name']
            try:
                pillar_nt = PILLAR_NAMED_NODE_TYPES[nt_name]
            except KeyError:
                log.info('   - skipping non-standard node type "%s"', nt_name)
                continue

            log.info('   - replacing schema on node type "%s"', nt_name)

            # This leaves node type keys intact that aren't in Pillar's node_type_xxx definitions,
            # such as permissions.
            proj_nt.update(copy.deepcopy(pillar_nt))

            # On our own public projects we want to be able to set license stuff.
            if is_public_proj:
                proj_nt['form_schema'].pop('license_type', None)
                proj_nt['form_schema'].pop('license_notes', None)

        # Use Eve to PUT, so we have schema checking.
        db_proj = remove_private_keys(project)
        r, _, _, status = put_internal('projects', db_proj, _id=project['_id'])
        if status != 200:
            log.error('Error %i storing altered project %s %s', status, project['_id'], r)
            raise SystemExit('Error storing project, see log.')
        log.info('Project saved successfully.')

    if all_projects:
        for project in projects_collection.find():
            handle_project(project)
        return

    project = projects_collection.find_one({'url': proj_url})
    if not project:
        log.error('Project url=%s not found', proj_url)
        return 3

    handle_project(project)


@manager_maintenance.command
def remarkdown_comments():
    """Retranslates all Markdown to HTML for all comment nodes."""

    from pillar.api.nodes import convert_markdown

    nodes_collection = current_app.db()['nodes']
    comments = nodes_collection.find({'node_type': 'comment'},
                                     projection={'properties.content': 1,
                                                 'node_type': 1})

    updated = identical = skipped = errors = 0
    for node in comments:
        convert_markdown(node)
        node_id = node['_id']

        try:
            content_html = node['properties']['content_html']
        except KeyError:
            log.warning('Node %s has no content_html', node_id)
            skipped += 1
            continue

        result = nodes_collection.update_one(
            {'_id': node_id},
            {'$set': {'properties.content_html': content_html}}
        )
        if result.matched_count != 1:
            log.error('Unable to update node %s', node_id)
            errors += 1
            continue

        if result.modified_count:
            updated += 1
        else:
            identical += 1

    log.info('updated  : %i', updated)
    log.info('identical: %i', identical)
    log.info('skipped  : %i', skipped)
    log.info('errors   : %i', errors)


@manager_maintenance.command
@manager_maintenance.option('-p', '--project', dest='proj_url', nargs='?',
                help='Project URL')
@manager_maintenance.option('-a', '--all', dest='all_projects', action='store_true', default=False,
                help='Replace on all projects.')
def upgrade_attachment_schema(proj_url=None, all_projects=False):
    """Replaces the project's attachments with the new schema.

    Updates both the schema definition and the nodes with attachments (asset, page, post).
    """

    if bool(proj_url) == all_projects:
        log.error('Use either --project or --all.')
        return 1

    from pillar.api.utils.authentication import force_cli_user
    force_cli_user()

    from pillar.api.node_types.asset import node_type_asset
    from pillar.api.node_types.page import node_type_page
    from pillar.api.node_types.post import node_type_post
    from pillar.api.node_types import _attachments_embedded_schema
    from pillar.api.utils import remove_private_keys

    # Node types that support attachments
    node_types = (node_type_asset, node_type_page, node_type_post)
    nts_by_name = {nt['name']: nt for nt in node_types}

    db = current_app.db()
    projects_coll = db['projects']
    nodes_coll = db['nodes']

    def handle_project(project):
        log.info('Handling project %s', project['url'])

        replace_schemas(project)
        replace_attachments(project)

    def replace_schemas(project):
        for proj_nt in project['node_types']:
            nt_name = proj_nt['name']
            if nt_name not in nts_by_name:
                continue

            log.info('   - replacing attachment schema on node type "%s"', nt_name)
            pillar_nt = nts_by_name[nt_name]
            proj_nt['dyn_schema']['attachments'] = copy.deepcopy(_attachments_embedded_schema)

            # Make the form schema the same as the official Pillar one, but only for attachments.
            try:
                pillar_form_schema = pillar_nt['form_schema']['attachments']
            except KeyError:
                proj_nt['form_schema'].pop('attachments', None)
            else:
                proj_nt['form_schema']['attachments'] = pillar_form_schema

        # Use Eve to PUT, so we have schema checking.
        db_proj = remove_private_keys(project)
        r, _, _, status = put_internal('projects', db_proj, _id=project['_id'])
        if status != 200:
            log.error('Error %i storing altered project %s %s', status, project['_id'], r)
            raise SystemExit('Error storing project, see log.')
        log.info('Project saved successfully.')

    def replace_attachments(project):
        log.info('Upgrading nodes for project %s', project['url'])
        nodes = nodes_coll.find({
            '_deleted': False,
            'project': project['_id'],
            'node_type': {'$in': list(nts_by_name)},
            'properties.attachments': {'$exists': True},
        })
        for node in nodes:
            attachments = node[u'properties'][u'attachments']
            if isinstance(attachments, dict):
                # This node has already been upgraded.
                continue

            log.info('    - Updating schema on node %s (%s)', node['_id'], node.get('name'))
            new_atts = {}
            for field_info in attachments:
                for attachment in field_info.get('files', []):
                    new_atts[attachment[u'slug']] = {u'oid': attachment[u'file']}

            node[u'properties'][u'attachments'] = new_atts

            # Use Eve to PUT, so we have schema checking.
            db_node = remove_private_keys(node)
            r, _, _, status = put_internal('nodes', db_node, _id=node['_id'])
            if status != 200:
                log.error('Error %i storing altered node %s %s', status, node['_id'], r)
                raise SystemExit('Error storing node; see log.')

    if all_projects:
        for proj in projects_coll.find():
            handle_project(proj)
        return

    proj = projects_coll.find_one({'url': proj_url})
    if not proj:
        log.error('Project url=%s not found', proj_url)
        return 3

    handle_project(proj)


@manager_setup.command
def create_blog(proj_url):
    """Adds a blog to the project."""

    from pillar.api.utils.authentication import force_cli_user
    from pillar.api.utils import node_type_utils
    from pillar.api.node_types.blog import node_type_blog
    from pillar.api.node_types.post import node_type_post
    from pillar.api.utils import remove_private_keys

    force_cli_user()

    db = current_app.db()

    # Add the blog & post node types to the project.
    projects_coll = db['projects']
    proj = projects_coll.find_one({'url': proj_url})
    if not proj:
        log.error('Project url=%s not found', proj_url)
        return 3

    node_type_utils.add_to_project(proj,
                                   (node_type_blog, node_type_post),
                                   replace_existing=False)

    proj_id = proj['_id']
    r, _, _, status = put_internal('projects', remove_private_keys(proj), _id=proj_id)
    if status != 200:
        log.error('Error %i storing altered project %s %s', status, proj_id, r)
        return 4
    log.info('Project saved successfully.')

    # Create a blog node.
    nodes_coll = db['nodes']
    blog = nodes_coll.find_one({'node_type': 'blog', 'project': proj_id})
    if not blog:
        blog = {
            u'node_type': node_type_blog['name'],
            u'name': u'Blog',
            u'description': u'',
            u'properties': {},
            u'project': proj_id,
        }
        r, _, _, status = post_internal('nodes', blog)
        if status != 201:
            log.error('Error %i storing blog node: %s', status, r)
            return 4
        log.info('Blog node saved successfully: %s', r)
    else:
        log.info('Blog node already exists: %s', blog)

    return 0

manager.add_command("maintenance", manager_maintenance)
manager.add_command("setup", manager_setup)
manager.add_command("operations", manager_operations)

pillar/cli/__init__.py (new file, 28 lines)
@@ -0,0 +1,28 @@
"""Commandline interface.

Run commands with 'flask <command>'
"""

import logging

from flask_script import Manager

from pillar import current_app
from pillar.cli.celery import manager_celery
from pillar.cli.maintenance import manager_maintenance
from pillar.cli.operations import manager_operations
from pillar.cli.setup import manager_setup
from pillar.cli.elastic import manager_elastic
from . import badges

from pillar.cli import translations

log = logging.getLogger(__name__)
manager = Manager(current_app)

manager.add_command('celery', manager_celery)
manager.add_command("maintenance", manager_maintenance)
manager.add_command("setup", manager_setup)
manager.add_command("operations", manager_operations)
manager.add_command("elastic", manager_elastic)
manager.add_command("badges", badges.manager)
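
Each sub-Manager groups related commands under a prefix, so the root manager dispatches e.g. 'celery worker' or 'badges sync'. A hypothetical entry-point sketch; Pillar's real management script may look different:

    # manage.py (hypothetical): running it lets flask_script dispatch
    # e.g. "python manage.py celery worker" or "python manage.py badges sync".
    from pillar.cli import manager

    if __name__ == '__main__':
        manager.run()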
 | 
			
		||||
							
								
								
									
										39
									
								
								pillar/cli/badges.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										39
									
								
								pillar/cli/badges.py
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,39 @@
import datetime
import logging

from flask_script import Manager
from pillar import current_app, badge_sync
from pillar.api.utils import utcnow

log = logging.getLogger(__name__)

manager = Manager(current_app, usage="Badge operations")


@manager.option('-u', '--user', dest='email', default='', help='Email address of the user to sync')
@manager.option('-a', '--all', dest='sync_all', action='store_true', default=False,
                help='Sync all users')
@manager.option('--go', action='store_true', default=False,
                help='Actually perform the sync; otherwise it is a dry-run.')
def sync(email: str = '', sync_all: bool = False, go: bool = False):
    if bool(email) == bool(sync_all):
        raise ValueError('Use either --user or --all.')

    if email:
        users_coll = current_app.db('users')
        db_user = users_coll.find_one({'email': email}, projection={'_id': True})
        if not db_user:
            raise ValueError(f'No user with email {email!r} found')
        specific_user = db_user['_id']
    else:
        specific_user = None

    if not go:
        log.info('Performing dry-run, not going to change the user database.')
    start_time = utcnow()
    badge_sync.refresh_all_badges(specific_user, dry_run=not go,
                                  timelimit=datetime.timedelta(hours=1))
    end_time = utcnow()
    log.info('%s took %s (H:MM:SS)',
             'Updating user badges' if go else 'Dry-run',
             end_time - start_time)
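The sync command above is a dry-run by default; only --go makes it write to the user database. Hypothetical invocations, assuming the manage.py entry point sketched earlier:

# Dry-run for a single user (no database changes):
#   python manage.py badges sync --user artist@example.com
# Actually sync badges for all users:
#   python manage.py badges sync --all --go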
69  pillar/cli/celery.py  Normal file
@@ -0,0 +1,69 @@
import logging

from flask_script import Manager
from pillar import current_app

log = logging.getLogger(__name__)

manager_celery = Manager(
    current_app, usage="Celery operations, like starting a worker or showing the queue")


@manager_celery.option('args', nargs='*')
def worker(args):
    """Runs a Celery worker."""

    import sys

    argv0 = f'{sys.argv[0]} operations worker'
    argvother = [
        '-E',
        '-l', 'INFO',
        '--concurrency', '1',
        '--pool', 'solo',  # No preforking, as PyMongo can't handle connect-before-fork.
                           # We might get rid of this and go for the default Celery worker
                           # preforking concurrency model, *if* we can somehow reset the
                           # PyMongo client and reconnect after forking.
    ] + list(args)

    current_app.celery.worker_main([argv0] + argvother)


@manager_celery.command
def queue():
    """Shows queued Celery tasks."""

    from pprint import pprint

    # Inspect all nodes.
    i = current_app.celery.control.inspect()

    print(50 * '=')
    print('Tasks that have an ETA or are scheduled for later processing:')
    pprint(i.scheduled())

    print()
    print('Tasks that are currently active:')
    pprint(i.active())

    print()
    print('Tasks that have been claimed by workers:')
    pprint(i.reserved())
    print(50 * '=')


@manager_celery.command
def purge():
    """Deletes queued Celery tasks."""

    log.warning('Purging all pending Celery tasks.')
    current_app.celery.control.purge()


@manager_celery.option('args', nargs='*')
def beat(args):
    """Runs the Celery beat."""

    from celery.bin.beat import beat

    return beat(app=current_app.celery).run_from_argv('je moeder', args, command='beat')
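The '--pool solo' choice in worker() above works around PyMongo's connect-before-fork limitation. A hedged sketch of the usual alternative, reconnecting per process, in case the preforking model is ever adopted (get_client and the connection URL are hypothetical, not part of this diff):

import os
from pymongo import MongoClient

_client = None
_client_pid = None

def get_client() -> MongoClient:
    """Return a MongoClient owned by the current process.

    PyMongo clients are not fork-safe, so a preforked worker must not reuse
    a client created in the parent process.
    """
    global _client, _client_pid
    if _client is None or _client_pid != os.getpid():
        # connect=False defers the actual connection until first use.
        _client = MongoClient('mongodb://localhost:27017/', connect=False)
        _client_pid = os.getpid()
    return _client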
164  pillar/cli/elastic.py  Normal file
@@ -0,0 +1,164 @@
import concurrent.futures
import logging
import typing

import bson
from flask_script import Manager

from pillar import current_app
from pillar.api.search import index

log = logging.getLogger(__name__)

manager_elastic = Manager(
    current_app, usage="Elastic utilities")

name_to_task = {
    'nodes': index.ResetNodeIndex,
    'users': index.ResetUserIndex,
}
REINDEX_THREAD_COUNT = 5


@manager_elastic.option('indices', nargs='*')
def reset_index(indices: typing.List[str]):
    """
    Destroy and recreate elastic indices

    nodes, users
    """

    with current_app.app_context():
        if not indices:
            indices = name_to_task.keys()

        for elk_index in indices:
            try:
                task = name_to_task[elk_index]()
            except KeyError:
                raise SystemError('Unknown elk_index, choose from %s' %
                                  (', '.join(name_to_task.keys())))
            task.execute()


def _reindex_users():
    db = current_app.db()
    users_coll = db['users']

    # Note that this also finds service accounts, which are filtered out
    # in prepare_user_data(…)
    users = users_coll.find()
    user_count = users.count()
    indexed = 0

    log.info('Reindexing %d users in Elastic', user_count)

    from pillar.celery.search_index_tasks import prepare_user_data
    from pillar.api.search import elastic_indexing

    app = current_app.real_app

    def do_work(work_idx_user):
        nonlocal indexed
        idx, user = work_idx_user

        with app.app_context():
            if idx % 100 == 0:
                log.info('Processing user %d/%d', idx+1, user_count)
            to_index = prepare_user_data('', user=user)
            if not to_index:
                log.debug('not indexing user %s', user)
                return

            try:
                elastic_indexing.push_updated_user(to_index)
            except (KeyError, AttributeError):
                log.exception('Field is missing for %s', user)
            else:
                indexed += 1

    with concurrent.futures.ThreadPoolExecutor(max_workers=REINDEX_THREAD_COUNT) as executor:
        result = executor.map(do_work, enumerate(users))

        # When an exception occurs, it's enough to just iterate over the results.
        # That will re-raise the exception in the main thread.
        for ob in result:
            log.debug('result: %s', ob)
    log.info('Reindexed %d/%d users', indexed, user_count)


def _public_project_ids() -> typing.List[bson.ObjectId]:
    """Returns a list of ObjectIDs of public projects.

    Memoized in setup_app().
    """

    proj_coll = current_app.db('projects')
    result = proj_coll.find({'is_private': False}, {'_id': 1})
    return [p['_id'] for p in result]


def _reindex_nodes():
    db = current_app.db()
    nodes_coll = db['nodes']
    nodes = nodes_coll.find({
        'project': {'$in': _public_project_ids()},
        '_deleted': {'$ne': True},
    })
    node_count = nodes.count()
    indexed = 0

    log.info('Reindexing %d nodes in Elastic', node_count)
    app = current_app.real_app

    from pillar.celery.search_index_tasks import prepare_node_data
    from pillar.api.search import elastic_indexing

    def do_work(work_idx_node):
        nonlocal indexed

        idx, node = work_idx_node
        with app.app_context():
            if idx % 100 == 0:
                log.info('Processing node %d/%d', idx+1, node_count)
            try:
                to_index = prepare_node_data('', node=node)
                elastic_indexing.index_node_save(to_index)
            except (KeyError, AttributeError):
                log.exception('Node %s is missing a field', node)
            else:
                indexed += 1

    with concurrent.futures.ThreadPoolExecutor(max_workers=REINDEX_THREAD_COUNT) as executor:
        result = executor.map(do_work, enumerate(nodes))
        # When an exception occurs, it's enough to just iterate over the results.
        # That will re-raise the exception in the main thread.
        for ob in result:
            log.debug('result: %s', ob)
    log.info('Reindexed %d/%d nodes', indexed, node_count)


@manager_elastic.option('indexname', nargs='?')
@manager_elastic.option('-r', '--reset', default=False, action='store_true')
def reindex(indexname='', reset=False):
    import time
    import datetime

    start = time.time()

    if reset:
        log.info('Resetting first')
        reset_index([indexname] if indexname else [])

    if not indexname:
        log.info('Reindexing everything...')
        _reindex_nodes()
        _reindex_users()
    elif indexname == 'users':
        log.info('Indexing %s', indexname)
        _reindex_users()
    elif indexname == 'nodes':
        log.info('Indexing %s', indexname)
        _reindex_nodes()
    duration = time.time() - start
    log.info('Reindexing took %s', datetime.timedelta(seconds=duration))
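The 'iterate over the results' comments in _reindex_users() and _reindex_nodes() above rely on documented concurrent.futures behaviour: an exception raised in a worker is stored and re-raised in the main thread when the corresponding result is consumed. A standalone sketch:

import concurrent.futures

def work(n: int) -> int:
    if n == 3:
        raise ValueError('boom')
    return n * n

with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
    try:
        for result in executor.map(work, range(5)):
            print(result)  # prints 0, 1, 4, then the stored exception surfaces
    except ValueError as ex:
        print('re-raised in the main thread:', ex)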
1177  pillar/cli/maintenance.py  Normal file
(File diff suppressed because it is too large.)
250  pillar/cli/operations.py  Normal file
@@ -0,0 +1,250 @@
import logging

from flask_script import Manager
from pillar import current_app

log = logging.getLogger(__name__)

manager_operations = Manager(
    current_app, usage="Backend operations, like moving nodes across projects")


@manager_operations.command
def file_change_backend(file_id, dest_backend='gcs'):
    """Given a file document, move it to the specified backend (if not already
    there) and update the document to reflect that.
    Files on the original backend are not deleted automatically.
    """

    from pillar.api.file_storage.moving import change_file_storage_backend
    change_file_storage_backend(file_id, dest_backend)


@manager_operations.command
def mass_copy_between_backends(src_backend='cdnsun', dest_backend='gcs'):
    """Copies all files from one backend to the other, updating them in Mongo.

    Files on the original backend are not deleted.
    """

    import requests.exceptions

    from pillar.api.file_storage import moving

    logging.getLogger('pillar').setLevel(logging.INFO)
    log.info('Mass-moving all files from backend %r to %r',
             src_backend, dest_backend)

    files_coll = current_app.data.driver.db['files']

    fdocs = files_coll.find({'backend': src_backend},
                            projection={'_id': True})
    copied_ok = 0
    copy_errs = 0
    try:
        for fdoc in fdocs:
            try:
                moving.change_file_storage_backend(fdoc['_id'], dest_backend)
            except moving.PrerequisiteNotMetError as ex:
                log.error('Error copying %s: %s', fdoc['_id'], ex)
                copy_errs += 1
            except requests.exceptions.HTTPError as ex:
                log.error('Error copying %s (%s): %s',
                          fdoc['_id'], ex.response.url, ex)
                copy_errs += 1
            except Exception:
                log.exception('Unexpected exception handling file %s', fdoc['_id'])
                copy_errs += 1
            else:
                copied_ok += 1
    except KeyboardInterrupt:
        log.error('Stopping due to keyboard interrupt')

    log.info('%i files copied ok', copied_ok)
    log.info('%i files were not copied', copy_errs)


@manager_operations.option('dest_proj_url', help='Destination project URL')
@manager_operations.option('node_uuid', help='ID of the node to move')
@manager_operations.option('-f', '--force', dest='force', action='store_true', default=False,
                           help='Move even when already at the given project.')
@manager_operations.option('-s', '--skip-gcs', dest='skip_gcs', action='store_true', default=False,
                           help='Skip file handling on GCS, just update the database.')
def move_group_node_project(node_uuid, dest_proj_url, force=False, skip_gcs=False):
    """Copies all files from one project to the other, then moves the nodes.

    The node and all its children are moved recursively.
    """

    from pillar.api.nodes import moving
    from pillar.api.utils import str2id

    logging.getLogger('pillar').setLevel(logging.INFO)

    db = current_app.db()
    nodes_coll = db['nodes']
    projs_coll = db['projects']

    # Parse CLI args and get the node, source and destination projects.
    node_uuid = str2id(node_uuid)
    node = nodes_coll.find_one({'_id': node_uuid})
    if node is None:
        log.error("Node %s can't be found!", node_uuid)
        return 1

    if node.get('parent', None):
        log.error('Node cannot have a parent, it must be top-level.')
        return 4

    src_proj = projs_coll.find_one({'_id': node['project']})
    dest_proj = projs_coll.find_one({'url': dest_proj_url})

    if src_proj is None:
        log.warning("Node's source project %s doesn't exist!", node['project'])
    if dest_proj is None:
        log.error("Destination project url='%s' doesn't exist.", dest_proj_url)
        return 2
    if src_proj['_id'] == dest_proj['_id']:
        if force:
            log.warning("Node is already at project url='%s'!", dest_proj_url)
        else:
            log.error("Node is already at project url='%s'!", dest_proj_url)
            return 3

    log.info("Mass-moving %s (%s) and children from project '%s' (%s) to '%s' (%s)",
             node_uuid, node['name'], src_proj['url'], src_proj['_id'], dest_proj['url'],
             dest_proj['_id'])

    mover = moving.NodeMover(db=db, skip_gcs=skip_gcs)
    mover.change_project(node, dest_proj)

    log.info('Done moving.')


@manager_operations.command
def merge_project(src_proj_url, dest_proj_url):
    """Move all nodes and files from one project to the other."""

    from pillar.api.projects import merging

    logging.getLogger('pillar').setLevel(logging.INFO)

    log.info('Current server name is %s', current_app.config['SERVER_NAME'])
    if not current_app.config['SERVER_NAME']:
        log.fatal('SERVER_NAME configuration is missing, would result in malformed file links.')
        return 5

    # Parse CLI args and get source and destination projects.
    projs_coll = current_app.db('projects')
    src_proj = projs_coll.find_one({'url': src_proj_url}, projection={'_id': 1})
    dest_proj = projs_coll.find_one({'url': dest_proj_url}, projection={'_id': 1})

    if src_proj is None:
        log.fatal("Source project url='%s' doesn't exist.", src_proj_url)
        return 1
    if dest_proj is None:
        log.fatal("Destination project url='%s' doesn't exist.", dest_proj_url)
        return 2
    dpid = dest_proj['_id']
    spid = src_proj['_id']
    if spid == dpid:
        log.fatal("Source and destination projects are the same!")
        return 3

    print()
    try:
        input(f'Press ENTER to start moving ALL NODES AND FILES '
              f'from {src_proj_url} to {dest_proj_url}')
    except KeyboardInterrupt:
        print()
        print('Aborted')
        return 4
    print()

    merging.merge_project(spid, dpid)
    log.info('Done moving.')


@manager_operations.command
def index_users_rebuild():
    """Clear users index, update settings and reindex all users."""

    import concurrent.futures

    from pillar.api.utils.algolia import algolia_index_user_save

    users_index = current_app.algolia_index_users
    if users_index is None:
        log.error('Algolia is not configured properly, unable to do anything!')
        return 1

    log.info('Dropping existing index: %s', users_index)
    users_index.clear_index()
    index_users_update_settings()

    db = current_app.db()
    users = db['users'].find({'_deleted': {'$ne': True}})
    user_count = users.count()

    log.info('Reindexing all %i users', user_count)

    real_current_app = current_app._get_current_object()._get_current_object()

    def do_user(user):
        with real_current_app.app_context():
            algolia_index_user_save(user)

    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
        future_to_user = {executor.submit(do_user, user): user
                          for user in users}
        for idx, future in enumerate(concurrent.futures.as_completed(future_to_user)):
            user = future_to_user[future]
            user_ident = user.get('email') or user.get('_id')
            try:
                future.result()
            except Exception:
                log.exception('Error updating user %i/%i %s', idx + 1, user_count, user_ident)
            else:
                log.info('Updated user %i/%i %s', idx + 1, user_count, user_ident)


@manager_operations.command
def index_users_update_settings():
    """Configure the indexing backend as required by the project."""
    users_index = current_app.algolia_index_users

    # Automatically creates the index if it does not exist.
    users_index.set_settings({
        'searchableAttributes': [
            'full_name',
            'username',
            'email',
            'unordered(roles)'
        ]
    })


@manager_operations.command
def hash_auth_tokens():
    """Hashes all unhashed authentication tokens."""

    from pymongo.results import UpdateResult
    from pillar.api.utils.authentication import hash_auth_token

    tokens_coll = current_app.db('tokens')
    query = {'token': {'$exists': True}}
    cursor = tokens_coll.find(query, projection={'token': 1, '_id': 1})
    log.info('Updating %d tokens', cursor.count())

    for token_doc in cursor:
        hashed_token = hash_auth_token(token_doc['token'])
        token_id = token_doc['_id']
        res: UpdateResult = tokens_coll.update_one(
            {'_id': token_id},
            {'$set': {'token_hashed': hashed_token},
             '$unset': {'token': 1}},
        )
        if res.modified_count != 1:
            raise ValueError(f'Unable to update token {token_id}!')

    log.info('Done')
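Note the contrast with the executor.map() pattern in pillar/cli/elastic.py: index_users_rebuild() above uses submit() plus as_completed(), which lets each failure be logged per item instead of aborting the whole run. A standalone sketch:

import concurrent.futures

def work(n: int) -> int:
    if n % 3 == 0:
        raise ValueError(f'bad item {n}')
    return n

with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
    futures = {executor.submit(work, n): n for n in range(5)}
    for future in concurrent.futures.as_completed(futures):
        try:
            print('ok:', future.result())
        except ValueError as ex:
            # Unlike map(), the loop continues with the remaining futures.
            print('failed:', ex)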
160  pillar/cli/setup.py  Normal file
@@ -0,0 +1,160 @@
import logging

from flask_script import Manager

from pillar import current_app

log = logging.getLogger(__name__)

manager_setup = Manager(
    current_app, usage="Setup utilities, like setup_db() or create_blog()")


@manager_setup.command
def setup_db(admin_email):
    """Setup the database
    - Create admin, subscriber and demo Group collection
    - Create admin user (must use valid blender-id credentials)
    - Create one project
    """

    # Create default groups
    groups_list = []
    for group in ['admin', 'subscriber', 'demo']:
        g = {'name': group}
        g = current_app.post_internal('groups', g)
        groups_list.append(g[0]['_id'])
        print("Creating group {0}".format(group))

    # Create admin user
    user = {'username': admin_email,
            'groups': groups_list,
            'roles': ['admin', 'subscriber', 'demo'],
            'settings': {'email_communications': 1},
            'auth': [],
            'full_name': admin_email,
            'email': admin_email}
    result, _, _, status = current_app.post_internal('users', user)
    if status != 201:
        raise SystemExit('Error creating user {}: {}'.format(admin_email, result))
    user.update(result)
    print("Created user {0}".format(user['_id']))

    # Create a default project by faking a POST request.
    with current_app.test_request_context(data={'project_name': 'Default Project'}):
        from flask import g
        from pillar.auth import UserClass
        from pillar.api.projects import routes as proj_routes

        g.current_user = UserClass.construct('', user)

        proj_routes.create_project(overrides={'url': 'default-project',
                                              'is_private': False})


@manager_setup.command
def create_badger_account(email, badges):
    """
    Creates a new service account that can give badges (i.e. roles).

    :param email: email address associated with the account
    :param badges: single space-separated argument containing the roles
        this account can assign and revoke.
    """

    create_service_account(email, ['badger'], {'badger': badges.strip().split()})


@manager_setup.command
def create_local_user_account(email, password):
    from pillar.api.local_auth import create_local_user
    create_local_user(email, password)


@manager_setup.command
def badger(action, user_email, role):
    from pillar.api import service

    with current_app.app_context():
        service.fetch_role_to_group_id_map()
        response, status = service.do_badger(action, role=role, user_email=user_email)

    if status == 204:
        log.info('Done.')
    else:
        log.info('Response: %s', response)
        log.info('Status  : %i', status)


@manager_setup.command
def create_blog(proj_url):
    """Adds a blog to the project."""

    from pillar.api.utils.authentication import force_cli_user
    from pillar.api.utils import node_type_utils
    from pillar.api.node_types.blog import node_type_blog
    from pillar.api.node_types.post import node_type_post
    from pillar.api.utils import remove_private_keys

    force_cli_user()

    db = current_app.db()

    # Add the blog & post node types to the project.
    projects_coll = db['projects']
    proj = projects_coll.find_one({'url': proj_url})
    if not proj:
        log.error('Project url=%s not found', proj_url)
        return 3

    node_type_utils.add_to_project(proj,
                                   (node_type_blog, node_type_post),
                                   replace_existing=False)

    proj_id = proj['_id']
    r, _, _, status = current_app.put_internal('projects', remove_private_keys(proj), _id=proj_id)
    if status != 200:
        log.error('Error %i storing altered project %s %s', status, proj_id, r)
        return 4
    log.info('Project saved successfully.')

    # Create a blog node.
    nodes_coll = db['nodes']
    blog = nodes_coll.find_one({'node_type': 'blog', 'project': proj_id})
    if not blog:
        blog = {
            'node_type': node_type_blog['name'],
            'name': 'Blog',
            'description': '',
            'properties': {},
            'project': proj_id,
        }
        r, _, _, status = current_app.post_internal('nodes', blog)
        if status != 201:
            log.error('Error %i storing blog node: %s', status, r)
            return 4
        log.info('Blog node saved successfully: %s', r)
    else:
        log.info('Blog node already exists: %s', blog)

    return 0


def create_service_account(email, service_roles, service_definition,
                           *, full_name: str = None):
    from pillar.api import service
    from pillar.api.utils import dumps

    account, token = service.create_service_account(
        email,
        service_roles,
        service_definition,
        full_name=full_name,
    )

    print('Service account information:')
    print(dumps(account, indent=4, sort_keys=True))
    print()
    print('Access token: %s' % token['token'])
    print('  expires on: %s' % token['expire_time'])
    return account, token
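The setup_db() command above reuses a web view from CLI code by faking a request with test_request_context(). A minimal standalone sketch of the same trick (the Flask app and view here are placeholders, not Pillar code):

import flask

app = flask.Flask(__name__)

def create_thing() -> str:
    # Stands in for a view function that reads submitted form data.
    return flask.request.form['name']

with app.test_request_context(method='POST', data={'name': 'Default Project'}):
    print(create_thing())  # prints 'Default Project'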
104  pillar/cli/translations.py  Normal file
@@ -0,0 +1,104 @@
import argparse
import contextlib
import pathlib
import subprocess
import sys
import typing

BABEL_CONFIG = pathlib.Path('translations.cfg')


@contextlib.contextmanager
def create_messages_pot() -> typing.Iterator[pathlib.Path]:
    """Extract the translatable strings from the source code.

    This creates a temporary messages.pot file, to be used to init or
    update the translation .mo files.

    It works as a generator, yielding the temporarily created pot file.
    The messages.pot file will be deleted at the end if all went well.

    :return: The path of the messages.pot file created.
    """
    if not BABEL_CONFIG.is_file():
        print("No translations config file found: %s" % (BABEL_CONFIG,))
        sys.exit(-1)

    messages_pot = pathlib.Path('messages.pot')
    subprocess.run(('pybabel', 'extract', '-F', BABEL_CONFIG, '-k', 'lazy_gettext',
                    '-o', messages_pot, '.'))
    yield messages_pot
    messages_pot.unlink()


def init(locale):
    """
    Initialize the translations for a new language.
    """
    with create_messages_pot() as messages_pot:
        subprocess.run(('pybabel', 'init', '-i', messages_pot, '-d', 'translations', '-l', locale))


def update():
    """
    Update the strings to be translated.
    """
    with create_messages_pot() as messages_pot:
        subprocess.run(('pybabel', 'update', '-i', messages_pot, '-d', 'translations'))


def compile():
    """
    Compile the translation to be used.
    """
    if pathlib.Path('translations').is_dir():
        subprocess.run(('pybabel', 'compile', '-d', 'translations'))
    else:
        print("No translations folder available")


def parse_arguments() -> argparse.Namespace:
    """
    Parse command-line arguments.
    """
    parser = argparse.ArgumentParser(description='Translate Pillar')

    parser.add_argument(
            'mode',
            type=str,
            help='Init once, update often, compile before deploying.',
            choices=['init', 'update', 'compile'])

    parser.add_argument(
            'languages',
            nargs='*',
            type=str,
            help='Languages to initialize: pt it es ...')

    args = parser.parse_args()
    if args.mode == 'init' and not args.languages:
        parser.error("init requires languages")

    return args


def main():
    """
    When calling from the setup.py entry-point we need to parse the arguments
    and init/update/compile the translation strings.
    """
    args = parse_arguments()

    if args.mode == 'init':
        for language in args.languages:
            init(language)
    elif args.mode == 'update':
        update()
    else:  # mode == 'compile'
        compile()


if __name__ == '__main__':
    main()
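One caveat with create_messages_pot() above: the unlink() after the yield only runs when the with-body completes without raising, so a failing pybabel step can leave messages.pot behind. A sketch of the try/finally variant that always cleans up:

import contextlib
import pathlib
import typing

@contextlib.contextmanager
def temporary_pot(path: pathlib.Path = pathlib.Path('messages.pot')) -> typing.Iterator[pathlib.Path]:
    path.touch()  # stands in for the pybabel extract step
    try:
        yield path
    finally:
        path.unlink()  # runs even if the caller raises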
168  pillar/config.py
@@ -1,6 +1,8 @@
from collections import defaultdict
import datetime
import os.path
from os import getenv
from collections import defaultdict

import requests.certs

# Certificate file for communication with other systems.
@@ -13,17 +15,27 @@ RFC1123_DATE_FORMAT = '%a, %d %b %Y %H:%M:%S GMT'
PILLAR_SERVER_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

SCHEME = 'https'
PREFERRED_URL_SCHEME = SCHEME

# Be sure to set this in your config_local:
# SERVER_NAME = 'pillar.local:5000'
# PILLAR_SERVER_ENDPOINT = f'{SCHEME}://{SERVER_NAME}/api/'

STORAGE_DIR = getenv('PILLAR_STORAGE_DIR', '/data/storage/pillar')
PORT = 5000
HOST = '0.0.0.0'
DEBUG = False

SECRET_KEY = '123'
# Flask and CSRF secret key; generate local one with:
# python3 -c 'import secrets; print(secrets.token_urlsafe(128))'
SECRET_KEY = ''

# Authentication token hashing key. If empty falls back to UTF8-encoded SECRET_KEY with a warning.
# Not used to hash new tokens, but it is used to check pre-existing hashed tokens.
AUTH_TOKEN_HMAC_KEY = b''

# Authentication settings
BLENDER_ID_ENDPOINT = 'http://blender_id:8000/'

PILLAR_SERVER_ENDPOINT = 'http://pillar:5001/api/'
BLENDER_ID_ENDPOINT = 'http://id.local:8000/'

CDN_USE_URL_SIGNING = True
CDN_SERVICE_DOMAIN_PROTOCOL = 'https'
@@ -53,17 +65,33 @@ BIN_RSYNC = '/usr/bin/rsync'

GCLOUD_APP_CREDENTIALS = 'google_app.json'
GCLOUD_PROJECT = '-SECRET-'
# Used for cross-verification on various Google sites (eg. YouTube)
GOOGLE_SITE_VERIFICATION = ''

ADMIN_USER_GROUP = '5596e975ea893b269af85c0e'
SUBSCRIBER_USER_GROUP = '5596e975ea893b269af85c0f'
BUGSNAG_API_KEY = ''

SENTRY_CONFIG = {
    'dsn': '-set-in-config-local-',
    # 'release': raven.fetch_git_sha(os.path.dirname(__file__)),
}
# See https://docs.sentry.io/clients/python/integrations/flask/#settings
SENTRY_USER_ATTRS = ['username', 'full_name', 'email', 'objectid']

ALGOLIA_USER = '-SECRET-'
ALGOLIA_API_KEY = '-SECRET-'
ALGOLIA_INDEX_USERS = 'dev_Users'
ALGOLIA_INDEX_NODES = 'dev_Nodes'

SEARCH_BACKEND = 'algolia'  # algolia, elastic
SEARCH_BACKENDS = ('elastic', )

ELASTIC_INDICES = {
    'NODE': 'nodes',
    'USER': 'users',
}

ELASTIC_SEARCH_HOSTS = ['elastic:9200']


ZENCODER_API_KEY = '-SECRET-'
ZENCODER_NOTIFICATIONS_SECRET = '-SECRET-'
@@ -71,6 +99,10 @@ ZENCODER_NOTIFICATIONS_URL = 'http://zencoderfetcher/'

ENCODING_BACKEND = 'zencoder'  # local, flamenco

# Storage solution for uploaded files. If 'local' is selected, make sure you specify the SERVER_NAME
# config value as well, since it will help building correct URLs when indexing.
STORAGE_BACKEND = 'local'  # gcs

# Validity period of links, per file storage backend. Expressed in seconds.
# Shouldn't be more than a year, as this isn't supported by HTTP/1.1.
FILE_LINK_VALIDITY = defaultdict(
@@ -78,13 +110,33 @@ FILE_LINK_VALIDITY = defaultdict(
    gcs=3600 * 23,  # 23 hours for Google Cloud Storage.
)

# Roles with full GET-access to all variations of files.
FULL_FILE_ACCESS_ROLES = {u'admin', u'subscriber', u'demo'}
# Capability with GET-access to all variations of files.
FULL_FILE_ACCESS_CAP = 'subscriber'

# Client and Subclient IDs for Blender ID
BLENDER_ID_CLIENT_ID = 'SPECIAL-SNOWFLAKE-57'
BLENDER_ID_SUBCLIENT_ID = 'PILLAR'

# Blender ID user info API endpoint URL and auth token, used for
# reconciling subscribers and updating their info from /u/.
# The token requires the 'userinfo' scope.
BLENDER_ID_USER_INFO_API = 'http://blender-id:8000/api/user/'
BLENDER_ID_USER_INFO_TOKEN = '-set-in-config-local-'

# Collection of supported OAuth providers (Blender ID, Facebook and Google).
# Example entry:
# OAUTH_CREDENTIALS = {
#    'blender-id': {
#        'id': 'CLOUD-OF-SNOWFLAKES-42',
#        'secret': 'thesecret',
#     }
# }
# OAuth providers are defined in pillar.auth.oauth
OAUTH_CREDENTIALS = {
    'blender-id': {},
    'facebook': {},
    'google': {},
}

# See https://docs.python.org/2/library/logging.config.html#configuration-dictionary-schema
LOGGING = {
@@ -117,8 +169,9 @@ SHORT_CODE_LENGTH = 6  # characters
# People are allowed this many bytes per uploaded file.
FILESIZE_LIMIT_BYTES_NONSUBS = 32 * 2 ** 20
# Unless they have one of those roles.
ROLES_FOR_UNLIMITED_UPLOADS = {u'subscriber', u'demo', u'admin'}
ROLES_FOR_UNLIMITED_UPLOADS = {'subscriber', 'demo', 'admin'}

ROLES_FOR_COMMENT_VOTING = {'subscriber', 'demo'}

#############################################
# Old pillar-web config:
@@ -134,16 +187,97 @@ GIT = 'git'
RENDER_HOME_AS_REGULAR_PROJECT = False


# Authentication token for the Urler service. If None, defaults
# to the authentication token of the current user.
URLER_SERVICE_AUTH_TOKEN = None


# Blender Cloud add-on version. This updates the value in all places in the
# front-end.
BLENDER_CLOUD_ADDON_VERSION = '1.4'

EXTERNAL_SUBSCRIPTIONS_MANAGEMENT_SERVER = 'https://store.blender.org/api/'

# Certificate file for communication with other systems.
TLS_CERT_FILE = requests.certs.where()

CELERY_BACKEND = 'redis://redis/1'
CELERY_BROKER = 'amqp://guest:guest@rabbit//'

# This configures the Celery task scheduler in such a way that we don't
# have to import the pillar.celery.XXX modules. Remember to run
# 'manage.py celery beat' too, otherwise those will never run.
CELERY_BEAT_SCHEDULE = {
    'regenerate-expired-links': {
        'task': 'pillar.celery.file_link_tasks.regenerate_all_expired_links',
        'schedule': 600,  # every N seconds
        'args': ('gcs', 100)
    },
    'refresh-blenderid-badges': {
        'task': 'pillar.celery.badges.sync_badges_for_users',
        'schedule': 10 * 60,  # every N seconds
        'args': (9 * 60, ),  # time limit in seconds, keep shorter than 'schedule'
    }
}

# Badges will be re-fetched every timedelta.
# TODO(Sybren): A proper value should be determined after we actually have users with badges.
BLENDER_ID_BADGE_EXPIRY = datetime.timedelta(hours=4)


# Mapping from user role to capabilities obtained by users with that role.
USER_CAPABILITIES = defaultdict(**{
    'subscriber': {'subscriber', 'home-project'},
    'demo': {'subscriber', 'home-project'},
    'admin': {'encode-video', 'admin',
              'view-pending-nodes', 'edit-project-node-types', 'create-organization'},
    'video-encoder': {'encode-video'},
    'org-subscriber': {'subscriber', 'home-project'},
}, default_factory=frozenset)


# Internationalization and localization

# The default locale is US English.
# A locale can include a territory, a codeset and a modifier.
# We only support locale strings with or without territories though.
# For example, nl_NL and pt_BR are not the same language as nl_BE and pt_PT.
# However we can have a nl or a pt translation, to be used as a common
# translation when no territory-specific locale is available.
# All translations should be in UTF-8.
# This setting is used as a fallback when there is no good match between the
# browser language and the available translations.
DEFAULT_LOCALE = 'en_US'
# All the available languages will be determined based on available translations
# in the //translations/ folder. The exception is English, since all the text is
# originally in English already. That said, on rare occasions we may want to
# never show the site in English.
SUPPORT_ENGLISH = True


# Mail options, see pillar.celery.email_tasks.
SMTP_HOST = 'localhost'
SMTP_PORT = 2525
SMTP_USERNAME = ''
SMTP_PASSWORD = ''
SMTP_TIMEOUT = 30  # timeout in seconds, https://docs.python.org/3/library/smtplib.html#smtplib.SMTP
MAIL_RETRY = 180  # in seconds, delay until trying to send an email again.
MAIL_DEFAULT_FROM_NAME = 'Blender Cloud'
MAIL_DEFAULT_FROM_ADDR = 'cloudsupport@localhost'

SEND_FILE_MAX_AGE_DEFAULT = 3600 * 24 * 365  # seconds

# MUST be 8 characters long, see pillar.flask_extra.HashedPathConverter
# Intended to be changed for every deploy. If it is empty, a random hash will
# be used. Note that this causes extra traffic, since every time the process
# restarts the URLs will be different.
STATIC_FILE_HASH = ''

# Disable default CSRF protection for all views, since most web endpoints and
# all API endpoints do not need it. On the views that require it, we use the
# current_app.csrf.protect() method.
WTF_CSRF_CHECK_DEFAULT = False

# Flask Debug Toolbar. Enable it by overriding DEBUG_TB_ENABLED in config_local.py.
DEBUG_TB_ENABLED = False
DEBUG_TB_PANELS = [
    'flask_debugtoolbar.panels.versions.VersionDebugPanel',
    'flask_debugtoolbar.panels.headers.HeaderDebugPanel',
    'flask_debugtoolbar.panels.request_vars.RequestVarsDebugPanel',
    'flask_debugtoolbar.panels.config_vars.ConfigVarsDebugPanel',
    'flask_debugtoolbar.panels.template.TemplateDebugPanel',
    'flask_debugtoolbar.panels.logger.LoggingPanel',
    'flask_debugtoolbar.panels.route_list.RouteListDebugPanel']
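One caveat in the USER_CAPABILITIES setting above: defaultdict() has no default_factory keyword parameter, so a keyword passed that way is stored as an ordinary dictionary entry and the factory stays None. A standalone sketch of that behaviour, and of the positional form that actually installs the factory:

from collections import defaultdict

d = defaultdict(**{'admin': {'admin'}}, default_factory=frozenset)
print(d.default_factory)       # None
print(d['default_factory'])    # <class 'frozenset'>, stored as a plain entry

caps = defaultdict(frozenset, {'admin': {'admin'}})
print(caps['unknown-role'])    # frozenset(), the factory kicks in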
pillar/extension.py
@@ -16,12 +16,34 @@ can then be registered to the application at app creation time:
"""
 | 
			
		||||
 | 
			
		||||
import abc
 | 
			
		||||
import inspect
 | 
			
		||||
import pathlib
 | 
			
		||||
import typing
 | 
			
		||||
 | 
			
		||||
import flask
 | 
			
		||||
import pillarsdk
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class PillarExtension(object):
 | 
			
		||||
    __metaclass__ = abc.ABCMeta
 | 
			
		||||
class PillarExtension(object, metaclass=abc.ABCMeta):
 | 
			
		||||
    # Set to True when your extension implements the project_settings() method.
 | 
			
		||||
    has_project_settings = False
 | 
			
		||||
 | 
			
		||||
    @abc.abstractproperty
 | 
			
		||||
    # Set to True when your extension implements the context_processor() method.
 | 
			
		||||
    has_context_processor = False
 | 
			
		||||
 | 
			
		||||
    # List of Celery task modules introduced by this extension.
 | 
			
		||||
    celery_task_modules: typing.List[str] = []
 | 
			
		||||
 | 
			
		||||
    # Set of user roles used/introduced by this extension.
 | 
			
		||||
    user_roles: typing.Set[str] = set()
 | 
			
		||||
    user_roles_indexable: typing.Set[str] = set()
 | 
			
		||||
 | 
			
		||||
    # User capabilities introduced by this extension. The final set of
 | 
			
		||||
    # capabilities is the union of all app-level and extension-level caps.
 | 
			
		||||
    user_caps: typing.Mapping[str, typing.FrozenSet] = {}
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    @abc.abstractmethod
 | 
			
		||||
    def name(self):
 | 
			
		||||
        """The name of this extension.
 | 
			
		||||
 | 
			
		||||
@@ -33,6 +55,14 @@ class PillarExtension(object):
 | 
			
		||||
        :rtype: unicode
 | 
			
		||||
        """
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def icon(self) -> str:
 | 
			
		||||
        """Returns the icon HTML class, for use like i.pi-{{ext.icon}}
 | 
			
		||||
 | 
			
		||||
        Defaults to the extension name.
 | 
			
		||||
        """
 | 
			
		||||
        return self.name
 | 
			
		||||
 | 
			
		||||
    @abc.abstractmethod
 | 
			
		||||
    def flask_config(self):
 | 
			
		||||
        """Returns extension-specific defaults for the Flask configuration.
 | 
			
		||||
@@ -84,13 +114,48 @@ class PillarExtension(object):
 | 
			
		||||
        """
 | 
			
		||||
        return None
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def translations_path(self) -> typing.Union[pathlib.Path, None]:
 | 
			
		||||
        """Returns the path where the translations for this extension are stored.
 | 
			
		||||
 | 
			
		||||
        This is top folder that contains a "translations" sub-folder
 | 
			
		||||
 | 
			
		||||
        May return None, in which case English will always be used for this extension.
 | 
			
		||||
        """
 | 
			
		||||
        class_filename = pathlib.Path(inspect.getfile(self.__class__))
 | 
			
		||||
 | 
			
		||||
        # Pillar extensions instantiate the PillarExtension from a sub-folder in
 | 
			
		||||
        # the main project (e.g. //blender_cloud/blender_cloud/__init__.py), but
 | 
			
		||||
        # the translations folders is in the main project folder.
 | 
			
		||||
        translations_path = class_filename.parents[1] / 'translations'
 | 
			
		||||
 | 
			
		||||
        return translations_path if translations_path.is_dir() else None
 | 
			
		||||
 | 
			
		||||
    def setup_app(self, app):
 | 
			
		||||
        """Called during app startup, after all extensions have loaded."""
 | 
			
		||||
 | 
			
		||||
    def sidebar_links(self, project):
 | 
			
		||||
    def sidebar_links(self, project: pillarsdk.Project) -> str:
 | 
			
		||||
        """Returns the sidebar link(s) for the given projects.
 | 
			
		||||
 | 
			
		||||
        :returns: HTML as a string for the sidebar.
 | 
			
		||||
        """
 | 
			
		||||
 | 
			
		||||
        return ''
 | 
			
		||||
 | 
			
		||||
    def project_settings(self, project: pillarsdk.Project, **template_args: dict) -> flask.Response:
 | 
			
		||||
        """Renders the project settings page for this extension.
 | 
			
		||||
 | 
			
		||||
        Set YourExtension.has_project_settings = True and Pillar will call this function.
 | 
			
		||||
 | 
			
		||||
        :param project: the project for which to render the settings.
 | 
			
		||||
        :param template_args: additional template arguments.
 | 
			
		||||
        :returns: a Flask HTTP response
 | 
			
		||||
        """
 | 
			
		||||
 | 
			
		||||
    def context_processor(self) -> dict:
 | 
			
		||||
        """Returns a dictionary that gets injected into the Flask Jinja2 namespace.
 | 
			
		||||
 | 
			
		||||
        Set has_context_processor  to True when your extension implements this method.
 | 
			
		||||
        """
 | 
			
		||||
 | 
			
		||||
        return {}
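For orientation, a minimal sketch of an extension built on the interface above. DemoExtension and its name are hypothetical, and the real base class may declare further abstract members beyond the two shown in this hunk, which would also need implementing:

from pillar.extension import PillarExtension

class DemoExtension(PillarExtension):
    """Hypothetical extension; implements only the members shown in this hunk."""

    @property
    def name(self) -> str:
        return 'demo'

    def flask_config(self) -> dict:
        # No extension-specific Flask settings.
        return {}

# DemoExtension().icon would yield 'demo': icon falls back to the extension name.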
76  pillar/flask_extra.py  Normal file
@@ -0,0 +1,76 @@
import re
import functools

import flask
import werkzeug.routing


class HashedPathConverter(werkzeug.routing.PathConverter):
    """Allows for files `xxx.yyy.js` to be served as `xxx.yyy.abc123.js`.

    The hash code is placed before the last extension.
    """
    weight = 300
    # Hash length is hard-coded to 8 characters for now.
    hash_re = re.compile(r'\.([a-zA-Z0-9]{8})(?=\.[^.]+$)')

    @functools.lru_cache(maxsize=1024)
    def to_python(self, from_url: str) -> str:
        return self.hash_re.sub('', from_url)

    @functools.lru_cache(maxsize=1024)
    def to_url(self, filepath: str) -> str:
        try:
            dotidx = filepath.rindex('.')
        except ValueError:
            # Happens when there is no dot. Very unlikely.
            return filepath

        current_hash = flask.current_app.config['STATIC_FILE_HASH']
        before, after = filepath[:dotidx], filepath[dotidx:]
        return f'{before}.{current_hash}{after}'


def add_response_headers(headers: dict):
    """This decorator adds the headers passed in to the response."""

    def decorator(f):
        @functools.wraps(f)
        def decorated_function(*args, **kwargs):
            resp = flask.make_response(f(*args, **kwargs))
            h = resp.headers
            for header, value in headers.items():
                h[header] = value
            return resp

        return decorated_function

    return decorator


def vary_xhr():
    """View function decorator; adds HTTP header "Vary: X-Requested-With" to the response."""

    def decorator(f):
        header_adder = add_response_headers({'Vary': 'X-Requested-With'})
        return header_adder(f)

    return decorator


def ensure_schema(url: str) -> str:
    """Return the same URL with the configured PREFERRED_URL_SCHEME."""
    import urllib.parse

    if not url:
        return url

    bits = urllib.parse.urlsplit(url, allow_fragments=True)

    if not bits[0] and not bits[1]:
        # Don't replace the scheme if there is not even a hostname.
        return url

    scheme = flask.current_app.config.get('PREFERRED_URL_SCHEME', 'https')
    bits = (scheme, *bits[1:])
    return urllib.parse.urlunsplit(bits)
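A hedged sketch of how the helpers above are typically wired into a Flask app. The converter registration is the standard werkzeug mechanism; the route, file layout, and app here are placeholders, and none of this wiring is shown in this diff:

import flask
from pillar.flask_extra import HashedPathConverter, vary_xhr

app = flask.Flask(__name__)
app.config['STATIC_FILE_HASH'] = 'abcd1234'  # must be 8 characters, see pillar/config.py
app.url_map.converters['hashed_path'] = HashedPathConverter

@app.route('/files/<hashed_path:filepath>')
@vary_xhr()
def serve_file(filepath: str):
    # to_python() has already stripped the 8-character hash from filepath.
    return flask.send_from_directory('static', filepath)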
Some files were not shown because too many files have changed in this diff.