Compare commits: 0.1.4...ebf864c585 (2795 commits)

| SHA1 |
|---|
| ebf864c585 | |||
| f68a1759d6 | |||
| 586ce3455c | |||
| 730845c861 | |||
| c2314b65ea | |||
| 19f2caedb7 | |||
| cb2f340277 | |||
| ab91d58a03 | |||
| 67c184a97c | |||
| 55658c5f23 | |||
| 29f3dd43f2 | |||
| 32c0c5fb00 | |||
| 9dc476f393 | |||
| cd29b2b870 | |||
| f38380d156 | |||
| 4d5e712e85 | |||
| ee1d07000a | |||
| 90839b4d4b | |||
| 0e3638b86c | |||
| c75d44864e | |||
| 62d099b500 | |||
| ddf94e04da | |||
| 16766b1f84 | |||
| f08b50a952 | |||
| ea915282b2 | |||
| 499bbca42e | |||
| a0ebfad89d | |||
| 60a8219869 | |||
| 409e12a9d6 | |||
| 2fefbb6dea | |||
| 04fd7c83c1 | |||
| 894e35b4e2 | |||
| 97cf54433b | |||
| 82dd663942 | |||
| 28381737dc | |||
| b677e6e325 | |||
| 8b8f9e652b | |||
| 5aa37b48d3 | |||
| 5db1d31668 | |||
| 896d1af0f7 | |||
| 817fc3f0fe | |||
| c65386a8a8 | |||
| 4c7b429001 | |||
| 71697bad34 | |||
| 770339f94c | |||
| fb4df8865b | |||
| 0fc050e17b | |||
| 475833e8e1 | |||
| 1d18f061f2 | |||
| 198e20ce4f | |||
| d4819440af | |||
| d52e0d4e09 | |||
| b59c76d630 | |||
| 42e9b8b627 | |||
| e67fc09bc8 | |||
| 5a968ffe63 | |||
| cc02a0adb0 | |||
| d43e240a2a | |||
| a6a4f458e0 | |||
| d5ce38bf71 | |||
| 8928e04385 | |||
| 1a270cd315 | |||
| 4f696abb5d | |||
| e078d33103 | |||
| b7d0e62844 | |||
| 57534d86cd | |||
| 8353396940 | |||
| 9abd3e97e7 | |||
| bf66de0bfd | |||
| e0708410d0 | |||
| 7e6fb740b3 | |||
| eed628dbab | |||
| 898b5da661 | |||
| e64e2a41d5 | |||
| c72661a87f | |||
| 6b96aefa21 | |||
| 72d28d0af9 | |||
| 0b667177dd | |||
| 90f17d02d9 | |||
| 4e6d25d0f9 | |||
| da0970f3c8 | |||
| 16177d751d | |||
| 0876d5f911 | |||
| 2586c23efd | |||
| 7a612f36a3 | |||
| 60d28cf875 | |||
| 98668b0972 | |||
| 010862ebcb | |||
| ea513f465f | |||
| d3b01d6b40 | |||
| b9100dbf29 | |||
| c278a6db4d | |||
| 5e10a82621 | |||
| 12e8e4891c | |||
| 0f0f717520 | |||
| 02ff34f440 | |||
| 5fc6533443 | |||
| 3634b5e423 | |||
| 23797ec067 | |||
| ca357c08c4 | |||
| d7fc84f6e9 | |||
| d3908fa445 | |||
| aaff0f7581 | |||
| 7a226241db | |||
| bf7638937a | |||
| ba47e7448d | |||
| 96eada297f | |||
| 0fa136e7a4 | |||
| f8705aa730 | |||
| 1e75cf4ab5 | |||
| 8f1c966079 | |||
| d49edd886b | |||
| e871c5eb18 | |||
| 85da1f1c75 | |||
| 57457bfe78 | |||
| cd13832377 | |||
| df8d6e5d7f | |||
| f0aa9d249f | |||
| 497afc3b46 | |||
| 4e4a7e9cab | |||
| e5961146b9 | |||
| 21c2255c75 | |||
| 4f1e09e3af | |||
| ae1ead6d75 | |||
| 1983eb79de | |||
| 59bfcec808 | |||
| 12af0ecb45 | |||
| 710290f590 | |||
| 72be3ba54c | |||
| 17218cbf02 | |||
| 988d23e163 | |||
| 2c477ae8e7 | |||
| 19b693f636 | |||
| e3fa072608 | |||
| d5557c1533 | |||
| cad04885a0 | |||
| f97e880fa6 | |||
| 1ea24e396a | |||
| 921e8d8236 | |||
| 9211178e77 | |||
| 8de9842092 | |||
| 9de8e5a28d | |||
| 429261fd63 | |||
| 772894c414 | |||
| c10f4b6483 | |||
| ebf86c0eb2 | |||
| 9f66c18e71 | |||
| 6dc4b9877b | |||
| 19a678dcc2 | |||
| 1632b4b078 | |||
| 21e9a4508d | |||
| c892cd5ae1 | |||
| cc855e2758 | |||
| edab32d9a1 | |||
| 416f39222a | |||
| fae3793255 | |||
| 65795bd139 | |||
| 531e5c0d00 | |||
| 5d7ca8520a | |||
| 51549ae795 | |||
| efd231e0ab | |||
| d8cca855e8 | |||
| 5ae65c2c4d | |||
| 87a85dd3b5 | |||
| 4321ca7cc7 | |||
| 2b40d2743c | |||
| 980883cb8d | |||
| f1e2cc1265 | |||
| 477f2fa1c2 | |||
| f5e71bb431 | |||
| 9136a50d51 | |||
| 8022efe32e | |||
| df3e39cc25 | |||
| 1ceafc593a | |||
| b147aec53d | |||
| dc675311f0 | |||
| 64fffecc8a | |||
| 51fbd6a871 | |||
| 93057561b3 | |||
| f5228407c2 | |||
| cff8fb15ae | |||
| 305de2217e | |||
| fd2aff7a16 | |||
| 676562ffde | |||
| 02d711282d | |||
| 02c7a5ca7c | |||
| e011cc9251 | |||
| 9778eb7b25 | |||
| 5bb0143da4 | |||
| 84496b0f55 | |||
| b8e3162c5f | |||
| b337b27146 | |||
| 01678a714f | |||
| 1bdfaa9739 | |||
| b217a68512 | |||
| 8adca442e1 | |||
| 5cc6279361 | |||
| 09a2a8fc76 | |||
| 806aeddd87 | |||
| e4ed2a14e7 | |||
| 6e588f9c7b | |||
| 1c9c377751 | |||
| fd52438d61 | |||
| de9ad82011 | |||
| 3487ecbaa7 | |||
| aa0a31f90e | |||
| a99a84b0b2 | |||
| f47f108f43 | |||
| f6c10ef3d8 | |||
| a7be8b6dce | |||
| 4923157dc2 | |||
| aff7ad7ea2 | |||
| 60fd2134ca | |||
| 5ab9626cbd | |||
| 6fa15c4cb3 | |||
| de0c38e9ba | |||
| 1eb2df63cc | |||
| aeead5363b | |||
| e14dfc8639 | |||
| 6ef6fe40e6 | |||
| 61a0aae96c | |||
| e0ba90d672 | |||
| 5d24cff338 | |||
| 458b4b062c | |||
| 50f98dd223 | |||
| 86d37ced17 | |||
| f66d91036b | |||
| 2fd05eed3d | |||
| ac62cc7767 | |||
| 0d95d77a10 | |||
| cb38415b3c | |||
| b9a30a2188 | |||
| 67c57b9b63 | |||
| 26dfe5f654 | |||
| 2ca8dba5e4 | |||
| 9af887cf95 | |||
| 7cad569114 | |||
| 6fd27c3372 | |||
| 8e30305596 | |||
| 9ded7b8571 | |||
| ef6373cb3a | |||
| 5dd6c6636b | |||
| af4bb72be3 | |||
| d5e51b3fc3 | |||
| beba785b09 | |||
| 4ec0430a61 | |||
| 079e8ccf3b | |||
| 77b67f613f | |||
| 0077452e10 | |||
| 8ad4448c85 | |||
| db4f68f0ed | |||
| 9c4fb85dc3 | |||
| d478def98c | |||
| 9a7a632489 | |||
| a50c641044 | |||
| 9671b34485 | |||
| 91c2976bfc | |||
| 75eaea3aad | |||
| 1fc2e29ab8 | |||
| 1577f99567 | |||
| 9d37f8018a | |||
| beea47fd09 | |||
| 4955ec8541 | |||
| ced9696322 | |||
| 50d5c15a69 | |||
| 54ca1315ca | |||
| f7abfff51d | |||
| d4dfa63cf5 | |||
| cbc328d073 | |||
| c5106f5fa4 | |||
| 07f1d751c4 | |||
| ab4cda2298 | |||
| 7a9500eee0 | |||
| e79dda3384 | |||
| 5ed109e998 | |||
| 599a6943e2 | |||
| 7011a4df8b | |||
| 4d7c6844e5 | |||
| 2645c4a82d | |||
| 8d2fffba87 | |||
| 3cb386cc0f | |||
| e06dea106f | |||
| 747df683a9 | |||
| 8a1563db54 | |||
| d7d483fa9b | |||
| 25125f3257 | |||
| a7082f7332 | |||
| 404d213e8f | |||
| e75e472a1a | |||
| 69b64c63ea | |||
| d07464f3b1 | |||
| de0e646cf9 | |||
| 5c5e53b8ec | |||
| 85f18afa81 | |||
| bc8eda2a6b | |||
| f4bcd1cf30 | |||
| 5a6e4e5b43 | |||
| c169ad291e | |||
| 3242fc1e13 | |||
| 9af8d63f11 | |||
| 27c5539178 | |||
| be21d357cb | |||
| 6aedd3b0d8 | |||
| 2b64959953 | |||
| 3efe14c43f | |||
| 85b47bed05 | |||
| 0f463d5f13 | |||
| 367c6f0b03 | |||
| 939c41a70e | |||
| 8855e60db7 | |||
| a4a83dda72 | |||
| d229ccf654 | |||
| ad5416e441 | |||
| 41f853afd6 | |||
| 88a043cfec | |||
| 605d218385 | |||
| 1a8e96ed32 | |||
| dc4df9b279 | |||
| 61eac2aada | |||
| 1cc3051aa2 | |||
| ad0e469e92 | |||
| 9cb430f92c | |||
| 660a09e4c9 | |||
| 08762d3bbb | |||
| 553b7fab8d | |||
| 3d160a459c | |||
| 7b2b3911bc | |||
| c899173dc9 | |||
| 3e8ca982a1 | |||
| 136b8e67d4 | |||
| 301f099622 | |||
| 4f148f3bc3 | |||
| 41b35fb13d | |||
| 1b73b1d080 | |||
| c2116df652 | |||
| 5b96b3a032 | |||
| d41227327e | |||
| 240f0b99c8 | |||
| cab1216cb7 | |||
| bc5579e9bf | |||
| 5b3f40467b | |||
| 14b73b73cf | |||
| d86d599d61 | |||
| 6ff8910f87 | |||
| acb0463844 | |||
| d7ca1570be | |||
| bde2829e72 | |||
| 226a62d338 | |||
| 0d1798b0e0 | |||
| aa800d79de | |||
| 9d3b7ac5dd | |||
| 4c62108c11 | |||
| 682991c022 | |||
| 18a3514974 | |||
| 7a83799bcd | |||
| 914c029138 | |||
| 574c76ad66 | |||
| 9bcfcebb3a | |||
| 817a4c3d90 | |||
| c3d8501401 | |||
| c15a2c62d1 | |||
| 983f9beacb | |||
| cbe06bd4d0 | |||
| 2a8cf805dd | |||
| 084f9a14f4 | |||
| 2af336f6c2 | |||
| 5c4a167cfd | |||
| b382d0332e | |||
| 117c0bab04 | |||
| 41a02610ba | |||
| c80a3390e7 | |||
| 88b38f7f0b | |||
| 229a5fbc9b | |||
| 57e06cf1db | |||
| e7c684724a | |||
| 8e1d2b51e0 | |||
| b5e22f5a6b | |||
| 4752e10472 | |||
| 7ac754a894 | |||
| 1e90bb2d0b | |||
| 00134a0966 | |||
| 69bfe48cbe | |||
| f98d53fda0 | |||
| 99628aface | |||
| 918e1699ea | |||
| 560894befb | |||
| b5b50d34b2 | |||
| 276229db4a | |||
| 6000d8bb06 | |||
| 0607520f93 | |||
| 3c1d6998fb | |||
| 456c994d25 | |||
| 4c3a6112b8 | |||
| f6faa856fe | |||
| 91c0432cc2 | |||
| 0fd83d13ae | |||
| 4fa0374097 | |||
| a3ae76fac1 | |||
| 1da7564374 | |||
| e7a0002f1b | |||
| 0d19986fb1 | |||
| c0869e0c71 | |||
| 863bdc045b | |||
| 8cc57a884e | |||
| f7fce73e1e | |||
| 2317967802 | |||
| d5ae45a0d0 | |||
| a46991646b | |||
| fd2adad49e | |||
| e3afaccf97 | |||
| 05cae2ae8d | |||
| 5bfb98e097 | |||
| ba9b633c45 | |||
| e73c7db16b | |||
| 0517d01748 | |||
| 672f0e10f9 | |||
| d02128a093 | |||
| d27a1292c5 | |||
| f6fed4ddc2 | |||
| a58e10f107 | |||
| 2aea0fbfba | |||
| 0fa9ce7a6d | |||
| f863a227ac | |||
| 31298845f5 | |||
| 98c2e5cc31 | |||
| 5e24e3fd91 | |||
| 4050111291 | |||
| 343ea09813 | |||
| c384de7e19 | |||
| d8bc7d34c5 | |||
| b329d1cdb9 | |||
| 0f00074500 | |||
| d65198cd2e | |||
| 75160ae5f3 | |||
| e0c2f4274e | |||
| 4fd1d7d957 | |||
| aa11088944 | |||
| 1f598e3752 | |||
| 5870ff4713 | |||
| 7a5a5531cc | |||
| 47946510d4 | |||
| 9ec7593bc7 | |||
| 6bb3fa20fb | |||
| f17ad9aad9 | |||
| c722df1245 | |||
| 81457f88c9 | |||
| a669e5b057 | |||
| 80b9f97b39 | |||
| 9f641c14a9 | |||
| 1d9af2eb72 | |||
| 0003e53f5a | |||
| 5ab9d7f4e8 | |||
| e61ecea0f2 | |||
| 1be0362b11 | |||
| 7ada40d1b3 | |||
| ff6e23f2ea | |||
| 738afc4760 | |||
| 9c20c0c950 | |||
| bf4043d4c2 | |||
| 2a12fd886d | |||
| d350953bcc | |||
| b6b07edcb6 | |||
| fb39c48530 | |||
| c8a232f65f | |||
| 125e6d046f | |||
| 17e28748ff | |||
| f532d5c1a5 | |||
| 8af7a2155d | |||
| a480330c98 | |||
| 59d63b5f40 | |||
| 51c0342ec4 | |||
| 431c3cc686 | |||
| 88ed00c330 | |||
| 804d3b630a | |||
| e024fceba2 | |||
| 4985744bd8 | |||
| 5ea3815c7e | |||
| a7a309136f | |||
| f834d10f9a | |||
| 43bb48c286 | |||
| c40d297735 | |||
| 1b05971268 | |||
| 472a4654a2 | |||
| 1a0704c43b | |||
| fd3ea2cf46 | |||
| 3ec8b7b36e | |||
| 099ebee409 | |||
| 64b4437ca7 | |||
| 62469e518f | |||
| e5a9debf10 | |||
| cd47101069 | |||
| b059813c5b | |||
| 422d9ba050 | |||
| 44be7d2900 | |||
| 34f61db5bc | |||
| 735db771f8 | |||
| f08394ffe2 | |||
| 354f699135 | |||
| 5d00670346 | |||
| a2c56d6686 | |||
| c3b36d697f | |||
| 9dccd6f3bd | |||
| d9d46dba35 | |||
| 73347a02b6 | |||
| ad005e0b4c | |||
| a632f32816 | |||
| 9d097c593e | |||
| db45fc678f | |||
| 4a040c66c5 | |||
| 53601be788 | |||
| a0c2357d81 | |||
| a2e84441ce | |||
| 7fad7df768 | |||
| 5215a71a6d | |||
| b487363bda | |||
| 704e61dd53 | |||
| e06c3ea192 | |||
| 8beaeffaec | |||
| 7e8bb97920 | |||
| 8270ee98e9 | |||
| aadca10a12 | |||
| 03eb991c89 | |||
| 6c957be3d8 | |||
| 1bcc9d5d0d | |||
| 1b8507636b | |||
| 3ce7cd6c50 | |||
| 8658ac531e | |||
| 2a1751ec30 | |||
| 860b35e19d | |||
| aff8500b85 | |||
| 840408d5d2 | |||
| 9a5ec72850 | |||
| b68a7f1de2 | |||
| 93225f1ec2 | |||
| 01bb9df16d | |||
| c42bf89c5e | |||
| 2e73a4e872 | |||
| a3af428639 | |||
| e2fefdbe89 | |||
| 50091cca1d | |||
| d8948a12d3 | |||
| 277249fff1 | |||
| 86c3771044 | |||
| 904bc9d8b6 | |||
| 4bf7e26976 | |||
| 66a5c3880e | |||
| 084672bd07 | |||
| 3ac2e20e93 | |||
| f794ad7e9e | |||
| 44eeb2b738 | |||
| 22759e7f5c | |||
| 760a219e09 | |||
| 0ed10eafbb | |||
| d95f02d234 | |||
| 8861cc70cb | |||
| 34e3f7c049 | |||
| 4eeab91d73 | |||
| 10dfedee36 | |||
| 5f87b7d2db | |||
| 86310ff02d | |||
| 87a53557cd | |||
| b96657994a | |||
| 8f16688b0a | |||
| 49a8b80df2 | |||
| c9767b3172 | |||
| 49c4a9c3b2 | |||
| 4923bbf8a7 | |||
| 09016fd2ee | |||
| 13cfe7a754 | |||
| f83e3f764e | |||
| b292c1c3c3 | |||
| 0e38870fb5 | |||
| ecf940bb8f | |||
| 97145b6dee | |||
| cc4fc66c93 | |||
| 748268ecd5 | |||
| f60ade4d28 | |||
| 3b8f3612a1 | |||
| 2582086d39 | |||
| 2422717467 | |||
| fd444403bb | |||
| 07cb2f71cb | |||
| 2660beb0bc | |||
| fb3373f789 | |||
| 4d4ff85090 | |||
| fb6979ae6d | |||
| 2bf03a0bc2 | |||
| 9d894b0817 | |||
| d010553d23 | |||
| 25685b4e16 | |||
| 9bc017c9bf | |||
| c1902d9232 | |||
| d3a0fe7491 | |||
| ba1bb688b7 | |||
| 23cd7d3d62 | |||
| 296d4d04a0 | |||
| dfd9acb0d7 | |||
| ae75fa8246 | |||
| 9e1737edc4 | |||
| c4b42e194f | |||
| c0829478fd | |||
| 477248dd8b | |||
| 5d75204a4f | |||
| 70985f4ff5 | |||
| 201a96ff72 | |||
| a17f665c52 | |||
| 5afa32fa22 | |||
| 98e05cf8ea | |||
| 5ac65062c9 | |||
| 4534e26f93 | |||
| 6d05af1790 | |||
| 6bef8fb9d7 | |||
| 56308cbd90 | |||
| cdd9137f4e | |||
| dac95313b8 | |||
| f8c6e2338a | |||
| 5ad9c11716 | |||
| 34f6c3a230 | |||
| 8e2226180a | |||
| b993e664a3 | |||
| fa13bda99e | |||
| 53c314cb41 | |||
| 0f374c8fd0 | |||
| 529ee04ae7 | |||
| 65b193a891 | |||
| daafb5ddd4 | |||
| bffaeb1067 | |||
| 746e314a5b | |||
| f68900d2b3 | |||
| 7f6eae7213 | |||
| d469700cbf | |||
| 843ffad60e | |||
| 1b6907a404 | |||
| 54035e8db8 | |||
| ba8184c874 | |||
| 4c40e806bc | |||
| eaaacfad3e | |||
| 7e97d885df | |||
| 863af7a3e5 | |||
| 273c3e2793 | |||
| f71a9e0ad2 | |||
| 8d0007b9c0 | |||
| 7b305514a1 | |||
| 4fcb050fa8 | |||
| 3eea32a227 | |||
| 5122e0e375 | |||
| e33b767d12 | |||
| f8657998e6 | |||
| cee81bd693 | |||
| 67cb703c83 | |||
| 213b13d3c9 | |||
| 0537d494e7 | |||
| 2c8dc24fda | |||
| 1360d846fd | |||
| 3b3cec6f8b | |||
| 6102ec9267 | |||
| eaeec5d757 | |||
| 7b2d5c9103 | |||
| 11b15e7e23 | |||
| eb138fc960 | |||
| 45c98a21b3 | |||
| 37f4b4c2a6 | |||
| bb21f891cb | |||
| 46e0d1953b | |||
| f82ec24dfa | |||
| 5c2a2f8ff2 | |||
| 5add647148 | |||
| 274e61cffb | |||
| 9ddfd9f3b1 | |||
| 2e1b58c70a | |||
| 1dd5d64461 | |||
| c4b1277d3e | |||
| e4bba5c6e9 | |||
| 37bdbc0f91 | |||
| efb7a33d3e | |||
| 577a29db0b | |||
| 77cfa1e55c | |||
| b4025e6820 | |||
| 9a939e8281 | |||
| 2500228d7a | |||
| d825616ea6 | |||
| e25f97fce7 | |||
| 67d24caef5 | |||
| 6f3ba23fa0 | |||
| 9d9bf9d7ba | |||
| d6cc8a8a9a | |||
| 66183e6bdd | |||
| 88b14d6ad6 | |||
| 0c02ddec3c | |||
| 2b7e60399c | |||
| 0e314d0028 | |||
| 0149f8b0d3 | |||
| 2a2499a929 | |||
| e4e5bc056b | |||
| 5206997468 | |||
| 8ba9ae1148 | |||
| 88c40aa93c | |||
| 001ad3b8d9 | |||
| 697215f8bc | |||
| edf1f9c6ad | |||
| 7bd5173da4 | |||
| 6049e8f117 | |||
| eb48e2fe75 | |||
| 1d4da0e3d8 | |||
| ecca003ab4 | |||
| 9de89ec96a | |||
| db3920ed25 | |||
| 07dc31bed7 | |||
| febc1c270b | |||
| 1a19e250bb | |||
| 34d23503de | |||
| b28b4f9a28 | |||
| c96695c966 | |||
| 593c35776c | |||
| cf3298d1ad | |||
| 09ce55efcd | |||
| 921d52b360 | |||
| be722fb1b3 | |||
| 92a8942727 | |||
| a6c3b85fe1 | |||
| ba8e315eed | |||
| 729ed3843d | |||
| d3cb0b517a | |||
| ad86cf1fd9 | |||
| 790367ea5a | |||
| cd7adacca3 | |||
| df596be5cf | |||
| 027580cade | |||
| 6227e4aa89 | |||
| 5fc62aff7e | |||
| e1e7efc96e | |||
| 771e72187a | |||
| b85fe2f2b5 | |||
| 5dfb6acb17 | |||
| 3c24c22890 | |||
| c0004e506e | |||
| 509523b9ee | |||
| 9ef538305d | |||
| 72a0552073 | |||
| 4a4b3b932e | |||
| a873d69859 | |||
| 38a7f3d43e | |||
| 1e81d47793 | |||
| fdc1e20c23 | |||
| 38b7d6e5e3 | |||
| 6c46481ffd | |||
| 318292704d | |||
| 6ed70eb9eb | |||
| 27a86f5c18 | |||
| 462099d906 | |||
| fe67ff2146 | |||
| a8c0adaa4d | |||
| 5fb3da8bec | |||
| a584aeb7eb | |||
| ce69d47b8b | |||
| 4df64a87e5 | |||
| 64a7faffd9 | |||
| 2bcb842d93 | |||
| 1430ac5395 | |||
| ccb811516c | |||
| 9612d291ed | |||
| 9c56d603ce | |||
| 2ee60bcdb6 | |||
| 0c95ec3bdf | |||
| e75df1ddc9 | |||
| d29edabefe | |||
| ed3472d029 | |||
| aa5200b85f | |||
| 3fd0d3e141 | |||
| 0d76690091 | |||
| 1815c89970 | |||
| a70a49e4e9 | |||
| 3693bc2d8b | |||
| bfa953270d | |||
| ac95f1cc33 | |||
| fabcad1e46 | |||
| a8e8924e2a | |||
| bde3d1ac66 | |||
| 8e3cc93d6a | |||
| b521aaf579 | |||
| af21225918 | |||
| a449cc2b15 | |||
| 2b849a6520 | |||
| 9db1ea3307 | |||
| 5c4b36fd5f | |||
| 571c8bf42d | |||
| 6c7bb5f9b7 | |||
| 14edd1bd4f | |||
| ca8e73286f | |||
| d69c83e5c8 | |||
| 1e4d4b9413 | |||
| 2d7284f677 | |||
| c575ed5c79 | |||
| 7dd2a3c269 | |||
| 98cffb6c79 | |||
| c0cf1c02c1 | |||
| b23ae60847 | |||
| 77cec4fde9 | |||
| e379e34212 | |||
| b9736dfd3f | |||
| bcdb3173bd | |||
| 5324290234 | |||
| 8ea54d7db2 | |||
| 48bccd6f68 | |||
| c38e651eb0 | |||
| 53f81fb09f | |||
| 5e8599540e | |||
| ac0282529e | |||
| 7dac0e1dd8 | |||
| fecb5b6252 | |||
| fb6d369130 | |||
| fe2b13becb | |||
| a4475ad2a3 | |||
| e9404aa06b | |||
| 02757359f0 | |||
| be7736d350 | |||
| 75d4699c7a | |||
| 139769edea | |||
| 30ab0bcdbe | |||
| 48d8e1d235 | |||
| 969a7107fe | |||
| b885244aa7 | |||
| ef115ef2b1 | |||
| f3039a1210 | |||
| b91899fe99 | |||
| 9576fa4b0f | |||
| 192ecb3ce0 | |||
| 980360b94a | |||
| 7a2ca8969e | |||
| a0a8c155f6 | |||
| 0129e97dfe | |||
| 127f7c1582 | |||
| 620f972635 | |||
| 6248cbb902 | |||
| 5d8eb51ef4 | |||
| c465062673 | |||
| 21c651a566 | |||
| 9984470b58 | |||
| bddae6e428 | |||
| 52c7686d58 | |||
| 915ec0ba63 | |||
| 99f46c1add | |||
| 71f43dfcc1 | |||
| e159e2f3d8 | |||
| 3ef2f97967 | |||
| d9990bc075 | |||
| b10e68df35 | |||
| 5293fac125 | |||
| ca61857dcf | |||
| acf531ece3 | |||
| 6ee856e26d | |||
| 2cfe5437d9 | |||
| 6d06f3ca6f | |||
| 9b1317f4a4 | |||
| 3080a9527c | |||
| 7f119b8914 | |||
| cb7507156c | |||
| 3b393dec6d | |||
| d6f41b6a99 | |||
| 7f73417bda | |||
| 1ed6ae539d | |||
| 58d8a145c3 | |||
| cd7d9aee55 | |||
| 8d957f22af | |||
| 78f9c490dd | |||
| bc1a2cf69c | |||
| cc0b2d5439 | |||
| cad56c813e | |||
| 409b499217 | |||
| a65c4c94a0 | |||
| 348682d5ea | |||
| 9f35d740d2 | |||
| 8be8c95b17 | |||
| 1537d591a8 | |||
| 8e04aa1bd3 | |||
| f446fcc1c2 | |||
| deaac42dba | |||
| 1ccc15859f | |||
| 9b9662d470 | |||
| ebc48ae304 | |||
| 45c1207d07 | |||
| 9fb4be1273 | |||
| 189e8b2725 | |||
| f10121c343 | |||
| f188aea2c2 | |||
| 805d6942ae | |||
| 00a0a27826 | |||
| 93aca891b7 | |||
| 113c9dd657 | |||
| f803fab413 | |||
| 77fdc8795b | |||
| 0d983bd2b5 | |||
| f077b19126 | |||
| da1080e041 | |||
| 06f4aed693 | |||
| 11f9920ff9 | |||
| 2418c3eb0a | |||
| 14e13b512e | |||
| 9037f88430 | |||
| 1768aad9e2 | |||
| 8aea257e6a | |||
| f075c5af3d | |||
| d43e859c34 | |||
| 10b600424e | |||
| b5ab87877b | |||
| 356d3ee9f3 | |||
| fea37ba101 | |||
| 9d7ab27977 | |||
| a076497cf0 | |||
| 6f96a8f5b0 | |||
| f0652ca6a9 | |||
| 2239b68e0e | |||
| 477db836f4 | |||
| 86f661a8af | |||
| 4e7e81aee8 | |||
| 96b2149433 | |||
| 8c9a1df2cf | |||
| a13c45e9cc | |||
| 0c70e5c3fd | |||
| c669cd23f0 | |||
| 972051a61e | |||
| 9b2a1bebd1 | |||
| d0e8666267 | |||
| cb966a0204 | |||
| db91e48395 | |||
| e5e395f0d9 | |||
| 18f42d2349 | |||
| 9b04d901c4 | |||
| f09643f350 | |||
| 24698516fc | |||
| 1c6fee7292 | |||
| 68abf11be8 | |||
| 0e31e708e3 | |||
| 296a315a3e | |||
| b6486d85ba | |||
| ceb2d3d796 | |||
| 2de3f287ab | |||
| 6e17d36d76 | |||
| 22c60fedad | |||
| a3dfc3ef0a | |||
| c29f282560 | |||
| 4d728738ee | |||
| 07a9c56fb8 | |||
| bf47f87c21 | |||
| 95086e08dc | |||
| 5d4413e45c | |||
| 0711ba9c04 | |||
| 83159c2417 | |||
| d2c7330fb7 | |||
| da9c91afb4 | |||
| 3b9b94623f | |||
| 8340e0653b | |||
| d1519343d1 | |||
| 9a02230d63 | |||
| d9aef2da3e | |||
| a97283f0a4 | |||
| a6bf081bec | |||
| 43f5c8b34e | |||
| cadf372f7b | |||
| b4f4e4dc24 | |||
| fa7f71d859 | |||
| 3ff56fc595 | |||
| 894502644c | |||
| 37a1b55b08 | |||
| 31c2d207a2 | |||
| 785c1ca73e | |||
| f2cbddf9e2 | |||
| 6e84e1fd59 | |||
| ff1f73f985 | |||
| bbda9b1d6f | |||
| e2ea2ca4d1 | |||
| b885cdf9d0 | |||
| a7c2b970b0 | |||
| ad6c38960a | |||
| 2903799b85 | |||
| bbc3bf513d | |||
| e8e4f826ea | |||
| 5a401b2d87 | |||
| fe075dc9f5 | |||
| 503df999fa | |||
| 11cd095131 | |||
| 3ba7fdbd49 | |||
| 0f591e9a3d | |||
| 6bf920e66c | |||
| 0be80e156a | |||
| 7810095796 | |||
| ff933ba1aa | |||
| 44bc562e8b | |||
| 30e4f5032b | |||
| ffdd06ec07 | |||
| bec4416185 | |||
| b17da52ead | |||
| 3d48b422b5 | |||
| 0ec8fe05ea | |||
| 244aa069f0 | |||
| a89cbe9332 | |||
| 3ad791e1ec | |||
| e993194b4f | |||
| adabe18c90 | |||
| 429e6a967c | |||
| 252410c6e9 | |||
| 51f3b7dde0 | |||
| 0b52aa8c59 | |||
| 5c1bc72af8 | |||
| 41cf1335d2 | |||
| 3beaedc2fa | |||
| 004df84c6d | |||
| 36ebba6491 | |||
| e37a7c775e | |||
| ee151bdc43 | |||
| a3706594fc | |||
| d199819d66 | |||
| 09c0fa0f94 | |||
| 54ecda4e1a | |||
| 0ed00c5011 | |||
| cd1aeb15f1 | |||
| ae1633b0f2 | |||
| 1b1bdbb261 | |||
| 97d74bfa1d | |||
| 2628ed1a82 | |||
| 66bff57c04 | |||
| 41c781318c | |||
| ea8524f035 | |||
| 424b517914 | |||
| 7419fd951e | |||
| f2a2683dcb | |||
| 8eb639e366 | |||
| 2510f25cc0 | |||
| 4f5f55a383 | |||
| 0b2a5e8646 | |||
| c7398d9e2f | |||
| 0c5a8f2039 | |||
| ff3016e65e | |||
| 0e037973b2 | |||
| 14d32368c0 | |||
| 850620c2a2 | |||
| 5df06501f6 | |||
| 8fbff00850 | |||
| 3434f78e50 | |||
| 404b7a25bc | |||
| 86a1fb41ac | |||
| 55a96ba790 | |||
| 6699833297 | |||
| 2319858586 | |||
| d96155b6df | |||
| 4565bd7dc6 | |||
| e1d8bb83ec | |||
| 960064d5c6 | |||
| b564f97a47 | |||
| 23caac5576 | |||
| 7bff3a7947 | |||
| 39d65db7fd | |||
| 162a300e53 | |||
| 5b8c600261 | |||
| 34cdd29a50 | |||
| de0462e54f | |||
| 68815b8f44 | |||
| bbf50cf0b0 | |||
| 02719a1de7 | |||
| 56917614a2 | |||
| 8a08edb0f3 | |||
| f836c6fff6 | |||
| cdb83c48c5 | |||
| 198abcfe65 | |||
| c0f8fbb24f | |||
| 57016f2f45 | |||
| 491d048948 | |||
| 29213972ae | |||
| 0446aea20e | |||
| 1ed41d03ea | |||
| 28e26a1baf | |||
| 3a512d5bda | |||
| 24f5ce7170 | |||
| f991317301 | |||
| aeb32f4853 | |||
| 45231c2423 | |||
| 28b216273d | |||
| ce9b06e4c6 | |||
| e300cf6e1b | |||
| 7eb9c80fb2 | |||
| 7e31498a4b | |||
| b89dd36771 | |||
| 8b821d0023 | |||
| 071c083eae | |||
| b4d9ab9f0c | |||
| 13d46ae42e | |||
| 8bc23f6deb | |||
| 6e4306b3bb | |||
| 5e389f3f48 | |||
| f81adb1371 | |||
| 9da428c7fd | |||
| fd35a26955 | |||
| 5059cb731a | |||
| 09c7076e79 | |||
| de65d363fc | |||
| 1423ac0d98 | |||
| 34c7e5230b | |||
| 3c6799d736 | |||
| 4fac726cf4 | |||
| 37e5857406 | |||
| 0320c04be2 | |||
| c5ec5fa41f | |||
| 68fd1556b2 | |||
| 3680d523d4 | |||
| e3c5490d25 | |||
| 8c73851708 | |||
| 26d10e8b98 | |||
| 7e92edc70a | |||
| ed39e30824 | |||
| 6eb3836abc | |||
| 302219325b | |||
| 5d8f71c3e4 | |||
| 565142f985 | |||
| f452a7ce68 | |||
| 0f2773c986 | |||
| 3f10b43254 | |||
| ed18df22db | |||
| e33a103ca1 | |||
| 02554b427a | |||
| c9c782684d | |||
| 87470602fd | |||
| 9cfdf55d9e | |||
| 1de85dbcf4 | |||
| 6a83da2292 | |||
| 34640e009b | |||
| 317c84800c | |||
| 3500be39aa | |||
| 029efeb03a | |||
| 15eb7689ed | |||
| 6c66370142 | |||
| 0eacbd42d7 | |||
| 4e6e7edf27 | |||
| b7ce9ab901 | |||
| 1492890ac7 | |||
| 65f594fc29 | |||
| 0de42fdf0f | |||
| ff583981b1 | |||
| e58ff476c9 | |||
| 06ccafeb24 | |||
| 22caaa0c95 | |||
| 44d4c98a99 | |||
| e65154b48e | |||
| adec451613 | |||
| ef7a8587fe | |||
| b0c8901b0a | |||
| 36ce1cc7ef | |||
| fb3f0bd72a | |||
| da237c9c84 | |||
| a7af3cf8d2 | |||
| 298d172536 | |||
| 3be8117310 | |||
| 852a605de3 | |||
| 51bf256723 | |||
| 6d5552afd3 | |||
| 47595e2073 | |||
| deed1b9685 | |||
| 6c99e76c9a | |||
| 2063baefc9 | |||
| dfadcc52ef | |||
| b642c7279d | |||
| 8ed6187697 | |||
| 2497f3fd4e | |||
| 14cd593a59 | |||
| 3a1da72419 | |||
| 6e3f394cff | |||
| cf56300b55 | |||
| 2e3cd98a3c | |||
| 1a90e71884 | |||
| 056dfc34e4 | |||
| 333ba8030a | |||
| 517a96c6eb | |||
| 1a3ba46873 | |||
| 1e64851d79 | |||
| b300a21948 | |||
| 8eef95b58e | |||
| dcdfb32883 | |||
| 4cc2befbfb | |||
| cbfde3ff94 | |||
| 39584f214b | |||
| db4c4846b9 | |||
| e2b5962281 | |||
| 2bc604e5a9 | |||
| 88ed9581b6 | |||
| 45da5847f7 | |||
| 55f35b0f35 | |||
| e313b74813 | |||
| 272285f64a | |||
| ccd0bde0bc | |||
| 0f9b0f39f7 | |||
| 21d619e24a | |||
| 6804bfb233 | |||
| 4338b4346e | |||
| 07e8e6a628 | |||
| b6e09621f8 | |||
| a801112cf6 | |||
| 85efb6a99e | |||
| ec7da60d04 | |||
| 83784d7cb8 | |||
| 9b38761153 | |||
| f1a3fdf7b9 | |||
| 94f3265104 | |||
| 0bd2c76ed3 | |||
| d1e5a40d20 | |||
| f9dadb2670 | |||
| 8dccaaf544 | |||
| 1667c05742 | |||
| b39e2e3f66 | |||
| fb3b0e8cd7 | |||
| 7dd9268ca7 | |||
| 8177e12f3f | |||
| 4c7bf917f2 | |||
| 52f939658f | |||
| f6afcc6d21 | |||
| 2f7342951d | |||
| f76e913689 | |||
| 135d86c583 | |||
| c45c2344bc | |||
| eb16afb6e6 | |||
| 58dd424de8 | |||
| 900ae10764 | |||
| 57874971aa | |||
| e45d005b74 | |||
| 0c078fea57 | |||
| 8e1f00f17c | |||
| e24026f3db | |||
| 5dd2e05972 | |||
| 771f2ebc47 | |||
| 770729a72e | |||
| 2ff811ae71 | |||
| 09796cf7c9 | |||
| 406753fcde | |||
| a5570d07bc | |||
| c1b02cc8a5 | |||
| cf5b0145bd | |||
| 9bdf48c1b9 | |||
| ad244fc83d | |||
| da51e7f31d | |||
| f8a9ec6e3e | |||
| df8d4e0892 | |||
| ceb335f3ab | |||
| 26e3982cb4 | |||
| 9ecc19c481 | |||
| 6d67ec7e34 | |||
| 512e1a0bdd | |||
| 6ec84a398c | |||
| 69c00c4db5 | |||
| d7abf2ec18 | |||
| 557fac39b5 | |||
| d1ead4b79c | |||
| 5900828051 | |||
| 818da6653d | |||
| e1a67e9b4e | |||
| 84dfdd0600 | |||
| ba691a26d4 | |||
| b66fac0494 | |||
| 1bda246df2 | |||
| 9a210c055a | |||
| 81e2ff99ff | |||
| 2459234147 | |||
| 60edab9f6d | |||
| ec3d2d7316 | |||
| 83d408b238 | |||
| 266c83367d | |||
| f185df4f1e | |||
| 4dae8851d9 | |||
| 142aadffef | |||
| d3fbf46f7a | |||
| 2ff57e932c | |||
| d628e97035 | |||
| bc8c7e114a | |||
| f3d0536800 | |||
| 1c09712df0 | |||
| 0cc98c378f | |||
| 6fd7752b29 | |||
| bfc4f940da | |||
| 64c6bb2475 | |||
| 70c8c2b5fc | |||
| dbf34a4d48 | |||
| 2c82708391 | |||
| d80a6bb405 | |||
| e10007ef7b | |||
| b86e381e20 | |||
| d2e969b836 | |||
| a865675537 | |||
| 4018c68d49 | |||
| c2158ff8fb | |||
| 57a680834e | |||
| 8a42cfa345 | |||
| d920341dab | |||
| fa4a5122bc | |||
| 3a96475a2e | |||
| f99b11d50e | |||
| a59eec553a | |||
| fed6f661c7 | |||
| 5c11a2941c | |||
| dbd948be6e | |||
| 9e5496b484 | |||
| f7452e8379 | |||
| 157db684c3 | |||
| cfe0595918 | |||
| c445297357 | |||
| f38e5b0879 | |||
| 1a5a91ccc7 | |||
| 3b3faa66f4 | |||
| d220e9326c | |||
| 9ff789ab06 | |||
| 57d3f3d5a5 | |||
| f1c09a6f8f | |||
| 93ce259fb2 | |||
| bd27932783 | |||
| 7b0a3cf781 | |||
| 752c9a086b | |||
| 92b60b279a | |||
| b8d3a4f9aa | |||
| 43b1d6217a | |||
| 98ece58342 | |||
| b5d255d7c9 | |||
| f974e29cc9 | |||
| 45cb0f0513 | |||
| 4904e6e223 | |||
| cc6d53fdeb | |||
| 721fb8ec70 | |||
| 077ae1eedd | |||
| b878c7e2db | |||
| 20ac4bd3dd | |||
| a1cb8ee266 | |||
| 880eaad6cb | |||
| 4a027797e0 | |||
| 54ad3ba777 | |||
| c9bcd29082 | |||
| 130688252a | |||
| dd2900bdbc | |||
| 10cec063c2 | |||
| 20518bc377 | |||
| 5d2f603c84 | |||
| 63de8047ce | |||
| a9735e129c | |||
| 930af17802 | |||
| 658c58e4b6 | |||
| 9dbae39604 | |||
| 16a18cc4b7 | |||
| aec7c7b0bc | |||
| 38827cc66b | |||
| 53301728fa | |||
| bc35ae90d0 | |||
| d37943a17f | |||
| 141500f4aa | |||
| 6e4f400d93 | |||
| 65cbe7c77e | |||
| 0eefb9e355 | |||
| de129037e1 | |||
| 9159520716 | |||
| e64de7d312 | |||
| 60cd28f516 | |||
| 40fac02d8b | |||
| cd65a36437 | |||
| 14d8596b8a | |||
| ef0c08dfd9 | |||
| a0ca486f0f | |||
| eaa73998a0 | |||
| 29bda6c00d | |||
| 8abf95063c | |||
| c79d9c7051 | |||
| a390c59e27 | |||
| e1d13ef965 | |||
| 7c3815e7b5 | |||
| e034771e36 | |||
| f43e3588a2 | |||
| 793242d3a7 | |||
| 427025e8f3 | |||
| ad18ea4bf7 | |||
| 42af082d3a | |||
| 7ea6e2059a | |||
| 20402ddd2f | |||
| 73a474bd35 | |||
| bd1d29f0a1 | |||
| fd60b16342 | |||
| f29c99bde9 | |||
| a43c6cf954 | |||
| 198826dd66 | |||
| 70a70663a2 | |||
| 605663704b | |||
| e139b92b24 | |||
| 6d1ef933c4 | |||
| 2107d58050 | |||
| 8d261b4120 | |||
| eebee9e523 | |||
| d9d12c7603 | |||
| 35158ed933 | |||
| 176f9bfea6 | |||
| 51248c1938 | |||
| 1d2771b014 | |||
| f249a82d71 | |||
| 44a060b159 | |||
| c1cf8d7a92 | |||
| 1a2712cdf1 | |||
| 6c5c9fac01 | |||
| 45fbaf159a | |||
| 8e93d007be | |||
| 6705a0e030 | |||
| 73ac1591e0 | |||
| 36ab1c0bec | |||
| e24a94d798 | |||
| e475d90e2e | |||
| c60b712523 | |||
| 81e5abd23d | |||
| e5ddf08f48 | |||
| 7f4f4ffded | |||
| 48ad20faca | |||
| 0a392b7bac | |||
| 1e708bf1c7 | |||
| 753ae3cbaf | |||
| d2317acfc5 | |||
| 29638c7f3b | |||
| 93021a5d89 | |||
| 7fb97ef4e5 | |||
| c68a9cf80a | |||
| 254a3079f2 | |||
| b4d1b80e04 | |||
| c77ccdf46e | |||
| c47fa0f9a2 | |||
| a9724e7383 | |||
| 4b893ab5b4 | |||
| 4c4fbf3e48 | |||
| cb35f19d6c | |||
| 4842bb0880 | |||
| d689f5cda3 | |||
| 0336d68ee2 | |||
| 7f88c24e83 | |||
| 3022af4410 | |||
| d3284a4006 | |||
| f9618def0b | |||
| cbfdd0c531 | |||
| 3567a768d5 | |||
| 2c545aa4bc | |||
| 757b99e6e5 | |||
| 31a86687e7 | |||
| 42ffeda90d | |||
| c4e6e7c59b | |||
| 5ebfba26d7 | |||
| 638a8450a3 | |||
| 0e02e6da79 | |||
| 5118681cb5 | |||
| 1570cdb5d4 | |||
| 7b70719ea7 | |||
| 3ee12cc50b | |||
| cb42685322 | |||
| a1ca61d813 | |||
| 707affec01 | |||
| cdf9a53a40 | |||
| 3637410327 | |||
| 7bc95ba6ee | |||
| c5ed0f3f81 | |||
| f1e3777947 | |||
| b0a2d2a907 | |||
| 61b64d9353 | |||
| c95fde7023 | |||
| faa91ef2a7 | |||
| af88ad0f0d | |||
| a43476bc87 | |||
| 054685fc38 | |||
| c62bcd1456 | |||
| 3ac5361cb2 | |||
| 7cbdc09055 | |||
| 125a885742 | |||
| 6c1129c946 | |||
| 949ebfa285 | |||
| 248c0d226f | |||
| 4570fcf7fa | |||
| 28382ce728 | |||
| bb4b781d24 | |||
| 0d0c295f82 | |||
| 3cc63c6618 | |||
| ff0dbdcc5a | |||
| 918af0873f | |||
| c0f6e5a134 | |||
| 628aaf2748 | |||
| 809ca0fcfe | |||
| 4faedf3e5b | |||
| 728be37de9 | |||
| 7fb0631ff0 | |||
| 542e953919 | |||
| 6b1d2bfb60 | |||
| 341756d7c0 | |||
| d6a374130c | |||
| 666f180482 | |||
| c94557f2ed | |||
| ca6f2b782b | |||
| 239acb5f95 | |||
| 25c4672845 | |||
| aefdead50a | |||
| e488c0ddcf | |||
| ba20c07420 | |||
| abd29f8462 | |||
| d5d4241501 | |||
| 97f6cdccfc | |||
| 7348fd37e8 | |||
| 2ae6c3a714 | |||
| a4ce379bce | |||
| 692671a543 | |||
| 60b84a29b5 | |||
| 58296cff5a | |||
| f8008e8614 | |||
| ce634bfd08 | |||
| 4edda34e2d | |||
| d8377ffc57 | |||
| 3901571685 | |||
| 5567bb2eaa | |||
| 6d5782b44c | |||
| c25c703723 | |||
| 7eb6617a28 | |||
| fc6caecc0b | |||
| 4ec8490c55 | |||
| d60b0c8805 | |||
| 6a31856d0d | |||
| b5d6abb01f | |||
| 954c4dfc16 | |||
| b7332957e7 | |||
| 4bfe9bc921 | |||
| 70381c4c89 | |||
| a14fe08a63 | |||
| fb7605e34b | |||
| ae2b227943 | |||
| 72f6fdb17d | |||
| 0f2e30cdae | |||
| f02178c154 | |||
| 194e2a43e7 | |||
| fbf48316b1 | |||
| 073d05ae21 | |||
| e7313da03e | |||
| 0b39d0fa34 | |||
| 49723d9aed | |||
| 9e8804dddb | |||
| d01e9f21f9 | |||
| b35d494f2d | |||
| 425a07e988 | |||
| 388699be7c | |||
| 513e876e2e | |||
| 04681d9e1e | |||
| bc621c1468 | |||
| a50d80992c | |||
| 060c78fd91 | |||
| 437d918cf7 | |||
| dcf5ce0eec | |||
| afc7512914 | |||
| da87135e02 | |||
| 27fdce3842 | |||
| c32e20b6fc | |||
| e0ac749734 | |||
| f944e6aa32 | |||
| 2a235fb0e2 | |||
| d36a51fabb | |||
| a90154e0ae | |||
| 67b476e6d7 | |||
| 39b76d18dc | |||
| e074a14ee9 | |||
| 2381d0a4bb | |||
| c66c8f873e | |||
| 5fc5a058b6 | |||
| da10913045 | |||
| 61839f4aca | |||
| 661bc9cc13 | |||
| 495b3fd844 | |||
| 3ce8abe46e | |||
| d5bf85b3b0 | |||
| 92a771f5ed | |||
| 9d07b6644f | |||
| 29be647911 | |||
| a7a05e26bc | |||
| 6f0005c78e | |||
| de1c2fc500 | |||
| 1643650685 | |||
| 08a2a2b0e5 | |||
| a2b22a7d09 | |||
| a3f96b96ee | |||
| 75183ef2f2 | |||
| 8e3f9a3c5a | |||
| b9f511ed02 | |||
| 61738dde9e | |||
| 73e4396edd | |||
| bb36d0e0fa | |||
| 0b697b9d53 | |||
| b6cc8180fe | |||
| 52e773230d | |||
| baa73c7f3e | |||
| 0277e4dc05 | |||
| 50761d9d3b | |||
| f824f76d9c | |||
| a3a8a9d6f7 | |||
| 3079cfd08c | |||
| 885b674298 | |||
| d4880f3e9d | |||
| 56ed416cb7 | |||
| a8b357965e | |||
| f0e305c20e | |||
| 2138930102 | |||
| 75069cd52a | |||
| 9d7ad28ca1 | |||
| 31571ba930 | |||
| b709eed3c3 | |||
| b0d8454915 | |||
| 054cc64ee8 | |||
| e168221bdc | |||
| 73ed5164cd | |||
| b058508478 | |||
| 65060d3321 | |||
| ac0e9b4b81 | |||
| f263be96b8 | |||
| e83699b6ae | |||
| 81d114092e | |||
| 48017a9d4c | |||
| c6b679bb19 | |||
| a912c3488d | |||
| 89a077e54c | |||
| 13ef965666 | |||
| 6073f9e7b6 | |||
| 4b3d458dba | |||
| 3d8c4af543 | |||
| cc18a68c00 | |||
| b83c5eca49 | |||
| e91d8ec81b | |||
| 318527223b | |||
| 79033f42b4 | |||
| 40f4444099 | |||
| eb7eb6ae88 | |||
| 56282845fa | |||
| 50919d85a8 | |||
| 590fac4aa8 | |||
| f19b6382bc | |||
| 11f2210894 | |||
| 652d7f65dd | |||
| c7b57e21a5 | |||
| 54ba7a053a | |||
| 563f0fb9b2 | |||
| 23382b2777 | |||
| 3c521f66a5 | |||
| c09d8ae630 | |||
| 7d42e4ce67 | |||
| 72858e7412 | |||
| f6a130b09d | |||
| c9836fbf25 | |||
| 85b035e6ad | |||
| d9aeef363e | |||
| 0ab0caa375 | |||
| 82e69db0c5 | |||
| b503594ac0 | |||
| 2815ddf6c8 | |||
| 34c88494b8 | |||
| 7dbca821c3 | |||
| b3c4324728 | |||
| d82a615e17 | |||
| cfbb3a205b | |||
| 5797fd4042 | |||
| 453bb43157 | |||
| 9d56ae3add | |||
| dee9bbdfad | |||
| b8b0f77ff8 | |||
| 7ad24042c1 | |||
| 51f05764e0 | |||
| 9aac7c2269 | |||
| 1b77dfa47a | |||
| cdc385ea16 | |||
| bc88d3514c | |||
| e21693cb18 | |||
| 3e9726d9db | |||
| a21b71a0e2 | |||
| 6abf274680 | |||
| 075b18e5db | |||
| a0aa78a529 | |||
| 9f64f0523b | |||
| 43ae6c39e3 | |||
| c5fb2422da | |||
| 7f9a035802 | |||
| a6b1f33208 | |||
| cc4cdfcf13 | |||
| 1ad61b1550 | |||
| 29556e9f8c | |||
| be9d683e46 | |||
| da99bcda68 | |||
| f3d9513df2 | |||
| 2c22c9c2f1 | |||
| e050177c08 | |||
| 1a5abe6550 | |||
| cc836433fb | |||
| 5829794d82 | |||
| bb026b8b59 | |||
| 771be58dc5 | |||
| ab37189022 | |||
| 3463848cb5 | |||
| cf71f88680 | |||
| 234533e367 | |||
| e0c6d6dd7d | |||
| c82f3bbf0f | |||
| 51d5a897c2 | |||
| 7f821abfef | |||
| 1edb964da9 | |||
| 400bcaf085 | |||
| 583bbee606 | |||
| 7463d47057 | |||
| 46cd1a21f7 | |||
| 82c7530b6f | |||
| 6d8217e00f | |||
| cc735e9b33 | |||
| 19753632a9 | |||
| a3e1d08ba2 | |||
| 5e34287530 | |||
| dd7c9e3f88 | |||
| d8652fad36 | |||
| 58ec0bab09 | |||
| 094b2fd5a9 | |||
| 85285b5e62 | |||
| 16dc7dc2f6 | |||
| f53067ab29 | |||
| efd33db69d | |||
| 5b13032aec | |||
| 177208f9da | |||
| b8d017418a | |||
| ce6e64bd17 | |||
| 6ce044806b | |||
| 68203436e0 | |||
| f9a7b97839 | |||
| 2869042f38 | |||
| 82158aece6 | |||
| 1f0f432327 | |||
| acd2701fa2 | |||
| bb0c229d7e | |||
| 3f9d66bd51 | |||
| 025d177565 | |||
| 44192d4494 | |||
| 4b5e93cd3c | |||
| 0889076d3b | |||
| d6b482755b | |||
| b70885595f | |||
| caf99d36d6 | |||
| 35341a6828 | |||
| 2bb00bc666 | |||
| e16c1de001 | |||
| 2a6dda07eb | |||
| 4fa8f9ecc0 | |||
| 9b29f9f819 | |||
| faa1779204 | |||
| 2f51fea743 | |||
| c78077d8d6 | |||
| 0bb7a6e125 | |||
| bd9203fcbc | |||
| af8cf2d550 | |||
| d071d85486 | |||
| 04ee1656ee | |||
| 56372c55b4 | |||
| 7146c4cb71 | |||
| b0847e2fa1 | |||
| f93e938cda | |||
| 5a01840784 | |||
| 0a0992fffe | |||
| 2a5f713f97 | |||
| 3ac440b6b5 | |||
| db9891d8cf | |||
| be9be6d3cd | |||
| b02c00bbf1 | |||
| 7f3454128d | |||
| 4922f4dd40 | |||
| e3de9baaeb | |||
| 5df280e94d | |||
| 5afc9ba739 | |||
| 983636c502 | |||
| a19a47dba1 | |||
| ead374db5f | |||
| 2f32014c75 | |||
| 4f4be51ac8 | |||
| 149caa5602 | |||
| b472e5e648 | |||
| 64132ba92b | |||
| 9ef356f59d | |||
| 825844107e | |||
| 8dc52b859b | |||
| 86082009b9 | |||
| 5b70504144 | |||
| 0398c6e723 | |||
| c568e41296 | |||
| d690ea32bc | |||
| 48c378127f | |||
| 50846eb682 | |||
| 5c0cc69d22 | |||
| 74d9fa58be | |||
| 1a02740b67 | |||
| acd473683e | |||
| 090619151e | |||
| 846027b9e3 | |||
| f9539cfba5 | |||
| 9375862ee2 | |||
| 4818cc4eb9 | |||
| 65461d7418 | |||
| ad73abced1 | |||
| 261b024bf4 | |||
| 20292275b5 | |||
| e912b8e075 | |||
| 4551cc11df | |||
| 14b8892cce | |||
| 284e57ad68 | |||
| b46023bb4c | |||
| 3002945d55 | |||
| 0d4df75375 | |||
| 78c4a86371 | |||
| c0c6ff51e2 | |||
| 4384cbb953 | |||
| 3397fb6560 | |||
| b231521ff6 | |||
| 3efc709e03 | |||
| dda7f54a16 | |||
| 2d33d3e2b8 | |||
| d50c9c7748 | |||
| 665a0bcffe | |||
| a141b8c5ea | |||
| 9d710702a4 | |||
| e835fa6073 | |||
| c0d037b9e9 | |||
| b2bc431823 | |||
| 9c5140006b | |||
| 4e72cb96c9 | |||
| f88e81ffef | |||
| 17861289c8 | |||
| b99aad743b | |||
| b1ce4d630d | |||
| 135f2b710c | |||
| e8c18bd9b6 | |||
| 76621e497f | |||
| 065e0edc5f | |||
| d72792ff37 | |||
| f9239b008e | |||
| b31c7357ed | |||
| 7c6d6f5297 | |||
| e225139011 | |||
| 37edf80321 | |||
| 038f5dc554 | |||
| 5e964fad39 | |||
| 3800d67d71 | |||
| 7f5d1a0b6b | |||
| 92860cffca | |||
| 2aced5c010 | |||
| 4e1879715d | |||
| 403f70d6db | |||
| b33256c809 | |||
| e39fac32ec | |||
| 80e3331596 | |||
| 2a3af5214e | |||
| 4911d713a5 | |||
| 5e24f685c1 | |||
| 97d3621705 | |||
| 544a02ca3f | |||
| ae26e44cc2 | |||
| b0f9d33b32 | |||
| c5e7e5ab68 | |||
| 5e3add0b81 | |||
| 9ccd43c29b | |||
| 9fc6c9aaf7 | |||
| 2d61200a05 | |||
| 268d826158 | |||
| a47b6c330d | |||
| de52fa7f48 | |||
| 680f4966a1 | |||
| a9b9b27a0b | |||
| 52e7ff9919 | |||
| f4a010e505 | |||
| 0bd14488bb | |||
| 6500559f8e | |||
| 642dbd4098 | |||
| a8187d15c6 | |||
| df5168765b | |||
| c26ae16060 | |||
| 9ccb8fb838 | |||
| e68b3d2cbd | |||
| 1be3f8368f | |||
| 3e64dd4653 | |||
| 48dde287d8 | |||
| 4da2f33892 | |||
| 74ca13861c | |||
| 532872b3c6 | |||
| d37f730ee8 | |||
| 5e744c4c52 | |||
| 858c4eb808 | |||
| 3ffeb8ab00 | |||
| 0579b2935c | |||
| c5cb01bd33 | |||
| efd5836e43 | |||
| f0f2092fb4 | |||
| e09b7eb978 | |||
| 3e5db9eedb | |||
| 91500d1022 | |||
| 51d2990eb9 | |||
| 38b8df4a07 | |||
| 211027919f | |||
| 38c33395c8 | |||
| 7704f51441 | |||
| ae63808678 | |||
| 81e349e07d | |||
| 49cdf1c7cf | |||
| 7e36b0e8fd | |||
| 552c07e932 | |||
| 44e3b33aaa | |||
| a8ce219016 | |||
| b9e93065f7 | |||
| 78f9ceb995 | |||
| 1904a187e0 | |||
| 0320a9aece | |||
| 4e94e51218 | |||
| 4392657a71 | |||
| fbce1ef7c7 | |||
| 309d10c4e2 | |||
| f43100a247 | |||
| 4d05a09a20 | |||
| 3538f1a629 | |||
| 993958c356 | |||
| 2d6d2357b5 | |||
| a66d85b63d | |||
| b0bd0435c4 | |||
| b2e6938815 | |||
| d66dd543bf | |||
| de7a5a30d1 | |||
| 40c35dc77b | |||
| 5dd03098e5 | |||
| 672a28bb28 | |||
| 8ea2f5253a | |||
| 1e0146a453 | |||
| c03133622f | |||
| 8303cfbd2b | |||
| 3ef550f738 | |||
| c8767e23bf | |||
| f302408712 | |||
| c88c0b0127 | |||
| acb1eab24e | |||
| 6cd2205f1f | |||
| f6fd262618 | |||
| 5125990c4c | |||
| 52cb145333 | |||
| c6bd93fe85 | |||
| 6a762d463f | |||
| 5beb319b27 | |||
| 12622d5847 | |||
| a9baaf4da4 | |||
| f61098b874 | |||
| 8ca4f730e8 | |||
| 0b5f85469c | |||
| 8e2b2123f1 | |||
| b4b9a913b3 | |||
| 2dc6478c34 | |||
| 28614b5793 | |||
| 4a0103a88d | |||
| fb494bc32a | |||
| de9c00b293 | |||
| 3e5cbb40ce | |||
| 47793635b2 | |||
| 259800ce35 | |||
| a38f286fb9 | |||
| b6ffbfa40e | |||
| b814a4f009 | |||
| 4ed6b7727a | |||
| c3a2781507 | |||
| ffba1d2b85 | |||
| 248409e43f | |||
| a316cbba73 | |||
| 12135c445b | |||
| 844202f36b | |||
| 9b4a124c08 | |||
| ab1b31604c | |||
| a8b18480aa | |||
| b5e4df5c16 | |||
| 2dbcc7a297 | |||
| 1730b3bacc | |||
| d730ffbc72 | |||
| d36fececd6 | |||
| 0caafea777 | |||
| c847339b0e | |||
| 58bb08b604 | |||
| 98d303c6c0 | |||
| 9514edafba | |||
| adb9149413 | |||
| c51fed5307 | |||
| db746f1296 | |||
| 62046aed59 | |||
| 5e0e8804c0 | |||
| 416791d4c5 | |||
| 5ee11ed4e0 | |||
| 3b2ef95798 | |||
| 827e4c65a7 | |||
| fef89feb62 | |||
| 42f92306a5 | |||
| 44b8fd6ef5 | |||
| 5a86ebe318 | |||
| 1e3df62993 | |||
| 662eaf4933 | |||
| 3fd82e51bd | |||
| 154e38b42e | |||
| 915cdeb426 | |||
| e15836e9ca | |||
| 8e1eae9a45 | |||
| d67542d7f5 | |||
| a202d082e8 | |||
| 4087f1c03b | |||
| bbacb7e210 | |||
| 19cf8f6bdd | |||
| f05d1750ee | |||
| fa696b56c2 | |||
| 3f52cd9c2b | |||
| d44a1934fe | |||
| 08f66df860 | |||
| 48d9a3ec8a | |||
| de0b4ddc99 | |||
| 6e1bb0c49c | |||
| d4597b6bb6 | |||
| 52f5930744 | |||
| 9504ad3b80 | |||
| d2c7f8a963 | |||
| ff05deaa1f | |||
| b233f567ce | |||
| b30d2c9536 | |||
| 5dd37ea696 | |||
| 49393070e0 | |||
| fdb6dd4077 | |||
| 74a516cde0 | |||
| 58da68d72f | |||
| 918250ce78 | |||
| c7ca3949f6 | |||
| bbf5e95186 | |||
| 462e757f92 | |||
| 58798f1513 | |||
| 087490e26a | |||
| c08d3dd82f | |||
| 430cb5ea1b | |||
| 9b1c279fd5 | |||
| 17be8b626d | |||
| 412757b178 | |||
| 18c64fafe4 | |||
| 77a1600c13 | |||
| 59ce586ea4 | |||
| 5fe28f6503 | |||
| 1f641c0ba6 | |||
| cca3797669 | |||
| c9cb5800ec | |||
| a28fdac242 | |||
| 0724fcffeb | |||
| 7032abf2e7 | |||
| 9e8fa5827d | |||
| 5d18838868 | |||
| 2578970f7d | |||
| f44fe81573 | |||
| aa5d97f49b | |||
| f262c93912 | |||
| 763c5e8356 | |||
| 050295ea20 | |||
| 77044f56fc | |||
| eea413a90f | |||
| 8cad2f9f56 | |||
| 64ac32f683 | |||
| 1287c3dc4a | |||
| 9d7fc9db8c | |||
| 99b10c436a | |||
| bb54085c20 | |||
| 9a0ada75fa | |||
| 848ce8c978 | |||
| 7b8df16c9e | |||
| 7a84f38db9 | |||
| ba4de07ad8 | |||
| b2d87940d6 | |||
| 6edc5180c7 | |||
| f0c895a008 | |||
| 6d6716b8a2 | |||
| d64a010c39 | |||
| e1f241bd55 | |||
| ad88637f22 | |||
| a756a74b49 | |||
| c311c0a221 | |||
| ecc0934657 | |||
| c402f1ff87 | |||
| eb810f1bf0 | |||
| c067573193 | |||
| 553c119356 | |||
| e62cb1b6b8 | |||
| 4da243a59e | |||
| 622192e75e | |||
| 81a6ec644a | |||
| 58100cda8b | |||
| 734ab5f3cd | |||
| d855f752c8 | |||
| 5ac3ecb85e | |||
| dfb9e3a0c8 | |||
| c2b2ce1f11 | |||
| cecfe47540 | |||
| 4b544ae207 | |||
| e30e17038b | |||
| 7e2c16ee38 | |||
| 041f3a22fa | |||
| f990ef27cf | |||
| bef762e0d6 | |||
| 0d001b358e | |||
| c1cd5c71e0 | |||
| d4209510c2 | |||
| 620e279453 | |||
| bbf73c48a3 | |||
| 9319dda0ec | |||
| 14f5340802 | |||
| 0152985e64 | |||
| e43268f585 | |||
| 7ef788752e | |||
| b66d7ce1fd | |||
| dc34652efd | |||
| e0d2fb0de1 | |||
| e0d9443141 | |||
| a6305a5cae | |||
| 9e2578be1e | |||
| 09b8f532a7 | |||
| e0939a2856 | |||
| 90f4b458e3 | |||
| f5213deb67 | |||
| bb08b1e637 | |||
| ea6f5c920b | |||
| 54ff4cddbf | |||
| 645641f4bd | |||
| 97d83890e0 | |||
| ec5dec4a16 | |||
| 4cfb621423 | |||
| c381331c10 | |||
| a7923f2a06 | |||
| e5f7172c97 | |||
| 43fff0450b | |||
| 107fd3fce1 | |||
| 1a9b6dec26 | |||
| 444be5bb7f | |||
| 5ebfa018ee | |||
| a6dab5e1ee | |||
| f766871824 | |||
| ba29bbe3be | |||
| d711031ce9 | |||
| af5c19cc52 | |||
| 359fbd2d73 | |||
| e8b9853367 | |||
| 376b2b8051 | |||
| e8d0af87e4 | |||
| a4267320b0 | |||
| 52dd42701a | |||
| fc9b1e5b12 | |||
| 2ecfaa41cf | |||
| 7106c4fdcf | |||
| 9420ca9949 | |||
| 956a1851a2 | |||
| dafed86179 | |||
| e72efce071 | |||
| 77b9658dba | |||
| 090c984ca3 | |||
| 2ff25b656f | |||
| ff4d1edd63 | |||
| 79d12578c7 | |||
| c0784b40e0 | |||
| ff87c487c8 | |||
| 82b43b5a9d | |||
| 4b4e159a8e | |||
| bb1c339655 | |||
| aca6d6346f | |||
| e7efaf4365 | |||
| c6d76f580e | |||
| 941df0366d | |||
| 7762d6ed52 | |||
| 466df367e6 | |||
| b0c8787cfa | |||
| cf805f530f | |||
| b40c6a1c67 | |||
| 3a62010445 | |||
| 3b4e7d9169 | |||
| 4245ba0d15 | |||
| 95e4c23db1 | |||
| f5e120ad2e | |||
| fab146b328 | |||
| 5aeadf8f98 | |||
| 5f9c655594 | |||
| dd18cac702 | |||
| b76ab902e5 | |||
| f5082e2d3a | |||
| 61c493fc91 | |||
| 6779e19ac9 | |||
| 443eb43d1f | |||
| 560bd5a872 | |||
| 8f35a64faf | |||
| 7507f6be50 | |||
| ac3b441456 | |||
| 53113e5eeb | |||
| 9d5db3ec12 | |||
| 169dcb86e2 | |||
| e4f5224f42 | |||
| 98907e66e9 | |||
| c05343d58e | |||
| 541fbc9a6d | |||
| ef08e02333 | |||
| 35cc7ef8d7 | |||
| e77382864b | |||
| b5fd802005 | |||
| 98897f3c98 | |||
| d49bb8a6ca | |||
| 05f2d3b2d9 | |||
| d4d6d832b1 | |||
| 9c92138f2d | |||
| 5a4806bc43 | |||
| 54105e221e | |||
| adfc76aa79 | |||
| 3e3f7af796 | |||
| 07969f7e10 | |||
| 249ab23df4 | |||
| 3141b47fba | |||
| 31f4cf0253 | |||
| 21d48b32c9 | |||
| 11bd42af82 | |||
| feac9cb3a3 | |||
| f6b5012f56 | |||
| f9b388c658 | |||
| 4093f4669a | |||
| 9594f2cd8d | |||
| 380203eb53 | |||
| 307a73c752 | |||
| 7ad471a810 | |||
| 1184f9d070 | |||
| 3050aca3e6 | |||
| 8c41c6785d | |||
| 092ce0f9d8 | |||
| 97dceb5623 | |||
| 23b6df536f | |||
| 95b4206986 | |||
| 914de78576 | |||
| ecf00fe9d6 | |||
| 7257e791ff | |||
| c71b3a319d | |||
| 767147aef1 | |||
| ce5a45037a | |||
| 9c9ca37586 | |||
| 381cd2e1ff | |||
| 2a2d5a5583 | |||
| 5c41dafc97 | |||
| 6367a98134 | |||
| 0bbe2b0331 | |||
| 6a77d511e8 | |||
| 989e3733a2 | |||
| fbc24ea400 | |||
| 2b8c2f612e | |||
| 4905020e77 | |||
| 75787d20bc | |||
| ca9f120988 | |||
| 5fb6753445 | |||
| e86954e8ea | |||
| 604cd60dbe | |||
| 05f4ae8e58 | |||
| 88ac783fd2 | |||
| bc66ede9aa | |||
| 1c295896e6 | |||
| f90076abe9 | |||
| 01aa372e59 | |||
| 479ac81aa9 | |||
| 9c69c6d129 | |||
| ea1e9cb4c6 | |||
| dac7a77afb | |||
| 9b21197fec | |||
| e4255649c0 | |||
| 81aff42e03 | |||
| 221851abc1 | |||
| 7f019583f2 | |||
| e18a188723 | |||
| f91ae5b319 | |||
| dd39b9ebe8 | |||
| 15896a3b11 | |||
| e092606181 | |||
| a4707c5fc9 | |||
| f0dde845db | |||
| b0ea027769 | |||
| d9f2faa462 | |||
| 7b4d31d4f6 | |||
| 522e182694 | |||
| 6c8a6620d2 | |||
| d68b2b22e0 | |||
| a4068001a3 | |||
| 574fed2618 | |||
| 8762e1c5ae | |||
| d94e3113ff | |||
| 3c5b2618c0 | |||
| 602c5580d3 | |||
| 038beafb5e | |||
| 14923f8c07 | |||
| b715687617 | |||
| c46fa5d69c | |||
| 310e1d4501 | |||
| fc957b63ff | |||
| d53f64890c | |||
| 5f5583e2cb | |||
| 4c11ac9a42 | |||
| cf6ad94509 | |||
| 08bb9c73a0 | |||
| 8e49194764 | |||
| 8afcb50a39 | |||
| 0326e1031f | |||
| 117009c0a2 | |||
| b7833d8e09 | |||
| 3fd39fb823 | |||
| 317b7cabb3 | |||
| a59bc1f436 | |||
| c24810b876 | |||
| bc94353850 | |||
| f13a3505f3 | |||
| 4af871f408 | |||
| 162d5ccb62 | |||
| b1723b4985 | |||
| 6bf7d56d51 | |||
| 9751cbbf83 | |||
| 8fa5ffa007 | |||
| f353956353 | |||
| 02cfb2d877 | |||
| 1b6f88f6fd | |||
| 9f6ad08c50 | |||
| 25340fd744 | |||
| 7f2b44db04 | |||
| d67b6c6120 | |||
| 4cfb5752b2 | |||
| 0d7b2d9f44 | |||
| 08ebc4cd59 | |||
| 85ae9712e3 | |||
| 83128f3019 | |||
| 7aa5ba9c6b | |||
| e5dee2d7e6 | |||
| b0232b804e | |||
| de7cec35c6 | |||
| 700c57b807 | |||
| ce75bba2c3 | |||
| 46f8ebd136 | |||
| f8279d6972 | |||
| 072ca4da4f | |||
| 8c5c30dfd4 | |||
| edc0116a3a | |||
| c1b2c3689c | |||
| 6746cc33a0 | |||
| 74723d1a1f | |||
| fccb8148d5 | |||
| 3a4ebbf92c | |||
| 48735e685c | |||
| cdcae4efb0 | |||
| f7c795c7f6 | |||
| beba2ba092 | |||
| 9ac10a97ce | |||
| 2f5f82d797 | |||
| c7fdb2acd7 | |||
| 51c7216b70 | |||
| 0f3ffaade0 | |||
| 156b98f7f0 | |||
| a09faac9a7 | |||
| d20c552248 | |||
| f7fdf7902d | |||
| b327963925 | |||
| 1eb3d563c6 | |||
| 02991c70a9 | |||
| 71ddbb409c | |||
| fbcedc2fa0 | |||
| 3dad818af2 | |||
| 5dc0fa91e8 | |||
| 565c9ae98d | |||
| 2d6aa620b4 | |||
| 03d5a6cfe1 | |||
| a5c47e4fdc | |||
| 9581278481 | |||
| 1c3ac21291 | |||
| 25faf05807 | |||
| 968dd52f6f | |||
| a4b32b0d31 | |||
| be1415fbd4 | |||
| b5901a1570 | |||
| bdc6dc8683 | |||
| 5087fa67dc | |||
| fc205713c8 | |||
| 9adc5ad59e | |||
| f63ccd033d | |||
| d7c0e2ec35 | |||
| 00da52f32e | |||
| 287c684866 | |||
| e94cf6ddc9 | |||
| 81272a2f7a | |||
| e622a49b72 | |||
| 9030aed8a4 | |||
| eee534a161 | |||
| 344abbda66 | |||
| 834814f867 | |||
| 7f823a04cd | |||
| 0f5e925a1a | |||
| e0c79389ca | |||
| a40bc65fd4 | |||
| 81bf98c746 | |||
| 41b59c5445 | |||
| e1bbf9d80c | |||
| bd2abdf45f | |||
| abb91fbb65 | |||
| f9b16a2110 | |||
| 588ac1d6a6 | |||
| 058d2938fb | |||
| 3db3214cbe | |||
| bfc80f982c | |||
| 727bc87ede | |||
| e2143d3ee8 | |||
| b46ff4158a | |||
| 734233257c | |||
| 250558baf3 | |||
| 8e5323e2d7 | |||
| 06a920502c | |||
| d5d036b412 | |||
| 9d03e75d9b | |||
| 0158807847 | |||
| 06a3f3ea0d | |||
| 12ae0a587d | |||
| b3aa057d58 | |||
| dd6d332166 | |||
| 6eca2eb147 | |||
| 744e204817 | |||
| d45e7d6b85 | |||
| 6fd47edbe3 | |||
| a616310eb7 | |||
| 2130029f90 | |||
| d11f254476 | |||
| d54a11ad11 | |||
| a9361fe428 | |||
| 5345170a4f | |||
| d0ccd85afe | |||
| 520404c215 | |||
| 9ac1756011 | |||
| 851d74da3d | |||
| 3f2691c5d4 | |||
| eaf34b1c8b | |||
| e9219adfb5 | |||
| 9eddaf66cb | |||
| 0a29a3fa2a | |||
| 9bb0787410 | |||
| dd14fd202d | |||
| 114deba06e | |||
| 0334f1094d | |||
| 7af68c3cc0 | |||
| 953d3a08e7 | |||
| f141ae78f3 | |||
| 94d619cfa6 | |||
| 89470a0ce0 | |||
| e6b291d034 | |||
| b0eef03c73 | |||
| 25a6c722b6 | |||
| 67a5993926 | |||
| aa979e31fd | |||
| b74df2b3e4 | |||
| 4afedaf537 | |||
| 2b79474060 | |||
| a6360ebfe5 | |||
| d99681904e | |||
| 1ac1a44e83 | |||
| f990f92977 | |||
| 490d5b6e6c | |||
| 4b7fc8551c | |||
| cd9c112218 | |||
| a8f44944b1 | |||
| d31c9b19ce | |||
| fb178866f4 | |||
| f921b67fff | |||
| c367e4f73f | |||
| dcb18a57c4 | |||
| 1b861baf0a | |||
| 10d833e598 | |||
| 708d85abeb | |||
| ee028382df | |||
| c05a49f8c9 | |||
| 35cfb50955 | |||
| f179e74a4a | |||
| 9065aa3750 | |||
| 96e42c793e | |||
| 72a390c563 | |||
| a19c918c68 | |||
| c45c23ae6f | |||
| 5cbf5365c5 | |||
| 3ad7a37f95 | |||
| 6cac2838e3 | |||
| fbbf7f90f6 | |||
| 1ea75a5d2d | |||
| 3ce87c8a6b | |||
| 39645a1a84 | |||
| a60e372c5a | |||
| 76cece7b90 | |||
| ca2944d566 | |||
| 53d0636574 | |||
| 7e6278684c | |||
| 2d7a6ccf3c | |||
| 18b99c0de4 | |||
| 96674571a5 | |||
| 29a330b1f4 | |||
| a644f45625 | |||
| 3db669b24d | |||
| f38868a97f | |||
| 4f3dc5422c | |||
| 1ba7181067 | |||
| 74bf54cb8f | |||
| d4732d3ab0 | |||
| cb9631b122 | |||
| 4077893d08 | |||
| 4ee1c21144 | |||
| c8eca56690 | |||
| 300e2d0b7d | |||
| a8040777b3 | |||
| e34de921b6 | |||
| a04f707f63 | |||
| 9aec899bfd | |||
| afb66df1a4 | |||
| 54b888bb08 | |||
| eefff8497a | |||
| ecbab64c35 | |||
| c8447dea3d | |||
| 5021e8ba91 | |||
| f846d78778 | |||
| fe9703dd94 | |||
| b44a7c73d8 | |||
| 9ae27f1415 | |||
| 19b928d663 | |||
| 5193342b3a | |||
| 109fb4bb45 | |||
| d6ccd812c2 | |||
| 81a6228028 | |||
| eeb216b75e | |||
| 6714595fee | |||
| 025924c4f7 | |||
| 7c10c8dac7 | |||
| daea8f6ae4 | |||
| 41d1fe9191 | |||
| 7d50e4d65f | |||
| 9a653403ae | |||
| 77f13c9edb | |||
| d95b1a0a41 | |||
| d9cc4980e8 | |||
| 5e987fa8b6 | |||
| 42001be9ec | |||
| dc198fec8c | |||
| acd47d5ec9 | |||
| 72e3fb5bfe | |||
| b2539b843b | |||
| be5dff8472 | |||
| 76037e8b3a | |||
| 11f4bd503b | |||
| 6688b279e7 | |||
| 1ca38015bc | |||
| 656269ff17 | |||
| bd727b825d | |||
| e04c1e7dc9 | |||
| 615df76dd5 | |||
| 112c6252d6 | |||
| b13370bf0d | |||
| 88aa5d3fdb | |||
| b187d8f836 | |||
| 1763a1a717 | |||
| 62b61ed980 | |||
| c11034b9bc | |||
| 58e8fe0bd0 | |||
| a0c8765588 | |||
| 9022059dc6 | |||
| 7f790be1e4 | |||
| 93791c999d | |||
| 5e9f1437ad | |||
| f9655213b3 | |||
| 008d608ec4 | |||
| 78c8d12ad8 | |||
| df0ad4d875 | |||
| 776e0fcd11 | |||
| 6ec3bad49a | |||
| 52f44c3ea6 | |||
| 941d36ebfe | |||
| db8243b4b4 | |||
| f919b7360e | |||
| 8e1b7c0036 | |||
| 9b0e0fa9c2 | |||
| 565d7afa92 | |||
| c914ba946f | |||
| 6f9280f64a | |||
| 8fe460e401 | |||
| b9fe359d23 | |||
| 2c6d494c32 | |||
| dbd1279226 | |||
| b463fcf61b | |||
| 82b4f5125d | |||
| 3f89d6d009 | |||
| 676f843c92 | |||
| c2387dc120 | |||
| 9a8e1534c0 | |||
| dbc4964e94 | |||
| 00b263f345 | |||
| 62d03b0d41 | |||
| b5a4b293a9 | |||
| bfcfdb83a7 | |||
| 4ccbfa8164 | |||
| 675d10c8a6 | |||
| 2cde7336dc | |||
| 169490dbec | |||
| 3ceb297276 | |||
| 12633bfed6 | |||
| 5958bac2a2 | |||
| 37f2d5b8b0 | |||
| 47891d2953 | |||
| af68571f4e | |||
| d0ec925ca3 | |||
| 939194158a | |||
| 576265e09c | |||
| dfaf45344c | |||
| 6c378957e9 | |||
| e8f9bc80a0 | |||
| a30b8b21e4 | |||
| 12204852aa | |||
| edba980b56 | |||
| ba666ddbfa | |||
| 35f9f59c57 | |||
| ac1f493338 | |||
| 1c3c70d460 | |||
| e8e7bdf9e0 | |||
| d263e0e60c | |||
| 028d86c0bb | |||
| f8b6830013 | |||
| 49a40c50e8 | |||
| 2ba48995fe | |||
| 3cc8ade6d8 | |||
| 39c9a0a299 | |||
| 3ad317fb6d | |||
| bd46440d12 | |||
| f3a28814ae | |||
| 9f8f64b9ec | |||
| 1e524a49c0 | |||
| 467c276fca | |||
| f610e39418 | |||
| 27d977b2fa | |||
| b36e72bfcc | |||
| e49701228d | |||
| 48f8b33d7d | |||
| d87ace8c89 | |||
| b1326d4145 | |||
| 7c2862c958 | |||
| 0a4f5ad64d | |||
| c617a11c55 | |||
| 053167965a | |||
| a7ac45b937 | |||
| 5482bbf4bd | |||
| 0a58e106b5 | |||
| a1395a5808 | |||
| a0d50ef03a | |||
| 685e2c8b6d | |||
| c6d9a20fe5 | |||
| 4a952d867b | |||
| cb4cf43fcf | |||
| 1bce7a832b | |||
| 574234f70f | |||
| 42e5470dd0 | |||
| 8199365324 | |||
| 86c92eb31e | |||
| d9fd952c03 | |||
| 967c7ded8d | |||
| a4bf847b56 | |||
| d6917155e8 | |||
| 3f024c1ef4 | |||
| 96d253f0f9 | |||
| 9b166fb9a9 | |||
| b8ae8cd452 | |||
| ca82b227b9 | |||
| 862496495f | |||
| 8bb9a8c5d1 | |||
| 00cb66484b | |||
| cabe2ae18d | |||
| 665a3f3180 | |||
| 3b5d7eaab6 | |||
| aa2358aa03 | |||
| a7decc1948 | |||
| 38b48604f3 | |||
| 60856cb7b9 | |||
| 350d013043 | |||
| 70c92fea15 | |||
| 6211b126a9 | |||
| 54c3fcc72a | |||
| 27c9088ddb | |||
| b8c2d42cad | |||
| 1f5ddd9530 | |||
| 2896ce0dad | |||
| 29bcde145c | |||
| 11db429bcc | |||
| 75aea9f885 | |||
| c80559005f | |||
| 9b927cfcc2 | |||
| 4db7931aa0 | |||
| 1e67329c64 | |||
| 6d17e4d538 | |||
| 350f58ec9d | |||
| de9478a992 | |||
| 70a2c985cf | |||
| 78037dc9ec | |||
| 9b11efd1e5 | |||
| 3c2ee8fbb3 | |||
| 163cc3f795 | |||
| 041382b02f | |||
| 837bfc3aa5 | |||
| 5ba1176f14 | |||
| f08649b02d | |||
| edbe5a254b | |||
| cfedb30628 | |||
| aa18b88a61 | |||
| 05962e71e3 | |||
| bafc3d0082 | |||
| 308f1b44c3 | |||
| cd17789529 | |||
| bf988d89c4 | |||
| b1e842ae47 | |||
| fcc3c35ae2 | |||
| e2524e43cf | |||
| 6aac2d62be | |||
| 95e2636f23 | |||
| 7565492bb9 | |||
| 89f7f12f92 | |||
| cdd15ca818 | |||
| 11f2d88b16 | |||
| 8066d540e0 | |||
| c3091a7346 | |||
| 9cadebcd50 | |||
| 3e54eb7520 | |||
| 068f19c895 | |||
| 3651cce542 | |||
| 9e0b9d9dda | |||
| f194e2a1be | |||
| f56c6f2836 | |||
| ec896461a7 | |||
| 8eeed821d3 | |||
| 80c1689b24 | |||
| 7c29b566be | |||
| 920d595c12 | |||
| 5d7174b2a7 | |||
| 3c60f47e3f | |||
| c4abc59673 | |||
| ff4cdd82ee | |||
| 1c6e9caa40 | |||
| 07ec04ddc6 | |||
| d6b3f5af81 | |||
| ce1fe9321c | |||
| 2c88e4e3ba | |||
| fed37c9dc0 | |||
| e14eefdc31 | |||
| 2525d369d4 | |||
| 0600481a67 | |||
| f0324e4755 | |||
| 00f0f957c0 | |||
| 9c652d784d | |||
| eb2fa74661 | |||
| 146c599deb | |||
| 574c4033ab | |||
| 9f122eec18 | |||
| eb0f6a04d8 | |||
| c7230befe4 | |||
| 9a316ae1a9 | |||
| df4364714e | |||
| a1cd2b39eb | |||
| 8a7a15a361 | |||
| 2cdeecb6e0 | |||
| 638e4a5ac1 | |||
| 93b4ef5f17 | |||
| 26d490f74a | |||
| 01a1190524 | |||
| 2073090628 | |||
| 6d00cb208d | |||
| 13b9bf687d | |||
| 56f7da34d7 | |||
| 0f34440b64 | |||
| bbcc7cca4e | |||
| 0453afcb0e | |||
| cafecd1e19 | |||
| 4b968a9474 | |||
| 9244945e69 | |||
| 78819c1733 | |||
| 394e18f76e | |||
| 40eb950e94 | |||
| 90636a5329 | |||
| 2fc6d4cd21 | |||
| b20bdf3c4e | |||
| a20726a301 | |||
| 39727a1c9f | |||
| 168f46a436 | |||
| 4ec07a6dc7 | |||
| 798a6295ee | |||
| 73cb8da8c1 | |||
| 3167ce9785 | |||
| 63b7b71b49 | |||
| 9965af9ccd | |||
| ba5d2c925a | |||
| 867be09e29 | |||
| 8362a92898 | |||
| 162482dbc4 | |||
| c0f14db5bb | |||
| 3c561914c6 | |||
| 34c6f1bf4d | |||
| 2187898494 | |||
| d4bc6ae7a1 | |||
| 81cdb15353 | |||
| 5cfa9d4bc5 | |||
| 92da453233 | |||
| 2aedfedbd3 |
.coveragerc — new file (5 lines)

@@ -0,0 +1,5 @@
[report]
include = lemur/*.py
omit = lemur/migrations/*
    lemur/tests/*
.gitattributes (vendored) — 3 changed lines

@@ -1 +1,2 @@
* text=auto
version.py export-subst
.gitignore (vendored) — 15 changed lines

@@ -1,3 +1,4 @@
/.cache
.coverage
.tox
.DS_Store
@@ -9,9 +10,11 @@
*.db
*.pid
*.enc
*.env
MANIFEST
test.conf
pip-log.txt
package-lock.json
/htmlcov
/cover
/build
@@ -23,8 +26,16 @@ pip-log.txt
/lemur/static/dist/
/lemur/static/app/vendor/
/wheelhouse
/lemur/lib
/lemur/bin
/lemur/lib64
/lemur/include

docs/_build
.editorconfig
.idea
test.conf
lemur/tests/tmp
.pytest_cache
lemur/tests/tmp

/lemur/plugins/lemur_email/tests/expiration-rendered.html
/lemur/plugins/lemur_email/tests/rotation-rendered.html
.jshintrc (JSHint configuration)

@@ -8,7 +8,7 @@
"eqeqeq": true,
"immed": true,
"indent": 2,
"latedef": true,
"latedef": false,
"newcap": false,
"noarg": true,
"quotmark": "single",
@@ -22,6 +22,8 @@
"angular": false,
"moment": false,
"toaster": false,
"d3": false,
"self": false,
"_": false
}
}
.pre-commit-config.yaml — new file (24 lines)

@@ -0,0 +1,24 @@
- repo: git://github.com/pre-commit/pre-commit-hooks
  sha: v0.9.1
  hooks:
    - id: trailing-whitespace
    - id: flake8
    - id: check-merge-conflict
- repo: git://github.com/pre-commit/mirrors-jshint
  sha: v2.9.5
  hooks:
    - id: jshint
- repo: https://github.com/ambv/black
  rev: stable
  hooks:
    - id: black
      language_version: python3.7

- repo: local
  hooks:
    - id: python-bandit-vulnerability-check
      name: bandit
      entry: bandit
      args: ['--ini', 'tox.ini', '-r', 'consoleme']
      language: system
      pass_filenames: false
.travis.yml — 35 changed lines

@@ -1,18 +1,16 @@
sudo: false

language: python
dist: xenial

node_js:
- "6.2.0"

addons:
postgresql: "9.4"

matrix:
include:
- python: "2.7"
env: TOXENV=py27
- python: "3.3"
env: TOXENV=py33
- python: "3.4"
env: TOXENV=py34
- python: "3.7"
env: TOXENV=py37

cache:
directories:
@@ -22,18 +20,31 @@ cache:
env:
global:
- PIP_DOWNLOAD_CACHE=".pip_download_cache"

install:
- make dev-postgres
# The following line is a temporary workaround for this issue: https://github.com/pypa/setuptools/issues/2230
- SETUPTOOLS_USE_DISTUTILS=stdlib
# do not load /etc/boto.cfg with Python 3 incompatible plugin
# https://github.com/travis-ci/travis-ci/issues/5246#issuecomment-166460882
- BOTO_CONFIG=/doesnotexist

before_script:
- psql -c "create database lemur;" -U postgres
- psql -c "create user lemur with password 'lemur;'" -U postgres
- psql lemur -c "create extension IF NOT EXISTS pg_trgm;" -U postgres
- npm config set registry https://registry.npmjs.org
- npm install -g bower
- pip install --upgrade setuptools

install:
- pip install coveralls
- pip install bandit

script:
- make test
- bandit -r . -ll -ii -x lemur/tests/,docs

after_success:
- coveralls

notifications:
email:
kglisson@netflix.com
ccastrapel@netflix.com
1 AUTHORS
@@ -1,2 +1,3 @@
- Kevin Glisson <kglisson@netflix.com>
- Jeremy Heffner <jheffner@netflix.com>
238 CHANGELOG.rst Normal file
@@ -0,0 +1,238 @@
Changelog
=========


0.7 - `2018-05-07`
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
This release adds LetsEncrypt support with DNS providers Dyn, Route53, and Cloudflare, and expands on the pending certificate functionality.
|
||||
The linux_dst plugin will also be deprecated and removed.
|
||||
|
||||
The pending_dns_authorizations and dns_providers tables were created. New columns
were added to the certificates and pending_certificates tables (for the DNS provider ID) and to authorities (for options).
Please run a database migration when upgrading.
|
||||
|
||||
The Let's Encrypt flow will run asynchronously. When a certificate is requested through the acme-issuer, a pending certificate
will be created. A cron needs to be defined to run `lemur pending_certs fetch_all_acme`. This command will iterate through all of the pending
certificates, request a DNS challenge token from Let's Encrypt, and set the appropriate _acme-challenge TXT entry. It will
then iterate through and resolve the challenges before requesting a certificate for each pending certificate. If a certificate
is successfully obtained, the pending_certificate will be moved to the certificates table with the appropriate properties.
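
A crontab entry along these lines would cover it (the interval shown is illustrative; pick one that fits your issuance volume):

::

    */15 * * * * lemur pending_certs fetch_all_acme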
|
||||
|
||||
Special thanks to all who helped with this release, notably:
|
||||
|
||||
- The folks at Cloudflare
|
||||
- dmitryzykov
|
||||
- jchuong
|
||||
- seils
|
||||
- titouanc
|
||||
|
||||
|
||||
Upgrading
|
||||
---------
|
||||
|
||||
.. note:: This release will need a migration change. Please follow the `documentation <https://lemur.readthedocs.io/en/latest/administration.html#upgrading-lemur>`_ to upgrade Lemur.
|
||||
|
||||
0.6 - `2018-01-02`
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Happy Holidays! This is a big release with lots of bug fixes and features. Below are the highlights; the list is not exhaustive.
|
||||
|
||||
|
||||
Features:
|
||||
|
||||
* Per-certificate rotation policies; requires a database migration. The default rotation policy for all certificates
  is 30 days. Every certificate will gain a policy regardless of whether auto-rotation is used.
* Adds per-user API Keys, allowing users to issue multiple long-lived API tokens with the same permissions as the user creating them.
* Adds the ability to revoke certificates from the Lemur UI/API; this is currently only supported for the digicert CIS and cfssl plugins.
* Allow destinations to support an export function. Useful for file system destinations (e.g. S3) to specify the export plugin you wish to run before the certificate is sent to the destination.
|
||||
* Adds support for uploading certificates to Cloudfront.
|
||||
* Re-worked certificate metadata pane for improved readability.
|
||||
* Adds support for LDAP user authentication
|
||||
|
||||
Bugs:
|
||||
|
||||
* Closed `#767 <https://github.com/Netflix/lemur/issues/767>`_ - Fixed an issue with a login redirect loop.
* Closed `#792 <https://github.com/Netflix/lemur/issues/792>`_ - Fixed an issue where a unique constraint was violated when replacing certificates.
* Closed `#752 <https://github.com/Netflix/lemur/issues/752>`_ - Fixed an internal server error when validating notification units.
* Closed `#684 <https://github.com/Netflix/lemur/issues/684>`_ - Fixed a migration failure when null values were encountered.
* Closed `#661 <https://github.com/Netflix/lemur/issues/661>`_ - Fixed an issue where default values were missing during clone operations.
|
||||
|
||||
|
||||
Special thanks to all who helped with this release, notably:
|
||||
|
||||
- intgr
|
||||
- SecurityInsanity
|
||||
- johanneslange
|
||||
- RickB17
|
||||
- pr8kerl
|
||||
- bunjiboys
|
||||
|
||||
See the full list of issues closed in `0.6 <https://github.com/Netflix/lemur/milestone/5>`_.
|
||||
|
||||
Upgrading
|
||||
---------
|
||||
|
||||
.. note:: This release will need a migration change. Please follow the `documentation <https://lemur.readthedocs.io/en/latest/administration.html#upgrading-lemur>`_ to upgrade Lemur.
|
||||
|
||||
|
||||
|
||||
0.5 - `2016-04-08`
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
This release is most notable for dropping support for python2.7. All Lemur versions >0.4 will now support python3.5 only.
|
||||
|
||||
Big thanks to neilschelly for quite a lot of improvements to the `lemur-cryptography` plugin.
|
||||
|
||||
Other Highlights:
|
||||
|
||||
* Closed `#501 <https://github.com/Netflix/lemur/issues/501>`_ - Endpoint resources are now kept in sync via an
  expiration mechanism, such that non-existent endpoints gracefully fall out of Lemur. Certificates are never
  removed from Lemur.
|
||||
* Closed `#551 <https://github.com/Netflix/lemur/pull/551>`_ - Added the ability to create a 4096 bit key during certificate
|
||||
creation. Closed `#528 <https://github.com/Netflix/lemur/pull/528>`_ to ensure that issuer plugins supported the new 4096 bit keys.
|
||||
* Closed `#566 <https://github.com/Netflix/lemur/issues/566>`_ - Fixed an issue changing the notification status for certificates
|
||||
without private keys.
|
||||
* Closed `#594 <https://github.com/Netflix/lemur/issues/594>`_ - Added `replaced` field indicating if a certificate has been superseded.
|
||||
* Closed `#602 <https://github.com/Netflix/lemur/issues/602>`_ - AWS plugin added support for ALBs for endpoint tracking.
|
||||
|
||||
|
||||
Special thanks to all who helped with this release, notably:
|
||||
|
||||
- RcRonco
|
||||
- harmw
|
||||
- jeremyguarini
|
||||
|
||||
See the full list of issues closed in `0.5 <https://github.com/Netflix/lemur/milestone/4>`_.
|
||||
|
||||
Upgrading
|
||||
---------
|
||||
|
||||
.. note:: This release will need a slight migration change. Please follow the `documentation <https://lemur.readthedocs.io/en/latest/administration.html#upgrading-lemur>`_ to upgrade Lemur.
|
||||
|
||||
|
||||
0.4 - `2016-11-17`
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
There have been quite a few issues closed in this release. Some notables:
|
||||
|
||||
* Closed `#284 <https://github.com/Netflix/lemur/issues/284>`_ - Created new models for `Endpoints` and created the associated
  AWS ELB endpoint tracking code. This was the major stated goal of this milestone and should serve as the basis for
  future enhancements of Lemur's certificate 'deployment' capabilities.

* Closed `#334 <https://github.com/Netflix/lemur/issues/334>`_ - Lemur now has the ability
  to restrict certificate expiration dates to weekdays.
|
||||
|
||||
Several fixes/tweaks to Lemur's python3 support (thanks chadhendrie!)
|
||||
|
||||
This will most likely be the last release to support python2.7, moving Lemur to target python3 exclusively. Please comment
on issue #340 if this negatively affects your usage of Lemur.
|
||||
|
||||
See the full list of issues closed in `0.4 <https://github.com/Netflix/lemur/milestone/3>`_.
|
||||
|
||||
Upgrading
|
||||
---------
|
||||
|
||||
.. note:: This release will need a slight migration change. Please follow the `documentation <https://lemur.readthedocs.io/en/latest/administration.html#upgrading-lemur>`_ to upgrade Lemur.
|
||||
|
||||
|
||||
0.3.0 - `2016-06-06`
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
This is quite a large upgrade; it is highly advised that you back up your database before attempting to upgrade, as this release
requires the migration of database structure as well as data.
|
||||
|
||||
|
||||
Upgrading
|
||||
---------
|
||||
|
||||
Please follow the `documentation <https://lemur.readthedocs.io/en/latest/administration.html#upgrading-lemur>`_ to upgrade Lemur.
|
||||
|
||||
|
||||
Source Plugin Owners
|
||||
--------------------
|
||||
|
||||
The dictionary returned from a source plugin has changed keys from `public_certificate` to `body` and `intermediate_certificate` to `chain`.
|
||||
|
||||
|
||||
Issuer Plugin Owners
|
||||
--------------------
|
||||
|
||||
This release may break your plugins: the keys in `issuer_options` have been changed from `camelCase` to `under_score`.
This change was made to break an undue reliance on downstream options and to maintain a more pythonic naming convention. Renaming
these keys should be fairly trivial; additionally, pull requests have been submitted to affected plugins to help ease the transition.
|
||||
|
||||
.. note:: This change only affects issuer plugins and does not affect any other types of plugins.
|
||||
|
||||
|
||||
* Closed `#63 <https://github.com/Netflix/lemur/issues/63>`_ - Validates all endpoints with Marshmallow schemas; this allows for
  stricter input validation and better error messages when validation fails.
|
||||
* Closed `#146 <https://github.com/Netflix/lemur/issues/146>`_ - Moved authority type to first pane of authority creation wizard.
|
||||
* Closed `#147 <https://github.com/Netflix/lemur/issues/147>`_ - Added and refactored the relationship between authorities and their
|
||||
root certificates. Displays the certificates (and chains) next to the authority in question.
|
||||
* Closed `#199 <https://github.com/Netflix/lemur/issues/199>`_ - Ensures that the dates submitted to Lemur during authority and
|
||||
certificate creation are actually dates.
|
||||
* Closed `#230 <https://github.com/Netflix/lemur/issues/230>`_ - Migrated the authority dropdown to a ui-select based dropdown; this
  should make it easier to determine what authorities are available and when an authority has actually been selected.
* Closed `#254 <https://github.com/Netflix/lemur/issues/254>`_ - Forces certificate names to be generally unique. If a certificate name
  (generated or otherwise) is found to be a duplicate we increment by appending a counter.
* Closed `#275 <https://github.com/Netflix/lemur/issues/275>`_ - Switched to using Fernet-generated passphrases for exported items.
  These are more sound than the pseudo-random passphrases generated before and have the nice property of being in base64.
* Closed `#278 <https://github.com/Netflix/lemur/issues/278>`_ - Added the ability to specify a custom name during certificate creation; previously
  this was only available in the certificate import wizard.
|
||||
* Closed `#281 <https://github.com/Netflix/lemur/issues/281>`_ - Fixed an issue where notifications could not be removed from a certificate
|
||||
via the UI.
|
||||
* Closed `#289 <https://github.com/Netflix/lemur/issues/289>`_ - Fixed an issue where intermediates were not being properly exported.
|
||||
* Closed `#315 <https://github.com/Netflix/lemur/issues/315>`_ - Made how roles are associated with certificates and authorities much more
|
||||
explicit, including adding the ability to add roles directly to certificates and authorities on creation.
|
||||
|
||||
|
||||
|
||||
0.2.2 - 2016-02-05
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Closed `#234 <https://github.com/Netflix/lemur/issues/234>`_ - Allows export plugins to define whether they need
|
||||
private key material (default is True)
|
||||
* Closed `#231 <https://github.com/Netflix/lemur/issues/231>`_ - Authorities were not respecting 'owning' roles and their
|
||||
users
|
||||
* Closed `#228 <https://github.com/Netflix/lemur/issues/228>`_ - Fixed documentation with correct filter values
|
||||
* Closed `#226 <https://github.com/Netflix/lemur/issues/226>`_ - Fixes an issue where `import_certificate` was requiring
  replacement certificates to be specified
* Closed `#224 <https://github.com/Netflix/lemur/issues/224>`_ - Fixed an issue where NPM might not be globally available (thanks AlexClineBB!)
* Closed `#221 <https://github.com/Netflix/lemur/issues/234>`_ - Fixes several reported issues where older migration scripts were
  missing tables; this change removes pre-0.2 migration scripts
|
||||
* Closed `#218 <https://github.com/Netflix/lemur/issues/234>`_ - Fixed an issue where export passphrases would not validate
|
||||
|
||||
|
||||
0.2.1 - 2015-12-14
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Fixed bug with search not refreshing values
|
||||
* Cleaned up documentation, including working supervisor example (thanks rpicard!)
|
||||
* Closed #165 - Fixed an issue with email templates
|
||||
* Closed #188 - Added ability to submit third party CSR
|
||||
* Closed #176 - Java-export should allow user to specify truststore/keystore
|
||||
* Closed #176 - Extended support for exporting certificate in P12 format
|
||||
|
||||
|
||||
0.2.0 - 2015-12-02
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* Closed #120 - Error messages not displaying long enough
|
||||
* Closed #121 - Certificate create form should not be valid until a Certificate Authority object is available
|
||||
* Closed #122 - Certificate API should allow for the specification of preceding certificates
|
||||
You can now target a certificate(s) for replacement. When specified the replaced certificate will be marked as
|
||||
'inactive'. This means that there will be no notifications for that certificate.
|
||||
* Closed #139 - SubCA autogenerated descriptions for their certs are incorrect
|
||||
* Closed #140 - Permalink does not change with filtering
|
||||
* Closed #144 - Should be able to search certificates by domains covered, including wildcards
|
||||
* Closed #165 - Cleaned up expiration notification template
|
||||
* Closed #160 - Cleaned up quickstart documentation (thanks forkd!)
|
||||
* Closed #144 - Now able to search by all domains in a given certificate, not just by common name
|
||||
|
||||
|
||||
0.1.5 - 2015-10-26
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
* **SECURITY ISSUE**: Switched from using a static AES key to Fernet encryption.
  Affects all versions prior to 0.1.5. If upgrading, this will require a data migration.
  See: `Upgrading Lemur <https://lemur.readthedocs.io/administration#UpgradingLemur>`_
|
||||
14
Dockerfile
Normal file
14
Dockerfile
Normal file
@ -0,0 +1,14 @@
|
||||
FROM python:3.7
|
||||
RUN apt-get update
|
||||
RUN apt-get install -y make software-properties-common curl
|
||||
RUN curl -sL https://deb.nodesource.com/setup_7.x | bash -
|
||||
RUN apt-get update
|
||||
RUN apt-get install -y npm libldap2-dev libsasl2-dev libldap2-dev libssl-dev
|
||||
RUN pip install pip==20.0.2
|
||||
RUN pip install -U setuptools
|
||||
RUN pip install coveralls bandit
|
||||
WORKDIR /app
|
||||
COPY . /app/
|
||||
RUN pip install -e .
|
||||
RUN pip install "file://`pwd`#egg=lemur[dev]"
|
||||
RUN pip install "file://`pwd`#egg=lemur[tests]"
|
||||
3
LICENSE
3
LICENSE
@ -1,4 +1,3 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
@ -187,7 +186,7 @@
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright 2014 Netflix, Inc.
|
||||
Copyright 2018 Netflix, Inc.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
include setup.py package.json bower.json gulpfile.js README.rst MANIFEST.in LICENSE AUTHORS
|
||||
include setup.py version.py package.json bower.json gulpfile.js README.rst MANIFEST.in LICENSE AUTHORS requirements*.txt
|
||||
recursive-include lemur/plugins/lemur_email/templates *
|
||||
recursive-include lemur/static *
|
||||
global-exclude *~
|
||||
|
||||
66
Makefile
66
Makefile
@ -1,33 +1,59 @@
|
||||
NPM_ROOT = ./node_modules
|
||||
STATIC_DIR = src/lemur/static/app
|
||||
SHELL=/bin/bash
|
||||
USER := $(shell whoami)
|
||||
|
||||
develop: update-submodules setup-git
|
||||
@echo "--> Installing dependencies"
|
||||
ifeq ($(USER), root)
|
||||
@echo "WARNING: It looks like you are installing Lemur as root. This is not generally advised."
|
||||
npm install --unsafe-perm
|
||||
else
|
||||
npm install
|
||||
endif
|
||||
pip install "setuptools>=0.9.8"
|
||||
# order matters here, base package must install first
|
||||
pip install -e .
|
||||
pip install "file://`pwd`#egg=lemur[dev]"
|
||||
pip install "file://`pwd`#egg=lemur[tests]"
|
||||
node_modules/.bin/gulp build
|
||||
node_modules/.bin/gulp package
|
||||
node_modules/.bin/gulp package --urlContextPath=$(urlContextPath)
|
||||
@echo ""
|
||||
|
||||
release:
|
||||
@echo "--> Installing dependencies"
|
||||
ifeq ($(USER), root)
|
||||
@echo "WARNING: It looks like you are installing Lemur as root. This is not generally advised."
|
||||
npm install --unsafe-perm
|
||||
else
|
||||
npm install
|
||||
endif
|
||||
pip install "setuptools>=0.9.8"
|
||||
# order matters here, base package must install first
|
||||
pip install -e .
|
||||
node_modules/.bin/gulp build
|
||||
node_modules/.bin/gulp package --urlContextPath=$(urlContextPath)
|
||||
@echo ""
|
||||
|
||||
dev-docs:
|
||||
pip install -r docs/requirements.txt
|
||||
pip install -r requirements-docs.txt
|
||||
|
||||
reset-db:
|
||||
@echo "--> Dropping existing 'lemur' database"
|
||||
dropdb lemur || true
|
||||
@echo "--> Creating 'lemur' database"
|
||||
createdb -E utf-8 lemur
|
||||
@echo "--> Enabling pg_trgm extension"
|
||||
psql lemur -c "create extension IF NOT EXISTS pg_trgm;"
|
||||
@echo "--> Applying migrations"
|
||||
lemur db upgrade
|
||||
cd lemur && lemur db upgrade
|
||||
|
||||
setup-git:
|
||||
@echo "--> Installing git hooks"
|
||||
git config branch.autosetuprebase always
|
||||
cd .git/hooks && ln -sf ../../hooks/* ./
|
||||
if [ -d .git/hooks ]; then \
|
||||
git config branch.autosetuprebase always; \
|
||||
cd .git/hooks && ln -sf ../../hooks/* ./; \
|
||||
fi
|
||||
@echo ""
|
||||
|
||||
clean:
|
||||
@ -41,7 +67,7 @@ test: develop lint test-python
|
||||
|
||||
testloop: develop
|
||||
pip install pytest-xdist
|
||||
py.test tests -f
|
||||
coverage run --source lemur -m py.test
|
||||
|
||||
test-cli:
|
||||
@echo "--> Testing CLI"
|
||||
@ -60,7 +86,7 @@ test-js:
|
||||
|
||||
test-python:
|
||||
@echo "--> Running Python tests"
|
||||
py.test lemur/tests || exit 1
|
||||
coverage run --source lemur -m py.test
|
||||
@echo ""
|
||||
|
||||
lint: lint-python lint-js
|
||||
@ -82,4 +108,28 @@ coverage: develop
|
||||
publish:
|
||||
python setup.py sdist bdist_wheel upload
|
||||
|
||||
.PHONY: develop dev-postgres dev-docs setup-git build clean update-submodules test testloop test-cli test-js test-python lint lint-python lint-js coverage publish
|
||||
up-reqs:
|
||||
ifndef VIRTUAL_ENV
|
||||
$(error Please activate virtualenv first)
|
||||
endif
|
||||
@echo "--> Updating Python requirements"
|
||||
pip install --upgrade pip
|
||||
pip install --upgrade pip-tools
|
||||
pip-compile --output-file requirements.txt requirements.in -U --no-index
|
||||
pip-compile --output-file requirements-docs.txt requirements-docs.in -U --no-index
|
||||
pip-compile --output-file requirements-dev.txt requirements-dev.in -U --no-index
|
||||
pip-compile --output-file requirements-tests.txt requirements-tests.in -U --no-index
|
||||
@echo "--> Done updating Python requirements"
|
||||
@echo "--> Removing python-ldap from requirements-docs.txt"
|
||||
grep -v "python-ldap" requirements-docs.txt > tempreqs && mv tempreqs requirements-docs.txt
|
||||
@echo "--> Installing new dependencies"
|
||||
pip install -e .
|
||||
@echo "--> Done installing new dependencies"
|
||||
@echo ""
|
||||
|
||||
# Execute with make checkout-pr pr=<pr number>
|
||||
checkout-pr:
|
||||
git fetch upstream pull/$(pr)/head:pr-$(pr)
|
||||
|
||||
|
||||
.PHONY: develop dev-postgres dev-docs setup-git build clean update-submodules test testloop test-cli test-js test-python lint lint-python lint-js coverage publish release
|
||||
|
||||
1
OSSMETADATA
Normal file
1
OSSMETADATA
Normal file
@ -0,0 +1 @@
|
||||
osslifecycle=active
|
||||
17
README.rst
17
README.rst
@ -5,28 +5,31 @@ Lemur
|
||||
:alt: Join the chat at https://gitter.im/Netflix/lemur
|
||||
:target: https://gitter.im/Netflix/lemur?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge
|
||||
|
||||
.. image:: https://img.shields.io/pypi/v/lemur.svg
|
||||
:target: https://pypi.python.org/pypi/lemur/
|
||||
:alt: Latest Version
|
||||
|
||||
.. image:: https://readthedocs.org/projects/lemur/badge/?version=latest
|
||||
:target: https://lemur.readthedocs.org
|
||||
:target: https://lemur.readthedocs.io
|
||||
:alt: Latest Docs
|
||||
|
||||
.. image:: https://img.shields.io/badge/NetflixOSS-active-brightgreen.svg
|
||||
|
||||
.. image:: https://travis-ci.org/Netflix/lemur.svg
|
||||
:target: https://travis-ci.org/Netflix/lemur
|
||||
|
||||
.. image:: https://coveralls.io/repos/github/Netflix/lemur/badge.svg?branch=master
|
||||
:target: https://coveralls.io/github/Netflix/lemur?branch=master
|
||||
|
||||
|
||||
|
||||
Lemur manages TLS certificate creation. While not able to issue certificates itself, Lemur acts as a broker between CAs
|
||||
and environments providing a central portal for developers to issue TLS certificates with 'sane' defaults.
|
||||
|
||||
It works on Python 3.7. We deploy on Ubuntu and develop on OS X.
|
||||
|
||||
It works on CPython 2.7, 3.3, 3.4. We deploy on Ubuntu and develop on OS X.
|
||||
|
||||
Project resources
|
||||
=================
|
||||
|
||||
- `Lemur Blog Post <http://techblog.netflix.com/2015/09/introducing-lemur.html>`_
|
||||
- `Documentation <http://lemur.readthedocs.org/>`_
|
||||
- `Documentation <http://lemur.readthedocs.io/>`_
|
||||
- `Source code <https://github.com/netflix/lemur>`_
|
||||
- `Issue tracker <https://github.com/netflix/lemur/issues>`_
|
||||
- `Docker <https://github.com/Netflix/lemur-docker>`_
|
||||
|
||||
63
bower.json
63
bower.json
@ -6,38 +6,45 @@
|
||||
},
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
"angular": "1.3",
|
||||
"json3": "~3.3",
|
||||
"es5-shim": "~4.0",
|
||||
"jquery": "~2.1",
|
||||
"angular-resource": "1.2.15",
|
||||
"angular-cookies": "1.2.15",
|
||||
"angular-sanitize": "1.2.15",
|
||||
"angular-route": "1.2.15",
|
||||
"angular-strap": "~2.0.2",
|
||||
"restangular": "~1.4.0",
|
||||
"ng-table": "~0.5.4",
|
||||
"ngAnimate": "*",
|
||||
"moment": "~2.6.0",
|
||||
"angular-animate": "~1.4.0",
|
||||
"angular-loading-bar": "~0.6.0",
|
||||
"fontawesome": "~4.2.0",
|
||||
"jquery": "~2.2.0",
|
||||
"angular-wizard": "~0.4.0",
|
||||
"bootswatch": "3.3.1+2",
|
||||
"angular-spinkit": "~0.3.3",
|
||||
"angular-bootstrap": "~0.12.0",
|
||||
"angular": "1.4.9",
|
||||
"json3": "~3.3",
|
||||
"es5-shim": "~4.5.0",
|
||||
"angular-bootstrap": "~1.1.1",
|
||||
"angular-animate": "~1.4.9",
|
||||
"restangular": "~1.5.1",
|
||||
"ng-table": "~0.8.3",
|
||||
"moment": "~2.11.1",
|
||||
"bootstrap": "~3.4.1",
|
||||
"angular-loading-bar": "~0.8.0",
|
||||
"angular-moment": "~0.10.3",
|
||||
"moment-range": "~2.1.0",
|
||||
"angular-clipboard": "~1.3.0",
|
||||
"angularjs-toaster": "~1.0.0",
|
||||
"angular-chart.js": "~0.8.8",
|
||||
"ngletteravatar": "~4.0.0",
|
||||
"bootswatch": "3.4.1+1",
|
||||
"fontawesome": "~4.5.0",
|
||||
"satellizer": "~0.13.4",
|
||||
"angular-ui-router": "~0.2.15",
|
||||
"font-awesome": "~4.5.0",
|
||||
"lodash": "~4.0.1",
|
||||
"underscore": "~1.8.3",
|
||||
"angular-smart-table": "2.1.8",
|
||||
"angular-strap": ">= 2.2.2",
|
||||
"angular-underscore": "^0.5.0",
|
||||
"angular-translate": "^2.9.0",
|
||||
"angular-ui-switch": "~0.1.0",
|
||||
"angular-chart.js": "~0.7.1",
|
||||
"satellizer": "~0.9.4",
|
||||
"angularjs-toaster": "~0.4.14"
|
||||
},
|
||||
"devDependencies": {
|
||||
"angular-mocks": "~1.3",
|
||||
"angular-scenario": "~1.3"
|
||||
"angular-sanitize": "~1.5.0",
|
||||
"angular-file-saver": "~1.0.1",
|
||||
"angular-ui-select": "~0.17.1",
|
||||
"d3": "^3.5.17"
|
||||
},
|
||||
"resolutions": {
|
||||
"bootstrap": "~3.3.1",
|
||||
"angular": "1.3"
|
||||
"moment": ">=2.8.0 <2.11.0",
|
||||
"lodash": ">=1.3.0 <2.5.0",
|
||||
"angular": "1.4.9"
|
||||
},
|
||||
"ignore": [
|
||||
"**/.*",
|
||||
|
||||
1
db/lemur.sql
Normal file
1
db/lemur.sql
Normal file
@ -0,0 +1 @@
|
||||
CREATE EXTENSION pg_trgm;
|
||||
29
dicos/50_lemur.xml
Normal file
29
dicos/50_lemur.xml
Normal file
@ -0,0 +1,29 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<creole>
|
||||
<files>
|
||||
<!--service>lemur</service-->
|
||||
<file name='/etc/lemur/lemur.conf.py' mkdir='True'/>
|
||||
<file name='/etc/eole/eole-db.d/lemur.yml'/>
|
||||
<file name='/etc/nginx/web.d/lemur.conf' source='nginx-lemur.conf'/>
|
||||
</files>
|
||||
<variables>
|
||||
<family name='lemur'>
|
||||
<variable name='lemur_secret' type='password' description="Secret pour Lemur" auto_save="True"/>
|
||||
<variable name='lemur_token_secret' type='password' description="Token secret pour Lemur" auto_save="True"/>
|
||||
<variable name='lemur_encrypt_keys' type='password' description="Encrypt keys pour Lemur" auto_save="True"/>
|
||||
<variable name='lemur_db_name' type='string' description="Nom de la base de donnée de Lemur" mode="expert">
|
||||
<value>lemur</value>
|
||||
</variable>
|
||||
<variable name='lemur_db_user' type='string' description="Nom de l'utilisateur de la base de donnée de Lemur" mode="expert">
|
||||
<value>lemur</value>
|
||||
</variable>
|
||||
<variable name='lemur_admin_password' type='password' description="Mot de passe de l'utilisateur admin de Lemur" auto_save="True"/>
|
||||
</family>
|
||||
</variables>
|
||||
<constraints>
|
||||
<fill name='gen_random_base64' target='lemur_secret'/>
|
||||
<fill name='gen_random_base64' target='lemur_token_secret'/>
|
||||
<fill name='gen_random_base64' target='lemur_encrypt_keys'/>
|
||||
<fill name='gen_random' target='lemur_admin_password'/>
|
||||
</constraints>
|
||||
</creole>
|
||||
27
docker-compose.yml
Normal file
27
docker-compose.yml
Normal file
@ -0,0 +1,27 @@
|
||||
---
|
||||
version: '2.0'
|
||||
services:
|
||||
test:
|
||||
build: .
|
||||
volumes:
|
||||
- ".:/app"
|
||||
links:
|
||||
- postgres
|
||||
command: make test
|
||||
environment:
|
||||
SQLALCHEMY_DATABASE_URI: postgresql://lemur:lemur@postgres:5432/lemur
|
||||
VIRTUAL_ENV: 'true'
|
||||
|
||||
postgres:
|
||||
image: postgres
|
||||
restart: always
|
||||
environment:
|
||||
POSTGRES_USER: lemur
|
||||
POSTGRES_PASSWORD: lemur
|
||||
ports:
|
||||
- "5432:5432"
|
||||
|
||||
redis:
|
||||
image: "redis:alpine"
|
||||
ports:
|
||||
- "6379:6379"
|
||||
3
docker/.dockerignore
Normal file
3
docker/.dockerignore
Normal file
@ -0,0 +1,3 @@
|
||||
*-env
|
||||
docker-compose.yml
|
||||
Dockerfile
|
||||
67
docker/Dockerfile
Normal file
67
docker/Dockerfile
Normal file
@ -0,0 +1,67 @@
|
||||
FROM alpine:3.8
|
||||
|
||||
ARG VERSION
|
||||
ENV VERSION master
|
||||
|
||||
ARG URLCONTEXT
|
||||
|
||||
ENV uid 1337
|
||||
ENV gid 1337
|
||||
ENV user lemur
|
||||
ENV group lemur
|
||||
|
||||
RUN addgroup -S ${group} -g ${gid} && \
|
||||
adduser -D -S ${user} -G ${group} -u ${uid} && \
|
||||
apk --update add python3 libldap postgresql-client nginx supervisor curl tzdata openssl bash && \
|
||||
apk --update add --virtual build-dependencies \
|
||||
git \
|
||||
tar \
|
||||
curl \
|
||||
python3-dev \
|
||||
npm \
|
||||
bash \
|
||||
musl-dev \
|
||||
gcc \
|
||||
autoconf \
|
||||
automake \
|
||||
libtool \
|
||||
make \
|
||||
nasm \
|
||||
zlib-dev \
|
||||
postgresql-dev \
|
||||
libressl-dev \
|
||||
libffi-dev \
|
||||
cyrus-sasl-dev \
|
||||
openldap-dev && \
|
||||
mkdir -p /opt/lemur /home/lemur/.lemur/ && \
|
||||
curl -sSL https://github.com/Netflix/lemur/archive/$VERSION.tar.gz | tar xz -C /opt/lemur --strip-components=1 && \
|
||||
pip3 install --upgrade pip && \
|
||||
pip3 install --upgrade setuptools && \
|
||||
mkdir -p /run/nginx/ /etc/nginx/ssl/ && \
|
||||
chown -R $user:$group /opt/lemur/ /home/lemur/.lemur/
|
||||
|
||||
WORKDIR /opt/lemur
|
||||
|
||||
RUN npm install --unsafe-perm && \
|
||||
pip3 install -e . && \
|
||||
node_modules/.bin/gulp build && \
|
||||
node_modules/.bin/gulp package --urlContextPath=${URLCONTEXT} && \
|
||||
apk del build-dependencies
|
||||
|
||||
COPY entrypoint /
|
||||
COPY src/lemur.conf.py /home/lemur/.lemur/lemur.conf.py
|
||||
COPY supervisor.conf /
|
||||
COPY nginx/default.conf /etc/nginx/conf.d/
|
||||
COPY nginx/default-ssl.conf /etc/nginx/conf.d/
|
||||
|
||||
RUN chmod +x /entrypoint
|
||||
WORKDIR /
|
||||
|
||||
HEALTHCHECK --interval=12s --timeout=12s --start-period=30s \
|
||||
CMD curl --fail http://localhost:80/api/1/healthcheck | grep -q ok || exit 1
|
||||
|
||||
USER root
|
||||
|
||||
ENTRYPOINT ["/entrypoint"]
|
||||
|
||||
CMD ["/usr/bin/supervisord","-c","supervisor.conf"]
|
||||
67
docker/Dockerfile-src
Normal file
67
docker/Dockerfile-src
Normal file
@ -0,0 +1,67 @@
|
||||
FROM alpine:3.8
|
||||
|
||||
ARG VERSION
|
||||
ENV VERSION master
|
||||
|
||||
ARG URLCONTEXT
|
||||
|
||||
ENV uid 1337
|
||||
ENV gid 1337
|
||||
ENV user lemur
|
||||
ENV group lemur
|
||||
|
||||
RUN addgroup -S ${group} -g ${gid} && \
|
||||
adduser -D -S ${user} -G ${group} -u ${uid} && \
|
||||
apk --update add python3 libldap postgresql-client nginx supervisor curl tzdata openssl bash && \
|
||||
apk --update add --virtual build-dependencies \
|
||||
git \
|
||||
tar \
|
||||
curl \
|
||||
python3-dev \
|
||||
npm \
|
||||
bash \
|
||||
musl-dev \
|
||||
gcc \
|
||||
autoconf \
|
||||
automake \
|
||||
libtool \
|
||||
make \
|
||||
nasm \
|
||||
zlib-dev \
|
||||
postgresql-dev \
|
||||
libressl-dev \
|
||||
libffi-dev \
|
||||
cyrus-sasl-dev \
|
||||
openldap-dev && \
|
||||
pip3 install --upgrade pip && \
|
||||
pip3 install --upgrade setuptools && \
|
||||
mkdir -p /home/lemur/.lemur/ && \
|
||||
mkdir -p /run/nginx/ /etc/nginx/ssl/
|
||||
|
||||
COPY ./ /opt/lemur
|
||||
WORKDIR /opt/lemur
|
||||
|
||||
RUN chown -R $user:$group /opt/lemur/ /home/lemur/.lemur/ && \
|
||||
npm install --unsafe-perm && \
|
||||
pip3 install -e . && \
|
||||
node_modules/.bin/gulp build && \
|
||||
node_modules/.bin/gulp package --urlContextPath=${URLCONTEXT} && \
|
||||
apk del build-dependencies
|
||||
|
||||
COPY docker/entrypoint /
|
||||
COPY docker/src/lemur.conf.py /home/lemur/.lemur/lemur.conf.py
|
||||
COPY docker/supervisor.conf /
|
||||
COPY docker/nginx/default.conf /etc/nginx/conf.d/
|
||||
COPY docker/nginx/default-ssl.conf /etc/nginx/conf.d/
|
||||
|
||||
RUN chmod +x /entrypoint
|
||||
WORKDIR /
|
||||
|
||||
HEALTHCHECK --interval=12s --timeout=12s --start-period=30s \
|
||||
CMD curl --fail http://localhost:80/api/1/healthcheck | grep -q ok || exit 1
|
||||
|
||||
USER root
|
||||
|
||||
ENTRYPOINT ["/entrypoint"]
|
||||
|
||||
CMD ["/usr/bin/supervisord","-c","supervisor.conf"]
|
||||
29
docker/docker-compose.yml
Normal file
29
docker/docker-compose.yml
Normal file
@ -0,0 +1,29 @@
|
||||
version: '3'
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: "postgres:10"
|
||||
restart: always
|
||||
volumes:
|
||||
- pg_data:/var/lib/postgresql/data
|
||||
env_file:
|
||||
- pgsql-env
|
||||
|
||||
lemur:
|
||||
# image: "netlix-lemur:latest"
|
||||
build: .
|
||||
depends_on:
|
||||
- postgres
|
||||
- redis
|
||||
env_file:
|
||||
- lemur-env
|
||||
- pgsql-env
|
||||
ports:
|
||||
- 80:80
|
||||
- 443:443
|
||||
|
||||
redis:
|
||||
image: "redis:alpine"
|
||||
|
||||
volumes:
|
||||
pg_data: {}
|
||||
59
docker/entrypoint
Normal file
59
docker/entrypoint
Normal file
@ -0,0 +1,59 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -eo pipefail
|
||||
|
||||
if [ -z "${POSTGRES_USER}" ] || [ -z "${POSTGRES_PASSWORD}" ] || [ -z "${POSTGRES_HOST}" ] || [ -z "${POSTGRES_DB}" ];then
|
||||
echo "Database vars not set"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
export POSTGRES_PORT="${POSTGRES_PORT:-5432}"
|
||||
|
||||
export LEMUR_ADMIN_PASSWORD="${LEMUR_ADMIN_PASSWORD:-admin}"
|
||||
|
||||
export SQLALCHEMY_DATABASE_URI="postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@$POSTGRES_HOST:$POSTGRES_PORT/$POSTGRES_DB"
|
||||
|
||||
|
||||
PGPASSWORD=$POSTGRES_PASSWORD psql -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER -d $POSTGRES_DB --command 'select 1;'
|
||||
|
||||
echo " # Create Postgres trgm extension"
|
||||
PGPASSWORD=$POSTGRES_PASSWORD psql -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER -d $POSTGRES_DB --command 'CREATE EXTENSION IF NOT EXISTS pg_trgm;'
|
||||
echo " # Done"
|
||||
|
||||
if [ -z "${SKIP_SSL}" ]; then
|
||||
if [ ! -f /etc/nginx/ssl/server.crt ] && [ ! -f /etc/nginx/ssl/server.key ]; then
|
||||
openssl req -x509 -newkey rsa:4096 -nodes -keyout /etc/nginx/ssl/server.key -out /etc/nginx/ssl/server.crt -days 365 -subj "/C=US/ST=FAKE/L=FAKE/O=FAKE/OU=FAKE/CN=FAKE"
|
||||
fi
|
||||
[ -f "/etc/nginx/conf.d/default-ssl.conf.a" ] && mv /etc/nginx/conf.d/default-ssl.conf.a /etc/nginx/conf.d/default-ssl.conf
|
||||
[ -f "/etc/nginx/conf.d/default.conf" ] && mv -f /etc/nginx/conf.d/default.conf /etc/nginx/conf.d/default.conf.a
|
||||
fi
|
||||
|
||||
# if [ ! -f /home/lemur/.lemur/lemur.conf.py ]; then
|
||||
# echo "Creating config"
|
||||
# https://github.com/Netflix/lemur/issues/2257
|
||||
# python3 /opt/lemur/lemur/manage.py create_config
|
||||
# echo "Done"
|
||||
# fi
|
||||
|
||||
echo " # Running init"
|
||||
su lemur -s /bin/bash -c "cd /opt/lemur/lemur; lemur init -p ${LEMUR_ADMIN_PASSWORD}"
|
||||
echo " # Done"
|
||||
|
||||
# echo "Creating user"
|
||||
# https://github.com/Netflix/lemur/issues/
|
||||
# echo "something that will create user" | python3 /opt/lemur/lemur/manage.py shell
|
||||
# echo "Done"
|
||||
|
||||
cron_notify="${CRON_NOTIFY:-"0 22 * * *"}"
|
||||
cron_sync="${CRON_SYNC:-"*/15 * * * *"}"
|
||||
cron_revoked="${CRON_CHECK_REVOKED:-"0 22 * * *"}"
|
||||
cron_reissue="${CRON_REISSUE:-"0 23 * * *"}"
|
||||
|
||||
echo " # Populating crontab"
|
||||
echo "${cron_notify} lemur notify expirations" > /etc/crontabs/lemur
|
||||
echo "${cron_sync} lemur source sync -s all" >> /etc/crontabs/lemur
|
||||
echo "${cron_revoked} lemur certificate check_revoked" >> /etc/crontabs/lemur
|
||||
echo "${cron_reissue} lemur certificate reissue -c" >> /etc/crontabs/lemur
|
||||
echo " # Done"
|
||||
|
||||
exec "$@"
|
||||
25
docker/lemur-env
Normal file
25
docker/lemur-env
Normal file
@ -0,0 +1,25 @@
|
||||
# SKIP_SSL=1
|
||||
# LEMUR_TOKEN_SECRET=
|
||||
# LEMUR_DEFAULT_COUNTRY=
|
||||
# LEMUR_DEFAULT_STATE=
|
||||
# LEMUR_DEFAULT_LOCATION=
|
||||
# LEMUR_DEFAULT_ORGANIZATION=
|
||||
# LEMUR_DEFAULT_ORGANIZATIONAL_UNIT=
|
||||
# LEMUR_DEFAULT_ISSUER_PLUGIN=cryptography-issuer
|
||||
# LEMUR_DEFAULT_AUTHORITY=cryptography
|
||||
# MAIL_SERVER=mail.example.com
|
||||
# MAIL_PORT=25
|
||||
# LEMUR_EMAIL=lemur@example.com
|
||||
# LEMUR_SECURITY_TEAM_EMAIL=['team@example.com']
|
||||
# LEMUR_TOKEN_SECRET=
|
||||
# LEMUR_ENCRYPTION_KEYS=['']
|
||||
# DEBUG=True
|
||||
# LDAP_DEBUG=True
|
||||
# LDAP_AUTH=True
|
||||
# LDAP_BIND_URI=ldap://example.com
|
||||
# LDAP_BASE_DN=DC=example,DC=com
|
||||
# LDAP_EMAIL_DOMAIN=example.com
|
||||
# LDAP_USE_TLS=False
|
||||
# LDAP_REQUIRED_GROUP=certificate-management-admins
|
||||
# LDAP_GROUPS_TO_ROLES={'certificate-management-admins': 'admin', 'Team': 'team@example.com'}
|
||||
# LDAP_IS_ACTIVE_DIRECTORY=False
|
||||
37
docker/nginx/default-ssl.conf
Normal file
37
docker/nginx/default-ssl.conf
Normal file
@ -0,0 +1,37 @@
|
||||
add_header X-Frame-Options DENY;
|
||||
add_header X-Content-Type-Options nosniff;
|
||||
add_header X-XSS-Protection "1; mode=block";
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
server_name _;
|
||||
return 301 https://$host$request_uri;
|
||||
}
|
||||
|
||||
server {
|
||||
listen 443 ssl;
|
||||
server_name _;
|
||||
access_log /dev/stdout;
|
||||
error_log /dev/stderr;
|
||||
ssl_certificate /etc/nginx/ssl/server.crt;
|
||||
ssl_certificate_key /etc/nginx/ssl/server.key;
|
||||
ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
|
||||
ssl_ciphers HIGH:!aNULL:!MD5;
|
||||
|
||||
location /api {
|
||||
proxy_pass http://127.0.0.1:8000;
|
||||
proxy_next_upstream error timeout invalid_header http_500 http_502 http_503 http_504;
|
||||
proxy_redirect off;
|
||||
proxy_buffering off;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
}
|
||||
|
||||
location / {
|
||||
root /opt/lemur/lemur/static/dist;
|
||||
include mime.types;
|
||||
index index.html;
|
||||
}
|
||||
|
||||
}
|
||||
26
docker/nginx/default.conf
Normal file
26
docker/nginx/default.conf
Normal file
@ -0,0 +1,26 @@
|
||||
add_header X-Frame-Options DENY;
|
||||
add_header X-Content-Type-Options nosniff;
|
||||
add_header X-XSS-Protection "1; mode=block";
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
access_log /dev/stdout;
|
||||
error_log /dev/stderr;
|
||||
|
||||
location /api {
|
||||
proxy_pass http://127.0.0.1:8000;
|
||||
proxy_next_upstream error timeout invalid_header http_500 http_502 http_503 http_504;
|
||||
proxy_redirect off;
|
||||
proxy_buffering off;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
}
|
||||
|
||||
location / {
|
||||
root /opt/lemur/lemur/static/dist;
|
||||
include mime.types;
|
||||
index index.html;
|
||||
}
|
||||
|
||||
}
|
||||
4
docker/pgsql-env
Normal file
4
docker/pgsql-env
Normal file
@ -0,0 +1,4 @@
|
||||
POSTGRES_USER=lemur
|
||||
POSTGRES_PASSWORD=12345
|
||||
POSTGRES_DB=lemur
|
||||
POSTGRES_HOST=postgres
|
||||
60
docker/src/lemur.conf.py
Normal file
60
docker/src/lemur.conf.py
Normal file
@ -0,0 +1,60 @@
|
||||
import os
|
||||
import random
|
||||
import string
|
||||
import base64
|
||||
from ast import literal_eval
|
||||
|
||||
_basedir = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
CORS = os.environ.get("CORS") == "True"
|
||||
debug = os.environ.get("DEBUG") == "True"
|
||||
|
||||
|
||||
def get_random_secret(length):
|
||||
secret_key = ''.join(random.choice(string.ascii_uppercase) for x in range(round(length / 4)))
|
||||
secret_key = secret_key + ''.join(random.choice("~!@#$%^&*()_+") for x in range(round(length / 4)))
|
||||
secret_key = secret_key + ''.join(random.choice(string.ascii_lowercase) for x in range(round(length / 4)))
|
||||
return secret_key + ''.join(random.choice(string.digits) for x in range(round(length / 4)))
|
||||
|
||||
|
||||
SECRET_KEY = repr(os.environ.get('SECRET_KEY', get_random_secret(32).encode('utf8')))
|
||||
|
||||
LEMUR_TOKEN_SECRET = repr(os.environ.get('LEMUR_TOKEN_SECRET',
|
||||
base64.b64encode(get_random_secret(32).encode('utf8'))))
|
||||
LEMUR_ENCRYPTION_KEYS = repr(os.environ.get('LEMUR_ENCRYPTION_KEYS',
|
||||
base64.b64encode(get_random_secret(32).encode('utf8'))))
|
||||
|
||||
LEMUR_ALLOWED_DOMAINS = []
|
||||
|
||||
LEMUR_EMAIL = ''
|
||||
LEMUR_SECURITY_TEAM_EMAIL = []
|
||||
|
||||
ALLOW_CERT_DELETION = os.environ.get('ALLOW_CERT_DELETION') == "True"
|
||||
|
||||
LEMUR_DEFAULT_COUNTRY = str(os.environ.get('LEMUR_DEFAULT_COUNTRY',''))
|
||||
LEMUR_DEFAULT_STATE = str(os.environ.get('LEMUR_DEFAULT_STATE',''))
|
||||
LEMUR_DEFAULT_LOCATION = str(os.environ.get('LEMUR_DEFAULT_LOCATION',''))
|
||||
LEMUR_DEFAULT_ORGANIZATION = str(os.environ.get('LEMUR_DEFAULT_ORGANIZATION',''))
|
||||
LEMUR_DEFAULT_ORGANIZATIONAL_UNIT = str(os.environ.get('LEMUR_DEFAULT_ORGANIZATIONAL_UNIT',''))
|
||||
|
||||
LEMUR_DEFAULT_ISSUER_PLUGIN = str(os.environ.get('LEMUR_DEFAULT_ISSUER_PLUGIN',''))
|
||||
LEMUR_DEFAULT_AUTHORITY = str(os.environ.get('LEMUR_DEFAULT_AUTHORITY',''))
|
||||
|
||||
ACTIVE_PROVIDERS = []
|
||||
|
||||
METRIC_PROVIDERS = []
|
||||
|
||||
LOG_LEVEL = str(os.environ.get('LOG_LEVEL','DEBUG'))
|
||||
LOG_FILE = str(os.environ.get('LOG_FILE','/home/lemur/.lemur/lemur.log'))
|
||||
|
||||
SQLALCHEMY_DATABASE_URI = os.environ.get('SQLALCHEMY_DATABASE_URI','postgresql://lemur:lemur@localhost:5432/lemur')
|
||||
|
||||
LDAP_DEBUG = os.environ.get('LDAP_DEBUG') == "True"
|
||||
LDAP_AUTH = os.environ.get('LDAP_AUTH') == "True"
|
||||
LDAP_IS_ACTIVE_DIRECTORY = os.environ.get('LDAP_IS_ACTIVE_DIRECTORY') == "True"
|
||||
LDAP_BIND_URI = str(os.environ.get('LDAP_BIND_URI',''))
|
||||
LDAP_BASE_DN = str(os.environ.get('LDAP_BASE_DN',''))
|
||||
LDAP_EMAIL_DOMAIN = str(os.environ.get('LDAP_EMAIL_DOMAIN',''))
|
||||
LDAP_USE_TLS = str(os.environ.get('LDAP_USE_TLS',''))
|
||||
LDAP_REQUIRED_GROUP = str(os.environ.get('LDAP_REQUIRED_GROUP',''))
|
||||
LDAP_GROUPS_TO_ROLES = literal_eval(os.environ.get('LDAP_GROUPS_TO_ROLES') or "{}")
|
||||
33
docker/supervisor.conf
Normal file
33
docker/supervisor.conf
Normal file
@ -0,0 +1,33 @@
|
||||
[supervisord]
|
||||
nodaemon=true
|
||||
user=root
|
||||
logfile=/dev/stdout
|
||||
logfile_maxbytes=0
|
||||
pidfile = /tmp/supervisord.pid
|
||||
|
||||
[program:lemur]
|
||||
environment=LEMUR_CONF=/home/lemur/.lemur/lemur.conf.py
|
||||
command=lemur start -b 0.0.0.0:8000
|
||||
user=lemur
|
||||
directory=/opt/lemur/lemur
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes = 0
|
||||
stderr_logfile=/dev/stderr
|
||||
stderr_logfile_maxbytes=0
|
||||
|
||||
[program:nginx]
|
||||
command=/usr/sbin/nginx -g "daemon off;"
|
||||
user=root
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes = 0
|
||||
stderr_logfile=/dev/stderr
|
||||
stderr_logfile_maxbytes=0
|
||||
|
||||
[program:cron]
|
||||
environment=LEMUR_CONF=/home/lemur/.lemur/lemur.conf.py
|
||||
command=/usr/sbin/crond -f
|
||||
user=root
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes = 0
|
||||
stderr_logfile=/dev/stderr
|
||||
stderr_logfile_maxbytes=0
|
||||
1603 docs/administration.rst Normal file
@@ -0,0 +1,1603 @@
Configuration
=============
|
||||
|
||||
.. warning::
|
||||
There are many secrets that Lemur uses that must be protected. All of these options are set via the Lemur configuration
|
||||
file. It is highly advised that you do not store your secrets in this file! Lemur provides functions
|
||||
that allow you to encrypt files at rest and decrypt them when it's time for deployment. See :ref:`Credential Management <CredentialManagement>`
|
||||
for more information.
|
||||
|
||||
.. note::
|
||||
All configuration values are python strings unless otherwise noted.
|
||||
|
||||
|
||||
Basic Configuration
|
||||
-------------------
|
||||
|
||||
.. data:: LOG_LEVEL
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
LOG_LEVEL = "DEBUG"
|
||||
|
||||
.. data:: LOG_FILE
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
LOG_FILE = "/logs/lemur/lemur-test.log"
|
||||
|
||||
.. data:: DEBUG
|
||||
:noindex:
|
||||
|
||||
Sets the flask debug flag to true (if supported by the webserver)
|
||||
|
||||
::
|
||||
|
||||
DEBUG = False
|
||||
|
||||
.. warning::
|
||||
This should never be used in a production environment as it exposes Lemur to
|
||||
remote code execution through the debug console.
|
||||
|
||||
|
||||
.. data:: CORS
|
||||
:noindex:
|
||||
|
||||
Allows for cross domain requests; this is most commonly used for development but could
be used in production if you decide to host the web UI on a different domain than the server.

Use this cautiously; if you're not sure, set it to `False`.
|
||||
|
||||
::
|
||||
|
||||
CORS = False
|
||||
|
||||
|
||||
.. data:: SQLALCHEMY_DATABASE_URI
|
||||
:noindex:
|
||||
|
||||
If you have ever used SQLAlchemy before, this is the standard connection string. Lemur uses a Postgres database, and the connection string would look something like:
|
||||
|
||||
::
|
||||
|
||||
SQLALCHEMY_DATABASE_URI = 'postgresql://<user>:<password>@<hostname>:5432/lemur'
|
||||
|
||||
|
||||
.. data:: SQLALCHEMY_POOL_SIZE
|
||||
:noindex:
|
||||
|
||||
The default connection pool size is 5 for sqlalchemy managed connections. Depending on the number of Lemur instances,
|
||||
please specify per instance connection pool size. Below is an example to set connection pool size to 10.
|
||||
|
||||
::
|
||||
|
||||
SQLALCHEMY_POOL_SIZE = 10
|
||||
|
||||
|
||||
.. warning::
|
||||
This is an optional setting but important to review and set for optimal database connection usage and for overall database performance.
|
||||
|
||||
.. data:: SQLALCHEMY_MAX_OVERFLOW
|
||||
:noindex:
|
||||
|
||||
This setting allows connections to be created in addition to the specified pool size. By default, sqlalchemy
allows 10 connections to be created in addition to the pool size. This is also an optional setting. If `SQLALCHEMY_POOL_SIZE` and
`SQLALCHEMY_MAX_OVERFLOW` are not specified, then each Lemur instance may create a maximum of 15 connections.
|
||||
|
||||
::
|
||||
|
||||
SQLALCHEMY_MAX_OVERFLOW = 0
|
||||
|
||||
|
||||
.. note::
|
||||
Setting `SQLALCHEMY_MAX_OVERFLOW` to 0 will enforce a limit of not creating connections above the specified pool size.
|
||||
|
||||
|
||||
.. data:: LEMUR_ALLOW_WEEKEND_EXPIRATION
    :noindex:

    Specifies whether to allow certificates created by Lemur to expire on weekends. Default is True.
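
    For example, to prevent certificates from being issued with weekend expiration dates:

    ::

        LEMUR_ALLOW_WEEKEND_EXPIRATION = False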
|
||||
|
||||
.. data:: LEMUR_ALLOWED_DOMAINS
    :noindex:

    List of regular expressions for domain restrictions; if the list is not empty, normal users can only issue
    certificates for domain names matching at least one pattern on this list. Administrators are exempt from this
    restriction.

    Certificate common name is matched against these rules *if* it does not contain a space. SubjectAltName DNS names
    are always matched against these rules.

    Take care to write patterns in such a way as to not allow the `*` wildcard character inadvertently. To match a `.`
    character, it must be escaped (as `\.`).
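
    For example, a pattern list along these lines (the domains are illustrative) would restrict normal users to subdomains of example.com and example.net:

    ::

        LEMUR_ALLOWED_DOMAINS = ['^[a-zA-Z0-9-]+\.example\.com$', '^[a-zA-Z0-9-]+\.example\.net$']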
|
||||
|
||||
.. data:: LEMUR_OWNER_EMAIL_IN_SUBJECT
    :noindex:

    By default, Lemur will add the certificate owner's email address to the certificate subject (for CAs that allow it).
    Set this to `False` to disable this.
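
    For example, to keep the owner's email address out of the certificate subject:

    ::

        LEMUR_OWNER_EMAIL_IN_SUBJECT = False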
|
||||
|
||||
.. data:: LEMUR_TOKEN_SECRET
|
||||
:noindex:
|
||||
|
||||
The TOKEN_SECRET is the secret used to create JWT tokens that are given out to users. This should be securely generated and kept private.
|
||||
|
||||
::
|
||||
|
||||
LEMUR_TOKEN_SECRET = 'supersecret'
|
||||
|
||||
An example of how you might generate a random string:
|
||||
|
||||
>>> import random
>>> import string
>>> secret_key = ''.join(random.choice(string.ascii_uppercase) for x in range(6))
>>> secret_key = secret_key + ''.join(random.choice("~!@#$%^&*()_+") for x in range(6))
>>> secret_key = secret_key + ''.join(random.choice(string.ascii_lowercase) for x in range(6))
>>> secret_key = secret_key + ''.join(random.choice(string.digits) for x in range(6))
|
||||
|
||||
|
||||
.. data:: LEMUR_ENCRYPTION_KEYS
|
||||
:noindex:
|
||||
|
||||
The LEMUR_ENCRYPTION_KEYS is used to encrypt data at rest within Lemur's database. Without a key Lemur will refuse
|
||||
to start. Multiple keys can be provided to facilitate key rotation. The first key in the list is used for
|
||||
encryption and all keys are tried for decryption until one works. Each key must be 32 URL safe base-64 encoded bytes.
|
||||
|
||||
Running lemur create_config will securely generate a key for your configuration file.
|
||||
If you would like to generate your own, we recommend the following method:
|
||||
|
||||
>>> import os
|
||||
>>> import base64
|
||||
>>> base64.urlsafe_b64encode(os.urandom(32))
|
||||
|
||||
::
|
||||
|
||||
LEMUR_ENCRYPTION_KEYS = ['1YeftooSbxCiX2zo8m1lXtpvQjy27smZcUUaGmffhMY=', 'LAfQt6yrkLqOK5lwpvQcT4jf2zdeTQJV1uYeh9coT5s=']
|
||||
|
||||
|
||||
.. data:: PUBLIC_CA_MAX_VALIDITY_DAYS
|
||||
:noindex:
|
||||
Use this config to override the limit of 397 days of validity for certificates issued by CA/Browser compliant authorities.
|
||||
The authorities with cab_compliant option set to true will use this config. The example below overrides the default validity
|
||||
of 397 days and sets it to 365 days.
|
||||
|
||||
::
|
||||
|
||||
PUBLIC_CA_MAX_VALIDITY_DAYS = 365
|
||||
|
||||
|
||||
.. data:: DEFAULT_VALIDITY_DAYS
|
||||
:noindex:
|
||||
Use this config to override the default validity of 365 days for certificates offered through Lemur UI. Any CA which
|
||||
is not CA/Browser Forum compliant will be using this value as default validity to be displayed on UI. Please
|
||||
note that this config is used for cert issuance only through Lemur UI. The example below overrides the default validity
|
||||
of 365 days and sets it to 1095 days (3 years).
|
||||
|
||||
::
|
||||
|
||||
DEFAULT_VALIDITY_DAYS = 1095
|
||||
|
||||
|
||||
.. data:: DEBUG_DUMP
    :noindex:

    Dump all imported or generated CSR and certificate details to stdout using OpenSSL. (default: `False`)
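
    For example, to dump CSR and certificate details while troubleshooting an issuance problem:

    ::

        DEBUG_DUMP = True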
|
||||
|
||||
.. data:: ALLOW_CERT_DELETION
    :noindex:

    When set to True, certificates can be marked as deleted via the API and deleted certificates will not be displayed
    in the UI. When set to False (the default), the certificate delete API will always return "405 method not allowed"
    and deleted certificates will always be visible in the UI. (default: `False`)
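
    For example, to allow certificates to be marked as deleted via the API:

    ::

        ALLOW_CERT_DELETION = True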
|
||||
|
||||
|
||||
Certificate Default Options
|
||||
---------------------------
|
||||
|
||||
Lemur allows you to fine tune your certificates to your organization. The following defaults are presented in the UI
|
||||
and are used when Lemur creates the CSR for your certificates.
|
||||
|
||||
|
||||
.. data:: LEMUR_DEFAULT_COUNTRY
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
LEMUR_DEFAULT_COUNTRY = "US"
|
||||
|
||||
|
||||
.. data:: LEMUR_DEFAULT_STATE
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
LEMUR_DEFAULT_STATE = "California"
|
||||
|
||||
|
||||
.. data:: LEMUR_DEFAULT_LOCATION
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
LEMUR_DEFAULT_LOCATION = "Los Gatos"
|
||||
|
||||
|
||||
.. data:: LEMUR_DEFAULT_ORGANIZATION
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
LEMUR_DEFAULT_ORGANIZATION = "Netflix"
|
||||
|
||||
|
||||
.. data:: LEMUR_DEFAULT_ORGANIZATIONAL_UNIT
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
LEMUR_DEFAULT_ORGANIZATIONAL_UNIT = ""
|
||||
|
||||
|
||||
.. data:: LEMUR_DEFAULT_ISSUER_PLUGIN
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
LEMUR_DEFAULT_ISSUER_PLUGIN = "verisign-issuer"
|
||||
|
||||
|
||||
.. data:: LEMUR_DEFAULT_AUTHORITY
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
LEMUR_DEFAULT_AUTHORITY = "verisign"
|
||||
|
||||
|
||||
Notification Options
|
||||
--------------------
|
||||
|
||||
Lemur currently has very basic support for notifications. Currently only expiration notifications are supported. Actual notification
|
||||
is handled by the notification plugins that you have configured. Lemur ships with the 'Email' notification that allows expiration emails
|
||||
to be sent to subscribers.
|
||||
|
||||
Templates for expiration emails are located under `lemur/plugins/lemur_email/templates` and can be modified for your needs.
|
||||
Notifications are sent to the certificate creator, owner and security team as specified by the `LEMUR_SECURITY_TEAM_EMAIL` configuration parameter.
|
||||
|
||||
Certificates marked as inactive will **not** be notified of upcoming expiration. This enables a user to essentially
silence the expiration. If a certificate is active and is expiring, the above will be notified according to the `LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS`, or
30, 15, and 2 days before expiration if no intervals are set.

Lemur supports sending certificate expiration notifications through SES and SMTP.
|
||||
|
||||
|
||||
.. data:: LEMUR_EMAIL_SENDER
    :noindex:

    Specifies which service will be delivering notification emails. Valid values are `SMTP` or `SES`.
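
    For example:

    ::

        LEMUR_EMAIL_SENDER = 'SES'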
|
||||
|
||||
.. note::
|
||||
If using SMTP as your provider you will need to define additional configuration options as specified by Flask-Mail.
|
||||
See: `Flask-Mail <https://pythonhosted.org/Flask-Mail>`_
|
||||
|
||||
If you are using SES the email specified by the `LEMUR_MAIL` configuration will need to be verified by AWS before
|
||||
you can send any mail. See: `Verifying Email Address in Amazon SES <http://docs.aws.amazon.com/ses/latest/DeveloperGuide/verify-email-addresses.html>`_
|
||||
|
||||
|
||||
.. data:: LEMUR_EMAIL
|
||||
:noindex:
|
||||
|
||||
Lemur sender's email
|
||||
|
||||
::
|
||||
|
||||
LEMUR_EMAIL = 'lemur.example.com'
|
||||
|
||||
|
||||
.. data:: LEMUR_SECURITY_TEAM_EMAIL
|
||||
:noindex:
|
||||
|
||||
This is an email or list of emails that should be notified when a certificate is expiring. It is also the contact email address for any discovered certificate.
|
||||
|
||||
::
|
||||
|
||||
LEMUR_SECURITY_TEAM_EMAIL = ['security@example.com']
|
||||
|
||||
.. data:: LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS
|
||||
:noindex:
|
||||
|
||||
Lemur notification intervals
|
||||
|
||||
::
|
||||
|
||||
LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS = [30, 15, 2]
|
||||
|
||||
.. data:: LEMUR_SECURITY_TEAM_EMAIL_INTERVALS
|
||||
:noindex:
|
||||
|
||||
Alternate notification interval set for security team notifications. Use this if you would like the default security team notification interval for new certificates to differ from the global default as specified in LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS. If unspecified, the value of LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS is used. Security team default notifications for new certificates can effectively be disabled by setting this value to an empty array.
|
||||
|
||||
::
|
||||
|
||||
LEMUR_SECURITY_TEAM_EMAIL_INTERVALS = [15, 2]
|
||||
|
||||
|
||||
Celery Options
|
||||
---------------
|
||||
To make use of automated tasks within lemur (e.g. syncing source/destinations, or reissuing ACME certificates), you
|
||||
need to configure celery. See :ref:`Periodic Tasks <PeriodicTasks>` for more in depth documentation.
|
||||
|
||||
.. data:: CELERY_RESULT_BACKEND
|
||||
:noindex:
|
||||
|
||||
The url to your redis backend (needs to be in the format `redis://<host>:<port>/<database>`)
|
||||
|
||||
.. data:: CELERY_BROKER_URL
|
||||
:noindex:
|
||||
|
||||
The url to your redis broker (needs to be in the format `redis://<host>:<port>/<database>`)
|
||||
|
||||
.. data:: CELERY_IMPORTS
|
||||
:noindex:
|
||||
|
||||
The module that celery needs to import, in our case that's `lemur.common.celery`
|
||||
|
||||
.. data:: CELERY_TIMEZONE
|
||||
:noindex:
|
||||
|
||||
The timezone for celery to work with
|
||||
|
||||
|
||||
.. data:: CELERYBEAT_SCHEDULE
    :noindex:

    This defines the schedule with which celery beat makes the worker run the specified tasks.
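
    A minimal sketch of such a schedule (the task name and interval are illustrative; point each `task` entry at the celery task you actually want to run):

    .. code-block:: python

        from celery.schedules import crontab

        CELERYBEAT_SCHEDULE = {
            'fetch_all_pending_acme_certs': {
                'task': 'lemur.common.celery.fetch_all_pending_acme_certs',
                'options': {'expires': 180},
                'schedule': crontab(minute='*/15'),
            },
        }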
|
||||
|
||||
Since the celery module relies on the RedisHandler, the following options also need to be set.
|
||||
|
||||
.. data:: REDIS_HOST
|
||||
:noindex:
|
||||
|
||||
Hostname of your redis instance
|
||||
|
||||
.. data:: REDIS_PORT
|
||||
:noindex:
|
||||
|
||||
Port on which redis is running (default: 6379)
|
||||
|
||||
.. data:: REDIS_DB
    :noindex:

    Which redis database to use; by default redis offers databases 0-15 (default: 0)
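
A minimal sketch of the celery/redis settings described above (hostname, port, database, and timezone are illustrative; point them at your own redis deployment):

.. code-block:: python

    REDIS_HOST = 'localhost'
    REDIS_PORT = 6379
    REDIS_DB = 0

    CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'
    CELERY_BROKER_URL = 'redis://localhost:6379/0'
    CELERY_IMPORTS = ('lemur.common.celery',)
    CELERY_TIMEZONE = 'UTC'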
|
||||
|
||||
Authentication Options
|
||||
----------------------
|
||||
Lemur currently supports Basic Authentication, LDAP Authentication, Ping OAuth2, and Google out of the box. Additional flows can be added relatively easily.
|
||||
|
||||
LDAP Options
|
||||
~~~~~~~~~~~~
|
||||
|
||||
Lemur supports the use of an LDAP server in conjunction with Basic Authentication. Lemur local users can still be defined and take precedence over LDAP users. If a local user does not exist, LDAP will be queried for authentication. Only simple ldap binding with or without TLS is supported.
|
||||
|
||||
LDAP support requires the pyldap python library, which also depends on the following openldap packages.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ sudo apt-get update
|
||||
$ sudo apt-get install libldap2-dev libsasl2-dev libldap2-dev libssl-dev
|
||||
|
||||
|
||||
To configure the use of an LDAP server, a number of settings need to be configured in `lemur.conf.py`.
|
||||
|
||||
Here is an example LDAP configuration stanza you can add to your config. Adjust to suit your environment of course.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
LDAP_AUTH = True
|
||||
LDAP_BIND_URI='ldaps://secure.evilcorp.net'
|
||||
LDAP_BASE_DN='DC=users,DC=evilcorp,DC=net'
|
||||
LDAP_EMAIL_DOMAIN='evilcorp.net'
|
||||
LDAP_USE_TLS = True
|
||||
LDAP_CACERT_FILE = '/opt/lemur/trusted.pem'
|
||||
LDAP_REQUIRED_GROUP = 'certificate-management-access'
|
||||
LDAP_GROUPS_TO_ROLES = {'certificate-management-admin': 'admin', 'certificate-management-read-only': 'read-only'}
|
||||
LDAP_IS_ACTIVE_DIRECTORY = True
|
||||
|
||||
|
||||
The Lemur LDAP module uses the `user principal name` (UPN) of the authenticating user to bind. This is done once for each user at login time. The UPN is effectively the user's email address in AD/LDAP. If the user doesn't provide an email address, one is constructed from the supplied username (which should normally match the sAMAccountName) and the value of the LDAP_EMAIL_DOMAIN config option.
|
||||
The LDAP_BASE_DN config option tells Lemur where to search within the AD/LDAP tree for the given UPN (user). If the bind with those credentials succeeds, the user exists in AD/LDAP and supplied the correct password.
|
||||
|
||||
Each of the LDAP options is described below.
|
||||
|
||||
.. data:: LDAP_AUTH
|
||||
:noindex:
|
||||
|
||||
This enables the use of LDAP
|
||||
|
||||
::
|
||||
|
||||
LDAP_AUTH = True
|
||||
|
||||
.. data:: LDAP_BIND_URI
|
||||
:noindex:
|
||||
|
||||
Specifies the LDAP server connection string
|
||||
|
||||
::
|
||||
|
||||
LDAP_BIND_URI = 'ldaps://hostname'
|
||||
|
||||
.. data:: LDAP_BASE_DN
|
||||
:noindex:
|
||||
|
||||
Specifies the LDAP distinguished name location to search for users
|
||||
|
||||
::
|
||||
|
||||
LDAP_BASE_DN = 'DC=Users,DC=Evilcorp,DC=com'
|
||||
|
||||
.. data:: LDAP_EMAIL_DOMAIN
|
||||
:noindex:
|
||||
|
||||
The email domain used by users in your directory. This is used to build the userPrincipalName to search with.
|
||||
|
||||
::
|
||||
|
||||
LDAP_EMAIL_DOMAIN = 'evilcorp.com'
|
||||
|
||||
The following LDAP options are not required; however, TLS is always recommended.
|
||||
|
||||
.. data:: LDAP_USE_TLS
|
||||
:noindex:
|
||||
|
||||
Enables the use of TLS when connecting to the LDAP server. Ensure the LDAP_BIND_URI uses the ldaps scheme.
|
||||
|
||||
::
|
||||
|
||||
LDAP_USE_TLS = True
|
||||
|
||||
.. data:: LDAP_CACERT_FILE
|
||||
:noindex:
|
||||
|
||||
Specify a Certificate Authority file containing PEM encoded trusted issuer certificates. This can be used if your LDAP server is using certificates issued by a private CA.
|
||||
|
||||
::
|
||||
|
||||
LDAP_CACERT_FILE = '/path/to/cacert/file'
|
||||
|
||||
.. data:: LDAP_REQUIRED_GROUP
|
||||
:noindex:
|
||||
|
||||
Lemur has pretty open permissions. You can define an LDAP group to specify who can access Lemur. Only members of this group will be able to log in.
|
||||
|
||||
::
|
||||
|
||||
LDAP_REQUIRED_GROUP = 'Lemur LDAP Group Name'
|
||||
|
||||
.. data:: LDAP_GROUPS_TO_ROLES
|
||||
:noindex:
|
||||
|
||||
You can also define a dictionary of LDAP groups mapped to Lemur roles. This allows you to use LDAP groups to manage access to owner/creator roles in Lemur.
|
||||
|
||||
::
|
||||
|
||||
LDAP_GROUPS_TO_ROLES = {'lemur_admins': 'admin', 'Lemur Team DL Group': 'team@example.com'}
|
||||
|
||||
|
||||
.. data:: LDAP_IS_ACTIVE_DIRECTORY
|
||||
:noindex:
|
||||
|
||||
When set to True, nested group memberships are supported by searching for groups with the member:1.2.840.113556.1.4.1941 attribute set to the user DN.
|
||||
When set to False, the list of groups will be determined by the 'memberof' attribute of the LDAP user logging in.
|
||||
|
||||
::
|
||||
|
||||
LDAP_IS_ACTIVE_DIRECTORY = False
|
||||
|
||||
|
||||
Authentication Providers
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
If you are not using an authentication provider you do not need to configure any of these options.
|
||||
|
||||
For more information about how to use social logins, see: `Satellizer <https://github.com/sahat/satellizer>`_
|
||||
|
||||
.. data:: ACTIVE_PROVIDERS
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
ACTIVE_PROVIDERS = ["ping", "google", "oauth2"]
|
||||
|
||||
.. data:: PING_SECRET
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
PING_SECRET = 'somethingsecret'
|
||||
|
||||
.. data:: PING_ACCESS_TOKEN_URL
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
PING_ACCESS_TOKEN_URL = "https://<yourpingserver>/as/token.oauth2"
|
||||
|
||||
|
||||
.. data:: PING_USER_API_URL
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
PING_USER_API_URL = "https://<yourpingserver>/idp/userinfo.openid"
|
||||
|
||||
.. data:: PING_JWKS_URL
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
PING_JWKS_URL = "https://<yourpingserver>/pf/JWKS"
|
||||
|
||||
.. data:: PING_NAME
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
PING_NAME = "Example Oauth2 Provider"
|
||||
|
||||
.. data:: PING_CLIENT_ID
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
PING_CLIENT_ID = "client-id"
|
||||
|
||||
.. data:: PING_REDIRECT_URI
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
PING_REDIRECT_URI = "https://<yourlemurserver>/api/1/auth/ping"
|
||||
|
||||
.. data:: PING_AUTH_ENDPOINT
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
PING_AUTH_ENDPOINT = "https://<yourpingserver>/oauth2/authorize"
|
||||
|
||||
.. data:: OAUTH2_SECRET
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
OAUTH2_SECRET = 'somethingsecret'
|
||||
|
||||
.. data:: OAUTH2_ACCESS_TOKEN_URL
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
OAUTH2_ACCESS_TOKEN_URL = "https://<youroauthserver>/oauth2/v1/authorize"
|
||||
|
||||
|
||||
.. data:: OAUTH2_USER_API_URL
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
OAUTH2_USER_API_URL = "https://<youroauthserver>/oauth2/v1/userinfo"
|
||||
|
||||
.. data:: OAUTH2_JWKS_URL
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
OAUTH2_JWKS_URL = "https://<youroauthserver>/oauth2/v1/keys"
|
||||
|
||||
.. data:: OAUTH2_NAME
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
OAUTH2_NAME = "Example Oauth2 Provider"
|
||||
|
||||
.. data:: OAUTH2_CLIENT_ID
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
OAUTH2_CLIENT_ID = "client-id"
|
||||
|
||||
.. data:: OAUTH2_REDIRECT_URI
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
OAUTH2_REDIRECT_URI = "https://<yourlemurserver>/api/1/auth/oauth2"
|
||||
|
||||
.. data:: OAUTH2_AUTH_ENDPOINT
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
OAUTH2_AUTH_ENDPOINT = "https://<youroauthserver>/oauth2/v1/authorize"
|
||||
|
||||
.. data:: OAUTH2_VERIFY_CERT
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
OAUTH2_VERIFY_CERT = True
|
||||
|
||||
.. data:: GOOGLE_CLIENT_ID
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
GOOGLE_CLIENT_ID = "client-id"
|
||||
|
||||
.. data:: GOOGLE_SECRET
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
GOOGLE_SECRET = "somethingsecret"
|
||||
|
||||
|
||||
Metric Providers
|
||||
~~~~~~~~~~~~~~~~
|
||||
|
||||
If you are not using a metric provider you do not need to configure any of these options.
|
||||
|
||||
.. data:: METRIC_PROVIDERS
|
||||
:noindex:
|
||||
|
||||
A list of metric plugin slugs to be activated.
|
||||
|
||||
::
|
||||
|
||||
METRIC_PROVIDERS = ['atlas-metric']
|
||||
|
||||
|
||||
Plugin Specific Options
|
||||
-----------------------
|
||||
|
||||
Active Directory Certificate Services Plugin
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
.. data:: ADCS_SERVER
|
||||
:noindex:
|
||||
|
||||
FQDN of your ADCS Server
|
||||
|
||||
|
||||
.. data:: ADCS_AUTH_METHOD
|
||||
:noindex:
|
||||
|
||||
The chosen authentication method. Either ‘basic’ (the default), ‘ntlm’ or ‘cert’ (SSL client certificate). The next 2 variables are interpreted differently for different methods.
|
||||
|
||||
|
||||
.. data:: ADCS_USER
|
||||
:noindex:
|
||||
|
||||
The username (basic) or the path to the public cert (cert) of the user accessing PKI
|
||||
|
||||
|
||||
.. data:: ADCS_PWD
|
||||
:noindex:
|
||||
|
||||
The password (basic) or the path to the private key (cert) of the user accessing PKI
|
||||
|
||||
|
||||
.. data:: ADCS_TEMPLATE
|
||||
:noindex:
|
||||
|
||||
Template to be used for certificate issuing. Usually the display name without spaces.
|
||||
|
||||
.. data:: ADCS_TEMPLATE_<upper(authority.name)>
|
||||
:noindex:
|
||||
|
||||
If a config variable ADCS_TEMPLATE_<upper(authority.name)> exists, its value is used as the certificate template for that authority; otherwise ADCS_TEMPLATE is used, to remain compatible with former versions. Usually the display name without spaces.
|
||||
|
||||
.. data:: ADCS_START
|
||||
:noindex:
|
||||
Used by the ADCS source plugin. Minimum ID of the first certificate to be returned. The ID is increased by one until ADCS_STOP is reached. Missing certificate IDs are ignored.
|
||||
|
||||
.. data:: ADCS_STOP
|
||||
:noindex:
|
||||
Used by the ADCS source plugin. Maximum ID of the certificates returned.
|
||||
|
||||
|
||||
.. data:: ADCS_ISSUING
|
||||
:noindex:
|
||||
|
||||
Contains the issuing cert of the CA
|
||||
|
||||
|
||||
.. data:: ADCS_ROOT
|
||||
:noindex:
|
||||
|
||||
Contains the root cert of the CA
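
An illustrative configuration stanza tying these options together (all values below are placeholders, not real endpoints or credentials):

.. code-block:: python

    ADCS_SERVER = 'adcs.example.com'
    ADCS_AUTH_METHOD = 'basic'
    ADCS_USER = 'lemur-service'
    ADCS_PWD = 'changeme'  # placeholder
    ADCS_TEMPLATE = 'WebServer'
    ADCS_START = 1
    ADCS_STOP = 100000
    ADCS_ISSUING = '<issuing CA certificate in PEM format>'
    ADCS_ROOT = '<root CA certificate in PEM format>'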
|
||||
|
||||
Entrust Plugin
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Enables the creation of Entrust certificates. You need to set up API access with Entrust support. Check the information in the Entrust Portal as well.
|
||||
Certificates are created as "SERVER_AND_CLIENT_AUTH".
|
||||
Caution: Sometimes the Entrust API does not respond in a timely manner. This error is handled and reported by the plugin. Should this happen, simply hit the create button again to create a valid certificate.
|
||||
The following parameters have to be set in the configuration files.
|
||||
|
||||
.. data:: ENTRUST_URL
|
||||
:noindex:
|
||||
|
||||
This is the url for the Entrust API. Refer to the API documentation.
|
||||
|
||||
.. data:: ENTRUST_API_CERT
|
||||
:noindex:
|
||||
|
||||
Path to the certificate file in PEM format. This certificate is created in the onboarding process. Refer to the API documentation.
|
||||
|
||||
.. data:: ENTRUST_API_KEY
|
||||
:noindex:
|
||||
|
||||
Path to the key file in RSA format. This key is created in the onboarding process. Refer to the API documentation. Caution: the requests library cannot handle encrypted keys, so the key file has to contain the unencrypted key. Please put it in a secure location on the server.
|
||||
|
||||
.. data:: ENTRUST_API_USER
|
||||
:noindex:
|
||||
|
||||
String with the API user. This user is created in the onboarding process. Refer to the API documentation.
|
||||
|
||||
.. data:: ENTRUST_API_PASS
|
||||
:noindex:
|
||||
|
||||
String with the password for the API user. This password is created in the onboarding process. Refer to the API documentation.
|
||||
|
||||
.. data:: ENTRUST_NAME
|
||||
:noindex:
|
||||
|
||||
String with the name that should appear as certificate owner in the Entrust portal. Refer to the API documentation.
|
||||
|
||||
.. data:: ENTRUST_EMAIL
|
||||
:noindex:
|
||||
|
||||
String with the email address that should appear as certificate contact email in the Entrust portal. Refer to the API documentation.
|
||||
|
||||
.. data:: ENTRUST_PHONE
|
||||
:noindex:
|
||||
|
||||
String with the phone number that should appear as certificate contact in the Entrust portal. Refer to the API documentation.
|
||||
|
||||
.. data:: ENTRUST_ISSUING
|
||||
:noindex:
|
||||
|
||||
Contains the issuing cert of the CA
|
||||
|
||||
.. data:: ENTRUST_ROOT
|
||||
:noindex:
|
||||
|
||||
Contains the root cert of the CA
|
||||
|
||||
.. data:: ENTRUST_PRODUCT_<upper(authority.name)>
|
||||
:noindex:
|
||||
|
||||
If a config variable ENTRUST_PRODUCT_<upper(authority.name)> exists, its value is used as the certificate product name; otherwise it defaults to "STANDARD_SSL". Refer to the API documentation for valid product names.
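
An illustrative configuration stanza (the URL, paths and credentials below are placeholders; take the real values from your Entrust onboarding information):

.. code-block:: python

    ENTRUST_URL = 'https://<entrust-api-url>'
    ENTRUST_API_CERT = '/opt/lemur/entrust_api_cert.pem'
    ENTRUST_API_KEY = '/opt/lemur/entrust_api_key.pem'  # unencrypted key; keep it in a secure location
    ENTRUST_API_USER = 'apiuser'
    ENTRUST_API_PASS = 'changeme'  # placeholder
    ENTRUST_NAME = 'Example Corp'
    ENTRUST_EMAIL = 'certificates@example.com'
    ENTRUST_PHONE = '+1 555 0100'
    ENTRUST_ISSUING = '<issuing CA certificate in PEM format>'
    ENTRUST_ROOT = '<root CA certificate in PEM format>'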
|
||||
|
||||
Verisign Issuer Plugin
|
||||
~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Authorities will each have their own configuration options. This section describes the bundled Verisign/Symantec plugin;
other issuer plugins define their own options. Refer to each plugin's documentation for those options.
|
||||
|
||||
.. data:: VERISIGN_URL
|
||||
:noindex:
|
||||
|
||||
This is the url for the Verisign API
|
||||
|
||||
|
||||
.. data:: VERISIGN_PEM_PATH
|
||||
:noindex:
|
||||
|
||||
This is the path to the mutual TLS certificate used for communicating with Verisign
|
||||
|
||||
|
||||
.. data:: VERISIGN_FIRST_NAME
|
||||
:noindex:
|
||||
|
||||
This is the first name to be used when requesting the certificate
|
||||
|
||||
|
||||
.. data:: VERISIGN_LAST_NAME
|
||||
:noindex:
|
||||
|
||||
This is the last name to be used when requesting the certificate
|
||||
|
||||
.. data:: VERISIGN_EMAIL
|
||||
:noindex:
|
||||
|
||||
This is the email to be used when requesting the certificate
|
||||
|
||||
|
||||
.. data:: VERISIGN_INTERMEDIATE
|
||||
:noindex:
|
||||
|
||||
This is the intermediate to be used for your CA chain
|
||||
|
||||
|
||||
.. data:: VERISIGN_ROOT
|
||||
:noindex:
|
||||
|
||||
This is the root to be used for your CA chain
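
An illustrative configuration stanza (all values below are placeholders):

.. code-block:: python

    VERISIGN_URL = 'https://<verisign-api-hostname>'
    VERISIGN_PEM_PATH = '/opt/lemur/verisign-mtls.pem'
    VERISIGN_FIRST_NAME = 'Jane'
    VERISIGN_LAST_NAME = 'Doe'
    VERISIGN_EMAIL = 'jane.doe@example.com'
    VERISIGN_INTERMEDIATE = '<intermediate certificate in PEM format>'
    VERISIGN_ROOT = '<root certificate in PEM format>'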
|
||||
|
||||
|
||||
Digicert Issuer Plugin
|
||||
~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The following configuration properties are required to use the Digicert issuer plugin.
|
||||
|
||||
|
||||
.. data:: DIGICERT_URL
|
||||
:noindex:
|
||||
|
||||
This is the url for the Digicert API (e.g. https://www.digicert.com)
|
||||
|
||||
|
||||
.. data:: DIGICERT_ORDER_TYPE
|
||||
:noindex:
|
||||
|
||||
This is the type of certificate to order. (e.g. ssl_plus, ssl_ev_plus see: https://www.digicert.com/services/v2/documentation/order/overview-submit)
|
||||
|
||||
|
||||
.. data:: DIGICERT_API_KEY
|
||||
:noindex:
|
||||
|
||||
This is the Digicert API key
|
||||
|
||||
|
||||
.. data:: DIGICERT_ORG_ID
|
||||
:noindex:
|
||||
|
||||
This is the Digicert organization ID tied to your API key
|
||||
|
||||
|
||||
.. data:: DIGICERT_ROOT
|
||||
:noindex:
|
||||
|
||||
This is the root to be used for your CA chain
|
||||
|
||||
|
||||
.. data:: DIGICERT_DEFAULT_VALIDITY_DAYS
|
||||
:noindex:
|
||||
|
||||
This is the default validity (in days), if no end date is specified. (Default: 397)
|
||||
|
||||
|
||||
.. data:: DIGICERT_MAX_VALIDITY_DAYS
|
||||
:noindex:
|
||||
|
||||
This is the maximum validity (in days). (Default: value of DIGICERT_DEFAULT_VALIDITY_DAYS)
|
||||
|
||||
|
||||
.. data:: DIGICERT_PRIVATE
|
||||
:noindex:
|
||||
|
||||
This is whether or not to issue a private certificate. (Default: False)
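
An illustrative configuration stanza using the values mentioned above (the API key, organization ID, and root are placeholders):

.. code-block:: python

    DIGICERT_URL = 'https://www.digicert.com'
    DIGICERT_ORDER_TYPE = 'ssl_plus'
    DIGICERT_API_KEY = '<your api key>'
    DIGICERT_ORG_ID = 12345  # placeholder organization ID
    DIGICERT_ROOT = '<root certificate in PEM format>'
    DIGICERT_DEFAULT_VALIDITY_DAYS = 397
    DIGICERT_MAX_VALIDITY_DAYS = 397
    DIGICERT_PRIVATE = False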
|
||||
|
||||
|
||||
CFSSL Issuer Plugin
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The following configuration properties are required to use the CFSSL issuer plugin.
|
||||
|
||||
.. data:: CFSSL_URL
|
||||
:noindex:
|
||||
|
||||
This is the URL for the CFSSL API
|
||||
|
||||
.. data:: CFSSL_ROOT
|
||||
:noindex:
|
||||
|
||||
This is the root to be used for your CA chain
|
||||
|
||||
.. data:: CFSSL_INTERMEDIATE
|
||||
:noindex:
|
||||
|
||||
This is the intermediate to be used for your CA chain
|
||||
|
||||
.. data:: CFSSL_KEY
|
||||
:noindex:
|
||||
|
||||
This is the HMAC key used to authenticate to the CFSSL service. (Optional)
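
An illustrative configuration stanza (the URL, certificates and key are placeholders; 8888 is only the common default port for ``cfssl serve``):

.. code-block:: python

    CFSSL_URL = 'http://localhost:8888'
    CFSSL_ROOT = '<root certificate in PEM format>'
    CFSSL_INTERMEDIATE = '<intermediate certificate in PEM format>'
    CFSSL_KEY = '<hex-encoded hmac key>'  # optional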
|
||||
|
||||
|
||||
Hashicorp Vault Source/Destination Plugin
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Lemur can import and export certificate data to and from a Hashicorp Vault secrets store. Lemur can connect to a different Vault service per source/destination.
|
||||
|
||||
.. note:: This plugin does not supersede or overlap the 3rd party Vault Issuer plugin.
|
||||
|
||||
.. note:: Vault does not have any configuration properties; however, it does read a Vault access token from a file on disk. The Lemur service account needs read access to this file.
|
||||
|
||||
Vault Source
|
||||
""""""""""""
|
||||
|
||||
The Vault Source Plugin will read from one Vault object location per source defined. Each object in Vault is expected to contain one or more certificates.
|
||||
|
||||
Vault Destination
|
||||
"""""""""""""""""
|
||||
|
||||
A Vault destination can be one object in Vault or a directory where all certificates will be stored as their own object by CN.
|
||||
|
||||
The Vault destination supports a regex filter to prevent certificates whose SANs do not match the regex from being deployed. This is an optional feature defined per destination.
|
||||
|
||||
|
||||
AWS Source/Destination Plugin
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
In order for Lemur to manage its own account and other accounts we must ensure it has the correct AWS permissions.
|
||||
|
||||
.. note:: AWS usage is completely optional. Lemur can upload, find and manage TLS certificates in AWS, but it is not required to do so.
|
||||
|
||||
Setting up IAM roles
|
||||
""""""""""""""""""""
|
||||
|
||||
Lemur's AWS plugin uses boto heavily to talk to all the AWS resources it manages. By default it uses the on-instance credentials to make the necessary calls.
|
||||
|
||||
In order to limit the permissions, we will create two new IAM roles for Lemur. You can name them whatever you would like, but for example's sake we will call them LemurInstanceProfile and Lemur.
|
||||
|
||||
Lemur uses STS to talk to different accounts. For managing one account this isn't necessary, but we will still use it so that we can easily add new accounts.
|
||||
|
||||
LemurInstanceProfile is the IAM role you will launch your instance with. It actually has almost no rights. In fact, it should really only be able to use STS to assume into the Lemur role.
|
||||
|
||||
Here are example policies for the LemurInstanceProfile:
|
||||
|
||||
SES-SendEmail
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
{
|
||||
"Version": "2012-10-17",
|
||||
"Statement": [
|
||||
{
|
||||
"Effect": "Allow",
|
||||
"Action": [
|
||||
"ses:SendEmail"
|
||||
],
|
||||
"Resource": "*"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
STS-AssumeRole
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
{
|
||||
"Version": "2012-10-17",
|
||||
"Statement": [
|
||||
{
|
||||
"Effect": "Allow",
|
||||
"Action":
|
||||
"sts:AssumeRole",
|
||||
"Resource": "*"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
|
||||
Next we will create the Lemur IAM role.
|
||||
|
||||
.. note::
|
||||
|
||||
The default IAM role that Lemur assumes into is called `Lemur`, if you need to change this ensure you set `LEMUR_INSTANCE_PROFILE` to your role name in the configuration.
|
||||
|
||||
|
||||
Here is an example policy for Lemur:
|
||||
|
||||
IAM-ServerCertificate
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
{
|
||||
"Statement": [
|
||||
{
|
||||
"Action": [
|
||||
"iam:ListServerCertificates",
|
||||
"iam:UpdateServerCertificate",
|
||||
"iam:GetServerCertificate",
|
||||
"iam:UploadServerCertificate"
|
||||
],
|
||||
"Resource": [
|
||||
"*"
|
||||
],
|
||||
"Effect": "Allow",
|
||||
"Sid": "Stmt1404836868000"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
{
|
||||
"Statement": [
|
||||
{
|
||||
"Action": [
|
||||
"elasticloadbalancing:DescribeInstanceHealth",
|
||||
"elasticloadbalancing:DescribeLoadBalancerAttributes",
|
||||
"elasticloadbalancing:DescribeLoadBalancerPolicyTypes",
|
||||
"elasticloadbalancing:DescribeLoadBalancerPolicies",
|
||||
"elasticloadbalancing:DescribeLoadBalancers",
|
||||
"elasticloadbalancing:DeleteLoadBalancerListeners",
|
||||
"elasticloadbalancing:CreateLoadBalancerListeners"
|
||||
],
|
||||
"Resource": [
|
||||
"*"
|
||||
],
|
||||
"Effect": "Allow",
|
||||
"Sid": "Stmt1404841912000"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
Setting up STS access
|
||||
"""""""""""""""""""""
|
||||
|
||||
Once we have set up our accounts, we need to create a trust relationship so that LemurInstanceProfile can assume the Lemur role.
|
||||
|
||||
In the AWS console, select the Lemur IAM role, open the Trust Relationships tab, and click Edit Trust Relationship.
|
||||
|
||||
Below is an example policy:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
{
|
||||
"Version": "2008-10-17",
|
||||
"Statement": [
|
||||
{
|
||||
"Sid": "",
|
||||
"Effect": "Allow",
|
||||
"Principal": {
|
||||
"AWS": [
|
||||
"arn:aws:iam::<awsaccountnumber>:role/LemurInstanceProfile",
|
||||
]
|
||||
},
|
||||
"Action": "sts:AssumeRole"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
Adding N+1 accounts
|
||||
"""""""""""""""""""
|
||||
|
||||
To add another account we go to the new account and create a new Lemur IAM role with the same policy as above.
|
||||
|
||||
Then we go to the account that Lemur is running in and edit the trust relationship policy.
|
||||
|
||||
An example policy:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
{
|
||||
"Version": "2008-10-17",
|
||||
"Statement": [
|
||||
{
|
||||
"Sid": "",
|
||||
"Effect": "Allow",
|
||||
"Principal": {
|
||||
"AWS": [
|
||||
"arn:aws:iam::<awsaccountnumber>:role/LemurInstanceProfile",
|
||||
"arn:aws:iam::<awsaccountnumber1>:role/LemurInstanceProfile",
|
||||
]
|
||||
},
|
||||
"Action": "sts:AssumeRole"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Setting up SES
|
||||
""""""""""""""
|
||||
|
||||
Lemur has built-in support for sending its certificate notifications via Amazon's Simple Email Service (SES). To force
Lemur to use SES, ensure you are running as the IAM role defined above and that you have followed the steps outlined
in Amazon's documentation `Setting up Amazon SES <http://docs.aws.amazon.com/ses/latest/DeveloperGuide/setting-up-ses.html>`_
|
||||
|
||||
The configuration::
|
||||
|
||||
LEMUR_MAIL = 'lemur.example.com'
|
||||
|
||||
This address will be the sender of all notifications, so ensure that it is verified with AWS.
|
||||
|
||||
SES is the default notification gateway and will be used unless SMTP settings are configured in the application configuration
|
||||
settings.
|
||||
|
||||
PowerDNS ACME Plugin
|
||||
~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The following configuration properties are required to use the PowerDNS ACME Plugin for domain validation.
|
||||
|
||||
|
||||
.. data:: ACME_POWERDNS_DOMAIN
|
||||
:noindex:
|
||||
|
||||
This is the FQDN for the PowerDNS API (without path)
|
||||
|
||||
|
||||
.. data:: ACME_POWERDNS_SERVERID
|
||||
:noindex:
|
||||
|
||||
This is the ServerID attribute of the PowerDNS API Server (i.e. "localhost")
|
||||
|
||||
|
||||
.. data:: ACME_POWERDNS_APIKEYNAME
|
||||
:noindex:
|
||||
|
||||
This is the Key name to use for authentication (i.e. "X-API-Key")
|
||||
|
||||
|
||||
.. data:: ACME_POWERDNS_APIKEY
|
||||
:noindex:
|
||||
|
||||
This is the API Key to use for authentication (i.e. "Password")
|
||||
|
||||
|
||||
.. data:: ACME_POWERDNS_RETRIES
|
||||
:noindex:
|
||||
|
||||
This is the number of times DNS Verification should be attempted (i.e. 20)
|
||||
|
||||
|
||||
.. data:: ACME_POWERDNS_VERIFY
|
||||
:noindex:
|
||||
|
||||
This configures how TLS certificates on the PowerDNS API target are validated. The PowerDNS Plugin depends on the PyPi requests library, which supports the following options for the verify parameter:
|
||||
|
||||
True: Verifies the TLS certificate was issued by a known publicly-trusted CA. (Default)
|
||||
|
||||
False: Disables certificate validation (Not Recommended)
|
||||
|
||||
File/Dir path to CA Bundle: Verifies the TLS certificate was issued by a Certificate Authority in the provided CA bundle.
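
An illustrative configuration stanza (the host, API key and verify value below are placeholders):

.. code-block:: python

    ACME_POWERDNS_DOMAIN = 'https://powerdns.example.com:8081'
    ACME_POWERDNS_SERVERID = 'localhost'
    ACME_POWERDNS_APIKEYNAME = 'X-API-Key'
    ACME_POWERDNS_APIKEY = '<api key>'
    ACME_POWERDNS_RETRIES = 20
    ACME_POWERDNS_VERIFY = True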
|
||||
|
||||
ACME Plugin
|
||||
~~~~~~~~~~~~
|
||||
|
||||
The following configuration properties are optional for the ACME plugin. They allow reusing an existing ACME
|
||||
account. See :ref:`Using a pre-existing ACME account <AcmeAccountReuse>` for more details.
|
||||
|
||||
|
||||
.. data:: ACME_PRIVATE_KEY
|
||||
:noindex:
|
||||
|
||||
This is the private key the account was registered with (in JWK format)
|
||||
|
||||
.. data:: ACME_REGR
|
||||
:noindex:
|
||||
|
||||
This is the registration for the ACME account; the most important part is the uri attribute (in JSON)
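
A sketch of what these two settings look like (the JWK fields and account URI below are placeholders; use the values taken from your existing ACME account):

.. code-block:: python

    ACME_PRIVATE_KEY = '{"kty": "RSA", "n": "<modulus>", "e": "AQAB", "d": "<private exponent>", ...}'
    ACME_REGR = '{"body": {}, "uri": "https://<acme-server>/acme/acct/<account-id>"}'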
|
||||
|
||||
.. _CommandLineInterface:
|
||||
|
||||
Command Line Interface
|
||||
======================
|
||||
|
||||
Lemur installs a command line script under the name ``lemur``. This will allow you to
|
||||
perform most required operations that are unachievable within the web UI.
|
||||
|
||||
If you're using a non-standard configuration location, you'll need to prefix every command with
|
||||
--config (excluding create_config, which is a special case). For example::
|
||||
|
||||
lemur --config=/etc/lemur.conf.py help
|
||||
|
||||
For a list of commands, you can also use ``lemur help``, or ``lemur [command] --help``
|
||||
for help on a specific command.
|
||||
|
||||
.. note:: The script is powered by a library called `Flask-Script <https://github.com/smurfix/flask-script>`_
|
||||
|
||||
Builtin Commands
|
||||
----------------
|
||||
|
||||
All commands default to `~/.lemur/lemur.conf.py` if a configuration is not specified.
|
||||
|
||||
.. data:: create_config
|
||||
|
||||
Creates a default configuration file for Lemur.
|
||||
|
||||
Path defaults to ``~/.lemur/lemur.config.py``
|
||||
|
||||
::
|
||||
|
||||
lemur create_config .
|
||||
|
||||
.. note::
|
||||
This command is a special case and does not depend on the configuration file
|
||||
being set.
|
||||
|
||||
|
||||
.. data:: init
|
||||
|
||||
Initializes the configuration file for Lemur.
|
||||
|
||||
::
|
||||
|
||||
lemur -c /etc/lemur.conf.py init
|
||||
|
||||
|
||||
.. data:: start
|
||||
|
||||
Starts a Lemur service. You can also pass any flag that Gunicorn uses to specify the webserver configuration.
|
||||
|
||||
::
|
||||
|
||||
lemur start -w 6 -b 127.0.0.1:8080
|
||||
|
||||
|
||||
.. data:: db upgrade
|
||||
|
||||
Performs any needed database migrations.
|
||||
|
||||
::
|
||||
|
||||
lemur db upgrade
|
||||
|
||||
|
||||
.. data:: check_revoked
|
||||
|
||||
Traverses every certificate that Lemur is aware of and attempts to understand its validity.
|
||||
It utilizes both OCSP and CRL. If Lemur is unable to come to a conclusion about a certificate's
validity, its status is marked 'unknown'.
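
For example (assuming the command is invoked like the other builtin commands shown here)::

    lemur check_revoked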
|
||||
|
||||
|
||||
.. data:: sync
|
||||
|
||||
Sync attempts to discover certificates in the environment that were not created by Lemur. If you wish to only sync
|
||||
a few sources you can pass a comma delimited list of sources to sync.
|
||||
|
||||
::
|
||||
|
||||
lemur sync -s source1,source2
|
||||
|
||||
|
||||
You can also list the available sources that Lemur can sync.
|
||||
|
||||
::
|
||||
|
||||
lemur sync
|
||||
|
||||
|
||||
.. data:: notify
|
||||
|
||||
Will traverse all current notifications and see if any of them need to be triggered.
|
||||
|
||||
::
|
||||
|
||||
lemur notify
|
||||
|
||||
|
||||
.. data:: acme
|
||||
|
||||
Handles all ACME related tasks, like ACME plugin testing.
|
||||
|
||||
::
|
||||
|
||||
lemur acme
|
||||
|
||||
|
||||
Sub-commands
|
||||
------------
|
||||
|
||||
Lemur includes several sub-commands for interacting with Lemur such as creating new users, creating new roles and even
|
||||
issuing certificates.
|
||||
|
||||
The best way to discover these commands is by using the built in help pages
|
||||
|
||||
::
|
||||
|
||||
lemur --help
|
||||
|
||||
|
||||
and to get help on sub-commands
|
||||
|
||||
::
|
||||
|
||||
lemur certificates --help
|
||||
|
||||
|
||||
|
||||
Upgrading Lemur
|
||||
===============
|
||||
|
||||
To upgrade Lemur to the newest release you will need to ensure you have the latest code and have run any needed
|
||||
database migrations.
|
||||
|
||||
To get the latest code from github run
|
||||
|
||||
::
|
||||
|
||||
cd <lemur-source-directory>
|
||||
git pull -t <version>
|
||||
python setup.py develop
|
||||
|
||||
|
||||
.. note::
|
||||
It's important to grab the latest release by specifying the release tag. These tags denote stable versions of Lemur.
If you want to try the bleeding edge version of Lemur, you can do so by using the master branch.
|
||||
|
||||
|
||||
After you have the latest version of the Lemur code base you must run any needed database migrations. To run migrations
|
||||
|
||||
::
|
||||
|
||||
cd <lemur-source-directory>/lemur
|
||||
lemur db upgrade
|
||||
|
||||
|
||||
This will ensure that any needed tables or columns are created or destroyed.
|
||||
|
||||
.. note::
|
||||
Internally, this uses `Alembic <http://alembic.zzzcomputing.com/en/latest/>`_ to manage database migrations.
|
||||
|
||||
.. note::
|
||||
By default Alembic looks for the `migrations` folder in the current working directory. The migrations folder is
located under `<LEMUR_HOME>/lemur/migrations`. If you are running the lemur command from any location besides
`<LEMUR_HOME>/lemur`, you will need to pass the `-d` flag to specify the absolute file path to the `migrations` folder.
|
||||
|
||||
Plugins
|
||||
=======
|
||||
|
||||
There are several interfaces currently available to extend Lemur. These are a work in
|
||||
progress and the API is not frozen.
|
||||
|
||||
Lemur includes several plugins by default, including extensive support for AWS and VeriSign/Symantec.
|
||||
|
||||
Verisign/Symantec
|
||||
-----------------
|
||||
|
||||
:Authors:
|
||||
Kevin Glisson <kglisson@netflix.com>,
|
||||
Curtis Castrapel <ccastrapel@netflix.com>,
|
||||
Hossein Shafagh <hshafagh@netflix.com>
|
||||
:Type:
|
||||
Issuer
|
||||
:Description:
|
||||
Basic support for the VICE 2.0 API
|
||||
|
||||
|
||||
Cryptography
|
||||
------------
|
||||
|
||||
:Authors:
|
||||
Kevin Glisson <kglisson@netflix.com>,
|
||||
Mikhail Khodorovskiy <mikhail.khodorovskiy@jivesoftware.com>
|
||||
:Type:
|
||||
Issuer
|
||||
:Description:
|
||||
Toy certificate authority that creates self-signed certificate authorities.
|
||||
Allows for the creation of arbitrary authorities and end-entity certificates.
|
||||
This is *not* recommended for production use.
|
||||
|
||||
|
||||
Acme
|
||||
----
|
||||
|
||||
:Authors:
|
||||
Kevin Glisson <kglisson@netflix.com>,
|
||||
Curtis Castrapel <ccastrapel@netflix.com>,
|
||||
Hossein Shafagh <hshafagh@netflix.com>,
|
||||
Mikhail Khodorovskiy <mikhail.khodorovskiy@jivesoftware.com>,
|
||||
Chad Sine <csine@netflix.com>
|
||||
:Type:
|
||||
Issuer
|
||||
:Description:
|
||||
Adds support for the ACME protocol (including LetsEncrypt) with domain validation using several providers.
|
||||
|
||||
|
||||
Atlas
|
||||
-----
|
||||
|
||||
:Authors:
|
||||
Kevin Glisson <kglisson@netflix.com>,
|
||||
Curtis Castrapel <ccastrapel@netflix.com>,
|
||||
Hossein Shafagh <hshafagh@netflix.com>
|
||||
:Type:
|
||||
Metric
|
||||
:Description:
|
||||
Adds basic support for the `Atlas <https://github.com/Netflix/atlas/wiki>`_ telemetry system.
|
||||
|
||||
|
||||
Email
|
||||
-----
|
||||
|
||||
:Authors:
|
||||
Kevin Glisson <kglisson@netflix.com>,
|
||||
Curtis Castrapel <ccastrapel@netflix.com>,
|
||||
Hossein Shafagh <hshafagh@netflix.com>
|
||||
:Type:
|
||||
Notification
|
||||
:Description:
|
||||
Adds support for basic email notifications via SES.
|
||||
|
||||
|
||||
Slack
|
||||
-----
|
||||
|
||||
:Authors:
|
||||
Harm Weites <harm@weites.com>
|
||||
:Type:
|
||||
Notification
|
||||
:Description:
|
||||
Adds support for slack notifications.
|
||||
|
||||
|
||||
AWS
|
||||
----
|
||||
|
||||
:Authors:
|
||||
Kevin Glisson <kglisson@netflix.com>,
|
||||
Curtis Castrapel <ccastrapel@netflix.com>,
|
||||
Hossein Shafagh <hshafagh@netflix.com>
|
||||
:Type:
|
||||
Source
|
||||
:Description:
|
||||
Uses AWS IAM as a source of certificates to manage. Supports a multi-account deployment.
|
||||
|
||||
|
||||
AWS
|
||||
----
|
||||
|
||||
:Authors:
|
||||
Kevin Glisson <kglisson@netflix.com>,
|
||||
Curtis Castrapel <ccastrapel@netflix.com>,
|
||||
Hossein Shafagh <hshafagh@netflix.com>
|
||||
:Type:
|
||||
Destination
|
||||
:Description:
|
||||
Uses AWS IAM as a destination for Lemur-generated certificates. Supports a multi-account deployment.
|
||||
|
||||
|
||||
Kubernetes
|
||||
----------
|
||||
|
||||
:Authors:
|
||||
Mikhail Khodorovskiy <mikhail.khodorovskiy@jivesoftware.com>
|
||||
:Type:
|
||||
Destination
|
||||
:Description:
|
||||
Allows Lemur to upload generated certificates to the Kubernetes certificate store.
|
||||
|
||||
|
||||
Java
|
||||
----
|
||||
|
||||
:Authors:
|
||||
Kevin Glisson <kglisson@netflix.com>
|
||||
:Type:
|
||||
Export
|
||||
:Description:
|
||||
Generates java compatible .jks keystores and truststores from Lemur managed certificates.
|
||||
|
||||
|
||||
Openssl
|
||||
-------
|
||||
|
||||
:Authors:
|
||||
Kevin Glisson <kglisson@netflix.com>
|
||||
:Type:
|
||||
Export
|
||||
:Description:
|
||||
Leverages Openssl to support additional export formats (pkcs12)
|
||||
|
||||
|
||||
CFSSL
|
||||
-----
|
||||
|
||||
:Authors:
|
||||
Charles Hendrie <chad.hendrie@thomsonreuters.com>
|
||||
:Type:
|
||||
Issuer
|
||||
:Description:
|
||||
Basic support for generating certificates from the private certificate authority CFSSL
|
||||
|
||||
Vault
|
||||
-----
|
||||
|
||||
:Authors:
|
||||
Christopher Jolley <chris@alwaysjolley.com>
|
||||
:Type:
|
||||
Source
|
||||
:Description:
|
||||
Source plugin imports certificates from Hashicorp Vault secret store.
|
||||
|
||||
Vault
|
||||
-----
|
||||
|
||||
:Authors:
|
||||
Christopher Jolley <chris@alwaysjolley.com>
|
||||
:Type:
|
||||
Destination
|
||||
:Description:
|
||||
Destination plugin to deploy certificates to Hashicorp Vault secret store.
|
||||
|
||||
|
||||
3rd Party Plugins
|
||||
=================
|
||||
|
||||
The following plugins are available and maintained by members of the Lemur community:
|
||||
|
||||
Digicert
|
||||
--------
|
||||
|
||||
:Authors:
|
||||
Chris Dorros
|
||||
:Type:
|
||||
Issuer
|
||||
:Description:
|
||||
Adds basic support for Digicert.
|
||||
:Links:
|
||||
https://github.com/opendns/lemur-digicert
|
||||
|
||||
|
||||
InfluxDB
|
||||
--------
|
||||
|
||||
:Authors:
|
||||
Titouan Christophe
|
||||
:Type:
|
||||
Metric
|
||||
:Description:
|
||||
Sends key metrics to InfluxDB
|
||||
:Links:
|
||||
https://github.com/titouanc/lemur-influxdb
|
||||
|
||||
Hashicorp Vault
|
||||
---------------
|
||||
|
||||
:Authors:
|
||||
Ron Cohen
|
||||
:Type:
|
||||
Issuer
|
||||
:Description:
|
||||
Adds support for basic Vault PKI secret backend.
|
||||
:Links:
|
||||
https://github.com/RcRonco/lemur_vault
|
||||
|
||||
|
||||
Have an extension that should be listed here? Submit a `pull request <https://github.com/netflix/lemur>`_ and we'll
|
||||
get it added.
|
||||
|
||||
Want to create your own extension? See :doc:`../developer/plugins/index` to get started.
|
||||
|
||||
|
||||
Identity and Access Management
|
||||
==============================
|
||||
|
||||
Lemur uses a Role Based Access Control (RBAC) mechanism to control which users have access to which resources. When a
|
||||
user is first created in Lemur they can be assigned one or more roles. These roles are typically dynamically created
|
||||
depending on an external identity provider (Google, LDAP, etc.), or are hardcoded within Lemur and associated with special
|
||||
meaning.
|
||||
|
||||
Within Lemur there are three main permissions: AdminPermission, CreatorPermission, OwnerPermission. Sub-permissions such
|
||||
as ViewPrivateKeyPermission are compositions of these three main Permissions.
|
||||
|
||||
Let's take a look at how these permissions are used:
|
||||
|
||||
Each `Authority` has a set of roles associated with it. If a user is also associated with the same roles
|
||||
that the `Authority` is associated with, Lemur allows that user to use/view/update that `Authority`.
|
||||
|
||||
This RBAC is also used when determining which users can access which certificate private key. Lemur's current permission
|
||||
structure is set up such that if the user is a `Creator` or `Owner` of a given certificate they are allowed to view that
|
||||
private key. Owners can also be a role name, such that any user with the same role as owner will be allowed to view the
|
||||
private key information.
|
||||
|
||||
These permissions are applied to the user upon login and refreshed on every request.
|
||||
|
||||
.. seealso::
|
||||
|
||||
`Flask-Principal <https://pythonhosted.org/Flask-Principal>`_
|
||||
|
||||
# modindex_common_prefix = []
|
||||
|
||||
# If true, keep warnings as "system message" paragraphs in the built documents.
|
||||
#keep_warnings = False
|
||||
# keep_warnings = False
|
||||
|
||||
|
||||
# -- Options for HTML output ----------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
html_theme = 'default'
|
||||
# on_rtd is whether we are on readthedocs.org, this line of code grabbed from docs.readthedocs.org
|
||||
on_rtd = os.environ.get("READTHEDOCS", None) == "True"
|
||||
|
||||
if not on_rtd: # only import and set the theme if we're building docs locally
|
||||
import sphinx_rtd_theme
|
||||
|
||||
html_theme = "sphinx_rtd_theme"
|
||||
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
#html_theme_options = {}
|
||||
# html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
#html_theme_path = []
|
||||
# html_theme_path = []
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
#html_title = None
|
||||
# html_title = None
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
#html_short_title = None
|
||||
# html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
#html_logo = None
|
||||
# html_logo = None
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
#html_favicon = None
|
||||
# html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
html_static_path = ["_static"]
|
||||
|
||||
# Add any extra paths that contain custom files (such as robots.txt or
|
||||
# .htaccess) here, relative to this directory. These files are copied
|
||||
# directly to the root of the documentation.
|
||||
#html_extra_path = []
|
||||
# html_extra_path = []
|
||||
|
||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||
# using the given strftime format.
|
||||
#html_last_updated_fmt = '%b %d, %Y'
|
||||
# html_last_updated_fmt = '%b %d, %Y'
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
#html_use_smartypants = True
|
||||
# html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
#html_sidebars = {}
|
||||
# html_sidebars = {}
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
#html_additional_pages = {}
|
||||
# html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
#html_domain_indices = True
|
||||
# html_domain_indices = True
|
||||
|
||||
# If false, no index is generated.
|
||||
#html_use_index = True
|
||||
# html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
#html_split_index = False
|
||||
# html_split_index = False
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
#html_show_sourcelink = True
|
||||
# html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
#html_show_sphinx = True
|
||||
# html_show_sphinx = True
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
#html_show_copyright = True
|
||||
# html_show_copyright = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
#html_use_opensearch = ''
|
||||
# html_use_opensearch = ''
|
||||
|
||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
#html_file_suffix = None
|
||||
# html_file_suffix = None
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'lemurdoc'
|
||||
htmlhelp_basename = "lemurdoc"
|
||||
|
||||
|
||||
# -- Options for LaTeX output ---------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#'papersize': 'letterpaper',
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#'preamble': '',
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#'papersize': 'letterpaper',
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#'pointsize': '10pt',
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#'preamble': '',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
('index', 'lemur.tex', u'Lemur Documentation',
|
||||
u'Kevin Glisson', 'manual'),
|
||||
("index", "lemur.tex", u"Lemur Documentation", u"Netflix Security", "manual")
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
#latex_logo = None
|
||||
# latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
#latex_use_parts = False
|
||||
# latex_use_parts = False
|
||||
|
||||
# If true, show page references after internal links.
|
||||
#latex_show_pagerefs = False
|
||||
# latex_show_pagerefs = False
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#latex_show_urls = False
|
||||
# latex_show_urls = False
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#latex_appendices = []
|
||||
# latex_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#latex_domain_indices = True
|
||||
# latex_domain_indices = True
|
||||
|
||||
|
||||
# -- Options for manual page output ---------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
('index', 'Lemur', u'Lemur Documentation',
|
||||
[u'Kevin Glisson'], 1)
|
||||
]
|
||||
man_pages = [("index", "Lemur", u"Lemur Documentation", [u"Netflix Security"], 1)]
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#man_show_urls = False
|
||||
# man_show_urls = False
|
||||
|
||||
|
||||
# -- Options for Texinfo output -------------------------------------------
|
||||
@ -244,19 +253,25 @@ man_pages = [
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
('index', 'Lemur', u'Lemur Documentation',
|
||||
u'Kevin Glisson', 'Lemur', 'SSL Certificate Management',
|
||||
'Miscellaneous'),
|
||||
(
|
||||
"index",
|
||||
"Lemur",
|
||||
u"Lemur Documentation",
|
||||
u"Netflix Security",
|
||||
"Lemur",
|
||||
"SSL Certificate Management",
|
||||
"Miscellaneous",
|
||||
)
|
||||
]
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#texinfo_appendices = []
|
||||
# texinfo_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#texinfo_domain_indices = True
|
||||
# texinfo_domain_indices = True
|
||||
|
||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||
#texinfo_show_urls = 'footnote'
|
||||
# texinfo_show_urls = 'footnote'
|
||||
|
||||
# If true, do not generate a @detailmenu in the "Top" node's menu.
|
||||
#texinfo_no_detailmenu = False
|
||||
# texinfo_no_detailmenu = False
|
||||
|
||||
@ -22,12 +22,18 @@ Once you've got all that, the rest is simple:

    # If you have a fork, you'll want to clone it instead
    git clone git://github.com/netflix/lemur.git

    # Create a python virtualenv
    mkvirtualenv lemur
    # Create and activate python virtualenv from within the lemur repo
    python3 -m venv env
    . env/bin/activate

    # Install doc requirements

    # Make the magic happen
    make dev-docs

    # Make the docs
    cd docs
    make html

Running ``make dev-docs`` will install the basic requirements to get Sphinx running.
@ -48,7 +54,7 @@ of Lemur. You'll want to make sure you have a few things on your local system fi
|
||||
* pip
|
||||
* virtualenv (ideally virtualenvwrapper)
|
||||
* node.js (for npm and building css/javascript)
|
||||
* (Optional) Potgresql
|
||||
+* `PostgreSQL <https://lemur.readthedocs.io/en/latest/quickstart/index.html#setup-postgres>`_
|
||||
|
||||
Once you've got all that, the rest is simple:
|
||||
|
||||
@ -58,7 +64,7 @@ Once you've got all that, the rest is simple:
|
||||
git clone git://github.com/lemur/lemur.git
|
||||
|
||||
# Create a python virtualenv
|
||||
mkvirtualenv lemur
|
||||
python3 -m venv env
|
||||
|
||||
# Make the magic happen
|
||||
make
|
||||
@ -77,6 +83,7 @@ Create a default Lemur configuration just as if this were a production instance:
|
||||
|
||||
::
|
||||
|
||||
lemur create_config
|
||||
lemur init
|
||||
|
||||
You'll likely want to make some changes to the default configuration (we recommend developing against Postgres, for example). Once done, migrate your database using the following command:
|
||||
@ -86,7 +93,13 @@ You'll likely want to make some changes to the default configuration (we recomme
|
||||
lemur upgrade
|
||||
|
||||
|
||||
.. note:: The ``upgrade`` shortcut is simply a shorcut to Alembic's upgrade command.
|
||||
.. note:: The ``upgrade`` shortcut is simply a shortcut to Alembic's upgrade command.
|
||||
|
||||
|
||||
Running tests with Docker and docker-compose
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Alternatively you can use Docker and docker-compose for running the tests with ``docker-compose run test``.
|
||||
|
||||
|
||||
Coding Standards
|
||||
@ -113,6 +126,12 @@ HTML:
|
||||
2 Spaces
|
||||
|
||||
|
||||
Git hooks
|
||||
~~~~~~~~~
|
||||
|
||||
To help developers maintain the above standards, Lemur includes a configuration file for Yelp's `pre-commit <http://pre-commit.com/>`_. This is an optional dependency and is not required in order to contribute to Lemur.
|
||||
|
||||
|
||||
Running the Test Suite
|
||||
----------------------
|
||||
|
||||
@ -122,7 +141,7 @@ The test suite consists of multiple parts, testing both the Python and JavaScrip
|
||||
|
||||
make test
|
||||
|
||||
If you only need to run the Python tests, you can do so with ``make test-python``, as well as ``test-js`` for the JavaScript tests.
|
||||
If you only need to run the Python tests, you can do so with ``make test-python``, as well as ``make test-js`` for the JavaScript tests.
|
||||
|
||||
|
||||
You'll notice that the test suite is structured based on where the code lives, and strongly encourages using the mock library to drive more accurate individual tests.
|
||||
@ -144,8 +163,19 @@ If you've made changes and need to compile them by hand for any reason, you can
|
||||
|
||||
The minified and processed files should be committed alongside the unprocessed changes.
|
||||
|
||||
It's also important to note that Lemur's frontend and API are not tied together. The API does not serve any of the static assets; we rely on nginx or some other file server to serve all of the static assets.
|
||||
During development that means we need an additional server to serve those static files for the GUI.
|
||||
|
||||
This is accomplished with a Gulp task:
|
||||
|
||||
::
|
||||
|
||||
./node_modules/.bin/gulp serve
|
||||
|
||||
The gulp task compiles all the JS/CSS/HTML files and opens the Lemur welcome page in your default browser. Additionally, any changes made to the JS/CSS/HTML will be reloaded in your browser.
|
||||
|
||||
Developing with Flask
|
||||
----------------------
|
||||
---------------------
|
||||
|
||||
Because Lemur is just Flask, you can use all of the standard Flask functionality. The only difference is you'll be accessing commands that would normally go through manage.py using the ``lemur`` CLI helper instead.
|
||||
|
||||
@ -164,7 +194,7 @@ Schema changes should always introduce the new schema in a commit, and then intr
|
||||
|
||||
Removing columns and tables requires a slightly more painful flow, and should resemble the follow multi-commit flow:
|
||||
|
||||
- Remove all references to the column or table (but dont remove the Model itself)
|
||||
- Remove all references to the column or table (but don't remove the Model itself)
|
||||
- Remove the model code
|
||||
- Remove the table or column
|
||||
|
||||
@ -180,19 +210,116 @@ You can see a list of open pull requests (pending changes) by visiting https://g

Pull requests should be against **master** and pass all TravisCI checks

Plugins
=======

Writing a Plugin
================

.. toctree::
    :maxdepth: 1
    :maxdepth: 2

    plugins/index


REST API
========

Lemur's front end is entirely API driven. Any action that you can accomplish via the UI can also be accomplished by the
API. The following documents the API and provides examples of how to make requests to the Lemur API.
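
Before diving into the endpoint reference below, the sketch that follows illustrates the general request pattern: authenticate once to obtain a token, then send it as a bearer token on subsequent calls. The host, the ``/api/1/...`` paths, and the payload fields are illustrative assumptions only; the sections below document the authoritative routes and parameters.

.. code-block:: python

    import requests

    LEMUR_URL = 'https://lemur.example.com'  # hypothetical deployment

    # Hypothetical login call; the real route and payload are documented
    # under "Authentication" below.
    resp = requests.post(
        LEMUR_URL + '/api/1/auth/login',
        json={'username': 'lemur', 'password': 'lemur'},
    )
    token = resp.json()['token']

    # Subsequent requests send the token as a bearer token.
    certs = requests.get(
        LEMUR_URL + '/api/1/certificates',
        headers={'Authorization': 'Bearer ' + token},
    )
    print(certs.json())
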
Authentication
|
||||
--------------
|
||||
|
||||
.. automodule:: lemur.auth.views
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Destinations
|
||||
------------
|
||||
|
||||
.. automodule:: lemur.destinations.views
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Notifications
|
||||
-------------
|
||||
|
||||
.. automodule:: lemur.notifications.views
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Users
|
||||
-----
|
||||
|
||||
.. automodule:: lemur.users.views
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Roles
|
||||
-----
|
||||
|
||||
.. automodule:: lemur.roles.views
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Certificates
|
||||
------------
|
||||
|
||||
.. automodule:: lemur.certificates.views
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Authorities
|
||||
-----------
|
||||
|
||||
.. automodule:: lemur.authorities.views
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Domains
|
||||
-------
|
||||
|
||||
.. automodule:: lemur.domains.views
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Endpoints
|
||||
---------
|
||||
|
||||
.. automodule:: lemur.endpoints.views
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Logs
|
||||
----
|
||||
|
||||
.. automodule:: lemur.logs.views
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Sources
|
||||
-------
|
||||
|
||||
.. automodule:: lemur.sources.views
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Internals
|
||||
=========
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:maxdepth: 2
|
||||
|
||||
internals/lemur
|
||||
|
||||
|
||||
@ -1,15 +1,6 @@
|
||||
certificates Package
|
||||
====================
|
||||
|
||||
:mod:`exceptions` Module
|
||||
------------------------
|
||||
|
||||
.. automodule:: lemur.certificates.exceptions
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`models` Module
|
||||
--------------------
|
||||
|
||||
|
||||
docs/developer/internals/lemur.plugins.lemur_cfssl.rst (new file, 20 lines)

@ -0,0 +1,20 @@
|
||||
lemur_cfssl Package
|
||||
===================
|
||||
|
||||
:mod:`lemur_cfssl` Package
|
||||
--------------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_cfssl
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`plugin` Module
|
||||
--------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_cfssl.plugin
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
@ -10,15 +10,6 @@ lemur_verisign Package
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`constants` Module
|
||||
-----------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_verisign.constants
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`plugin` Module
|
||||
--------------------
|
||||
|
||||
|
||||
@ -27,6 +27,6 @@ Subpackages
|
||||
lemur.plugins.base
|
||||
lemur.plugins.bases
|
||||
lemur.plugins.lemur_aws
|
||||
lemur.plugins.lemur_cloudca
|
||||
lemur.plugins.lemur_cfssl
|
||||
lemur.plugins.lemur_email
|
||||
lemur.plugins.lemur_verisign
|
||||
|
||||
@ -96,5 +96,19 @@ Subpackages
|
||||
lemur.notifications
|
||||
lemur.plugins
|
||||
lemur.roles
|
||||
lemur.status
|
||||
lemur.users
|
||||
lemur.sources
|
||||
lemur.logs
|
||||
lemur.reporting
|
||||
lemur.tests
|
||||
lemur.deployment
|
||||
lemur.endpoints
|
||||
lemur.defaults
|
||||
lemur.plugins.lemur_acme
|
||||
lemur.plugins.lemur_atlas
|
||||
lemur.plugins.lemur_cryptography
|
||||
lemur.plugins.lemur_digicert
|
||||
lemur.plugins.lemur_java
|
||||
lemur.plugins.lemur_kubernetes
|
||||
lemur.plugins.lemur_openssl
|
||||
lemur.plugins.lemur_slack
|
||||
|
||||
@ -1,11 +0,0 @@
|
||||
status Package
|
||||
==============
|
||||
|
||||
:mod:`views` Module
|
||||
-------------------
|
||||
|
||||
.. automodule:: lemur.status.views
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
@ -1,6 +1,3 @@

Writing a Plugin
================

Several interfaces exist for extending Lemur:

* Issuer (lemur.plugins.base.issuer)

@ -28,7 +25,7 @@ if you want to pull the version using pkg_resources (which is what we recommend)

    try:
        VERSION = __import__('pkg_resources') \
            .get_distribution(__name__).version
    except Exception, e:
    except Exception as e:
        VERSION = 'unknown'

Inside of ``plugin.py``, you'll declare your Plugin class::
@ -73,10 +70,18 @@ at multiple plugins within your package::

        },
    )

Once your plugin files are in place and the ``/www/lemur/setup.py`` file has been modified, you can load your plugin into your instance by reinstalling lemur:

::

    (lemur)$ cd /www/lemur
    (lemur)$ pip install -e .

That's it! Users will be able to install your plugin via ``pip install <package name>``.

.. SeeAlso:: For more information about python packages see `Python Packaging <https://packaging.python.org/en/latest/distributing.html>`_

.. SeeAlso:: For an example of a plugin operation outside of Lemur's core, see `lemur-digicert <https://github.com/opendns/lemur-digicert>`_

.. _PluginInterfaces:

Plugin Interfaces

@ -95,10 +100,16 @@ If you have a third party or internal service that creates authorities (EJBCA, e

it can treat any issuer plugin as both a source of creating new certificates as well as new authorities.


The `IssuerPlugin` exposes two functions::
The `IssuerPlugin` exposes four functions::

    def create_certificate(self, options):
    def create_certificate(self, csr, issuer_options):
        # requests.get('a third party')
    def revoke_certificate(self, certificate, comments):
        # requests.put('a third party')
    def get_ordered_certificate(self, order_id):
        # requests.get('already existing certificate')
    def canceled_ordered_certificate(self, pending_cert, **kwargs):
        # requests.put('cancel an order that has yet to be issued')

Lemur will pass a dictionary of all possible options for certificate creation, including a valid CSR and the raw options associated with the request.
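
For orientation, here is a minimal sketch of what an issuer plugin built on this interface might look like. The import path, the metadata attributes, the third-party URL, and the exact return values are assumptions for illustration and not a verbatim copy of a bundled plugin.

.. code-block:: python

    import requests

    from lemur.plugins.bases import IssuerPlugin  # import path is an assumption


    class MyIssuerPlugin(IssuerPlugin):
        title = 'My CA'
        slug = 'my-ca-issuer'
        description = 'Orders certificates from a hypothetical third-party CA.'
        version = '0.1'

        def create_certificate(self, csr, issuer_options):
            # Hypothetical CA API: submit the CSR along with the raw options.
            resp = requests.post('https://ca.example.com/order', json={
                'csr': csr,
                'common_name': issuer_options['common_name'],
            })
            data = resp.json()
            # Return the issued certificate and chain; depending on the Lemur
            # version an external id for the order may also be expected.
            return data['certificate'], data['chain'], data['order_id']

        def revoke_certificate(self, certificate, comments):
            # The certificate model carries identifying fields such as its
            # external id, which a real CA API would need for revocation.
            requests.put('https://ca.example.com/revoke', json={
                'certificate': certificate.external_id,
                'reason': comments,
            })
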
@ -134,15 +145,34 @@ The `IssuerPlugin` doesn't have any options like Destination, Source, and Notifi

any fields you might need to submit a request to a third party. If there are additional options you need
in your plugin feel free to open an issue, or look into adding additional options to issuers yourself.

Asynchronous Certificates
^^^^^^^^^^^^^^^^^^^^^^^^^
An issuer may take some time to actually issue a certificate for an order. In this case, a `PendingCertificate` is returned, which holds information to recreate a `Certificate` object at a later time. Then, `get_ordered_certificate()` should be run periodically via `python manage.py pending_certs fetch -i all` to attempt to retrieve an ordered certificate::

    def get_ordered_certificate(self, order_id):
        # order_id is the external id of the order, not the external_id of the certificate
        # retrieve an order, and check if there is an issued certificate attached to it

`cancel_ordered_certificate()` should be implemented to allow an ordered certificate to be canceled before it is issued::

    def cancel_ordered_certificate(self, pending_cert, **kwargs):
        # pending_cert should contain the necessary information to match an order
        # kwargs can be given to provide information to the issuer for canceling

Destination
-----------

Destination plugins allow you to propagate certificates managed by Lemur to additional third parties. This provides flexibility when
different orchestration systems have their own way of managing certificates or there is an existing system you wish to integrate with Lemur.

By default destination plugins have a private key requirement. If your plugin does not require a certificate's private key mark `requires_key = False`
in the plugin's base class like so::

    class MyDestinationPlugin(DestinationPlugin):
        requires_key = False

The DestinationPlugin requires only one function to be implemented::

    def upload(self, cert, private_key, cert_chain, options, **kwargs):
    def upload(self, name, body, private_key, cert_chain, options, **kwargs):
        # request.post('a third party')
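
Putting the pieces together, a minimal destination plugin might look like the sketch below. The import path, the endpoint URL, and the option entry (including the ``select`` type and its ``available`` list, following the option schema described just below) are illustrative assumptions; see the bundled AWS destination plugin for a real example.

.. code-block:: python

    import requests

    from lemur.plugins.bases import DestinationPlugin  # import path is an assumption


    class MyDestinationPlugin(DestinationPlugin):
        title = 'My Destination'
        slug = 'my-destination'
        description = 'Pushes certificates to a hypothetical third-party store.'
        version = '0.1'

        # This destination only needs public material.
        requires_key = False

        options = [
            {
                'name': 'environment',
                'type': 'select',
                'required': True,
                'available': ['staging', 'production'],
                'helpMessage': 'Environment to upload the certificate to.',
            }
        ]

        def upload(self, name, body, private_key, cert_chain, options, **kwargs):
            # Options arrive as a list of {'name': ..., 'value': ...} dictionaries.
            env = next(o['value'] for o in options if o['name'] == 'environment')

            # Hypothetical endpoint; only the public certificate and chain are sent.
            requests.post('https://certs.example.com/%s/upload' % env, json={
                'name': name,
                'certificate': body,
                'chain': cert_chain,
            })
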
Additionally the DestinationPlugin allows the plugin author to add additional options
|
||||
@ -151,25 +181,25 @@ that can be used to help define sub-destinations.
|
||||
For example, if we look at the aws-destination plugin we can see that it defines an `accountNumber` option::
|
||||
|
||||
options = [
|
||||
{
|
||||
'name': 'accountNumber',
|
||||
'type': 'int',
|
||||
'required': True,
|
||||
'validation': '/^[0-9]{12,12}$/',
|
||||
'helpMessage': 'Must be a valid AWS account number!',
|
||||
}
|
||||
{
|
||||
'name': 'accountNumber',
|
||||
'type': 'int',
|
||||
'required': True,
|
||||
'validation': '/^[0-9]{12,12}$/',
|
||||
'helpMessage': 'Must be a valid AWS account number!',
|
||||
}
|
||||
]
|
||||
|
||||
By defining an `accountNumber` we can make this plugin handle any number of AWS accounts instead of just one.
|
||||
|
||||
The schema for defining plugin options is pretty straightforward:
|
||||
|
||||
- **Name**: name of the variable you wish to present the user, snake case (snakeCase) is preferrred as Lemur
|
||||
- **Name**: name of the variable you wish to present the user, snake case (snakeCase) is preferred as Lemur
|
||||
will parse these and create pretty variable titles
|
||||
- **Type** there are currently four supported variable types
|
||||
- **Int** creates an html integer box for the user to enter integers into
|
||||
- **Str** creates a html text input box
|
||||
- **Boolean** creates a checkbox for the user to signify truithyness
|
||||
- **Boolean** creates a checkbox for the user to signify truthiness
|
||||
- **Select** creates a select box that gives the user a list of options
|
||||
- When used a `available` key must be provided with a list of selectable options
|
||||
- **Required** determines if this option is required, this **must be a boolean value**
|
||||
@ -185,7 +215,7 @@ Notification
------------

Lemur includes the ability to create Email notifications by **default**. These notifications
currently come in the form of expiration noticies. Lemur periodically checks certifications expiration dates and
currently come in the form of expiration notices. Lemur periodically checks certificate expiration dates and
determines if a given certificate is eligible for notification. There are currently only two parameters used to
determine if a certificate is eligible; validity expiration (date the certificate is no longer valid) and the number
of days the current date (UTC) is from that expiration date.

@ -196,10 +226,10 @@ are trying to create a new notification type (audit, failed logins, etc.) this w

You would also then need to build additional code to trigger the new notification type.

The second is `ExpirationNotificationPlugin`; this object inherits from the `NotificationPlugin` object.
You will most likely want to base your plugin on, if you want to add new channels for expiration notices (Slack, Hipcat, Jira, etc.). It adds default options that are required by
by all expiration notifications (interval, unit). This interface expects for the child to define the following function::
You will most likely want to base your plugin on this one if you want to add new channels for expiration notices (Slack, HipChat, Jira, etc.). It adds default options that are required by
all expiration notifications (interval, unit). This interface expects the child to define the following function::

    def send(self):
    def send(self, notification_type, message, targets, options, **kwargs):
        # request.post("some alerting infrastructure")
@ -207,27 +237,48 @@ Source
------

When building Lemur we realized that although it would be nice if every certificate went through Lemur to get issued, this will not
always be the case. Often times there are third parties that will issue certificates on your behalf and these can get deployed
always be the case. Oftentimes there are third parties that will issue certificates on your behalf and these can get deployed
to infrastructure without any interaction with Lemur. In an attempt to combat this and try to track every certificate, Lemur has a notion of
certificate **Sources**. Lemur will contact the source at periodic intervals and attempt to **sync** against the source. This means downloading or discovering any
certificate Lemur does not know about and adding the certificate to it's inventory to be tracked and alerted on.
certificate Lemur does not know about and adding the certificate to its inventory to be tracked and alerted on.

The `SourcePlugin` object has one default option of `pollRate`. This controls the interval, in seconds, at which new certificates are fetched.

.. warning::
    Lemur currently has a very basic polling system of running a cron job every 15min to see which source plugins need to be run. A lock file is generated to guarantee that
    only one sync is running at a time. It also means that the minimum resolution of a source plugin poll rate is effectively 15min. You can always specify a faster cron
    job if you need a higher resolution sync job.


The `SourcePlugin` object requires implementation of one function::

    def get_certificates(self, **kwargs):
    def get_certificates(self, options, **kwargs):
        # request.get("some source of certificates")
.. Note::
    Often times to facilitate code re-use it makes sense put source and destination plugins into one package.
.. note::
    Oftentimes to facilitate code re-use it makes sense to put source and destination plugins into one package.


Export
------

Formats, formats and more formats. That's the current PKI landscape. See the always relevant `xkcd <https://xkcd.com/927/>`_.
Thankfully Lemur supports the ability to output your certificates into whatever format you want. This integration comes by way
of Export plugins. Support is still new and evolving; the goal of these plugins is to return raw data in a new format that
can then be used by any number of applications. Included in Lemur is the `JavaExportPlugin` which currently supports generating
a Java Key Store (JKS) file for use in Java based applications.


The `ExportPlugin` object requires the implementation of one function::

    def export(self, body, chain, key, options, **kwargs):
        # sys.call('openssl hokuspocus')
        # return "extension", passphrase, raw
.. note::
|
||||
Support of various formats sometimes relies on external tools system calls. Always be mindful of sanitizing any input to these calls.
|
||||
|
||||
|
||||
Testing
|
||||
@ -246,9 +297,9 @@ Augment your setup.py to ensure at least the following:
|
||||
|
||||
setup(
|
||||
# ...
|
||||
install_requires=[
|
||||
install_requires=[
|
||||
'lemur',
|
||||
]
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
@ -259,11 +310,7 @@ The ``conftest.py`` file is our main entry-point for py.test. We need to configu
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
pytest_plugins = [
|
||||
'lemur.utils.pytest'
|
||||
]
|
||||
from lemur.tests.conftest import * # noqa
|
||||
|
||||
|
||||
Test Cases
|
||||
@ -273,14 +320,18 @@ You can now inherit from Lemur's core test classes. These are Django-based and e
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# test_myextension.py
|
||||
from __future__ import absolute_import
|
||||
import pytest
|
||||
from lemur.tests.vectors import INTERNAL_CERTIFICATE_A_STR, INTERNAL_PRIVATE_KEY_A_STR
|
||||
|
||||
from lemur.testutils import TestCase
|
||||
def test_export_keystore(app):
|
||||
from lemur.plugins.base import plugins
|
||||
p = plugins.get('java-keystore-jks')
|
||||
options = [{'name': 'passphrase', 'value': 'test1234'}]
|
||||
with pytest.raises(Exception):
|
||||
p.export(INTERNAL_CERTIFICATE_A_STR, "", "", options)
|
||||
|
||||
class MyExtensionTest(TestCase):
|
||||
def test_simple(self):
|
||||
assert 1 != 2
|
||||
raw = p.export(INTERNAL_CERTIFICATE_A_STR, "", INTERNAL_PRIVATE_KEY_A_STR, options)
|
||||
assert raw != b""
|
||||
|
||||
|
||||
Running Tests
|
||||
@ -292,13 +343,14 @@ Running tests follows the py.test standard. As long as your test files and metho
|
||||
|
||||
$ py.test -v
|
||||
============================== test session starts ==============================
|
||||
platform darwin -- Python 2.7.9 -- py-1.4.26 -- pytest-2.6.4/python2.7
|
||||
plugins: django
|
||||
collected 1 items
|
||||
platform darwin -- Python 2.7.10, pytest-2.8.5, py-1.4.30, pluggy-0.3.1
|
||||
cachedir: .cache
|
||||
plugins: flask-0.10.0
|
||||
collected 346 items
|
||||
|
||||
tests/test_myextension.py::MyExtensionTest::test_simple PASSED
|
||||
lemur/plugins/lemur_acme/tests/test_acme.py::test_get_certificates PASSED
|
||||
|
||||
=========================== 1 passed in 0.35 seconds ============================
|
||||
|
||||
|
||||
.. SeeAlso:: Lemur bundles several plugins that use the same interfaces mentioned above. View the source: # TODO
|
||||
.. SeeAlso:: Lemur bundles several plugins that use the same interfaces mentioned above.
|
||||
|
||||
@ -1,66 +0,0 @@
|
||||
Lemur's front end is entirely API driven. Any action that you can accomplish via the UI can also be accomplished by the
|
||||
UI. The following is documents and provides examples on how to make requests to the Lemur API.
|
||||
|
||||
Authentication
|
||||
--------------
|
||||
|
||||
.. automodule:: lemur.auth.views
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Destinations
|
||||
------------
|
||||
|
||||
.. automodule:: lemur.destinations.views
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Notifications
|
||||
-------------
|
||||
|
||||
.. automodule:: lemur.notifications.views
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Users
|
||||
-----
|
||||
|
||||
.. automodule:: lemur.users.views
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Roles
|
||||
-----
|
||||
|
||||
.. automodule:: lemur.roles.views
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Certificates
|
||||
------------
|
||||
|
||||
.. automodule:: lemur.certificates.views
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Authorities
|
||||
-----------
|
||||
|
||||
.. automodule:: lemur.authorities.views
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Domains
|
||||
-------
|
||||
|
||||
.. automodule:: lemur.domains.views
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
docs/doing-a-release.rst (new file, 53 lines)

@ -0,0 +1,53 @@
|
||||
Doing a release
|
||||
===============
|
||||
|
||||
Doing a release of ``lemur`` requires a few steps.
|
||||
|
||||
Bumping the version number
|
||||
--------------------------
|
||||
|
||||
The next step in doing a release is bumping the version number in the
|
||||
software.
|
||||
|
||||
* Update the version number in ``lemur/__about__.py``.
|
||||
* Set the release date in the :doc:`/changelog`.
|
||||
* Do a commit indicating this.
|
||||
* Send a pull request with this.
|
||||
* Wait for it to be merged.
|
||||
|
||||
Performing the release
|
||||
----------------------
|
||||
|
||||
The commit that merged the version number bump is now the official release
|
||||
commit for this release. You will need to have ``gpg`` installed and a ``gpg``
|
||||
key in order to do a release. Once this has happened:
|
||||
|
||||
* Run ``invoke release {version}``.
|
||||
|
||||
The release should now be available on PyPI and a tag should be available in
|
||||
the repository.
|
||||
|
||||
Verifying the release
|
||||
---------------------
|
||||
|
||||
You should verify that ``pip install lemur`` works correctly:
|
||||
|
||||
.. code-block:: pycon
|
||||
|
||||
>>> import lemur
|
||||
>>> lemur.__version__
|
||||
'...'
|
||||
|
||||
Verify that this is the version you just released.
|
||||
|
||||
Post-release tasks
|
||||
------------------
|
||||
|
||||
* Update the version number to the next major (e.g. ``0.5.dev1``) in
|
||||
``lemur/__about__.py`` and
|
||||
* Add new :doc:`/changelog` entry with next version and note that it is under
|
||||
active development
|
||||
* Send a pull request with these items
|
||||
* Check for any outstanding code undergoing a deprecation cycle by looking in
|
||||
``lemur.utils`` for ``DeprecatedIn**`` definitions. If any exist open
|
||||
a ticket to increment them for the next release.
|
||||
docs/faq.rst (27 changed lines)
@ -4,9 +4,9 @@ Frequently Asked Questions
|
||||
Common Problems
|
||||
---------------
|
||||
|
||||
In my startup logs I see *'Aborting... Lemur cannot locate db encryption key, is LEMUR_ENCRYPTION_KEY set?'*
|
||||
You likely have not correctly configured **LEMUR_ENCRYPTION_KEY**. See
|
||||
:doc:`administration/index` for more information.
|
||||
In my startup logs I see *'Aborting... Lemur cannot locate db encryption key, is LEMUR_ENCRYPTION_KEYS set?'*
|
||||
You likely have not correctly configured **LEMUR_ENCRYPTION_KEYS**. See
|
||||
:doc:`administration` for more information.
|
||||
|
||||
|
||||
I am seeing Lemur's javascript load in my browser but not the CSS.
|
||||
@ -14,6 +14,27 @@ I am seeing Lemur's javascript load in my browser but not the CSS.
|
||||
:doc:`production/index` for example configurations.
|
||||
|
||||
|
||||
After installing Lemur I am unable to login
|
||||
Ensure that you are trying to login with the credentials you entered during `lemur init`. These are separate
|
||||
from the postgres database credentials.
|
||||
|
||||
|
||||
Running 'lemur db upgrade' seems stuck.
|
||||
Most likely, the upgrade is stuck because an existing query on the database is holding onto a lock that the
|
||||
migration needs.
|
||||
|
||||
To resolve, login to your lemur database and run:
|
||||
|
||||
SELECT * FROM pg_locks l INNER JOIN pg_stat_activity s ON (l.pid = s.pid) WHERE waiting AND NOT granted;
|
||||
|
||||
This will give you a list of queries that are currently waiting to be executed. From there attempt to identify the PID
|
||||
of the query blocking the migration. Once found execute:
|
||||
|
||||
select pg_terminate_backend(<blocking-pid>);
|
||||
|
||||
See `<http://stackoverflow.com/questions/22896496/alembic-migration-stuck-with-postgresql>`_ for more.
|
||||
|
||||
|
||||
How do I
|
||||
--------
|
||||
|
||||
|
||||
@ -1,261 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
sphinx-autopackage-script
|
||||
|
||||
This script parses a directory tree looking for python modules and packages and
|
||||
creates ReST files appropriately to create code documentation with Sphinx.
|
||||
It also creates a modules index (named modules.<suffix>).
|
||||
"""
|
||||
|
||||
# Copyright 2008 Société des arts technologiques (SAT), http://www.sat.qc.ca/
|
||||
# Copyright 2010 Thomas Waldmann <tw AT waldmann-edv DOT de>
|
||||
# All rights reserved.
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
import os
|
||||
import optparse
|
||||
|
||||
|
||||
# automodule options
|
||||
OPTIONS = ['members',
|
||||
'undoc-members',
|
||||
# 'inherited-members', # disabled because there's a bug in sphinx
|
||||
'show-inheritance',
|
||||
]
|
||||
|
||||
INIT = '__init__.py'
|
||||
|
||||
def makename(package, module):
|
||||
"""Join package and module with a dot."""
|
||||
# Both package and module can be None/empty.
|
||||
if package:
|
||||
name = package
|
||||
if module:
|
||||
name += '.' + module
|
||||
else:
|
||||
name = module
|
||||
return name
|
||||
|
||||
def write_file(name, text, opts):
|
||||
"""Write the output file for module/package <name>."""
|
||||
if opts.dryrun:
|
||||
return
|
||||
fname = os.path.join(opts.destdir, "%s.%s" % (name, opts.suffix))
|
||||
if not opts.force and os.path.isfile(fname):
|
||||
print 'File %s already exists, skipping.' % fname
|
||||
else:
|
||||
print 'Creating file %s.' % fname
|
||||
f = open(fname, 'w')
|
||||
f.write(text)
|
||||
f.close()
|
||||
|
||||
def format_heading(level, text):
|
||||
"""Create a heading of <level> [1, 2 or 3 supported]."""
|
||||
underlining = ['=', '-', '~', ][level-1] * len(text)
|
||||
return '%s\n%s\n\n' % (text, underlining)
|
||||
|
||||
def format_directive(module, package=None):
|
||||
"""Create the automodule directive and add the options."""
|
||||
directive = '.. automodule:: %s\n' % makename(package, module)
|
||||
for option in OPTIONS:
|
||||
directive += ' :%s:\n' % option
|
||||
return directive
|
||||
|
||||
def create_module_file(package, module, opts):
|
||||
"""Build the text of the file and write the file."""
|
||||
text = format_heading(1, '%s Module' % module)
|
||||
text += format_heading(2, ':mod:`%s` Module' % module)
|
||||
text += format_directive(module, package)
|
||||
write_file(makename(package, module), text, opts)
|
||||
|
||||
def create_package_file(root, master_package, subroot, py_files, opts, subs):
|
||||
"""Build the text of the file and write the file."""
|
||||
package = os.path.split(root)[-1]
|
||||
text = format_heading(1, '%s Package' % package)
|
||||
# add each package's module
|
||||
for py_file in py_files:
|
||||
if shall_skip(os.path.join(root, py_file)):
|
||||
continue
|
||||
is_package = py_file == INIT
|
||||
py_file = os.path.splitext(py_file)[0]
|
||||
py_path = makename(subroot, py_file)
|
||||
if is_package:
|
||||
heading = ':mod:`%s` Package' % package
|
||||
else:
|
||||
heading = ':mod:`%s` Module' % py_file
|
||||
text += format_heading(2, heading)
|
||||
text += format_directive(is_package and subroot or py_path, master_package)
|
||||
text += '\n'
|
||||
|
||||
# build a list of directories that are packages (they contain an INIT file)
|
||||
subs = [sub for sub in subs if os.path.isfile(os.path.join(root, sub, INIT))]
|
||||
# if there are some package directories, add a TOC for theses subpackages
|
||||
if subs:
|
||||
text += format_heading(2, 'Subpackages')
|
||||
text += '.. toctree::\n\n'
|
||||
for sub in subs:
|
||||
text += ' %s.%s\n' % (makename(master_package, subroot), sub)
|
||||
text += '\n'
|
||||
|
||||
write_file(makename(master_package, subroot), text, opts)
|
||||
|
||||
def create_modules_toc_file(master_package, modules, opts, name='modules'):
|
||||
"""
|
||||
Create the module's index.
|
||||
"""
|
||||
text = format_heading(1, '%s Modules' % opts.header)
|
||||
text += '.. toctree::\n'
|
||||
text += ' :maxdepth: %s\n\n' % opts.maxdepth
|
||||
|
||||
modules.sort()
|
||||
prev_module = ''
|
||||
for module in modules:
|
||||
# look if the module is a subpackage and, if yes, ignore it
|
||||
if module.startswith(prev_module + '.'):
|
||||
continue
|
||||
prev_module = module
|
||||
text += ' %s\n' % module
|
||||
|
||||
write_file(name, text, opts)
|
||||
|
||||
def shall_skip(module):
|
||||
"""
|
||||
Check if we want to skip this module.
|
||||
"""
|
||||
# skip it, if there is nothing (or just \n or \r\n) in the file
|
||||
return os.path.getsize(module) < 3
|
||||
|
||||
def recurse_tree(path, excludes, opts):
|
||||
"""
|
||||
Look for every file in the directory tree and create the corresponding
|
||||
ReST files.
|
||||
"""
|
||||
# use absolute path for root, as relative paths like '../../foo' cause
|
||||
# 'if "/." in root ...' to filter out *all* modules otherwise
|
||||
path = os.path.abspath(path)
|
||||
# check if the base directory is a package and get is name
|
||||
if INIT in os.listdir(path):
|
||||
package_name = path.split(os.path.sep)[-1]
|
||||
else:
|
||||
package_name = None
|
||||
|
||||
toc = []
|
||||
tree = os.walk(path, False)
|
||||
for root, subs, files in tree:
|
||||
# keep only the Python script files
|
||||
py_files = sorted([f for f in files if os.path.splitext(f)[1] == '.py'])
|
||||
if INIT in py_files:
|
||||
py_files.remove(INIT)
|
||||
py_files.insert(0, INIT)
|
||||
# remove hidden ('.') and private ('_') directories
|
||||
subs = sorted([sub for sub in subs if sub[0] not in ['.', '_']])
|
||||
# check if there are valid files to process
|
||||
# TODO: could add check for windows hidden files
|
||||
if "/." in root or "/_" in root \
|
||||
or not py_files \
|
||||
or is_excluded(root, excludes):
|
||||
continue
|
||||
if INIT in py_files:
|
||||
# we are in package ...
|
||||
if (# ... with subpackage(s)
|
||||
subs
|
||||
or
|
||||
# ... with some module(s)
|
||||
len(py_files) > 1
|
||||
or
|
||||
# ... with a not-to-be-skipped INIT file
|
||||
not shall_skip(os.path.join(root, INIT))
|
||||
):
|
||||
subroot = root[len(path):].lstrip(os.path.sep).replace(os.path.sep, '.')
|
||||
create_package_file(root, package_name, subroot, py_files, opts, subs)
|
||||
toc.append(makename(package_name, subroot))
|
||||
elif root == path:
|
||||
# if we are at the root level, we don't require it to be a package
|
||||
for py_file in py_files:
|
||||
if not shall_skip(os.path.join(path, py_file)):
|
||||
module = os.path.splitext(py_file)[0]
|
||||
create_module_file(package_name, module, opts)
|
||||
toc.append(makename(package_name, module))
|
||||
|
||||
# create the module's index
|
||||
if not opts.notoc:
|
||||
create_modules_toc_file(package_name, toc, opts)
|
||||
|
||||
def normalize_excludes(rootpath, excludes):
|
||||
"""
|
||||
Normalize the excluded directory list:
|
||||
* must be either an absolute path or start with rootpath,
|
||||
* otherwise it is joined with rootpath
|
||||
* with trailing slash
|
||||
"""
|
||||
sep = os.path.sep
|
||||
f_excludes = []
|
||||
for exclude in excludes:
|
||||
if not os.path.isabs(exclude) and not exclude.startswith(rootpath):
|
||||
exclude = os.path.join(rootpath, exclude)
|
||||
if not exclude.endswith(sep):
|
||||
exclude += sep
|
||||
f_excludes.append(exclude)
|
||||
return f_excludes
|
||||
|
||||
def is_excluded(root, excludes):
|
||||
"""
|
||||
Check if the directory is in the exclude list.
|
||||
|
||||
Note: by having trailing slashes, we avoid common prefix issues, like
|
||||
e.g. an exlude "foo" also accidentally excluding "foobar".
|
||||
"""
|
||||
sep = os.path.sep
|
||||
if not root.endswith(sep):
|
||||
root += sep
|
||||
for exclude in excludes:
|
||||
if root.startswith(exclude):
|
||||
return True
|
||||
return False
|
||||
|
||||
def main():
|
||||
"""
|
||||
Parse and check the command line arguments.
|
||||
"""
|
||||
parser = optparse.OptionParser(usage="""usage: %prog [options] <package path> [exclude paths, ...]
|
||||
|
||||
Note: By default this script will not overwrite already created files.""")
|
||||
parser.add_option("-n", "--doc-header", action="store", dest="header", help="Documentation Header (default=Project)", default="Project")
|
||||
parser.add_option("-d", "--dest-dir", action="store", dest="destdir", help="Output destination directory", default="")
|
||||
parser.add_option("-s", "--suffix", action="store", dest="suffix", help="module suffix (default=txt)", default="txt")
|
||||
parser.add_option("-m", "--maxdepth", action="store", dest="maxdepth", help="Maximum depth of submodules to show in the TOC (default=4)", type="int", default=4)
|
||||
parser.add_option("-r", "--dry-run", action="store_true", dest="dryrun", help="Run the script without creating the files")
|
||||
parser.add_option("-f", "--force", action="store_true", dest="force", help="Overwrite all the files")
|
||||
parser.add_option("-t", "--no-toc", action="store_true", dest="notoc", help="Don't create the table of content file")
|
||||
(opts, args) = parser.parse_args()
|
||||
if not args:
|
||||
parser.error("package path is required.")
|
||||
else:
|
||||
rootpath, excludes = args[0], args[1:]
|
||||
if os.path.isdir(rootpath):
|
||||
# check if the output destination is a valid directory
|
||||
if opts.destdir and os.path.isdir(opts.destdir):
|
||||
excludes = normalize_excludes(rootpath, excludes)
|
||||
recurse_tree(rootpath, excludes, opts)
|
||||
else:
|
||||
print '%s is not a valid output destination directory.' % opts.destdir
|
||||
else:
|
||||
print '%s is not a valid directory.' % rootpath
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@ -18,7 +18,7 @@ that Lemur can then manage.
|
||||
|
||||
.. figure:: create_authority.png
|
||||
|
||||
Enter a authority name and short description about the authority. Enter an owner,
|
||||
Enter an authority name and short description about the authority. Enter an owner,
|
||||
and certificate common name. Depending on the authority and the authority/issuer plugin
|
||||
these values may or may not be used.
|
||||
|
||||
@ -37,7 +37,7 @@ Create a New Certificate
|
||||
|
||||
.. figure:: create_certificate.png
|
||||
|
||||
Enter a owner, short description and the authority you wish to issue this certificate.
|
||||
Enter an owner, short description and the authority you wish to issue this certificate.
|
||||
Enter a common name into the certificate, if no validity range is selected two years is
|
||||
the default.
|
||||
|
||||
@ -56,7 +56,7 @@ Import an Existing Certificate
|
||||
|
||||
.. figure:: upload_certificate.png
|
||||
|
||||
Enter a owner, short description and public certificate. If there are intermediates and private keys
|
||||
Enter an owner, short description and public certificate. If there are intermediates and private keys
|
||||
Lemur will track them just as it does if the certificate were created through Lemur. Lemur generates
|
||||
a certificate name but you can override that by passing a value to the `Custom Name` field.
|
||||
|
||||
|
||||
@ -27,8 +27,7 @@ Administration
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
administration/index
|
||||
plugins/index
|
||||
administration
|
||||
|
||||
Developers
|
||||
----------
|
||||
@ -38,17 +37,24 @@ Developers
|
||||
|
||||
developer/index
|
||||
|
||||
|
||||
REST API
|
||||
Security
|
||||
--------
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
developer/rest
|
||||
security
|
||||
|
||||
Doing a Release
|
||||
---------------
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
doing-a-release
|
||||
|
||||
FAQ
|
||||
----
|
||||
---
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
@ -1,20 +0,0 @@
|
||||
Plugins
|
||||
=======
|
||||
|
||||
There are several interfaces currently available to extend Lemur. These are a work in
|
||||
progress and the API is not frozen.
|
||||
|
||||
Bundled Plugins
|
||||
---------------
|
||||
|
||||
Lemur includes several plugins by default. Including extensive support for AWS, VeriSign/Symantec and CloudCA services.
|
||||
|
||||
3rd Party Extensions
|
||||
--------------------
|
||||
|
||||
The following extensions are available and maintained by members of the Lemur community:
|
||||
|
||||
Have an extension that should be listed here? Submit a `pull request <https://github.com/netflix/lemur>`_ and we'll
|
||||
get it added.
|
||||
|
||||
Want to create your own extension? See :doc:`../developer/plugins/index` to get started.
|
||||
docs/production/create_dns_provider.png (new binary file, 86 KiB; not shown)
@ -21,7 +21,7 @@ Credential Management
|
||||
Lemur often contains credentials such as mutual TLS keys or API tokens that are used to communicate with third party resources and for encrypting stored secrets. Lemur comes with the ability
|
||||
to automatically encrypt these keys such that your keys will not be in clear text.
|
||||
|
||||
The keys are located within lemur/keys and broken down by environment
|
||||
The keys are located within lemur/keys and broken down by environment.
|
||||
|
||||
To utilize this ability use the following commands:
|
||||
|
||||
@ -37,20 +37,22 @@ Entropy
|
||||
-------
|
||||
|
||||
Lemur generates private keys for the certificates it creates. This means that it is vitally important that Lemur has enough entropy to draw from. To generate private keys Lemur uses the python library `Cryptography <https://cryptography.io>`_. In turn Cryptography uses OpenSSL bindings to generate
|
||||
keys just like you might from the OpenSSL command line. OpenSSL draws it's initial entropy from system during startup and uses PRNGs to generate a stream of random bytes (as output by /dev/urandom) whenever it needs to do a cryptographic operation.
|
||||
keys just like you might from the OpenSSL command line. OpenSSL draws its initial entropy from the system during startup and uses PRNGs to generate a stream of random bytes (as output by /dev/urandom) whenever it needs to do a cryptographic operation.
|
||||
|
||||
What does all this mean? Well in order for the keys
|
||||
that Lemur generates to be strong, the system needs to interact with the outside world. This is typically accomplished through the system's hardware (thermal, sound, video user-input, etc.) since the physical world is much more "random" than the computer world.
|
||||
|
||||
If you are running Lemur on its own server with its own hardware "bare metal" then the entropy of the system is typically "good enough" for generating keys. If however you are using an VM on shared hardware there is a potential that your initial seed data (data that was initially
|
||||
fed to the PRNG) is not very good. What's more VMs have been known to be unable to inject more entropy into the system once it has been started. This is because there is typically very little interaction with the server once it has been started.
|
||||
If you are running Lemur on its own server with its own hardware "bare metal" then the entropy of the system is typically "good enough" for generating keys. If however you are using a VM on shared hardware there is a potential that your initial seed data (data that was initially
|
||||
fed to the PRNG) is not very good. What's more, VMs have been known to be unable to inject more entropy into the system once it has been started. This is because there is typically very little interaction with the server once it has been started.
|
||||
|
||||
The amount of effort you wish to expend ensuring that Lemur has good entropy to draw from is up to your specific risk tolerance and how Lemur is configured.
|
||||
|
||||
If you wish to generate more entropy for your system we would suggest you take a look at the following resources:
|
||||
|
||||
- `WES-entropy-client <https://github.com/WhitewoodCrypto/WES-entropy-client>`_
|
||||
- `haveaged <http://www.issihosts.com/haveged/>`_
|
||||
- `WES-entropy-client <https://github.com/Virginian/WES-entropy-client>`_
|
||||
- `haveged <http://www.issihosts.com/haveged/>`_
|
||||
|
||||
The original *WES-entropy-client* repository by WhitewoodCrypto was removed; the link now points to a fork of it.
|
||||
|
||||
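If you want a rough, non-authoritative check of how much entropy the kernel currently has available (for example before and after installing one of the tools above), the following works on most Linux systems; note that the reported pool size differs between kernel versions::

    # value is in bits; a persistently tiny value suggests the system is starved for entropy
    cat /proc/sys/kernel/random/entropy_avail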
For additional information about OpenSSL entropy issues:
|
||||
|
||||
@ -72,7 +74,7 @@ Nginx is a very popular choice to serve a Python project:
|
||||
Nginx doesn't run any Python process; it only serves requests from outside to
|
||||
the Python server.
|
||||
|
||||
Therefor there are two steps:
|
||||
Therefore, there are two steps:
|
||||
|
||||
- Run the Python process.
|
||||
- Run Nginx.
|
||||
@ -90,7 +92,7 @@ You must create a Nginx configuration file for Lemur. On GNU/Linux, they usually
|
||||
go into /etc/nginx/conf.d/. Name it lemur.conf.
|
||||
|
||||
`proxy_pass` just passes the external request to the Python process.
|
||||
The port much match the one used by the 0bin process of course.
|
||||
The port must match the one used by the Lemur process of course.
|
||||
|
||||
You can make some adjustments to get a better user experience::
|
||||
|
||||
@ -110,7 +112,7 @@ You can make some adjustments to get a better user experience::
|
||||
error_log /var/log/nginx/log/lemur.error.log;
|
||||
|
||||
location /api {
|
||||
proxy_pass http://127.0.0.1:5000;
|
||||
proxy_pass http://127.0.0.1:8000;
|
||||
proxy_next_upstream error timeout invalid_header http_500 http_502 http_503 http_504;
|
||||
proxy_redirect off;
|
||||
proxy_buffering off;
|
||||
@ -176,7 +178,7 @@ sensitive nature of Lemur and what it controls makes this essential. This is a s
|
||||
resolver <IP DNS resolver>;
|
||||
|
||||
location /api {
|
||||
proxy_pass http://127.0.0.1:5000;
|
||||
proxy_pass http://127.0.0.1:8000;
|
||||
proxy_next_upstream error timeout invalid_header http_500 http_502 http_503 http_504;
|
||||
proxy_redirect off;
|
||||
proxy_buffering off;
|
||||
@ -217,13 +219,30 @@ An example apache config::
|
||||
# HSTS (mod_headers is required) (15768000 seconds = 6 months)
|
||||
Header always set Strict-Transport-Security "max-age=15768000"
|
||||
...
|
||||
|
||||
# Set the lemur DocumentRoot to static/dist
|
||||
DocumentRoot /www/lemur/lemur/static/dist
|
||||
|
||||
# Uncomment to force http 1.0 connections to proxy
|
||||
# SetEnv force-proxy-request-1.0 1
|
||||
|
||||
#Don't keep proxy connections alive
|
||||
SetEnv proxy-nokeepalive 1
|
||||
|
||||
# Only need to do reverse proxy
|
||||
ProxyRequests Off
|
||||
|
||||
# Proxy requests to the api to the lemur service (and sanitize redirects from it)
|
||||
ProxyPass "/api" "http://127.0.0.1:8000/api"
|
||||
ProxyPassReverse "/api" "http://127.0.0.1:8000/api"
|
||||
|
||||
</VirtualHost>
|
||||
|
||||
Also included in the configurations above are several best practices when it comes to deploying TLS. Things like enabling
|
||||
HSTS and disabling vulnerable ciphers are all good ideas when it comes to deploying Lemur into a production environment.
|
||||
|
||||
.. note::
|
||||
This is a rather incomplete apache config for running Lemur (needs mod_wsgi etc.,), if you have a working apache config please let us know!
|
||||
This is a rather incomplete apache config for running Lemur (needs mod_wsgi etc.), if you have a working apache config please let us know!
|
||||
|
||||
.. seealso::
|
||||
`Mozilla SSL Configuration Generator <https://mozilla.github.io/server-side-tls/ssl-config-generator/>`_
|
||||
@ -240,10 +259,10 @@ most of the time), but here is a quick overview on how to use it.
|
||||
Create a configuration file named supervisor.ini::
|
||||
|
||||
[unix_http_server]
|
||||
file=/tmp/supervisor.sock;
|
||||
file=/tmp/supervisor.sock
|
||||
|
||||
[supervisorctl]
|
||||
serverurl=unix:///tmp/supervisor.sock;
|
||||
serverurl=unix:///tmp/supervisor.sock
|
||||
|
||||
[rpcinterface:supervisor]
|
||||
supervisor.rpcinterface_factory=supervisor.rpcinterface:make_main_rpcinterface
|
||||
@ -257,13 +276,12 @@ Create a configuration file named supervisor.ini::
|
||||
nodaemon=false
|
||||
minfds=1024
|
||||
minprocs=200
|
||||
user=lemur
|
||||
|
||||
[program:lemur]
|
||||
command=python /path/to/lemur/manage.py manage.py start
|
||||
|
||||
directory=/path/to/lemur/
|
||||
environment=PYTHONPATH='/path/to/lemur/'
|
||||
environment=PYTHONPATH='/path/to/lemur/',LEMUR_CONF='/home/lemur/.lemur/lemur.conf.py'
|
||||
user=lemur
|
||||
autostart=true
|
||||
autorestart=true
|
||||
@ -271,7 +289,7 @@ Create a configuration file named supervisor.ini::
|
||||
The first four entries are just boilerplate to get you started; you can copy
|
||||
them verbatim.
|
||||
|
||||
The last one define one (you can have many) process supervisor should manage.
|
||||
The last one defines one (you can have many) process supervisor should manage.
|
||||
|
||||
It means it will run the command::
|
||||
|
||||
@ -293,6 +311,247 @@ Then you can manage the process by running::
|
||||
|
||||
supervisorctl -c /path/to/supervisor.ini
|
||||
|
||||
It will start a shell from were you can start/stop/restart the service
|
||||
It will start a shell from which you can start/stop/restart the service.
|
||||
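For illustration, a few of the sub-commands available in that shell (``lemur`` being the program name from the ``[program:lemur]`` section above)::

    supervisor> status
    supervisor> restart lemur
    supervisor> tail lemur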
|
||||
You can read all errors that might occurs from /tmp/lemur.log.
|
||||
You can read all errors that might occur from /tmp/lemur.log.
|
||||
|
||||
.. _PeriodicTasks:
|
||||
|
||||
Periodic Tasks
|
||||
==============
|
||||
|
||||
Lemur contains a few tasks that are run on a scheduled basis; currently the recommended way to run these tasks is to create
|
||||
celery tasks or cron jobs that run these commands.
|
||||
|
||||
There are currently three commands that could/should be run on a periodic basis:
|
||||
|
||||
- `notify`
|
||||
- `check_revoked`
|
||||
- `sync`
|
||||
|
||||
If you are using LetsEncrypt, you must also run the following:
|
||||
|
||||
- `fetch_all_pending_acme_certs`
|
||||
- `remove_old_acme_certs`
|
||||
|
||||
How often you run these commands is largely up to the user. `notify` and `check_revoked` are typically run at least once a day.
|
||||
`sync` is typically run every 15 minutes. `fetch_all_pending_acme_certs` should be run frequently (every minute is fine).
|
||||
`remove_old_acme_certs` can be run less frequently, such as once a week.
|
||||
|
||||
Example cron entries::
|
||||
|
||||
0 22 * * * lemuruser export LEMUR_CONF=/Users/me/.lemur/lemur.conf.py; /www/lemur/bin/lemur notify expirations
|
||||
*/15 * * * * lemuruser export LEMUR_CONF=/Users/me/.lemur/lemur.conf.py; /www/lemur/bin/lemur source sync -s all
|
||||
0 22 * * * lemuruser export LEMUR_CONF=/Users/me/.lemur/lemur.conf.py; /www/lemur/bin/lemur certificate check_revoked
|
||||
|
||||
|
||||
Example Celery configuration (To be placed in your configuration file)::
|
||||
|
||||
CELERYBEAT_SCHEDULE = {
|
||||
'fetch_all_pending_acme_certs': {
|
||||
'task': 'lemur.common.celery.fetch_all_pending_acme_certs',
|
||||
'options': {
|
||||
'expires': 180
|
||||
},
|
||||
'schedule': crontab(minute="*"),
|
||||
},
|
||||
'remove_old_acme_certs': {
|
||||
'task': 'lemur.common.celery.remove_old_acme_certs',
|
||||
'options': {
|
||||
'expires': 180
|
||||
},
|
||||
'schedule': crontab(hour=7, minute=30, day_of_week=1),
|
||||
},
|
||||
'clean_all_sources': {
|
||||
'task': 'lemur.common.celery.clean_all_sources',
|
||||
'options': {
|
||||
'expires': 180
|
||||
},
|
||||
'schedule': crontab(hour=1, minute=0, day_of_week=1),
|
||||
},
|
||||
'sync_all_sources': {
|
||||
'task': 'lemur.common.celery.sync_all_sources',
|
||||
'options': {
|
||||
'expires': 180
|
||||
},
|
||||
'schedule': crontab(hour="*/3", minute=5),
|
||||
},
|
||||
'sync_source_destination': {
|
||||
'task': 'lemur.common.celery.sync_source_destination',
|
||||
'options': {
|
||||
'expires': 180
|
||||
},
|
||||
'schedule': crontab(hour="*"),
|
||||
}
|
||||
}
|
||||
|
||||
To enable celery support, you must also have configuration values that tell Celery which broker and backend to use.
|
||||
Here are the Celery configuration variables that should be set::
|
||||
|
||||
CELERY_RESULT_BACKEND = 'redis://your_redis_url:6379'
|
||||
CELERY_BROKER_URL = 'redis://your_redis_url:6379/0'
|
||||
CELERY_IMPORTS = ('lemur.common.celery')
|
||||
CELERY_TIMEZONE = 'UTC'
|
||||
|
||||
REDIS_HOST="your_redis_url"
|
||||
REDIS_PORT=6379
|
||||
REDIS_DB=0
|
||||
|
||||
Out of the box, every Redis instance supports 16 databases. The default database (`REDIS_DB`) is set to 0, however, you can use any of the databases from 0-15. Via `redis.conf` more databases can be supported.
|
||||
In the `redis://` URL, the database number can be added with a slash after the port (it defaults to 0 if omitted).
|
||||
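For example, to use database 5 instead of the default, the broker and backend URLs and the matching ``REDIS_DB`` value would look like this (hostname is a placeholder)::

    CELERY_RESULT_BACKEND = 'redis://your_redis_url:6379/5'
    CELERY_BROKER_URL = 'redis://your_redis_url:6379/5'
    REDIS_DB = 5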
|
||||
Do not forget to import the crontab module in your configuration file::
|
||||
|
||||
from celery.task.schedules import crontab
|
||||
|
||||
You must start a single Celery scheduler instance and one or more worker instances in order to handle incoming tasks.
|
||||
The scheduler can be started with::
|
||||
|
||||
LEMUR_CONF='/location/to/conf.py' /location/to/lemur/bin/celery -A lemur.common.celery beat
|
||||
|
||||
And the worker can be started with desired options such as the following::
|
||||
|
||||
LEMUR_CONF='/location/to/conf.py' /location/to/lemur/bin/celery -A lemur.common.celery worker --concurrency 10 -E -n lemurworker1@%%h
|
||||
|
||||
Supervisor or systemd configurations should be created for these in production environments as appropriate.
|
||||
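As one possible starting point, a systemd unit for a worker could look roughly like the following; the paths, user and concurrency are assumptions that must be adapted to your installation, and a unit for the ``beat`` scheduler would differ only in its ``ExecStart`` command::

    [Unit]
    Description=Lemur Celery worker
    After=network.target

    [Service]
    User=lemur
    Environment=LEMUR_CONF=/home/lemur/.lemur/lemur.conf.py
    ExecStart=/www/lemur/bin/celery -A lemur.common.celery worker --concurrency 10 -E -n lemurworker1@%%h
    Restart=on-failure

    [Install]
    WantedBy=multi-user.target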
|
||||
Add support for LetsEncrypt
|
||||
===========================
|
||||
|
||||
LetsEncrypt is a free, limited-feature certificate authority that offers publicly trusted certificates that are valid
|
||||
for 90 days. LetsEncrypt does not use organizational validation (OV), and instead relies on domain validation (DV).
|
||||
LetsEncrypt requires that we prove ownership of a domain before we're able to issue a certificate for that domain, each
|
||||
time we want a certificate.
|
||||
|
||||
The most common methods to prove ownership are HTTP validation and DNS validation. Lemur supports DNS validation
|
||||
through the creation of DNS TXT records.
|
||||
|
||||
In a nutshell, when we send a certificate request to LetsEncrypt, they generate a random token and ask us to put that
|
||||
token in a DNS text record to prove ownership of a domain. If a certificate request has multiple domains, we must
|
||||
prove ownership of all of these domains through this method. The token is typically written to a TXT record at
|
||||
_acme-challenge.domain.com. Once we create the appropriate TXT record(s), Lemur will try to validate propagation
|
||||
before requesting that LetsEncrypt finalize the certificate request and send us the certificate.
|
||||
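While a request is pending you can check the challenge record yourself; assuming ``example.com`` is one of the domains on the request, a simple lookup shows whether the token has propagated::

    dig +short TXT _acme-challenge.example.com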
|
||||
.. figure:: letsencrypt_flow.png
|
||||
|
||||
To start issuing certificates through LetsEncrypt, you must enable Celery support within Lemur first. After doing so,
|
||||
you need to create a LetsEncrypt authority. To do this, visit
|
||||
Authorities -> Create. Set the applicable attributes and click "More Options".
|
||||
|
||||
.. figure:: letsencrypt_authority_1.png
|
||||
|
||||
You will need to set "Certificate" to LetsEncrypt's active chain of trust for the authority you want to use. To find
|
||||
the active chain of trust at the time of writing, please visit `LetsEncrypt
|
||||
<https://letsencrypt.org/certificates/>`_.
|
||||
|
||||
Under Acme_url, enter the appropriate endpoint URL. Lemur supports LetsEncrypt's V2 API, and we recommend you use
|
||||
this. At the time of writing, the staging and production URLs for LetsEncrypt V2 are
|
||||
https://acme-staging-v02.api.letsencrypt.org/directory and https://acme-v02.api.letsencrypt.org/directory.
|
||||
|
||||
.. figure:: letsencrypt_authority_2.png
|
||||
|
||||
After creating the authorities, we will need to create a DNS provider. Visit `Admin` -> `DNS Providers` and click
|
||||
`Create`. Lemur comes with a few provider plugins built in, with different options. Create a DNS provider with the
|
||||
appropriate choices.
|
||||
|
||||
.. figure:: create_dns_provider.png
|
||||
|
||||
By default, users will need to select the DNS provider that is authoritative over their domain in order for the
|
||||
LetsEncrypt flow to function. However, Lemur will attempt to automatically determine the appropriate provider if
|
||||
possible. To enable this functionality, periodically (or through Cron/Celery) run `lemur dns_providers get_all_zones`.
|
||||
This command will traverse all DNS providers, determine which zones they control, and upload this list of zones to
|
||||
Lemur's database (in the dns_providers table). Alternatively, you can manually input this data.
|
||||
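If you schedule this through cron like the other periodic tasks, a daily entry in the same style as the earlier examples would be sufficient (the paths below follow those examples and are assumptions)::

    0 6 * * * lemuruser export LEMUR_CONF=/Users/me/.lemur/lemur.conf.py; /www/lemur/bin/lemur dns_providers get_all_zones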
|
||||
|
||||
LetsEncrypt: pinning to cross-signed ICA
|
||||
----------------------------------------
|
||||
|
||||
Let's Encrypt has been using a `cross-signed <https://letsencrypt.org/certificates/>`_ intermediate CA by DST Root CA X3,
|
||||
which is included in many older devices' TrustStore.
|
||||
|
||||
|
||||
Let's Encrypt is `transitioning <https://letsencrypt.org/2019/04/15/transitioning-to-isrg-root.html>`_ to use
|
||||
the intermediate CA issued by their own root (ISRG X1) starting from September 29th 2020.
|
||||
This is in preparation for concluding the initial bootstrapping of their CA, which was done by having it cross-signed by an older CA.
|
||||
|
||||
|
||||
Lemur can temporarily pin to the cross-signed intermediate CA (same public/private key pair as the ICA signed by ISRG X1).
|
||||
This will prolong support for incompatible devices.
|
||||
|
||||
The following must be added to the config file to activate the pinning (the pinning will be removed by September 2021)::
|
||||
|
||||
# remove or update after Mar 17 16:40:46 2021 GMT
|
||||
IDENTRUST_CROSS_SIGNED_LE_ICA_EXPIRATION_DATE = "17/03/21"
|
||||
IDENTRUST_CROSS_SIGNED_LE_ICA = """
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIEkjCCA3qgAwIBAgIQCgFBQgAAAVOFc2oLheynCDANBgkqhkiG9w0BAQsFADA/
|
||||
MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
|
||||
DkRTVCBSb290IENBIFgzMB4XDTE2MDMxNzE2NDA0NloXDTIxMDMxNzE2NDA0Nlow
|
||||
SjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUxldCdzIEVuY3J5cHQxIzAhBgNVBAMT
|
||||
GkxldCdzIEVuY3J5cHQgQXV0aG9yaXR5IFgzMIIBIjANBgkqhkiG9w0BAQEFAAOC
|
||||
AQ8AMIIBCgKCAQEAnNMM8FrlLke3cl03g7NoYzDq1zUmGSXhvb418XCSL7e4S0EF
|
||||
q6meNQhY7LEqxGiHC6PjdeTm86dicbp5gWAf15Gan/PQeGdxyGkOlZHP/uaZ6WA8
|
||||
SMx+yk13EiSdRxta67nsHjcAHJyse6cF6s5K671B5TaYucv9bTyWaN8jKkKQDIZ0
|
||||
Z8h/pZq4UmEUEz9l6YKHy9v6Dlb2honzhT+Xhq+w3Brvaw2VFn3EK6BlspkENnWA
|
||||
a6xK8xuQSXgvopZPKiAlKQTGdMDQMc2PMTiVFrqoM7hD8bEfwzB/onkxEz0tNvjj
|
||||
/PIzark5McWvxI0NHWQWM6r6hCm21AvA2H3DkwIDAQABo4IBfTCCAXkwEgYDVR0T
|
||||
AQH/BAgwBgEB/wIBADAOBgNVHQ8BAf8EBAMCAYYwfwYIKwYBBQUHAQEEczBxMDIG
|
||||
CCsGAQUFBzABhiZodHRwOi8vaXNyZy50cnVzdGlkLm9jc3AuaWRlbnRydXN0LmNv
|
||||
bTA7BggrBgEFBQcwAoYvaHR0cDovL2FwcHMuaWRlbnRydXN0LmNvbS9yb290cy9k
|
||||
c3Ryb290Y2F4My5wN2MwHwYDVR0jBBgwFoAUxKexpHsscfrb4UuQdf/EFWCFiRAw
|
||||
VAYDVR0gBE0wSzAIBgZngQwBAgEwPwYLKwYBBAGC3xMBAQEwMDAuBggrBgEFBQcC
|
||||
ARYiaHR0cDovL2Nwcy5yb290LXgxLmxldHNlbmNyeXB0Lm9yZzA8BgNVHR8ENTAz
|
||||
MDGgL6AthitodHRwOi8vY3JsLmlkZW50cnVzdC5jb20vRFNUUk9PVENBWDNDUkwu
|
||||
Y3JsMB0GA1UdDgQWBBSoSmpjBH3duubRObemRWXv86jsoTANBgkqhkiG9w0BAQsF
|
||||
AAOCAQEA3TPXEfNjWDjdGBX7CVW+dla5cEilaUcne8IkCJLxWh9KEik3JHRRHGJo
|
||||
uM2VcGfl96S8TihRzZvoroed6ti6WqEBmtzw3Wodatg+VyOeph4EYpr/1wXKtx8/
|
||||
wApIvJSwtmVi4MFU5aMqrSDE6ea73Mj2tcMyo5jMd6jmeWUHK8so/joWUoHOUgwu
|
||||
X4Po1QYz+3dszkDqMp4fklxBwXRsW10KXzPMTZ+sOPAveyxindmjkW8lGy+QsRlG
|
||||
PfZ+G6Z6h7mjem0Y+iWlkYcV4PIWL1iwBi8saCbGS5jN2p8M+X+Q7UNKEkROb3N6
|
||||
KOqkqm57TH2H3eDJAkSnh6/DNFu0Qg==
|
||||
-----END CERTIFICATE-----
|
||||
"""
|
||||
|
||||
|
||||
.. _AcmeAccountReuse:
|
||||
|
||||
LetsEncrypt: Using a pre-existing ACME account
|
||||
-----------------------------------------------
|
||||
|
||||
Let's Encrypt allows reusing an existing ACME account to create and, especially, revoke certificates. The current
|
||||
implementation in the acme plugin only allows for a single account for all ACME authorities, which might be an issue
|
||||
when you try to use Let's Encrypt together with another certificate authority that uses the ACME protocol.
|
||||
|
||||
To use an existing account, you need to configure the `ACME_PRIVATE_KEY` and `ACME_REGR` variables in the lemur
|
||||
configuration.
|
||||
|
||||
`ACME_PRIVATE_KEY` needs to be in the JWK format::
|
||||
|
||||
{
|
||||
"kty": "RSA",
|
||||
"n": "yr1qBwHizA7ME_iV32bY10ILp.....",
|
||||
"e": "AQAB",
|
||||
"d": "llBlYhil3I.....",
|
||||
"p": "-5LW2Lewogo.........",
|
||||
"q": "zk6dHqHfHksd.........",
|
||||
"dp": "qfe9fFIu3mu.......",
|
||||
"dq": "cXFO-loeOyU.......",
|
||||
"qi": "AfK1sh0_8sLTb..........."
|
||||
}
|
||||
|
||||
|
||||
Using `python-jwt`, converting an existing private key in PEM format is quite easy::
|
||||
|
||||
import python_jwt as jwt, jwcrypto.jwk as jwk
|
||||
|
||||
priv_key = jwk.JWK.from_pem(b"""-----BEGIN RSA PRIVATE KEY-----
|
||||
...
|
||||
-----END RSA PRIVATE KEY-----""")
|
||||
|
||||
print(priv_key.export())
|
||||
|
||||
`ACME_REGR` needs to be a valid JSON with a `body` and a `uri` attribute, similar to this::
|
||||
|
||||
{"body": {}, "uri": "https://acme-staging-v02.api.letsencrypt.org/acme/acct/<ACCOUNT_NUMBER>"}
|
||||
|
||||
The URI can be retrieved from the ACME create account endpoint when creating a new account, using the existing key.
|
||||
BIN
docs/production/letsencrypt_authority_1.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 132 KiB |
BIN
docs/production/letsencrypt_authority_2.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 218 KiB |
BIN
docs/production/letsencrypt_flow.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 89 KiB |
@ -1,178 +1,226 @@
|
||||
Quickstart
|
||||
**********
|
||||
|
||||
This guide will step you through setting up a Python-based virtualenv, installing the required packages, and configuring the basic web service.
|
||||
This guide assumes a clean Ubuntu 14.04 instance, commands may differ based on the OS and configuration being used.
|
||||
This guide will step you through setting up a Python-based virtualenv, installing the required packages, and configuring the basic web service. This guide assumes a clean Ubuntu 14.04 instance; commands may differ based on the OS and configuration being used.
|
||||
|
||||
Pressed for time? See the Lemur docker file on `Github <https://github.com/Netflix/lemur-docker>`_.
|
||||
|
||||
|
||||
Dependencies
|
||||
------------
|
||||
|
||||
Some basic prerequisites which you'll need in order to run Lemur:
|
||||
|
||||
* A UNIX-based operating system. We test on Ubuntu, develop on OS X
|
||||
* Python 2.7
|
||||
* PostgreSQL
|
||||
* A UNIX-based operating system (we test on Ubuntu, develop on OS X)
|
||||
* Python 3.7 or greater
|
||||
* PostgreSQL 9.4 or greater
|
||||
* Nginx
|
||||
|
||||
.. note:: Lemur was built with in AWS in mind. This means that things such as databases (RDS), mail (SES), and TLS (ELB),
|
||||
are largely handled for us. Lemur does **not** require AWS to function. Our guides and documentation try to be
|
||||
be as generic as possible and are not intended to document every step of launching Lemur into a given environment.
|
||||
.. note:: Lemur was built with AWS in mind. This means that things such as databases (RDS), mail (SES), and TLS (ELB) are largely handled for us. Lemur does **not** require AWS to function. Our guides and documentation try to be as generic as possible and are not intended to document every step of launching Lemur into a given environment.
|
||||
|
||||
|
||||
Installing Build Dependencies
|
||||
-----------------------------
|
||||
|
||||
If installing Lemur on a bare Ubuntu OS you will need to grab the following packages so that Lemur can correctly build its dependencies:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
sudo apt-get update
|
||||
sudo apt-get install nodejs nodejs-legacy python-pip python-dev python3-dev libpq-dev build-essential libssl-dev libffi-dev libsasl2-dev libldap2-dev nginx git supervisor npm postgresql
|
||||
|
||||
.. note:: PostgreSQL is only required if your database is going to be on the same host as the webserver. npm is needed if you're installing Lemur from the source (e.g., from git).
|
||||
|
||||
.. note:: Installing node from a package manager may create the nodejs bin at /usr/bin/nodejs instead of /usr/bin/node. If that is the case, run the following:
|
||||
sudo ln -s /usr/bin/nodejs /usr/bin/node
|
||||
|
||||
Now, install the Python ``virtualenv`` package:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
sudo pip install -U virtualenv
|
||||
|
||||
|
||||
Setting up an Environment
|
||||
-------------------------
|
||||
|
||||
The first thing you'll need is the Python ``virtualenv`` package. You probably already
|
||||
have this, but if not, you can install it with::
|
||||
In this guide, Lemur will be installed in ``/www``, so you need to create that structure first:
|
||||
|
||||
pip install -U virtualenv
|
||||
.. code-block:: bash
|
||||
|
||||
Once that's done, choose a location for the environment, and create it with the ``virtualenv``
|
||||
command. For our guide, we're going to choose ``/www/lemur/``::
|
||||
sudo mkdir /www
|
||||
cd /www
|
||||
|
||||
virtualenv /www/lemur/
|
||||
Clone Lemur inside the just created directory and give yourself write permission (we assume ``lemur`` is the user):
|
||||
|
||||
Finally, activate your virtualenv::
|
||||
.. code-block:: bash
|
||||
|
||||
source /www/lemur/bin/activate
|
||||
sudo useradd lemur
|
||||
sudo passwd lemur
|
||||
sudo mkdir /home/lemur
|
||||
sudo chown lemur:lemur /home/lemur
|
||||
sudo git clone https://github.com/Netflix/lemur
|
||||
sudo chown -R lemur lemur/
|
||||
|
||||
.. note:: Activating the environment adjusts your PATH, so that things like pip now
|
||||
install into the virtualenv by default.
|
||||
Create the virtual environment, activate it and enter Lemur's directory:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
Installing build dependencies
|
||||
-----------------------------
|
||||
su lemur
|
||||
virtualenv -p python3 lemur
|
||||
source /www/lemur/bin/activate
|
||||
cd lemur
|
||||
|
||||
If installing Lemur on truely bare Ubuntu OS you will need to grab the following packages so that Lemur can correctly build it's
|
||||
dependencies::
|
||||
|
||||
$ sudo apt-get update
|
||||
$ sudo apt-get install nodejs-legacy python-pip libpq-dev python-dev build-essential libssl-dev libffi-dev nginx git supervisor
|
||||
|
||||
And optionally if your database is going to be on the same host as the webserver::
|
||||
|
||||
$ sudo apt-get install postgresql
|
||||
.. note:: Activating the environment adjusts your PATH, so that things like pip now install into the virtualenv by default.
|
||||
|
||||
|
||||
Installing from Source
|
||||
~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
If you're installing the Lemur source (e.g. from git), you'll also need to install **npm**.
|
||||
|
||||
Once your system is prepared, ensure that you are in the virtualenv:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ which python
|
||||
|
||||
which python
|
||||
|
||||
And then run:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ make develop
|
||||
make release
|
||||
|
||||
.. Note:: This command will install npm dependencies as well as compile static assets.
|
||||
.. note:: This command will install npm dependencies as well as compile static assets.
|
||||
|
||||
|
||||
You may also run with the urlContextPath variable set. If this is set, it will add the desired context path for subsequent calls back to Lemur. This will only edit the front-end code for calls back to the server; you will have to make sure the server knows about these routes.
|
||||
::
|
||||
|
||||
Example:
|
||||
urlContextPath=lemur
|
||||
/api/1/auth/providers -> /lemur/api/1/auth/providers
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
make release urlContextPath={desired context path}
|
||||
|
||||
|
||||
Creating a configuration
|
||||
------------------------
|
||||
|
||||
Before we run Lemur we must create a valid configuration file for it.
|
||||
|
||||
The Lemur cli comes with a simple command to get you up and running quickly.
|
||||
Before we run Lemur, we must create a valid configuration file for it. The Lemur command line interface comes with a simple command to get you up and running quickly.
|
||||
|
||||
Simply run:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ lemur create_config
|
||||
lemur create_config
|
||||
|
||||
.. Note:: This command will create a default configuration under `~/.lemur/lemur.conf.py` you
|
||||
can specify this location by passing the `config_path` parameter to the `create_config` command.
|
||||
.. note:: This command will create a default configuration under ``~/.lemur/lemur.conf.py``; you can specify this location by passing the ``config_path`` parameter to the ``create_config`` command.
|
||||
|
||||
You can specify ``-c`` or ``--config`` to any Lemur command to specify the current environment you are working in. Lemur will also look under the environmental variable ``LEMUR_CONF`` should that be easier to set up in your environment.
|
||||
|
||||
You can specify `-c` or `--config` to any Lemur command to specify the current environment
|
||||
you are working in. Lemur will also look under the environmental variable `LEMUR_CONF` should
|
||||
that be easier to setup in your environment.
|
||||
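For example, exporting the variable once in your shell (or in the service's environment) avoids repeating ``--config`` on every invocation; the path below is simply wherever ``create_config`` wrote your configuration:

.. code-block:: bash

    export LEMUR_CONF=~/.lemur/lemur.conf.py
    lemur start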
|
||||
Update your configuration
|
||||
-------------------------
|
||||
|
||||
Once created you will need to update the configuration file with information about your environment,
|
||||
such as which database to talk to, where keys are stores etc..
|
||||
Once created, you will need to update the configuration file with information about your environment, such as which database to talk to, where keys are stored etc.
|
||||
|
||||
.. Note:: If you are unfamiliar with with the SQLALCHEMY_DATABASE_URI string it can be broken up like so:
|
||||
postgresql://userame:password@databasefqdn:databaseport/databasename
|
||||
.. code-block:: bash
|
||||
|
||||
Setup Postgres
|
||||
vi ~/.lemur/lemur.conf.py
|
||||
|
||||
.. note:: If you are unfamiliar with the SQLALCHEMY_DATABASE_URI string it can be broken up like so:
|
||||
``postgresql://username:password@<database-fqdn>:<database-port>/<database-name>``
|
||||
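Purely as an illustration, a local setup matching the Postgres steps later in this guide would use something like the following (credentials are placeholders):

.. code-block:: python

    SQLALCHEMY_DATABASE_URI = 'postgresql://lemur:lemur@localhost:5432/lemur'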
|
||||
Before Lemur will run, you need to fill in a few required variables in the configuration file:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
LEMUR_SECURITY_TEAM_EMAIL
|
||||
# the e-mail address needs to be enclosed in quotes
|
||||
LEMUR_DEFAULT_COUNTRY
|
||||
LEMUR_DEFAULT_STATE
|
||||
LEMUR_DEFAULT_LOCATION
|
||||
LEMUR_DEFAULT_ORGANIZATION
|
||||
LEMUR_DEFAULT_ORGANIZATIONAL_UNIT
|
||||
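A hypothetical filled-in block, purely for illustration (every value is a placeholder to replace, and whether the e-mail setting is a single string or a list may depend on your Lemur version):

.. code-block:: python

    LEMUR_SECURITY_TEAM_EMAIL = ['security@example.com']
    LEMUR_DEFAULT_COUNTRY = 'US'
    LEMUR_DEFAULT_STATE = 'California'
    LEMUR_DEFAULT_LOCATION = 'Los Gatos'
    LEMUR_DEFAULT_ORGANIZATION = 'Example, Inc.'
    LEMUR_DEFAULT_ORGANIZATIONAL_UNIT = 'Operations'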
|
||||
Set Up Postgres
|
||||
--------------
|
||||
|
||||
For production a dedicated database is recommended, for this guide we will assume postgres has been installed and is on
|
||||
the same machine that Lemur is installed on.
|
||||
For production, a dedicated database is recommended; for this guide we will assume Postgres has been installed and is on the same machine that Lemur is installed on.
|
||||
|
||||
First, set a password for the postgres user. For this guide, we will use **lemur** as an example but you should use the database password generated for by Lemur::
|
||||
First, set a password for the postgres user. For this guide, we will use ``lemur`` as an example but you should use the database password generated by Lemur:
|
||||
|
||||
$ sudo -u postgres psql postgres
|
||||
# \password postgres
|
||||
Enter new password: lemur
|
||||
Enter it again: lemur
|
||||
.. code-block:: bash
|
||||
|
||||
Type CTRL-D to exit psql once you have changed the password.
|
||||
sudo -u postgres -i
|
||||
psql
|
||||
postgres=# CREATE USER lemur WITH PASSWORD 'lemur';
|
||||
|
||||
Next, we will create our a new database::
|
||||
Once successful, type CTRL-D to exit the Postgres shell.
|
||||
|
||||
$ sudo -u postgres createdb lemur
|
||||
Next, we will create our new database:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
sudo -u postgres createdb lemur
|
||||
|
||||
.. _InitializingLemur:
|
||||
|
||||
.. note::
|
||||
For this guide we assume you will use the `postgres` user to connect to your database; when deploying to a VM or container this is often all you will need. If you have a shared database, it is recommended you give Lemur its own user.
|
||||
|
||||
.. note::
|
||||
Postgres 9.4 or greater is required as Lemur relies on advanced data column types (e.g. the JSON column type).
|
||||
|
||||
Initializing Lemur
|
||||
------------------
|
||||
|
||||
Lemur provides a helpful command that will initialize your database for you. It creates a default user (lemur) that is
|
||||
used by Lemur to help associate certificates that do not currently have an owner. This is most commonly the case when
|
||||
Lemur has discovered certificates from a third party source. This is also a default user that can be used to
|
||||
administer Lemur.
|
||||
Lemur provides a helpful command that will initialize your database for you. It creates a default user (``lemur``) that is used by Lemur to help associate certificates that do not currently have an owner. This is most commonly the case when Lemur has discovered certificates from a third party source. This is also a default user that can be used to administer Lemur.
|
||||
|
||||
In addition to create a new User, Lemur also creates a few default email notifications. These notifications are based
|
||||
on a few configuration options such as `LEMUR_SECURITY_TEAM_EMAIL` they basically garentee that every cerificate within
|
||||
Lemur will send one expiration notification to the security team.
|
||||
In addition to creating a new user, Lemur also creates a few default email notifications. These notifications are based on a few configuration options such as ``LEMUR_SECURITY_TEAM_EMAIL``. They basically guarantee that every certificate within Lemur will send one expiration notification to the security team.
|
||||
|
||||
Additional notifications can be created through the UI or API.
|
||||
See :ref:`Creating Notifications <CreatingNotifications>` and :ref:`Command Line Interface <CommandLineInterface>` for details.
|
||||
Your database installation requires the pg_trgm extension. If you do not have this installed already, you can allow the script to install this for you by adding the SUPERUSER permission to the lemur database user.
|
||||
|
||||
**Make note of the password used as this will be used during first login to the Lemur UI**
|
||||
.. code-block:: bash
|
||||
sudo -u postgres -i
|
||||
psql
|
||||
postgres=# ALTER USER lemur WITH SUPERUSER;
|
||||
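Alternatively, if you would rather not grant SUPERUSER at all and you have access to a role that can create extensions, you can create ``pg_trgm`` in the Lemur database yourself beforehand (a sketch, assuming the ``postgres`` superuser and a database named ``lemur``):

.. code-block:: bash

    sudo -u postgres psql lemur -c 'CREATE EXTENSION IF NOT EXISTS pg_trgm;'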
|
||||
Additional notifications can be created through the UI or API. See :ref:`Creating Notifications <CreatingNotifications>` and :ref:`Command Line Interface <CommandLineInterface>` for details.
|
||||
|
||||
**Make note of the password used as this will be used during first login to the Lemur UI.**
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ lemur db init
|
||||
cd /www/lemur/lemur
|
||||
lemur init
|
||||
|
||||
.. note:: If you added the SUPERUSER permission to the lemur database user above, it is recommended you revoke that permission now.
|
||||
|
||||
.. code-block:: bash
|
||||
sudo -u postgres -i
|
||||
psql
|
||||
postgres=# ALTER USER lemur WITH NOSUPERUSER;
|
||||
|
||||
$ lemur init
|
||||
|
||||
.. note:: It is recommended that once the 'lemur' user is created that you create individual users for every day access.
|
||||
There is currently no way for a user to self enroll for Lemur access, they must have an administrator create an account
|
||||
for them or be enrolled automatically through SSO. This can be done through the CLI or UI.
|
||||
See :ref:`Creating Users <CreatingUsers>` and :ref:`Command Line Interface <CommandLineInterface>` for details
|
||||
.. note:: It is recommended that once the ``lemur`` user is created that you create individual users for every day access. There is currently no way for a user to self enroll for Lemur access, they must have an administrator create an account for them or be enrolled automatically through SSO. This can be done through the CLI or UI. See :ref:`Creating Users <CreatingUsers>` and :ref:`Command Line Interface <CommandLineInterface>` for details.
|
||||
|
||||
Setup a Reverse Proxy
|
||||
Set Up a Reverse Proxy
|
||||
---------------------
|
||||
|
||||
By default, Lemur runs on port 5000. Even if you change this, under normal conditions you won't be able to bind to
|
||||
port 80. To get around this (and to avoid running Lemur as a privileged user, which you shouldn't), we need setup a
|
||||
simple web proxy. There are many different web servers you can use for this, we like and recommend Nginx.
|
||||
By default, Lemur runs on port 8000. Even if you change this, under normal conditions you won't be able to bind to port 80. To get around this (and to avoid running Lemur as a privileged user, which you shouldn't), we need to set up a simple web proxy. There are many different web servers you can use for this, we like and recommend Nginx.
|
||||
|
||||
|
||||
Proxying with Nginx
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
You'll use the builtin HttpProxyModule within Nginx to handle proxying
|
||||
You'll use the builtin ``HttpProxyModule`` within Nginx to handle proxying. Edit the ``/etc/nginx/sites-available/default`` file according to the lines below
|
||||
|
||||
::
|
||||
|
||||
location /api {
|
||||
proxy_pass http://127.0.0.1:5000;
|
||||
proxy_pass http://127.0.0.1:8000;
|
||||
proxy_next_upstream error timeout invalid_header http_500 http_502 http_503 http_504;
|
||||
proxy_redirect off;
|
||||
proxy_buffering off;
|
||||
@ -180,23 +228,29 @@ You'll use the builtin HttpProxyModule within Nginx to handle proxying
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
}
|
||||
|
||||
|
||||
location / {
|
||||
root /www/lemur/lemur/static/dist;
|
||||
include mime.types;
|
||||
index index.html;
|
||||
}
|
||||
|
||||
See :doc:`../production/index` for more details on using Nginx.
|
||||
.. note:: See :doc:`../production/index` for more details on using Nginx.
|
||||
|
||||
After making these changes, restart Nginx service to apply them:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
sudo service nginx restart
|
||||
|
||||
|
||||
Starting the Web Service
|
||||
------------------------
|
||||
|
||||
Lemur provides a built-in webserver (powered by gunicorn and eventlet) to get you off the ground quickly.
|
||||
Lemur provides a built-in web server (powered by gunicorn and eventlet) to get you off the ground quickly.
|
||||
|
||||
To start the webserver, you simply use ``lemur start``. If you opted to use an alternative configuration path
|
||||
you can pass that via the --config option.
|
||||
To start the web server, you simply use ``lemur start``. If you opted to use an alternative configuration path
|
||||
you can pass that via the ``--config`` option.
|
||||
|
||||
.. note::
|
||||
You can login with the default user created during :ref:`Initializing Lemur <InitializingLemur>` or any other
|
||||
@ -204,23 +258,23 @@ you can pass that via the --config option.
|
||||
|
||||
::
|
||||
|
||||
# Lemur's server runs on port 5000 by default. Make sure your client reflects
|
||||
# Lemur's server runs on port 8000 by default. Make sure your client reflects
|
||||
# the correct host and port!
|
||||
lemur --config=/etc/lemur.conf.py start -b 127.0.0.1:5000
|
||||
lemur --config=/etc/lemur.conf.py start -b 127.0.0.1:8000
|
||||
|
||||
You should now be able to test the web service by visiting ``http://localhost:8000/``.
|
||||
|
||||
You should now be able to test the web service by visiting `http://localhost:5000/`.
|
||||
|
||||
Running Lemur as a Service
|
||||
---------------------------
|
||||
--------------------------
|
||||
|
||||
We recommend using whatever software you are most familiar with for managing Lemur processes. One option is `Supervisor <http://supervisord.org/>`_.
|
||||
|
||||
We recommend using whatever software you are most familiar with for managing Lemur processes. One option is
|
||||
`Supervisor <http://supervisord.org/>`_.
|
||||
|
||||
Configure ``supervisord``
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Configuring Supervisor couldn't be more simple. Just point it to the ``lemur`` executable in your virtualenv's bin/
|
||||
folder and you're good to go.
|
||||
Configuring Supervisor couldn't be more simple. Just point it to the ``lemur`` executable in your virtualenv's ``bin/`` folder and you're good to go.
|
||||
|
||||
::
|
||||
|
||||
@ -230,49 +284,48 @@ folder and you're good to go.
|
||||
autostart=true
|
||||
autorestart=true
|
||||
redirect_stderr=true
|
||||
stdout_logfile syslog
|
||||
stderr_logfile syslog
|
||||
stdout_logfile=syslog
|
||||
stderr_logfile=syslog
|
||||
|
||||
See :ref:`Using Supervisor <UsingSupervisor>` for more details on using Supervisor.
|
||||
|
||||
|
||||
Syncing
|
||||
-------
|
||||
|
||||
Lemur uses periodic sync tasks to make sure it is up-to-date with it's environment. As always things can change outside
|
||||
of Lemur, but we do our best to reconcile those changes.
|
||||
Lemur uses periodic sync tasks to make sure it is up-to-date with its environment. Things change outside of Lemur, and we do our best to reconcile those changes. The recommended method is to use cron:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ crontab -e
|
||||
* 3 * * * lemur sync --all
|
||||
* 3 * * * lemur check_revoked
|
||||
crontab -e
|
||||
*/15 * * * * lemur sync -s all
|
||||
0 22 * * * lemur check_revoked
|
||||
0 22 * * * lemur notify
|
||||
|
||||
|
||||
Additional Utilities
|
||||
--------------------
|
||||
|
||||
If you're familiar with Python you'll quickly find yourself at home, and even more so if you've used Flask. The
|
||||
``lemur`` command is just a simple wrapper around Flask's ``manage.py``, which means you get all of the
|
||||
power and flexibility that goes with it.
|
||||
If you're familiar with Python you'll quickly find yourself at home, and even more so if you've used Flask. The ``lemur`` command is just a simple wrapper around Flask's ``manage.py``, which means you get all of the power and flexibility that goes with it.
|
||||
|
||||
Some of the features which you'll likely find useful are listed below.
|
||||
|
||||
Some of those which you'll likely find useful are:
|
||||
|
||||
lock
|
||||
~~~~
|
||||
|
||||
Encrypts sensitive key material - This is most useful for storing encrypted secrets in source code.
|
||||
Encrypts sensitive key material - this is most useful for storing encrypted secrets in source code.
|
||||
|
||||
|
||||
unlock
|
||||
~~~~~~
|
||||
|
||||
Decrypts sensitive key material - Used to decrypt the secrets stored in source during deployment.
|
||||
Decrypts sensitive key material - used to decrypt the secrets stored in source during deployment.
|
||||
|
||||
|
||||
What's Next?
|
||||
------------
|
||||
|
||||
Get familiar with how Lemur works by reviewing the :doc:`../guide/index`. When you're ready
|
||||
see :doc:`../production/index` for more details on how to configure Lemur for production.
|
||||
|
||||
Remember the above just gets you going, but for production there are several different security considerations to take into account,
|
||||
remember Lemur is handling sensitive data and security is imperative.
|
||||
Get familiar with how Lemur works by reviewing the :doc:`../guide/index`. When you're ready see :doc:`../production/index` for more details on how to configure Lemur for production.
|
||||
|
||||
The above just gets you going, but for production there are several different security considerations to take into account. Remember, Lemur is handling sensitive data and security is imperative.
|
||||
|
||||
@ -1,29 +0,0 @@
|
||||
Jinja2>=2.3
|
||||
Pygments>=1.2
|
||||
Sphinx>=1.3
|
||||
docutils>=0.7
|
||||
markupsafe
|
||||
sphinxcontrib-httpdomain
|
||||
Flask==0.10.1
|
||||
Flask-RESTful==0.3.3
|
||||
Flask-SQLAlchemy==2.0
|
||||
Flask-Script==2.0.5
|
||||
Flask-Migrate==1.4.0
|
||||
Flask-Bcrypt==0.6.2
|
||||
Flask-Principal==0.4.0
|
||||
Flask-Mail==0.9.1
|
||||
SQLAlchemy-Utils==0.30.11
|
||||
BeautifulSoup4
|
||||
requests==2.7.0
|
||||
psycopg2==2.6.1
|
||||
arrow==0.5.4
|
||||
boto==2.38.0 # we might make this optional
|
||||
six==1.9.0
|
||||
gunicorn==19.3.0
|
||||
pycrypto==2.6.1
|
||||
cryptography==1.0.1
|
||||
pyopenssl==0.15.1
|
||||
pyjwt==1.0.1
|
||||
xmltodict==0.9.2
|
||||
lockfile==0.10.2
|
||||
future==0.15.0
|
||||
66
docs/security.rst
Normal file
@ -0,0 +1,66 @@
|
||||
Security
|
||||
========
|
||||
|
||||
We take the security of ``lemur`` seriously. The following are a set of
|
||||
policies we have adopted to ensure that security issues are addressed in a
|
||||
timely fashion.
|
||||
|
||||
Reporting a security issue
|
||||
--------------------------
|
||||
|
||||
We ask that you do not report security issues to our normal GitHub issue
|
||||
tracker.
|
||||
|
||||
If you believe you've identified a security issue with ``lemur``, please
|
||||
report it to ``cloudsecurity@netflix.com``.
|
||||
|
||||
Once you've submitted an issue via email, you should receive an acknowledgment
|
||||
within 48 hours, and depending on the action to be taken, you may receive
|
||||
further follow-up emails.
|
||||
|
||||
Supported Versions
|
||||
------------------
|
||||
|
||||
At any given time, we will provide security support for the `master`_ branch
|
||||
as well as the 2 most recent releases.
|
||||
|
||||
Disclosure Process
|
||||
------------------
|
||||
|
||||
Our process for taking a security issue from private discussion to public
|
||||
disclosure involves multiple steps.
|
||||
|
||||
Approximately one week before full public disclosure, we will send advance
|
||||
notification of the issue to a list of people and organizations, primarily
|
||||
composed of operating-system vendors and other distributors of
|
||||
``lemur``. This notification will consist of an email message
|
||||
containing:
|
||||
|
||||
* A full description of the issue and the affected versions of
|
||||
``lemur``.
|
||||
* The steps we will be taking to remedy the issue.
|
||||
* The patches, if any, that will be applied to ``lemur``.
|
||||
* The date on which the ``lemur`` team will apply these patches, issue
|
||||
new releases, and publicly disclose the issue.
|
||||
|
||||
Simultaneously, the reporter of the issue will receive notification of the date
|
||||
on which we plan to make the issue public.
|
||||
|
||||
On the day of disclosure, we will take the following steps:
|
||||
|
||||
* Apply the relevant patches to the ``lemur`` repository. The commit
|
||||
messages for these patches will indicate that they are for security issues,
|
||||
but will not describe the issue in any detail; instead, they will warn of
|
||||
upcoming disclosure.
|
||||
* Issue the relevant releases.
|
||||
|
||||
If a reported issue is believed to be particularly time-sensitive – due to a
|
||||
known exploit in the wild, for example – the time between advance notification
|
||||
and public disclosure may be shortened considerably.
|
||||
|
||||
The list of people and organizations who receive advance notification of
|
||||
security issues is not, and will not be, made public. This list generally
|
||||
consists of high-profile downstream distributors and is entirely at the
|
||||
discretion of the ``lemur`` team.
|
||||
|
||||
.. _`master`: https://github.com/Netflix/lemur
|
||||
6
funcs/lemur.py
Normal file
@ -0,0 +1,6 @@
|
||||
from secrets import token_bytes as _token_bytes
|
||||
from base64 import urlsafe_b64encode as _urlsafe_b64encode
|
||||
|
||||
|
||||
def gen_random_base64():
|
||||
return _urlsafe_b64encode(_token_bytes(32)).decode()
|
||||
@ -1,13 +1,12 @@
|
||||
'use strict';
|
||||
|
||||
var gulp = require('gulp'),
|
||||
minifycss = require('gulp-minify-css'),
|
||||
concat = require('gulp-concat'),
|
||||
less = require('gulp-less'),
|
||||
gulpif = require('gulp-if'),
|
||||
order = require('gulp-order'),
|
||||
gutil = require('gulp-util'),
|
||||
rename = require('gulp-rename'),
|
||||
foreach = require('gulp-foreach'),
|
||||
debug = require('gulp-debug'),
|
||||
path =require('path'),
|
||||
merge = require('merge-stream'),
|
||||
del = require('del'),
|
||||
@ -27,7 +26,8 @@ var gulp = require('gulp'),
|
||||
minifyHtml = require('gulp-minify-html'),
|
||||
bowerFiles = require('main-bower-files'),
|
||||
karma = require('karma'),
|
||||
replace = require('gulp-replace');
|
||||
replace = require('gulp-replace'),
|
||||
argv = require('yargs').argv;
|
||||
|
||||
gulp.task('default', ['clean'], function () {
|
||||
gulp.start('fonts', 'styles');
|
||||
@ -72,7 +72,6 @@ gulp.task('dev:styles', function () {
|
||||
};
|
||||
|
||||
var fileList = [
|
||||
'lemur/static/app/styles/lemur.css',
|
||||
'bower_components/bootswatch/sandstone/bootswatch.less',
|
||||
'bower_components/fontawesome/css/font-awesome.css',
|
||||
'bower_components/angular-spinkit/src/angular-spinkit.css',
|
||||
@ -80,17 +79,19 @@ gulp.task('dev:styles', function () {
|
||||
'bower_components/angular-loading-bar/src/loading-bar.css',
|
||||
'bower_components/angular-ui-switch/angular-ui-switch.css',
|
||||
'bower_components/angular-wizard/dist/angular-wizard.css',
|
||||
'bower_components/ng-table/ng-table.css',
|
||||
'bower_components/angularjs-toaster/toaster.css'
|
||||
'bower_components/ng-table/dist/ng-table.css',
|
||||
'bower_components/angularjs-toaster/toaster.css',
|
||||
'bower_components/angular-ui-select/dist/select.css',
|
||||
'lemur/static/app/styles/lemur.css'
|
||||
];
|
||||
|
||||
return gulp.src(fileList)
|
||||
.pipe(gulpif(isBootswatchFile, foreach(function (stream, file) {
|
||||
var themeName = path.basename(path.dirname(file.path)),
|
||||
content = replaceAll(baseContent, '$theme$', themeName),
|
||||
file = string_src('bootstrap-' + themeName + '.less', content);
|
||||
file2 = string_src('bootstrap-' + themeName + '.less', content);
|
||||
|
||||
return file;
|
||||
return file2;
|
||||
})))
|
||||
.pipe(less())
|
||||
.pipe(gulpif(isBootstrapFile, foreach(function (stream, file) {
|
||||
@ -100,7 +101,7 @@ gulp.task('dev:styles', function () {
|
||||
// http://stackoverflow.com/questions/21719833/gulp-how-to-add-src-files-in-the-middle-of-a-pipe
|
||||
// https://github.com/gulpjs/gulp/blob/master/docs/recipes/using-multiple-sources-in-one-task.md
|
||||
return merge(stream, gulp.src(['.tmp/styles/font-awesome.css', '.tmp/styles/lemur.css']))
|
||||
.pipe(concat('style-' + themeName + ".css"));
|
||||
.pipe(concat('style-' + themeName + '.css'));
|
||||
})))
|
||||
.pipe(plumber())
|
||||
.pipe(concat('styles.css'))
|
||||
@ -112,7 +113,7 @@ gulp.task('dev:styles', function () {
|
||||
|
||||
// http://stackoverflow.com/questions/1144783/replacing-all-occurrences-of-a-string-in-javascript
|
||||
function escapeRegExp(string) {
|
||||
return string.replace(/([.*+?^=!:${}()|\[\]\/\\])/g, "\\$1");
|
||||
return string.replace(/([.*+?^=!:${}()|\[\]\/\\])/g, '\\$1');
|
||||
}
|
||||
|
||||
function replaceAll(string, find, replace) {
|
||||
@ -122,7 +123,7 @@ function replaceAll(string, find, replace) {
|
||||
function string_src(filename, string) {
|
||||
var src = require('stream').Readable({ objectMode: true });
|
||||
src._read = function () {
|
||||
this.push(new gutil.File({ cwd: "", base: "", path: filename, contents: new Buffer(string) }));
|
||||
this.push(new gutil.File({ cwd: '', base: '', path: filename, contents: new Buffer(string) }));
|
||||
this.push(null);
|
||||
};
|
||||
return src;
|
||||
@ -143,26 +144,18 @@ gulp.task('build:extras', function () {
|
||||
function injectHtml(isDev) {
|
||||
return gulp.src('lemur/static/app/index.html')
|
||||
.pipe(
|
||||
inject(gulp.src(bowerFiles({ base: 'app' }), {
|
||||
read: false
|
||||
}), {
|
||||
inject(gulp.src(bowerFiles({ base: 'app' })), {
|
||||
starttag: '<!-- inject:bower:{{ext}} -->',
|
||||
addRootSlash: false,
|
||||
ignorePath: isDev ? ['lemur/static/app/', '.tmp/'] : null
|
||||
})
|
||||
)
|
||||
.pipe(inject(gulp.src(['lemur/static/app/angular/**/*.js'], {
|
||||
read: false
|
||||
}), {
|
||||
read: false,
|
||||
.pipe(inject(gulp.src(['lemur/static/app/angular/**/*.js']), {
|
||||
starttag: '<!-- inject:{{ext}} -->',
|
||||
addRootSlash: false,
|
||||
ignorePath: isDev ? ['lemur/static/app/', '.tmp/'] : null
|
||||
}))
|
||||
.pipe(inject(gulp.src(['.tmp/styles/**/*.css'], {
|
||||
read: false
|
||||
}), {
|
||||
read: false,
|
||||
.pipe(inject(gulp.src(['.tmp/styles/**/*.css']), {
|
||||
starttag: '<!-- inject:{{ext}} -->',
|
||||
addRootSlash: false,
|
||||
ignorePath: isDev ? ['lemur/static/app/', '.tmp/'] : null
|
||||
@ -170,13 +163,11 @@ function injectHtml(isDev) {
|
||||
.pipe(
|
||||
gulpif(!isDev,
|
||||
inject(gulp.src('lemur/static/dist/ngviews/ngviews.min.js'), {
|
||||
read: false,
|
||||
starttag: '<!-- inject:ngviews -->',
|
||||
addRootSlash: false
|
||||
})
|
||||
)
|
||||
)
|
||||
.pipe(gulp.dest('.tmp/'));
|
||||
).pipe(gulp.dest('.tmp/'));
|
||||
}
|
||||
|
||||
gulp.task('dev:inject', ['dev:styles', 'dev:scripts'], function () {
|
||||
@ -199,23 +190,17 @@ gulp.task('build:ngviews', function () {
|
||||
});
|
||||
|
||||
gulp.task('build:html', ['dev:styles', 'dev:scripts', 'build:ngviews', 'build:inject'], function () {
|
||||
var jsFilter = filter('**/*.js');
|
||||
var cssFilter = filter('**/*.css');
|
||||
|
||||
var assets = useref.assets();
|
||||
var jsFilter = filter(['**/*.js'], {'restore': true});
|
||||
var cssFilter = filter(['**/*.css'], {'restore': true});
|
||||
|
||||
return gulp.src('.tmp/index.html')
|
||||
.pipe(assets)
|
||||
.pipe(rev())
|
||||
.pipe(jsFilter)
|
||||
.pipe(ngAnnotate())
|
||||
.pipe(jsFilter.restore())
|
||||
.pipe(jsFilter.restore)
|
||||
.pipe(cssFilter)
|
||||
.pipe(csso())
|
||||
.pipe(cssFilter.restore())
|
||||
.pipe(assets.restore())
|
||||
.pipe(cssFilter.restore)
|
||||
.pipe(useref())
|
||||
.pipe(revReplace())
|
||||
.pipe(gulp.dest('lemur/static/dist'))
|
||||
.pipe(size());
|
||||
});
|
||||
@ -238,13 +223,42 @@ gulp.task('build:images', function () {
|
||||
|
||||
gulp.task('package:strip', function () {
|
||||
return gulp.src(['lemur/static/dist/scripts/main*'])
|
||||
.pipe(replace('http:\/\/localhost:5000', ''))
|
||||
.pipe(replace('http:\/\/localhost:3000', ''))
|
||||
.pipe(replace('http:\/\/localhost:8000', ''))
|
||||
.pipe(useref())
|
||||
.pipe(revReplace())
|
||||
.pipe(gulp.dest('lemur/static/dist/scripts'))
|
||||
.pipe(size());
|
||||
});
|
||||
|
||||
gulp.task('addUrlContextPath',['addUrlContextPath:revreplace'], function(){
|
||||
var urlContextPathExists = argv.urlContextPath ? true : false;
|
||||
['lemur/static/dist/scripts/main*.js',
|
||||
'lemur/static/dist/angular/**/*.html']
|
||||
.forEach(function(file){
|
||||
return gulp.src(file)
|
||||
.pipe(gulpif(urlContextPathExists, replace('api/', argv.urlContextPath + '/api/')))
|
||||
.pipe(gulpif(urlContextPathExists, replace('/angular/', '/' + argv.urlContextPath + '/angular/')))
|
||||
.pipe(gulp.dest(function(file){
|
||||
return file.base;
|
||||
}))
|
||||
})
|
||||
});
|
||||
|
||||
gulp.task('addUrlContextPath:revision', function(){
|
||||
return gulp.src(['lemur/static/dist/**/*.css','lemur/static/dist/**/*.js'])
|
||||
.pipe(rev())
|
||||
.pipe(gulp.dest('lemur/static/dist'))
|
||||
.pipe(rev.manifest())
|
||||
.pipe(gulp.dest('lemur/static/dist'))
|
||||
})
|
||||
|
||||
gulp.task('addUrlContextPath:revreplace', ['addUrlContextPath:revision'], function(){
|
||||
var manifest = gulp.src("lemur/static/dist/rev-manifest.json");
|
||||
var urlContextPathExists = argv.urlContextPath ? true : false;
|
||||
return gulp.src( "lemur/static/dist/index.html")
|
||||
.pipe(gulp.dest('lemur/static/dist'));
|
||||
})
|
||||
|
||||
|
||||
gulp.task('build', ['build:ngviews', 'build:inject', 'build:images', 'build:fonts', 'build:html', 'build:extras']);
|
||||
gulp.task('package', ['package:strip']);
|
||||
gulp.task('package', ['addUrlContextPath', 'package:strip']);
|
||||
|
||||
@ -6,31 +6,31 @@ var browserSync = require('browser-sync');
|
||||
var httpProxy = require('http-proxy');
|
||||
|
||||
/* This configuration allow you to configure browser sync to proxy your backend */
|
||||
/*
|
||||
var proxyTarget = 'http://localhost/context/'; // The location of your backend
|
||||
var proxyApiPrefix = 'api'; // The element in the URL which differentiate between API request and static file request
|
||||
|
||||
var proxyTarget = 'http://localhost:8000/'; // The location of your backend
|
||||
var proxyApiPrefix = '/api/'; // The element in the URL which differentiate between API request and static file request
|
||||
var proxy = httpProxy.createProxyServer({
|
||||
target: proxyTarget
|
||||
target: proxyTarget
|
||||
});
|
||||
function proxyMiddleware(req, res, next) {
|
||||
if (req.url.indexOf(proxyApiPrefix) !== -1) {
|
||||
proxy.web(req, res);
|
||||
} else {
|
||||
next();
|
||||
if (req.url.indexOf(proxyApiPrefix) !== -1) {
|
||||
proxy.web(req, res);
|
||||
} else {
|
||||
next();
|
||||
}
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
function browserSyncInit(baseDir, files, browser) {
|
||||
browser = browser === undefined ? 'default' : browser;
|
||||
|
||||
browserSync.instance = browserSync.init(files, {
|
||||
startPath: '/index.html',
|
||||
server: {
|
||||
baseDir: baseDir,
|
||||
routes: {
|
||||
'/bower_components': './bower_components'
|
||||
}
|
||||
server: {
|
||||
middleware: [proxyMiddleware],
|
||||
baseDir: baseDir,
|
||||
routes: {
|
||||
'/bower_components': './bower_components'
|
||||
}
|
||||
},
|
||||
browser: browser,
|
||||
ghostMode: false
|
||||
|
||||
@ -1,46 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import glob
|
||||
import os
|
||||
import sys
|
||||
|
||||
os.environ['PYFLAKES_NODOCTEST'] = '1'
|
||||
|
||||
# pep8.py uses sys.argv to find setup.cfg
|
||||
sys.argv = [os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)]
|
||||
|
||||
# git usurbs your bin path for hooks and will always run system python
|
||||
if 'VIRTUAL_ENV' in os.environ:
|
||||
site_packages = glob.glob(
|
||||
'%s/lib/*/site-packages' % os.environ['VIRTUAL_ENV'])[0]
|
||||
sys.path.insert(0, site_packages)
|
||||
|
||||
|
||||
def py_lint(files_modified):
|
||||
from flake8.main import DEFAULT_CONFIG
|
||||
from flake8.engine import get_style_guide
|
||||
|
||||
# remove non-py files and files which no longer exist
|
||||
files_modified = filter(lambda x: x.endswith('.py'), files_modified)
|
||||
|
||||
flake8_style = get_style_guide(parse_argv=True, config_file=DEFAULT_CONFIG)
|
||||
report = flake8_style.check_files(files_modified)
|
||||
|
||||
return report.total_errors != 0
|
||||
|
||||
|
||||
def main():
|
||||
from flake8.hooks import run
|
||||
|
||||
gitcmd = "git diff-index --cached --name-only HEAD"
|
||||
|
||||
_, files_modified, _ = run(gitcmd)
|
||||
|
||||
files_modified = filter(lambda x: os.path.exists(x), files_modified)
|
||||
|
||||
if py_lint(files_modified):
|
||||
return 1
|
||||
return 0
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
||||
12
lemur.service
Normal file
@@ -0,0 +1,12 @@
|
||||
[Unit]
|
||||
Description=Lemur
|
||||
After=postgresql.service
|
||||
|
||||
[Service]
|
||||
ExecStart=/usr/bin/lemur start -b 127.0.0.1:8002 -c /etc/lemur/lemur.conf.py
|
||||
User=lemur
|
||||
Group=lemur
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
|
||||
24
lemur/__about__.py
Normal file
@@ -0,0 +1,24 @@
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__all__ = [
|
||||
"__title__",
|
||||
"__summary__",
|
||||
"__uri__",
|
||||
"__version__",
|
||||
"__author__",
|
||||
"__email__",
|
||||
"__license__",
|
||||
"__copyright__",
|
||||
]
|
||||
|
||||
__title__ = "lemur"
|
||||
__summary__ = "Certificate management and orchestration service"
|
||||
__uri__ = "https://github.com/Netflix/lemur"
|
||||
|
||||
__version__ = "0.7.0"
|
||||
|
||||
__author__ = "The Lemur developers"
|
||||
__email__ = "security@netflix.com"
|
||||
|
||||
__license__ = "Apache License, Version 2.0"
|
||||
__copyright__ = "Copyright 2018 {0}".format(__author__)
|
||||
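This metadata module is the usual single-source-of-truth pattern for package information. A minimal consumption sketch, assuming a setup.py-style consumer (the consumer below is illustrative and not part of this change set):

# Illustrative consumer of lemur/__about__.py; the exec() pattern and the relative
# path are assumptions about a typical setup.py, not taken from this diff.
import os

about = {}
base_dir = os.path.dirname(__file__)
with open(os.path.join(base_dir, "lemur", "__about__.py")) as f:
    exec(f.read(), about)

print(about["__title__"], about["__version__"])  # -> lemur 0.7.0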
@@ -1,14 +1,19 @@
|
||||
"""
|
||||
.. module: lemur
|
||||
:platform: Unix
|
||||
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
|
||||
.. moduleauthor:: Curtis Castrapel <ccastrapel@netflix.com>
|
||||
.. moduleauthor:: Hossein Shafagh <hshafagh@netflix.com>
|
||||
|
||||
"""
|
||||
import time
|
||||
from flask import g, request
|
||||
|
||||
from lemur import factory
|
||||
from lemur.extensions import metrics
|
||||
|
||||
from lemur.users.views import mod as users_bp
|
||||
from lemur.roles.views import mod as roles_bp
|
||||
@@ -21,7 +26,34 @@ from lemur.defaults.views import mod as defaults_bp
|
||||
from lemur.plugins.views import mod as plugins_bp
|
||||
from lemur.notifications.views import mod as notifications_bp
|
||||
from lemur.sources.views import mod as sources_bp
|
||||
from lemur.endpoints.views import mod as endpoints_bp
|
||||
from lemur.logs.views import mod as logs_bp
|
||||
from lemur.api_keys.views import mod as api_key_bp
|
||||
from lemur.pending_certificates.views import mod as pending_certificates_bp
|
||||
from lemur.dns_providers.views import mod as dns_providers_bp
|
||||
|
||||
from lemur.__about__ import (
|
||||
__author__,
|
||||
__copyright__,
|
||||
__email__,
|
||||
__license__,
|
||||
__summary__,
|
||||
__title__,
|
||||
__uri__,
|
||||
__version__,
|
||||
)
|
||||
|
||||
|
||||
__all__ = [
|
||||
"__title__",
|
||||
"__summary__",
|
||||
"__uri__",
|
||||
"__version__",
|
||||
"__author__",
|
||||
"__email__",
|
||||
"__license__",
|
||||
"__copyright__",
|
||||
]
|
||||
|
||||
LEMUR_BLUEPRINTS = (
|
||||
users_bp,
|
||||
@@ -34,12 +66,19 @@ LEMUR_BLUEPRINTS = (
|
||||
defaults_bp,
|
||||
plugins_bp,
|
||||
notifications_bp,
|
||||
sources_bp
|
||||
sources_bp,
|
||||
endpoints_bp,
|
||||
logs_bp,
|
||||
api_key_bp,
|
||||
pending_certificates_bp,
|
||||
dns_providers_bp,
|
||||
)
|
||||
|
||||
|
||||
def create_app(config=None):
|
||||
app = factory.create_app(app_name=__name__, blueprints=LEMUR_BLUEPRINTS, config=config)
|
||||
def create_app(config_path=None):
|
||||
app = factory.create_app(
|
||||
app_name=__name__, blueprints=LEMUR_BLUEPRINTS, config=config_path
|
||||
)
|
||||
configure_hook(app)
|
||||
return app
|
||||
|
||||
@@ -50,16 +89,40 @@ def configure_hook(app):
|
||||
:param app:
|
||||
:return:
|
||||
"""
|
||||
from flask.ext.principal import PermissionDenied
|
||||
from lemur.decorators import crossdomain
|
||||
if app.config.get('CORS'):
|
||||
@app.after_request
|
||||
@crossdomain(origin=u"http://localhost:3000", methods=['PUT', 'HEAD', 'GET', 'POST', 'OPTIONS', 'DELETE'])
|
||||
def after(response):
|
||||
from flask import jsonify
|
||||
from werkzeug.exceptions import HTTPException
|
||||
|
||||
@app.errorhandler(Exception)
|
||||
def handle_error(e):
|
||||
code = 500
|
||||
if isinstance(e, HTTPException):
|
||||
code = e.code
|
||||
|
||||
app.logger.exception(e)
|
||||
return jsonify(error=str(e)), code
|
||||
|
||||
@app.before_request
|
||||
def before_request():
|
||||
g.request_start_time = time.time()
|
||||
|
||||
@app.after_request
|
||||
def after_request(response):
|
||||
# Return early if we don't have the start time
|
||||
if not hasattr(g, "request_start_time"):
|
||||
return response
|
||||
|
||||
@app.errorhandler(PermissionDenied)
|
||||
def handle_invalid_usage(error):
|
||||
response = {'message': 'You are not allow to access this resource'}
|
||||
response.status_code = 403
|
||||
# Get elapsed time in milliseconds
|
||||
elapsed = time.time() - g.request_start_time
|
||||
elapsed = int(round(1000 * elapsed))
|
||||
|
||||
# Collect request/response tags
|
||||
tags = {
|
||||
"endpoint": request.endpoint,
|
||||
"request_method": request.method.lower(),
|
||||
"status_code": response.status_code,
|
||||
}
|
||||
|
||||
# Record our response time metric
|
||||
metrics.send("response_time", "TIMER", elapsed, metric_tags=tags)
|
||||
metrics.send("status_code_{}".format(response.status_code), "counter", 1)
|
||||
return response
|
||||
|
||||
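The configure_hook changes above time every request and report a response_time metric with endpoint, method, and status tags. A stripped-down sketch of the same before/after-request pattern in plain Flask, with a log call standing in for the Lemur-specific metrics extension:

# Minimal sketch of the request-timing hooks added above (plain Flask; the log
# statement stands in for lemur.extensions.metrics.send).
import time

from flask import Flask, g, request

app = Flask(__name__)

@app.before_request
def start_timer():
    g.request_start_time = time.time()

@app.after_request
def emit_timing(response):
    if not hasattr(g, "request_start_time"):
        return response
    elapsed_ms = int(round(1000 * (time.time() - g.request_start_time)))
    app.logger.info(
        "response_time=%sms endpoint=%s method=%s status=%s",
        elapsed_ms, request.endpoint, request.method.lower(), response.status_code,
    )
    return response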
86
lemur/acme_providers/cli.py
Normal file
@@ -0,0 +1,86 @@
|
||||
import time
|
||||
import json
|
||||
|
||||
from flask_script import Manager
|
||||
from flask import current_app
|
||||
|
||||
from lemur.extensions import sentry
|
||||
from lemur.constants import SUCCESS_METRIC_STATUS
|
||||
from lemur.plugins.lemur_acme.plugin import AcmeHandler
|
||||
|
||||
manager = Manager(
|
||||
usage="Handles all ACME related tasks"
|
||||
)
|
||||
|
||||
|
||||
@manager.option(
|
||||
"-d",
|
||||
"--domain",
|
||||
dest="domain",
|
||||
required=True,
|
||||
help="Name of the Domain to store to (ex. \"_acme-chall.test.com\".",
|
||||
)
|
||||
@manager.option(
|
||||
"-t",
|
||||
"--token",
|
||||
dest="token",
|
||||
required=True,
|
||||
help="Value of the Token to store in DNS as content.",
|
||||
)
|
||||
def dnstest(domain, token):
|
||||
"""
|
||||
Create, verify, and delete DNS TXT records using an autodetected provider.
|
||||
"""
|
||||
print("[+] Starting ACME Tests.")
|
||||
change_id = (domain, token)
|
||||
|
||||
acme_handler = AcmeHandler()
|
||||
acme_handler.autodetect_dns_providers(domain)
|
||||
if not acme_handler.dns_providers_for_domain[domain]:
|
||||
raise Exception(f"No DNS providers found for domain: {format(domain)}.")
|
||||
|
||||
# Create TXT Records
|
||||
for dns_provider in acme_handler.dns_providers_for_domain[domain]:
|
||||
dns_provider_plugin = acme_handler.get_dns_provider(dns_provider.provider_type)
|
||||
dns_provider_options = json.loads(dns_provider.credentials)
|
||||
account_number = dns_provider_options.get("account_id")
|
||||
|
||||
print(f"[+] Creating TXT Record in `{dns_provider.name}` provider")
|
||||
change_id = dns_provider_plugin.create_txt_record(domain, token, account_number)
|
||||
|
||||
print("[+] Verifying TXT Record has propagated to DNS.")
|
||||
print("[+] This step could take a while...")
|
||||
time.sleep(10)
|
||||
|
||||
# Verify TXT Records
|
||||
for dns_provider in acme_handler.dns_providers_for_domain[domain]:
|
||||
dns_provider_plugin = acme_handler.get_dns_provider(dns_provider.provider_type)
|
||||
dns_provider_options = json.loads(dns_provider.credentials)
|
||||
account_number = dns_provider_options.get("account_id")
|
||||
|
||||
try:
|
||||
dns_provider_plugin.wait_for_dns_change(change_id, account_number)
|
||||
print(f"[+] Verified TXT Record in `{dns_provider.name}` provider")
|
||||
except Exception:
|
||||
sentry.captureException()
|
||||
current_app.logger.debug(
|
||||
f"Unable to resolve DNS challenge for change_id: {change_id}, account_id: "
|
||||
f"{account_number}",
|
||||
exc_info=True,
|
||||
)
|
||||
print(f"[+] Unable to Verify TXT Record in `{dns_provider.name}` provider")
|
||||
|
||||
time.sleep(10)
|
||||
|
||||
# Delete TXT Records
|
||||
for dns_provider in acme_handler.dns_providers_for_domain[domain]:
|
||||
dns_provider_plugin = acme_handler.get_dns_provider(dns_provider.provider_type)
|
||||
dns_provider_options = json.loads(dns_provider.credentials)
|
||||
account_number = dns_provider_options.get("account_id")
|
||||
|
||||
# TODO(csine@): Add exception handling
|
||||
dns_provider_plugin.delete_txt_record(change_id, account_number, domain, token)
|
||||
print(f"[+] Deleted TXT Record in `{dns_provider.name}` provider")
|
||||
|
||||
status = SUCCESS_METRIC_STATUS
|
||||
print("[+] Done with ACME Tests.")
|
||||
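The dnstest command above hangs off a Flask-Script sub-manager. A hedged wiring sketch showing how such a sub-manager is typically attached to a root manager; the root manager and the "acme" command name are assumptions rather than something this diff shows:

# Hypothetical wiring sketch (Flask-Script); everything except the imported
# `manager` object is an assumption for illustration.
from flask_script import Manager

from lemur.acme_providers.cli import manager as acme_manager

root_manager = Manager(usage="Lemur management commands")
root_manager.add_command("acme", acme_manager)

# With wiring like this, the command above would be invoked as:
#   lemur acme dnstest -d _acme-chall.test.com -t <token-value>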
@@ -1,64 +0,0 @@
|
||||
"""
|
||||
.. module: lemur.analyze.service
|
||||
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
"""
|
||||
# def analyze(endpoints, truststores):
|
||||
# results = {"headings": ["Endpoint"],
|
||||
# "results": [],
|
||||
# "time": datetime.now().strftime("#Y%m%d %H:%M:%S")}
|
||||
#
|
||||
# for store in truststores:
|
||||
# results['headings'].append(os.path.basename(store))
|
||||
#
|
||||
# for endpoint in endpoints:
|
||||
# result_row = [endpoint]
|
||||
# for store in truststores:
|
||||
# result = {'details': []}
|
||||
#
|
||||
# tests = []
|
||||
# for region, ip in REGIONS.items():
|
||||
# try:
|
||||
# domain = dns.name.from_text(endpoint)
|
||||
# if not domain.is_absolute():
|
||||
# domain = domain.concatenate(dns.name.root)
|
||||
#
|
||||
# my_resolver = dns.resolver.Resolver()
|
||||
# my_resolver.nameservers = [ip]
|
||||
# answer = my_resolver.query(domain)
|
||||
#
|
||||
# #force the testing of regional enpoints by changing the dns server
|
||||
# response = requests.get('https://' + str(answer[0]), verify=store)
|
||||
# tests.append('pass')
|
||||
# result['details'].append("{}: SSL testing completed without errors".format(region))
|
||||
#
|
||||
# except SSLError as e:
|
||||
# log.debug(e)
|
||||
# if 'hostname' in str(e):
|
||||
# tests.append('pass')
|
||||
# result['details'].append(
|
||||
# "{}: This test passed ssl negotiation but failed hostname verification because \
|
||||
# the hostname is not included in the certificate".format(region))
|
||||
# elif 'certificate verify failed' in str(e):
|
||||
# tests.append('fail')
|
||||
# result['details'].append("{}: This test failed to verify the SSL certificate".format(region))
|
||||
# else:
|
||||
# tests.append('fail')
|
||||
# result['details'].append("{}: {}".format(region, str(e)))
|
||||
#
|
||||
# except Exception as e:
|
||||
# log.debug(e)
|
||||
# tests.append('fail')
|
||||
# result['details'].append("{}: {}".format(region, str(e)))
|
||||
#
|
||||
# #any failing tests fails the whole endpoint
|
||||
# if 'fail' in tests:
|
||||
# result['test'] = 'fail'
|
||||
# else:
|
||||
# result['test'] = 'pass'
|
||||
#
|
||||
# result_row.append(result)
|
||||
# results['results'].append(result_row)
|
||||
# return results
|
||||
#
|
||||
50
lemur/api_keys/cli.py
Normal file
@@ -0,0 +1,50 @@
|
||||
"""
|
||||
.. module: lemur.api_keys.cli
|
||||
:platform: Unix
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Eric Coan <kungfury@instructure.com>
|
||||
"""
|
||||
from flask_script import Manager
|
||||
from lemur.api_keys import service as api_key_service
|
||||
from lemur.auth.service import create_token
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
manager = Manager(usage="Handles all api key related tasks.")
|
||||
|
||||
|
||||
@manager.option(
|
||||
"-u", "--user-id", dest="uid", help="The User ID this access key belongs too."
|
||||
)
|
||||
@manager.option("-n", "--name", dest="name", help="The name of this API Key.")
|
||||
@manager.option(
|
||||
"-t", "--ttl", dest="ttl", help="The TTL of this API Key. -1 for forever."
|
||||
)
|
||||
def create(uid, name, ttl):
|
||||
"""
|
||||
Create a new api key for a user.
|
||||
:return:
|
||||
"""
|
||||
print("[+] Creating a new api key.")
|
||||
key = api_key_service.create(
|
||||
user_id=uid,
|
||||
name=name,
|
||||
ttl=ttl,
|
||||
issued_at=int(datetime.utcnow().timestamp()),
|
||||
revoked=False,
|
||||
)
|
||||
print("[+] Successfully created a new api key. Generating a JWT...")
|
||||
jwt = create_token(uid, key.id, key.ttl)
|
||||
print("[+] Your JWT is: {jwt}".format(jwt=jwt))
|
||||
|
||||
|
||||
@manager.option("-a", "--api-key-id", dest="aid", help="The API Key ID to revoke.")
|
||||
def revoke(aid):
|
||||
"""
|
||||
Revokes an api key for a user.
|
||||
:return:
|
||||
"""
|
||||
print("[-] Revoking the API Key api key.")
|
||||
api_key_service.revoke(aid=aid)
|
||||
print("[+] Successfully revoked the api key")
|
||||
30
lemur/api_keys/models.py
Normal file
@@ -0,0 +1,30 @@
|
||||
"""
|
||||
.. module: lemur.api_keys.models
|
||||
:platform: Unix
|
||||
:synopsis: This module contains all of the models need to create an api key within Lemur.
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Eric Coan <kungfury@instructure.com>
|
||||
"""
|
||||
from sqlalchemy import BigInteger, Boolean, Column, ForeignKey, Integer, String
|
||||
|
||||
from lemur.database import db
|
||||
|
||||
|
||||
class ApiKey(db.Model):
|
||||
__tablename__ = "api_keys"
|
||||
id = Column(Integer, primary_key=True)
|
||||
name = Column(String)
|
||||
user_id = Column(Integer, ForeignKey("users.id"))
|
||||
ttl = Column(BigInteger)
|
||||
issued_at = Column(BigInteger)
|
||||
revoked = Column(Boolean)
|
||||
|
||||
def __repr__(self):
|
||||
return "ApiKey(name={name}, user_id={user_id}, ttl={ttl}, issued_at={iat}, revoked={revoked})".format(
|
||||
user_id=self.user_id,
|
||||
name=self.name,
|
||||
ttl=self.ttl,
|
||||
iat=self.issued_at,
|
||||
revoked=self.revoked,
|
||||
)
|
||||
63
lemur/api_keys/schemas.py
Normal file
@@ -0,0 +1,63 @@
|
||||
"""
|
||||
.. module: lemur.api_keys.schemas
|
||||
:platform: Unix
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Eric Coan <kungfury@instructure.com>
|
||||
"""
|
||||
from flask import g
|
||||
from marshmallow import fields
|
||||
|
||||
from lemur.common.schema import LemurInputSchema, LemurOutputSchema
|
||||
from lemur.users.schemas import UserNestedOutputSchema, UserInputSchema
|
||||
|
||||
|
||||
def current_user_id():
|
||||
return {
|
||||
"id": g.current_user.id,
|
||||
"email": g.current_user.email,
|
||||
"username": g.current_user.username,
|
||||
}
|
||||
|
||||
|
||||
class ApiKeyInputSchema(LemurInputSchema):
|
||||
name = fields.String(required=False)
|
||||
user = fields.Nested(
|
||||
UserInputSchema, missing=current_user_id, default=current_user_id
|
||||
)
|
||||
ttl = fields.Integer()
|
||||
|
||||
|
||||
class ApiKeyRevokeSchema(LemurInputSchema):
|
||||
id = fields.Integer(required=True)
|
||||
name = fields.String()
|
||||
user = fields.Nested(UserInputSchema, required=True)
|
||||
revoked = fields.Boolean()
|
||||
ttl = fields.Integer()
|
||||
issued_at = fields.Integer(required=False)
|
||||
|
||||
|
||||
class UserApiKeyInputSchema(LemurInputSchema):
|
||||
name = fields.String(required=False)
|
||||
ttl = fields.Integer()
|
||||
|
||||
|
||||
class ApiKeyOutputSchema(LemurOutputSchema):
|
||||
jwt = fields.String()
|
||||
|
||||
|
||||
class ApiKeyDescribedOutputSchema(LemurOutputSchema):
|
||||
id = fields.Integer()
|
||||
name = fields.String()
|
||||
user = fields.Nested(UserNestedOutputSchema)
|
||||
ttl = fields.Integer()
|
||||
issued_at = fields.Integer()
|
||||
revoked = fields.Boolean()
|
||||
|
||||
|
||||
api_key_input_schema = ApiKeyInputSchema()
|
||||
api_key_revoke_schema = ApiKeyRevokeSchema()
|
||||
api_key_output_schema = ApiKeyOutputSchema()
|
||||
api_keys_output_schema = ApiKeyDescribedOutputSchema(many=True)
|
||||
api_key_described_output_schema = ApiKeyDescribedOutputSchema()
|
||||
user_api_key_input_schema = UserApiKeyInputSchema()
|
||||
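ApiKeyInputSchema above defaults the nested user to the requesting user through a `missing=` callable. A minimal sketch of that pattern using stock marshmallow (not Lemur's LemurInputSchema wrappers, and a marshmallow 2-style `missing=` keyword to match the code above); the stand-in user dict is an assumption in place of flask.g.current_user:

# Plain-marshmallow sketch of the missing=<callable> default used by ApiKeyInputSchema.
from marshmallow import Schema, fields

def default_user():
    # Stand-in for current_user_id() / g.current_user in the schema above.
    return {"id": 1, "email": "me@example.com", "username": "me"}

class KeyRequestSchema(Schema):
    name = fields.String(required=False)
    user = fields.Dict(missing=default_user)
    ttl = fields.Integer()

# When the client omits "user", the callable fills it in at load time.
print(KeyRequestSchema().load({"name": "ci-bot", "ttl": -1}))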
97
lemur/api_keys/service.py
Normal file
@@ -0,0 +1,97 @@
|
||||
"""
|
||||
.. module: lemur.api_keys.service
|
||||
:platform: Unix
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Eric Coan <kungfury@instructure.com>
|
||||
"""
|
||||
from lemur import database
|
||||
from lemur.api_keys.models import ApiKey
|
||||
|
||||
|
||||
def get(aid):
|
||||
"""
|
||||
Retrieves an api key by its ID.
|
||||
:param aid: The access key id to get.
|
||||
:return:
|
||||
"""
|
||||
return database.get(ApiKey, aid)
|
||||
|
||||
|
||||
def delete(access_key):
|
||||
"""
|
||||
Delete an access key. This is one way to remove a key, though you probably should just set revoked.
|
||||
:param access_key:
|
||||
:return:
|
||||
"""
|
||||
database.delete(access_key)
|
||||
|
||||
|
||||
def revoke(aid):
|
||||
"""
|
||||
Revokes an api key.
|
||||
:param aid:
|
||||
:return:
|
||||
"""
|
||||
api_key = get(aid)
|
||||
setattr(api_key, "revoked", False)
|
||||
|
||||
return database.update(api_key)
|
||||
|
||||
|
||||
def get_all_api_keys():
|
||||
"""
|
||||
Retrieves all Api Keys.
|
||||
:return:
|
||||
"""
|
||||
return ApiKey.query.all()
|
||||
|
||||
|
||||
def create(**kwargs):
|
||||
"""
|
||||
Creates a new API Key.
|
||||
|
||||
:param kwargs:
|
||||
:return:
|
||||
"""
|
||||
api_key = ApiKey(**kwargs)
|
||||
database.create(api_key)
|
||||
return api_key
|
||||
|
||||
|
||||
def update(api_key, **kwargs):
|
||||
"""
|
||||
Updates an api key.
|
||||
:param api_key:
|
||||
:param kwargs:
|
||||
:return:
|
||||
"""
|
||||
for key, value in kwargs.items():
|
||||
setattr(api_key, key, value)
|
||||
|
||||
return database.update(api_key)
|
||||
|
||||
|
||||
def render(args):
|
||||
"""
|
||||
Helper to parse REST Api requests
|
||||
|
||||
:param args:
|
||||
:return:
|
||||
"""
|
||||
query = database.session_query(ApiKey)
|
||||
user_id = args.pop("user_id", None)
|
||||
aid = args.pop("id", None)
|
||||
has_permission = args.pop("has_permission", False)
|
||||
requesting_user_id = args.pop("requesting_user_id")
|
||||
|
||||
if user_id:
|
||||
query = query.filter(ApiKey.user_id == user_id)
|
||||
|
||||
if aid:
|
||||
query = query.filter(ApiKey.id == aid)
|
||||
|
||||
if not has_permission:
|
||||
query = query.filter(ApiKey.user_id == requesting_user_id)
|
||||
|
||||
return database.sort_and_page(query, ApiKey, args)
|
||||
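A hedged usage sketch of the service layer above, run inside an application context; the config path and the example values are assumptions for illustration only:

# Illustrative use of lemur.api_keys.service; the config path is an assumption.
from datetime import datetime

from lemur import create_app
from lemur.api_keys import service as api_key_service

app = create_app(config_path="/etc/lemur/lemur.conf.py")
with app.app_context():
    key = api_key_service.create(
        user_id=1,
        name="ci-bot",
        ttl=-1,
        issued_at=int(datetime.utcnow().timestamp()),
        revoked=False,
    )
    print(api_key_service.get(key.id))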
621
lemur/api_keys/views.py
Normal file
@@ -0,0 +1,621 @@
|
||||
"""
|
||||
.. module: lemur.api_keys.views
|
||||
:platform: Unix
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
|
||||
.. moduleauthor:: Eric Coan <kungfury@instructure.com>
|
||||
|
||||
"""
|
||||
from datetime import datetime
|
||||
|
||||
from flask import Blueprint, g
|
||||
from flask_restful import reqparse, Api
|
||||
|
||||
from lemur.api_keys import service
|
||||
from lemur.auth.service import AuthenticatedResource, create_token
|
||||
from lemur.auth.permissions import ApiKeyCreatorPermission
|
||||
|
||||
from lemur.common.schema import validate_schema
|
||||
from lemur.common.utils import paginated_parser
|
||||
|
||||
from lemur.api_keys.schemas import (
|
||||
api_key_input_schema,
|
||||
api_key_revoke_schema,
|
||||
api_key_output_schema,
|
||||
api_keys_output_schema,
|
||||
api_key_described_output_schema,
|
||||
user_api_key_input_schema,
|
||||
)
|
||||
|
||||
mod = Blueprint("api_keys", __name__)
|
||||
api = Api(mod)
|
||||
|
||||
|
||||
class ApiKeyList(AuthenticatedResource):
|
||||
""" Defines the 'api_keys' endpoint """
|
||||
|
||||
def __init__(self):
|
||||
super(ApiKeyList, self).__init__()
|
||||
|
||||
@validate_schema(None, api_keys_output_schema)
|
||||
def get(self):
|
||||
"""
|
||||
.. http:get:: /keys
|
||||
|
||||
The current list of api keys that you can see.
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
GET /keys HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"items": [
|
||||
{
|
||||
"id": 1,
|
||||
"name": "custom name",
|
||||
"user_id": 1,
|
||||
"ttl": -1,
|
||||
"issued_at": 12,
|
||||
"revoked": false
|
||||
}
|
||||
],
|
||||
"total": 1
|
||||
}
|
||||
|
||||
:query sortBy: field to sort on
|
||||
:query sortDir: asc or desc
|
||||
:query page: int default is 1
|
||||
:query count: count number. default is 10
|
||||
:query user_id: a user to filter by.
|
||||
:query id: an access key to filter by.
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
"""
|
||||
parser = paginated_parser.copy()
|
||||
args = parser.parse_args()
|
||||
args["has_permission"] = ApiKeyCreatorPermission().can()
|
||||
args["requesting_user_id"] = g.current_user.id
|
||||
return service.render(args)
|
||||
|
||||
@validate_schema(api_key_input_schema, api_key_output_schema)
|
||||
def post(self, data=None):
|
||||
"""
|
||||
.. http:post:: /keys
|
||||
|
||||
Creates an API Key.
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
POST /keys HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
{
|
||||
"name": "my custom name",
|
||||
"user_id": 1,
|
||||
"ttl": -1
|
||||
}
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
|
||||
"jwt": ""
|
||||
}
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
"""
|
||||
if not ApiKeyCreatorPermission().can():
|
||||
if data["user"]["id"] != g.current_user.id:
|
||||
return (
|
||||
dict(
|
||||
message="You are not authorized to create tokens for: {0}".format(
|
||||
data["user"]["username"]
|
||||
)
|
||||
),
|
||||
403,
|
||||
)
|
||||
|
||||
access_token = service.create(
|
||||
name=data["name"],
|
||||
user_id=data["user"]["id"],
|
||||
ttl=data["ttl"],
|
||||
revoked=False,
|
||||
issued_at=int(datetime.utcnow().timestamp()),
|
||||
)
|
||||
return dict(
|
||||
jwt=create_token(access_token.user_id, access_token.id, access_token.ttl)
|
||||
)
|
||||
|
||||
|
||||
class ApiKeyUserList(AuthenticatedResource):
|
||||
""" Defines the 'keys' endpoint on the 'users' endpoint. """
|
||||
|
||||
def __init__(self):
|
||||
super(ApiKeyUserList, self).__init__()
|
||||
|
||||
@validate_schema(None, api_keys_output_schema)
|
||||
def get(self, user_id):
|
||||
"""
|
||||
.. http:get:: /users/:user_id/keys
|
||||
|
||||
The current list of api keys for a user that you can see.
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
GET /users/1/keys HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"items": [
|
||||
{
|
||||
"id": 1,
|
||||
"name": "custom name",
|
||||
"user_id": 1,
|
||||
"ttl": -1,
|
||||
"issued_at": 12,
|
||||
"revoked": false
|
||||
}
|
||||
],
|
||||
"total": 1
|
||||
}
|
||||
|
||||
:query sortBy: field to sort on
|
||||
:query sortDir: asc or desc
|
||||
:query page: int default is 1
|
||||
:query count: count number. default is 10
|
||||
:query id: an access key to filter by.
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
"""
|
||||
parser = paginated_parser.copy()
|
||||
args = parser.parse_args()
|
||||
args["has_permission"] = ApiKeyCreatorPermission().can()
|
||||
args["requesting_user_id"] = g.current_user.id
|
||||
args["user_id"] = user_id
|
||||
return service.render(args)
|
||||
|
||||
@validate_schema(user_api_key_input_schema, api_key_output_schema)
|
||||
def post(self, user_id, data=None):
|
||||
"""
|
||||
.. http:post:: /users/:user_id/keys
|
||||
|
||||
Creates an API Key for a user.
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
POST /users/1/keys HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
{
|
||||
"name": "my custom name"
|
||||
"ttl": -1
|
||||
}
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
|
||||
"jwt": ""
|
||||
}
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
"""
|
||||
if not ApiKeyCreatorPermission().can():
|
||||
if user_id != g.current_user.id:
|
||||
return (
|
||||
dict(
|
||||
message="You are not authorized to create tokens for: {0}".format(
|
||||
user_id
|
||||
)
|
||||
),
|
||||
403,
|
||||
)
|
||||
|
||||
access_token = service.create(
|
||||
name=data["name"],
|
||||
user_id=user_id,
|
||||
ttl=data["ttl"],
|
||||
revoked=False,
|
||||
issued_at=int(datetime.utcnow().timestamp()),
|
||||
)
|
||||
return dict(
|
||||
jwt=create_token(access_token.user_id, access_token.id, access_token.ttl)
|
||||
)
|
||||
|
||||
|
||||
class ApiKeys(AuthenticatedResource):
|
||||
def __init__(self):
|
||||
self.reqparse = reqparse.RequestParser()
|
||||
super(ApiKeys, self).__init__()
|
||||
|
||||
@validate_schema(None, api_key_output_schema)
|
||||
def get(self, aid):
|
||||
"""
|
||||
.. http:get:: /keys/1
|
||||
|
||||
Fetch one api key
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
GET /keys/1 HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"jwt": ""
|
||||
}
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
"""
|
||||
access_key = service.get(aid)
|
||||
|
||||
if access_key is None:
|
||||
return dict(message="This token does not exist!"), 404
|
||||
|
||||
if access_key.user_id != g.current_user.id:
|
||||
if not ApiKeyCreatorPermission().can():
|
||||
return dict(message="You are not authorized to view this token!"), 403
|
||||
|
||||
return dict(jwt=create_token(access_key.user_id, access_key.id, access_key.ttl))
|
||||
|
||||
@validate_schema(api_key_revoke_schema, api_key_output_schema)
|
||||
def put(self, aid, data=None):
|
||||
"""
|
||||
.. http:put:: /keys/1
|
||||
|
||||
update one api key
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
PUT /keys/1 HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
{
|
||||
"name": "new_name",
|
||||
"revoked": false,
|
||||
"ttl": -1
|
||||
}
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"jwt": ""
|
||||
}
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
"""
|
||||
access_key = service.get(aid)
|
||||
if access_key is None:
|
||||
return dict(message="This token does not exist!"), 404
|
||||
|
||||
if access_key.user_id != g.current_user.id:
|
||||
if not ApiKeyCreatorPermission().can():
|
||||
return dict(message="You are not authorized to update this token!"), 403
|
||||
|
||||
service.update(
|
||||
access_key, name=data["name"], revoked=data["revoked"], ttl=data["ttl"]
|
||||
)
|
||||
return dict(jwt=create_token(access_key.user_id, access_key.id, access_key.ttl))
|
||||
|
||||
def delete(self, aid):
|
||||
"""
|
||||
.. http:delete:: /keys/1
|
||||
|
||||
deletes one api key
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
DELETE /keys/1 HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"result": true
|
||||
}
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
"""
|
||||
access_key = service.get(aid)
|
||||
if access_key is None:
|
||||
return dict(message="This token does not exist!"), 404
|
||||
|
||||
if access_key.user_id != g.current_user.id:
|
||||
if not ApiKeyCreatorPermission().can():
|
||||
return dict(message="You are not authorized to delete this token!"), 403
|
||||
|
||||
service.delete(access_key)
|
||||
return {"result": True}
|
||||
|
||||
|
||||
class UserApiKeys(AuthenticatedResource):
|
||||
def __init__(self):
|
||||
self.reqparse = reqparse.RequestParser()
|
||||
super(UserApiKeys, self).__init__()
|
||||
|
||||
@validate_schema(None, api_key_output_schema)
|
||||
def get(self, uid, aid):
|
||||
"""
|
||||
.. http:get:: /users/1/keys/1
|
||||
|
||||
Fetch one api key
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
GET /users/1/keys/1 HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"jwt": ""
|
||||
}
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
"""
|
||||
if uid != g.current_user.id:
|
||||
if not ApiKeyCreatorPermission().can():
|
||||
return dict(message="You are not authorized to view this token!"), 403
|
||||
|
||||
access_key = service.get(aid)
|
||||
|
||||
if access_key is None:
|
||||
return dict(message="This token does not exist!"), 404
|
||||
|
||||
if access_key.user_id != uid:
|
||||
return dict(message="You are not authorized to view this token!"), 403
|
||||
|
||||
return dict(jwt=create_token(access_key.user_id, access_key.id, access_key.ttl))
|
||||
|
||||
@validate_schema(api_key_revoke_schema, api_key_output_schema)
|
||||
def put(self, uid, aid, data=None):
|
||||
"""
|
||||
.. http:put:: /users/1/keys/1
|
||||
|
||||
update one api key
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
PUT /users/1/keys/1 HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
{
|
||||
"name": "new_name",
|
||||
"revoked": false,
|
||||
"ttl": -1
|
||||
}
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"jwt": ""
|
||||
}
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
"""
|
||||
if uid != g.current_user.id:
|
||||
if not ApiKeyCreatorPermission().can():
|
||||
return dict(message="You are not authorized to view this token!"), 403
|
||||
|
||||
access_key = service.get(aid)
|
||||
if access_key is None:
|
||||
return dict(message="This token does not exist!"), 404
|
||||
|
||||
if access_key.user_id != uid:
|
||||
return dict(message="You are not authorized to update this token!"), 403
|
||||
|
||||
service.update(
|
||||
access_key, name=data["name"], revoked=data["revoked"], ttl=data["ttl"]
|
||||
)
|
||||
return dict(jwt=create_token(access_key.user_id, access_key.id, access_key.ttl))
|
||||
|
||||
def delete(self, uid, aid):
|
||||
"""
|
||||
.. http:delete:: /users/1/keys/1
|
||||
|
||||
deletes one api key
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
DELETE /users/1/keys/1 HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"result": true
|
||||
}
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
"""
|
||||
if uid != g.current_user.id:
|
||||
if not ApiKeyCreatorPermission().can():
|
||||
return dict(message="You are not authorized to view this token!"), 403
|
||||
|
||||
access_key = service.get(aid)
|
||||
if access_key is None:
|
||||
return dict(message="This token does not exist!"), 404
|
||||
|
||||
if access_key.user_id != uid:
|
||||
return dict(message="You are not authorized to delete this token!"), 403
|
||||
|
||||
service.delete(access_key)
|
||||
return {"result": True}
|
||||
|
||||
|
||||
class ApiKeysDescribed(AuthenticatedResource):
|
||||
def __init__(self):
|
||||
self.reqparse = reqparse.RequestParser()
|
||||
super(ApiKeysDescribed, self).__init__()
|
||||
|
||||
@validate_schema(None, api_key_described_output_schema)
|
||||
def get(self, aid):
|
||||
"""
|
||||
.. http:get:: /keys/1/described
|
||||
|
||||
Fetch one api key
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
GET /keys/1/described HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"id": 2,
|
||||
"name": "hoi",
|
||||
"user_id": 2,
|
||||
"ttl": -1,
|
||||
"issued_at": 1222222,
|
||||
"revoked": false
|
||||
}
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
"""
|
||||
access_key = service.get(aid)
|
||||
if access_key is None:
|
||||
return dict(message="This token does not exist!"), 404
|
||||
|
||||
if access_key.user_id != g.current_user.id:
|
||||
if not ApiKeyCreatorPermission().can():
|
||||
return dict(message="You are not authorized to view this token!"), 403
|
||||
|
||||
return access_key
|
||||
|
||||
|
||||
api.add_resource(ApiKeyList, "/keys", endpoint="api_keys")
|
||||
api.add_resource(ApiKeys, "/keys/<int:aid>", endpoint="api_key")
|
||||
api.add_resource(
|
||||
ApiKeysDescribed, "/keys/<int:aid>/described", endpoint="api_key_described"
|
||||
)
|
||||
api.add_resource(ApiKeyUserList, "/users/<int:user_id>/keys", endpoint="user_api_keys")
|
||||
api.add_resource(
|
||||
UserApiKeys, "/users/<int:uid>/keys/<int:aid>", endpoint="user_api_key"
|
||||
)
|
||||
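A client-side sketch for the /keys endpoints documented above; the hostname, the /api/1 prefix, the bearer token, and the payload values are assumptions about a typical deployment, not something this diff specifies:

# Hypothetical HTTP client for the resources above (all values are placeholders).
import requests

BASE = "https://lemur.example.com/api/1"          # API prefix is an assumption
HEADERS = {"Authorization": "Bearer <existing-jwt>"}

# Create a key for user 1; per the view code above this requires the admin
# (ApiKeyCreatorPermission) role unless user 1 is the caller.
created = requests.post(
    BASE + "/keys",
    json={"name": "ci-bot", "user": {"id": 1}, "ttl": -1},
    headers=HEADERS,
)
print(created.json().get("jwt"))

# List the keys visible to the caller.
print(requests.get(BASE + "/keys", headers=HEADERS).json())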
229
lemur/auth/ldap.py
Normal file
@@ -0,0 +1,229 @@
|
||||
"""
|
||||
.. module: lemur.auth.ldap
|
||||
:platform: Unix
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Ian Stahnke <ian.stahnke@myob.com>
|
||||
"""
|
||||
import ldap
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from lemur.users import service as user_service
|
||||
from lemur.roles import service as role_service
|
||||
from lemur.common.utils import validate_conf, get_psuedo_random_string
|
||||
|
||||
|
||||
class LdapPrincipal:
|
||||
"""
|
||||
Provides methods for authenticating against an LDAP server.
|
||||
"""
|
||||
|
||||
def __init__(self, args):
|
||||
self._ldap_validate_conf()
|
||||
# setup ldap config
|
||||
if not args["username"]:
|
||||
raise Exception("missing ldap username")
|
||||
if not args["password"]:
|
||||
self.error_message = "missing ldap password"
|
||||
raise Exception("missing ldap password")
|
||||
self.ldap_principal = args["username"]
|
||||
self.ldap_email_domain = current_app.config.get("LDAP_EMAIL_DOMAIN", None)
|
||||
if "@" not in self.ldap_principal:
|
||||
self.ldap_principal = "%s@%s" % (
|
||||
self.ldap_principal,
|
||||
self.ldap_email_domain,
|
||||
)
|
||||
self.ldap_username = args["username"]
|
||||
if "@" in self.ldap_username:
|
||||
self.ldap_username = args["username"].split("@")[0]
|
||||
self.ldap_password = args["password"]
|
||||
self.ldap_server = current_app.config.get("LDAP_BIND_URI", None)
|
||||
self.ldap_base_dn = current_app.config.get("LDAP_BASE_DN", None)
|
||||
self.ldap_use_tls = current_app.config.get("LDAP_USE_TLS", False)
|
||||
self.ldap_cacert_file = current_app.config.get("LDAP_CACERT_FILE", None)
|
||||
self.ldap_default_role = current_app.config.get("LEMUR_DEFAULT_ROLE", None)
|
||||
self.ldap_required_group = current_app.config.get("LDAP_REQUIRED_GROUP", None)
|
||||
self.ldap_groups_to_roles = current_app.config.get("LDAP_GROUPS_TO_ROLES", None)
|
||||
self.ldap_is_active_directory = current_app.config.get(
|
||||
"LDAP_IS_ACTIVE_DIRECTORY", False
|
||||
)
|
||||
self.ldap_attrs = ["memberOf"]
|
||||
self.ldap_client = None
|
||||
self.ldap_groups = None
|
||||
|
||||
def _update_user(self, roles):
|
||||
"""
|
||||
create or update a local user instance.
|
||||
"""
|
||||
# try to get user from local database
|
||||
user = user_service.get_by_email(self.ldap_principal)
|
||||
|
||||
# create them a local account
|
||||
if not user:
|
||||
user = user_service.create(
|
||||
self.ldap_username,
|
||||
get_psuedo_random_string(),
|
||||
self.ldap_principal,
|
||||
True,
|
||||
"", # thumbnailPhotoUrl
|
||||
list(roles),
|
||||
)
|
||||
else:
|
||||
# we add 'lemur' specific roles, so they do not get marked as removed
|
||||
for ur in user.roles:
|
||||
if not ur.third_party:
|
||||
roles.add(ur)
|
||||
|
||||
# update any changes to the user
|
||||
user_service.update(
|
||||
user.id,
|
||||
self.ldap_username,
|
||||
self.ldap_principal,
|
||||
user.active,
|
||||
user.profile_picture,
|
||||
list(roles),
|
||||
)
|
||||
return user
|
||||
|
||||
def _authorize(self):
|
||||
"""
|
||||
check groups and roles to confirm access.
|
||||
return a list of roles if ok.
|
||||
raise an exception on error.
|
||||
"""
|
||||
if not self.ldap_principal:
|
||||
return None
|
||||
|
||||
if self.ldap_required_group:
|
||||
# ensure the user has the required group in their group list
|
||||
if self.ldap_required_group not in self.ldap_groups:
|
||||
return None
|
||||
|
||||
roles = set()
|
||||
if self.ldap_default_role:
|
||||
role = role_service.get_by_name(self.ldap_default_role)
|
||||
if role:
|
||||
if not role.third_party:
|
||||
role = role_service.set_third_party(role.id, third_party_status=True)
|
||||
roles.add(role)
|
||||
|
||||
# update their 'roles'
|
||||
role = role_service.get_by_name(self.ldap_principal)
|
||||
if not role:
|
||||
description = "auto generated role based on owner: {0}".format(
|
||||
self.ldap_principal
|
||||
)
|
||||
role = role_service.create(
|
||||
self.ldap_principal, description=description, third_party=True
|
||||
)
|
||||
if not role.third_party:
|
||||
role = role_service.set_third_party(role.id, third_party_status=True)
|
||||
roles.add(role)
|
||||
if not self.ldap_groups_to_roles:
|
||||
return roles
|
||||
|
||||
for ldap_group_name, role_name in self.ldap_groups_to_roles.items():
|
||||
role = role_service.get_by_name(role_name)
|
||||
if role:
|
||||
if ldap_group_name in self.ldap_groups:
|
||||
current_app.logger.debug(
|
||||
"assigning role {0} to ldap user {1}".format(
|
||||
self.ldap_principal, role
|
||||
)
|
||||
)
|
||||
if not role.third_party:
|
||||
role = role_service.set_third_party(
|
||||
role.id, third_party_status=True
|
||||
)
|
||||
roles.add(role)
|
||||
return roles
|
||||
|
||||
def authenticate(self):
|
||||
"""
|
||||
orchestrate the ldap login.
|
||||
raise an exception on error.
|
||||
"""
|
||||
self._bind()
|
||||
roles = self._authorize()
|
||||
if not roles:
|
||||
raise Exception("ldap authorization failed")
|
||||
return self._update_user(roles)
|
||||
|
||||
def _bind(self):
|
||||
"""
|
||||
authenticate an ldap user.
|
||||
list groups for a user.
|
||||
raise an exception on error.
|
||||
"""
|
||||
if "@" not in self.ldap_principal:
|
||||
self.ldap_principal = "%s@%s" % (
|
||||
self.ldap_principal,
|
||||
self.ldap_email_domain,
|
||||
)
|
||||
ldap_filter = "userPrincipalName=%s" % self.ldap_principal
|
||||
|
||||
# query ldap for auth
|
||||
try:
|
||||
# build a client
|
||||
if not self.ldap_client:
|
||||
self.ldap_client = ldap.initialize(self.ldap_server)
|
||||
# perform a synchronous bind
|
||||
self.ldap_client.set_option(ldap.OPT_REFERRALS, 0)
|
||||
if self.ldap_use_tls:
|
||||
ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER)
|
||||
self.ldap_client.set_option(ldap.OPT_PROTOCOL_VERSION, 3)
|
||||
self.ldap_client.set_option(ldap.OPT_X_TLS, ldap.OPT_X_TLS_DEMAND)
|
||||
self.ldap_client.set_option(ldap.OPT_X_TLS_DEMAND, True)
|
||||
self.ldap_client.set_option(ldap.OPT_DEBUG_LEVEL, 255)
|
||||
if self.ldap_cacert_file:
|
||||
self.ldap_client.set_option(
|
||||
ldap.OPT_X_TLS_CACERTFILE, self.ldap_cacert_file
|
||||
)
|
||||
self.ldap_client.simple_bind_s(self.ldap_principal, self.ldap_password)
|
||||
except ldap.INVALID_CREDENTIALS:
|
||||
self.ldap_client.unbind()
|
||||
raise Exception("The supplied ldap credentials are invalid")
|
||||
except ldap.SERVER_DOWN:
|
||||
raise Exception("ldap server unavailable")
|
||||
except ldap.LDAPError as e:
|
||||
raise Exception("ldap error: {0}".format(e))
|
||||
|
||||
if self.ldap_is_active_directory:
|
||||
# Lookup user DN, needed to search for group membership
|
||||
userdn = self.ldap_client.search_s(
|
||||
self.ldap_base_dn,
|
||||
ldap.SCOPE_SUBTREE,
|
||||
ldap_filter,
|
||||
["distinguishedName"],
|
||||
)[0][1]["distinguishedName"][0]
|
||||
userdn = userdn.decode("utf-8")
|
||||
# Search all groups that have the userDN as a member
|
||||
groupfilter = "(&(objectclass=group)(member:1.2.840.113556.1.4.1941:={0}))".format(
|
||||
userdn
|
||||
)
|
||||
lgroups = self.ldap_client.search_s(
|
||||
self.ldap_base_dn, ldap.SCOPE_SUBTREE, groupfilter, ["cn"]
|
||||
)
|
||||
|
||||
# Create a list of group CN's from the result
|
||||
self.ldap_groups = []
|
||||
for group in lgroups:
|
||||
(dn, values) = group
|
||||
self.ldap_groups.append(values["cn"][0].decode("ascii"))
|
||||
else:
|
||||
lgroups = self.ldap_client.search_s(
|
||||
self.ldap_base_dn, ldap.SCOPE_SUBTREE, ldap_filter, self.ldap_attrs
|
||||
)[0][1]["memberOf"]
|
||||
# lgroups is a list of utf-8 encoded strings
|
||||
# convert to a single string of groups to allow matching
|
||||
self.ldap_groups = b"".join(lgroups).decode("ascii")
|
||||
|
||||
self.ldap_client.unbind()
|
||||
|
||||
def _ldap_validate_conf(self):
|
||||
"""
|
||||
Confirms required ldap config settings exist.
|
||||
"""
|
||||
required_vars = ["LDAP_BIND_URI", "LDAP_BASE_DN", "LDAP_EMAIL_DOMAIN"]
|
||||
validate_conf(current_app, required_vars)
|
||||
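A hedged sketch of how LdapPrincipal above would be driven from a login path: build it with the submitted credentials, call authenticate(), then mint a session JWT for the returned user. The wrapper function is illustrative; it assumes the LDAP_* settings are configured and that it runs inside an application/request context:

# Illustrative login-flow sketch built on the class above; not the actual auth view.
from lemur.auth.ldap import LdapPrincipal
from lemur.auth.service import create_token

def ldap_login(username, password):
    principal = LdapPrincipal({"username": username, "password": password})
    user = principal.authenticate()   # bind, authorize, then create/update the local user
    return create_token(user)         # session JWT for the authenticated user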
@@ -2,55 +2,73 @@
|
||||
.. module: lemur.auth.permissions
|
||||
:platform: Unix
|
||||
:synopsis: This module defines all the permission used within Lemur
|
||||
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
"""
|
||||
from functools import partial
|
||||
from collections import namedtuple
|
||||
|
||||
from flask.ext.principal import Permission, RoleNeed
|
||||
from flask import current_app
|
||||
from flask_principal import Permission, RoleNeed
|
||||
|
||||
# Permissions
|
||||
operator_permission = Permission(RoleNeed('operator'))
|
||||
admin_permission = Permission(RoleNeed('admin'))
|
||||
operator_permission = Permission(RoleNeed("operator"))
|
||||
admin_permission = Permission(RoleNeed("admin"))
|
||||
|
||||
CertificateCreator = namedtuple('certificate', ['method', 'value'])
|
||||
CertificateCreatorNeed = partial(CertificateCreator, 'key')
|
||||
CertificateOwner = namedtuple("certificate", ["method", "value"])
|
||||
CertificateOwnerNeed = partial(CertificateOwner, "role")
|
||||
|
||||
|
||||
class ViewKeyPermission(Permission):
|
||||
def __init__(self, certificate_id, owner):
|
||||
c_need = CertificateCreatorNeed(certificate_id)
|
||||
super(ViewKeyPermission, self).__init__(c_need, RoleNeed(owner), RoleNeed('admin'))
|
||||
class SensitiveDomainPermission(Permission):
|
||||
def __init__(self):
|
||||
needs = [RoleNeed("admin")]
|
||||
sensitive_domain_roles = current_app.config.get("SENSITIVE_DOMAIN_ROLES", [])
|
||||
|
||||
if sensitive_domain_roles:
|
||||
for role in sensitive_domain_roles:
|
||||
needs.append(RoleNeed(role))
|
||||
|
||||
super(SensitiveDomainPermission, self).__init__(*needs)
|
||||
|
||||
|
||||
class UpdateCertificatePermission(Permission):
|
||||
def __init__(self, certificate_id, owner):
|
||||
c_need = CertificateCreatorNeed(certificate_id)
|
||||
super(UpdateCertificatePermission, self).__init__(c_need, RoleNeed(owner), RoleNeed('admin'))
|
||||
class CertificatePermission(Permission):
|
||||
def __init__(self, owner, roles):
|
||||
needs = [RoleNeed("admin"), RoleNeed(owner), RoleNeed("creator")]
|
||||
for r in roles:
|
||||
needs.append(CertificateOwnerNeed(str(r)))
|
||||
# Backwards compatibility with mixed-case role names
|
||||
if str(r) != str(r).lower():
|
||||
needs.append(CertificateOwnerNeed(str(r).lower()))
|
||||
|
||||
super(CertificatePermission, self).__init__(*needs)
|
||||
|
||||
|
||||
RoleUser = namedtuple('role', ['method', 'value'])
|
||||
ViewRoleCredentialsNeed = partial(RoleUser, 'roleView')
|
||||
class ApiKeyCreatorPermission(Permission):
|
||||
def __init__(self):
|
||||
super(ApiKeyCreatorPermission, self).__init__(RoleNeed("admin"))
|
||||
|
||||
|
||||
class ViewRoleCredentialsPermission(Permission):
|
||||
RoleMember = namedtuple("role", ["method", "value"])
|
||||
RoleMemberNeed = partial(RoleMember, "member")
|
||||
|
||||
|
||||
class RoleMemberPermission(Permission):
|
||||
def __init__(self, role_id):
|
||||
need = ViewRoleCredentialsNeed(role_id)
|
||||
super(ViewRoleCredentialsPermission, self).__init__(need, RoleNeed('admin'))
|
||||
needs = [RoleNeed("admin"), RoleMemberNeed(role_id)]
|
||||
super(RoleMemberPermission, self).__init__(*needs)
|
||||
|
||||
|
||||
AuthorityCreator = namedtuple('authority', ['method', 'value'])
|
||||
AuthorityCreatorNeed = partial(AuthorityCreator, 'authorityUse')
|
||||
AuthorityCreator = namedtuple("authority", ["method", "value"])
|
||||
AuthorityCreatorNeed = partial(AuthorityCreator, "authorityUse")
|
||||
|
||||
AuthorityOwner = namedtuple('authority', ['method', 'value'])
|
||||
AuthorityOwnerNeed = partial(AuthorityOwner, 'role')
|
||||
AuthorityOwner = namedtuple("authority", ["method", "value"])
|
||||
AuthorityOwnerNeed = partial(AuthorityOwner, "role")
|
||||
|
||||
|
||||
class AuthorityPermission(Permission):
|
||||
def __init__(self, authority_id, roles):
|
||||
needs = [RoleNeed('admin'), AuthorityCreatorNeed(str(authority_id))]
|
||||
needs = [RoleNeed("admin"), AuthorityCreatorNeed(str(authority_id))]
|
||||
for r in roles:
|
||||
needs.append(AuthorityOwnerNeed(str(r)))
|
||||
|
||||
|
||||
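A sketch of how the reworked permission classes above are meant to be consumed from view code, evaluated inside a request with an identity loaded; the certificate object and its owner/roles attribute names are assumptions for illustration:

# Illustrative permission checks using the classes defined above.
from lemur.auth.permissions import ApiKeyCreatorPermission, CertificatePermission

def can_manage_certificate(certificate):
    role_names = [role.name for role in certificate.roles]       # attribute names assumed
    return CertificatePermission(certificate.owner, role_names).can()

def can_issue_api_keys_for_others():
    return ApiKeyCreatorPermission().can()                       # admin-only, per the class above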
@@ -3,16 +3,13 @@
|
||||
:platform: Unix
|
||||
:synopsis: This module contains all of the authentication duties for
|
||||
lemur
|
||||
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
|
||||
"""
|
||||
from __future__ import unicode_literals
|
||||
from builtins import bytes
|
||||
import jwt
|
||||
import json
|
||||
import base64
|
||||
import binascii
|
||||
|
||||
from functools import wraps
|
||||
@@ -20,31 +17,18 @@ from datetime import datetime, timedelta
|
||||
|
||||
from flask import g, current_app, jsonify, request
|
||||
|
||||
from flask.ext.restful import Resource
|
||||
from flask.ext.principal import identity_loaded, RoleNeed, UserNeed
|
||||
from flask_restful import Resource
|
||||
from flask_principal import identity_loaded, RoleNeed, UserNeed
|
||||
|
||||
from flask.ext.principal import Identity, identity_changed
|
||||
from flask_principal import Identity, identity_changed
|
||||
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicNumbers
|
||||
|
||||
from lemur.users import service as user_service
|
||||
from lemur.auth.permissions import CertificateCreatorNeed, \
|
||||
AuthorityCreatorNeed, ViewRoleCredentialsNeed
|
||||
|
||||
|
||||
def base64url_decode(data):
|
||||
rem = len(data) % 4
|
||||
|
||||
if rem > 0:
|
||||
data += '=' * (4 - rem)
|
||||
|
||||
return base64.urlsafe_b64decode(bytes(data.encode('latin-1')))
|
||||
|
||||
|
||||
def base64url_encode(data):
|
||||
return base64.urlsafe_b64encode(data).replace('=', '')
|
||||
from lemur.api_keys import service as api_key_service
|
||||
from lemur.auth.permissions import AuthorityCreatorNeed, RoleMemberNeed
|
||||
|
||||
|
||||
def get_rsa_public_key(n, e):
|
||||
@@ -55,68 +39,102 @@ def get_rsa_public_key(n, e):
|
||||
:param e:
|
||||
:return: a RSA Public Key in PEM format
|
||||
"""
|
||||
n = int(binascii.hexlify(base64url_decode(n)), 16)
|
||||
e = int(binascii.hexlify(base64url_decode(e)), 16)
|
||||
n = int(binascii.hexlify(jwt.utils.base64url_decode(bytes(n, "utf-8"))), 16)
|
||||
e = int(binascii.hexlify(jwt.utils.base64url_decode(bytes(e, "utf-8"))), 16)
|
||||
|
||||
pub = RSAPublicNumbers(e, n).public_key(default_backend())
|
||||
return pub.public_bytes(
|
||||
encoding=serialization.Encoding.PEM,
|
||||
format=serialization.PublicFormat.SubjectPublicKeyInfo
|
||||
format=serialization.PublicFormat.SubjectPublicKeyInfo,
|
||||
)
|
||||
|
||||
|
||||
def create_token(user):
|
||||
def create_token(user, aid=None, ttl=None):
|
||||
"""
|
||||
Create a valid JWT for a given user, this token is then used to authenticate
|
||||
Create a valid JWT for a given user/api key, this token is then used to authenticate
|
||||
sessions until the token expires.
|
||||
|
||||
:param user:
|
||||
:return:
|
||||
"""
|
||||
expiration_delta = timedelta(days=int(current_app.config.get('LEMUR_TOKEN_EXPIRATION', 1)))
|
||||
payload = {
|
||||
'sub': user.id,
|
||||
'iat': datetime.now(),
|
||||
'exp': datetime.now() + expiration_delta
|
||||
}
|
||||
token = jwt.encode(payload, current_app.config['LEMUR_TOKEN_SECRET'])
|
||||
return token.decode('unicode_escape')
|
||||
expiration_delta = timedelta(
|
||||
days=int(current_app.config.get("LEMUR_TOKEN_EXPIRATION", 1))
|
||||
)
|
||||
payload = {"iat": datetime.utcnow(), "exp": datetime.utcnow() + expiration_delta}
|
||||
|
||||
# Handle both a bare user ID and a User object.
|
||||
if isinstance(user, int):
|
||||
payload["sub"] = user
|
||||
else:
|
||||
payload["sub"] = user.id
|
||||
if aid is not None:
|
||||
payload["aid"] = aid
|
||||
# Custom TTLs are only supported on Access Keys.
|
||||
if ttl is not None and aid is not None:
|
||||
# Tokens that are forever until revoked.
|
||||
if ttl == -1:
|
||||
del payload["exp"]
|
||||
else:
|
||||
payload["exp"] = ttl
|
||||
token = jwt.encode(payload, current_app.config["LEMUR_TOKEN_SECRET"])
|
||||
return token.decode("unicode_escape")
|
||||
|
||||
|
||||
def login_required(f):
|
||||
"""
|
||||
Validates the JWT and ensures that it has not expired.
|
||||
Validates the JWT and ensures that it has not expired and the user is still active.
|
||||
|
||||
:param f:
|
||||
:return:
|
||||
"""
|
||||
|
||||
@wraps(f)
|
||||
def decorated_function(*args, **kwargs):
|
||||
if not request.headers.get('Authorization'):
|
||||
response = jsonify(message='Missing authorization header')
|
||||
if not request.headers.get("Authorization"):
|
||||
response = jsonify(message="Missing authorization header")
|
||||
response.status_code = 401
|
||||
return response
|
||||
|
||||
try:
|
||||
token = request.headers.get('Authorization').split()[1]
|
||||
token = request.headers.get("Authorization").split()[1]
|
||||
except Exception as e:
|
||||
return dict(message='Token is invalid'), 403
|
||||
return dict(message="Token is invalid"), 403
|
||||
|
||||
try:
|
||||
payload = jwt.decode(token, current_app.config['LEMUR_TOKEN_SECRET'])
|
||||
payload = jwt.decode(token, current_app.config["LEMUR_TOKEN_SECRET"])
|
||||
except jwt.DecodeError:
|
||||
return dict(message='Token is invalid'), 403
|
||||
return dict(message="Token is invalid"), 403
|
||||
except jwt.ExpiredSignatureError:
|
||||
return dict(message='Token has expired'), 403
|
||||
return dict(message="Token has expired"), 403
|
||||
except jwt.InvalidTokenError:
|
||||
return dict(message='Token is invalid'), 403
|
||||
return dict(message="Token is invalid"), 403
|
||||
|
||||
g.current_user = user_service.get(payload['sub'])
|
||||
if "aid" in payload:
|
||||
access_key = api_key_service.get(payload["aid"])
|
||||
if access_key.revoked:
|
||||
return dict(message="Token has been revoked"), 403
|
||||
if access_key.ttl != -1:
|
||||
current_time = datetime.utcnow()
|
||||
expired_time = datetime.fromtimestamp(
|
||||
access_key.issued_at + access_key.ttl
|
||||
)
|
||||
if current_time >= expired_time:
|
||||
return dict(message="Token has expired"), 403
|
||||
|
||||
user = user_service.get(payload["sub"])
|
||||
|
||||
if not user.active:
|
||||
return dict(message="User is not currently active"), 403
|
||||
|
||||
g.current_user = user
|
||||
|
||||
if not g.current_user:
|
||||
return dict(message='You are not logged in'), 403
|
||||
return dict(message="You are not logged in"), 403
|
||||
|
||||
# Tell Flask-Principal the identity changed
|
||||
identity_changed.send(current_app._get_current_object(), identity=Identity(g.current_user.id))
|
||||
identity_changed.send(
|
||||
current_app._get_current_object(), identity=Identity(g.current_user.id)
|
||||
)
|
||||
|
||||
return f(*args, **kwargs)
|
||||
|
||||
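The create_token/login_required changes above add an `aid` claim for API keys and drop the `exp` claim entirely when ttl is -1, so such tokens live until revoked. A small sketch of the resulting claim set, decoded with PyJWT; the secret and the timestamp are placeholders:

# Illustrative claims for an API-key token with ttl == -1 (no "exp" claim).
import jwt

secret = "example-secret"                            # placeholder, not a real LEMUR_TOKEN_SECRET
claims = {"iat": 1514764800, "sub": 1, "aid": 42}

token = jwt.encode(claims, secret)                   # HS256 by default
print(jwt.decode(token, secret, algorithms=["HS256"]))
# -> {'iat': 1514764800, 'sub': 1, 'aid': 42}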
@@ -130,21 +148,18 @@ def fetch_token_header(token):
|
||||
:param token:
|
||||
:return: :raise jwt.DecodeError:
|
||||
"""
|
||||
token = token.encode('utf-8')
|
||||
token = token.encode("utf-8")
|
||||
try:
|
||||
signing_input, crypto_segment = token.rsplit(b'.', 1)
|
||||
header_segment, payload_segment = signing_input.split(b'.', 1)
|
||||
signing_input, crypto_segment = token.rsplit(b".", 1)
|
||||
header_segment, payload_segment = signing_input.split(b".", 1)
|
||||
except ValueError:
|
||||
raise jwt.DecodeError('Not enough segments')
|
||||
raise jwt.DecodeError("Not enough segments")
|
||||
|
||||
try:
|
||||
return json.loads(base64url_decode(header_segment))
|
||||
return json.loads(jwt.utils.base64url_decode(header_segment).decode("utf-8"))
|
||||
except TypeError as e:
|
||||
current_app.logger.exception(e)
|
||||
raise jwt.DecodeError('Invalid header padding')
|
||||
except binascii.Error as e:
|
||||
current_app.logger.exception(e)
|
||||
raise jwt.DecodeError('Invalid header padding')
|
||||
raise jwt.DecodeError("Invalid header padding")
|
||||
|
||||
|
||||
@identity_loaded.connect
|
||||
@@ -163,21 +178,16 @@ def on_identity_loaded(sender, identity):
|
||||
identity.provides.add(UserNeed(identity.id))
|
||||
|
||||
# identity with the roles that the user provides
|
||||
if hasattr(user, 'roles'):
|
||||
if hasattr(user, "roles"):
|
||||
for role in user.roles:
|
||||
identity.provides.add(ViewRoleCredentialsNeed(role.id))
|
||||
identity.provides.add(RoleNeed(role.name))
|
||||
identity.provides.add(RoleMemberNeed(role.id))
|
||||
|
||||
# apply ownership for authorities
|
||||
if hasattr(user, 'authorities'):
|
||||
if hasattr(user, "authorities"):
|
||||
for authority in user.authorities:
|
||||
identity.provides.add(AuthorityCreatorNeed(authority.id))
|
||||
|
||||
# apply ownership of certificates
|
||||
if hasattr(user, 'certificates'):
|
||||
for certificate in user.certificates:
|
||||
identity.provides.add(CertificateCreatorNeed(certificate.id))
|
||||
|
||||
g.user = user
|
||||
|
||||
|
||||
@@ -185,6 +195,7 @@ class AuthenticatedResource(Resource):
|
||||
"""
|
||||
Inherited by all resources that need to be protected by authentication.
|
||||
"""
|
||||
|
||||
method_decorators = [login_required]
|
||||
|
||||
def __init__(self):
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
"""
|
||||
.. module: lemur.auth.views
|
||||
:platform: Unix
|
||||
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
"""
|
||||
@@ -9,22 +9,224 @@ import jwt
|
||||
import base64
|
||||
import requests
|
||||
|
||||
from flask import g, Blueprint, current_app
|
||||
from flask import Blueprint, current_app
|
||||
|
||||
from flask.ext.restful import reqparse, Resource, Api
|
||||
from flask.ext.principal import Identity, identity_changed
|
||||
from flask_restful import reqparse, Resource, Api
|
||||
from flask_principal import Identity, identity_changed
|
||||
|
||||
from lemur.constants import SUCCESS_METRIC_STATUS, FAILURE_METRIC_STATUS
|
||||
from lemur.extensions import metrics
|
||||
from lemur.common.utils import get_psuedo_random_string
|
||||
|
||||
from lemur.users import service as user_service
|
||||
from lemur.roles import service as role_service
|
||||
from lemur.auth.service import create_token, fetch_token_header, get_rsa_public_key
|
||||
from lemur.auth import ldap
|
||||
|
||||
|
||||
mod = Blueprint('auth', __name__)
|
||||
mod = Blueprint("auth", __name__)
|
||||
api = Api(mod)
|
||||
|
||||
|
||||
def exchange_for_access_token(
|
||||
code, redirect_uri, client_id, secret, access_token_url=None, verify_cert=True
|
||||
):
|
||||
"""
|
||||
Exchanges authorization code for access token.
|
||||
|
||||
:param code:
|
||||
:param redirect_uri:
|
||||
:param client_id:
|
||||
:param secret:
|
||||
:param access_token_url:
|
||||
:param verify_cert:
|
||||
:return:
|
||||
"""
|
||||
# take the information we have received from the provider to create a new request
|
||||
params = {
|
||||
"grant_type": "authorization_code",
|
||||
"scope": "openid email profile address",
|
||||
"code": code,
|
||||
"redirect_uri": redirect_uri,
|
||||
"client_id": client_id,
|
||||
}
|
||||
|
||||
# the secret and clientId will be given to you when you sign up for the provider
|
||||
token = "{0}:{1}".format(client_id, secret)
|
||||
|
||||
basic = base64.b64encode(bytes(token, "utf-8"))
|
||||
headers = {
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
"authorization": "basic {0}".format(basic.decode("utf-8")),
|
||||
}
|
||||
|
||||
# exchange authorization code for access token.
|
||||
r = requests.post(
|
||||
access_token_url, headers=headers, params=params, verify=verify_cert
|
||||
)
|
||||
if r.status_code == 400:
|
||||
r = requests.post(
|
||||
access_token_url, headers=headers, data=params, verify=verify_cert
|
||||
)
|
||||
id_token = r.json()["id_token"]
|
||||
access_token = r.json()["access_token"]
|
||||
|
||||
return id_token, access_token
|
||||
|
||||
|
||||
def validate_id_token(id_token, client_id, jwks_url):
|
||||
"""
|
||||
Ensures that the token we receive is valid.
|
||||
|
||||
:param id_token:
|
||||
:param client_id:
|
||||
:param jwks_url:
|
||||
:return:
|
||||
"""
|
||||
# fetch token public key
|
||||
header_data = fetch_token_header(id_token)
|
||||
|
||||
# retrieve the key material as specified by the token header
|
||||
r = requests.get(jwks_url)
|
||||
for key in r.json()["keys"]:
|
||||
if key["kid"] == header_data["kid"]:
|
||||
secret = get_rsa_public_key(key["n"], key["e"])
|
||||
algo = header_data["alg"]
|
||||
break
|
||||
else:
|
||||
return dict(message="Key not found"), 401
|
||||
|
||||
# validate your token based on the key it was signed with
|
||||
try:
|
||||
jwt.decode(
|
||||
id_token, secret.decode("utf-8"), algorithms=[algo], audience=client_id
|
||||
)
|
||||
except jwt.DecodeError:
|
||||
return dict(message="Token is invalid"), 401
|
||||
except jwt.ExpiredSignatureError:
|
||||
return dict(message="Token has expired"), 401
|
||||
except jwt.InvalidTokenError:
|
||||
return dict(message="Token is invalid"), 401
|
||||
|
||||
|
||||
def retrieve_user(user_api_url, access_token):
|
||||
"""
|
||||
Fetch user information from provided user api_url.
|
||||
|
||||
:param user_api_url:
|
||||
:param access_token:
|
||||
:return:
|
||||
"""
|
||||
user_params = dict(access_token=access_token, schema="profile")
|
||||
|
||||
headers = {}
|
||||
|
||||
if current_app.config.get("PING_INCLUDE_BEARER_TOKEN"):
|
||||
headers = {"Authorization": f"Bearer {access_token}"}
|
||||
|
||||
# retrieve information about the current user.
|
||||
r = requests.get(user_api_url, params=user_params, headers=headers)
|
||||
# Some IDPs, like "Keycloak", require a POST instead of a GET
|
||||
if r.status_code == 400:
|
||||
r = requests.post(user_api_url, data=user_params, headers=headers)
|
||||
|
||||
profile = r.json()
|
||||
|
||||
user = user_service.get_by_email(profile["email"])
|
||||
return user, profile
|
||||
|
||||
|
||||
def create_user_roles(profile):
|
||||
"""Creates new roles based on profile information.
|
||||
|
||||
:param profile:
|
||||
:return:
|
||||
"""
|
||||
roles = []
|
||||
|
||||
# update their google 'roles'
|
||||
if "googleGroups" in profile:
|
||||
for group in profile["googleGroups"]:
|
||||
role = role_service.get_by_name(group)
|
||||
if not role:
|
||||
role = role_service.create(
|
||||
group,
|
||||
description="This is a google group based role created by Lemur",
|
||||
third_party=True,
|
||||
)
|
||||
if not role.third_party:
|
||||
role = role_service.set_third_party(role.id, third_party_status=True)
|
||||
roles.append(role)
|
||||
else:
|
||||
current_app.logger.warning(
|
||||
"'googleGroups' not sent by identity provider, no specific roles will be assigned to the user."
|
||||
)
|
||||
|
||||
role = role_service.get_by_name(profile["email"])
|
||||
|
||||
if not role:
|
||||
role = role_service.create(
|
||||
profile["email"],
|
||||
description="This is a user specific role",
|
||||
third_party=True,
|
||||
)
|
||||
if not role.third_party:
|
||||
role = role_service.set_third_party(role.id, third_party_status=True)
|
||||
|
||||
roles.append(role)
|
||||
|
||||
# every user is an operator (tied to a default role)
|
||||
if current_app.config.get("LEMUR_DEFAULT_ROLE"):
|
||||
default = role_service.get_by_name(current_app.config["LEMUR_DEFAULT_ROLE"])
|
||||
if not default:
|
||||
default = role_service.create(
|
||||
current_app.config["LEMUR_DEFAULT_ROLE"],
|
||||
description="This is the default Lemur role.",
|
||||
)
|
||||
if not default.third_party:
|
||||
role_service.set_third_party(default.id, third_party_status=True)
|
||||
roles.append(default)
|
||||
|
||||
return roles
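# Sketch of the profile dict these helpers expect from the identity provider (field names
# are taken from the code above; the concrete values are illustrative only):
#
#   profile = {
#       "email": "jane@example.com",
#       "googleGroups": ["team-secops", "team-platform"],     # optional
#       "thumbnailPhotoUrl": "https://example.com/jane.png",  # optional
#   }
#   roles = create_user_roles(profile)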
|
||||
|
||||
|
||||
def update_user(user, profile, roles):
|
||||
"""Updates user with current profile information and associated roles.
|
||||
|
||||
:param user:
|
||||
:param profile:
|
||||
:param roles:
|
||||
"""
|
||||
|
||||
# if we get an sso user create them an account
|
||||
if not user:
|
||||
user = user_service.create(
|
||||
profile["email"],
|
||||
get_psuedo_random_string(),
|
||||
profile["email"],
|
||||
True,
|
||||
profile.get("thumbnailPhotoUrl"),
|
||||
roles,
|
||||
)
|
||||
|
||||
else:
|
||||
# we add 'lemur' specific roles, so they do not get marked as removed
|
||||
for ur in user.roles:
|
||||
if not ur.third_party:
|
||||
roles.append(ur)
|
||||
|
||||
# update any changes to the user
|
||||
user_service.update(
|
||||
user.id,
|
||||
profile["email"],
|
||||
profile["email"],
|
||||
True,
|
||||
profile.get("thumbnailPhotoUrl"), # profile isn't google+ enabled
|
||||
roles,
|
||||
)
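# How the helpers above fit together, as a condensed sketch of the SSO callback flow that
# Ping.post and OAuth2.post below implement (all values are placeholders):
#
#   id_token, access_token = exchange_for_access_token(
#       code, redirect_uri, client_id, secret, access_token_url=access_token_url
#   )
#   error = validate_id_token(id_token, client_id, jwks_url)
#   if error:
#       return error
#   user, profile = retrieve_user(user_api_url, access_token)
#   roles = create_user_roles(profile)
#   update_user(user, profile, roles)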
|
||||
|
||||
|
||||
class Login(Resource):
|
||||
"""
|
||||
Provides an endpoint for Lemur's basic authentication. It takes a username and password
|
||||
@@ -35,13 +237,14 @@ class Login(Resource):
|
||||
|
||||
Authorization:Bearer <token>
|
||||
|
||||
Tokens have a set expiration date. You can inspect the token expiration be base64 decoding the token and inspecting
|
||||
Tokens have a set expiration date. You can inspect the token expiration by base64 decoding the token and inspecting
|
||||
its contents.
|
||||
|
||||
.. note:: It is recommended that the token expiration is fairly short lived (hours not days). This will largely depend \
|
||||
on your use cases. It is important to note that there is currently no built-in method to revoke a user's token \
|
||||
and force re-authentication.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.reqparse = reqparse.RequestParser()
|
||||
super(Login, self).__init__()
|
||||
@@ -82,153 +285,299 @@ class Login(Resource):
|
||||
:statuscode 401: invalid credentials
|
||||
:statuscode 200: no error
|
||||
"""
|
||||
self.reqparse.add_argument('username', type=str, required=True, location='json')
|
||||
self.reqparse.add_argument('password', type=str, required=True, location='json')
|
||||
self.reqparse.add_argument("username", type=str, required=True, location="json")
|
||||
self.reqparse.add_argument("password", type=str, required=True, location="json")
|
||||
|
||||
args = self.reqparse.parse_args()
|
||||
|
||||
if '@' in args['username']:
|
||||
user = user_service.get_by_email(args['username'])
|
||||
if "@" in args["username"]:
|
||||
user = user_service.get_by_email(args["username"])
|
||||
else:
|
||||
user = user_service.get_by_username(args['username'])
|
||||
user = user_service.get_by_username(args["username"])
|
||||
|
||||
if user and user.check_password(args['password']):
|
||||
# default to local authentication
|
||||
if user and user.check_password(args["password"]) and user.active:
|
||||
# Tell Flask-Principal the identity changed
|
||||
identity_changed.send(current_app._get_current_object(),
|
||||
identity=Identity(user.id))
|
||||
identity_changed.send(
|
||||
current_app._get_current_object(), identity=Identity(user.id)
|
||||
)
|
||||
|
||||
metrics.send(
|
||||
"login", "counter", 1, metric_tags={"status": SUCCESS_METRIC_STATUS}
|
||||
)
|
||||
return dict(token=create_token(user))
|
||||
|
||||
return dict(message='The supplied credentials are invalid'), 401
|
||||
# try ldap login
|
||||
if current_app.config.get("LDAP_AUTH"):
|
||||
try:
|
||||
ldap_principal = ldap.LdapPrincipal(args)
|
||||
user = ldap_principal.authenticate()
|
||||
if user and user.active:
|
||||
# Tell Flask-Principal the identity changed
|
||||
identity_changed.send(
|
||||
current_app._get_current_object(), identity=Identity(user.id)
|
||||
)
|
||||
metrics.send(
|
||||
"login",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={"status": SUCCESS_METRIC_STATUS},
|
||||
)
|
||||
return dict(token=create_token(user))
|
||||
except Exception as e:
|
||||
current_app.logger.error("ldap error: {0}".format(e))
|
||||
ldap_message = "ldap error: %s" % e
|
||||
metrics.send(
|
||||
"login", "counter", 1, metric_tags={"status": FAILURE_METRIC_STATUS}
|
||||
)
|
||||
return dict(message=ldap_message), 403
|
||||
|
||||
def get(self):
|
||||
return {'username': g.current_user.username, 'roles': [r.name for r in g.current_user.roles]}
|
||||
# if not valid user - no certificates for you
|
||||
metrics.send(
|
||||
"login", "counter", 1, metric_tags={"status": FAILURE_METRIC_STATUS}
|
||||
)
|
||||
return dict(message="The supplied credentials are invalid"), 403
|
||||
|
||||
|
||||
class Ping(Resource):
|
||||
"""
|
||||
This class serves as an example of how one might implement an SSO provider for use with Lemur. In
|
||||
this example we use a OpenIDConnect authentication flow, that is essentially OAuth2 underneath. If you have an
|
||||
this example we use an OpenIDConnect authentication flow, that is essentially OAuth2 underneath. If you have an
|
||||
OAuth2 provider you want to use with Lemur, there would be two steps:
|
||||
|
||||
1. Define your own class that inherits from :class:`flask.ext.restful.Resource` and create the HTTP methods the \
|
||||
provider uses for it's callbacks.
|
||||
1. Define your own class that inherits from :class:`flask_restful.Resource` and create the HTTP methods the \
|
||||
provider uses for its callbacks.
|
||||
2. Add or change the Lemur AngularJS Configuration to point to your new provider
|
||||
"""
|
||||
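# A minimal sketch of step 1 above (the class name and endpoint are hypothetical):
#
#   class MyProvider(Resource):
#       def post(self):
#           parser = reqparse.RequestParser()
#           parser.add_argument("code", type=str, required=True, location="json")
#           args = parser.parse_args()
#           # ...exchange args["code"] with your provider and fetch the user's profile,
#           # then resolve the Lemur user and hand back a Lemur JWT:
#           user = user_service.get_by_email(profile["email"])
#           return dict(token=create_token(user))
#
#   api.add_resource(MyProvider, "/auth/myprovider", endpoint="myprovider")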
|
||||
def __init__(self):
|
||||
self.reqparse = reqparse.RequestParser()
|
||||
super(Ping, self).__init__()
|
||||
|
||||
def get(self):
|
||||
return "Redirecting..."
|
||||
|
||||
def post(self):
|
||||
self.reqparse.add_argument('clientId', type=str, required=True, location='json')
|
||||
self.reqparse.add_argument('redirectUri', type=str, required=True, location='json')
|
||||
self.reqparse.add_argument('code', type=str, required=True, location='json')
|
||||
self.reqparse.add_argument("clientId", type=str, required=True, location="json")
|
||||
self.reqparse.add_argument(
|
||||
"redirectUri", type=str, required=True, location="json"
|
||||
)
|
||||
self.reqparse.add_argument("code", type=str, required=True, location="json")
|
||||
|
||||
args = self.reqparse.parse_args()
|
||||
|
||||
# take the information we have received from the provider to create a new request
|
||||
params = {
|
||||
'client_id': args['clientId'],
|
||||
'grant_type': 'authorization_code',
|
||||
'scope': 'openid email profile address',
|
||||
'redirect_uri': args['redirectUri'],
|
||||
'code': args['code']
|
||||
}
|
||||
|
||||
# you can either discover these dynamically or simply configure them
|
||||
access_token_url = current_app.config.get('PING_ACCESS_TOKEN_URL')
|
||||
user_api_url = current_app.config.get('PING_USER_API_URL')
|
||||
access_token_url = current_app.config.get("PING_ACCESS_TOKEN_URL")
|
||||
user_api_url = current_app.config.get("PING_USER_API_URL")
|
||||
|
||||
# the secret and cliendId will be given to you when you signup for the provider
|
||||
basic = base64.b64encode('{0}:{1}'.format(args['clientId'], current_app.config.get("PING_SECRET")))
|
||||
headers = {'Authorization': 'Basic {0}'.format(basic)}
|
||||
secret = current_app.config.get("PING_SECRET")
|
||||
|
||||
# exchange authorization code for access token.
|
||||
id_token, access_token = exchange_for_access_token(
|
||||
args["code"],
|
||||
args["redirectUri"],
|
||||
args["clientId"],
|
||||
secret,
|
||||
access_token_url=access_token_url,
|
||||
)
|
||||
|
||||
r = requests.post(access_token_url, headers=headers, params=params)
|
||||
id_token = r.json()['id_token']
|
||||
access_token = r.json()['access_token']
|
||||
jwks_url = current_app.config.get("PING_JWKS_URL")
|
||||
error_code = validate_id_token(id_token, args["clientId"], jwks_url)
|
||||
if error_code:
|
||||
return error_code
|
||||
user, profile = retrieve_user(user_api_url, access_token)
|
||||
roles = create_user_roles(profile)
|
||||
update_user(user, profile, roles)
|
||||
|
||||
# fetch token public key
|
||||
header_data = fetch_token_header(id_token)
|
||||
jwks_url = current_app.config.get('PING_JWKS_URL')
|
||||
|
||||
# retrieve the key material as specified by the token header
|
||||
r = requests.get(jwks_url)
|
||||
for key in r.json()['keys']:
|
||||
if key['kid'] == header_data['kid']:
|
||||
secret = get_rsa_public_key(key['n'], key['e'])
|
||||
algo = header_data['alg']
|
||||
break
|
||||
else:
|
||||
return dict(message='Key not found'), 403
|
||||
|
||||
# validate your token based on the key it was signed with
|
||||
try:
|
||||
jwt.decode(id_token, secret, algorithms=[algo], audience=args['clientId'])
|
||||
except jwt.DecodeError:
|
||||
return dict(message='Token is invalid'), 403
|
||||
except jwt.ExpiredSignatureError:
|
||||
return dict(message='Token has expired'), 403
|
||||
except jwt.InvalidTokenError:
|
||||
return dict(message='Token is invalid'), 403
|
||||
|
||||
user_params = dict(access_token=access_token, schema='profile')
|
||||
|
||||
# retrieve information about the current user.
|
||||
r = requests.get(user_api_url, params=user_params)
|
||||
profile = r.json()
|
||||
|
||||
user = user_service.get_by_email(profile['email'])
|
||||
|
||||
# update their google 'roles'
|
||||
roles = []
|
||||
|
||||
for group in profile['googleGroups']:
|
||||
role = role_service.get_by_name(group)
|
||||
if not role:
|
||||
role = role_service.create(group, description='This is a google group based role created by Lemur')
|
||||
roles.append(role)
|
||||
|
||||
# if we get an sso user create them an account
|
||||
# we still pick a random password in case sso is down
|
||||
if not user:
|
||||
|
||||
# every user is an operator (tied to a default role)
|
||||
if current_app.config.get('LEMUR_DEFAULT_ROLE'):
|
||||
v = role_service.get_by_name(current_app.config.get('LEMUR_DEFAULT_ROLE'))
|
||||
if v:
|
||||
roles.append(v)
|
||||
|
||||
user = user_service.create(
|
||||
profile['email'],
|
||||
get_psuedo_random_string(),
|
||||
profile['email'],
|
||||
True,
|
||||
profile.get('thumbnailPhotoUrl'),
|
||||
roles
|
||||
)
|
||||
|
||||
else:
|
||||
# we add 'lemur' specific roles, so they do not get marked as removed
|
||||
for ur in user.roles:
|
||||
if ur.authority_id:
|
||||
roles.append(ur)
|
||||
|
||||
# update any changes to the user
|
||||
user_service.update(
|
||||
user.id,
|
||||
profile['email'],
|
||||
profile['email'],
|
||||
True,
|
||||
profile.get('thumbnailPhotoUrl'), # incase profile isn't google+ enabled
|
||||
roles
|
||||
if not user or not user.active:
|
||||
metrics.send(
|
||||
"login", "counter", 1, metric_tags={"status": FAILURE_METRIC_STATUS}
|
||||
)
|
||||
return dict(message="The supplied credentials are invalid"), 403
|
||||
|
||||
# Tell Flask-Principal the identity changed
|
||||
identity_changed.send(current_app._get_current_object(), identity=Identity(user.id))
|
||||
identity_changed.send(
|
||||
current_app._get_current_object(), identity=Identity(user.id)
|
||||
)
|
||||
|
||||
metrics.send(
|
||||
"login", "counter", 1, metric_tags={"status": SUCCESS_METRIC_STATUS}
|
||||
)
|
||||
return dict(token=create_token(user))
|
||||
|
||||
|
||||
class OAuth2(Resource):
|
||||
def __init__(self):
|
||||
self.reqparse = reqparse.RequestParser()
|
||||
super(OAuth2, self).__init__()
|
||||
|
||||
def get(self):
|
||||
return "Redirecting..."
|
||||
|
||||
def post(self):
|
||||
self.reqparse.add_argument("clientId", type=str, required=True, location="json")
|
||||
self.reqparse.add_argument(
|
||||
"redirectUri", type=str, required=True, location="json"
|
||||
)
|
||||
self.reqparse.add_argument("code", type=str, required=True, location="json")
|
||||
|
||||
args = self.reqparse.parse_args()
|
||||
|
||||
# you can either discover these dynamically or simply configure them
|
||||
access_token_url = current_app.config.get("OAUTH2_ACCESS_TOKEN_URL")
|
||||
user_api_url = current_app.config.get("OAUTH2_USER_API_URL")
|
||||
verify_cert = current_app.config.get("OAUTH2_VERIFY_CERT")
|
||||
|
||||
secret = current_app.config.get("OAUTH2_SECRET")
|
||||
|
||||
id_token, access_token = exchange_for_access_token(
|
||||
args["code"],
|
||||
args["redirectUri"],
|
||||
args["clientId"],
|
||||
secret,
|
||||
access_token_url=access_token_url,
|
||||
verify_cert=verify_cert,
|
||||
)
|
||||
|
||||
jwks_url = current_app.config.get("OAUTH2_JWKS_URL")
|
||||
error_code = validate_id_token(id_token, args["clientId"], jwks_url)
|
||||
if error_code:
|
||||
return error_code
|
||||
|
||||
user, profile = retrieve_user(user_api_url, access_token)
|
||||
roles = create_user_roles(profile)
|
||||
update_user(user, profile, roles)
|
||||
|
||||
if not user.active:
|
||||
metrics.send(
|
||||
"login", "counter", 1, metric_tags={"status": FAILURE_METRIC_STATUS}
|
||||
)
|
||||
return dict(message="The supplied credentials are invalid"), 403
|
||||
|
||||
# Tell Flask-Principal the identity changed
|
||||
identity_changed.send(
|
||||
current_app._get_current_object(), identity=Identity(user.id)
|
||||
)
|
||||
|
||||
metrics.send(
|
||||
"login", "counter", 1, metric_tags={"status": SUCCESS_METRIC_STATUS}
|
||||
)
|
||||
|
||||
return dict(token=create_token(user))
|
||||
|
||||
|
||||
api.add_resource(Login, '/auth/login', endpoint='login')
|
||||
api.add_resource(Ping, '/auth/ping', endpoint='ping')
|
||||
class Google(Resource):
|
||||
def __init__(self):
|
||||
self.reqparse = reqparse.RequestParser()
|
||||
super(Google, self).__init__()
|
||||
|
||||
def post(self):
|
||||
access_token_url = "https://accounts.google.com/o/oauth2/token"
|
||||
people_api_url = "https://www.googleapis.com/plus/v1/people/me/openIdConnect"
|
||||
|
||||
self.reqparse.add_argument("clientId", type=str, required=True, location="json")
|
||||
self.reqparse.add_argument(
|
||||
"redirectUri", type=str, required=True, location="json"
|
||||
)
|
||||
self.reqparse.add_argument("code", type=str, required=True, location="json")
|
||||
|
||||
args = self.reqparse.parse_args()
|
||||
|
||||
# Step 1. Exchange authorization code for access token
|
||||
payload = {
|
||||
"client_id": args["clientId"],
|
||||
"grant_type": "authorization_code",
|
||||
"redirect_uri": args["redirectUri"],
|
||||
"code": args["code"],
|
||||
"client_secret": current_app.config.get("GOOGLE_SECRET"),
|
||||
}
|
||||
|
||||
r = requests.post(access_token_url, data=payload)
|
||||
token = r.json()
|
||||
|
||||
# Step 2. Retrieve information about the current user
|
||||
headers = {"Authorization": "Bearer {0}".format(token["access_token"])}
|
||||
|
||||
r = requests.get(people_api_url, headers=headers)
|
||||
profile = r.json()
|
||||
|
||||
user = user_service.get_by_email(profile["email"])
|
||||
|
||||
if not (user and user.active):
|
||||
metrics.send(
|
||||
"login", "counter", 1, metric_tags={"status": FAILURE_METRIC_STATUS}
|
||||
)
|
||||
return dict(message="The supplied credentials are invalid."), 403
|
||||
|
||||
if user:
|
||||
metrics.send(
|
||||
"login", "counter", 1, metric_tags={"status": SUCCESS_METRIC_STATUS}
|
||||
)
|
||||
return dict(token=create_token(user))
|
||||
|
||||
metrics.send(
|
||||
"login", "counter", 1, metric_tags={"status": FAILURE_METRIC_STATUS}
|
||||
)
|
||||
|
||||
|
||||
class Providers(Resource):
|
||||
def get(self):
|
||||
active_providers = []
|
||||
|
||||
for provider in current_app.config.get("ACTIVE_PROVIDERS", []):
|
||||
provider = provider.lower()
|
||||
|
||||
if provider == "google":
|
||||
active_providers.append(
|
||||
{
|
||||
"name": "google",
|
||||
"clientId": current_app.config.get("GOOGLE_CLIENT_ID"),
|
||||
"url": api.url_for(Google),
|
||||
}
|
||||
)
|
||||
|
||||
elif provider == "ping":
|
||||
active_providers.append(
|
||||
{
|
||||
"name": current_app.config.get("PING_NAME"),
|
||||
"url": current_app.config.get("PING_REDIRECT_URI"),
|
||||
"redirectUri": current_app.config.get("PING_REDIRECT_URI"),
|
||||
"clientId": current_app.config.get("PING_CLIENT_ID"),
|
||||
"responseType": "code",
|
||||
"scope": ["openid", "email", "profile", "address"],
|
||||
"scopeDelimiter": " ",
|
||||
"authorizationEndpoint": current_app.config.get(
|
||||
"PING_AUTH_ENDPOINT"
|
||||
),
|
||||
"requiredUrlParams": ["scope"],
|
||||
"type": "2.0",
|
||||
}
|
||||
)
|
||||
|
||||
elif provider == "oauth2":
|
||||
active_providers.append(
|
||||
{
|
||||
"name": current_app.config.get("OAUTH2_NAME"),
|
||||
"url": current_app.config.get("OAUTH2_REDIRECT_URI"),
|
||||
"redirectUri": current_app.config.get("OAUTH2_REDIRECT_URI"),
|
||||
"clientId": current_app.config.get("OAUTH2_CLIENT_ID"),
|
||||
"responseType": "code",
|
||||
"scope": ["openid", "email", "profile", "groups"],
|
||||
"scopeDelimiter": " ",
|
||||
"authorizationEndpoint": current_app.config.get(
|
||||
"OAUTH2_AUTH_ENDPOINT"
|
||||
),
|
||||
"requiredUrlParams": ["scope", "state", "nonce"],
|
||||
"state": "STATE",
|
||||
"nonce": get_psuedo_random_string(),
|
||||
"type": "2.0",
|
||||
}
|
||||
)
|
||||
|
||||
return active_providers
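# Configuration sketch (values are illustrative): the provider list returned above is driven
# by the Flask config, e.g. in lemur.conf.py one might set:
#
#   ACTIVE_PROVIDERS = ["ping", "oauth2", "google"]
#   PING_NAME = "ExampleSSO"
#   PING_CLIENT_ID = "lemur"
#   PING_REDIRECT_URI = "https://lemur.example.com/auth/ping"
#   PING_AUTH_ENDPOINT = "https://sso.example.com/as/authorization.oauth2"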
|
||||
|
||||
|
||||
api.add_resource(Login, "/auth/login", endpoint="login")
|
||||
api.add_resource(Ping, "/auth/ping", endpoint="ping")
|
||||
api.add_resource(Google, "/auth/google", endpoint="google")
|
||||
api.add_resource(OAuth2, "/auth/oauth2", endpoint="oauth2")
|
||||
api.add_resource(Providers, "/auth/providers", endpoint="providers")
|
||||
|
||||
@@ -1,58 +1,115 @@
|
||||
"""
|
||||
.. module: lemur.authorities.models
|
||||
:platform: unix
|
||||
:synopsis: This module contains all of the models need to create a authority within Lemur.
|
||||
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
|
||||
:synopsis: This module contains all of the models needed to create an authority within Lemur.
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
"""
|
||||
from cryptography import x509
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
import json
|
||||
|
||||
from flask import current_app
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy import Column, Integer, String, Text, func, ForeignKey, DateTime, PassiveDefault, Boolean
|
||||
from sqlalchemy import (
|
||||
Column,
|
||||
Integer,
|
||||
String,
|
||||
Text,
|
||||
func,
|
||||
ForeignKey,
|
||||
DateTime,
|
||||
PassiveDefault,
|
||||
Boolean,
|
||||
)
|
||||
from sqlalchemy.dialects.postgresql import JSON
|
||||
|
||||
from lemur.database import db
|
||||
from lemur.certificates.models import cert_get_cn, cert_get_not_after, cert_get_not_before
|
||||
from lemur.plugins.base import plugins
|
||||
from lemur.models import roles_authorities
|
||||
|
||||
|
||||
class Authority(db.Model):
|
||||
__tablename__ = 'authorities'
|
||||
__tablename__ = "authorities"
|
||||
id = Column(Integer, primary_key=True)
|
||||
owner = Column(String(128))
|
||||
owner = Column(String(128), nullable=False)
|
||||
name = Column(String(128), unique=True)
|
||||
body = Column(Text())
|
||||
chain = Column(Text())
|
||||
bits = Column(Integer())
|
||||
cn = Column(String(128))
|
||||
not_before = Column(DateTime)
|
||||
not_after = Column(DateTime)
|
||||
active = Column(Boolean, default=True)
|
||||
date_created = Column(DateTime, PassiveDefault(func.now()), nullable=False)
|
||||
plugin_name = Column(String(64))
|
||||
description = Column(Text)
|
||||
options = Column(JSON)
|
||||
roles = relationship('Role', backref=db.backref('authority'), lazy='dynamic')
|
||||
user_id = Column(Integer, ForeignKey('users.id'))
|
||||
certificates = relationship("Certificate", backref='authority')
|
||||
date_created = Column(DateTime, PassiveDefault(func.now()), nullable=False)
|
||||
roles = relationship(
|
||||
"Role",
|
||||
secondary=roles_authorities,
|
||||
passive_deletes=True,
|
||||
backref=db.backref("authority"),
|
||||
lazy="dynamic",
|
||||
)
|
||||
user_id = Column(Integer, ForeignKey("users.id"))
|
||||
authority_certificate = relationship(
|
||||
"Certificate",
|
||||
backref="root_authority",
|
||||
uselist=False,
|
||||
foreign_keys="Certificate.root_authority_id",
|
||||
)
|
||||
certificates = relationship(
|
||||
"Certificate", backref="authority", foreign_keys="Certificate.authority_id"
|
||||
)
|
||||
|
||||
def __init__(self, name, owner, plugin_name, body, roles=None, chain=None, description=None):
|
||||
self.name = name
|
||||
self.body = body
|
||||
self.chain = chain
|
||||
self.owner = owner
|
||||
self.plugin_name = plugin_name
|
||||
cert = x509.load_pem_x509_certificate(str(body), default_backend())
|
||||
self.cn = cert_get_cn(cert)
|
||||
self.not_before = cert_get_not_before(cert)
|
||||
self.not_after = cert_get_not_after(cert)
|
||||
self.roles = roles
|
||||
self.description = description
|
||||
authority_pending_certificate = relationship(
|
||||
"PendingCertificate",
|
||||
backref="root_authority",
|
||||
uselist=False,
|
||||
foreign_keys="PendingCertificate.root_authority_id",
|
||||
)
|
||||
pending_certificates = relationship(
|
||||
"PendingCertificate",
|
||||
backref="authority",
|
||||
foreign_keys="PendingCertificate.authority_id",
|
||||
)
|
||||
|
||||
def as_dict(self):
|
||||
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
|
||||
def __init__(self, **kwargs):
|
||||
self.owner = kwargs["owner"]
|
||||
self.roles = kwargs.get("roles", [])
|
||||
self.name = kwargs.get("name")
|
||||
self.description = kwargs.get("description")
|
||||
self.authority_certificate = kwargs["authority_certificate"]
|
||||
self.plugin_name = kwargs["plugin"]["slug"]
|
||||
self.options = kwargs.get("options")
|
||||
|
||||
def serialize(self):
|
||||
blob = self.as_dict()
|
||||
return blob
|
||||
@property
|
||||
def plugin(self):
|
||||
return plugins.get(self.plugin_name)
|
||||
|
||||
@property
|
||||
def is_cab_compliant(self):
|
||||
"""
|
||||
Parse the options to find whether authority is CAB Forum Compliant,
|
||||
i.e., adhering to the CA/Browser Forum Baseline Requirements.
|
||||
Returns None if option is not available
|
||||
"""
|
||||
if not self.options:
|
||||
return None
|
||||
|
||||
for option in json.loads(self.options):
|
||||
if "name" in option and option["name"] == 'cab_compliant':
|
||||
return option["value"]
|
||||
|
||||
return None
|
||||
|
||||
@property
|
||||
def max_issuance_days(self):
|
||||
if self.is_cab_compliant:
|
||||
return current_app.config.get("PUBLIC_CA_MAX_VALIDITY_DAYS", 397)
|
||||
|
||||
@property
|
||||
def default_validity_days(self):
|
||||
if self.is_cab_compliant:
|
||||
return current_app.config.get("PUBLIC_CA_MAX_VALIDITY_DAYS", 397)
|
||||
|
||||
return current_app.config.get("DEFAULT_VALIDITY_DAYS", 365) # 1 year default
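# Illustrative configuration for the two properties above (the keys and defaults come from
# the code; the explicit values here are only an example):
#
#   PUBLIC_CA_MAX_VALIDITY_DAYS = 397   # cap applied to CAB-compliant authorities
#   DEFAULT_VALIDITY_DAYS = 365         # fallback when the authority is not CAB compliant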
|
||||
|
||||
def __repr__(self):
|
||||
return "Authority(name={name})".format(name=self.name)
|
||||
|
||||
150
lemur/authorities/schemas.py
Normal file
@@ -0,0 +1,150 @@
|
||||
"""
|
||||
.. module: lemur.authorities.schemas
|
||||
:platform: unix
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
"""
|
||||
from flask import current_app
|
||||
|
||||
from marshmallow import fields, validates_schema, pre_load
|
||||
from marshmallow import validate
|
||||
from marshmallow.exceptions import ValidationError
|
||||
|
||||
from lemur.schemas import (
|
||||
PluginInputSchema,
|
||||
PluginOutputSchema,
|
||||
ExtensionSchema,
|
||||
AssociatedAuthoritySchema,
|
||||
AssociatedRoleSchema,
|
||||
)
|
||||
from lemur.users.schemas import UserNestedOutputSchema
|
||||
from lemur.common.schema import LemurInputSchema, LemurOutputSchema
|
||||
from lemur.common import validators, missing
|
||||
|
||||
from lemur.common.fields import ArrowDateTime
|
||||
from lemur.constants import CERTIFICATE_KEY_TYPES
|
||||
|
||||
|
||||
class AuthorityInputSchema(LemurInputSchema):
|
||||
name = fields.String(required=True)
|
||||
owner = fields.Email(required=True)
|
||||
description = fields.String()
|
||||
common_name = fields.String(required=True, validate=validators.common_name)
|
||||
|
||||
validity_start = ArrowDateTime()
|
||||
validity_end = ArrowDateTime()
|
||||
validity_years = fields.Integer()
|
||||
|
||||
# certificate body fields
|
||||
organizational_unit = fields.String(
|
||||
missing=lambda: current_app.config.get("LEMUR_DEFAULT_ORGANIZATIONAL_UNIT")
|
||||
)
|
||||
organization = fields.String(
|
||||
missing=lambda: current_app.config.get("LEMUR_DEFAULT_ORGANIZATION")
|
||||
)
|
||||
location = fields.String()
|
||||
country = fields.String(
|
||||
missing=lambda: current_app.config.get("LEMUR_DEFAULT_COUNTRY")
|
||||
)
|
||||
state = fields.String(missing=lambda: current_app.config.get("LEMUR_DEFAULT_STATE"))
|
||||
# Creating a String field instead of Email to allow empty value
|
||||
email = fields.String()
|
||||
|
||||
plugin = fields.Nested(PluginInputSchema)
|
||||
|
||||
# signing related options
|
||||
type = fields.String(validate=validate.OneOf(["root", "subca"]), missing="root")
|
||||
parent = fields.Nested(AssociatedAuthoritySchema)
|
||||
signing_algorithm = fields.String(
|
||||
validate=validate.OneOf(["sha256WithRSA", "sha1WithRSA",
|
||||
"sha256WithECDSA", "SHA384withECDSA", "SHA512withECDSA"]),
|
||||
missing="sha256WithRSA",
|
||||
)
|
||||
key_type = fields.String(
|
||||
validate=validate.OneOf(CERTIFICATE_KEY_TYPES), missing="RSA2048"
|
||||
)
|
||||
key_name = fields.String()
|
||||
sensitivity = fields.String(
|
||||
validate=validate.OneOf(["medium", "high"]), missing="medium"
|
||||
)
|
||||
serial_number = fields.Integer()
|
||||
first_serial = fields.Integer(missing=1)
|
||||
|
||||
extensions = fields.Nested(ExtensionSchema)
|
||||
|
||||
roles = fields.Nested(AssociatedRoleSchema(many=True))
|
||||
|
||||
@validates_schema
|
||||
def validate_dates(self, data):
|
||||
validators.dates(data)
|
||||
|
||||
@validates_schema
|
||||
def validate_subca(self, data):
|
||||
if data["type"] == "subca":
|
||||
if not data.get("parent"):
|
||||
raise ValidationError(
|
||||
"If generating a subca, parent 'authority' must be specified."
|
||||
)
|
||||
|
||||
@pre_load
|
||||
def ensure_dates(self, data):
|
||||
return missing.convert_validity_years(data)
|
||||
|
||||
|
||||
class AuthorityUpdateSchema(LemurInputSchema):
|
||||
owner = fields.Email(required=True)
|
||||
description = fields.String()
|
||||
active = fields.Boolean(missing=True)
|
||||
roles = fields.Nested(AssociatedRoleSchema(many=True))
|
||||
|
||||
|
||||
class RootAuthorityCertificateOutputSchema(LemurOutputSchema):
|
||||
__envelope__ = False
|
||||
id = fields.Integer()
|
||||
active = fields.Boolean()
|
||||
bits = fields.Integer()
|
||||
body = fields.String()
|
||||
chain = fields.String()
|
||||
description = fields.String()
|
||||
name = fields.String()
|
||||
cn = fields.String()
|
||||
not_after = fields.DateTime()
|
||||
not_before = fields.DateTime()
|
||||
owner = fields.Email()
|
||||
status = fields.Boolean()
|
||||
user = fields.Nested(UserNestedOutputSchema)
|
||||
|
||||
|
||||
class AuthorityOutputSchema(LemurOutputSchema):
|
||||
id = fields.Integer()
|
||||
description = fields.String()
|
||||
name = fields.String()
|
||||
owner = fields.Email()
|
||||
plugin = fields.Nested(PluginOutputSchema)
|
||||
active = fields.Boolean()
|
||||
options = fields.Dict()
|
||||
roles = fields.List(fields.Nested(AssociatedRoleSchema))
|
||||
max_issuance_days = fields.Integer()
|
||||
default_validity_days = fields.Integer()
|
||||
authority_certificate = fields.Nested(RootAuthorityCertificateOutputSchema)
|
||||
|
||||
|
||||
class AuthorityNestedOutputSchema(LemurOutputSchema):
|
||||
__envelope__ = False
|
||||
id = fields.Integer()
|
||||
description = fields.String()
|
||||
name = fields.String()
|
||||
owner = fields.Email()
|
||||
plugin = fields.Nested(PluginOutputSchema)
|
||||
active = fields.Boolean()
|
||||
authority_certificate = fields.Nested(RootAuthorityCertificateOutputSchema, only=["not_after", "not_before"])
|
||||
is_cab_compliant = fields.Boolean()
|
||||
max_issuance_days = fields.Integer()
|
||||
default_validity_days = fields.Integer()
|
||||
|
||||
|
||||
authority_update_schema = AuthorityUpdateSchema()
|
||||
authority_input_schema = AuthorityInputSchema()
|
||||
authority_output_schema = AuthorityOutputSchema()
|
||||
authorities_output_schema = AuthorityOutputSchema(many=True)
|
||||
@@ -3,100 +3,145 @@
|
||||
:platform: Unix
|
||||
:synopsis: This module contains all of the service-level functions used to
|
||||
administer authorities in Lemur
|
||||
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
|
||||
"""
|
||||
from flask import g
|
||||
from flask import current_app
|
||||
|
||||
import json
|
||||
|
||||
from lemur import database
|
||||
from lemur.common.utils import truthiness
|
||||
from lemur.extensions import metrics
|
||||
from lemur.authorities.models import Authority
|
||||
from lemur.roles import service as role_service
|
||||
from lemur.notifications import service as notification_service
|
||||
|
||||
from lemur.roles.models import Role
|
||||
from lemur.certificates.models import Certificate
|
||||
from lemur.roles import service as role_service
|
||||
|
||||
from lemur.plugins.base import plugins
|
||||
from lemur.certificates.service import upload
|
||||
|
||||
|
||||
def update(authority_id, description=None, owner=None, active=None, roles=None):
|
||||
def update(authority_id, description, owner, active, roles):
|
||||
"""
|
||||
Update a an authority with new values.
|
||||
Update an authority with new values.
|
||||
|
||||
:param authority_id:
|
||||
:param roles: roles that are allowed to use this authority
|
||||
:rtype : Authority
|
||||
:return:
|
||||
"""
|
||||
authority = get(authority_id)
|
||||
if roles:
|
||||
authority = database.update_list(authority, 'roles', Role, roles)
|
||||
|
||||
if active:
|
||||
authority.active = active
|
||||
|
||||
authority.roles = roles
|
||||
authority.active = active
|
||||
authority.description = description
|
||||
authority.owner = owner
|
||||
|
||||
return database.update(authority)
|
||||
|
||||
|
||||
def create(kwargs):
|
||||
def update_options(authority_id, options):
|
||||
"""
|
||||
Create a new authority.
|
||||
Update an authority with new options.
|
||||
|
||||
:rtype : Authority
|
||||
:param authority_id:
|
||||
:param options: the new options to be saved into the authority
|
||||
:return:
|
||||
"""
|
||||
|
||||
issuer = plugins.get(kwargs.get('pluginName'))
|
||||
authority = get(authority_id)
|
||||
|
||||
kwargs['creator'] = g.current_user.email
|
||||
cert_body, intermediate, issuer_roles = issuer.create_authority(kwargs)
|
||||
authority.options = options
|
||||
|
||||
cert = Certificate(cert_body, chain=intermediate)
|
||||
cert.owner = kwargs['ownerEmail']
|
||||
cert.description = "This is the ROOT certificate for the {0} certificate authority".format(kwargs.get('caName'))
|
||||
cert.user = g.current_user
|
||||
return database.update(authority)
|
||||
|
||||
cert.notifications = notification_service.create_default_expiration_notifications(
|
||||
'DEFAULT_SECURITY',
|
||||
current_app.config.get('LEMUR_SECURITY_TEAM_EMAIL')
|
||||
|
||||
def mint(**kwargs):
|
||||
"""
|
||||
Creates the authority based on the plugin provided.
|
||||
"""
|
||||
issuer = kwargs["plugin"]["plugin_object"]
|
||||
values = issuer.create_authority(kwargs)
|
||||
|
||||
# support older plugins
|
||||
if len(values) == 3:
|
||||
body, chain, roles = values
|
||||
private_key = None
|
||||
elif len(values) == 4:
|
||||
body, private_key, chain, roles = values
|
||||
|
||||
roles = create_authority_roles(
|
||||
roles,
|
||||
kwargs["owner"],
|
||||
kwargs["plugin"]["plugin_object"].title,
|
||||
kwargs["creator"],
|
||||
)
|
||||
return body, private_key, chain, roles
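# Plugin contract sketch (inferred from the handling above, not a definitive spec): an issuer
# plugin's create_authority() is expected to return either
#
#   body, chain, roles                  # older plugins, no exportable private key
#   body, private_key, chain, roles     # newer plugins
#
# where roles is a list of dicts such as [{"username": "", "password": "", "name": "example_role"}],
# which create_authority_roles() below turns into Lemur Role objects.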
|
||||
|
||||
# we create and attach any roles that the issuer gives us
|
||||
|
||||
def create_authority_roles(roles, owner, plugin_title, creator):
|
||||
"""
|
||||
Creates all of the necessary authority roles.
|
||||
:param creator:
|
||||
:param roles:
|
||||
:return:
|
||||
"""
|
||||
role_objs = []
|
||||
for r in issuer_roles:
|
||||
|
||||
role = role_service.create(
|
||||
r['name'],
|
||||
password=r['password'],
|
||||
description="{0} auto generated role".format(kwargs.get('pluginName')),
|
||||
username=r['username'])
|
||||
for r in roles:
|
||||
role = role_service.get_by_name(r["name"])
|
||||
if not role:
|
||||
role = role_service.create(
|
||||
r["name"],
|
||||
password=r["password"],
|
||||
description="Auto generated role for {0}".format(plugin_title),
|
||||
username=r["username"],
|
||||
)
|
||||
|
||||
# the user creating the authority should be able to administer it
|
||||
if role.username == 'admin':
|
||||
g.current_user.roles.append(role)
|
||||
if role.username == "admin":
|
||||
creator.roles.append(role)
|
||||
|
||||
role_objs.append(role)
|
||||
|
||||
authority = Authority(
|
||||
kwargs.get('caName'),
|
||||
kwargs['ownerEmail'],
|
||||
kwargs['pluginName'],
|
||||
cert_body,
|
||||
description=kwargs['caDescription'],
|
||||
chain=intermediate,
|
||||
roles=role_objs
|
||||
)
|
||||
# create a role for the owner and assign it
|
||||
owner_role = role_service.get_by_name(owner)
|
||||
if not owner_role:
|
||||
owner_role = role_service.create(
|
||||
owner, description="Auto generated role based on owner: {0}".format(owner)
|
||||
)
|
||||
|
||||
database.update(cert)
|
||||
role_objs.append(owner_role)
|
||||
return role_objs
|
||||
|
||||
|
||||
def create(**kwargs):
|
||||
"""
|
||||
Creates a new authority.
|
||||
"""
|
||||
body, private_key, chain, roles = mint(**kwargs)
|
||||
|
||||
kwargs["creator"].roles = list(set(list(kwargs["creator"].roles) + roles))
|
||||
|
||||
kwargs["body"] = body
|
||||
kwargs["private_key"] = private_key
|
||||
kwargs["chain"] = chain
|
||||
|
||||
if kwargs.get("roles"):
|
||||
kwargs["roles"] += roles
|
||||
else:
|
||||
kwargs["roles"] = roles
|
||||
|
||||
cert = upload(**kwargs)
|
||||
kwargs["authority_certificate"] = cert
|
||||
if kwargs.get("plugin", {}).get("plugin_options", []):
|
||||
kwargs["options"] = json.dumps(kwargs["plugin"]["plugin_options"])
|
||||
|
||||
authority = Authority(**kwargs)
|
||||
authority = database.create(authority)
|
||||
kwargs["creator"].authorities.append(authority)
|
||||
|
||||
g.current_user.authorities.append(authority)
|
||||
|
||||
metrics.send(
|
||||
"authority_created", "counter", 1, metric_tags=dict(owner=authority.owner)
|
||||
)
|
||||
return authority
|
||||
|
||||
|
||||
@@ -115,7 +160,6 @@ def get(authority_id):
|
||||
"""
|
||||
Retrieves an authority given it's ID
|
||||
|
||||
:rtype : Authority
|
||||
:param authority_id:
|
||||
:return:
|
||||
"""
|
||||
@@ -127,28 +171,22 @@ def get_by_name(authority_name):
|
||||
Retrieves an authority given it's name.
|
||||
|
||||
:param authority_name:
|
||||
:rtype : Authority
|
||||
:return:
|
||||
"""
|
||||
return database.get(Authority, authority_name, field='name')
|
||||
return database.get(Authority, authority_name, field="name")
|
||||
|
||||
|
||||
def get_authority_role(ca_name):
|
||||
def get_authority_role(ca_name, creator=None):
|
||||
"""
|
||||
Attempts to get the authority role for a given CA, using current_user
|
||||
as a basis for accomplishing that.
|
||||
|
||||
:param ca_name:
|
||||
"""
|
||||
if g.current_user.is_admin:
|
||||
authority = get_by_name(ca_name)
|
||||
# TODO we should pick admin ca roles for admin
|
||||
return authority.roles[0]
|
||||
else:
|
||||
for role in g.current_user.roles:
|
||||
if role.authority:
|
||||
if role.authority.name == ca_name:
|
||||
return role
|
||||
if creator:
|
||||
if creator.is_admin:
|
||||
return role_service.get_by_name("{0}_admin".format(ca_name))
|
||||
return role_service.get_by_name("{0}_operator".format(ca_name))
|
||||
|
||||
|
||||
def render(args):
|
||||
@@ -158,30 +196,33 @@ def render(args):
|
||||
:return:
|
||||
"""
|
||||
query = database.session_query(Authority)
|
||||
sort_by = args.pop('sort_by')
|
||||
sort_dir = args.pop('sort_dir')
|
||||
page = args.pop('page')
|
||||
count = args.pop('count')
|
||||
filt = args.pop('filter')
|
||||
filt = args.pop("filter")
|
||||
|
||||
if filt:
|
||||
terms = filt.split(';')
|
||||
if 'active' in filt: # this is really weird but strcmp seems to not work here??
|
||||
query = query.filter(Authority.active == terms[1])
|
||||
terms = filt.split(";")
|
||||
if "active" in filt:
|
||||
query = query.filter(Authority.active == truthiness(terms[1]))
|
||||
elif "cn" in filt:
|
||||
term = "%{0}%".format(terms[1])
|
||||
sub_query = (
|
||||
database.session_query(Certificate.root_authority_id)
|
||||
.filter(Certificate.cn.ilike(term))
|
||||
.subquery()
|
||||
)
|
||||
|
||||
query = query.filter(Authority.id.in_(sub_query))
|
||||
else:
|
||||
query = database.filter(query, Authority, terms)
|
||||
|
||||
# we make sure that a user can only use an authority they either own are are a member of - admins can see all
|
||||
if not g.current_user.is_admin:
|
||||
# we make sure that a user can only use an authority they either own or are a member of - admins can see all
|
||||
if not args["user"].is_admin:
|
||||
authority_ids = []
|
||||
for role in g.current_user.roles:
|
||||
if role.authority:
|
||||
authority_ids.append(role.authority.id)
|
||||
for authority in args["user"].authorities:
|
||||
authority_ids.append(authority.id)
|
||||
|
||||
for role in args["user"].roles:
|
||||
for authority in role.authorities:
|
||||
authority_ids.append(authority.id)
|
||||
query = query.filter(Authority.id.in_(authority_ids))
|
||||
|
||||
query = database.find_all(query, Authority, args)
|
||||
|
||||
if sort_by and sort_dir:
|
||||
query = database.sort(query, Authority, sort_by, sort_dir)
|
||||
|
||||
return database.paginate(query, page, count)
|
||||
return database.sort_and_page(query, Authority, args)
|
||||
|
||||
@@ -1,48 +1,41 @@
|
||||
"""
|
||||
.. module: lemur.authorities.views
|
||||
:platform: Unix
|
||||
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
"""
|
||||
from flask import Blueprint, g
|
||||
from flask.ext.restful import reqparse, fields, Api
|
||||
from flask_restful import reqparse, Api
|
||||
|
||||
from lemur.authorities import service
|
||||
from lemur.roles import service as role_service
|
||||
from lemur.certificates import service as certificate_service
|
||||
from lemur.common.utils import paginated_parser
|
||||
from lemur.common.schema import validate_schema
|
||||
from lemur.auth.service import AuthenticatedResource
|
||||
|
||||
from lemur.auth.permissions import AuthorityPermission
|
||||
|
||||
from lemur.common.utils import paginated_parser, marshal_items
|
||||
from lemur.certificates import service as certificate_service
|
||||
|
||||
from lemur.authorities import service
|
||||
from lemur.authorities.schemas import (
|
||||
authority_input_schema,
|
||||
authority_output_schema,
|
||||
authorities_output_schema,
|
||||
authority_update_schema,
|
||||
)
|
||||
|
||||
|
||||
FIELDS = {
|
||||
'name': fields.String,
|
||||
'owner': fields.String,
|
||||
'description': fields.String,
|
||||
'options': fields.Raw,
|
||||
'pluginName': fields.String,
|
||||
'body': fields.String,
|
||||
'chain': fields.String,
|
||||
'active': fields.Boolean,
|
||||
'notBefore': fields.DateTime(dt_format='iso8601', attribute='not_before'),
|
||||
'notAfter': fields.DateTime(dt_format='iso8601', attribute='not_after'),
|
||||
'id': fields.Integer,
|
||||
}
|
||||
|
||||
mod = Blueprint('authorities', __name__)
|
||||
mod = Blueprint("authorities", __name__)
|
||||
api = Api(mod)
|
||||
|
||||
|
||||
class AuthoritiesList(AuthenticatedResource):
|
||||
""" Defines the 'authorities' endpoint """
|
||||
|
||||
def __init__(self):
|
||||
self.reqparse = reqparse.RequestParser()
|
||||
super(AuthoritiesList, self).__init__()
|
||||
|
||||
@marshal_items(FIELDS)
|
||||
@validate_schema(None, authorities_output_schema)
|
||||
def get(self):
|
||||
"""
|
||||
.. http:get:: /authorities
|
||||
@@ -66,28 +59,52 @@ class AuthoritiesList(AuthenticatedResource):
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"items": [
|
||||
{
|
||||
"id": 1,
|
||||
"name": "authority1",
|
||||
"description": "this is authority1",
|
||||
"pluginName": null,
|
||||
"chain": "-----Begin ...",
|
||||
"body": "-----Begin ...",
|
||||
"active": true,
|
||||
"notBefore": "2015-06-05T17:09:39",
|
||||
"notAfter": "2015-06-10T17:09:39"
|
||||
"options": null
|
||||
}
|
||||
]
|
||||
"items": [{
|
||||
"name": "TestAuthority",
|
||||
"roles": [{
|
||||
"id": 123,
|
||||
"name": "secure@example.com"
|
||||
}, {
|
||||
"id": 564,
|
||||
"name": "TestAuthority_admin"
|
||||
}, {
|
||||
"id": 565,
|
||||
"name": "TestAuthority_operator"
|
||||
}],
|
||||
"options": null,
|
||||
"active": true,
|
||||
"authorityCertificate": {
|
||||
"body": "-----BEGIN CERTIFICATE-----IyMzU5MTVaMHk...",
|
||||
"status": true,
|
||||
"cn": "AcommonName",
|
||||
"description": "This is the ROOT certificate for the TestAuthority certificate authority.",
|
||||
"chain": "",
|
||||
"notBefore": "2016-06-02T00:00:15+00:00",
|
||||
"notAfter": "2023-06-02T23:59:15+00:00",
|
||||
"owner": "secure@example.com",
|
||||
"user": {
|
||||
"username": "joe@example.com",
|
||||
"active": true,
|
||||
"email": "joe@example.com",
|
||||
"id": 3
|
||||
},
|
||||
"active": true,
|
||||
"bits": 2048,
|
||||
"id": 2235,
|
||||
"name": "TestAuthority"
|
||||
},
|
||||
"owner": "secure@example.com",
|
||||
"id": 43,
|
||||
"description": "This is the ROOT certificate for the TestAuthority certificate authority."
|
||||
}],
|
||||
"total": 1
|
||||
}
|
||||
|
||||
:query sortBy: field to sort on
|
||||
:query sortDir: acs or desc
|
||||
:query page: int. default is 1
|
||||
:query filter: key value pair. format is k=v;
|
||||
:query limit: limit number. default is 10
|
||||
:query sortDir: asc or desc
|
||||
:query page: int default is 1
|
||||
:query filter: key value pair. format is k;v
|
||||
:query count: count number default is 10
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
@@ -96,10 +113,11 @@ class AuthoritiesList(AuthenticatedResource):
|
||||
"""
|
||||
parser = paginated_parser.copy()
|
||||
args = parser.parse_args()
|
||||
args["user"] = g.current_user
|
||||
return service.render(args)
|
||||
|
||||
@marshal_items(FIELDS)
|
||||
def post(self):
|
||||
@validate_schema(authority_input_schema, authority_output_schema)
|
||||
def post(self, data=None):
|
||||
"""
|
||||
.. http:post:: /authorities
|
||||
|
||||
@@ -113,31 +131,31 @@ class AuthoritiesList(AuthenticatedResource):
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
{
|
||||
"caDN": {
|
||||
"country": "US",
|
||||
"state": "CA",
|
||||
"location": "A Location",
|
||||
"organization": "ExampleInc",
|
||||
"organizationalUnit": "Operations",
|
||||
"commonName": "a common name"
|
||||
},
|
||||
"caType": "root",
|
||||
"caSigningAlgo": "sha256WithRSA",
|
||||
"caSensitivity": "medium",
|
||||
{
|
||||
"country": "US",
|
||||
"state": "California",
|
||||
"location": "Los Gatos",
|
||||
"organization": "Netflix",
|
||||
"organizationalUnit": "Operations",
|
||||
"type": "root",
|
||||
"signingAlgorithm": "sha256WithRSA",
|
||||
"sensitivity": "medium",
|
||||
"keyType": "RSA2048",
|
||||
"pluginName": "cloudca",
|
||||
"validityStart": "2015-06-11T07:00:00.000Z",
|
||||
"validityEnd": "2015-06-13T07:00:00.000Z",
|
||||
"caName": "DoctestCA",
|
||||
"ownerEmail": "jimbob@example.com",
|
||||
"caDescription": "Example CA",
|
||||
"extensions": {
|
||||
"subAltNames": {
|
||||
"names": []
|
||||
}
|
||||
"plugin": {
|
||||
"slug": "cloudca-issuer"
|
||||
},
|
||||
}
|
||||
"name": "TimeTestAuthority5",
|
||||
"owner": "secure@example.com",
|
||||
"description": "test",
|
||||
"commonName": "AcommonName",
|
||||
"validityYears": "20",
|
||||
"extensions": {
|
||||
"subAltNames": {
|
||||
"names": []
|
||||
},
|
||||
"custom": []
|
||||
}
|
||||
}
|
||||
|
||||
**Example response**:
|
||||
|
||||
@@ -148,57 +166,68 @@ class AuthoritiesList(AuthenticatedResource):
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"id": 1,
|
||||
"name": "authority1",
|
||||
"description": "this is authority1",
|
||||
"pluginName": null,
|
||||
"chain": "-----Begin ...",
|
||||
"body": "-----Begin ...",
|
||||
"name": "TestAuthority",
|
||||
"roles": [{
|
||||
"id": 123,
|
||||
"name": "secure@example.com"
|
||||
}, {
|
||||
"id": 564,
|
||||
"name": "TestAuthority_admin"
|
||||
}, {
|
||||
"id": 565,
|
||||
"name": "TestAuthority_operator"
|
||||
}],
|
||||
"options": null,
|
||||
"active": true,
|
||||
"notBefore": "2015-06-05T17:09:39",
|
||||
"notAfter": "2015-06-10T17:09:39"
|
||||
"options": null
|
||||
"authorityCertificate": {
|
||||
"body": "-----BEGIN CERTIFICATE-----IyMzU5MTVaMHk...",
|
||||
"status": true,
|
||||
"cn": "AcommonName",
|
||||
"description": "This is the ROOT certificate for the TestAuthority certificate authority.",
|
||||
"chain": "",
|
||||
"notBefore": "2016-06-02T00:00:15+00:00",
|
||||
"notAfter": "2023-06-02T23:59:15+00:00",
|
||||
"owner": "secure@example.com",
|
||||
"user": {
|
||||
"username": "joe@example.com",
|
||||
"active": true,
|
||||
"email": "joe@example.com",
|
||||
"id": 3
|
||||
},
|
||||
"active": true,
|
||||
"bits": 2048,
|
||||
"id": 2235,
|
||||
"name": "TestAuthority"
|
||||
},
|
||||
"owner": "secure@example.com",
|
||||
"id": 43,
|
||||
"description": "This is the ROOT certificate for the TestAuthority certificate authority."
|
||||
}
|
||||
|
||||
:arg caName: authority's name
|
||||
:arg caDescription: a sensible description about what the CA with be used for
|
||||
:arg ownerEmail: the team or person who 'owns' this authority
|
||||
|
||||
:arg name: authority's name
|
||||
:arg description: a sensible description about what the CA will be used for
|
||||
:arg owner: the team or person who 'owns' this authority
|
||||
:arg validityStart: when this authority should start issuing certificates
|
||||
:arg validityEnd: when this authority should stop issuing certificates
|
||||
:arg validityYears: starting from `now` how many years into the future the authority should be valid
|
||||
:arg extensions: certificate extensions
|
||||
:arg pluginName: name of the plugin to create the authority
|
||||
:arg caType: the type of authority (root/subca)
|
||||
:arg caParent: the parent authority if this is to be a subca
|
||||
:arg caSigningAlgo: algorithm used to sign the authority
|
||||
:arg plugin: name of the plugin to create the authority
|
||||
:arg type: the type of authority (root/subca)
|
||||
:arg parent: the parent authority if this is to be a subca
|
||||
:arg signingAlgorithm: algorithm used to sign the authority
|
||||
:arg keyType: key type
|
||||
:arg caSensitivity: the sensitivity of the root key, for CloudCA this determines if the root keys are stored
|
||||
:arg sensitivity: the sensitivity of the root key, for CloudCA this determines if the root keys are stored
|
||||
in an HSM
|
||||
:arg caKeyName: name of the key to store in the HSM (CloudCA)
|
||||
:arg caSerialNumber: serial number of the authority
|
||||
:arg caFirstSerial: specifies the starting serial number for certificates issued off of this authority
|
||||
:arg keyName: name of the key to store in the HSM (CloudCA)
|
||||
:arg serialNumber: serial number of the authority
|
||||
:arg firstSerial: specifies the starting serial number for certificates issued off of this authority
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 403: unauthenticated
|
||||
:statuscode 200: no error
|
||||
"""
|
||||
self.reqparse.add_argument('caName', type=str, location='json', required=True)
self.reqparse.add_argument('caDescription', type=str, location='json', required=False)
self.reqparse.add_argument('ownerEmail', type=str, location='json', required=True)
self.reqparse.add_argument('caDN', type=dict, location='json', required=False)
self.reqparse.add_argument('validityStart', type=str, location='json', required=False)  # TODO validate
self.reqparse.add_argument('validityEnd', type=str, location='json', required=False)  # TODO validate
self.reqparse.add_argument('extensions', type=dict, location='json', required=False)
self.reqparse.add_argument('pluginName', type=str, location='json', required=True)
self.reqparse.add_argument('caType', type=str, location='json', required=False)
self.reqparse.add_argument('caParent', type=str, location='json', required=False)
self.reqparse.add_argument('caSigningAlgo', type=str, location='json', required=False)
self.reqparse.add_argument('keyType', type=str, location='json', required=False)
self.reqparse.add_argument('caSensitivity', type=str, location='json', required=False)
self.reqparse.add_argument('caKeyName', type=str, location='json', required=False)
self.reqparse.add_argument('caSerialNumber', type=int, location='json', required=False)
self.reqparse.add_argument('caFirstSerial', type=int, location='json', required=False)

args = self.reqparse.parse_args()
return service.create(args)
data["creator"] = g.current_user
return service.create(**data)
|
||||
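For illustration only, a minimal client-side sketch of the create-authority call documented above. The field names follow the :arg list in the docstring; the base URL, token value, and plugin name are hypothetical placeholders, and the exact payload nesting enforced by the input schema is not reproduced here.

import requests

LEMUR_API = "https://lemur.example.com/api/1"   # hypothetical base URL
TOKEN = "..."                                    # OAuth token obtained out of band

payload = {
    "name": "TestAuthority",
    "owner": "secure@example.com",
    "description": "Internal test root CA",
    "plugin": "cryptography-issuer",             # hypothetical plugin name
    "type": "root",
    "signingAlgorithm": "sha256WithRSA",
    "keyType": "RSA2048",
    "sensitivity": "medium",
}

resp = requests.post(
    "{0}/authorities".format(LEMUR_API),
    json=payload,
    headers={"Authorization": "Bearer {0}".format(TOKEN)},
)
resp.raise_for_status()
print(resp.json()["id"])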
|
||||
|
||||
class Authorities(AuthenticatedResource):
|
||||
@ -206,7 +235,7 @@ class Authorities(AuthenticatedResource):
|
||||
self.reqparse = reqparse.RequestParser()
|
||||
super(Authorities, self).__init__()
|
||||
|
||||
@marshal_items(FIELDS)
|
||||
@validate_schema(None, authority_output_schema)
|
||||
def get(self, authority_id):
|
||||
"""
|
||||
.. http:get:: /authorities/1
|
||||
@ -230,30 +259,40 @@ class Authorities(AuthenticatedResource):
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"id": 1,
|
||||
"name": "authority1",
|
||||
"description": "this is authority1",
|
||||
"pluginName": null,
|
||||
"chain": "-----Begin ...",
|
||||
"body": "-----Begin ...",
|
||||
"roles": [{
|
||||
"id": 123,
|
||||
"name": "secure@example.com"
|
||||
}, {
|
||||
"id": 564,
|
||||
"name": "TestAuthority_admin"
|
||||
}, {
|
||||
"id": 565,
|
||||
"name": "TestAuthority_operator"
|
||||
}],
|
||||
"active": true,
|
||||
"notBefore": "2015-06-05T17:09:39",
|
||||
"notAfter": "2015-06-10T17:09:39"
|
||||
"options": null
|
||||
"owner": "secure@example.com",
|
||||
"id": 43,
|
||||
"description": "This is the ROOT certificate for the TestAuthority certificate authority."
|
||||
}
|
||||
|
||||
:arg description: a sensible description about what the CA will be used for
:arg owner: the team or person who 'owns' this authority
:arg active: set whether this authority is currently in use
:reqheader Authorization: OAuth token to authenticate
:statuscode 403: unauthenticated
:statuscode 200: no error
:reqheader Authorization: OAuth token to authenticate
:statuscode 200: no error
:statuscode 403: unauthenticated
"""
|
||||
return service.get(authority_id)
|
||||
|
||||
@marshal_items(FIELDS)
|
||||
def put(self, authority_id):
|
||||
@validate_schema(authority_update_schema, authority_output_schema)
|
||||
def put(self, authority_id, data=None):
|
||||
"""
|
||||
.. http:put:: /authorities/1
|
||||
|
||||
Update a authority
|
||||
Update an authority
|
||||
|
||||
**Example request**:
|
||||
|
||||
@ -264,11 +303,42 @@ class Authorities(AuthenticatedResource):
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
{
|
||||
"roles": [],
|
||||
"active": false,
|
||||
"owner": "bob@example.com",
|
||||
"description": "this is authority1"
|
||||
}
|
||||
"name": "TestAuthority5",
|
||||
"roles": [{
|
||||
"id": 566,
|
||||
"name": "TestAuthority5_admin"
|
||||
}, {
|
||||
"id": 567,
|
||||
"name": "TestAuthority5_operator"
|
||||
}, {
|
||||
"id": 123,
|
||||
"name": "secure@example.com"
|
||||
}],
|
||||
"active": true,
|
||||
"authorityCertificate": {
|
||||
"body": "-----BEGIN CERTIFICATE-----",
|
||||
"status": null,
|
||||
"cn": "AcommonName",
|
||||
"description": "This is the ROOT certificate for the TestAuthority5 certificate authority.",
|
||||
"chain": "",
|
||||
"notBefore": "2016-06-03T00:00:51+00:00",
|
||||
"notAfter": "2036-06-03T23:59:51+00:00",
|
||||
"owner": "secure@example.com",
|
||||
"user": {
|
||||
"username": "joe@example.com",
|
||||
"active": true,
|
||||
"email": "joe@example.com",
|
||||
"id": 3
|
||||
},
|
||||
"active": true,
|
||||
"bits": 2048,
|
||||
"id": 2280,
|
||||
"name": "TestAuthority5"
|
||||
},
|
||||
"owner": "secure@example.com",
|
||||
"id": 44,
|
||||
"description": "This is the ROOT certificate for the TestAuthority5 certificate authority."
|
||||
}
|
||||
|
||||
**Example response**:
|
||||
|
||||
@ -279,64 +349,74 @@ class Authorities(AuthenticatedResource):
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"id": 1,
|
||||
"name": "authority1",
|
||||
"description": "this is authority1",
|
||||
"pluginName": null,
|
||||
"chain": "-----begin ...",
|
||||
"body": "-----begin ...",
|
||||
"active": false,
|
||||
"notBefore": "2015-06-05t17:09:39",
|
||||
"notAfter": "2015-06-10t17:09:39"
|
||||
"options": null
|
||||
"name": "TestAuthority",
|
||||
"roles": [{
|
||||
"id": 123,
|
||||
"name": "secure@example.com"
|
||||
}, {
|
||||
"id": 564,
|
||||
"name": "TestAuthority_admin"
|
||||
}, {
|
||||
"id": 565,
|
||||
"name": "TestAuthority_operator"
|
||||
}],
|
||||
"options": null,
|
||||
"active": true,
|
||||
"authorityCertificate": {
|
||||
"body": "-----BEGIN CERTIFICATE-----IyMzU5MTVaMHk...",
|
||||
"status": true,
|
||||
"cn": "AcommonName",
|
||||
"description": "This is the ROOT certificate for the TestAuthority certificate authority.",
|
||||
"chain": "",
|
||||
"notBefore": "2016-06-02T00:00:15+00:00",
|
||||
"notAfter": "2023-06-02T23:59:15+00:00",
|
||||
"owner": "secure@example.com",
|
||||
"user": {
|
||||
"username": "joe@example.com",
|
||||
"active": true,
|
||||
"email": "joe@example.com",
|
||||
"id": 3
|
||||
},
|
||||
"active": true,
|
||||
"bits": 2048,
|
||||
"id": 2235,
|
||||
"name": "TestAuthority"
|
||||
},
|
||||
"owner": "secure@example.com",
|
||||
"id": 43,
|
||||
"description": "This is the ROOT certificate for the TestAuthority certificate authority."
|
||||
}
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
"""
|
||||
self.reqparse.add_argument('roles', type=list, default=[], location='json')
|
||||
self.reqparse.add_argument('active', type=str, location='json', required=True)
|
||||
self.reqparse.add_argument('owner', type=str, location='json', required=True)
|
||||
self.reqparse.add_argument('description', type=str, location='json', required=True)
|
||||
args = self.reqparse.parse_args()
|
||||
|
||||
authority = service.get(authority_id)
|
||||
role = role_service.get_by_name(authority.owner)
|
||||
|
||||
if not authority:
|
||||
return dict(message="Not Found"), 404
|
||||
|
||||
# all the authority role members should be allowed
|
||||
roles = [x.name for x in authority.roles]
|
||||
|
||||
# allow "owner" roles by team DL
|
||||
roles.append(role)
|
||||
permission = AuthorityPermission(authority_id, roles)
|
||||
|
||||
# we want to make sure that we cannot add roles that we are not members of
|
||||
if not g.current_user.is_admin:
|
||||
role_ids = set([r['id'] for r in args['roles']])
|
||||
user_role_ids = set([r.id for r in g.current_user.roles])
|
||||
|
||||
if not role_ids.issubset(user_role_ids):
|
||||
return dict(message="You are not allowed to associate a role which you are not a member of"), 400
|
||||
|
||||
if permission.can():
|
||||
return service.update(
|
||||
authority_id,
|
||||
owner=args['owner'],
|
||||
description=args['description'],
|
||||
active=args['active'],
|
||||
roles=args['roles']
|
||||
owner=data["owner"],
|
||||
description=data["description"],
|
||||
active=data["active"],
|
||||
roles=data["roles"],
|
||||
)
|
||||
|
||||
return dict(message="You are not authorized to update this authority"), 403
|
||||
return dict(message="You are not authorized to update this authority."), 403
|
||||
|
||||
|
||||
class CertificateAuthority(AuthenticatedResource):
|
||||
def __init__(self):
|
||||
self.reqparse = reqparse.RequestParser()
|
||||
super(CertificateAuthority, self).__init__()
|
||||
|
||||
@marshal_items(FIELDS)
|
||||
@validate_schema(None, authority_output_schema)
|
||||
def get(self, certificate_id):
|
||||
"""
|
||||
.. http:get:: /certificates/1/authority
|
||||
@ -360,16 +440,42 @@ class CertificateAuthority(AuthenticatedResource):
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"id": 1,
|
||||
"name": "authority1",
|
||||
"description": "this is authority1",
|
||||
"pluginName": null,
|
||||
"chain": "-----Begin ...",
|
||||
"body": "-----Begin ...",
|
||||
"name": "TestAuthority",
|
||||
"roles": [{
|
||||
"id": 123,
|
||||
"name": "secure@example.com"
|
||||
}, {
|
||||
"id": 564,
|
||||
"name": "TestAuthority_admin"
|
||||
}, {
|
||||
"id": 565,
|
||||
"name": "TestAuthority_operator"
|
||||
}],
|
||||
"options": null,
|
||||
"active": true,
|
||||
"notBefore": "2015-06-05T17:09:39",
|
||||
"notAfter": "2015-06-10T17:09:39"
|
||||
"options": null
|
||||
"authorityCertificate": {
|
||||
"body": "-----BEGIN CERTIFICATE-----IyMzU5MTVaMHk...",
|
||||
"status": true,
|
||||
"cn": "AcommonName",
|
||||
"description": "This is the ROOT certificate for the TestAuthority certificate authority.",
|
||||
"chain": "",
|
||||
"notBefore": "2016-06-02T00:00:15+00:00",
|
||||
"notAfter": "2023-06-02T23:59:15+00:00",
|
||||
"owner": "secure@example.com",
|
||||
"user": {
|
||||
"username": "joe@example.com",
|
||||
"active": true,
|
||||
"email": "joe@example.com",
|
||||
"id": 3
|
||||
},
|
||||
"active": true,
|
||||
"bits": 2048,
|
||||
"id": 2235,
|
||||
"name": "TestAuthority"
|
||||
},
|
||||
"owner": "secure@example.com",
|
||||
"id": 43,
|
||||
"description": "This is the ROOT certificate for the TestAuthority certificate authority."
|
||||
}
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
@ -378,10 +484,48 @@ class CertificateAuthority(AuthenticatedResource):
|
||||
"""
|
||||
cert = certificate_service.get(certificate_id)
|
||||
if not cert:
|
||||
return dict(message="Certificate not found"), 404
|
||||
return dict(message="Certificate not found."), 404
|
||||
|
||||
return cert.authority
|
||||
|
||||
api.add_resource(AuthoritiesList, '/authorities', endpoint='authorities')
|
||||
api.add_resource(Authorities, '/authorities/<int:authority_id>', endpoint='authority')
|
||||
api.add_resource(CertificateAuthority, '/certificates/<int:certificate_id>/authority', endpoint='certificateAuthority')
|
||||
|
||||
class AuthorityVisualizations(AuthenticatedResource):
|
||||
def get(self, authority_id):
|
||||
"""
|
||||
{"name": "flare",
|
||||
"children": [
|
||||
{
|
||||
"name": "analytics",
|
||||
"children": [
|
||||
{
|
||||
"name": "cluster",
|
||||
"children": [
|
||||
{"name": "AgglomerativeCluster", "size": 3938},
|
||||
{"name": "CommunityStructure", "size": 3812},
|
||||
{"name": "HierarchicalCluster", "size": 6714},
|
||||
{"name": "MergeEdge", "size": 743}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]}
|
||||
"""
|
||||
authority = service.get(authority_id)
|
||||
return dict(
|
||||
name=authority.name,
|
||||
children=[{"name": c.name} for c in authority.certificates],
|
||||
)
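As a small sketch of how the dict returned above could be expanded into the d3 "flare" shape quoted in the docstring (the size values are arbitrary placeholders, not part of Lemur's response):

def to_flare(viz):
    # viz is the dict built by AuthorityVisualizations.get:
    # {"name": <authority name>, "children": [{"name": <certificate name>}, ...]}
    return {
        "name": viz["name"],
        "children": [
            {"name": child["name"], "size": 1}  # placeholder size for d3 layouts
            for child in viz.get("children", [])
        ],
    }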
|
||||
|
||||
|
||||
api.add_resource(AuthoritiesList, "/authorities", endpoint="authorities")
|
||||
api.add_resource(Authorities, "/authorities/<int:authority_id>", endpoint="authority")
|
||||
api.add_resource(
|
||||
AuthorityVisualizations,
|
||||
"/authorities/<int:authority_id>/visualize",
|
||||
endpoint="authority_visualizations",
|
||||
)
|
||||
api.add_resource(
|
||||
CertificateAuthority,
|
||||
"/certificates/<int:certificate_id>/authority",
|
||||
endpoint="certificateAuthority",
|
||||
)
|
||||
|
||||
0
lemur/authorizations/__init__.py
Normal file
34
lemur/authorizations/models.py
Normal file
@ -0,0 +1,34 @@
|
||||
"""
|
||||
.. module: lemur.authorizations.models
|
||||
:platform: unix
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Netflix Secops <secops@netflix.com>
|
||||
"""
|
||||
from sqlalchemy import Column, Integer, String
|
||||
from sqlalchemy_utils import JSONType
|
||||
from lemur.database import db
|
||||
|
||||
from lemur.plugins.base import plugins
|
||||
|
||||
|
||||
class Authorization(db.Model):
|
||||
__tablename__ = "pending_dns_authorizations"
|
||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||
account_number = Column(String(128))
|
||||
domains = Column(JSONType)
|
||||
dns_provider_type = Column(String(128))
|
||||
options = Column(JSONType)
|
||||
|
||||
@property
|
||||
def plugin(self):
|
||||
return plugins.get(self.plugin_name)
|
||||
|
||||
def __repr__(self):
|
||||
return "Authorization(id={id})".format(id=self.id)
|
||||
|
||||
def __init__(self, account_number, domains, dns_provider_type, options=None):
|
||||
self.account_number = account_number
|
||||
self.domains = domains
|
||||
self.dns_provider_type = dns_provider_type
|
||||
self.options = options
|
||||
24
lemur/authorizations/service.py
Normal file
@ -0,0 +1,24 @@
|
||||
"""
|
||||
.. module: lemur.pending_certificates.service
|
||||
Copyright (c) 2018 and onwards Netflix, Inc. All rights reserved.
|
||||
.. moduleauthor:: Secops <secops@netflix.com>
|
||||
"""
|
||||
from lemur import database
|
||||
|
||||
from lemur.authorizations.models import Authorization
|
||||
|
||||
|
||||
def get(authorization_id):
|
||||
"""
|
||||
Retrieve dns authorization by ID
|
||||
"""
|
||||
return database.get(Authorization, authorization_id)
|
||||
|
||||
|
||||
def create(account_number, domains, dns_provider_type, options=None):
|
||||
"""
|
||||
Creates a new dns authorization.
|
||||
"""
|
||||
|
||||
authorization = Authorization(account_number, domains, dns_provider_type, options)
|
||||
return database.create(authorization)
|
||||
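A short usage sketch for the service above, assuming an application context with a configured database; the account number, domains, provider type, and options are illustrative values only:

from lemur.authorizations import service as authorization_service

# Persist a pending DNS authorization while a DNS-01 style validation is in flight.
pending = authorization_service.create(
    account_number="123456789012",             # illustrative account identifier
    domains=["example.com", "www.example.com"],
    dns_provider_type="route53",               # illustrative provider type
    options={"acme_account_id": 42},           # arbitrary extra options
)

# Later, look the record back up by its primary key.
stored = authorization_service.get(pending.id)
assert stored.domains == ["example.com", "www.example.com"]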
737
lemur/certificates/cli.py
Normal file
@ -0,0 +1,737 @@
|
||||
"""
|
||||
.. module: lemur.certificate.cli
|
||||
:platform: Unix
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
"""
|
||||
import multiprocessing
|
||||
import sys
|
||||
from flask import current_app
|
||||
from flask_principal import Identity, identity_changed
|
||||
from flask_script import Manager
|
||||
from sqlalchemy import or_
|
||||
from tabulate import tabulate
|
||||
|
||||
from lemur import database
|
||||
from lemur.authorities.models import Authority
|
||||
from lemur.authorities.service import get as authorities_get_by_id
|
||||
from lemur.certificates.models import Certificate
|
||||
from lemur.certificates.schemas import CertificateOutputSchema
|
||||
from lemur.certificates.service import (
|
||||
reissue_certificate,
|
||||
get_certificate_primitives,
|
||||
get_all_pending_reissue,
|
||||
get_by_name,
|
||||
get_all_valid_certs,
|
||||
get,
|
||||
get_all_certs_attached_to_endpoint_without_autorotate,
|
||||
)
|
||||
from lemur.certificates.verify import verify_string
|
||||
from lemur.constants import SUCCESS_METRIC_STATUS, FAILURE_METRIC_STATUS
|
||||
from lemur.deployment import service as deployment_service
|
||||
from lemur.domains.models import Domain
|
||||
from lemur.endpoints import service as endpoint_service
|
||||
from lemur.extensions import sentry, metrics
|
||||
from lemur.notifications.messaging import send_rotation_notification
|
||||
from lemur.plugins.base import plugins
|
||||
|
||||
manager = Manager(usage="Handles all certificate related tasks.")
|
||||
|
||||
|
||||
def print_certificate_details(details):
|
||||
"""
|
||||
Print the certificate details with formatting.
|
||||
:param details:
|
||||
:return:
|
||||
"""
|
||||
details, errors = CertificateOutputSchema().dump(details)
|
||||
print("[+] Re-issuing certificate with the following details: ")
|
||||
print(
|
||||
"\t[+] Common Name: {common_name}\n"
|
||||
"\t[+] Subject Alternate Names: {sans}\n"
|
||||
"\t[+] Authority: {authority_name}\n"
|
||||
"\t[+] Validity Start: {validity_start}\n"
|
||||
"\t[+] Validity End: {validity_end}\n".format(
|
||||
common_name=details["commonName"],
|
||||
sans=",".join(
|
||||
x["value"] for x in details["extensions"]["subAltNames"]["names"]
|
||||
)
|
||||
or None,
|
||||
authority_name=details["authority"]["name"],
|
||||
validity_start=details["validityStart"],
|
||||
validity_end=details["validityEnd"],
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def validate_certificate(certificate_name):
|
||||
"""
|
||||
Ensures that the specified certificate exists.
|
||||
:param certificate_name:
|
||||
:return:
|
||||
"""
|
||||
if certificate_name:
|
||||
cert = get_by_name(certificate_name)
|
||||
|
||||
if not cert:
|
||||
print("[-] No certificate found with name: {0}".format(certificate_name))
|
||||
sys.exit(1)
|
||||
|
||||
return cert
|
||||
|
||||
|
||||
def validate_endpoint(endpoint_name):
|
||||
"""
|
||||
Ensures that the specified endpoint exists.
|
||||
:param endpoint_name:
|
||||
:return:
|
||||
"""
|
||||
if endpoint_name:
|
||||
endpoint = endpoint_service.get_by_name(endpoint_name)
|
||||
|
||||
if not endpoint:
|
||||
print("[-] No endpoint found with name: {0}".format(endpoint_name))
|
||||
sys.exit(1)
|
||||
|
||||
return endpoint
|
||||
|
||||
|
||||
def request_rotation(endpoint, certificate, message, commit):
|
||||
"""
|
||||
Rotates a certificate and handles any exceptions during
|
||||
execution.
|
||||
:param endpoint:
|
||||
:param certificate:
|
||||
:param message:
|
||||
:param commit:
|
||||
:return:
|
||||
"""
|
||||
status = FAILURE_METRIC_STATUS
|
||||
if commit:
|
||||
try:
|
||||
deployment_service.rotate_certificate(endpoint, certificate)
|
||||
|
||||
if message:
|
||||
send_rotation_notification(certificate)
|
||||
|
||||
status = SUCCESS_METRIC_STATUS
|
||||
|
||||
except Exception as e:
|
||||
print(
|
||||
"[!] Failed to rotate endpoint {0} to certificate {1} reason: {2}".format(
|
||||
endpoint.name, certificate.name, e
|
||||
)
|
||||
)
|
||||
|
||||
metrics.send("endpoint_rotation", "counter", 1, metric_tags={"status": status})
|
||||
|
||||
|
||||
def request_reissue(certificate, commit):
|
||||
"""
|
||||
Reissues a certificate and handles any exceptions.
|
||||
:param certificate:
|
||||
:param commit:
|
||||
:return:
|
||||
"""
|
||||
status = FAILURE_METRIC_STATUS
|
||||
try:
|
||||
print("[+] {0} is eligible for re-issuance".format(certificate.name))
|
||||
|
||||
# set the lemur identity for all cli commands
|
||||
identity_changed.send(current_app._get_current_object(), identity=Identity(1))
|
||||
|
||||
details = get_certificate_primitives(certificate)
|
||||
print_certificate_details(details)
|
||||
|
||||
if commit:
|
||||
new_cert = reissue_certificate(certificate, replace=True)
|
||||
print("[+] New certificate named: {0}".format(new_cert.name))
|
||||
|
||||
status = SUCCESS_METRIC_STATUS
|
||||
|
||||
except Exception as e:
|
||||
sentry.captureException(extra={"certificate_name": str(certificate.name)})
|
||||
current_app.logger.exception(
|
||||
f"Error reissuing certificate: {certificate.name}", exc_info=True
|
||||
)
|
||||
print(f"[!] Failed to reissue certificate: {certificate.name}. Reason: {e}")
|
||||
|
||||
metrics.send(
|
||||
"certificate_reissue",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={"status": status, "certificate": certificate.name},
|
||||
)
|
||||
|
||||
|
||||
@manager.option(
|
||||
"-e",
|
||||
"--endpoint",
|
||||
dest="endpoint_name",
|
||||
help="Name of the endpoint you wish to rotate.",
|
||||
)
|
||||
@manager.option(
|
||||
"-n",
|
||||
"--new-certificate",
|
||||
dest="new_certificate_name",
|
||||
help="Name of the certificate you wish to rotate to.",
|
||||
)
|
||||
@manager.option(
|
||||
"-o",
|
||||
"--old-certificate",
|
||||
dest="old_certificate_name",
|
||||
help="Name of the certificate you wish to rotate.",
|
||||
)
|
||||
@manager.option(
|
||||
"-a",
|
||||
"--notify",
|
||||
dest="message",
|
||||
action="store_true",
|
||||
help="Send a rotation notification to the certificates owner.",
|
||||
)
|
||||
@manager.option(
|
||||
"-c",
|
||||
"--commit",
|
||||
dest="commit",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Persist changes.",
|
||||
)
|
||||
def rotate(endpoint_name, new_certificate_name, old_certificate_name, message, commit):
|
||||
"""
|
||||
Rotates an endpoint and reissues it if it has not already been replaced. If it has
|
||||
been replaced, will use the replacement certificate for the rotation.
|
||||
"""
|
||||
if commit:
|
||||
print("[!] Running in COMMIT mode.")
|
||||
|
||||
print("[+] Starting endpoint rotation.")
|
||||
|
||||
status = FAILURE_METRIC_STATUS
|
||||
|
||||
log_data = {
|
||||
"function": f"{__name__}.{sys._getframe().f_code.co_name}",
|
||||
}
|
||||
|
||||
try:
|
||||
old_cert = validate_certificate(old_certificate_name)
|
||||
new_cert = validate_certificate(new_certificate_name)
|
||||
endpoint = validate_endpoint(endpoint_name)
|
||||
|
||||
if endpoint and new_cert:
|
||||
print(
|
||||
f"[+] Rotating endpoint: {endpoint.name} to certificate {new_cert.name}"
|
||||
)
|
||||
log_data["message"] = "Rotating endpoint"
|
||||
log_data["endpoint"] = endpoint.dnsname
|
||||
log_data["certificate"] = new_cert.name
|
||||
request_rotation(endpoint, new_cert, message, commit)
|
||||
current_app.logger.info(log_data)
|
||||
|
||||
elif old_cert and new_cert:
|
||||
print(f"[+] Rotating all endpoints from {old_cert.name} to {new_cert.name}")
|
||||
|
||||
log_data["message"] = "Rotating all endpoints"
|
||||
log_data["certificate"] = new_cert.name
|
||||
log_data["certificate_old"] = old_cert.name
|
||||
log_data["message"] = "Rotating endpoint from old to new cert"
|
||||
for endpoint in old_cert.endpoints:
|
||||
print(f"[+] Rotating {endpoint.name}")
|
||||
log_data["endpoint"] = endpoint.dnsname
|
||||
request_rotation(endpoint, new_cert, message, commit)
|
||||
current_app.logger.info(log_data)
|
||||
|
||||
else:
|
||||
print("[+] Rotating all endpoints that have new certificates available")
|
||||
log_data["message"] = "Rotating all endpoints that have new certificates available"
|
||||
for endpoint in endpoint_service.get_all_pending_rotation():
|
||||
log_data["endpoint"] = endpoint.dnsname
|
||||
if len(endpoint.certificate.replaced) == 1:
|
||||
print(
|
||||
f"[+] Rotating {endpoint.name} to {endpoint.certificate.replaced[0].name}"
|
||||
)
|
||||
log_data["certificate"] = endpoint.certificate.replaced[0].name
|
||||
request_rotation(
|
||||
endpoint, endpoint.certificate.replaced[0], message, commit
|
||||
)
|
||||
current_app.logger.info(log_data)
|
||||
|
||||
else:
|
||||
log_data["message"] = "Failed to rotate endpoint due to Multiple replacement certificates found"
|
||||
print(log_data)
|
||||
metrics.send(
|
||||
"endpoint_rotation",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={
|
||||
"status": FAILURE_METRIC_STATUS,
|
||||
"old_certificate_name": str(old_cert),
|
||||
"new_certificate_name": str(
|
||||
endpoint.certificate.replaced[0].name
|
||||
),
|
||||
"endpoint_name": str(endpoint.name),
|
||||
"message": str(message),
|
||||
},
|
||||
)
|
||||
print(
|
||||
f"[!] Failed to rotate endpoint {endpoint.name} reason: "
|
||||
"Multiple replacement certificates found."
|
||||
)
|
||||
|
||||
status = SUCCESS_METRIC_STATUS
|
||||
print("[+] Done!")
|
||||
|
||||
except Exception as e:
|
||||
sentry.captureException(
|
||||
extra={
|
||||
"old_certificate_name": str(old_certificate_name),
|
||||
"new_certificate_name": str(new_certificate_name),
|
||||
"endpoint_name": str(endpoint_name),
|
||||
"message": str(message),
|
||||
}
|
||||
)
|
||||
|
||||
metrics.send(
|
||||
"endpoint_rotation_job",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={
|
||||
"status": status,
|
||||
"old_certificate_name": str(old_certificate_name),
|
||||
"new_certificate_name": str(new_certificate_name),
|
||||
"endpoint_name": str(endpoint_name),
|
||||
"message": str(message),
|
||||
"endpoint": str(globals().get("endpoint")),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
def request_rotation_region(endpoint, new_cert, message, commit, log_data, region):
|
||||
if region in endpoint.dnsname:
|
||||
log_data["message"] = "Rotating endpoint in region"
|
||||
request_rotation(endpoint, new_cert, message, commit)
|
||||
else:
|
||||
log_data["message"] = "Skipping rotation, region mismatch"
|
||||
|
||||
print(log_data)
|
||||
current_app.logger.info(log_data)
|
||||
|
||||
|
||||
@manager.option(
|
||||
"-e",
|
||||
"--endpoint",
|
||||
dest="endpoint_name",
|
||||
help="Name of the endpoint you wish to rotate.",
|
||||
)
|
||||
@manager.option(
|
||||
"-n",
|
||||
"--new-certificate",
|
||||
dest="new_certificate_name",
|
||||
help="Name of the certificate you wish to rotate to.",
|
||||
)
|
||||
@manager.option(
|
||||
"-o",
|
||||
"--old-certificate",
|
||||
dest="old_certificate_name",
|
||||
help="Name of the certificate you wish to rotate.",
|
||||
)
|
||||
@manager.option(
|
||||
"-a",
|
||||
"--notify",
|
||||
dest="message",
|
||||
action="store_true",
|
||||
help="Send a rotation notification to the certificates owner.",
|
||||
)
|
||||
@manager.option(
|
||||
"-c",
|
||||
"--commit",
|
||||
dest="commit",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Persist changes.",
|
||||
)
|
||||
@manager.option(
|
||||
"-r",
|
||||
"--region",
|
||||
dest="region",
|
||||
required=True,
|
||||
help="Region in which to rotate the endpoint.",
|
||||
)
|
||||
def rotate_region(endpoint_name, new_certificate_name, old_certificate_name, message, commit, region):
|
||||
"""
|
||||
Rotates an endpoint in a defined region if it has not already been replaced. If it has
|
||||
been replaced, will use the replacement certificate for the rotation.
|
||||
:param old_certificate_name: Name of the certificate you wish to rotate.
|
||||
:param new_certificate_name: Name of the certificate you wish to rotate to.
|
||||
:param endpoint_name: Name of the endpoint you wish to rotate.
|
||||
:param message: Send a rotation notification to the certificates owner.
|
||||
:param commit: Persist changes.
|
||||
:param region: Region in which to rotate the endpoint.
|
||||
"""
|
||||
if commit:
|
||||
print("[!] Running in COMMIT mode.")
|
||||
|
||||
print("[+] Starting endpoint rotation.")
|
||||
status = FAILURE_METRIC_STATUS
|
||||
|
||||
log_data = {
|
||||
"function": f"{__name__}.{sys._getframe().f_code.co_name}",
|
||||
"region": region,
|
||||
}
|
||||
|
||||
try:
|
||||
old_cert = validate_certificate(old_certificate_name)
|
||||
new_cert = validate_certificate(new_certificate_name)
|
||||
endpoint = validate_endpoint(endpoint_name)
|
||||
|
||||
if endpoint and new_cert:
|
||||
log_data["endpoint"] = endpoint.dnsname
|
||||
log_data["certificate"] = new_cert.name
|
||||
request_rotation_region(endpoint, new_cert, message, commit, log_data, region)
|
||||
|
||||
elif old_cert and new_cert:
|
||||
log_data["certificate"] = new_cert.name
|
||||
log_data["certificate_old"] = old_cert.name
|
||||
log_data["message"] = "Rotating endpoint from old to new cert"
|
||||
print(log_data)
|
||||
current_app.logger.info(log_data)
|
||||
for endpoint in old_cert.endpoints:
|
||||
log_data["endpoint"] = endpoint.dnsname
|
||||
request_rotation_region(endpoint, new_cert, message, commit, log_data, region)
|
||||
|
||||
else:
|
||||
log_data["message"] = "Rotating all endpoints that have new certificates available"
|
||||
print(log_data)
|
||||
current_app.logger.info(log_data)
|
||||
all_pending_rotation_endpoints = endpoint_service.get_all_pending_rotation()
|
||||
for endpoint in all_pending_rotation_endpoints:
|
||||
log_data["endpoint"] = endpoint.dnsname
|
||||
if region not in endpoint.dnsname:
|
||||
log_data["message"] = "Skipping rotation, region mismatch"
|
||||
print(log_data)
|
||||
current_app.logger.info(log_data)
|
||||
metrics.send(
|
||||
"endpoint_rotation_region_skipped",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={
|
||||
"region": region,
|
||||
"old_certificate_name": str(old_cert),
|
||||
"new_certificate_name": str(endpoint.certificate.replaced[0].name),
|
||||
"endpoint_name": str(endpoint.dnsname),
|
||||
},
|
||||
)
|
||||
|
||||
if len(endpoint.certificate.replaced) == 1:
|
||||
log_data["certificate"] = endpoint.certificate.replaced[0].name
|
||||
log_data["message"] = "Rotating all endpoints in region"
|
||||
print(log_data)
|
||||
current_app.logger.info(log_data)
|
||||
request_rotation(endpoint, endpoint.certificate.replaced[0], message, commit)
|
||||
status = SUCCESS_METRIC_STATUS
|
||||
else:
|
||||
status = FAILURE_METRIC_STATUS
|
||||
log_data["message"] = "Failed to rotate endpoint due to Multiple replacement certificates found"
|
||||
print(log_data)
|
||||
current_app.logger.info(log_data)
|
||||
|
||||
metrics.send(
|
||||
"endpoint_rotation_region",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={
|
||||
"status": FAILURE_METRIC_STATUS,
|
||||
"old_certificate_name": str(old_cert),
|
||||
"new_certificate_name": str(endpoint.certificate.replaced[0].name),
|
||||
"endpoint_name": str(endpoint.dnsname),
|
||||
"message": str(message),
|
||||
"region": str(region),
|
||||
},
|
||||
)
|
||||
status = SUCCESS_METRIC_STATUS
|
||||
print("[+] Done!")
|
||||
|
||||
except Exception as e:
|
||||
sentry.captureException(
|
||||
extra={
|
||||
"old_certificate_name": str(old_certificate_name),
|
||||
"new_certificate_name": str(new_certificate_name),
|
||||
"endpoint": str(endpoint_name),
|
||||
"message": str(message),
|
||||
"region": str(region),
|
||||
}
|
||||
)
|
||||
|
||||
metrics.send(
|
||||
"endpoint_rotation_region_job",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={
|
||||
"status": status,
|
||||
"old_certificate_name": str(old_certificate_name),
|
||||
"new_certificate_name": str(new_certificate_name),
|
||||
"endpoint_name": str(endpoint_name),
|
||||
"message": str(message),
|
||||
"endpoint": str(globals().get("endpoint")),
|
||||
"region": str(region),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@manager.option(
|
||||
"-o",
|
||||
"--old-certificate",
|
||||
dest="old_certificate_name",
|
||||
help="Name of the certificate you wish to reissue.",
|
||||
)
|
||||
@manager.option(
|
||||
"-c",
|
||||
"--commit",
|
||||
dest="commit",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Persist changes.",
|
||||
)
|
||||
def reissue(old_certificate_name, commit):
|
||||
"""
|
||||
Reissues certificate with the same parameters as it was originally issued with.
|
||||
If no time period is provided, reissues the certificate as valid from today to
|
||||
today + length of original.
|
||||
"""
|
||||
if commit:
|
||||
print("[!] Running in COMMIT mode.")
|
||||
|
||||
print("[+] Starting certificate re-issuance.")
|
||||
|
||||
status = FAILURE_METRIC_STATUS
|
||||
|
||||
try:
|
||||
old_cert = validate_certificate(old_certificate_name)
|
||||
|
||||
if not old_cert:
|
||||
for certificate in get_all_pending_reissue():
|
||||
request_reissue(certificate, commit)
|
||||
else:
|
||||
request_reissue(old_cert, commit)
|
||||
|
||||
status = SUCCESS_METRIC_STATUS
|
||||
print("[+] Done!")
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
current_app.logger.exception("Error reissuing certificate.", exc_info=True)
|
||||
print("[!] Failed to reissue certificates. Reason: {}".format(e))
|
||||
|
||||
metrics.send(
|
||||
"certificate_reissue_job", "counter", 1, metric_tags={"status": status}
|
||||
)
|
||||
|
||||
|
||||
@manager.option(
|
||||
"-f",
|
||||
"--fqdns",
|
||||
dest="fqdns",
|
||||
help="FQDNs to query. Multiple fqdns specified via comma.",
|
||||
)
|
||||
@manager.option("-i", "--issuer", dest="issuer", help="Issuer to query for.")
|
||||
@manager.option("-o", "--owner", dest="owner", help="Owner to query for.")
|
||||
@manager.option(
|
||||
"-e",
|
||||
"--expired",
|
||||
dest="expired",
|
||||
type=bool,
|
||||
default=False,
|
||||
help="Include expired certificates.",
|
||||
)
|
||||
def query(fqdns, issuer, owner, expired):
|
||||
"""Prints certificates that match the query params."""
|
||||
table = []
|
||||
|
||||
q = database.session_query(Certificate)
|
||||
if issuer:
|
||||
sub_query = (
|
||||
database.session_query(Authority.id)
|
||||
.filter(Authority.name.ilike("%{0}%".format(issuer)))
|
||||
.subquery()
|
||||
)
|
||||
|
||||
q = q.filter(
|
||||
or_(
|
||||
Certificate.issuer.ilike("%{0}%".format(issuer)),
|
||||
Certificate.authority_id.in_(sub_query),
|
||||
)
|
||||
)
|
||||
if owner:
|
||||
q = q.filter(Certificate.owner.ilike("%{0}%".format(owner)))
|
||||
|
||||
if not expired:
|
||||
q = q.filter(Certificate.expired == False) # noqa
|
||||
|
||||
if fqdns:
|
||||
for f in fqdns.split(","):
|
||||
q = q.filter(
|
||||
or_(
|
||||
Certificate.cn.ilike("%{0}%".format(f)),
|
||||
Certificate.domains.any(Domain.name.ilike("%{0}%".format(f))),
|
||||
)
|
||||
)
|
||||
|
||||
for c in q.all():
|
||||
table.append([c.id, c.name, c.owner, c.issuer])
|
||||
|
||||
print(tabulate(table, headers=["Id", "Name", "Owner", "Issuer"], tablefmt="csv"))
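As a sketch, the same filters can also be exercised programmatically inside an application context (the parameter values are purely illustrative; the usual entry point is the certificate manager command defined above):

from lemur.certificates import cli as certificate_cli

with app.app_context():  # `app` is an already-configured Lemur Flask app (assumed in scope)
    certificate_cli.query(
        fqdns="example.com,api.example.com",   # comma-separated FQDN filters
        issuer="ExampleCA",                    # substring match against issuer/authority name
        owner="secure@example.com",            # substring match against certificate owner
        expired=False,                         # exclude expired certificates
    )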
|
||||
|
||||
|
||||
def worker(data, commit, reason):
|
||||
parts = [x for x in data.split(" ") if x]
|
||||
try:
|
||||
cert = get(int(parts[0].strip()))
|
||||
plugin = plugins.get(cert.authority.plugin_name)
|
||||
|
||||
print("[+] Revoking certificate. Id: {0} Name: {1}".format(cert.id, cert.name))
|
||||
if commit:
|
||||
plugin.revoke_certificate(cert, reason)
|
||||
|
||||
metrics.send(
|
||||
"certificate_revoke",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={"status": SUCCESS_METRIC_STATUS},
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
metrics.send(
|
||||
"certificate_revoke",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={"status": FAILURE_METRIC_STATUS},
|
||||
)
|
||||
print("[!] Failed to revoke certificates. Reason: {}".format(e))
|
||||
|
||||
|
||||
@manager.command
|
||||
def clear_pending():
|
||||
"""
|
||||
Function clears all pending certificates.
|
||||
:return:
|
||||
"""
|
||||
v = plugins.get("verisign-issuer")
|
||||
v.clear_pending_certificates()
|
||||
|
||||
|
||||
@manager.option(
|
||||
"-p", "--path", dest="path", help="Absolute file path to a Lemur query csv."
|
||||
)
|
||||
@manager.option("-r", "--reason", dest="reason", help="Reason to revoke certificate.")
|
||||
@manager.option(
|
||||
"-c",
|
||||
"--commit",
|
||||
dest="commit",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Persist changes.",
|
||||
)
|
||||
def revoke(path, reason, commit):
|
||||
"""
|
||||
Revokes given certificate.
|
||||
"""
|
||||
if commit:
|
||||
print("[!] Running in COMMIT mode.")
|
||||
|
||||
print("[+] Starting certificate revocation.")
|
||||
|
||||
with open(path, "r") as f:
|
||||
args = [[x, commit, reason] for x in f.readlines()[2:]]
|
||||
|
||||
with multiprocessing.Pool(processes=3) as pool:
|
||||
pool.starmap(worker, args)
|
||||
|
||||
|
||||
@manager.command
|
||||
def check_revoked():
|
||||
"""
|
||||
Function attempts to update Lemur's internal cache with revoked
|
||||
certificates. This is called periodically by Lemur. It checks both
|
||||
CRLs and OCSP to see if a certificate is revoked. If Lemur
encounters an issue with verification, it marks the certificate status
|
||||
as `unknown`.
|
||||
"""
|
||||
|
||||
log_data = {
|
||||
"function": f"{__name__}.{sys._getframe().f_code.co_name}",
|
||||
"message": "Checking for revoked Certificates"
|
||||
}
|
||||
|
||||
certs = get_all_valid_certs(current_app.config.get("SUPPORTED_REVOCATION_AUTHORITY_PLUGINS", []))
|
||||
for cert in certs:
|
||||
try:
|
||||
if cert.chain:
|
||||
status = verify_string(cert.body, cert.chain)
|
||||
else:
|
||||
status = verify_string(cert.body, "")
|
||||
|
||||
cert.status = "valid" if status else "revoked"
|
||||
|
||||
if cert.status == "revoked":
|
||||
log_data["valid"] = cert.status
|
||||
log_data["certificate_name"] = cert.name
|
||||
log_data["certificate_id"] = cert.id
|
||||
metrics.send(
|
||||
"certificate_revoked",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={"status": log_data["valid"],
|
||||
"certificate_name": log_data["certificate_name"],
|
||||
"certificate_id": log_data["certificate_id"]},
|
||||
)
|
||||
current_app.logger.info(log_data)
|
||||
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
current_app.logger.exception(e)
|
||||
cert.status = "unknown"
|
||||
|
||||
database.update(cert)
|
||||
|
||||
|
||||
@manager.command
|
||||
def automatically_enable_autorotate():
|
||||
"""
|
||||
This function automatically enables auto-rotation for unexpired certificates that are
|
||||
attached to an endpoint but do not have autorotate enabled.
|
||||
|
||||
WARNING: This will overwrite the Auto-rotate toggle!
|
||||
"""
|
||||
log_data = {
|
||||
"function": f"{__name__}.{sys._getframe().f_code.co_name}",
|
||||
"message": "Enabling auto-rotate for certificate"
|
||||
}
|
||||
|
||||
permitted_authorities = current_app.config.get("ENABLE_AUTO_ROTATE_AUTHORITY", [])
|
||||
|
||||
eligible_certs = get_all_certs_attached_to_endpoint_without_autorotate()
|
||||
for cert in eligible_certs:
|
||||
|
||||
if cert.authority_id not in permitted_authorities:
|
||||
continue
|
||||
|
||||
log_data["certificate"] = cert.name
|
||||
log_data["certificate_id"] = cert.id
|
||||
log_data["authority_id"] = cert.authority_id
|
||||
log_data["authority_name"] = authorities_get_by_id(cert.authority_id).name
|
||||
if cert.destinations:
|
||||
log_data["destination_names"] = ', '.join([d.label for d in cert.destinations])
|
||||
else:
|
||||
log_data["destination_names"] = "NONE"
|
||||
current_app.logger.info(log_data)
|
||||
metrics.send("automatically_enable_autorotate",
|
||||
"counter", 1,
|
||||
metric_tags={"certificate": log_data["certificate"],
|
||||
"certificate_id": log_data["certificate_id"],
|
||||
"authority_id": log_data["authority_id"],
|
||||
"authority_name": log_data["authority_name"],
|
||||
"destination_names": log_data["destination_names"]
|
||||
})
|
||||
cert.rotation = True
|
||||
database.update(cert)
|
||||
@ -1,88 +0,0 @@
|
||||
"""
|
||||
.. module: lemur.certificates.exceptions
|
||||
:synopsis: Defines all monterey specific exceptions
|
||||
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
"""
|
||||
from flask import current_app
|
||||
from lemur.exceptions import LemurException
|
||||
|
||||
|
||||
class UnknownAuthority(LemurException):
|
||||
def __init__(self, authority):
|
||||
self.code = 404
|
||||
self.authority = authority
|
||||
self.data = {"message": "The authority specified '{}' is not a valid authority".format(self.authority)}
|
||||
|
||||
current_app.logger.warning(self)
|
||||
|
||||
def __str__(self):
|
||||
return repr(self.data['message'])
|
||||
|
||||
|
||||
class InsufficientDomains(LemurException):
|
||||
def __init__(self):
|
||||
self.code = 400
|
||||
self.data = {"message": "Need at least one domain specified in order create a certificate"}
|
||||
|
||||
current_app.logger.warning(self)
|
||||
|
||||
def __str__(self):
|
||||
return repr(self.data['message'])
|
||||
|
||||
|
||||
class InvalidCertificate(LemurException):
|
||||
def __init__(self):
|
||||
self.code = 400
|
||||
self.data = {"message": "Need at least one domain specified in order create a certificate"}
|
||||
|
||||
current_app.logger.warning(self)
|
||||
|
||||
def __str__(self):
|
||||
return repr(self.data['message'])
|
||||
|
||||
|
||||
class UnableToCreateCSR(LemurException):
|
||||
def __init__(self):
|
||||
self.code = 500
|
||||
self.data = {"message": "Unable to generate CSR"}
|
||||
|
||||
current_app.logger.error(self)
|
||||
|
||||
def __str__(self):
|
||||
return repr(self.data['message'])
|
||||
|
||||
|
||||
class UnableToCreatePrivateKey(LemurException):
|
||||
def __init__(self):
|
||||
self.code = 500
|
||||
self.data = {"message": "Unable to generate Private Key"}
|
||||
|
||||
current_app.logger.error(self)
|
||||
|
||||
def __str__(self):
|
||||
return repr(self.data['message'])
|
||||
|
||||
|
||||
class MissingFiles(LemurException):
|
||||
def __init__(self, path):
|
||||
self.code = 500
|
||||
self.path = path
|
||||
self.data = {"path": self.path, "message": "Expecting missing files"}
|
||||
|
||||
current_app.logger.error(self)
|
||||
|
||||
def __str__(self):
|
||||
return repr(self.data['message'])
|
||||
|
||||
|
||||
class NoPersistanceFound(LemurException):
|
||||
def __init__(self):
|
||||
self.code = 500
|
||||
self.data = {"code": 500, "message": "No peristence method found, Lemur cannot persist sensitive information"}
|
||||
|
||||
current_app.logger.error(self)
|
||||
|
||||
def __str__(self):
|
||||
return repr(self.data['message'])
|
||||
47
lemur/certificates/hooks.py
Normal file
@ -0,0 +1,47 @@
|
||||
"""
|
||||
Debugging hooks for dumping imported or generated CSR and certificate details to stdout via OpenSSL.
|
||||
|
||||
.. module: lemur.certificates.hooks
|
||||
:platform: Unix
|
||||
:copyright: (c) 2018 by Marti Raudsepp, see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
|
||||
.. moduleauthor:: Marti Raudsepp <marti@juffo.org>
|
||||
"""
|
||||
import subprocess
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from lemur.certificates.service import (
|
||||
csr_created,
|
||||
csr_imported,
|
||||
certificate_issued,
|
||||
certificate_imported,
|
||||
)
|
||||
|
||||
|
||||
def csr_dump_handler(sender, csr, **kwargs):
|
||||
try:
|
||||
subprocess.run(
|
||||
["openssl", "req", "-text", "-noout", "-reqopt", "no_sigdump,no_pubkey"],
|
||||
input=csr.encode("utf8"),
|
||||
)
|
||||
except Exception as err:
|
||||
current_app.logger.warning("Error inspecting CSR: %s", err)
|
||||
|
||||
|
||||
def cert_dump_handler(sender, certificate, **kwargs):
|
||||
try:
|
||||
subprocess.run(
|
||||
["openssl", "x509", "-text", "-noout", "-certopt", "no_sigdump,no_pubkey"],
|
||||
input=certificate.body.encode("utf8"),
|
||||
)
|
||||
except Exception as err:
|
||||
current_app.logger.warning("Error inspecting certificate: %s", err)
|
||||
|
||||
|
||||
def activate_debug_dump():
|
||||
csr_created.connect(csr_dump_handler)
|
||||
csr_imported.connect(csr_dump_handler)
|
||||
certificate_issued.connect(cert_dump_handler)
|
||||
certificate_imported.connect(cert_dump_handler)
|
||||
@ -1,286 +1,494 @@
|
||||
"""
|
||||
.. module: lemur.certificates.models
|
||||
:platform: Unix
|
||||
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
"""
|
||||
import datetime
|
||||
from flask import current_app
|
||||
from datetime import timedelta
|
||||
|
||||
import arrow
|
||||
from cryptography import x509
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from flask import current_app
|
||||
from idna.core import InvalidCodepoint
|
||||
from sqlalchemy import (
|
||||
event,
|
||||
Integer,
|
||||
ForeignKey,
|
||||
String,
|
||||
PassiveDefault,
|
||||
func,
|
||||
Column,
|
||||
Text,
|
||||
Boolean,
|
||||
Index,
|
||||
)
|
||||
from sqlalchemy.ext.hybrid import hybrid_property
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy import event, Integer, ForeignKey, String, DateTime, PassiveDefault, func, Column, Text, Boolean
|
||||
from sqlalchemy.sql.expression import case, extract
|
||||
from sqlalchemy_utils.types.arrow import ArrowType
|
||||
from werkzeug.utils import cached_property
|
||||
|
||||
from sqlalchemy_utils import EncryptedType
|
||||
|
||||
from lemur.utils import get_key
|
||||
from lemur.common import defaults, utils, validators
|
||||
from lemur.constants import SUCCESS_METRIC_STATUS, FAILURE_METRIC_STATUS
|
||||
from lemur.database import db
|
||||
from lemur.plugins.base import plugins
|
||||
|
||||
from lemur.domains.models import Domain
|
||||
|
||||
from lemur.constants import SAN_NAMING_TEMPLATE, DEFAULT_NAMING_TEMPLATE
|
||||
|
||||
from lemur.models import certificate_associations, certificate_source_associations, \
|
||||
certificate_destination_associations, certificate_notification_associations
|
||||
from lemur.extensions import metrics
|
||||
from lemur.extensions import sentry
|
||||
from lemur.models import (
|
||||
certificate_associations,
|
||||
certificate_source_associations,
|
||||
certificate_destination_associations,
|
||||
certificate_notification_associations,
|
||||
certificate_replacement_associations,
|
||||
roles_certificates,
|
||||
pending_cert_replacement_associations,
|
||||
)
|
||||
from lemur.plugins.base import plugins
|
||||
from lemur.policies.models import RotationPolicy
|
||||
from lemur.utils import Vault
|
||||
|
||||
|
||||
def create_name(issuer, not_before, not_after, subject, san):
|
||||
"""
|
||||
Create a name for our certificate. A naming standard
|
||||
is based on a series of templates. The name includes
|
||||
useful information such as Common Name, Validation dates,
|
||||
and Issuer.
|
||||
def get_sequence(name):
|
||||
if "-" not in name:
|
||||
return name, None
|
||||
|
||||
:rtype : str
|
||||
:return:
|
||||
"""
|
||||
if san:
|
||||
t = SAN_NAMING_TEMPLATE
|
||||
else:
|
||||
t = DEFAULT_NAMING_TEMPLATE
|
||||
parts = name.split("-")
|
||||
|
||||
temp = t.format(
|
||||
subject=subject,
|
||||
issuer=issuer,
|
||||
not_before=not_before.strftime('%Y%m%d'),
|
||||
not_after=not_after.strftime('%Y%m%d')
|
||||
)
|
||||
|
||||
# NOTE we may want to give more control over naming
|
||||
# aws doesn't allow special chars except '-'
|
||||
disallowed_chars = ''.join(c for c in map(chr, range(256)) if not c.isalnum())
|
||||
disallowed_chars = disallowed_chars.replace("-", "")
|
||||
disallowed_chars = disallowed_chars.replace(".", "")
|
||||
temp = temp.replace('*', "WILDCARD")
|
||||
|
||||
for c in disallowed_chars:
|
||||
temp = temp.replace(c, "")
|
||||
|
||||
# white space is silly too
|
||||
return temp.replace(" ", "-")
|
||||
|
||||
|
||||
def cert_get_cn(cert):
|
||||
"""
|
||||
Attempts to get a sane common name from a given certificate.
|
||||
|
||||
:param cert:
|
||||
:return: Common name or None
|
||||
"""
|
||||
return cert.subject.get_attributes_for_oid(
|
||||
x509.OID_COMMON_NAME
|
||||
)[0].value.strip()
|
||||
|
||||
|
||||
def cert_get_domains(cert):
|
||||
"""
|
||||
Attempts to get an domains listed in a certificate.
|
||||
If 'subjectAltName' extension is not available we simply
|
||||
return the common name.
|
||||
|
||||
:param cert:
|
||||
:return: List of domains
|
||||
"""
|
||||
domains = []
|
||||
# see if we have an int at the end of our name
|
||||
try:
|
||||
ext = cert.extensions.get_extension_for_oid(x509.OID_SUBJECT_ALTERNATIVE_NAME)
|
||||
entries = ext.value.get_values_for_type(x509.DNSName)
|
||||
for entry in entries:
|
||||
domains.append(entry)
|
||||
except Exception as e:
|
||||
current_app.logger.warning("Failed to get SubjectAltName: {0}".format(e))
|
||||
seq = int(parts[-1])
|
||||
except ValueError:
|
||||
return name, None
|
||||
|
||||
return domains
|
||||
# we might have a date at the end of our name
|
||||
if len(parts[-1]) == 8:
|
||||
return name, None
|
||||
|
||||
root = "-".join(parts[:-1])
|
||||
return root, seq
|
||||
|
||||
|
||||
def cert_get_serial(cert):
|
||||
"""
|
||||
Fetch the serial number from the certificate.
|
||||
def get_or_increase_name(name, serial):
|
||||
certificates = Certificate.query.filter(Certificate.name == name).all()
|
||||
|
||||
:param cert:
|
||||
:return: serial number
|
||||
"""
|
||||
return cert.serial
|
||||
if not certificates:
|
||||
return name
|
||||
|
||||
serial_name = "{0}-{1}".format(name, hex(int(serial))[2:].upper())
|
||||
certificates = Certificate.query.filter(Certificate.name == serial_name).all()
|
||||
|
||||
def cert_is_san(cert):
|
||||
"""
|
||||
Determines if a given certificate is a SAN certificate.
|
||||
SAN certificates are simply certificates that cover multiple domains.
|
||||
if not certificates:
|
||||
return serial_name
|
||||
|
||||
:param cert:
|
||||
:return: Bool
|
||||
"""
|
||||
if len(cert_get_domains(cert)) > 1:
|
||||
return True
|
||||
certificates = Certificate.query.filter(
|
||||
Certificate.name.ilike("{0}%".format(serial_name))
|
||||
).all()
|
||||
|
||||
ends = [0]
|
||||
root, end = get_sequence(serial_name)
|
||||
for cert in certificates:
|
||||
root, end = get_sequence(cert.name)
|
||||
if end:
|
||||
ends.append(end)
|
||||
|
||||
def cert_is_wildcard(cert):
|
||||
"""
|
||||
Determines if certificate is a wildcard certificate.
|
||||
|
||||
:param cert:
|
||||
:return: Bool
|
||||
"""
|
||||
domains = cert_get_domains(cert)
|
||||
if len(domains) == 1 and domains[0][0:1] == "*":
|
||||
return True
|
||||
|
||||
if cert.subject.get_attributes_for_oid(x509.OID_COMMON_NAME)[0].value[0:1] == "*":
|
||||
return True
|
||||
|
||||
|
||||
def cert_get_bitstrength(cert):
|
||||
"""
|
||||
Calculates a certificates public key bit length.
|
||||
|
||||
:param cert:
|
||||
:return: Integer
|
||||
"""
|
||||
return cert.public_key().key_size
|
||||
|
||||
|
||||
def cert_get_issuer(cert):
|
||||
"""
|
||||
Gets a sane issuer from a given certificate.
|
||||
|
||||
:param cert:
|
||||
:return: Issuer
|
||||
"""
|
||||
delchars = ''.join(c for c in map(chr, range(256)) if not c.isalnum())
|
||||
try:
|
||||
issuer = str(cert.issuer.get_attributes_for_oid(x509.OID_ORGANIZATION_NAME)[0].value)
|
||||
for c in delchars:
|
||||
issuer = issuer.replace(c, "")
|
||||
return issuer
|
||||
except Exception as e:
|
||||
current_app.logger.error("Unable to get issuer! {0}".format(e))
|
||||
|
||||
|
||||
def cert_get_not_before(cert):
|
||||
"""
|
||||
Gets the naive datetime of the certificates 'not_before' field.
|
||||
This field denotes the first date in time which the given certificate
|
||||
is valid.
|
||||
|
||||
:param cert:
|
||||
:return: Datetime
|
||||
"""
|
||||
return cert.not_valid_before
|
||||
|
||||
|
||||
def cert_get_not_after(cert):
|
||||
"""
|
||||
Gets the naive datetime of the certificates 'not_after' field.
|
||||
This field denotes the last date in time which the given certificate
|
||||
is valid.
|
||||
|
||||
:param cert:
|
||||
:return: Datetime
|
||||
"""
|
||||
return cert.not_valid_after
|
||||
|
||||
|
||||
def get_name_from_arn(arn):
|
||||
"""
|
||||
Extract the certificate name from an arn.
|
||||
|
||||
:param arn: IAM SSL arn
|
||||
:return: name of the certificate as uploaded to AWS
|
||||
"""
|
||||
return arn.split("/", 1)[1]
|
||||
|
||||
|
||||
def get_account_number(arn):
|
||||
"""
|
||||
Extract the account number from an arn.
|
||||
|
||||
:param arn: IAM SSL arn
|
||||
:return: account number associated with ARN
|
||||
"""
|
||||
return arn.split(":")[4]
|
||||
return "{0}-{1}".format(root, max(ends) + 1)
|
||||
|
||||
|
||||
class Certificate(db.Model):
|
||||
__tablename__ = 'certificates'
|
||||
__tablename__ = "certificates"
|
||||
__table_args__ = (
|
||||
Index(
|
||||
"ix_certificates_cn",
|
||||
"cn",
|
||||
postgresql_ops={"cn": "gin_trgm_ops"},
|
||||
postgresql_using="gin",
|
||||
),
|
||||
Index(
|
||||
"ix_certificates_name",
|
||||
"name",
|
||||
postgresql_ops={"name": "gin_trgm_ops"},
|
||||
postgresql_using="gin",
|
||||
),
|
||||
)
|
||||
id = Column(Integer, primary_key=True)
|
||||
owner = Column(String(128))
|
||||
body = Column(Text())
|
||||
private_key = Column(EncryptedType(String, get_key))
|
||||
status = Column(String(128))
|
||||
deleted = Column(Boolean, index=True)
|
||||
name = Column(String(128))
|
||||
ix = Index(
|
||||
"ix_certificates_id_desc", id.desc(), postgresql_using="btree", unique=True
|
||||
)
|
||||
external_id = Column(String(128))
|
||||
owner = Column(String(128), nullable=False)
|
||||
name = Column(String(256), unique=True)
|
||||
description = Column(String(1024))
|
||||
notify = Column(Boolean, default=True)
|
||||
|
||||
body = Column(Text(), nullable=False)
|
||||
chain = Column(Text())
|
||||
bits = Column(Integer())
|
||||
csr = Column(Text())
|
||||
private_key = Column(Vault)
|
||||
|
||||
issuer = Column(String(128))
|
||||
serial = Column(String(128))
|
||||
cn = Column(String(128))
|
||||
description = Column(String(1024))
|
||||
active = Column(Boolean, default=True)
|
||||
san = Column(String(1024))
|
||||
not_before = Column(DateTime)
|
||||
not_after = Column(DateTime)
|
||||
date_created = Column(DateTime, PassiveDefault(func.now()), nullable=False)
|
||||
user_id = Column(Integer, ForeignKey('users.id'))
|
||||
authority_id = Column(Integer, ForeignKey('authorities.id'))
|
||||
notifications = relationship("Notification", secondary=certificate_notification_associations, backref='certificate')
|
||||
destinations = relationship("Destination", secondary=certificate_destination_associations, backref='certificate')
|
||||
sources = relationship("Source", secondary=certificate_source_associations, backref='certificate')
|
||||
domains = relationship("Domain", secondary=certificate_associations, backref="certificate")
|
||||
deleted = Column(Boolean, index=True, default=False)
|
||||
dns_provider_id = Column(
|
||||
Integer(), ForeignKey("dns_providers.id", ondelete="CASCADE"), nullable=True
|
||||
)
|
||||
|
||||
def __init__(self, body, private_key=None, chain=None):
|
||||
self.body = body
|
||||
# We encrypt the private_key on creation
|
||||
self.private_key = private_key
|
||||
self.chain = chain
|
||||
cert = x509.load_pem_x509_certificate(str(self.body), default_backend())
|
||||
self.bits = cert_get_bitstrength(cert)
|
||||
self.issuer = cert_get_issuer(cert)
|
||||
self.serial = cert_get_serial(cert)
|
||||
self.cn = cert_get_cn(cert)
|
||||
self.san = cert_is_san(cert)
|
||||
self.not_before = cert_get_not_before(cert)
|
||||
self.not_after = cert_get_not_after(cert)
|
||||
self.name = create_name(self.issuer, self.not_before, self.not_after, self.cn, self.san)
|
||||
not_before = Column(ArrowType)
|
||||
not_after = Column(ArrowType)
|
||||
not_after_ix = Index("ix_certificates_not_after", not_after.desc())
|
||||
|
||||
for domain in cert_get_domains(cert):
|
||||
date_created = Column(ArrowType, PassiveDefault(func.now()), nullable=False)
|
||||
|
||||
signing_algorithm = Column(String(128))
|
||||
status = Column(String(128))
|
||||
bits = Column(Integer())
|
||||
san = Column(String(1024)) # TODO this should be migrated to boolean
|
||||
|
||||
rotation = Column(Boolean, default=False)
|
||||
user_id = Column(Integer, ForeignKey("users.id"))
|
||||
authority_id = Column(Integer, ForeignKey("authorities.id", ondelete="CASCADE"))
|
||||
root_authority_id = Column(
|
||||
Integer, ForeignKey("authorities.id", ondelete="CASCADE")
|
||||
)
|
||||
rotation_policy_id = Column(Integer, ForeignKey("rotation_policies.id"))
|
||||
key_type = Column(String(128))
|
||||
|
||||
notifications = relationship(
|
||||
"Notification",
|
||||
secondary=certificate_notification_associations,
|
||||
backref="certificate",
|
||||
)
|
||||
destinations = relationship(
|
||||
"Destination",
|
||||
secondary=certificate_destination_associations,
|
||||
backref="certificate",
|
||||
)
|
||||
sources = relationship(
|
||||
"Source", secondary=certificate_source_associations, backref="certificate"
|
||||
)
|
||||
domains = relationship(
|
||||
"Domain", secondary=certificate_associations, backref="certificate"
|
||||
)
|
||||
roles = relationship("Role", secondary=roles_certificates, backref="certificate")
|
||||
replaces = relationship(
|
||||
"Certificate",
|
||||
secondary=certificate_replacement_associations,
|
||||
primaryjoin=id == certificate_replacement_associations.c.certificate_id, # noqa
|
||||
secondaryjoin=id
|
||||
== certificate_replacement_associations.c.replaced_certificate_id, # noqa
|
||||
backref="replaced",
|
||||
)
|
||||
|
||||
replaced_by_pending = relationship(
|
||||
"PendingCertificate",
|
||||
secondary=pending_cert_replacement_associations,
|
||||
backref="pending_replace",
|
||||
viewonly=True,
|
||||
)
|
||||
|
||||
logs = relationship("Log", backref="certificate")
|
||||
endpoints = relationship("Endpoint", backref="certificate")
|
||||
rotation_policy = relationship("RotationPolicy")
|
||||
sensitive_fields = ("private_key",)
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
self.body = kwargs["body"].strip()
|
||||
cert = self.parsed_cert
|
||||
|
||||
self.issuer = defaults.issuer(cert)
|
||||
self.cn = defaults.common_name(cert)
|
||||
self.san = defaults.san(cert)
|
||||
self.not_before = defaults.not_before(cert)
|
||||
self.not_after = defaults.not_after(cert)
|
||||
self.serial = defaults.serial(cert)
|
||||
|
||||
# when destinations are appended they require a valid name.
|
||||
if kwargs.get("name"):
|
||||
self.name = get_or_increase_name(
|
||||
defaults.text_to_slug(kwargs["name"]), self.serial
|
||||
)
|
||||
else:
|
||||
self.name = get_or_increase_name(
|
||||
defaults.certificate_name(
|
||||
self.cn, self.issuer, self.not_before, self.not_after, self.san
|
||||
),
|
||||
self.serial,
|
||||
)
|
||||
|
||||
self.owner = kwargs["owner"]
|
||||
|
||||
if kwargs.get("private_key"):
|
||||
self.private_key = kwargs["private_key"].strip()
|
||||
|
||||
if kwargs.get("chain"):
|
||||
self.chain = kwargs["chain"].strip()
|
||||
|
||||
if kwargs.get("csr"):
|
||||
self.csr = kwargs["csr"].strip()
|
||||
|
||||
self.notify = kwargs.get("notify", True)
|
||||
self.destinations = kwargs.get("destinations", [])
|
||||
self.notifications = kwargs.get("notifications", [])
|
||||
self.description = kwargs.get("description")
|
||||
self.roles = list(set(kwargs.get("roles", [])))
|
||||
self.replaces = kwargs.get("replaces", [])
|
||||
self.rotation = kwargs.get("rotation")
|
||||
self.rotation_policy = kwargs.get("rotation_policy")
|
||||
self.key_type = kwargs.get("key_type")
|
||||
self.signing_algorithm = defaults.signing_algorithm(cert)
|
||||
self.bits = defaults.bitstrength(cert)
|
||||
self.external_id = kwargs.get("external_id")
|
||||
self.authority_id = kwargs.get("authority_id")
|
||||
self.dns_provider_id = kwargs.get("dns_provider_id")
|
||||
|
||||
for domain in defaults.domains(cert):
|
||||
self.domains.append(Domain(name=domain))
|
||||
|
||||
@property
|
||||
def is_expired(self):
|
||||
if self.not_after < datetime.datetime.now():
|
||||
return True
|
||||
# Check integrity before saving anything into the database.
|
||||
# For user-facing API calls, validation should also be done in schema validators.
|
||||
self.check_integrity()
|
||||
|
||||
@property
|
||||
def is_unused(self):
|
||||
if self.elb_listeners.count() == 0:
|
||||
return True
|
||||
|
||||
@property
|
||||
def is_revoked(self):
|
||||
# we might not yet know the condition of the cert
|
||||
if self.status:
|
||||
if 'revoked' in self.status:
|
||||
return True
|
||||
|
||||
def get_arn(self, account_number):
|
||||
def check_integrity(self):
|
||||
"""
|
||||
Generate a valid AWS IAM arn
|
||||
Integrity checks: Does the cert have a valid chain and matching private key?
|
||||
"""
|
||||
if self.private_key:
|
||||
validators.verify_private_key_match(
|
||||
utils.parse_private_key(self.private_key),
|
||||
self.parsed_cert,
|
||||
error_class=AssertionError,
|
||||
)
|
||||
|
||||
:rtype : str
|
||||
:param account_number:
|
||||
if self.chain:
|
||||
chain = [self.parsed_cert] + utils.parse_cert_chain(self.chain)
|
||||
validators.verify_cert_chain(chain, error_class=AssertionError)
|
||||
|
||||
@cached_property
|
||||
def parsed_cert(self):
|
||||
assert self.body, "Certificate body not set"
|
||||
return utils.parse_certificate(self.body)
|
||||
|
||||
@property
|
||||
def active(self):
|
||||
return self.notify
|
||||
|
||||
@property
|
||||
def organization(self):
|
||||
return defaults.organization(self.parsed_cert)
|
||||
|
||||
@property
|
||||
def organizational_unit(self):
|
||||
return defaults.organizational_unit(self.parsed_cert)
|
||||
|
||||
@property
|
||||
def country(self):
|
||||
return defaults.country(self.parsed_cert)
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
return defaults.state(self.parsed_cert)
|
||||
|
||||
@property
|
||||
def location(self):
|
||||
return defaults.location(self.parsed_cert)
|
||||
|
||||
@property
|
||||
def distinguished_name(self):
|
||||
return self.parsed_cert.subject.rfc4514_string()
|
||||
|
||||
"""
|
||||
# Commenting this property as key_type is now added as a column. This code can be removed in future.
|
||||
@property
|
||||
def key_type(self):
|
||||
if isinstance(self.parsed_cert.public_key(), rsa.RSAPublicKey):
|
||||
return "RSA{key_size}".format(
|
||||
key_size=self.parsed_cert.public_key().key_size
|
||||
)
|
||||
elif isinstance(self.parsed_cert.public_key(), ec.EllipticCurvePublicKey):
|
||||
return get_key_type_from_ec_curve(self.parsed_cert.public_key().curve.name)
|
||||
"""
|
||||
|
||||
@property
|
||||
def validity_remaining(self):
|
||||
return abs(self.not_after - arrow.utcnow())
|
||||
|
||||
@property
|
||||
def validity_range(self):
|
||||
return self.not_after - self.not_before
|
||||
|
||||
@property
|
||||
def subject(self):
|
||||
return self.parsed_cert.subject
|
||||
|
||||
@property
|
||||
def public_key(self):
|
||||
return self.parsed_cert.public_key()
|
||||
|
||||
@hybrid_property
|
||||
def expired(self):
|
||||
# can't compare offset-naive and offset-aware datetimes
|
||||
if arrow.Arrow.fromdatetime(self.not_after) <= arrow.utcnow():
|
||||
return True
|
||||
|
||||
@expired.expression
|
||||
def expired(cls):
|
||||
return case([(cls.not_after <= arrow.utcnow(), True)], else_=False)
|
||||
|
||||
@hybrid_property
|
||||
def revoked(self):
|
||||
if "revoked" == self.status:
|
||||
return True
|
||||
|
||||
@revoked.expression
|
||||
def revoked(cls):
|
||||
return case([(cls.status == "revoked", True)], else_=False)
|
||||
|
||||
@hybrid_property
|
||||
def has_private_key(self):
|
||||
return self.private_key is not None
|
||||
|
||||
@has_private_key.expression
|
||||
def has_private_key(cls):
|
||||
return case([(cls.private_key.is_(None), True)], else_=False)
|
||||
|
||||
@hybrid_property
|
||||
def in_rotation_window(self):
|
||||
"""
|
||||
Determines if a certificate is available for rotation based
on its associated rotation policy.
|
||||
:return:
|
||||
"""
|
||||
return "arn:aws:iam::{}:server-certificate/{}".format(account_number, self.name)
|
||||
now = arrow.utcnow()
|
||||
end = now + timedelta(days=self.rotation_policy.days)
|
||||
|
||||
def as_dict(self):
|
||||
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
|
||||
if self.not_after <= end:
|
||||
return True
|
||||
|
||||
@in_rotation_window.expression
|
||||
def in_rotation_window(cls):
|
||||
"""
|
||||
Determines if a certificate is available for rotation based
on its associated rotation policy.
|
||||
:return:
|
||||
"""
|
||||
return case(
|
||||
[(extract("day", cls.not_after - func.now()) <= RotationPolicy.days, True)],
|
||||
else_=False,
|
||||
)
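A minimal usage sketch (cert is a placeholder Certificate instance): the hybrid pair above can be evaluated in Python on a single object, or rendered into SQL inside a query, which is how get_all_pending_reissue uses it further down in this diff.

    # instance-level: Python comparison of not_after against now + rotation_policy.days
    if cert.in_rotation_window:
        current_app.logger.info("%s is due for rotation", cert.name)

    # query-level: the @in_rotation_window.expression above is rendered as a SQL CASE
    due = Certificate.query.filter(Certificate.in_rotation_window == True).all()  # noqa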
|
||||
|
||||
@property
|
||||
def extensions(self):
|
||||
# setup default values
|
||||
return_extensions = {"sub_alt_names": {"names": []}}
|
||||
|
||||
try:
|
||||
for extension in self.parsed_cert.extensions:
|
||||
value = extension.value
|
||||
if isinstance(value, x509.BasicConstraints):
|
||||
return_extensions["basic_constraints"] = value
|
||||
|
||||
elif isinstance(value, x509.SubjectAlternativeName):
|
||||
return_extensions["sub_alt_names"]["names"] = value
|
||||
|
||||
elif isinstance(value, x509.ExtendedKeyUsage):
|
||||
return_extensions["extended_key_usage"] = value
|
||||
|
||||
elif isinstance(value, x509.KeyUsage):
|
||||
return_extensions["key_usage"] = value
|
||||
|
||||
elif isinstance(value, x509.SubjectKeyIdentifier):
|
||||
return_extensions["subject_key_identifier"] = {"include_ski": True}
|
||||
|
||||
elif isinstance(value, x509.AuthorityInformationAccess):
|
||||
return_extensions["certificate_info_access"] = {"include_aia": True}
|
||||
|
||||
elif isinstance(value, x509.AuthorityKeyIdentifier):
|
||||
aki = {"use_key_identifier": False, "use_authority_cert": False}
|
||||
|
||||
if value.key_identifier:
|
||||
aki["use_key_identifier"] = True
|
||||
|
||||
if value.authority_cert_issuer:
|
||||
aki["use_authority_cert"] = True
|
||||
|
||||
return_extensions["authority_key_identifier"] = aki
|
||||
|
||||
elif isinstance(value, x509.CRLDistributionPoints):
|
||||
return_extensions["crl_distribution_points"] = {
|
||||
"include_crl_dp": value
|
||||
}
|
||||
|
||||
# TODO: Not supporting custom OIDs yet. https://github.com/Netflix/lemur/issues/665
|
||||
else:
|
||||
current_app.logger.warning(
|
||||
"Custom OIDs not yet supported for clone operation."
|
||||
)
|
||||
except InvalidCodepoint as e:
|
||||
sentry.captureException()
|
||||
current_app.logger.warning(
|
||||
"Unable to parse extensions due to underscore in dns name"
|
||||
)
|
||||
except ValueError as e:
|
||||
sentry.captureException()
|
||||
current_app.logger.warning("Unable to parse")
|
||||
current_app.logger.exception(e)
|
||||
|
||||
return return_extensions
|
||||
|
||||
def __repr__(self):
|
||||
return "Certificate(name={name})".format(name=self.name)
|
||||
|
||||
|
||||
@event.listens_for(Certificate.destinations, 'append')
|
||||
@event.listens_for(Certificate.destinations, "append")
|
||||
def update_destinations(target, value, initiator):
|
||||
"""
|
||||
Attempt to upload certificate to the new destination
|
||||
|
||||
:param target:
|
||||
:param value:
|
||||
:param initiator:
|
||||
:return:
|
||||
"""
|
||||
destination_plugin = plugins.get(value.plugin_name)
|
||||
destination_plugin.upload(target.name, target.body, target.private_key, target.chain, value.options)
|
||||
status = FAILURE_METRIC_STATUS
|
||||
|
||||
if target.expired:
|
||||
return
|
||||
try:
|
||||
if target.private_key or not destination_plugin.requires_key:
|
||||
destination_plugin.upload(
|
||||
target.name,
|
||||
target.body,
|
||||
target.private_key,
|
||||
target.chain,
|
||||
value.options,
|
||||
)
|
||||
status = SUCCESS_METRIC_STATUS
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
raise
|
||||
|
||||
metrics.send(
|
||||
"destination_upload",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={
|
||||
"status": status,
|
||||
"certificate": target.name,
|
||||
"destination": value.label,
|
||||
},
|
||||
)
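A brief illustrative sketch (my_cert and s3_destination are placeholder objects): because of the "append" listener above, adding a destination to a certificate triggers the upload right away, before the association is committed.

    my_cert.destinations.append(s3_destination)  # fires update_destinations()
    database.update(my_cert)                      # persists the association if the upload succeeded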
|
||||
|
||||
|
||||
@event.listens_for(Certificate.replaces, "append")
|
||||
def update_replacement(target, value, initiator):
|
||||
"""
|
||||
When a certificate is marked as 'replaced' we should not notify.
|
||||
|
||||
:param target:
|
||||
:param value:
|
||||
:param initiator:
|
||||
:return:
|
||||
"""
|
||||
value.notify = False
|
||||
|
||||
lemur/certificates/schemas.py (new file, 476 lines)
@@ -0,0 +1,476 @@
|
||||
"""
|
||||
.. module: lemur.certificates.schemas
|
||||
:platform: unix
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
"""
|
||||
from flask import current_app
|
||||
from flask_restful import inputs
|
||||
from flask_restful.reqparse import RequestParser
|
||||
from marshmallow import fields, validate, validates_schema, post_load, pre_load, post_dump
|
||||
from marshmallow.exceptions import ValidationError
|
||||
|
||||
from lemur.authorities.schemas import AuthorityNestedOutputSchema
|
||||
from lemur.certificates import utils as cert_utils
|
||||
from lemur.common import missing, utils, validators
|
||||
from lemur.common.fields import ArrowDateTime, Hex
|
||||
from lemur.common.schema import LemurInputSchema, LemurOutputSchema
|
||||
from lemur.constants import CERTIFICATE_KEY_TYPES
|
||||
from lemur.destinations.schemas import DestinationNestedOutputSchema
|
||||
from lemur.dns_providers.schemas import DnsProvidersNestedOutputSchema
|
||||
from lemur.domains.schemas import DomainNestedOutputSchema
|
||||
from lemur.notifications import service as notification_service
|
||||
from lemur.notifications.schemas import NotificationNestedOutputSchema
|
||||
from lemur.policies.schemas import RotationPolicyNestedOutputSchema
|
||||
from lemur.roles import service as roles_service
|
||||
from lemur.roles.schemas import RoleNestedOutputSchema
|
||||
from lemur.schemas import (
|
||||
AssociatedAuthoritySchema,
|
||||
AssociatedDestinationSchema,
|
||||
AssociatedCertificateSchema,
|
||||
AssociatedNotificationSchema,
|
||||
AssociatedDnsProviderSchema,
|
||||
PluginInputSchema,
|
||||
ExtensionSchema,
|
||||
AssociatedRoleSchema,
|
||||
EndpointNestedOutputSchema,
|
||||
AssociatedRotationPolicySchema,
|
||||
)
|
||||
from lemur.users.schemas import UserNestedOutputSchema
|
||||
|
||||
|
||||
class CertificateSchema(LemurInputSchema):
|
||||
owner = fields.Email(required=True)
|
||||
description = fields.String(missing="", allow_none=True)
|
||||
|
||||
|
||||
class CertificateCreationSchema(CertificateSchema):
|
||||
@post_load
|
||||
def default_notification(self, data):
|
||||
if not data["notifications"]:
|
||||
data[
|
||||
"notifications"
|
||||
] += notification_service.create_default_expiration_notifications(
|
||||
"DEFAULT_{0}".format(data["owner"].split("@")[0].upper()),
|
||||
[data["owner"]],
|
||||
)
|
||||
|
||||
data[
|
||||
"notifications"
|
||||
] += notification_service.create_default_expiration_notifications(
|
||||
"DEFAULT_SECURITY",
|
||||
current_app.config.get("LEMUR_SECURITY_TEAM_EMAIL"),
|
||||
current_app.config.get("LEMUR_SECURITY_TEAM_EMAIL_INTERVALS", None),
|
||||
)
|
||||
return data
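For example (values invented): a creation request with owner "team-foo@example.com" and an empty notifications list ends up, per the hook above, with the owner's default notifications plus the security team's.

    data = {"owner": "team-foo@example.com", "notifications": []}
    # after default_notification(): data["notifications"] contains the
    # DEFAULT_TEAM-FOO and DEFAULT_SECURITY expiration notifications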
|
||||
|
||||
|
||||
class CertificateInputSchema(CertificateCreationSchema):
|
||||
name = fields.String()
|
||||
common_name = fields.String(required=True, validate=validators.common_name)
|
||||
authority = fields.Nested(AssociatedAuthoritySchema, required=True)
|
||||
|
||||
validity_start = ArrowDateTime(allow_none=True)
|
||||
validity_end = ArrowDateTime(allow_none=True)
|
||||
validity_years = fields.Integer(allow_none=True)
|
||||
|
||||
destinations = fields.Nested(AssociatedDestinationSchema, missing=[], many=True)
|
||||
notifications = fields.Nested(AssociatedNotificationSchema, missing=[], many=True)
|
||||
replaces = fields.Nested(AssociatedCertificateSchema, missing=[], many=True)
|
||||
replacements = fields.Nested(
|
||||
AssociatedCertificateSchema, missing=[], many=True
|
||||
) # deprecated
|
||||
roles = fields.Nested(AssociatedRoleSchema, missing=[], many=True)
|
||||
dns_provider = fields.Nested(
|
||||
AssociatedDnsProviderSchema, missing=None, allow_none=True, required=False
|
||||
)
|
||||
|
||||
csr = fields.String(allow_none=True, validate=validators.csr)
|
||||
|
||||
key_type = fields.String(
|
||||
validate=validate.OneOf(CERTIFICATE_KEY_TYPES), missing="RSA2048"
|
||||
)
|
||||
|
||||
notify = fields.Boolean(default=True)
|
||||
rotation = fields.Boolean()
|
||||
rotation_policy = fields.Nested(
|
||||
AssociatedRotationPolicySchema,
|
||||
missing={"name": "default"},
|
||||
allow_none=True,
|
||||
default={"name": "default"},
|
||||
)
|
||||
|
||||
# certificate body fields
|
||||
organizational_unit = fields.String(
|
||||
missing=lambda: current_app.config.get("LEMUR_DEFAULT_ORGANIZATIONAL_UNIT")
|
||||
)
|
||||
organization = fields.String(
|
||||
missing=lambda: current_app.config.get("LEMUR_DEFAULT_ORGANIZATION")
|
||||
)
|
||||
location = fields.String()
|
||||
country = fields.String(
|
||||
missing=lambda: current_app.config.get("LEMUR_DEFAULT_COUNTRY")
|
||||
)
|
||||
state = fields.String(missing=lambda: current_app.config.get("LEMUR_DEFAULT_STATE"))
|
||||
|
||||
extensions = fields.Nested(ExtensionSchema)
|
||||
|
||||
@validates_schema
|
||||
def validate_authority(self, data):
|
||||
if 'authority' not in data:
|
||||
raise ValidationError("Missing Authority.")
|
||||
|
||||
if isinstance(data["authority"], str):
|
||||
raise ValidationError("Authority not found.")
|
||||
|
||||
if not data["authority"].active:
|
||||
raise ValidationError("The authority is inactive.", ["authority"])
|
||||
|
||||
@validates_schema
|
||||
def validate_dates(self, data):
|
||||
validators.dates(data)
|
||||
|
||||
@pre_load
|
||||
def load_data(self, data):
|
||||
if data.get("replacements"):
|
||||
data["replaces"] = data[
|
||||
"replacements"
|
||||
] # TODO remove when field is deprecated
|
||||
if data.get("csr"):
|
||||
csr_sans = cert_utils.get_sans_from_csr(data["csr"])
|
||||
if not data.get("extensions"):
|
||||
data["extensions"] = {"subAltNames": {"names": []}}
|
||||
elif not data["extensions"].get("subAltNames"):
|
||||
data["extensions"]["subAltNames"] = {"names": []}
|
||||
elif not data["extensions"]["subAltNames"].get("names"):
|
||||
data["extensions"]["subAltNames"]["names"] = []
|
||||
|
||||
data["extensions"]["subAltNames"]["names"] = csr_sans
|
||||
|
||||
common_name = cert_utils.get_cn_from_csr(data["csr"])
|
||||
if common_name:
|
||||
data["common_name"] = common_name
|
||||
key_type = cert_utils.get_key_type_from_csr(data["csr"])
|
||||
if key_type:
|
||||
data["key_type"] = key_type
|
||||
|
||||
# This code will be exercised for certificate import (without CSR)
|
||||
if data.get("key_type") is None:
|
||||
if data.get("body"):
|
||||
data["key_type"] = utils.get_key_type_from_certificate(data["body"])
|
||||
else:
|
||||
data["key_type"] = "RSA2048" # default value
|
||||
|
||||
return missing.convert_validity_years(data)
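As a worked example (values invented): if the payload includes a CSR whose subject CN is www.example.com with a SAN of api.example.com, load_data above overwrites data["extensions"]["subAltNames"]["names"] with the CSR's SANs, sets data["common_name"] to "www.example.com", derives data["key_type"] from the CSR's public key, and then passes the result through missing.convert_validity_years.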
|
||||
|
||||
|
||||
class CertificateEditInputSchema(CertificateSchema):
|
||||
owner = fields.String()
|
||||
|
||||
notify = fields.Boolean()
|
||||
rotation = fields.Boolean()
|
||||
|
||||
destinations = fields.Nested(AssociatedDestinationSchema, missing=[], many=True)
|
||||
notifications = fields.Nested(AssociatedNotificationSchema, missing=[], many=True)
|
||||
replaces = fields.Nested(AssociatedCertificateSchema, missing=[], many=True)
|
||||
replacements = fields.Nested(
|
||||
AssociatedCertificateSchema, missing=[], many=True
|
||||
) # deprecated
|
||||
roles = fields.Nested(AssociatedRoleSchema, missing=[], many=True)
|
||||
|
||||
@pre_load
|
||||
def load_data(self, data):
|
||||
if data.get("replacements"):
|
||||
data["replaces"] = data[
|
||||
"replacements"
|
||||
] # TODO remove when field is deprecated
|
||||
|
||||
if data.get("owner"):
|
||||
# Check if the role already exists. This avoids adding a duplicate role.
|
||||
if data.get("roles") and any(r.get("name") == data["owner"] for r in data["roles"]):
|
||||
return data
|
||||
|
||||
# Add required role
|
||||
owner_role = roles_service.get_or_create(
|
||||
data["owner"],
|
||||
description=f"Auto generated role based on owner: {data['owner']}"
|
||||
)
|
||||
|
||||
# Put role info in correct format using RoleNestedOutputSchema
|
||||
owner_role_dict = RoleNestedOutputSchema().dump(owner_role).data
|
||||
if data.get("roles"):
|
||||
data["roles"].append(owner_role_dict)
|
||||
else:
|
||||
data["roles"] = [owner_role_dict]
|
||||
|
||||
return data
|
||||
|
||||
@post_load
|
||||
def enforce_notifications(self, data):
|
||||
"""
|
||||
Add default notification for current owner if none exist.
|
||||
This ensures that the default notifications are added in the event of an owner change.
Notifications for the old owner are retained unless they are explicitly removed later in the code path.
|
||||
:param data:
|
||||
:return:
|
||||
"""
|
||||
if data.get("owner"):
|
||||
notification_name = "DEFAULT_{0}".format(
|
||||
data["owner"].split("@")[0].upper()
|
||||
)
|
||||
|
||||
# Even if one default notification exists, return.
# This allows a user to remove unwanted default notifications for the current owner
|
||||
if any(n.label.startswith(notification_name) for n in data["notifications"]):
|
||||
return data
|
||||
|
||||
data[
|
||||
"notifications"
|
||||
] += notification_service.create_default_expiration_notifications(
|
||||
notification_name, [data["owner"]]
|
||||
)
|
||||
|
||||
return data
|
||||
|
||||
|
||||
class CertificateNestedOutputSchema(LemurOutputSchema):
|
||||
__envelope__ = False
|
||||
id = fields.Integer()
|
||||
name = fields.String()
|
||||
owner = fields.Email()
|
||||
creator = fields.Nested(UserNestedOutputSchema)
|
||||
description = fields.String()
|
||||
|
||||
status = fields.String()
|
||||
|
||||
bits = fields.Integer()
|
||||
body = fields.String()
|
||||
chain = fields.String()
|
||||
csr = fields.String()
|
||||
active = fields.Boolean()
|
||||
|
||||
rotation = fields.Boolean()
|
||||
notify = fields.Boolean()
|
||||
rotation_policy = fields.Nested(RotationPolicyNestedOutputSchema)
|
||||
|
||||
# Note aliasing is the first step in deprecating these fields.
|
||||
cn = fields.String() # deprecated
|
||||
common_name = fields.String(attribute="cn")
|
||||
|
||||
not_after = fields.DateTime() # deprecated
|
||||
validity_end = ArrowDateTime(attribute="not_after")
|
||||
|
||||
not_before = fields.DateTime() # deprecated
|
||||
validity_start = ArrowDateTime(attribute="not_before")
|
||||
|
||||
issuer = fields.Nested(AuthorityNestedOutputSchema)
|
||||
|
||||
|
||||
class CertificateCloneSchema(LemurOutputSchema):
|
||||
__envelope__ = False
|
||||
description = fields.String()
|
||||
common_name = fields.String()
|
||||
|
||||
|
||||
class CertificateOutputSchema(LemurOutputSchema):
|
||||
id = fields.Integer()
|
||||
external_id = fields.String()
|
||||
bits = fields.Integer()
|
||||
body = fields.String()
|
||||
chain = fields.String()
|
||||
csr = fields.String()
|
||||
deleted = fields.Boolean(default=False)
|
||||
description = fields.String()
|
||||
issuer = fields.String()
|
||||
name = fields.String()
|
||||
dns_provider_id = fields.Integer(required=False, allow_none=True)
|
||||
date_created = ArrowDateTime()
|
||||
resolved = fields.Boolean(required=False, allow_none=True)
|
||||
resolved_cert_id = fields.Integer(required=False, allow_none=True)
|
||||
|
||||
rotation = fields.Boolean()
|
||||
|
||||
# Note aliasing is the first step in deprecating these fields.
|
||||
notify = fields.Boolean()
|
||||
active = fields.Boolean(attribute="notify")
|
||||
has_private_key = fields.Boolean()
|
||||
|
||||
cn = fields.String()
|
||||
common_name = fields.String(attribute="cn")
|
||||
distinguished_name = fields.String()
|
||||
|
||||
not_after = fields.DateTime()
|
||||
validity_end = ArrowDateTime(attribute="not_after")
|
||||
|
||||
not_before = fields.DateTime()
|
||||
validity_start = ArrowDateTime(attribute="not_before")
|
||||
|
||||
owner = fields.Email()
|
||||
san = fields.Boolean()
|
||||
serial = fields.String()
|
||||
serial_hex = Hex(attribute="serial")
|
||||
signing_algorithm = fields.String()
|
||||
key_type = fields.String(allow_none=True)
|
||||
|
||||
status = fields.String()
|
||||
user = fields.Nested(UserNestedOutputSchema)
|
||||
|
||||
extensions = fields.Nested(ExtensionSchema)
|
||||
|
||||
# associated objects
|
||||
domains = fields.Nested(DomainNestedOutputSchema, many=True)
|
||||
destinations = fields.Nested(DestinationNestedOutputSchema, many=True)
|
||||
notifications = fields.Nested(NotificationNestedOutputSchema, many=True)
|
||||
replaces = fields.Nested(CertificateNestedOutputSchema, many=True)
|
||||
authority = fields.Nested(AuthorityNestedOutputSchema)
|
||||
dns_provider = fields.Nested(DnsProvidersNestedOutputSchema)
|
||||
roles = fields.Nested(RoleNestedOutputSchema, many=True)
|
||||
endpoints = fields.Nested(EndpointNestedOutputSchema, many=True, missing=[])
|
||||
replaced_by = fields.Nested(
|
||||
CertificateNestedOutputSchema, many=True, attribute="replaced"
|
||||
)
|
||||
rotation_policy = fields.Nested(RotationPolicyNestedOutputSchema)
|
||||
|
||||
country = fields.String()
|
||||
location = fields.String()
|
||||
state = fields.String()
|
||||
organization = fields.String()
|
||||
organizational_unit = fields.String()
|
||||
|
||||
@post_dump
|
||||
def handle_subject_details(self, data):
|
||||
# Remove subject details if the authority is CA/Browser Forum compliant; the code will use the default set of values in that case.
# If an authority's CA/Browser Forum compliance is unknown (None), it is safe to fall back to default values. Thus the
# condition below checks for 'not False' ==> 'True or None'
|
||||
if data.get("authority"):
|
||||
is_cab_compliant = data.get("authority").get("isCabCompliant")
|
||||
|
||||
if is_cab_compliant is not False:
|
||||
data.pop("country", None)
|
||||
data.pop("state", None)
|
||||
data.pop("location", None)
|
||||
data.pop("organization", None)
|
||||
data.pop("organizational_unit", None)
|
||||
|
||||
|
||||
class CertificateShortOutputSchema(LemurOutputSchema):
|
||||
id = fields.Integer()
|
||||
name = fields.String()
|
||||
owner = fields.Email()
|
||||
notify = fields.Boolean()
|
||||
authority = fields.Nested(AuthorityNestedOutputSchema)
|
||||
issuer = fields.String()
|
||||
cn = fields.String()
|
||||
|
||||
|
||||
class CertificateUploadInputSchema(CertificateCreationSchema):
|
||||
name = fields.String()
|
||||
authority = fields.Nested(AssociatedAuthoritySchema, required=False)
|
||||
notify = fields.Boolean(missing=True)
|
||||
external_id = fields.String(missing=None, allow_none=True)
|
||||
private_key = fields.String()
|
||||
body = fields.String(required=True)
|
||||
chain = fields.String(missing=None, allow_none=True)
|
||||
csr = fields.String(required=False, allow_none=True, validate=validators.csr)
|
||||
key_type = fields.String()
|
||||
|
||||
destinations = fields.Nested(AssociatedDestinationSchema, missing=[], many=True)
|
||||
notifications = fields.Nested(AssociatedNotificationSchema, missing=[], many=True)
|
||||
replaces = fields.Nested(AssociatedCertificateSchema, missing=[], many=True)
|
||||
roles = fields.Nested(AssociatedRoleSchema, missing=[], many=True)
|
||||
|
||||
@validates_schema
|
||||
def keys(self, data):
|
||||
if data.get("destinations"):
|
||||
if not data.get("private_key"):
|
||||
raise ValidationError("Destinations require private key.")
|
||||
|
||||
@validates_schema
|
||||
def validate_cert_private_key_chain(self, data):
|
||||
cert = None
|
||||
key = None
|
||||
if data.get("body"):
|
||||
try:
|
||||
cert = utils.parse_certificate(data["body"])
|
||||
except ValueError:
|
||||
raise ValidationError(
|
||||
"Public certificate presented is not valid.", field_names=["body"]
|
||||
)
|
||||
|
||||
if data.get("private_key"):
|
||||
try:
|
||||
key = utils.parse_private_key(data["private_key"])
|
||||
except ValueError:
|
||||
raise ValidationError(
|
||||
"Private key presented is not valid.", field_names=["private_key"]
|
||||
)
|
||||
|
||||
if cert and key:
|
||||
# Throws ValidationError
|
||||
validators.verify_private_key_match(key, cert)
|
||||
|
||||
if data.get("chain"):
|
||||
try:
|
||||
chain = utils.parse_cert_chain(data["chain"])
|
||||
except ValueError:
|
||||
raise ValidationError(
|
||||
"Invalid certificate in certificate chain.", field_names=["chain"]
|
||||
)
|
||||
|
||||
# Throws ValidationError
|
||||
validators.verify_cert_chain([cert] + chain)
|
||||
|
||||
@pre_load
|
||||
def load_data(self, data):
|
||||
if data.get("body"):
|
||||
try:
|
||||
data["key_type"] = utils.get_key_type_from_certificate(data["body"])
|
||||
except ValueError:
|
||||
raise ValidationError(
|
||||
"Public certificate presented is not valid.", field_names=["body"]
|
||||
)
|
||||
|
||||
|
||||
class CertificateExportInputSchema(LemurInputSchema):
|
||||
plugin = fields.Nested(PluginInputSchema)
|
||||
|
||||
|
||||
class CertificateNotificationOutputSchema(LemurOutputSchema):
|
||||
description = fields.String()
|
||||
issuer = fields.String()
|
||||
name = fields.String()
|
||||
owner = fields.Email()
|
||||
user = fields.Nested(UserNestedOutputSchema)
|
||||
validity_end = ArrowDateTime(attribute="not_after")
|
||||
replaced_by = fields.Nested(
|
||||
CertificateNestedOutputSchema, many=True, attribute="replaced"
|
||||
)
|
||||
endpoints = fields.Nested(EndpointNestedOutputSchema, many=True, missing=[])
|
||||
|
||||
|
||||
class CertificateRevokeSchema(LemurInputSchema):
|
||||
comments = fields.String()
|
||||
|
||||
|
||||
certificates_list_request_parser = RequestParser()
|
||||
certificates_list_request_parser.add_argument("short", type=inputs.boolean, default=False, location="args")
|
||||
|
||||
|
||||
def certificates_list_output_schema_factory():
|
||||
args = certificates_list_request_parser.parse_args()
|
||||
if args["short"]:
|
||||
return certificates_short_output_schema
|
||||
else:
|
||||
return certificates_output_schema
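A minimal sketch of how the factory is typically consumed (the surrounding resource code is assumed, not shown in this diff; certificates is a placeholder query result):

    # GET /certificates?short=true selects the trimmed schema, anything else the full one
    schema = certificates_list_output_schema_factory()
    serialized = schema.dump(certificates).data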
|
||||
|
||||
|
||||
certificate_input_schema = CertificateInputSchema()
|
||||
certificate_output_schema = CertificateOutputSchema()
|
||||
certificates_output_schema = CertificateOutputSchema(many=True)
|
||||
certificates_short_output_schema = CertificateShortOutputSchema(many=True)
|
||||
certificate_upload_input_schema = CertificateUploadInputSchema()
|
||||
certificate_export_input_schema = CertificateExportInputSchema()
|
||||
certificate_edit_input_schema = CertificateEditInputSchema()
|
||||
certificate_notification_output_schema = CertificateNotificationOutputSchema()
|
||||
certificate_revoke_schema = CertificateRevokeSchema()
|
||||
@@ -1,34 +1,45 @@
|
||||
"""
|
||||
.. module: service
|
||||
.. module: lemur.certificate.service
|
||||
:platform: Unix
|
||||
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
"""
|
||||
import arrow
|
||||
|
||||
from sqlalchemy import func, or_
|
||||
from flask import g, current_app
|
||||
|
||||
from lemur import database
|
||||
from lemur.plugins.base import plugins
|
||||
from lemur.certificates.models import Certificate
|
||||
|
||||
from lemur.destinations.models import Destination
|
||||
from lemur.notifications.models import Notification
|
||||
from lemur.authorities.models import Authority
|
||||
|
||||
from lemur.roles.models import Role
|
||||
|
||||
from cryptography import x509
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import rsa
|
||||
from flask import current_app
|
||||
from sqlalchemy import func, or_, not_, cast, Integer
|
||||
|
||||
from lemur import database
|
||||
from lemur.authorities.models import Authority
|
||||
from lemur.certificates.models import Certificate
|
||||
from lemur.certificates.schemas import CertificateOutputSchema, CertificateInputSchema
|
||||
from lemur.common.utils import generate_private_key, truthiness
|
||||
from lemur.destinations.models import Destination
|
||||
from lemur.domains.models import Domain
|
||||
from lemur.extensions import metrics, sentry, signals
|
||||
from lemur.models import certificate_associations
|
||||
from lemur.notifications.models import Notification
|
||||
from lemur.pending_certificates.models import PendingCertificate
|
||||
from lemur.plugins.base import plugins
|
||||
from lemur.roles import service as role_service
|
||||
from lemur.roles.models import Role
|
||||
|
||||
csr_created = signals.signal("csr_created", "CSR generated")
|
||||
csr_imported = signals.signal("csr_imported", "CSR imported from external source")
|
||||
certificate_issued = signals.signal(
|
||||
"certificate_issued", "Authority issued a certificate"
|
||||
)
|
||||
certificate_imported = signals.signal(
|
||||
"certificate_imported", "Certificate imported from external source"
|
||||
)
|
||||
|
||||
|
||||
def get(cert_id):
|
||||
"""
|
||||
Retrieves certificate by it's ID.
|
||||
Retrieves certificate by its ID.
|
||||
|
||||
:param cert_id:
|
||||
:return:
|
||||
@@ -38,12 +49,40 @@ def get(cert_id):
|
||||
|
||||
def get_by_name(name):
|
||||
"""
|
||||
Retrieves certificate by it's Name.
|
||||
Retrieves certificate by its Name.
|
||||
|
||||
:param name:
|
||||
:return:
|
||||
"""
|
||||
return database.get(Certificate, name, field='name')
|
||||
return database.get(Certificate, name, field="name")
|
||||
|
||||
|
||||
def get_by_serial(serial):
|
||||
"""
|
||||
Retrieves certificate(s) by serial number.
|
||||
:param serial:
|
||||
:return:
|
||||
"""
|
||||
if isinstance(serial, int):
|
||||
# although serial is a number, the DB column is String(128)
|
||||
serial = str(serial)
|
||||
return Certificate.query.filter(Certificate.serial == serial).all()
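A small usage sketch (the serial value is invented); both calls return the same rows because the integer is coerced to a string before comparison against the String(128) column:

    get_by_serial(93247054951423)        # int is converted to "93247054951423"
    get_by_serial("93247054951423")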
|
||||
|
||||
|
||||
def get_by_attributes(conditions):
|
||||
"""
|
||||
Retrieves certificate(s) by conditions given in a hash of given key=>value pairs.
|
||||
:param conditions:
|
||||
:return:
|
||||
"""
|
||||
# Ensure that each of the given conditions corresponds to actual columns
|
||||
# if not, silently remove it
|
||||
# copy the keys so we can safely pop from the dict while iterating
for attr in list(conditions.keys()):
|
||||
if attr not in Certificate.__table__.columns:
|
||||
conditions.pop(attr)
|
||||
|
||||
query = database.session_query(Certificate)
|
||||
return database.find_all(query, Certificate, conditions).all()
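Illustrative call (the keys are examples): unknown keys are silently dropped before the query is built, so only real Certificate columns filter the result.

    certs = get_by_attributes({"cn": "example.com", "owner": "team-foo@example.com", "bogus": 1})
    # "bogus" is not a Certificate column, so it is removed and ignored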
|
||||
|
||||
|
||||
def delete(cert_id):
|
||||
@@ -64,74 +103,212 @@ def get_all_certs():
|
||||
return Certificate.query.all()
|
||||
|
||||
|
||||
def find_duplicates(cert_body):
|
||||
def get_all_valid_certs(authority_plugin_name):
|
||||
"""
|
||||
Retrieves all valid (not expired) certificates within Lemur for the given authority plugin names;
the filter is ignored if no authority_plugin_name is provided.

Note that depending on the DB size, retrieving all certificates might be an expensive operation.
|
||||
|
||||
:return:
|
||||
"""
|
||||
if authority_plugin_name:
|
||||
return (
|
||||
Certificate.query.outerjoin(Authority, Authority.id == Certificate.authority_id).filter(
|
||||
Certificate.not_after > arrow.now().format("YYYY-MM-DD")).filter(
|
||||
Authority.plugin_name.in_(authority_plugin_name)).all()
|
||||
)
|
||||
else:
|
||||
return (
|
||||
Certificate.query.filter(Certificate.not_after > arrow.now().format("YYYY-MM-DD")).all()
|
||||
)
|
||||
|
||||
|
||||
def get_all_pending_cleaning_expired(source):
|
||||
"""
|
||||
Retrieves all certificates that are available for cleaning. These are certificates which are expired and are not
|
||||
attached to any endpoints.
|
||||
|
||||
:param source: the source to search for certificates
|
||||
:return: list of pending certificates
|
||||
"""
|
||||
return (
|
||||
Certificate.query.filter(Certificate.sources.any(id=source.id))
|
||||
.filter(not_(Certificate.endpoints.any()))
|
||||
.filter(Certificate.expired)
|
||||
.all()
|
||||
)
|
||||
|
||||
|
||||
def get_all_certs_attached_to_endpoint_without_autorotate():
|
||||
"""
|
||||
Retrieves all certificates that are attached to an endpoint, but that do not have autorotate enabled.
|
||||
|
||||
:return: list of certificates attached to an endpoint without autorotate
|
||||
"""
|
||||
return (
|
||||
Certificate.query.filter(Certificate.endpoints.any())
|
||||
.filter(Certificate.rotation == False)
|
||||
.filter(Certificate.not_after >= arrow.now())
|
||||
.filter(not_(Certificate.replaced.any()))
|
||||
.all() # noqa
|
||||
)
|
||||
|
||||
|
||||
def get_all_pending_cleaning_expiring_in_days(source, days_to_expire):
|
||||
"""
|
||||
Retrieves all certificates that are available for cleaning, not attached to endpoint,
|
||||
and within X days from expiration.
|
||||
|
||||
:param days_to_expire: defines how many days till the certificate is expired
|
||||
:param source: the source to search for certificates
|
||||
:return: list of pending certificates
|
||||
"""
|
||||
expiration_window = arrow.now().shift(days=+days_to_expire).format("YYYY-MM-DD")
|
||||
return (
|
||||
Certificate.query.filter(Certificate.sources.any(id=source.id))
|
||||
.filter(not_(Certificate.endpoints.any()))
|
||||
.filter(Certificate.not_after < expiration_window)
|
||||
.all()
|
||||
)
|
||||
|
||||
|
||||
def get_all_pending_cleaning_issued_since_days(source, days_since_issuance):
|
||||
"""
|
||||
Retrieves all certificates that are available for cleaning: not attached to endpoint, and X days since issuance.
|
||||
|
||||
:param days_since_issuance: defines how many days since the certificate is issued
|
||||
:param source: the source to search for certificates
|
||||
:return: list of pending certificates
|
||||
"""
|
||||
not_in_use_window = (
|
||||
arrow.now().shift(days=-days_since_issuance).format("YYYY-MM-DD")
|
||||
)
|
||||
return (
|
||||
Certificate.query.filter(Certificate.sources.any(id=source.id))
|
||||
.filter(not_(Certificate.endpoints.any()))
|
||||
.filter(Certificate.date_created > not_in_use_window)
|
||||
.all()
|
||||
)
|
||||
|
||||
|
||||
def get_all_pending_reissue():
|
||||
"""
|
||||
Retrieves all certificates that need to be rotated.
|
||||
|
||||
Must be X days from expiration; uses the certificate's rotation
policy to determine how many days from expiration the certificate must be
for rotation to be pending.
|
||||
|
||||
:return:
|
||||
"""
|
||||
return (
|
||||
Certificate.query.filter(Certificate.rotation == True)
|
||||
.filter(not_(Certificate.replaced.any()))
|
||||
.filter(Certificate.in_rotation_window == True)
|
||||
.all()
|
||||
) # noqa
|
||||
|
||||
|
||||
def find_duplicates(cert):
|
||||
"""
|
||||
Finds certificates that already exist within Lemur. We do this by looking for
|
||||
certificate bodies that are the same. This is the most reliable way to determine
|
||||
if a certificate is already being tracked by Lemur.
|
||||
|
||||
:param cert_body:
|
||||
:param cert:
|
||||
:return:
|
||||
"""
|
||||
return Certificate.query.filter_by(body=cert_body).all()
|
||||
if cert["chain"]:
|
||||
return Certificate.query.filter_by(
|
||||
body=cert["body"].strip(), chain=cert["chain"].strip()
|
||||
).all()
|
||||
else:
|
||||
return Certificate.query.filter_by(body=cert["body"].strip(), chain=None).all()
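Hedged usage sketch (the PEM strings are placeholders): the function now expects a dict carrying body and chain, matching the upload payload shape, rather than a bare body string.

    duplicates = find_duplicates({"body": pem_body, "chain": pem_chain})
    if duplicates:
        current_app.logger.debug("certificate already tracked: %s", duplicates[0].name)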
|
||||
|
||||
|
||||
def update(cert_id, owner, description, active, destinations, notifications):
|
||||
def export(cert, export_plugin):
|
||||
"""
|
||||
Updates a certificate.
|
||||
Exports a certificate to the requested format. This format
|
||||
may be a binary format.
|
||||
|
||||
:param export_plugin:
|
||||
:param cert:
|
||||
:return:
|
||||
"""
|
||||
plugin = plugins.get(export_plugin["slug"])
|
||||
return plugin.export(
|
||||
cert.body, cert.chain, cert.private_key, export_plugin["pluginOptions"]
|
||||
)
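Minimal sketch of a call (the plugin slug is illustrative; any installed export plugin works the same way, and the return value is whatever that plugin's export() produces, often binary data):

    exported = export(cert, {"slug": "openssl-export", "pluginOptions": []})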
|
||||
|
||||
|
||||
def update(cert_id, **kwargs):
|
||||
"""
|
||||
Updates a certificate
|
||||
:param cert_id:
|
||||
:param owner:
|
||||
:param active:
|
||||
:return:
|
||||
"""
|
||||
from lemur.notifications import service as notification_service
|
||||
cert = get(cert_id)
|
||||
cert.active = active
|
||||
cert.description = description
|
||||
|
||||
# we might have to create new notifications if the owner changes
|
||||
new_notifications = []
|
||||
# get existing names to remove
|
||||
notification_name = "DEFAULT_{0}".format(cert.owner.split('@')[0].upper())
|
||||
for n in notifications:
|
||||
if notification_name not in n.label:
|
||||
new_notifications.append(n)
|
||||
|
||||
notification_name = "DEFAULT_{0}".format(owner.split('@')[0].upper())
|
||||
new_notifications += notification_service.create_default_expiration_notifications(notification_name, owner)
|
||||
|
||||
cert.notifications = new_notifications
|
||||
|
||||
database.update_list(cert, 'destinations', Destination, destinations)
|
||||
|
||||
cert.owner = owner
|
||||
for key, value in kwargs.items():
|
||||
setattr(cert, key, value)
|
||||
|
||||
return database.update(cert)
|
||||
|
||||
|
||||
def mint(issuer_options):
|
||||
def cleanup_owner_roles_notification(owner_name, kwargs):
|
||||
kwargs["roles"] = [r for r in kwargs["roles"] if r.name != owner_name]
|
||||
notification_prefix = f"DEFAULT_{owner_name.split('@')[0].upper()}"
|
||||
kwargs["notifications"] = [n for n in kwargs["notifications"] if not n.label.startswith(notification_prefix)]
|
||||
|
||||
|
||||
def update_notify(cert, notify_flag):
|
||||
"""
|
||||
Toggle notification value which is a boolean
|
||||
:param notify_flag: new notify value
|
||||
:param cert: Certificate object to be updated
|
||||
:return:
|
||||
"""
|
||||
cert.notify = notify_flag
|
||||
return database.update(cert)
|
||||
|
||||
|
||||
def create_certificate_roles(**kwargs):
|
||||
# create a role for the owner and assign it
|
||||
owner_role = role_service.get_or_create(
|
||||
kwargs["owner"],
|
||||
description=f"Auto generated role based on owner: {kwargs['owner']}"
|
||||
)
|
||||
|
||||
# ensure that the authority's owner is also associated with the certificate
|
||||
if kwargs.get("authority"):
|
||||
authority_owner_role = role_service.get_by_name(kwargs["authority"].owner)
|
||||
return [owner_role, authority_owner_role]
|
||||
|
||||
return [owner_role]
|
||||
|
||||
|
||||
def mint(**kwargs):
|
||||
"""
|
||||
Minting is slightly different for each authority.
|
||||
Support for multiple authorities is handled by individual plugins.
|
||||
|
||||
:param issuer_options:
|
||||
"""
|
||||
authority = issuer_options['authority']
|
||||
authority = kwargs["authority"]
|
||||
|
||||
issuer = plugins.get(authority.plugin_name)
|
||||
|
||||
csr, private_key = create_csr(issuer_options)
|
||||
# allow the CSR to be specified by the user
|
||||
if not kwargs.get("csr"):
|
||||
csr, private_key = create_csr(**kwargs)
|
||||
csr_created.send(authority=authority, csr=csr)
|
||||
else:
|
||||
csr = str(kwargs.get("csr"))
|
||||
private_key = None
|
||||
csr_imported.send(authority=authority, csr=csr)
|
||||
|
||||
issuer_options['creator'] = g.user.email
|
||||
cert_body, cert_chain = issuer.create_certificate(csr, issuer_options)
|
||||
|
||||
cert = Certificate(cert_body, private_key, cert_chain)
|
||||
|
||||
cert.user = g.user
|
||||
cert.authority = authority
|
||||
database.update(cert)
|
||||
return cert, private_key, cert_chain,
|
||||
cert_body, cert_chain, external_id = issuer.create_certificate(csr, kwargs)
|
||||
return cert_body, private_key, cert_chain, external_id, csr
|
||||
|
||||
|
||||
def import_certificate(**kwargs):
|
||||
@@ -147,67 +324,31 @@ def import_certificate(**kwargs):
|
||||
|
||||
:param kwargs:
|
||||
"""
|
||||
from lemur.users import service as user_service
|
||||
from lemur.notifications import service as notification_service
|
||||
cert = Certificate(kwargs['public_certificate'], chain=kwargs['intermediate_certificate'])
|
||||
if not kwargs.get("owner"):
|
||||
kwargs["owner"] = current_app.config.get("LEMUR_SECURITY_TEAM_EMAIL")[0]
|
||||
|
||||
# TODO future source plugins might have a better understanding of who the 'owner' is; we should support this
|
||||
cert.owner = kwargs.get('owner', current_app.config.get('LEMUR_SECURITY_TEAM_EMAIL')[0])
|
||||
cert.creator = kwargs.get('creator', user_service.get_by_email('lemur@nobody'))
|
||||
|
||||
# NOTE existing certs may not follow our naming standard; we will
# overwrite the generated name with the actual cert name
|
||||
if kwargs.get('name'):
|
||||
cert.name = kwargs.get('name')
|
||||
|
||||
if kwargs.get('user'):
|
||||
cert.user = kwargs.get('user')
|
||||
|
||||
notification_name = 'DEFAULT_SECURITY'
|
||||
notifications = notification_service.create_default_expiration_notifications(notification_name, current_app.config.get('LEMUR_SECURITY_TEAM_EMAIL'))
|
||||
cert.notifications = notifications
|
||||
|
||||
cert = database.create(cert)
|
||||
return cert
|
||||
return upload(**kwargs)
|
||||
|
||||
|
||||
def upload(**kwargs):
|
||||
"""
|
||||
Allows for pre-made certificates to be imported into Lemur.
|
||||
"""
|
||||
from lemur.notifications import service as notification_service
|
||||
cert = Certificate(
|
||||
kwargs.get('public_cert'),
|
||||
kwargs.get('private_key'),
|
||||
kwargs.get('intermediate_cert'),
|
||||
)
|
||||
roles = create_certificate_roles(**kwargs)
|
||||
|
||||
# we override the generated name if one is provided
|
||||
if kwargs.get('name'):
|
||||
cert.name = kwargs['name']
|
||||
if kwargs.get("roles"):
|
||||
kwargs["roles"] += roles
|
||||
else:
|
||||
kwargs["roles"] = roles
|
||||
|
||||
cert.description = kwargs.get('description')
|
||||
|
||||
cert.owner = kwargs['owner']
|
||||
cert = Certificate(**kwargs)
|
||||
cert.authority = kwargs.get("authority")
|
||||
cert = database.create(cert)
|
||||
|
||||
g.user.certificates.append(cert)
|
||||
kwargs["creator"].certificates.append(cert)
|
||||
|
||||
database.update_list(cert, 'destinations', Destination, kwargs.get('destinations'))
|
||||
|
||||
database.update_list(cert, 'notifications', Notification, kwargs.get('notifications'))
|
||||
|
||||
# create default notifications for this certificate if none are provided
|
||||
notifications = []
|
||||
if not kwargs.get('notifications'):
|
||||
notification_name = "DEFAULT_{0}".format(cert.owner.split('@')[0].upper())
|
||||
notifications += notification_service.create_default_expiration_notifications(notification_name, [cert.owner])
|
||||
|
||||
notification_name = 'DEFAULT_SECURITY'
|
||||
notifications += notification_service.create_default_expiration_notifications(notification_name, current_app.config.get('LEMUR_SECURITY_TEAM_EMAIL'))
|
||||
cert.notifications = notifications
|
||||
|
||||
database.update(cert)
|
||||
cert = database.update(cert)
|
||||
certificate_imported.send(certificate=cert, authority=cert.authority)
|
||||
return cert
|
||||
|
||||
|
||||
@@ -215,34 +356,54 @@ def create(**kwargs):
|
||||
"""
|
||||
Creates a new certificate.
|
||||
"""
|
||||
from lemur.notifications import service as notification_service
|
||||
cert, private_key, cert_chain = mint(kwargs)
|
||||
try:
|
||||
cert_body, private_key, cert_chain, external_id, csr = mint(**kwargs)
|
||||
except Exception:
|
||||
current_app.logger.error("Exception minting certificate", exc_info=True)
|
||||
sentry.captureException()
|
||||
raise
|
||||
kwargs["body"] = cert_body
|
||||
kwargs["private_key"] = private_key
|
||||
kwargs["chain"] = cert_chain
|
||||
kwargs["external_id"] = external_id
|
||||
kwargs["csr"] = csr
|
||||
|
||||
cert.owner = kwargs['owner']
|
||||
roles = create_certificate_roles(**kwargs)
|
||||
|
||||
database.create(cert)
|
||||
cert.description = kwargs['description']
|
||||
g.user.certificates.append(cert)
|
||||
database.update(g.user)
|
||||
if kwargs.get("roles"):
|
||||
kwargs["roles"] += roles
|
||||
else:
|
||||
kwargs["roles"] = roles
|
||||
|
||||
# do this after the certificate has already been created because if it fails to upload to the third party
|
||||
# we do not want to lose the certificate information.
|
||||
database.update_list(cert, 'destinations', Destination, kwargs.get('destinations'))
|
||||
if cert_body:
|
||||
cert = Certificate(**kwargs)
|
||||
kwargs["creator"].certificates.append(cert)
|
||||
else:
|
||||
cert = PendingCertificate(**kwargs)
|
||||
kwargs["creator"].pending_certificates.append(cert)
|
||||
|
||||
database.update_list(cert, 'notifications', Notification, kwargs.get('notifications'))
|
||||
cert.authority = kwargs["authority"]
|
||||
|
||||
# create default notifications for this certificate if none are provided
|
||||
notifications = cert.notifications
|
||||
if not kwargs.get('notifications'):
|
||||
notification_name = "DEFAULT_{0}".format(cert.owner.split('@')[0].upper())
|
||||
notifications += notification_service.create_default_expiration_notifications(notification_name, [cert.owner])
|
||||
database.commit()
|
||||
|
||||
notification_name = 'DEFAULT_SECURITY'
|
||||
notifications += notification_service.create_default_expiration_notifications(notification_name,
|
||||
current_app.config.get('LEMUR_SECURITY_TEAM_EMAIL'))
|
||||
cert.notifications = notifications
|
||||
if isinstance(cert, Certificate):
|
||||
certificate_issued.send(certificate=cert, authority=cert.authority)
|
||||
metrics.send(
|
||||
"certificate_issued",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags=dict(owner=cert.owner, issuer=cert.issuer),
|
||||
)
|
||||
|
||||
if isinstance(cert, PendingCertificate):
|
||||
# We need to refresh the pending certificate to avoid "Instance is not bound to a Session; "
|
||||
# "attribute refresh operation cannot proceed"
|
||||
pending_cert = database.session_query(PendingCertificate).get(cert.id)
|
||||
from lemur.common.celery import fetch_acme_cert
|
||||
|
||||
if not current_app.config.get("ACME_DISABLE_AUTORESOLVE", False):
|
||||
fetch_acme_cert.apply_async((pending_cert.id,), countdown=5)
|
||||
|
||||
database.update(cert)
|
||||
return cert
|
||||
|
||||
|
||||
@@ -255,161 +416,240 @@ def render(args):
|
||||
"""
|
||||
query = database.session_query(Certificate)
|
||||
|
||||
time_range = args.pop('time_range')
|
||||
destination_id = args.pop('destination_id')
|
||||
notification_id = args.pop('notification_id', None)
|
||||
show = args.pop('show')
|
||||
show_expired = args.pop("showExpired")
|
||||
if show_expired != 1:
|
||||
one_month_old = (
|
||||
arrow.now()
|
||||
.shift(months=current_app.config.get("HIDE_EXPIRED_CERTS_AFTER_MONTHS", -1))
|
||||
.format("YYYY-MM-DD")
|
||||
)
|
||||
query = query.filter(Certificate.not_after > one_month_old)
|
||||
|
||||
time_range = args.pop("time_range")
|
||||
|
||||
destination_id = args.pop("destination_id")
|
||||
notification_id = args.pop("notification_id", None)
|
||||
show = args.pop("show")
|
||||
# owner = args.pop('owner')
|
||||
# creator = args.pop('creator') # TODO we should enable filtering by owner
|
||||
|
||||
filt = args.pop('filter')
|
||||
filt = args.pop("filter")
|
||||
|
||||
if filt:
|
||||
terms = filt.split(';')
|
||||
if 'issuer' in terms:
|
||||
terms = filt.split(";")
|
||||
term = "%{0}%".format(terms[1])
|
||||
# Exact match when the search term is quoted. Only applies to name, issuer, and cn
|
||||
if terms[1].startswith('"') and terms[1].endswith('"'):
|
||||
term = terms[1][1:-1]
|
||||
|
||||
if "issuer" in terms:
|
||||
# we can't rely on issuer being correct in the cert directly so we combine queries
|
||||
sub_query = database.session_query(Authority.id)\
|
||||
.filter(Authority.name.ilike('%{0}%'.format(terms[1])))\
|
||||
sub_query = (
|
||||
database.session_query(Authority.id)
|
||||
.filter(Authority.name.ilike(term))
|
||||
.subquery()
|
||||
)
|
||||
|
||||
query = query.filter(
|
||||
or_(
|
||||
Certificate.issuer.ilike('%{0}%'.format(terms[1])),
|
||||
Certificate.authority_id.in_(sub_query)
|
||||
Certificate.issuer.ilike(term),
|
||||
Certificate.authority_id.in_(sub_query),
|
||||
)
|
||||
)
|
||||
return database.sort_and_page(query, Certificate, args)
|
||||
|
||||
if 'destination' in terms:
|
||||
query = query.filter(Certificate.destinations.any(Destination.id == terms[1]))
|
||||
elif 'active' in filt: # this is really weird but strcmp seems to not work here??
|
||||
query = query.filter(Certificate.active == terms[1])
|
||||
elif "destination" in terms:
|
||||
query = query.filter(
|
||||
Certificate.destinations.any(Destination.id == terms[1])
|
||||
)
|
||||
elif "notify" in filt:
|
||||
query = query.filter(Certificate.notify == truthiness(terms[1]))
|
||||
elif "active" in filt:
|
||||
query = query.filter(Certificate.active == truthiness(terms[1]))
|
||||
elif "cn" in terms:
|
||||
query = query.filter(
|
||||
or_(
|
||||
func.lower(Certificate.cn).like(term.lower()),
|
||||
Certificate.id.in_(like_domain_query(term)),
|
||||
)
|
||||
)
|
||||
elif "id" in terms:
|
||||
query = query.filter(Certificate.id == cast(terms[1], Integer))
|
||||
elif "name" in terms:
|
||||
query = query.filter(
|
||||
or_(
|
||||
func.lower(Certificate.name).like(term.lower()),
|
||||
Certificate.id.in_(like_domain_query(term)),
|
||||
func.lower(Certificate.cn).like(term.lower()),
|
||||
)
|
||||
)
|
||||
elif "fixedName" in terms:
|
||||
# only match the fixed name directly if a fixedName is provided
|
||||
query = query.filter(Certificate.name == terms[1])
|
||||
else:
|
||||
query = database.filter(query, Certificate, terms)
|
||||
|
||||
if show:
|
||||
sub_query = database.session_query(Role.name).filter(Role.user_id == g.user.id).subquery()
|
||||
sub_query = (
|
||||
database.session_query(Role.name)
|
||||
.filter(Role.user_id == args["user"].id)
|
||||
.subquery()
|
||||
)
|
||||
query = query.filter(
|
||||
or_(
|
||||
Certificate.user_id == g.user.id,
|
||||
Certificate.owner.in_(sub_query)
|
||||
Certificate.user_id == args["user"].id, Certificate.owner.in_(sub_query)
|
||||
)
|
||||
)
|
||||
|
||||
if destination_id:
|
||||
query = query.filter(Certificate.destinations.any(Destination.id == destination_id))
|
||||
query = query.filter(
|
||||
Certificate.destinations.any(Destination.id == destination_id)
|
||||
)
|
||||
|
||||
if notification_id:
|
||||
query = query.filter(Certificate.notifications.any(Notification.id == notification_id))
|
||||
query = query.filter(
|
||||
Certificate.notifications.any(Notification.id == notification_id)
|
||||
)
|
||||
|
||||
if time_range:
|
||||
to = arrow.now().replace(weeks=+time_range).format('YYYY-MM-DD')
|
||||
now = arrow.now().format('YYYY-MM-DD')
|
||||
query = query.filter(Certificate.not_after <= to).filter(Certificate.not_after >= now)
|
||||
to = arrow.now().shift(weeks=+time_range).format("YYYY-MM-DD")
|
||||
now = arrow.now().format("YYYY-MM-DD")
|
||||
query = query.filter(Certificate.not_after <= to).filter(
|
||||
Certificate.not_after >= now
|
||||
)
|
||||
|
||||
return database.sort_and_page(query, Certificate, args)
|
||||
if current_app.config.get("ALLOW_CERT_DELETION", False):
|
||||
query = query.filter(Certificate.deleted == False) # noqa
|
||||
|
||||
result = database.sort_and_page(query, Certificate, args)
|
||||
return result
|
||||
|
||||
|
||||
def create_csr(csr_config):
|
||||
def like_domain_query(term):
|
||||
domain_query = database.session_query(Domain.id)
|
||||
domain_query = domain_query.filter(func.lower(Domain.name).like(term.lower()))
|
||||
assoc_query = database.session_query(certificate_associations.c.certificate_id)
|
||||
assoc_query = assoc_query.filter(certificate_associations.c.domain_id.in_(domain_query))
|
||||
return assoc_query
|
||||
|
||||
|
||||
def query_name(certificate_name, args):
|
||||
"""
|
||||
Helper function that queries for a certificate by name
|
||||
|
||||
:param certificate_name:
:param args:
|
||||
:return:
|
||||
"""
|
||||
query = database.session_query(Certificate)
|
||||
query = query.filter(Certificate.name == certificate_name)
|
||||
result = database.sort_and_page(query, Certificate, args)
|
||||
return result
|
||||
|
||||
|
||||
def query_common_name(common_name, args):
|
||||
"""
|
||||
Helper function that queries for not expired certificates by common name (and owner)
|
||||
|
||||
:param common_name:
|
||||
:param args:
|
||||
:return:
|
||||
"""
|
||||
owner = args.pop("owner")
|
||||
if not owner:
|
||||
owner = "%"
|
||||
|
||||
# only not expired certificates
|
||||
current_time = arrow.utcnow()
|
||||
|
||||
result = (
|
||||
Certificate.query.filter(Certificate.cn.ilike(common_name))
|
||||
.filter(Certificate.owner.ilike(owner))
|
||||
.filter(Certificate.not_after >= current_time.format("YYYY-MM-DD"))
|
||||
.all()
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def create_csr(**csr_config):
|
||||
"""
|
||||
Given a list of domains, create the appropriate CSR
for those domains
|
||||
|
||||
:param csr_config:
|
||||
"""
|
||||
private_key = rsa.generate_private_key(
|
||||
public_exponent=65537,
|
||||
key_size=2048,
|
||||
backend=default_backend()
|
||||
)
|
||||
private_key = generate_private_key(csr_config.get("key_type"))
|
||||
|
||||
# TODO When we figure out a better way to validate these options they should be parsed as str
|
||||
builder = x509.CertificateSigningRequestBuilder()
|
||||
builder = builder.subject_name(x509.Name([
|
||||
x509.NameAttribute(x509.OID_COMMON_NAME, csr_config['commonName']),
|
||||
x509.NameAttribute(x509.OID_ORGANIZATION_NAME, csr_config['organization']),
|
||||
x509.NameAttribute(x509.OID_ORGANIZATIONAL_UNIT_NAME, csr_config['organizationalUnit']),
|
||||
x509.NameAttribute(x509.OID_COUNTRY_NAME, csr_config['country']),
|
||||
x509.NameAttribute(x509.OID_STATE_OR_PROVINCE_NAME, csr_config['state']),
|
||||
x509.NameAttribute(x509.OID_LOCALITY_NAME, csr_config['location']),
|
||||
]))
|
||||
name_list = [x509.NameAttribute(x509.OID_COMMON_NAME, csr_config["common_name"])]
|
||||
if current_app.config.get("LEMUR_OWNER_EMAIL_IN_SUBJECT", True):
|
||||
name_list.append(
|
||||
x509.NameAttribute(x509.OID_EMAIL_ADDRESS, csr_config["owner"])
|
||||
)
|
||||
if "organization" in csr_config and csr_config["organization"].strip():
|
||||
name_list.append(
|
||||
x509.NameAttribute(x509.OID_ORGANIZATION_NAME, csr_config["organization"])
|
||||
)
|
||||
if (
|
||||
"organizational_unit" in csr_config
|
||||
and csr_config["organizational_unit"].strip()
|
||||
):
|
||||
name_list.append(
|
||||
x509.NameAttribute(
|
||||
x509.OID_ORGANIZATIONAL_UNIT_NAME, csr_config["organizational_unit"]
|
||||
)
|
||||
)
|
||||
if "country" in csr_config and csr_config["country"].strip():
|
||||
name_list.append(
|
||||
x509.NameAttribute(x509.OID_COUNTRY_NAME, csr_config["country"])
|
||||
)
|
||||
if "state" in csr_config and csr_config["state"].strip():
|
||||
name_list.append(
|
||||
x509.NameAttribute(x509.OID_STATE_OR_PROVINCE_NAME, csr_config["state"])
|
||||
)
|
||||
if "location" in csr_config and csr_config["location"].strip():
|
||||
name_list.append(
|
||||
x509.NameAttribute(x509.OID_LOCALITY_NAME, csr_config["location"])
|
||||
)
|
||||
builder = builder.subject_name(x509.Name(name_list))
|
||||
|
||||
builder = builder.add_extension(
|
||||
x509.BasicConstraints(ca=False, path_length=None), critical=True,
|
||||
)
|
||||
|
||||
if csr_config.get('extensions'):
|
||||
for k, v in csr_config.get('extensions', {}).items():
|
||||
if k == 'subAltNames':
|
||||
# map types to their x509 objects
|
||||
general_names = []
|
||||
for name in v['names']:
|
||||
if name['nameType'] == 'DNSName':
|
||||
general_names.append(x509.DNSName(name['value']))
|
||||
|
||||
builder = builder.add_extension(
|
||||
x509.SubjectAlternativeName(general_names), critical=True
|
||||
extensions = csr_config.get("extensions", {})
|
||||
critical_extensions = ["basic_constraints", "sub_alt_names", "key_usage"]
|
||||
noncritical_extensions = ["extended_key_usage"]
|
||||
for k, v in extensions.items():
|
||||
if v:
|
||||
if k in critical_extensions:
|
||||
current_app.logger.debug(
|
||||
"Adding Critical Extension: {0} {1}".format(k, v)
|
||||
)
|
||||
if k == "sub_alt_names":
|
||||
if v["names"]:
|
||||
builder = builder.add_extension(v["names"], critical=True)
|
||||
else:
|
||||
builder = builder.add_extension(v, critical=True)
|
||||
|
||||
# TODO support more CSR options, none of the authority plugins currently support these options
|
||||
# builder.add_extension(
|
||||
# x509.KeyUsage(
|
||||
# digital_signature=digital_signature,
|
||||
# content_commitment=content_commitment,
|
||||
# key_encipherment=key_enipherment,
|
||||
# data_encipherment=data_encipherment,
|
||||
# key_agreement=key_agreement,
|
||||
# key_cert_sign=key_cert_sign,
|
||||
# crl_sign=crl_sign,
|
||||
# encipher_only=enchipher_only,
|
||||
# decipher_only=decipher_only
|
||||
# ), critical=True
|
||||
# )
|
||||
#
|
||||
# # we must maintain our own list of OIDs here
|
||||
# builder.add_extension(
|
||||
# x509.ExtendedKeyUsage(
|
||||
# server_authentication=server_authentication,
|
||||
# email=
|
||||
# )
|
||||
# )
|
||||
#
|
||||
# builder.add_extension(
|
||||
# x509.AuthorityInformationAccess()
|
||||
# )
|
||||
#
|
||||
# builder.add_extension(
|
||||
# x509.AuthorityKeyIdentifier()
|
||||
# )
|
||||
#
|
||||
# builder.add_extension(
|
||||
# x509.SubjectKeyIdentifier()
|
||||
# )
|
||||
#
|
||||
# builder.add_extension(
|
||||
# x509.CRLDistributionPoints()
|
||||
# )
|
||||
#
|
||||
# builder.add_extension(
|
||||
# x509.ObjectIdentifier(oid)
|
||||
# )
|
||||
if k in noncritical_extensions:
|
||||
current_app.logger.debug("Adding Extension: {0} {1}".format(k, v))
|
||||
builder = builder.add_extension(v, critical=False)
|
||||
|
||||
request = builder.sign(
|
||||
private_key, hashes.SHA256(), default_backend()
|
||||
)
|
||||
ski = extensions.get("subject_key_identifier", {})
|
||||
if ski.get("include_ski", False):
|
||||
builder = builder.add_extension(
|
||||
x509.SubjectKeyIdentifier.from_public_key(private_key.public_key()),
|
||||
critical=False,
|
||||
)
|
||||
|
||||
request = builder.sign(private_key, hashes.SHA256(), default_backend())
|
||||
|
||||
# serialize our private key and CSR
|
||||
pem = private_key.private_bytes(
|
||||
private_key = private_key.private_bytes(
|
||||
encoding=serialization.Encoding.PEM,
|
||||
format=serialization.PrivateFormat.TraditionalOpenSSL, # would like to use PKCS8 but AWS ELBs don't like it
|
||||
encryption_algorithm=serialization.NoEncryption()
|
||||
)
|
||||
encryption_algorithm=serialization.NoEncryption(),
|
||||
).decode("utf-8")
|
||||
|
||||
csr = request.public_bytes(
|
||||
encoding=serialization.Encoding.PEM
|
||||
)
|
||||
csr = request.public_bytes(encoding=serialization.Encoding.PEM).decode("utf-8")
|
||||
|
||||
return csr, pem
|
||||
return csr, private_key
|
||||
|
||||
|
||||
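# Illustrative sketch (not part of the diff): the shape of csr_config that create_csr
# reads above. Keys mirror those referenced in the function; values are placeholders,
# and an application context is assumed for the current_app.config lookup.
csr_config = {
    "common_name": "test.example.net",
    "owner": "secure@example.net",
    "organization": "Example Inc.",
    "organizational_unit": "Operations",
    "country": "US",
    "state": "California",
    "location": "Los Gatos",
    "key_type": "RSA2048",
    "extensions": {},  # e.g. sub_alt_names / key_usage objects supplied by the schema layer
}

csr_pem, key_pem = create_csr(**csr_config)
print(csr_pem.splitlines()[0])  # -----BEGIN CERTIFICATE REQUEST-----
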
def stats(**kwargs):
@@ -419,16 +659,19 @@ def stats(**kwargs):
    :param kwargs:
    :return:
    """
    if kwargs.get("metric") == "not_after":
        start = arrow.utcnow()
        end = start.shift(weeks=+32)
        items = (
            database.db.session.query(Certificate.issuer, func.count(Certificate.id))
            .group_by(Certificate.issuer)
            .filter(Certificate.not_after <= end.format("YYYY-MM-DD"))
            .filter(Certificate.not_after >= start.format("YYYY-MM-DD"))
            .all()
        )

    else:
        attr = getattr(Certificate, kwargs.get("metric"))
        query = database.db.session.query(attr, func.count(attr))

        items = query.group_by(attr).all()

@@ -439,4 +682,96 @@ def stats(**kwargs):
        keys.append(key)
        values.append(count)

    return {"labels": keys, "values": values}

def get_account_number(arn):
    """
    Extract the account number from an arn.

    :param arn: IAM SSL arn
    :return: account number associated with ARN
    """
    return arn.split(":")[4]


def get_name_from_arn(arn):
    """
    Extract the certificate name from an arn.

    :param arn: IAM SSL arn
    :return: name of the certificate as uploaded to AWS
    """
    return arn.split("/", 1)[1]

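# Illustrative sketch (not part of the diff): a made-up IAM server-certificate ARN
# worked through the two helpers above.
arn = "arn:aws:iam::123456789012:server-certificate/WILDCARD.test.example.net-20160603"

assert get_account_number(arn) == "123456789012"  # field 4 of the colon-split ARN
assert get_name_from_arn(arn) == "WILDCARD.test.example.net-20160603"  # everything after the first "/"
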
def calculate_reissue_range(start, end):
    """
    Determine what the new validity_start and validity_end dates should be.
    :param start:
    :param end:
    :return:
    """
    span = end - start

    new_start = arrow.utcnow()
    new_end = new_start + span

    return new_start, arrow.get(new_end)

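# Illustrative sketch (not part of the diff): the original validity span is preserved
# but re-anchored at the current time.
import arrow

old_start = arrow.get("2016-06-03")
old_end = arrow.get("2018-01-12")
new_start, new_end = calculate_reissue_range(old_start, old_end)
assert (new_end - new_start) == (old_end - old_start)  # same lifetime, shifted to "now"
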
def get_certificate_primitives(certificate):
    """
    Retrieve key primitives from a certificate such that the certificate
    could be recreated with new expiration or be used to build upon.
    :param certificate:
    :return: dict of certificate primitives, should be enough to effectively re-issue
    certificate via `create`.
    """
    start, end = calculate_reissue_range(certificate.not_before, certificate.not_after)
    ser = CertificateInputSchema().load(
        CertificateOutputSchema().dump(certificate).data
    )
    assert not ser.errors, "Error re-serializing certificate: %s" % ser.errors
    data = ser.data

    # we can't quite tell if we are using a custom name, as this is an automated process (typically)
    # we will rely on the Lemur generated name
    data.pop("name", None)

    # TODO this can be removed once we migrate away from cn
    data["cn"] = data["common_name"]

    # needed until we move off not_*
    data["not_before"] = start
    data["not_after"] = end
    data["validity_start"] = start
    data["validity_end"] = end
    return data

def reissue_certificate(certificate, replace=None, user=None):
    """
    Reissue certificate with the same properties of the given certificate.
    :param certificate:
    :param replace:
    :param user:
    :return:
    """
    primitives = get_certificate_primitives(certificate)

    if primitives.get("csr"):
        # We do not want to re-use the CSR when creating a certificate because this defeats the purpose of rotation.
        del primitives["csr"]
    if not user:
        primitives["creator"] = certificate.user

    else:
        primitives["creator"] = user

    if replace:
        primitives["replaces"] = [certificate]

    new_cert = create(**primitives)

    return new_cert

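# Illustrative sketch (not part of the diff): how a rotation job might call
# reissue_certificate. `old_cert` is hypothetical (e.g. loaded via service.get),
# and an application context is assumed.
new_cert = reissue_certificate(old_cert, replace=True, user=None)

# With user=None the original certificate's creator is carried over, and because
# replace=True the old certificate ends up in the new certificate's "replaces" list.
print(new_cert.name)
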
85 lemur/certificates/utils.py (Normal file)
@@ -0,0 +1,85 @@
"""
|
||||
Utils to parse certificate data.
|
||||
|
||||
.. module: lemur.certificates.hooks
|
||||
:platform: Unix
|
||||
:copyright: (c) 2019 by Javier Ramos, see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
|
||||
.. moduleauthor:: Javier Ramos <javier.ramos@booking.com>
|
||||
"""
|
||||
|
||||
from cryptography import x509
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from marshmallow.exceptions import ValidationError
|
||||
from cryptography.hazmat.primitives.asymmetric import rsa, ec
|
||||
from lemur.common.utils import get_key_type_from_ec_curve
|
||||
|
||||
|
||||
def get_sans_from_csr(data):
|
||||
"""
|
||||
Fetches SubjectAlternativeNames from CSR.
|
||||
Works with any kind of SubjectAlternativeName
|
||||
:param data: PEM-encoded string with CSR
|
||||
:return: List of LemurAPI-compatible subAltNames
|
||||
"""
|
||||
sub_alt_names = []
|
||||
try:
|
||||
request = x509.load_pem_x509_csr(data.encode("utf-8"), default_backend())
|
||||
except Exception:
|
||||
raise ValidationError("CSR presented is not valid.")
|
||||
|
||||
try:
|
||||
alt_names = request.extensions.get_extension_for_class(
|
||||
x509.SubjectAlternativeName
|
||||
)
|
||||
for alt_name in alt_names.value:
|
||||
sub_alt_names.append(
|
||||
{"nameType": type(alt_name).__name__, "value": alt_name.value}
|
||||
)
|
||||
except x509.ExtensionNotFound:
|
||||
pass
|
||||
|
||||
return sub_alt_names
|
||||
|
||||
|
||||
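# Illustrative sketch (not part of the diff): build a throwaway CSR with the
# cryptography library and feed it through get_sans_from_csr. Key size and names
# are arbitrary.
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa

key = rsa.generate_private_key(public_exponent=65537, key_size=2048, backend=default_backend())
csr = (
    x509.CertificateSigningRequestBuilder()
    .subject_name(x509.Name([x509.NameAttribute(x509.NameOID.COMMON_NAME, u"test.example.net")]))
    .add_extension(x509.SubjectAlternativeName([x509.DNSName(u"www.test.example.net")]), critical=False)
    .sign(key, hashes.SHA256(), default_backend())
)
pem = csr.public_bytes(serialization.Encoding.PEM).decode("utf-8")

print(get_sans_from_csr(pem))  # [{'nameType': 'DNSName', 'value': 'www.test.example.net'}]
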
def get_cn_from_csr(data):
    """
    Fetches the common name (CN) from a CSR.
    :param data: PEM-encoded string with CSR
    :return: the common name
    """
    try:
        request = x509.load_pem_x509_csr(data.encode("utf-8"), default_backend())
    except Exception:
        raise ValidationError("CSR presented is not valid.")

    common_name = request.subject.get_attributes_for_oid(x509.NameOID.COMMON_NAME)
    return common_name[0].value

def get_key_type_from_csr(data):
    """
    Fetches the key type from a CSR.
    :param data: PEM-encoded string with CSR
    :return: key_type
    """
    try:
        request = x509.load_pem_x509_csr(data.encode("utf-8"), default_backend())
    except Exception:
        raise ValidationError("CSR presented is not valid.")

    try:
        if isinstance(request.public_key(), rsa.RSAPublicKey):
            return "RSA{key_size}".format(
                key_size=request.public_key().key_size
            )
        elif isinstance(request.public_key(), ec.EllipticCurvePublicKey):
            return get_key_type_from_ec_curve(request.public_key().curve.name)
        else:
            raise Exception("Unsupported key type")

    except NotImplementedError:
        raise NotImplementedError()

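# Illustrative sketch (not part of the diff): for an RSA 2048-bit CSR such as the one
# built in the earlier example, the helper returns Lemur's key_type string.
assert get_key_type_from_csr(pem) == "RSA2048"
# EC CSRs are mapped through lemur.common.utils.get_key_type_from_ec_curve, which
# translates the curve name (e.g. secp256r1) into the corresponding key_type value.
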
@@ -1,84 +1,138 @@
"""
.. module: lemur.certificates.verify
    :platform: Unix
    :copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
    :copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
    :license: Apache, see LICENSE for more details.
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
"""
import os
import requests
import subprocess

from OpenSSL import crypto
from flask import current_app
from lemur.extensions import sentry
from requests.exceptions import ConnectionError, InvalidSchema
from cryptography import x509
from cryptography.hazmat.backends import default_backend

from flask import current_app
from lemur.utils import mktempfile
from lemur.common.utils import parse_certificate

from contextlib import contextmanager
from tempfile import NamedTemporaryFile

crl_cache = {}


@contextmanager
def mktempfile():
    with NamedTemporaryFile(delete=False) as f:
        name = f.name

    try:
        yield name
    finally:
        os.unlink(name)

def ocsp_verify(cert, cert_path, issuer_chain_path):
    """
    Attempts to verify a certificate via OCSP. OCSP is a more modern version
    of CRL in that it will query the OCSP URI in order to determine if the
    certificate has been revoked

    :param cert:
    :param cert_path:
    :param issuer_chain_path:
    :return bool: True if certificate is valid, False otherwise
    """
    command = ["openssl", "x509", "-noout", "-ocsp_uri", "-in", cert_path]
    p1 = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    url, err = p1.communicate()

    if not url:
        current_app.logger.debug(
            "No OCSP URL in certificate {}".format(cert.serial_number)
        )
        return None

    p2 = subprocess.Popen(
        [
            "openssl",
            "ocsp",
            "-issuer",
            issuer_chain_path,
            "-cert",
            cert_path,
            "-url",
            url.strip(),
        ],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )

    message, err = p2.communicate()

    p_message = message.decode("utf-8")

    if "error" in p_message or "Error" in p_message:
        raise Exception("Got error when parsing OCSP url")

    elif "revoked" in p_message:
        current_app.logger.debug(
            "OCSP reports certificate revoked: {}".format(cert.serial_number)
        )
        return False

    elif "good" not in p_message:
        raise Exception("Did not receive a valid response")

    return True

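# Illustrative sketch (not part of the diff): ocsp_verify above shells out to the
# openssl CLI; the OCSP responder URL could also be read with the cryptography
# library. This helper is hypothetical and only shown for comparison.
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.x509.oid import AuthorityInformationAccessOID, ExtensionOID

def get_ocsp_url(cert_pem):
    """Return the first OCSP responder URL from a PEM certificate, or None."""
    cert = x509.load_pem_x509_certificate(cert_pem.encode("utf-8"), default_backend())
    try:
        aia = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_INFORMATION_ACCESS
        ).value
    except x509.ExtensionNotFound:
        return None
    for description in aia:
        if description.access_method == AuthorityInformationAccessOID.OCSP:
            return description.access_location.value
    return None
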
def crl_verify(cert, cert_path):
    """
    Attempts to verify a certificate using CRL.

    :param cert:
    :param cert_path:
    :return: True if certificate is valid, False otherwise
    :raise Exception: If certificate does not have CRL
    """
    try:
        distribution_points = cert.extensions.get_extension_for_oid(
            x509.OID_CRL_DISTRIBUTION_POINTS
        ).value
    except x509.ExtensionNotFound:
        current_app.logger.debug(
            "No CRLDP extension in certificate {}".format(cert.serial_number)
        )
        return None

    for p in distribution_points:
        point = p.full_name[0].value

        if point not in crl_cache:
            current_app.logger.debug("Retrieving CRL: {}".format(point))
            try:
                response = requests.get(point)

                if response.status_code != 200:
                    raise Exception("Unable to retrieve CRL: {0}".format(point))
            except InvalidSchema:
                # Unhandled URI scheme (like ldap://); skip this distribution point.
                continue
            except ConnectionError:
                raise Exception("Unable to retrieve CRL: {0}".format(point))

            crl_cache[point] = x509.load_der_x509_crl(
                response.content, backend=default_backend()
            )
        else:
            current_app.logger.debug("CRL point is cached {}".format(point))

        for r in crl_cache[point]:
            if cert.serial_number == r.serial_number:
                try:
                    reason = r.extensions.get_extension_for_class(x509.CRLReason).value
                    # Handle "removeFromCRL" revoke reason as unrevoked;
                    # continue with the next distribution point.
                    # Per RFC 5280 section 6.3.3 (k):
                    # https://tools.ietf.org/html/rfc5280#section-6.3.3
                    if reason == x509.ReasonFlags.remove_from_crl:
                        break
                except x509.ExtensionNotFound:
                    pass

                current_app.logger.debug(
                    "CRL reports certificate revoked: {}".format(cert.serial_number)
                )
                return False

    return True

@@ -90,18 +144,33 @@ def verify(cert_path, issuer_chain_path):
    :param issuer_chain_path:
    :return: True if valid, False otherwise
    """
    with open(cert_path, "rt") as c:
        try:
            cert = parse_certificate(c.read())
        except ValueError as e:
            current_app.logger.error(e)
            return None

    # OCSP is our main source of truth, in a lot of cases CRLs
    # have been deprecated and are no longer updated
    verify_result = None
    try:
        verify_result = ocsp_verify(cert, cert_path, issuer_chain_path)
    except Exception as e:
        sentry.captureException()
        current_app.logger.exception(e)

    if verify_result is None:
        try:
            verify_result = crl_verify(cert, cert_path)
        except Exception as e:
            sentry.captureException()
            current_app.logger.exception(e)

    if verify_result is None:
        current_app.logger.debug("Failed to verify {}".format(cert.serial_number))

    return verify_result

def verify_string(cert_string, issuer_string):
@@ -113,10 +182,10 @@ def verify_string(cert_string, issuer_string):
    :return: True if valid, False otherwise
    """
    with mktempfile() as cert_tmp:
        with open(cert_tmp, "w") as f:
            f.write(cert_string)
        with mktempfile() as issuer_tmp:
            with open(issuer_tmp, "w") as f:
                f.write(issuer_string)
            status = verify(cert_tmp, issuer_tmp)
            return status

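# Illustrative sketch (not part of the diff): verify_string takes PEM strings and
# returns True, False, or None (when neither OCSP nor CRL gave an answer). The file
# names are placeholders.
with open("cert.pem") as f:
    cert_pem = f.read()
with open("chain.pem") as f:
    chain_pem = f.read()

status = verify_string(cert_pem, chain_pem)
if status is None:
    print("revocation status unknown (no usable OCSP/CRL data)")
elif status:
    print("certificate not revoked")
else:
    print("certificate revoked")
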
@@ -1,112 +1,257 @@
|
||||
"""
|
||||
.. module: lemur.certificates.views
|
||||
:platform: Unix
|
||||
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
"""
|
||||
import base64
|
||||
from builtins import str
|
||||
|
||||
from flask import Blueprint, make_response, jsonify
|
||||
from flask.ext.restful import reqparse, Api, fields
|
||||
from flask import Blueprint, make_response, jsonify, g, current_app
|
||||
from flask_restful import reqparse, Api, inputs
|
||||
|
||||
from cryptography import x509
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
|
||||
from lemur.certificates import service
|
||||
from lemur.authorities.models import Authority
|
||||
from lemur.common.schema import validate_schema
|
||||
from lemur.common.utils import paginated_parser
|
||||
|
||||
from lemur.auth.service import AuthenticatedResource
|
||||
from lemur.auth.permissions import ViewKeyPermission, AuthorityPermission, UpdateCertificatePermission
|
||||
from lemur.auth.permissions import AuthorityPermission, CertificatePermission
|
||||
|
||||
from lemur.certificates import service
|
||||
from lemur.certificates.models import Certificate
|
||||
from lemur.plugins.base import plugins
|
||||
from lemur.certificates.schemas import (
|
||||
certificate_input_schema,
|
||||
certificate_output_schema,
|
||||
certificate_upload_input_schema,
|
||||
certificates_output_schema,
|
||||
certificate_export_input_schema,
|
||||
certificate_edit_input_schema,
|
||||
certificates_list_output_schema_factory,
|
||||
)
|
||||
|
||||
from lemur.roles import service as role_service
|
||||
|
||||
from lemur.common.utils import marshal_items, paginated_parser
|
||||
|
||||
from lemur.notifications.views import notification_list
|
||||
from lemur.logs import service as log_service
|
||||
|
||||
|
||||
mod = Blueprint('certificates', __name__)
|
||||
mod = Blueprint("certificates", __name__)
|
||||
api = Api(mod)
|
||||
|
||||
|
||||
FIELDS = {
|
||||
'name': fields.String,
|
||||
'id': fields.Integer,
|
||||
'bits': fields.Integer,
|
||||
'deleted': fields.String,
|
||||
'issuer': fields.String,
|
||||
'serial': fields.String,
|
||||
'owner': fields.String,
|
||||
'chain': fields.String,
|
||||
'san': fields.String,
|
||||
'active': fields.Boolean,
|
||||
'description': fields.String,
|
||||
'notBefore': fields.DateTime(dt_format='iso8601', attribute='not_before'),
|
||||
'notAfter': fields.DateTime(dt_format='iso8601', attribute='not_after'),
|
||||
'cn': fields.String,
|
||||
'status': fields.String,
|
||||
'body': fields.String
|
||||
}
|
||||
class CertificatesListValid(AuthenticatedResource):
|
||||
""" Defines the 'certificates/valid' endpoint """
|
||||
|
||||
def __init__(self):
|
||||
self.reqparse = reqparse.RequestParser()
|
||||
super(CertificatesListValid, self).__init__()
|
||||
|
||||
@validate_schema(None, certificates_output_schema)
|
||||
def get(self):
|
||||
"""
|
||||
.. http:get:: /certificates/valid/<query>
|
||||
|
||||
The current list of not-expired certificates for a given common name, and owner
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
GET /certificates/valid?filter=cn;*.test.example.net&owner=joe@example.com
|
||||
HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"items": [{
|
||||
"status": null,
|
||||
"cn": "*.test.example.net",
|
||||
"chain": "",
|
||||
"csr": "-----BEGIN CERTIFICATE REQUEST-----"
|
||||
"authority": {
|
||||
"active": true,
|
||||
"owner": "secure@example.com",
|
||||
"id": 1,
|
||||
"description": "verisign test authority",
|
||||
"name": "verisign"
|
||||
},
|
||||
"owner": "joe@example.com",
|
||||
"serial": "82311058732025924142789179368889309156",
|
||||
"id": 2288,
|
||||
"issuer": "SymantecCorporation",
|
||||
"dateCreated": "2016-06-03T06:09:42.133769+00:00",
|
||||
"notBefore": "2016-06-03T00:00:00+00:00",
|
||||
"notAfter": "2018-01-12T23:59:59+00:00",
|
||||
"destinations": [],
|
||||
"bits": 2048,
|
||||
"body": "-----BEGIN CERTIFICATE-----...",
|
||||
"description": null,
|
||||
"deleted": null,
|
||||
"notifications": [{
|
||||
"id": 1
|
||||
}],
|
||||
"signingAlgorithm": "sha256",
|
||||
"user": {
|
||||
"username": "jane",
|
||||
"active": true,
|
||||
"email": "jane@example.com",
|
||||
"id": 2
|
||||
},
|
||||
"active": true,
|
||||
"domains": [{
|
||||
"sensitive": false,
|
||||
"id": 1090,
|
||||
"name": "*.test.example.net"
|
||||
}],
|
||||
"replaces": [],
|
||||
"replaced": [],
|
||||
"name": "WILDCARD.test.example.net-SymantecCorporation-20160603-20180112",
|
||||
"roles": [{
|
||||
"id": 464,
|
||||
"description": "This is a google group based role created by Lemur",
|
||||
"name": "joe@example.com"
|
||||
}],
|
||||
"san": null
|
||||
}],
|
||||
"total": 1
|
||||
}
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
|
||||
"""
|
||||
parser = paginated_parser.copy()
|
||||
args = parser.parse_args()
|
||||
args["user"] = g.user
|
||||
common_name = args["filter"].split(";")[1]
|
||||
return service.query_common_name(common_name, args)
|
||||
|
||||
|
||||
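# Illustrative sketch (not part of the diff): a client-side call to the endpoint
# documented above. Host and token are placeholders; the filter format "cn;<value>"
# matches what CertificatesListValid.get parses.
import requests

resp = requests.get(
    "https://lemur.example.com/certificates/valid",
    params={"filter": "cn;*.test.example.net", "owner": "joe@example.com"},
    headers={"Authorization": "Bearer <token>"},
)
resp.raise_for_status()
for item in resp.json()["items"]:
    print(item["name"], item["notAfter"])
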
def valid_authority(authority_options):
|
||||
"""
|
||||
Defends against invalid authorities
|
||||
class CertificatesNameQuery(AuthenticatedResource):
|
||||
""" Defines the 'certificates/name' endpoint """
|
||||
|
||||
:param authority_options:
|
||||
:return: :raise ValueError:
|
||||
"""
|
||||
name = authority_options['name']
|
||||
authority = Authority.query.filter(Authority.name == name).one()
|
||||
def __init__(self):
|
||||
self.reqparse = reqparse.RequestParser()
|
||||
super(CertificatesNameQuery, self).__init__()
|
||||
|
||||
if not authority:
|
||||
raise ValueError("Unable to find authority specified")
|
||||
@validate_schema(None, certificates_output_schema)
|
||||
def get(self, certificate_name):
|
||||
"""
|
||||
.. http:get:: /certificates/name/<query>
|
||||
|
||||
if not authority.active:
|
||||
raise ValueError("Selected authority [{0}] is not currently active".format(name))
|
||||
The current list of certificates
|
||||
|
||||
return authority
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
def pem_str(value, name):
|
||||
"""
|
||||
Used to validate that the given string is a PEM formatted string
|
||||
GET /certificates/name/WILDCARD.test.example.net-SymantecCorporation-20160603-20180112 HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
:param value:
|
||||
:param name:
|
||||
:return: :raise ValueError:
|
||||
"""
|
||||
try:
|
||||
x509.load_pem_x509_certificate(bytes(value), default_backend())
|
||||
except Exception:
|
||||
raise ValueError("The parameter '{0}' needs to be a valid PEM string".format(name))
|
||||
return value
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
def private_key_str(value, name):
|
||||
"""
|
||||
Used to validate that a given string is an RSA private key
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: text/javascript
|
||||
|
||||
:param value:
|
||||
:param name:
|
||||
:return: :raise ValueError:
|
||||
"""
|
||||
try:
|
||||
serialization.load_pem_private_key(bytes(value), None, backend=default_backend())
|
||||
except Exception:
|
||||
raise ValueError("The parameter '{0}' needs to be a valid RSA private key".format(name))
|
||||
return value
|
||||
{
|
||||
"items": [{
|
||||
"status": null,
|
||||
"cn": "*.test.example.net",
|
||||
"chain": "",
|
||||
"csr": "-----BEGIN CERTIFICATE REQUEST-----"
|
||||
"authority": {
|
||||
"active": true,
|
||||
"owner": "secure@example.com",
|
||||
"id": 1,
|
||||
"description": "verisign test authority",
|
||||
"name": "verisign"
|
||||
},
|
||||
"owner": "joe@example.com",
|
||||
"serial": "82311058732025924142789179368889309156",
|
||||
"id": 2288,
|
||||
"issuer": "SymantecCorporation",
|
||||
"dateCreated": "2016-06-03T06:09:42.133769+00:00",
|
||||
"notBefore": "2016-06-03T00:00:00+00:00",
|
||||
"notAfter": "2018-01-12T23:59:59+00:00",
|
||||
"destinations": [],
|
||||
"bits": 2048,
|
||||
"body": "-----BEGIN CERTIFICATE-----...",
|
||||
"description": null,
|
||||
"deleted": null,
|
||||
"notifications": [{
|
||||
"id": 1
|
||||
}],
|
||||
"signingAlgorithm": "sha256",
|
||||
"user": {
|
||||
"username": "jane",
|
||||
"active": true,
|
||||
"email": "jane@example.com",
|
||||
"id": 2
|
||||
},
|
||||
"active": true,
|
||||
"domains": [{
|
||||
"sensitive": false,
|
||||
"id": 1090,
|
||||
"name": "*.test.example.net"
|
||||
}],
|
||||
"replaces": [],
|
||||
"replaced": [],
|
||||
"name": "WILDCARD.test.example.net-SymantecCorporation-20160603-20180112",
|
||||
"roles": [{
|
||||
"id": 464,
|
||||
"description": "This is a google group based role created by Lemur",
|
||||
"name": "joe@example.com"
|
||||
}],
|
||||
"san": null
|
||||
}],
|
||||
"total": 1
|
||||
}
|
||||
|
||||
:query sortBy: field to sort on
|
||||
:query sortDir: asc or desc
|
||||
:query page: int. default is 1
|
||||
:query filter: key value pair format is k;v
|
||||
:query count: count number. default is 10
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
|
||||
"""
|
||||
parser = paginated_parser.copy()
|
||||
parser.add_argument("timeRange", type=int, dest="time_range", location="args")
|
||||
parser.add_argument("owner", type=inputs.boolean, location="args")
|
||||
parser.add_argument("id", type=str, location="args")
|
||||
parser.add_argument("active", type=inputs.boolean, location="args")
|
||||
parser.add_argument(
|
||||
"destinationId", type=int, dest="destination_id", location="args"
|
||||
)
|
||||
parser.add_argument("creator", type=str, location="args")
|
||||
parser.add_argument("show", type=str, location="args")
|
||||
|
||||
args = parser.parse_args()
|
||||
args["user"] = g.user
|
||||
return service.query_name(certificate_name, args)
|
||||
|
||||
|
||||
class CertificatesList(AuthenticatedResource):
|
||||
""" Defines the 'certificates' endpoint """
|
||||
|
||||
def __init__(self):
|
||||
self.reqparse = reqparse.RequestParser()
|
||||
super(CertificatesList, self).__init__()
|
||||
|
||||
@marshal_items(FIELDS)
|
||||
@validate_schema(None, certificates_list_output_schema_factory)
|
||||
def get(self):
|
||||
"""
|
||||
.. http:get:: /certificates
|
||||
@@ -130,52 +275,87 @@ class CertificatesList(AuthenticatedResource):
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"items": [
|
||||
{
|
||||
"id": 1,
|
||||
"name": "cert1",
|
||||
"description": "this is cert1",
|
||||
"bits": 2048,
|
||||
"deleted": false,
|
||||
"issuer": "ExampeInc.",
|
||||
"serial": "123450",
|
||||
"chain": "-----Begin ...",
|
||||
"body": "-----Begin ...",
|
||||
"san": true,
|
||||
"owner": 'bob@example.com",
|
||||
"active": true,
|
||||
"notBefore": "2015-06-05T17:09:39",
|
||||
"notAfter": "2015-06-10T17:09:39",
|
||||
"cn": "example.com",
|
||||
"status": "unknown"
|
||||
}
|
||||
]
|
||||
"items": [{
|
||||
"status": null,
|
||||
"cn": "*.test.example.net",
|
||||
"chain": "",
|
||||
"csr": "-----BEGIN CERTIFICATE REQUEST-----"
|
||||
"authority": {
|
||||
"active": true,
|
||||
"owner": "secure@example.com",
|
||||
"id": 1,
|
||||
"description": "verisign test authority",
|
||||
"name": "verisign"
|
||||
},
|
||||
"owner": "joe@example.com",
|
||||
"serial": "82311058732025924142789179368889309156",
|
||||
"id": 2288,
|
||||
"issuer": "SymantecCorporation",
|
||||
"dateCreated": "2016-06-03T06:09:42.133769+00:00",
|
||||
"notBefore": "2016-06-03T00:00:00+00:00",
|
||||
"notAfter": "2018-01-12T23:59:59+00:00",
|
||||
"destinations": [],
|
||||
"bits": 2048,
|
||||
"body": "-----BEGIN CERTIFICATE-----...",
|
||||
"description": null,
|
||||
"deleted": null,
|
||||
"notifications": [{
|
||||
"id": 1
|
||||
}],
|
||||
"signingAlgorithm": "sha256",
|
||||
"user": {
|
||||
"username": "jane",
|
||||
"active": true,
|
||||
"email": "jane@example.com",
|
||||
"id": 2
|
||||
},
|
||||
"active": true,
|
||||
"domains": [{
|
||||
"sensitive": false,
|
||||
"id": 1090,
|
||||
"name": "*.test.example.net"
|
||||
}],
|
||||
"replaces": [],
|
||||
"replaced": [],
|
||||
"name": "WILDCARD.test.example.net-SymantecCorporation-20160603-20180112",
|
||||
"roles": [{
|
||||
"id": 464,
|
||||
"description": "This is a google group based role created by Lemur",
|
||||
"name": "joe@example.com"
|
||||
}],
|
||||
"san": null
|
||||
}],
|
||||
"total": 1
|
||||
}
|
||||
|
||||
:query sortBy: field to sort on
|
||||
:query sortDir: acs or desc
|
||||
:query sortDir: asc or desc
|
||||
:query page: int. default is 1
|
||||
:query filter: key value pair. format is k=v;
|
||||
:query limit: limit number. default is 10
|
||||
:query filter: key value pair format is k;v
|
||||
:query count: count number. default is 10
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
|
||||
"""
|
||||
parser = paginated_parser.copy()
|
||||
parser.add_argument('timeRange', type=int, dest='time_range', location='args')
|
||||
parser.add_argument('owner', type=bool, location='args')
|
||||
parser.add_argument('id', type=str, location='args')
|
||||
parser.add_argument('active', type=bool, location='args')
|
||||
parser.add_argument('destinationId', type=int, dest="destination_id", location='args')
|
||||
parser.add_argument('creator', type=str, location='args')
|
||||
parser.add_argument('show', type=str, location='args')
|
||||
parser.add_argument("timeRange", type=int, dest="time_range", location="args")
|
||||
parser.add_argument("owner", type=inputs.boolean, location="args")
|
||||
parser.add_argument("id", type=str, location="args")
|
||||
parser.add_argument("active", type=inputs.boolean, location="args")
|
||||
parser.add_argument(
|
||||
"destinationId", type=int, dest="destination_id", location="args"
|
||||
)
|
||||
parser.add_argument("creator", type=str, location="args")
|
||||
parser.add_argument("show", type=str, location="args")
|
||||
parser.add_argument("showExpired", type=int, location="args")
|
||||
|
||||
args = parser.parse_args()
|
||||
args["user"] = g.user
|
||||
return service.render(args)
|
||||
|
||||
@marshal_items(FIELDS)
|
||||
def post(self):
|
||||
@validate_schema(certificate_input_schema, certificate_output_schema)
|
||||
def post(self, data=None):
|
||||
"""
|
||||
.. http:post:: /certificates
|
||||
|
||||
@@ -190,86 +370,38 @@ class CertificatesList(AuthenticatedResource):
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
{
|
||||
"country": "US",
|
||||
"state": "CA",
|
||||
"location": "A Place",
|
||||
"organization": "ExampleInc.",
|
||||
"organizationalUnit": "Operations",
|
||||
"owner": "bob@example.com",
|
||||
"description": "test",
|
||||
"selectedAuthority": "timetest2",
|
||||
"authority": {
|
||||
"body": "-----BEGIN...",
|
||||
"name": "timetest2",
|
||||
"chain": "",
|
||||
"notBefore": "2015-06-05T15:20:59",
|
||||
"active": true,
|
||||
"id": 50,
|
||||
"notAfter": "2015-06-17T15:21:08",
|
||||
"description": "dsfdsf"
|
||||
},
|
||||
"notifications": [
|
||||
{
|
||||
"description": "Default 30 day expiration notification",
|
||||
"notificationOptions": [
|
||||
{
|
||||
"name": "interval",
|
||||
"required": true,
|
||||
"value": 30,
|
||||
"helpMessage": "Number of days to be alert before expiration.",
|
||||
"validation": "^\\d+$",
|
||||
"type": "int"
|
||||
},
|
||||
{
|
||||
"available": [
|
||||
"days",
|
||||
"weeks",
|
||||
"months"
|
||||
],
|
||||
"name": "unit",
|
||||
"required": true,
|
||||
"value": "days",
|
||||
"helpMessage": "Interval unit",
|
||||
"validation": "",
|
||||
"type": "select"
|
||||
},
|
||||
{
|
||||
"name": "recipients",
|
||||
"required": true,
|
||||
"value": "bob@example.com",
|
||||
"helpMessage": "Comma delimited list of email addresses",
|
||||
"validation": "^([\\w+-.%]+@[\\w-.]+\\.[A-Za-z]{2,4},?)+$",
|
||||
"type": "str"
|
||||
}
|
||||
],
|
||||
"label": "DEFAULT_KGLISSON_30_DAY",
|
||||
"pluginName": "email-notification",
|
||||
"active": true,
|
||||
"id": 7
|
||||
}
|
||||
],
|
||||
"extensions": {
|
||||
"basicConstraints": {},
|
||||
"keyUsage": {
|
||||
"isCritical": true,
|
||||
"useKeyEncipherment": true,
|
||||
"useDigitalSignature": true
|
||||
},
|
||||
"extendedKeyUsage": {
|
||||
"isCritical": true,
|
||||
"useServerAuthentication": true
|
||||
},
|
||||
"subjectKeyIdentifier": {
|
||||
"includeSKI": true
|
||||
},
|
||||
"owner": "secure@example.net",
|
||||
"commonName": "test.example.net",
|
||||
"country": "US",
|
||||
"extensions": {
|
||||
"subAltNames": {
|
||||
"names": []
|
||||
"names": [
|
||||
{
|
||||
"nameType": "DNSName",
|
||||
"value": "*.test.example.net"
|
||||
},
|
||||
{
|
||||
"nameType": "DNSName",
|
||||
"value": "www.test.example.net"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"commonName": "test",
|
||||
"validityStart": "2015-06-05T07:00:00.000Z",
|
||||
"validityEnd": "2015-06-16T07:00:00.000Z"
|
||||
}
|
||||
},
|
||||
"replacements": [{
|
||||
"id": 1
|
||||
}],
|
||||
"notify": true,
|
||||
"validityEnd": "2026-01-01T08:00:00.000Z",
|
||||
"authority": {
|
||||
"name": "verisign"
|
||||
},
|
||||
"organization": "Netflix, Inc.",
|
||||
"location": "Los Gatos",
|
||||
"state": "California",
|
||||
"validityStart": "2016-11-11T04:19:48.000Z",
|
||||
"organizationalUnit": "Operations"
|
||||
}
|
||||
|
||||
|
||||
**Example response**:
|
||||
|
||||
@@ -280,80 +412,99 @@ class CertificatesList(AuthenticatedResource):
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"id": 1,
|
||||
"name": "cert1",
|
||||
"description": "this is cert1",
|
||||
"status": null,
|
||||
"cn": "*.test.example.net",
|
||||
"chain": "",
|
||||
"authority": {
|
||||
"active": true,
|
||||
"owner": "secure@example.com",
|
||||
"id": 1,
|
||||
"description": "verisign test authority",
|
||||
"name": "verisign"
|
||||
},
|
||||
"owner": "joe@example.com",
|
||||
"serial": "82311058732025924142789179368889309156",
|
||||
"id": 2288,
|
||||
"issuer": "SymantecCorporation",
|
||||
"dateCreated": "2016-06-03T06:09:42.133769+00:00",
|
||||
"notBefore": "2016-06-03T00:00:00+00:00",
|
||||
"notAfter": "2018-01-12T23:59:59+00:00",
|
||||
"destinations": [],
|
||||
"bits": 2048,
|
||||
"deleted": false,
|
||||
"issuer": "ExampeInc.",
|
||||
"serial": "123450",
|
||||
"chain": "-----Begin ...",
|
||||
"body": "-----Begin ...",
|
||||
"san": true,
|
||||
"owner": "jimbob@example.com",
|
||||
"active": false,
|
||||
"notBefore": "2015-06-05T17:09:39",
|
||||
"notAfter": "2015-06-10T17:09:39",
|
||||
"cn": "example.com",
|
||||
"status": "unknown"
|
||||
"body": "-----BEGIN CERTIFICATE-----...",
|
||||
"description": null,
|
||||
"deleted": null,
|
||||
"notifications": [{
|
||||
"id": 1
|
||||
}],
|
||||
"signingAlgorithm": "sha256",
|
||||
"user": {
|
||||
"username": "jane",
|
||||
"active": true,
|
||||
"email": "jane@example.com",
|
||||
"id": 2
|
||||
},
|
||||
"active": true,
|
||||
"domains": [{
|
||||
"sensitive": false,
|
||||
"id": 1090,
|
||||
"name": "*.test.example.net"
|
||||
}],
|
||||
"replaces": [{
|
||||
"id": 1
|
||||
}],
|
||||
"rotation": true,
|
||||
"rotationPolicy": {"name": "default"},
|
||||
"name": "WILDCARD.test.example.net-SymantecCorporation-20160603-20180112",
|
||||
"roles": [{
|
||||
"id": 464,
|
||||
"description": "This is a google group based role created by Lemur",
|
||||
"name": "joe@example.com"
|
||||
}],
|
||||
"san": null
|
||||
}
|
||||
|
||||
:arg extensions: extensions to be used in the certificate
|
||||
:arg description: description for new certificate
|
||||
:arg owner: owner email
|
||||
:arg validityStart: when the certificate should start being valid
|
||||
:arg validityEnd: when the certificate should expire
|
||||
:arg authority: authority that should issue the certificate
|
||||
:arg country: country for the CSR
|
||||
:arg state: state for the CSR
|
||||
:arg location: location for the CSR
|
||||
:arg organization: organization for CSR
|
||||
:arg commonName: certificate common name
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
|
||||
"""
|
||||
self.reqparse.add_argument('extensions', type=dict, location='json')
|
||||
self.reqparse.add_argument('destinations', type=list, default=[], location='json')
|
||||
self.reqparse.add_argument('notifications', type=list, default=[], location='json')
|
||||
self.reqparse.add_argument('validityStart', type=str, location='json') # TODO validate
|
||||
self.reqparse.add_argument('validityEnd', type=str, location='json') # TODO validate
|
||||
self.reqparse.add_argument('authority', type=valid_authority, location='json', required=True)
|
||||
self.reqparse.add_argument('description', type=str, location='json', required=True)
|
||||
self.reqparse.add_argument('country', type=str, location='json', required=True)
|
||||
self.reqparse.add_argument('state', type=str, location='json', required=True)
|
||||
self.reqparse.add_argument('location', type=str, location='json', required=True)
|
||||
self.reqparse.add_argument('organization', type=str, location='json', required=True)
|
||||
self.reqparse.add_argument('organizationalUnit', type=str, location='json', required=True)
|
||||
self.reqparse.add_argument('owner', type=str, location='json', required=True)
|
||||
self.reqparse.add_argument('commonName', type=str, location='json', required=True)
|
||||
|
||||
args = self.reqparse.parse_args()
|
||||
|
||||
authority = args['authority']
|
||||
role = role_service.get_by_name(authority.owner)
|
||||
role = role_service.get_by_name(data["authority"].owner)
|
||||
|
||||
# all the authority role members should be allowed
|
||||
roles = [x.name for x in authority.roles]
|
||||
roles = [x.name for x in data["authority"].roles]
|
||||
|
||||
# allow "owner" roles by team DL
|
||||
roles.append(role)
|
||||
permission = AuthorityPermission(authority.id, roles)
|
||||
authority_permission = AuthorityPermission(data["authority"].id, roles)
|
||||
|
||||
if permission.can():
|
||||
return service.create(**args)
|
||||
if authority_permission.can():
|
||||
data["creator"] = g.user
|
||||
cert = service.create(**data)
|
||||
if isinstance(cert, Certificate):
|
||||
# only log if created, not pending
|
||||
log_service.create(g.user, "create_cert", certificate=cert)
|
||||
return cert
|
||||
|
||||
return dict(message="You are not authorized to use {0}".format(args['authority'].name)), 403
|
||||
return (
|
||||
dict(
|
||||
message="You are not authorized to use the authority: {0}".format(
|
||||
data["authority"].name
|
||||
)
|
||||
),
|
||||
403,
|
||||
)
|
||||
|
||||
|
||||
class CertificatesUpload(AuthenticatedResource):
|
||||
""" Defines the 'certificates' upload endpoint """
|
||||
|
||||
def __init__(self):
|
||||
self.reqparse = reqparse.RequestParser()
|
||||
super(CertificatesUpload, self).__init__()
|
||||
|
||||
@marshal_items(FIELDS)
|
||||
def post(self):
|
||||
@validate_schema(certificate_upload_input_schema, certificate_output_schema)
|
||||
def post(self, data=None):
|
||||
"""
|
||||
.. http:post:: /certificates/upload
|
||||
|
||||
@@ -368,12 +519,16 @@ class CertificatesUpload(AuthenticatedResource):
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
{
|
||||
"owner": "joe@exmaple.com",
|
||||
"publicCert": "---Begin Public...",
|
||||
"intermediateCert": "---Begin Public...",
|
||||
"privateKey": "---Begin Private..."
|
||||
"owner": "joe@example.com",
|
||||
"body": "-----BEGIN CERTIFICATE-----...",
|
||||
"chain": "-----BEGIN CERTIFICATE-----...",
|
||||
"privateKey": "-----BEGIN RSA PRIVATE KEY-----..."
|
||||
"csr": "-----BEGIN CERTIFICATE REQUEST-----..."
|
||||
"destinations": [],
|
||||
"notifications": [],
|
||||
"replacements": [],
|
||||
"roles": [],
|
||||
"notify": true,
|
||||
"name": "cert1"
|
||||
}
|
||||
|
||||
@@ -386,62 +541,86 @@ class CertificatesUpload(AuthenticatedResource):
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"id": 1,
|
||||
"name": "cert1",
|
||||
"description": "this is cert1",
|
||||
"bits": 2048,
|
||||
"deleted": false,
|
||||
"issuer": "ExampeInc.",
|
||||
"serial": "123450",
|
||||
"chain": "-----Begin ...",
|
||||
"body": "-----Begin ...",
|
||||
"san": true,
|
||||
"owner": "joe@example.com",
|
||||
"active": true,
|
||||
"notBefore": "2015-06-05T17:09:39",
|
||||
"notAfter": "2015-06-10T17:09:39",
|
||||
"cn": "example.com",
|
||||
"status": "unknown"
|
||||
"status": null,
|
||||
"cn": "*.test.example.net",
|
||||
"chain": "",
|
||||
"authority": {
|
||||
"active": true,
|
||||
"owner": "secure@example.com",
|
||||
"id": 1,
|
||||
"description": "verisign test authority",
|
||||
"name": "verisign"
|
||||
},
|
||||
"owner": "joe@example.com",
|
||||
"serial": "82311058732025924142789179368889309156",
|
||||
"id": 2288,
|
||||
"issuer": "SymantecCorporation",
|
||||
"dateCreated": "2016-06-03T06:09:42.133769+00:00",
|
||||
"notBefore": "2016-06-03T00:00:00+00:00",
|
||||
"notAfter": "2018-01-12T23:59:59+00:00",
|
||||
"destinations": [],
|
||||
"bits": 2048,
|
||||
"body": "-----BEGIN CERTIFICATE-----...",
|
||||
"description": null,
|
||||
"deleted": null,
|
||||
"notifications": [{
|
||||
"id": 1
|
||||
}],
|
||||
"signingAlgorithm": "sha256",
|
||||
"user": {
|
||||
"username": "jane",
|
||||
"active": true,
|
||||
"email": "jane@example.com",
|
||||
"id": 2
|
||||
},
|
||||
"active": true,
|
||||
"domains": [{
|
||||
"sensitive": false,
|
||||
"id": 1090,
|
||||
"name": "*.test.example.net"
|
||||
}],
|
||||
"replaces": [],
|
||||
"rotation": true,
|
||||
"rotationPolicy": {"name": "default"},
|
||||
"name": "WILDCARD.test.example.net-SymantecCorporation-20160603-20180112",
|
||||
"roles": [{
|
||||
"id": 464,
|
||||
"description": "This is a google group based role created by Lemur",
|
||||
"name": "joe@example.com"
|
||||
}],
|
||||
"san": null
|
||||
}
|
||||
|
||||
:arg owner: owner email for certificate
|
||||
:arg publicCert: valid PEM public key for certificate
|
||||
:arg intermediateCert: valid PEM intermediate certificate
|
||||
:arg privateKey: valid PEM private key for certificate
|
||||
:arg destinations: list of aws destinations to upload the certificate to
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 403: unauthenticated
|
||||
:statuscode 200: no error
|
||||
"""
|
||||
self.reqparse.add_argument('description', type=str, location='json')
|
||||
self.reqparse.add_argument('owner', type=str, required=True, location='json')
|
||||
self.reqparse.add_argument('name', type=str, location='json')
|
||||
self.reqparse.add_argument('publicCert', type=pem_str, required=True, dest='public_cert', location='json')
|
||||
self.reqparse.add_argument('destinations', type=list, default=[], dest='destinations', location='json')
|
||||
self.reqparse.add_argument('notifications', type=list, default=[], dest='notifications', location='json')
|
||||
self.reqparse.add_argument('intermediateCert', type=pem_str, dest='intermediate_cert', location='json')
|
||||
self.reqparse.add_argument('privateKey', type=private_key_str, dest='private_key', location='json')
|
||||
|
||||
args = self.reqparse.parse_args()
|
||||
if args.get('destinations'):
|
||||
if args.get('private_key'):
|
||||
return service.upload(**args)
|
||||
"""
|
||||
data["creator"] = g.user
|
||||
if data.get("destinations"):
|
||||
if data.get("private_key"):
|
||||
return service.upload(**data)
|
||||
else:
|
||||
raise Exception("Private key must be provided in order to upload certificate to AWS")
|
||||
return service.upload(**args)
|
||||
raise Exception(
|
||||
"Private key must be provided in order to upload certificate to AWS"
|
||||
)
|
||||
return service.upload(**data)
|
||||
|
||||
|
||||
class CertificatesStats(AuthenticatedResource):
|
||||
""" Defines the 'certificates' stats endpoint """
|
||||
|
||||
def __init__(self):
|
||||
self.reqparse = reqparse.RequestParser()
|
||||
super(CertificatesStats, self).__init__()
|
||||
|
||||
def get(self):
|
||||
self.reqparse.add_argument('metric', type=str, location='args')
|
||||
self.reqparse.add_argument('range', default=32, type=int, location='args')
|
||||
self.reqparse.add_argument('destinationId', dest='destination_id', location='args')
|
||||
self.reqparse.add_argument('active', type=str, default='true', location='args')
|
||||
self.reqparse.add_argument("metric", type=str, location="args")
|
||||
self.reqparse.add_argument("range", default=32, type=int, location="args")
|
||||
self.reqparse.add_argument(
|
||||
"destinationId", dest="destination_id", location="args"
|
||||
)
|
||||
self.reqparse.add_argument("active", type=str, default="true", location="args")
|
||||
|
||||
args = self.reqparse.parse_args()
|
||||
|
||||
@@ -476,7 +655,7 @@ class CertificatePrivateKey(AuthenticatedResource):
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"key": "----Begin ...",
|
||||
"key": "-----BEGIN ..."
|
||||
}
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
@@ -487,17 +666,19 @@ class CertificatePrivateKey(AuthenticatedResource):
|
||||
if not cert:
|
||||
return dict(message="Cannot find specified certificate"), 404
|
||||
|
||||
role = role_service.get_by_name(cert.owner)
|
||||
# allow creators
|
||||
if g.current_user != cert.user:
|
||||
owner_role = role_service.get_by_name(cert.owner)
|
||||
permission = CertificatePermission(owner_role, [x.name for x in cert.roles])
|
||||
|
||||
permission = ViewKeyPermission(certificate_id, getattr(role, 'name', None))
|
||||
if not permission.can():
|
||||
return dict(message="You are not authorized to view this key"), 403
|
||||
|
||||
if permission.can():
|
||||
response = make_response(jsonify(key=cert.private_key), 200)
|
||||
response.headers['cache-control'] = 'private, max-age=0, no-cache, no-store'
|
||||
response.headers['pragma'] = 'no-cache'
|
||||
return response
|
||||
|
||||
return dict(message='You are not authorized to view this key'), 403
|
||||
log_service.create(g.current_user, "key_view", certificate=cert)
|
||||
response = make_response(jsonify(key=cert.private_key), 200)
|
||||
response.headers["cache-control"] = "private, max-age=0, no-cache, no-store"
|
||||
response.headers["pragma"] = "no-cache"
|
||||
return response
|
||||
|
||||
|
||||
class Certificates(AuthenticatedResource):
|
||||
@@ -505,7 +686,7 @@ class Certificates(AuthenticatedResource):
|
||||
self.reqparse = reqparse.RequestParser()
|
||||
super(Certificates, self).__init__()
|
||||
|
||||
@marshal_items(FIELDS)
|
||||
@validate_schema(None, certificate_output_schema)
|
||||
def get(self, certificate_id):
|
||||
"""
|
||||
.. http:get:: /certificates/1
|
||||
@@ -529,32 +710,67 @@ class Certificates(AuthenticatedResource):
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"id": 1,
|
||||
"name": "cert1",
|
||||
"description": "this is cert1",
|
||||
"status": null,
|
||||
"cn": "*.test.example.net",
|
||||
"chain": "",
|
||||
"csr": "-----BEGIN CERTIFICATE REQUEST-----"
|
||||
"authority": {
|
||||
"active": true,
|
||||
"owner": "secure@example.com",
|
||||
"id": 1,
|
||||
"description": "verisign test authority",
|
||||
"name": "verisign"
|
||||
},
|
||||
"owner": "joe@example.com",
|
||||
"serial": "82311058732025924142789179368889309156",
|
||||
"id": 2288,
|
||||
"issuer": "SymantecCorporation",
|
||||
"dateCreated": "2016-06-03T06:09:42.133769+00:00",
|
||||
"notBefore": "2016-06-03T00:00:00+00:00",
|
||||
"notAfter": "2018-01-12T23:59:59+00:00",
|
||||
"destinations": [],
|
||||
"bits": 2048,
|
||||
"deleted": false,
|
||||
"issuer": "ExampeInc.",
|
||||
"serial": "123450",
|
||||
"chain": "-----Begin ...",
|
||||
"body": "-----Begin ...",
|
||||
"san": true,
|
||||
"owner": "bob@example.com",
|
||||
"body": "-----BEGIN CERTIFICATE-----...",
|
||||
"description": null,
|
||||
"deleted": null,
|
||||
"notifications": [{
|
||||
"id": 1
|
||||
}],
|
||||
"signingAlgorithm": "sha256",
|
||||
"user": {
|
||||
"username": "jane",
|
||||
"active": true,
|
||||
"email": "jane@example.com",
|
||||
"id": 2
|
||||
},
|
||||
"active": true,
|
||||
"notBefore": "2015-06-05T17:09:39",
|
||||
"notAfter": "2015-06-10T17:09:39",
|
||||
"cn": "example.com",
|
||||
"status": "unknown"
|
||||
"domains": [{
|
||||
"sensitive": false,
|
||||
"id": 1090,
|
||||
"name": "*.test.example.net"
|
||||
}],
|
||||
"rotation": true,
|
||||
"rotationPolicy": {"name": "default"},
|
||||
"replaces": [],
|
||||
"replaced": [],
|
||||
"name": "WILDCARD.test.example.net-SymantecCorporation-20160603-20180112",
|
||||
"roles": [{
|
||||
"id": 464,
|
||||
"description": "This is a google group based role created by Lemur",
|
||||
"name": "joe@example.com"
|
||||
}],
|
||||
"san": null
|
||||
}
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
|
||||
"""
|
||||
return service.get(certificate_id)
|
||||
|
||||
@marshal_items(FIELDS)
|
||||
def put(self, certificate_id):
|
||||
@validate_schema(certificate_edit_input_schema, certificate_output_schema)
|
||||
def put(self, certificate_id, data=None):
|
||||
"""
|
||||
.. http:put:: /certificates/1
|
||||
|
||||
@@ -572,7 +788,8 @@ class Certificates(AuthenticatedResource):
|
||||
"owner": "jimbob@example.com",
|
||||
"active": false
|
||||
"notifications": [],
|
||||
"destinations": []
|
||||
"destinations": [],
|
||||
"replacements": []
|
||||
}
|
||||
|
||||
**Example response**:
|
||||
@@ -584,60 +801,262 @@ class Certificates(AuthenticatedResource):
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"id": 1,
|
||||
"name": "cert1",
|
||||
"description": "this is cert1",
|
||||
"status": null,
|
||||
"cn": "*.test.example.net",
|
||||
"chain": "",
|
||||
"authority": {
|
||||
"active": true,
|
||||
"owner": "secure@example.com",
|
||||
"id": 1,
|
||||
"description": "verisign test authority",
|
||||
"name": "verisign"
|
||||
},
|
||||
"owner": "joe@example.com",
|
||||
"serial": "82311058732025924142789179368889309156",
|
||||
"id": 2288,
|
||||
"issuer": "SymantecCorporation",
|
||||
"dateCreated": "2016-06-03T06:09:42.133769+00:00",
|
||||
"notBefore": "2016-06-03T00:00:00+00:00",
|
||||
"notAfter": "2018-01-12T23:59:59+00:00",
|
||||
"destinations": [],
|
||||
"bits": 2048,
|
||||
"deleted": false,
|
||||
"issuer": "ExampeInc.",
|
||||
"serial": "123450",
|
||||
"chain": "-----Begin ...",
|
||||
"body": "-----Begin ...",
|
||||
"san": true,
|
||||
"owner": "jimbob@example.com",
|
||||
"active": false,
|
||||
"notBefore": "2015-06-05T17:09:39",
|
||||
"notAfter": "2015-06-10T17:09:39",
|
||||
"cn": "example.com",
|
||||
"status": "unknown",
|
||||
"body": "-----BEGIN CERTIFICATE-----...",
|
||||
"description": null,
|
||||
"deleted": null,
|
||||
"notifications": [{
|
||||
"id": 1
|
||||
}]
|
||||
"signingAlgorithm": "sha256",
|
||||
"user": {
|
||||
"username": "jane",
|
||||
"active": true,
|
||||
"email": "jane@example.com",
|
||||
"id": 2
|
||||
},
|
||||
"active": true,
|
||||
"domains": [{
|
||||
"sensitive": false,
|
||||
"id": 1090,
|
||||
"name": "*.test.example.net"
|
||||
}],
|
||||
"replaces": [],
|
||||
"name": "WILDCARD.test.example.net-SymantecCorporation-20160603-20180112",
|
||||
"roles": [{
|
||||
"id": 464,
|
||||
"description": "This is a google group based role created by Lemur",
|
||||
"name": "joe@example.com"
|
||||
}],
|
||||
"rotation": true,
|
||||
"rotationPolicy": {"name": "default"},
|
||||
"san": null
|
||||
}
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
|
||||
"""
|
||||
self.reqparse.add_argument('active', type=bool, location='json')
|
||||
self.reqparse.add_argument('owner', type=str, location='json')
|
||||
self.reqparse.add_argument('description', type=str, location='json')
|
||||
self.reqparse.add_argument('destinations', type=list, default=[], location='json')
|
||||
self.reqparse.add_argument('notifications', type=notification_list, default=[], location='json')
|
||||
args = self.reqparse.parse_args()
|
||||
cert = service.get(certificate_id)
|
||||
|
||||
if not cert:
|
||||
return dict(message="Cannot find specified certificate"), 404
|
||||
|
||||
# allow creators
|
||||
if g.current_user != cert.user:
|
||||
owner_role = role_service.get_by_name(cert.owner)
|
||||
permission = CertificatePermission(owner_role, [x.name for x in cert.roles])
|
||||
|
||||
if not permission.can():
|
||||
return (
|
||||
dict(message="You are not authorized to update this certificate"),
|
||||
403,
|
||||
)
|
||||
|
||||
for destination in data["destinations"]:
|
||||
if destination.plugin.requires_key:
|
||||
if not cert.private_key:
|
||||
return (
|
||||
dict(
|
||||
message="Unable to add destination: {0}. Certificate does not have required private key.".format(
|
||||
destination.label
|
||||
)
|
||||
),
|
||||
400,
|
||||
)
|
||||
|
||||
# if owner is changed, remove all notifications and roles associated with old owner
|
||||
if cert.owner != data["owner"]:
|
||||
service.cleanup_owner_roles_notification(cert.owner, data)
|
||||
|
||||
cert = service.update(certificate_id, **data)
|
||||
log_service.create(g.current_user, "update_cert", certificate=cert)
|
||||
return cert
|
||||
|
||||
@validate_schema(certificate_edit_input_schema, certificate_output_schema)
|
||||
def post(self, certificate_id, data=None):
|
||||
"""
|
||||
.. http:post:: /certificates/1/update/notify
|
||||
|
||||
Update certificate notification
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
POST /certificates/1/update/notify HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
{
|
||||
"notify": false
|
||||
}
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"status": null,
|
||||
"cn": "*.test.example.net",
|
||||
"chain": "",
|
||||
"authority": {
|
||||
"active": true,
|
||||
"owner": "secure@example.com",
|
||||
"id": 1,
|
||||
"description": "verisign test authority",
|
||||
"name": "verisign"
|
||||
},
|
||||
"owner": "joe@example.com",
|
||||
"serial": "82311058732025924142789179368889309156",
|
||||
"id": 2288,
|
||||
"issuer": "SymantecCorporation",
|
||||
"dateCreated": "2016-06-03T06:09:42.133769+00:00",
|
||||
"notBefore": "2016-06-03T00:00:00+00:00",
|
||||
"notAfter": "2018-01-12T23:59:59+00:00",
|
||||
"destinations": [],
|
||||
"bits": 2048,
|
||||
"body": "-----BEGIN CERTIFICATE-----...",
|
||||
"description": null,
|
||||
"deleted": null,
|
||||
"notify": false,
|
||||
"notifications": [{
|
||||
"id": 1
|
||||
}],
|
||||
"signingAlgorithm": "sha256",
|
||||
"user": {
|
||||
"username": "jane",
|
||||
"active": true,
|
||||
"email": "jane@example.com",
|
||||
"id": 2
|
||||
},
|
||||
"active": true,
|
||||
"domains": [{
|
||||
"sensitive": false,
|
||||
"id": 1090,
|
||||
"name": "*.test.example.net"
|
||||
}],
|
||||
"replaces": [],
|
||||
"name": "WILDCARD.test.example.net-SymantecCorporation-20160603-20180112",
|
||||
"roles": [{
|
||||
"id": 464,
|
||||
"description": "This is a google group based role created by Lemur",
|
||||
"name": "joe@example.com"
|
||||
}],
|
||||
"rotation": true,
|
||||
"rotationPolicy": {"name": "default"},
|
||||
"san": null
|
||||
}
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
|
||||
"""
|
||||
cert = service.get(certificate_id)
|
||||
|
||||
if not cert:
|
||||
return dict(message="Cannot find specified certificate"), 404
|
||||
|
||||
# allow creators
|
||||
if g.current_user != cert.user:
|
||||
owner_role = role_service.get_by_name(cert.owner)
|
||||
permission = CertificatePermission(owner_role, [x.name for x in cert.roles])
|
||||
|
||||
if not permission.can():
|
||||
return (
|
||||
dict(message="You are not authorized to update this certificate"),
|
||||
403,
|
||||
)
|
||||
|
||||
cert = service.update_notify(cert, data.get("notify"))
|
||||
log_service.create(g.current_user, "update_cert", certificate=cert)
|
||||
return cert
|
||||
|
||||
def delete(self, certificate_id, data=None):
|
||||
"""
|
||||
.. http:delete:: /certificates/1
|
||||
|
||||
Delete a certificate
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
DELETE /certificates/1 HTTP/1.1
|
||||
Host: example.com
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 204 No Content
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 204: no error
|
||||
:statuscode 403: unauthenticated
|
||||
:statuscode 404: certificate not found
|
||||
:statuscode 405: certificate deletion is disabled
|
||||
|
||||
"""
|
||||
if not current_app.config.get("ALLOW_CERT_DELETION", False):
|
||||
return dict(message="Certificate deletion is disabled"), 405
|
||||
|
||||
cert = service.get(certificate_id)
|
||||
if not cert:
|
||||
return dict(message="Cannot find specified certificate"), 404
|
||||
|
||||
if cert.deleted:
|
||||
return dict(message="Certificate is already deleted"), 412
|
||||
|
||||
# allow creators
|
||||
if g.current_user != cert.user:
|
||||
owner_role = role_service.get_by_name(cert.owner)
|
||||
permission = CertificatePermission(owner_role, [x.name for x in cert.roles])
|
||||
|
||||
if not permission.can():
|
||||
return (
|
||||
dict(message="You are not authorized to delete this certificate"),
|
||||
403,
|
||||
)
|
||||
|
||||
service.update(certificate_id, deleted=True)
|
||||
log_service.create(g.current_user, "delete_cert", certificate=cert)
|
||||
return "Certificate deleted", 204
|
||||
|
||||
|
||||
class NotificationCertificatesList(AuthenticatedResource):
|
||||
""" Defines the 'certificates' endpoint """
|
||||
|
||||
def __init__(self):
|
||||
self.reqparse = reqparse.RequestParser()
|
||||
super(NotificationCertificatesList, self).__init__()
|
||||
|
||||
@marshal_items(FIELDS)
|
||||
@validate_schema(None, certificates_output_schema)
|
||||
def get(self, notification_id):
|
||||
"""
|
||||
.. http:get:: /notifications/1/certificates
|
||||
@@ -661,55 +1080,412 @@ class NotificationCertificatesList(AuthenticatedResource):
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"items": [
|
||||
{
|
||||
"id": 1,
|
||||
"name": "cert1",
|
||||
"description": "this is cert1",
|
||||
"bits": 2048,
|
||||
"deleted": false,
|
||||
"issuer": "ExampeInc.",
|
||||
"serial": "123450",
|
||||
"chain": "-----Begin ...",
|
||||
"body": "-----Begin ...",
|
||||
"san": true,
|
||||
"owner": 'bob@example.com",
|
||||
"active": true,
|
||||
"notBefore": "2015-06-05T17:09:39",
|
||||
"notAfter": "2015-06-10T17:09:39",
|
||||
"cn": "example.com",
|
||||
"status": "unknown"
|
||||
}
|
||||
]
|
||||
"items": [{
|
||||
"status": null,
|
||||
"cn": "*.test.example.net",
|
||||
"chain": "",
|
||||
"csr": "-----BEGIN CERTIFICATE REQUEST-----"
|
||||
"authority": {
|
||||
"active": true,
|
||||
"owner": "secure@example.com",
|
||||
"id": 1,
|
||||
"description": "verisign test authority",
|
||||
"name": "verisign"
|
||||
},
|
||||
"owner": "joe@example.com",
|
||||
"serial": "82311058732025924142789179368889309156",
|
||||
"id": 2288,
|
||||
"issuer": "SymantecCorporation",
|
||||
"dateCreated": "2016-06-03T06:09:42.133769+00:00",
|
||||
"notBefore": "2016-06-03T00:00:00+00:00",
|
||||
"notAfter": "2018-01-12T23:59:59+00:00",
|
||||
"destinations": [],
|
||||
"bits": 2048,
|
||||
"body": "-----BEGIN CERTIFICATE-----...",
|
||||
"description": null,
|
||||
"deleted": null,
|
||||
"notifications": [{
|
||||
"id": 1
|
||||
}],
|
||||
"signingAlgorithm": "sha256",
|
||||
"user": {
|
||||
"username": "jane",
|
||||
"active": true,
|
||||
"email": "jane@example.com",
|
||||
"id": 2
|
||||
},
|
||||
"active": true,
|
||||
"domains": [{
|
||||
"sensitive": false,
|
||||
"id": 1090,
|
||||
"name": "*.test.example.net"
|
||||
}],
|
||||
"replaces": [],
|
||||
"replaced": [],
|
||||
"rotation": true,
|
||||
"rotationPolicy": {"name": "default"},
|
||||
"name": "WILDCARD.test.example.net-SymantecCorporation-20160603-20180112",
|
||||
"roles": [{
|
||||
"id": 464,
|
||||
"description": "This is a google group based role created by Lemur",
|
||||
"name": "joe@example.com"
|
||||
}],
|
||||
"san": null
|
||||
}],
|
||||
"total": 1
|
||||
}
|
||||
|
||||
:query sortBy: field to sort on
:query sortDir: asc or desc
:query page: int. default is 1
:query filter: key value pair. format is k;v
:query count: count number. default is 10
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
|
||||
"""
|
||||
parser = paginated_parser.copy()
|
||||
parser.add_argument("timeRange", type=int, dest="time_range", location="args")
|
||||
parser.add_argument("owner", type=inputs.boolean, location="args")
|
||||
parser.add_argument("id", type=str, location="args")
|
||||
parser.add_argument("active", type=inputs.boolean, location="args")
|
||||
parser.add_argument(
|
||||
"destinationId", type=int, dest="destination_id", location="args"
|
||||
)
|
||||
parser.add_argument("creator", type=str, location="args")
|
||||
parser.add_argument("show", type=str, location="args")
|
||||
|
||||
args = parser.parse_args()
|
||||
args["notification_id"] = notification_id
|
||||
args["user"] = g.current_user
|
||||
return service.render(args)
|
||||
|
||||
|
||||
class CertificatesReplacementsList(AuthenticatedResource):
|
||||
def __init__(self):
|
||||
self.reqparse = reqparse.RequestParser()
|
||||
super(CertificatesReplacementsList, self).__init__()
|
||||
|
||||
@validate_schema(None, certificates_output_schema)
|
||||
def get(self, certificate_id):
|
||||
"""
|
||||
.. http:get:: /certificates/1/replacements
|
||||
|
||||
One certificate
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
GET /certificates/1/replacements HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"items": [{
|
||||
"status": null,
|
||||
"cn": "*.test.example.net",
|
||||
"chain": "",
|
||||
"csr": "-----BEGIN CERTIFICATE REQUEST-----",
|
||||
"authority": {
|
||||
"active": true,
|
||||
"owner": "secure@example.com",
|
||||
"id": 1,
|
||||
"description": "verisign test authority",
|
||||
"name": "verisign"
|
||||
},
|
||||
"owner": "joe@example.com",
|
||||
"serial": "82311058732025924142789179368889309156",
|
||||
"id": 2288,
|
||||
"issuer": "SymantecCorporation",
|
||||
"dateCreated": "2016-06-03T06:09:42.133769+00:00",
|
||||
"notBefore": "2016-06-03T00:00:00+00:00",
|
||||
"notAfter": "2018-01-12T23:59:59+00:00",
|
||||
"destinations": [],
|
||||
"bits": 2048,
|
||||
"body": "-----BEGIN CERTIFICATE-----...",
|
||||
"description": null,
|
||||
"deleted": null,
|
||||
"notifications": [{
|
||||
"id": 1
|
||||
}],
|
||||
"signingAlgorithm": "sha256",
|
||||
"user": {
|
||||
"username": "jane",
|
||||
"active": true,
|
||||
"email": "jane@example.com",
|
||||
"id": 2
|
||||
},
|
||||
"active": true,
|
||||
"domains": [{
|
||||
"sensitive": false,
|
||||
"id": 1090,
|
||||
"name": "*.test.example.net"
|
||||
}],
|
||||
"replaces": [],
|
||||
"replaced": [],
|
||||
"rotation": true,
|
||||
"rotationPolicy": {"name": "default"},
|
||||
"name": "WILDCARD.test.example.net-SymantecCorporation-20160603-20180112",
|
||||
"roles": [{
|
||||
"id": 464,
|
||||
"description": "This is a google group based role created by Lemur",
|
||||
"name": "joe@example.com"
|
||||
}],
|
||||
"san": null
|
||||
}],
|
||||
"total": 1
|
||||
}
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
|
||||
"""
|
||||
return service.get(certificate_id).replaces
|
||||
|
||||
|
||||
class CertificateExport(AuthenticatedResource):
|
||||
def __init__(self):
|
||||
self.reqparse = reqparse.RequestParser()
|
||||
super(CertificateExport, self).__init__()
|
||||
|
||||
@validate_schema(certificate_export_input_schema, None)
|
||||
def post(self, certificate_id, data=None):
|
||||
"""
|
||||
.. http:post:: /certificates/1/export
|
||||
|
||||
Export a certificate
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
POST /certificates/1/export HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
{
|
||||
"export": {
|
||||
"plugin": {
|
||||
"pluginOptions": [{
|
||||
"available": ["Java Key Store (JKS)"],
|
||||
"required": true,
|
||||
"type": "select",
|
||||
"name": "type",
|
||||
"helpMessage": "Choose the format you wish to export",
|
||||
"value": "Java Key Store (JKS)"
|
||||
}, {
|
||||
"required": false,
|
||||
"type": "str",
|
||||
"name": "passphrase",
|
||||
"validation": "^(?=.*[A-Za-z])(?=.*\\d)(?=.*[$@$!%*#?&])[A-Za-z\\d$@$!%*#?&]{8,}$",
|
||||
"helpMessage": "If no passphrase is given one will be generated for you, we highly recommend this. Minimum length is 8."
|
||||
}, {
|
||||
"required": false,
|
||||
"type": "str",
|
||||
"name": "alias",
|
||||
"helpMessage": "Enter the alias you wish to use for the keystore."
|
||||
}],
|
||||
"version": "unknown",
|
||||
"description": "Attempts to generate a JKS keystore or truststore",
|
||||
"title": "Java",
|
||||
"author": "Kevin Glisson",
|
||||
"type": "export",
|
||||
"slug": "java-export"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"data": "base64encodedstring",
|
||||
"passphrase": "UAWOHW#&@_%!tnwmxh832025",
|
||||
"extension": "jks"
|
||||
}
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
|
||||
"""
|
||||
cert = service.get(certificate_id)
|
||||
|
||||
if not cert:
|
||||
return dict(message="Cannot find specified certificate"), 404
|
||||
|
||||
plugin = data["plugin"]["plugin_object"]
|
||||
|
||||
if plugin.requires_key:
|
||||
if not cert.private_key:
|
||||
return (
|
||||
dict(
|
||||
message="Unable to export certificate, plugin: {0} requires a private key but no key was found.".format(
|
||||
plugin.slug
|
||||
)
|
||||
),
|
||||
400,
|
||||
)
|
||||
|
||||
else:
|
||||
# allow creators
|
||||
if g.current_user != cert.user:
|
||||
owner_role = role_service.get_by_name(cert.owner)
|
||||
permission = CertificatePermission(
|
||||
owner_role, [x.name for x in cert.roles]
|
||||
)
|
||||
|
||||
if not permission.can():
|
||||
return (
|
||||
dict(
|
||||
message="You are not authorized to export this certificate."
|
||||
),
|
||||
403,
|
||||
)
|
||||
|
||||
options = data["plugin"]["plugin_options"]
|
||||
|
||||
log_service.create(g.current_user, "key_view", certificate=cert)
|
||||
extension, passphrase, data = plugin.export(
|
||||
cert.body, cert.chain, cert.private_key, options
|
||||
)
|
||||
|
||||
# we take a hit in message size when b64 encoding
|
||||
return dict(
|
||||
extension=extension,
|
||||
passphrase=passphrase,
|
||||
data=base64.b64encode(data).decode("utf-8"),
|
||||
)
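# Illustrative sketch (not part of this module): consuming the export response on the
# client side. The variable names are assumptions; the base64 decoding simply mirrors
# the b64encode performed in the return value above.
#
#   import base64
#   resp = ...  # JSON body returned by this endpoint (see the response example above)
#   with open("keystore." + resp["extension"], "wb") as f:
#       f.write(base64.b64decode(resp["data"]))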
|
||||
|
||||
|
||||
class CertificateRevoke(AuthenticatedResource):
|
||||
def __init__(self):
|
||||
self.reqparse = reqparse.RequestParser()
|
||||
super(CertificateRevoke, self).__init__()
|
||||
|
||||
@validate_schema(None, None)
|
||||
def put(self, certificate_id, data=None):
|
||||
"""
|
||||
.. http:put:: /certificates/1/revoke
|
||||
|
||||
Revoke a certificate
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
PUT /certificates/1/revoke HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"id": 1
|
||||
}
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
|
||||
"""
|
||||
cert = service.get(certificate_id)
|
||||
|
||||
if not cert:
|
||||
return dict(message="Cannot find specified certificate"), 404
|
||||
|
||||
# allow creators
|
||||
if g.current_user != cert.user:
|
||||
owner_role = role_service.get_by_name(cert.owner)
|
||||
permission = CertificatePermission(owner_role, [x.name for x in cert.roles])
|
||||
|
||||
if not permission.can():
|
||||
return (
|
||||
dict(message="You are not authorized to revoke this certificate."),
|
||||
403,
|
||||
)
|
||||
|
||||
if not cert.external_id:
|
||||
return dict(message="Cannot revoke certificate. No external id found."), 400
|
||||
|
||||
if cert.endpoints:
|
||||
return (
|
||||
dict(
|
||||
message="Cannot revoke certificate. Endpoints are deployed with the given certificate."
|
||||
),
|
||||
403,
|
||||
)
|
||||
|
||||
plugin = plugins.get(cert.authority.plugin_name)
|
||||
plugin.revoke_certificate(cert, data)
|
||||
log_service.create(g.current_user, "revoke_cert", certificate=cert)
|
||||
return dict(id=cert.id)
|
||||
|
||||
|
||||
api.add_resource(
|
||||
CertificateRevoke,
|
||||
"/certificates/<int:certificate_id>/revoke",
|
||||
endpoint="revokeCertificate",
|
||||
)
|
||||
api.add_resource(
|
||||
CertificatesNameQuery,
|
||||
"/certificates/name/<string:certificate_name>",
|
||||
endpoint="certificatesNameQuery",
|
||||
)
|
||||
api.add_resource(CertificatesList, "/certificates", endpoint="certificates")
|
||||
api.add_resource(
|
||||
CertificatesListValid, "/certificates/valid", endpoint="certificatesListValid"
|
||||
)
|
||||
api.add_resource(
|
||||
Certificates, "/certificates/<int:certificate_id>", endpoint="certificate"
|
||||
)
|
||||
api.add_resource(
|
||||
Certificates, "/certificates/<int:certificate_id>/update/notify", endpoint="certificateUpdateNotify"
|
||||
)
|
||||
api.add_resource(CertificatesStats, "/certificates/stats", endpoint="certificateStats")
|
||||
api.add_resource(
|
||||
CertificatesUpload, "/certificates/upload", endpoint="certificateUpload"
|
||||
)
|
||||
api.add_resource(
|
||||
CertificatePrivateKey,
|
||||
"/certificates/<int:certificate_id>/key",
|
||||
endpoint="privateKeyCertificates",
|
||||
)
|
||||
api.add_resource(
|
||||
CertificateExport,
|
||||
"/certificates/<int:certificate_id>/export",
|
||||
endpoint="exportCertificate",
|
||||
)
|
||||
api.add_resource(
|
||||
NotificationCertificatesList,
|
||||
"/notifications/<int:notification_id>/certificates",
|
||||
endpoint="notificationCertificates",
|
||||
)
|
||||
api.add_resource(
|
||||
CertificatesReplacementsList,
|
||||
"/certificates/<int:certificate_id>/replacements",
|
||||
endpoint="replacements",
|
||||
)
|
||||
|
||||
844 lemur/common/celery.py Normal file
@@ -0,0 +1,844 @@
|
||||
"""
|
||||
This module defines celery tasks and their applicable schedules. The celery beat server and workers will start
|
||||
when invoked.
|
||||
|
||||
When run in development mode (LEMUR_CONFIG=<location of development configuration file>), to run both the celery
|
||||
beat scheduler and a worker simultaneously, and to have jobs kick off starting at the next minute, run the following
|
||||
command: celery -A lemur.common.celery worker --loglevel=info -l DEBUG -B
|
||||
|
||||
"""
|
||||
import copy
|
||||
import sys
|
||||
import time
|
||||
from celery import Celery
|
||||
from celery.app.task import Context
|
||||
from celery.exceptions import SoftTimeLimitExceeded
|
||||
from celery.signals import task_failure, task_received, task_revoked, task_success
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from flask import current_app
|
||||
|
||||
from lemur.authorities.service import get as get_authority
|
||||
from lemur.certificates import cli as cli_certificate
|
||||
from lemur.common.redis import RedisHandler
|
||||
from lemur.destinations import service as destinations_service
|
||||
from lemur.dns_providers import cli as cli_dns_providers
|
||||
from lemur.endpoints import cli as cli_endpoints
|
||||
from lemur.extensions import metrics, sentry
|
||||
from lemur.factory import create_app
|
||||
from lemur.notifications import cli as cli_notification
|
||||
from lemur.notifications.messaging import send_pending_failure_notification
|
||||
from lemur.pending_certificates import service as pending_certificate_service
|
||||
from lemur.plugins.base import plugins
|
||||
from lemur.sources.cli import clean, sync, validate_sources
|
||||
from lemur.sources.service import add_aws_destination_to_sources
|
||||
|
||||
if current_app:
|
||||
flask_app = current_app
|
||||
else:
|
||||
flask_app = create_app()
|
||||
|
||||
red = RedisHandler().redis()
|
||||
|
||||
|
||||
def make_celery(app):
|
||||
celery = Celery(
|
||||
app.import_name,
|
||||
backend=app.config.get("CELERY_RESULT_BACKEND"),
|
||||
broker=app.config.get("CELERY_BROKER_URL"),
|
||||
)
|
||||
celery.conf.update(app.config)
|
||||
TaskBase = celery.Task
|
||||
|
||||
class ContextTask(TaskBase):
|
||||
abstract = True
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
with app.app_context():
|
||||
return TaskBase.__call__(self, *args, **kwargs)
|
||||
|
||||
celery.Task = ContextTask
|
||||
return celery
|
||||
|
||||
|
||||
celery = make_celery(flask_app)
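# Illustrative sketch (an assumption, not the project's shipped defaults): a
# CELERYBEAT_SCHEDULE entry in the Lemur config file, matching the shape that
# report_celery_last_success_metrics() iterates over below ({"task": ..., "schedule": ...}).
#
#   from celery.schedules import crontab
#
#   CELERYBEAT_SCHEDULE = {
#       "fetch_all_pending_acme_certs": {
#           "task": "lemur.common.celery.fetch_all_pending_acme_certs",
#           "options": {"expires": 180},
#           "schedule": crontab(minute="*"),
#       },
#   }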
|
||||
|
||||
|
||||
def is_task_active(fun, task_id, args):
|
||||
from celery.task.control import inspect
|
||||
|
||||
if not args:
|
||||
args = "()" # empty args
|
||||
|
||||
i = inspect()
|
||||
active_tasks = i.active()
|
||||
for _, tasks in active_tasks.items():
|
||||
for task in tasks:
|
||||
if task.get("id") == task_id:
|
||||
continue
|
||||
if task.get("name") == fun and task.get("args") == str(args):
|
||||
return True
|
||||
return False
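# Example (illustrative): before re-running a long sync, a task can bail out early.
# "aws-prod" is a hypothetical source label.
#   if is_task_active("lemur.common.celery.sync_source", task_id, ("aws-prod",)):
#       return  # another worker already holds this job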
|
||||
|
||||
|
||||
def get_celery_request_tags(**kwargs):
|
||||
request = kwargs.get("request")
|
||||
sender_hostname = "unknown"
|
||||
sender = kwargs.get("sender")
|
||||
if sender:
|
||||
try:
|
||||
sender_hostname = sender.hostname
|
||||
except AttributeError:
|
||||
sender_hostname = vars(sender.request).get("origin", "unknown")
|
||||
if request and not isinstance(
|
||||
request, Context
|
||||
): # unlike others, task_revoked sends a Context for `request`
|
||||
task_name = request.name
|
||||
task_id = request.id
|
||||
receiver_hostname = request.hostname
|
||||
else:
|
||||
task_name = sender.name
|
||||
task_id = sender.request.id
|
||||
receiver_hostname = sender.request.hostname
|
||||
|
||||
tags = {
|
||||
"task_name": task_name,
|
||||
"task_id": task_id,
|
||||
"sender_hostname": sender_hostname,
|
||||
"receiver_hostname": receiver_hostname,
|
||||
}
|
||||
if kwargs.get("exception"):
|
||||
tags["error"] = repr(kwargs["exception"])
|
||||
return tags
|
||||
|
||||
|
||||
@celery.task()
|
||||
def report_celery_last_success_metrics():
|
||||
"""
|
||||
For each celery task, this will determine the number of seconds since it has last been successful.
|
||||
|
||||
Celery tasks should be emitting redis stats with a deterministic key (in our case, `f"{task}.last_success"`).
|
||||
report_celery_last_success_metrics should be run periodically to emit metrics on when a task was last successful.
|
||||
Admins can then alert when tasks are not run when intended. Admins should also alert when no metrics are emitted
|
||||
from this function.
|
||||
"""
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
task_id = None
|
||||
if celery.current_task:
|
||||
task_id = celery.current_task.request.id
|
||||
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "recurrent task",
|
||||
"task_id": task_id,
|
||||
}
|
||||
|
||||
if task_id and is_task_active(function, task_id, None):
|
||||
log_data["message"] = "Skipping task: Task is already active"
|
||||
current_app.logger.debug(log_data)
|
||||
return
|
||||
|
||||
current_time = int(time.time())
|
||||
schedule = current_app.config.get("CELERYBEAT_SCHEDULE")
|
||||
for _, t in schedule.items():
|
||||
task = t.get("task")
|
||||
last_success = int(red.get(f"{task}.last_success") or 0)
|
||||
metrics.send(
|
||||
f"{task}.time_since_last_success", "gauge", current_time - last_success
|
||||
)
|
||||
red.set(
|
||||
f"{function}.last_success", int(time.time())
|
||||
) # Alert if this metric is not seen
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
|
||||
|
||||
@task_received.connect
|
||||
def report_number_pending_tasks(**kwargs):
|
||||
"""
|
||||
Report the number of pending tasks to our metrics broker every time a task is published. This metric can be used
|
||||
for autoscaling workers.
|
||||
https://docs.celeryproject.org/en/latest/userguide/signals.html#task-received
|
||||
"""
|
||||
with flask_app.app_context():
|
||||
metrics.send(
|
||||
"celery.new_pending_task",
|
||||
"TIMER",
|
||||
1,
|
||||
metric_tags=get_celery_request_tags(**kwargs),
|
||||
)
|
||||
|
||||
|
||||
@task_success.connect
|
||||
def report_successful_task(**kwargs):
|
||||
"""
|
||||
Report a generic success metric as tasks to our metrics broker every time a task finished correctly.
|
||||
This metric can be used for autoscaling workers.
|
||||
https://docs.celeryproject.org/en/latest/userguide/signals.html#task-success
|
||||
"""
|
||||
with flask_app.app_context():
|
||||
tags = get_celery_request_tags(**kwargs)
|
||||
red.set(f"{tags['task_name']}.last_success", int(time.time()))
|
||||
metrics.send("celery.successful_task", "TIMER", 1, metric_tags=tags)
|
||||
|
||||
|
||||
@task_failure.connect
|
||||
def report_failed_task(**kwargs):
|
||||
"""
|
||||
Report a generic failure metric as tasks to our metrics broker every time a task fails.
|
||||
This metric can be used for alerting.
|
||||
https://docs.celeryproject.org/en/latest/userguide/signals.html#task-failure
|
||||
"""
|
||||
with flask_app.app_context():
|
||||
log_data = {
|
||||
"function": f"{__name__}.{sys._getframe().f_code.co_name}",
|
||||
"Message": "Celery Task Failure",
|
||||
}
|
||||
|
||||
# Add traceback if exception info is in the kwargs
|
||||
einfo = kwargs.get("einfo")
|
||||
if einfo:
|
||||
log_data["traceback"] = einfo.traceback
|
||||
|
||||
error_tags = get_celery_request_tags(**kwargs)
|
||||
|
||||
log_data.update(error_tags)
|
||||
current_app.logger.error(log_data)
|
||||
metrics.send("celery.failed_task", "TIMER", 1, metric_tags=error_tags)
|
||||
|
||||
|
||||
@task_revoked.connect
|
||||
def report_revoked_task(**kwargs):
|
||||
"""
|
||||
Report a generic failure metric as tasks to our metrics broker every time a task is revoked.
|
||||
This metric can be used for alerting.
|
||||
https://docs.celeryproject.org/en/latest/userguide/signals.html#task-revoked
|
||||
"""
|
||||
with flask_app.app_context():
|
||||
log_data = {
|
||||
"function": f"{__name__}.{sys._getframe().f_code.co_name}",
|
||||
"Message": "Celery Task Revoked",
|
||||
}
|
||||
|
||||
error_tags = get_celery_request_tags(**kwargs)
|
||||
|
||||
log_data.update(error_tags)
|
||||
current_app.logger.error(log_data)
|
||||
metrics.send("celery.revoked_task", "TIMER", 1, metric_tags=error_tags)
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=600)
|
||||
def fetch_acme_cert(id):
|
||||
"""
|
||||
Attempt to get the full certificate for the pending certificate listed.
|
||||
|
||||
Args:
|
||||
id: an id of a PendingCertificate
|
||||
"""
|
||||
task_id = None
|
||||
if celery.current_task:
|
||||
task_id = celery.current_task.request.id
|
||||
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "Resolving pending certificate {}".format(id),
|
||||
"task_id": task_id,
|
||||
"id": id,
|
||||
}
|
||||
|
||||
current_app.logger.debug(log_data)
|
||||
|
||||
if task_id and is_task_active(log_data["function"], task_id, (id,)):
|
||||
log_data["message"] = "Skipping task: Task is already active"
|
||||
current_app.logger.debug(log_data)
|
||||
return
|
||||
|
||||
pending_certs = pending_certificate_service.get_pending_certs([id])
|
||||
new = 0
|
||||
failed = 0
|
||||
wrong_issuer = 0
|
||||
acme_certs = []
|
||||
|
||||
# We only care about certs using the acme-issuer plugin
|
||||
for cert in pending_certs:
|
||||
cert_authority = get_authority(cert.authority_id)
|
||||
if cert_authority.plugin_name == "acme-issuer":
|
||||
acme_certs.append(cert)
|
||||
else:
|
||||
wrong_issuer += 1
|
||||
|
||||
authority = plugins.get("acme-issuer")
|
||||
resolved_certs = authority.get_ordered_certificates(acme_certs)
|
||||
|
||||
for cert in resolved_certs:
|
||||
real_cert = cert.get("cert")
|
||||
# It's necessary to reload the pending cert due to detached instance: http://sqlalche.me/e/bhk3
|
||||
pending_cert = pending_certificate_service.get(cert.get("pending_cert").id)
|
||||
if not pending_cert:
|
||||
log_data[
|
||||
"message"
|
||||
] = "Pending certificate doesn't exist anymore. Was it resolved by another process?"
|
||||
current_app.logger.error(log_data)
|
||||
continue
|
||||
if real_cert:
|
||||
# If a real certificate was returned from issuer, then create it in Lemur and mark
|
||||
# the pending certificate as resolved
|
||||
final_cert = pending_certificate_service.create_certificate(
|
||||
pending_cert, real_cert, pending_cert.user
|
||||
)
|
||||
pending_certificate_service.update(
|
||||
cert.get("pending_cert").id, resolved_cert_id=final_cert.id
|
||||
)
|
||||
pending_certificate_service.update(
|
||||
cert.get("pending_cert").id, resolved=True
|
||||
)
|
||||
# add metrics to metrics extension
|
||||
new += 1
|
||||
else:
|
||||
failed += 1
|
||||
error_log = copy.deepcopy(log_data)
|
||||
error_log["message"] = "Pending certificate creation failure"
|
||||
error_log["pending_cert_id"] = pending_cert.id
|
||||
error_log["last_error"] = cert.get("last_error")
|
||||
error_log["cn"] = pending_cert.cn
|
||||
|
||||
if pending_cert.number_attempts > 4:
|
||||
error_log["message"] = "Deleting pending certificate"
|
||||
send_pending_failure_notification(
|
||||
pending_cert, notify_owner=pending_cert.notify
|
||||
)
|
||||
# Mark the pending cert as resolved
|
||||
pending_certificate_service.update(
|
||||
cert.get("pending_cert").id, resolved=True
|
||||
)
|
||||
else:
|
||||
pending_certificate_service.increment_attempt(pending_cert)
|
||||
pending_certificate_service.update(
|
||||
cert.get("pending_cert").id, status=str(cert.get("last_error"))
|
||||
)
|
||||
# Add failed pending cert task back to queue
|
||||
fetch_acme_cert.delay(id)
|
||||
current_app.logger.error(error_log)
|
||||
log_data["message"] = "Complete"
|
||||
log_data["new"] = new
|
||||
log_data["failed"] = failed
|
||||
log_data["wrong_issuer"] = wrong_issuer
|
||||
current_app.logger.debug(log_data)
|
||||
metrics.send(f"{function}.resolved", "gauge", new)
|
||||
metrics.send(f"{function}.failed", "gauge", failed)
|
||||
metrics.send(f"{function}.wrong_issuer", "gauge", wrong_issuer)
|
||||
print(
|
||||
"[+] Certificates: New: {new} Failed: {failed} Not using ACME: {wrong_issuer}".format(
|
||||
new=new, failed=failed, wrong_issuer=wrong_issuer
|
||||
)
|
||||
)
|
||||
return log_data
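# Example (illustrative): queue resolution of a single pending certificate by id,
# exactly as the retry branch above does; 1234 is a placeholder PendingCertificate id.
#   fetch_acme_cert.delay(1234)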
|
||||
|
||||
|
||||
@celery.task()
|
||||
def fetch_all_pending_acme_certs():
|
||||
"""Instantiate celery workers to resolve all pending Acme certificates"""
|
||||
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
task_id = None
|
||||
if celery.current_task:
|
||||
task_id = celery.current_task.request.id
|
||||
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "Starting job.",
|
||||
"task_id": task_id,
|
||||
}
|
||||
|
||||
if task_id and is_task_active(function, task_id, None):
|
||||
log_data["message"] = "Skipping task: Task is already active"
|
||||
current_app.logger.debug(log_data)
|
||||
return
|
||||
|
||||
current_app.logger.debug(log_data)
|
||||
pending_certs = pending_certificate_service.get_unresolved_pending_certs()
|
||||
|
||||
# We only care about certs using the acme-issuer plugin
|
||||
for cert in pending_certs:
|
||||
cert_authority = get_authority(cert.authority_id)
|
||||
if cert_authority.plugin_name == "acme-issuer":
|
||||
if datetime.now(timezone.utc) - cert.last_updated > timedelta(minutes=5):
|
||||
log_data["message"] = "Triggering job for cert {}".format(cert.name)
|
||||
log_data["cert_name"] = cert.name
|
||||
log_data["cert_id"] = cert.id
|
||||
current_app.logger.debug(log_data)
|
||||
fetch_acme_cert.delay(cert.id)
|
||||
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task()
|
||||
def remove_old_acme_certs():
|
||||
"""Prune old pending acme certificates from the database"""
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
task_id = None
|
||||
if celery.current_task:
|
||||
task_id = celery.current_task.request.id
|
||||
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "Starting job.",
|
||||
"task_id": task_id,
|
||||
}
|
||||
|
||||
if task_id and is_task_active(function, task_id, None):
|
||||
log_data["message"] = "Skipping task: Task is already active"
|
||||
current_app.logger.debug(log_data)
|
||||
return
|
||||
|
||||
pending_certs = pending_certificate_service.get_pending_certs("all")
|
||||
|
||||
# Delete pending certs more than a week old
|
||||
for cert in pending_certs:
|
||||
if datetime.now(timezone.utc) - cert.last_updated > timedelta(days=7):
|
||||
log_data["pending_cert_id"] = cert.id
|
||||
log_data["pending_cert_name"] = cert.name
|
||||
log_data["message"] = "Deleting pending certificate"
|
||||
current_app.logger.debug(log_data)
|
||||
pending_certificate_service.delete(cert)
|
||||
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task()
|
||||
def clean_all_sources():
|
||||
"""
|
||||
This function will clean unused certificates from sources. This is a destructive operation and should only
|
||||
be run periodically. This function triggers one celery task per source.
|
||||
"""
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
task_id = None
|
||||
if celery.current_task:
|
||||
task_id = celery.current_task.request.id
|
||||
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "Creating celery task to clean source",
|
||||
"task_id": task_id,
|
||||
}
|
||||
|
||||
if task_id and is_task_active(function, task_id, None):
|
||||
log_data["message"] = "Skipping task: Task is already active"
|
||||
current_app.logger.debug(log_data)
|
||||
return
|
||||
|
||||
sources = validate_sources("all")
|
||||
for source in sources:
|
||||
log_data["source"] = source.label
|
||||
current_app.logger.debug(log_data)
|
||||
clean_source.delay(source.label)
|
||||
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=3600)
|
||||
def clean_source(source):
|
||||
"""
|
||||
This celery task will clean the specified source. This is a destructive operation that will delete unused
|
||||
certificates from each source.
|
||||
|
||||
:param source:
|
||||
:return:
|
||||
"""
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
task_id = None
|
||||
if celery.current_task:
|
||||
task_id = celery.current_task.request.id
|
||||
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "Cleaning source",
|
||||
"source": source,
|
||||
"task_id": task_id,
|
||||
}
|
||||
|
||||
if task_id and is_task_active(function, task_id, (source,)):
|
||||
log_data["message"] = "Skipping task: Task is already active"
|
||||
current_app.logger.debug(log_data)
|
||||
return
|
||||
|
||||
current_app.logger.debug(log_data)
|
||||
try:
|
||||
clean([source], True)
|
||||
except SoftTimeLimitExceeded:
|
||||
log_data["message"] = "Clean source: Time limit exceeded."
|
||||
current_app.logger.error(log_data)
|
||||
sentry.captureException()
|
||||
metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task()
|
||||
def sync_all_sources():
|
||||
"""
|
||||
This function will sync certificates from all sources. This function triggers one celery task per source.
|
||||
"""
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
task_id = None
|
||||
if celery.current_task:
|
||||
task_id = celery.current_task.request.id
|
||||
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "creating celery task to sync source",
|
||||
"task_id": task_id,
|
||||
}
|
||||
|
||||
if task_id and is_task_active(function, task_id, None):
|
||||
log_data["message"] = "Skipping task: Task is already active"
|
||||
current_app.logger.debug(log_data)
|
||||
return
|
||||
|
||||
sources = validate_sources("all")
|
||||
for source in sources:
|
||||
log_data["source"] = source.label
|
||||
current_app.logger.debug(log_data)
|
||||
sync_source.delay(source.label)
|
||||
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=7200)
|
||||
def sync_source(source):
|
||||
"""
|
||||
This celery task will sync the specified source.
|
||||
|
||||
:param source:
|
||||
:return:
|
||||
"""
|
||||
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
task_id = None
|
||||
if celery.current_task:
|
||||
task_id = celery.current_task.request.id
|
||||
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "Syncing source",
|
||||
"source": source,
|
||||
"task_id": task_id,
|
||||
}
|
||||
|
||||
if task_id and is_task_active(function, task_id, (source,)):
|
||||
log_data["message"] = "Skipping task: Task is already active"
|
||||
current_app.logger.debug(log_data)
|
||||
return
|
||||
|
||||
current_app.logger.debug(log_data)
|
||||
try:
|
||||
sync([source])
|
||||
metrics.send(
|
||||
f"{function}.success", "counter", 1, metric_tags={"source": source}
|
||||
)
|
||||
except SoftTimeLimitExceeded:
|
||||
log_data["message"] = "Error syncing source: Time limit exceeded."
|
||||
current_app.logger.error(log_data)
|
||||
sentry.captureException()
|
||||
metrics.send(
|
||||
"sync_source_timeout", "counter", 1, metric_tags={"source": source}
|
||||
)
|
||||
metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
|
||||
return
|
||||
|
||||
log_data["message"] = "Done syncing source"
|
||||
current_app.logger.debug(log_data)
|
||||
metrics.send(f"{function}.success", "counter", 1, metric_tags={"source": source})
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task()
|
||||
def sync_source_destination():
|
||||
"""
|
||||
This celery task will sync destinations and sources, to make sure all new destinations are also present as sources.
|
||||
Some destinations do not qualify as sources, and hence should be excluded from being added as sources
|
||||
We identify qualified destinations based on the sync_as_source attribute of the plugin.
|
||||
The destination sync_as_source_name reveals the name of the suitable source-plugin.
|
||||
We rely on account numbers to avoid duplicates.
|
||||
"""
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
task_id = None
|
||||
if celery.current_task:
|
||||
task_id = celery.current_task.request.id
|
||||
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "syncing AWS destinations and sources",
|
||||
"task_id": task_id,
|
||||
}
|
||||
|
||||
if task_id and is_task_active(function, task_id, None):
|
||||
log_data["message"] = "Skipping task: Task is already active"
|
||||
current_app.logger.debug(log_data)
|
||||
return
|
||||
|
||||
current_app.logger.debug(log_data)
|
||||
for dst in destinations_service.get_all():
|
||||
if add_aws_destination_to_sources(dst):
|
||||
log_data["message"] = "new source added"
|
||||
log_data["source"] = dst.label
|
||||
current_app.logger.debug(log_data)
|
||||
|
||||
log_data["message"] = "completed Syncing AWS destinations and sources"
|
||||
current_app.logger.debug(log_data)
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=3600)
|
||||
def certificate_reissue():
|
||||
"""
|
||||
This celery task reissues certificates which are pending reissue
|
||||
:return:
|
||||
"""
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
task_id = None
|
||||
if celery.current_task:
|
||||
task_id = celery.current_task.request.id
|
||||
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "reissuing certificates",
|
||||
"task_id": task_id,
|
||||
}
|
||||
|
||||
if task_id and is_task_active(function, task_id, None):
|
||||
log_data["message"] = "Skipping task: Task is already active"
|
||||
current_app.logger.debug(log_data)
|
||||
return
|
||||
|
||||
current_app.logger.debug(log_data)
|
||||
try:
|
||||
cli_certificate.reissue(None, True)
|
||||
except SoftTimeLimitExceeded:
|
||||
log_data["message"] = "Certificate reissue: Time limit exceeded."
|
||||
current_app.logger.error(log_data)
|
||||
sentry.captureException()
|
||||
metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
|
||||
return
|
||||
|
||||
log_data["message"] = "reissuance completed"
|
||||
current_app.logger.debug(log_data)
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=3600)
|
||||
def certificate_rotate(**kwargs):
|
||||
|
||||
"""
|
||||
This celery task rotates certificates which have been reissued but still have endpoints attached to the replaced cert
|
||||
:return:
|
||||
"""
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
task_id = None
|
||||
if celery.current_task:
|
||||
task_id = celery.current_task.request.id
|
||||
|
||||
region = kwargs.get("region")
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "rotating certificates",
|
||||
"task_id": task_id,
|
||||
}
|
||||
|
||||
if task_id and is_task_active(function, task_id, None):
|
||||
log_data["message"] = "Skipping task: Task is already active"
|
||||
current_app.logger.debug(log_data)
|
||||
return
|
||||
|
||||
current_app.logger.debug(log_data)
|
||||
try:
|
||||
if region:
|
||||
log_data["region"] = region
|
||||
cli_certificate.rotate_region(None, None, None, None, True, region)
|
||||
else:
|
||||
cli_certificate.rotate(None, None, None, None, True)
|
||||
except SoftTimeLimitExceeded:
|
||||
log_data["message"] = "Certificate rotate: Time limit exceeded."
|
||||
current_app.logger.error(log_data)
|
||||
sentry.captureException()
|
||||
metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
|
||||
return
|
||||
|
||||
log_data["message"] = "rotation completed"
|
||||
current_app.logger.debug(log_data)
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=3600)
|
||||
def endpoints_expire():
|
||||
"""
|
||||
This celery task removes all endpoints that have not been recently updated
|
||||
:return:
|
||||
"""
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
task_id = None
|
||||
if celery.current_task:
|
||||
task_id = celery.current_task.request.id
|
||||
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "endpoints expire",
|
||||
"task_id": task_id,
|
||||
}
|
||||
|
||||
if task_id and is_task_active(function, task_id, None):
|
||||
log_data["message"] = "Skipping task: Task is already active"
|
||||
current_app.logger.debug(log_data)
|
||||
return
|
||||
|
||||
current_app.logger.debug(log_data)
|
||||
try:
|
||||
cli_endpoints.expire(2) # Time in hours
|
||||
except SoftTimeLimitExceeded:
|
||||
log_data["message"] = "endpoint expire: Time limit exceeded."
|
||||
current_app.logger.error(log_data)
|
||||
sentry.captureException()
|
||||
metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
|
||||
return
|
||||
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=600)
|
||||
def get_all_zones():
|
||||
"""
|
||||
This celery task syncs all zones from the available DNS providers
|
||||
:return:
|
||||
"""
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
task_id = None
|
||||
if celery.current_task:
|
||||
task_id = celery.current_task.request.id
|
||||
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "refresh all zones from available DNS providers",
|
||||
"task_id": task_id,
|
||||
}
|
||||
|
||||
if task_id and is_task_active(function, task_id, None):
|
||||
log_data["message"] = "Skipping task: Task is already active"
|
||||
current_app.logger.debug(log_data)
|
||||
return
|
||||
|
||||
current_app.logger.debug(log_data)
|
||||
try:
|
||||
cli_dns_providers.get_all_zones()
|
||||
except SoftTimeLimitExceeded:
|
||||
log_data["message"] = "get all zones: Time limit exceeded."
|
||||
current_app.logger.error(log_data)
|
||||
sentry.captureException()
|
||||
metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
|
||||
return
|
||||
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=3600)
|
||||
def check_revoked():
|
||||
"""
|
||||
This celery task attempts to check if any certs are revoked
|
||||
:return:
|
||||
"""
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
task_id = None
|
||||
if celery.current_task:
|
||||
task_id = celery.current_task.request.id
|
||||
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "check if any certificates are revoked revoked",
|
||||
"task_id": task_id,
|
||||
}
|
||||
|
||||
if task_id and is_task_active(function, task_id, None):
|
||||
log_data["message"] = "Skipping task: Task is already active"
|
||||
current_app.logger.debug(log_data)
|
||||
return
|
||||
|
||||
current_app.logger.debug(log_data)
|
||||
try:
|
||||
cli_certificate.check_revoked()
|
||||
except SoftTimeLimitExceeded:
|
||||
log_data["message"] = "Checking revoked: Time limit exceeded."
|
||||
current_app.logger.error(log_data)
|
||||
sentry.captureException()
|
||||
metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
|
||||
return
|
||||
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=3600)
|
||||
def notify_expirations():
|
||||
"""
|
||||
This celery task notifies about expiring certs
|
||||
:return:
|
||||
"""
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
task_id = None
|
||||
if celery.current_task:
|
||||
task_id = celery.current_task.request.id
|
||||
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "notify for cert expiration",
|
||||
"task_id": task_id,
|
||||
}
|
||||
|
||||
if task_id and is_task_active(function, task_id, None):
|
||||
log_data["message"] = "Skipping task: Task is already active"
|
||||
current_app.logger.debug(log_data)
|
||||
return
|
||||
|
||||
current_app.logger.debug(log_data)
|
||||
try:
|
||||
cli_notification.expirations(
|
||||
current_app.config.get("EXCLUDE_CN_FROM_NOTIFICATION", [])
|
||||
)
|
||||
except SoftTimeLimitExceeded:
|
||||
log_data["message"] = "Notify expiring Time limit exceeded."
|
||||
current_app.logger.error(log_data)
|
||||
sentry.captureException()
|
||||
metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
|
||||
return
|
||||
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=3600)
|
||||
def enable_autorotate_for_certs_attached_to_endpoint():
|
||||
"""
|
||||
This celery task automatically enables autorotation for unexpired certificates that are
|
||||
attached to an endpoint but do not have autorotate enabled.
|
||||
:return:
|
||||
"""
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
task_id = None
|
||||
if celery.current_task:
|
||||
task_id = celery.current_task.request.id
|
||||
|
||||
log_data = {
|
||||
"function": function,
|
||||
"task_id": task_id,
|
||||
"message": "Enabling autorotate to eligible certificates",
|
||||
}
|
||||
current_app.logger.debug(log_data)
|
||||
|
||||
cli_certificate.automatically_enable_autorotate()
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
288 lemur/common/defaults.py Normal file
@@ -0,0 +1,288 @@
|
||||
import re
|
||||
import unicodedata
|
||||
|
||||
from cryptography import x509
|
||||
from cryptography.hazmat.primitives.serialization import Encoding
|
||||
from flask import current_app
|
||||
|
||||
from lemur.common.utils import is_selfsigned
|
||||
from lemur.extensions import sentry
|
||||
from lemur.constants import SAN_NAMING_TEMPLATE, DEFAULT_NAMING_TEMPLATE
|
||||
|
||||
|
||||
def text_to_slug(value, joiner="-"):
|
||||
"""
|
||||
Normalize a string to a "slug" value, stripping character accents and removing non-alphanum characters.
|
||||
A series of non-alphanumeric characters is replaced with the joiner character.
|
||||
"""
|
||||
|
||||
# Strip all character accents: decompose Unicode characters and then drop combining chars.
|
||||
value = "".join(
|
||||
c for c in unicodedata.normalize("NFKD", value) if not unicodedata.combining(c)
|
||||
)
|
||||
|
||||
# Replace all remaining non-alphanumeric characters with joiner string. Multiple characters get collapsed into a
|
||||
# single joiner. Except, keep 'xn--' used in IDNA domain names as is.
|
||||
value = re.sub(r"[^A-Za-z0-9.]+(?<!xn--)", joiner, value)
|
||||
|
||||
# '-' in the beginning or end of string looks ugly.
|
||||
return value.strip(joiner)
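# Example (illustrative): text_to_slug("Exämple Org, Inc.") returns "Example-Org-Inc.";
# the accent is stripped, and the space and comma collapse into a single joiner.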
|
||||
|
||||
|
||||
def certificate_name(common_name, issuer, not_before, not_after, san):
|
||||
"""
|
||||
Create a name for our certificate. A naming standard
|
||||
is based on a series of templates. The name includes
|
||||
useful information such as Common Name, Validation dates,
|
||||
and Issuer.
|
||||
|
||||
:param san:
|
||||
:param common_name:
|
||||
:param not_after:
|
||||
:param issuer:
|
||||
:param not_before:
|
||||
:rtype: str
|
||||
:return:
|
||||
"""
|
||||
if san:
|
||||
t = SAN_NAMING_TEMPLATE
|
||||
else:
|
||||
t = DEFAULT_NAMING_TEMPLATE
|
||||
|
||||
temp = t.format(
|
||||
subject=common_name,
|
||||
issuer=issuer.replace(" ", ""),
|
||||
not_before=not_before.strftime("%Y%m%d"),
|
||||
not_after=not_after.strftime("%Y%m%d"),
|
||||
)
|
||||
|
||||
temp = temp.replace("*", "WILDCARD")
|
||||
return text_to_slug(temp)
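# Example (cf. the API samples earlier in this diff): a non-SAN certificate for
# "*.test.example.net" issued by "Symantec Corporation", valid 2016-06-03 through
# 2018-01-12, is named "WILDCARD.test.example.net-SymantecCorporation-20160603-20180112".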
|
||||
|
||||
|
||||
def signing_algorithm(cert):
|
||||
return cert.signature_hash_algorithm.name
|
||||
|
||||
|
||||
def common_name(cert):
|
||||
"""
|
||||
Attempts to get a sane common name from a given certificate.
|
||||
|
||||
:param cert:
|
||||
:return: Common name or None
|
||||
"""
|
||||
try:
|
||||
subject_oid = cert.subject.get_attributes_for_oid(x509.OID_COMMON_NAME)
|
||||
if len(subject_oid) > 0:
|
||||
return subject_oid[0].value.strip()
|
||||
return None
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
current_app.logger.error(
|
||||
{
|
||||
"message": "Unable to get common name",
|
||||
"error": e,
|
||||
"public_key": cert.public_bytes(Encoding.PEM).decode("utf-8")
|
||||
},
|
||||
exc_info=True
|
||||
)
|
||||
|
||||
|
||||
def organization(cert):
|
||||
"""
|
||||
Attempt to get the organization name from a given certificate.
|
||||
:param cert:
|
||||
:return:
|
||||
"""
|
||||
try:
|
||||
return cert.subject.get_attributes_for_oid(x509.OID_ORGANIZATION_NAME)[
|
||||
0
|
||||
].value.strip()
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
current_app.logger.error("Unable to get organization! {0}".format(e))
|
||||
|
||||
|
||||
def organizational_unit(cert):
|
||||
"""
|
||||
Attempt to get the organization unit from a given certificate.
|
||||
:param cert:
|
||||
:return:
|
||||
"""
|
||||
try:
|
||||
return cert.subject.get_attributes_for_oid(x509.OID_ORGANIZATIONAL_UNIT_NAME)[
|
||||
0
|
||||
].value.strip()
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
current_app.logger.error("Unable to get organizational unit! {0}".format(e))
|
||||
|
||||
|
||||
def country(cert):
|
||||
"""
|
||||
Attempt to get the country from a given certificate.
|
||||
:param cert:
|
||||
:return:
|
||||
"""
|
||||
try:
|
||||
return cert.subject.get_attributes_for_oid(x509.OID_COUNTRY_NAME)[
|
||||
0
|
||||
].value.strip()
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
current_app.logger.error("Unable to get country! {0}".format(e))
|
||||
|
||||
|
||||
def state(cert):
|
||||
"""
|
||||
Attempt to get the state or province from a given certificate.
|
||||
:param cert:
|
||||
:return:
|
||||
"""
|
||||
try:
|
||||
return cert.subject.get_attributes_for_oid(x509.OID_STATE_OR_PROVINCE_NAME)[
|
||||
0
|
||||
].value.strip()
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
current_app.logger.error("Unable to get state! {0}".format(e))
|
||||
|
||||
|
||||
def location(cert):
|
||||
"""
|
||||
Attempt to get the location name from a given certificate.
|
||||
:param cert:
|
||||
:return:
|
||||
"""
|
||||
try:
|
||||
return cert.subject.get_attributes_for_oid(x509.OID_LOCALITY_NAME)[
|
||||
0
|
||||
].value.strip()
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
current_app.logger.error("Unable to get location! {0}".format(e))
|
||||
|
||||
|
||||
def domains(cert):
|
||||
"""
|
||||
Attempts to get any domains listed in a certificate.
|
||||
If the 'subjectAltName' extension is not available, an empty list is returned.
|
||||
|
||||
:param cert:
|
||||
:return: List of domains
|
||||
"""
|
||||
domains = []
|
||||
try:
|
||||
ext = cert.extensions.get_extension_for_oid(x509.OID_SUBJECT_ALTERNATIVE_NAME)
|
||||
entries = ext.value.get_values_for_type(x509.DNSName)
|
||||
for entry in entries:
|
||||
domains.append(entry)
|
||||
except x509.ExtensionNotFound:
|
||||
if current_app.config.get("LOG_SSL_SUBJ_ALT_NAME_ERRORS", True):
|
||||
sentry.captureException()
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
|
||||
return domains
|
||||
|
||||
|
||||
def serial(cert):
|
||||
"""
|
||||
Fetch the serial number from the certificate.
|
||||
|
||||
:param cert:
|
||||
:return: serial number
|
||||
"""
|
||||
return cert.serial_number
|
||||
|
||||
|
||||
def san(cert):
|
||||
"""
|
||||
Determines if a given certificate is a SAN certificate.
|
||||
SAN certificates are simply certificates that cover multiple domains.
|
||||
|
||||
:param cert:
|
||||
:return: Bool
|
||||
"""
|
||||
if len(domains(cert)) > 1:
|
||||
return True
|
||||
|
||||
|
||||
def is_wildcard(cert):
|
||||
"""
|
||||
Determines if certificate is a wildcard certificate.
|
||||
|
||||
:param cert:
|
||||
:return: Bool
|
||||
"""
|
||||
d = domains(cert)
|
||||
if len(d) == 1 and d[0][0:1] == "*":
|
||||
return True
|
||||
|
||||
if cert.subject.get_attributes_for_oid(x509.OID_COMMON_NAME)[0].value[0:1] == "*":
|
||||
return True
|
||||
|
||||
|
||||
def bitstrength(cert):
|
||||
"""
|
||||
Calculates a certificate's public key bit length.
|
||||
|
||||
:param cert:
|
||||
:return: Integer
|
||||
"""
|
||||
try:
|
||||
return cert.public_key().key_size
|
||||
except AttributeError:
|
||||
sentry.captureException()
|
||||
current_app.logger.debug("Unable to get bitstrength.")
|
||||
|
||||
|
||||
def issuer(cert):
|
||||
"""
|
||||
Gets a sane issuer slug from a given certificate, stripping non-alphanumeric characters.
|
||||
|
||||
For self-signed certificates, the special value '<selfsigned>' is returned.
|
||||
If issuer cannot be determined, '<unknown>' is returned.
|
||||
|
||||
:param cert: Parsed certificate object
|
||||
:return: Issuer slug
|
||||
"""
|
||||
# If certificate is self-signed, we return a special value -- there really is no distinct "issuer" for it
|
||||
if is_selfsigned(cert):
|
||||
return "<selfsigned>"
|
||||
|
||||
# Try Common Name or fall back to Organization name
|
||||
attrs = cert.issuer.get_attributes_for_oid(
|
||||
x509.OID_COMMON_NAME
|
||||
) or cert.issuer.get_attributes_for_oid(x509.OID_ORGANIZATION_NAME)
|
||||
if not attrs:
|
||||
current_app.logger.error(
|
||||
"Unable to get issuer! Cert serial {:x}".format(cert.serial_number)
|
||||
)
|
||||
return "<unknown>"
|
||||
|
||||
return text_to_slug(attrs[0].value, "")
|
||||
|
||||
|
||||
def not_before(cert):
|
||||
"""
|
||||
Gets the naive datetime of the certificate's 'not_before' field.
|
||||
This field denotes the first point in time at which the given certificate
|
||||
is valid.
|
||||
|
||||
:param cert:
|
||||
:return: Datetime
|
||||
"""
|
||||
return cert.not_valid_before
|
||||
|
||||
|
||||
def not_after(cert):
|
||||
"""
|
||||
Gets the naive datetime of the certificate's 'not_after' field.
|
||||
This field denotes the last point in time at which the given certificate
|
||||
is valid.
|
||||
|
||||
:return: Datetime
|
||||
"""
|
||||
return cert.not_valid_after
|
||||
443
lemur/common/fields.py
Normal file
@ -0,0 +1,443 @@
|
||||
"""
|
||||
.. module: lemur.common.fields
|
||||
:platform: Unix
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
"""
|
||||
import arrow
|
||||
import warnings
|
||||
import ipaddress
|
||||
|
||||
from flask import current_app
|
||||
from datetime import datetime as dt
|
||||
|
||||
from cryptography import x509
|
||||
|
||||
from marshmallow import utils
|
||||
from marshmallow.fields import Field
|
||||
from marshmallow.exceptions import ValidationError
|
||||
|
||||
from lemur.common import validators
|
||||
|
||||
|
||||
class Hex(Field):
|
||||
"""
|
||||
A hex formatted string.
|
||||
"""
|
||||
|
||||
def _serialize(self, value, attr, obj):
|
||||
if value:
|
||||
value = hex(int(value))[2:].upper()
|
||||
return value
|
||||
|
||||
|
||||
class ArrowDateTime(Field):
|
||||
"""A formatted datetime string in UTC.
|
||||
|
||||
Example: ``'2014-12-22T03:12:58.019077+00:00'``
|
||||
|
||||
Timezone-naive `datetime` objects are converted to
|
||||
UTC (+00:00) by :meth:`Schema.dump <marshmallow.Schema.dump>`.
|
||||
:meth:`Schema.load <marshmallow.Schema.load>` returns `datetime`
|
||||
objects that are timezone-aware.
|
||||
|
||||
:param str format: Either ``"rfc"`` (for RFC822), ``"iso"`` (for ISO8601),
|
||||
or a date format string. If `None`, defaults to "iso".
|
||||
:param kwargs: The same keyword arguments that :class:`Field` receives.
|
||||
|
||||
"""
|
||||
|
||||
DATEFORMAT_SERIALIZATION_FUNCS = {
|
||||
"iso": utils.isoformat,
|
||||
"iso8601": utils.isoformat,
|
||||
"rfc": utils.rfcformat,
|
||||
"rfc822": utils.rfcformat,
|
||||
}
|
||||
|
||||
DATEFORMAT_DESERIALIZATION_FUNCS = {
|
||||
"iso": utils.from_iso,
|
||||
"iso8601": utils.from_iso,
|
||||
"rfc": utils.from_rfc,
|
||||
"rfc822": utils.from_rfc,
|
||||
}
|
||||
|
||||
DEFAULT_FORMAT = "iso"
|
||||
|
||||
localtime = False
|
||||
default_error_messages = {
|
||||
"invalid": "Not a valid datetime.",
|
||||
"format": '"{input}" cannot be formatted as a datetime.',
|
||||
}
|
||||
|
||||
def __init__(self, format=None, **kwargs):
|
||||
super(ArrowDateTime, self).__init__(**kwargs)
|
||||
# Allow this to be None. It may be set later in the ``_serialize``
|
||||
# or ``_deserialize`` methods. This allows a Schema to dynamically set the
|
||||
# dateformat, e.g. from a Meta option
|
||||
self.dateformat = format
|
||||
|
||||
def _add_to_schema(self, field_name, schema):
|
||||
super(ArrowDateTime, self)._add_to_schema(field_name, schema)
|
||||
self.dateformat = self.dateformat or schema.opts.dateformat
|
||||
|
||||
def _serialize(self, value, attr, obj):
|
||||
if value is None:
|
||||
return None
|
||||
self.dateformat = self.dateformat or self.DEFAULT_FORMAT
|
||||
format_func = self.DATEFORMAT_SERIALIZATION_FUNCS.get(self.dateformat, None)
|
||||
if format_func:
|
||||
try:
|
||||
return format_func(value, localtime=self.localtime)
|
||||
except (AttributeError, ValueError) as err:
|
||||
self.fail("format", input=value)
|
||||
else:
|
||||
return value.strftime(self.dateformat)
|
||||
|
||||
def _deserialize(self, value, attr, data):
|
||||
if not value: # Falsy values, e.g. '', None, [] are not valid
|
||||
raise self.fail("invalid")
|
||||
self.dateformat = self.dateformat or self.DEFAULT_FORMAT
|
||||
func = self.DATEFORMAT_DESERIALIZATION_FUNCS.get(self.dateformat)
|
||||
if func:
|
||||
try:
|
||||
return arrow.get(func(value))
|
||||
except (TypeError, AttributeError, ValueError):
|
||||
raise self.fail("invalid")
|
||||
elif self.dateformat:
|
||||
try:
|
||||
return dt.datetime.strptime(value, self.dateformat)
|
||||
except (TypeError, AttributeError, ValueError):
|
||||
raise self.fail("invalid")
|
||||
elif utils.dateutil_available:
|
||||
try:
|
||||
return arrow.get(utils.from_datestring(value))
|
||||
except TypeError:
|
||||
raise self.fail("invalid")
|
||||
else:
|
||||
warnings.warn(
|
||||
"It is recommended that you install python-dateutil "
|
||||
"for improved datetime deserialization."
|
||||
)
|
||||
raise self.fail("invalid")
|
||||
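A minimal sketch of using this field on a plain marshmallow schema; CertificateSketchSchema is an illustrative name, not a schema defined in this diff:

# Illustrative only; assumes the module-level imports above (arrow, marshmallow).
from marshmallow import Schema

class CertificateSketchSchema(Schema):
    not_after = ArrowDateTime()  # defaults to ISO 8601 ("iso") formatting

result = CertificateSketchSchema().dump({"not_after": arrow.utcnow().datetime})
# result.data carries an ISO 8601 string such as '2018-12-22T03:12:58.019077+00:00';
# loading the same string back produces a timezone-aware Arrow object.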
|
||||
|
||||
class KeyUsageExtension(Field):
|
||||
"""An x509.KeyUsage ExtensionType object
|
||||
|
||||
Dict of KeyUsage names/values are deserialized into an x509.KeyUsage object
|
||||
and back.
|
||||
|
||||
:param kwargs: The same keyword arguments that :class:`Field` receives.
|
||||
|
||||
"""
|
||||
|
||||
def _serialize(self, value, attr, obj):
|
||||
return {
|
||||
"useDigitalSignature": value.digital_signature,
|
||||
"useNonRepudiation": value.content_commitment,
|
||||
"useKeyEncipherment": value.key_encipherment,
|
||||
"useDataEncipherment": value.data_encipherment,
|
||||
"useKeyAgreement": value.key_agreement,
|
||||
"useKeyCertSign": value.key_cert_sign,
|
||||
"useCRLSign": value.crl_sign,
|
||||
"useEncipherOnly": value._encipher_only,
|
||||
"useDecipherOnly": value._decipher_only,
|
||||
}
|
||||
|
||||
def _deserialize(self, value, attr, data):
|
||||
keyusages = {
|
||||
"digital_signature": False,
|
||||
"content_commitment": False,
|
||||
"key_encipherment": False,
|
||||
"data_encipherment": False,
|
||||
"key_agreement": False,
|
||||
"key_cert_sign": False,
|
||||
"crl_sign": False,
|
||||
"encipher_only": False,
|
||||
"decipher_only": False,
|
||||
}
|
||||
|
||||
for k, v in value.items():
|
||||
if k == "useDigitalSignature":
|
||||
keyusages["digital_signature"] = v
|
||||
|
||||
elif k == "useNonRepudiation":
|
||||
keyusages["content_commitment"] = v
|
||||
|
||||
elif k == "useKeyEncipherment":
|
||||
keyusages["key_encipherment"] = v
|
||||
|
||||
elif k == "useDataEncipherment":
|
||||
keyusages["data_encipherment"] = v
|
||||
|
||||
elif k == "useKeyCertSign":
|
||||
keyusages["key_cert_sign"] = v
|
||||
|
||||
elif k == "useCRLSign":
|
||||
keyusages["crl_sign"] = v
|
||||
|
||||
elif k == "useKeyAgreement":
|
||||
keyusages["key_agreement"] = v
|
||||
|
||||
elif k == "useEncipherOnly" and v:
|
||||
keyusages["encipher_only"] = True
|
||||
keyusages["key_agreement"] = True
|
||||
|
||||
elif k == "useDecipherOnly" and v:
|
||||
keyusages["decipher_only"] = True
|
||||
keyusages["key_agreement"] = True
|
||||
|
||||
if keyusages["encipher_only"] and keyusages["decipher_only"]:
|
||||
raise ValidationError(
|
||||
"A certificate cannot have both Encipher Only and Decipher Only Extended Key Usages."
|
||||
)
|
||||
|
||||
return x509.KeyUsage(
|
||||
digital_signature=keyusages["digital_signature"],
|
||||
content_commitment=keyusages["content_commitment"],
|
||||
key_encipherment=keyusages["key_encipherment"],
|
||||
data_encipherment=keyusages["data_encipherment"],
|
||||
key_agreement=keyusages["key_agreement"],
|
||||
key_cert_sign=keyusages["key_cert_sign"],
|
||||
crl_sign=keyusages["crl_sign"],
|
||||
encipher_only=keyusages["encipher_only"],
|
||||
decipher_only=keyusages["decipher_only"],
|
||||
)
|
||||
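A quick sketch of the dict shape KeyUsageExtension consumes; the values shown are illustrative:

# Illustrative only; keys not listed in _deserialize are silently ignored.
field = KeyUsageExtension()
usage = field._deserialize({"useDigitalSignature": True, "useKeyEncipherment": True}, None, None)
# usage is an x509.KeyUsage with digital_signature and key_encipherment set to True
# and every other flag, including encipher_only/decipher_only, left False.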
|
||||
|
||||
class ExtendedKeyUsageExtension(Field):
|
||||
"""An x509.ExtendedKeyUsage ExtensionType object
|
||||
|
||||
Dict of ExtendedKeyUsage names/values are deserialized into an x509.ExtendedKeyUsage object
|
||||
and back.
|
||||
|
||||
:param kwargs: The same keyword arguments that :class:`Field` receives.
|
||||
|
||||
"""
|
||||
|
||||
def _serialize(self, value, attr, obj):
|
||||
usages = value._usages
|
||||
usage_list = {}
|
||||
for usage in usages:
|
||||
if usage == x509.oid.ExtendedKeyUsageOID.CLIENT_AUTH:
|
||||
usage_list["useClientAuthentication"] = True
|
||||
|
||||
elif usage == x509.oid.ExtendedKeyUsageOID.SERVER_AUTH:
|
||||
usage_list["useServerAuthentication"] = True
|
||||
|
||||
elif usage == x509.oid.ExtendedKeyUsageOID.CODE_SIGNING:
|
||||
usage_list["useCodeSigning"] = True
|
||||
|
||||
elif usage == x509.oid.ExtendedKeyUsageOID.EMAIL_PROTECTION:
|
||||
usage_list["useEmailProtection"] = True
|
||||
|
||||
elif usage == x509.oid.ExtendedKeyUsageOID.TIME_STAMPING:
|
||||
usage_list["useTimestamping"] = True
|
||||
|
||||
elif usage == x509.oid.ExtendedKeyUsageOID.OCSP_SIGNING:
|
||||
usage_list["useOCSPSigning"] = True
|
||||
|
||||
elif usage.dotted_string == "1.3.6.1.5.5.7.3.14":
|
||||
usage_list["useEapOverLAN"] = True
|
||||
|
||||
elif usage.dotted_string == "1.3.6.1.5.5.7.3.13":
|
||||
usage_list["useEapOverPPP"] = True
|
||||
|
||||
elif usage.dotted_string == "1.3.6.1.4.1.311.20.2.2":
|
||||
usage_list["useSmartCardLogon"] = True
|
||||
|
||||
else:
|
||||
current_app.logger.warning(
|
||||
"Unable to serialize ExtendedKeyUsage with OID: {usage}".format(
|
||||
usage=usage.dotted_string
|
||||
)
|
||||
)
|
||||
|
||||
return usage_list
|
||||
|
||||
def _deserialize(self, value, attr, data):
|
||||
usage_oids = []
|
||||
for k, v in value.items():
|
||||
if k == "useClientAuthentication" and v:
|
||||
usage_oids.append(x509.oid.ExtendedKeyUsageOID.CLIENT_AUTH)
|
||||
|
||||
elif k == "useServerAuthentication" and v:
|
||||
usage_oids.append(x509.oid.ExtendedKeyUsageOID.SERVER_AUTH)
|
||||
|
||||
elif k == "useCodeSigning" and v:
|
||||
usage_oids.append(x509.oid.ExtendedKeyUsageOID.CODE_SIGNING)
|
||||
|
||||
elif k == "useEmailProtection" and v:
|
||||
usage_oids.append(x509.oid.ExtendedKeyUsageOID.EMAIL_PROTECTION)
|
||||
|
||||
elif k == "useTimestamping" and v:
|
||||
usage_oids.append(x509.oid.ExtendedKeyUsageOID.TIME_STAMPING)
|
||||
|
||||
elif k == "useOCSPSigning" and v:
|
||||
usage_oids.append(x509.oid.ExtendedKeyUsageOID.OCSP_SIGNING)
|
||||
|
||||
elif k == "useEapOverLAN" and v:
|
||||
usage_oids.append(x509.oid.ObjectIdentifier("1.3.6.1.5.5.7.3.14"))
|
||||
|
||||
elif k == "useEapOverPPP" and v:
|
||||
usage_oids.append(x509.oid.ObjectIdentifier("1.3.6.1.5.5.7.3.13"))
|
||||
|
||||
elif k == "useSmartCardLogon" and v:
|
||||
usage_oids.append(x509.oid.ObjectIdentifier("1.3.6.1.4.1.311.20.2.2"))
|
||||
|
||||
else:
|
||||
current_app.logger.warning(
|
||||
"Unable to deserialize ExtendedKeyUsage with name: {key}".format(
|
||||
key=k
|
||||
)
|
||||
)
|
||||
|
||||
return x509.ExtendedKeyUsage(usage_oids)
|
||||
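And the same idea for ExtendedKeyUsageExtension; only truthy values contribute an OID:

# Illustrative only; unknown or falsy keys fall through to the warning branch.
field = ExtendedKeyUsageExtension()
eku = field._deserialize({"useClientAuthentication": True, "useServerAuthentication": True}, None, None)
# eku is an x509.ExtendedKeyUsage containing the CLIENT_AUTH and SERVER_AUTH OIDs.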
|
||||
|
||||
class BasicConstraintsExtension(Field):
|
||||
"""An x509.BasicConstraints ExtensionType object
|
||||
|
||||
Dict of CA boolean and a path_length integer names/values are deserialized into an x509.BasicConstraints object
|
||||
and back.
|
||||
|
||||
:param kwargs: The same keyword arguments that :class:`Field` receives.
|
||||
|
||||
"""
|
||||
|
||||
def _serialize(self, value, attr, obj):
|
||||
return {"ca": value.ca, "path_length": value.path_length}
|
||||
|
||||
def _deserialize(self, value, attr, data):
|
||||
ca = value.get("ca", False)
|
||||
path_length = value.get("path_length", None)
|
||||
|
||||
if ca:
|
||||
if not isinstance(path_length, (type(None), int)):
|
||||
raise ValidationError(
|
||||
"A CA certificate path_length (for BasicConstraints) must be None or an integer."
|
||||
)
|
||||
return x509.BasicConstraints(ca=True, path_length=path_length)
|
||||
else:
|
||||
return x509.BasicConstraints(ca=False, path_length=None)
|
||||
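A short sketch of both branches of BasicConstraintsExtension; the inputs are illustrative:

# Illustrative only.
field = BasicConstraintsExtension()
ca_constraints = field._deserialize({"ca": True, "path_length": 0}, None, None)
leaf_constraints = field._deserialize({}, None, None)
# ca_constraints   -> x509.BasicConstraints(ca=True, path_length=0)
# leaf_constraints -> x509.BasicConstraints(ca=False, path_length=None)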
|
||||
|
||||
class SubjectAlternativeNameExtension(Field):
|
||||
"""An x509.SubjectAlternativeName ExtensionType object
|
||||
|
||||
A list of dicts of GeneralName types and values is deserialized into an x509.SubjectAlternativeName object
|
||||
and back.
|
||||
|
||||
:param kwargs: The same keyword arguments that :class:`Field` receives.
|
||||
|
||||
"""
|
||||
|
||||
def _serialize(self, value, attr, obj):
|
||||
general_names = []
|
||||
name_type = None
|
||||
|
||||
if value:
|
||||
for name in value._general_names:
|
||||
value = name.value
|
||||
|
||||
if isinstance(name, x509.DNSName):
|
||||
name_type = "DNSName"
|
||||
|
||||
elif isinstance(name, x509.IPAddress):
|
||||
if isinstance(value, ipaddress.IPv4Network):
|
||||
name_type = "IPNetwork"
|
||||
else:
|
||||
name_type = "IPAddress"
|
||||
|
||||
value = str(value)
|
||||
|
||||
elif isinstance(name, x509.UniformResourceIdentifier):
|
||||
name_type = "uniformResourceIdentifier"
|
||||
|
||||
elif isinstance(name, x509.DirectoryName):
|
||||
name_type = "directoryName"
|
||||
|
||||
elif isinstance(name, x509.RFC822Name):
|
||||
name_type = "rfc822Name"
|
||||
|
||||
elif isinstance(name, x509.RegisteredID):
|
||||
name_type = "registeredID"
|
||||
value = value.dotted_string
|
||||
else:
|
||||
current_app.logger.warning(
|
||||
"Unknown SubAltName type: {name}".format(name=name)
|
||||
)
|
||||
continue
|
||||
|
||||
general_names.append({"nameType": name_type, "value": value})
|
||||
|
||||
return general_names
|
||||
|
||||
def _deserialize(self, value, attr, data):
|
||||
general_names = []
|
||||
for name in value:
|
||||
if name["nameType"] == "DNSName":
|
||||
validators.sensitive_domain(name["value"])
|
||||
general_names.append(x509.DNSName(name["value"]))
|
||||
|
||||
elif name["nameType"] == "IPAddress":
|
||||
general_names.append(
|
||||
x509.IPAddress(ipaddress.ip_address(name["value"]))
|
||||
)
|
||||
|
||||
elif name["nameType"] == "IPNetwork":
|
||||
general_names.append(
|
||||
x509.IPAddress(ipaddress.ip_network(name["value"]))
|
||||
)
|
||||
|
||||
elif name["nameType"] == "uniformResourceIdentifier":
|
||||
general_names.append(x509.UniformResourceIdentifier(name["value"]))
|
||||
|
||||
elif name["nameType"] == "directoryName":
|
||||
# TODO: Need to parse a string in name['value'] like:
|
||||
# 'CN=Common Name, O=Org Name, OU=OrgUnit Name, C=US, ST=ST, L=City/emailAddress=person@example.com'
|
||||
# or
|
||||
# 'CN=Common Name/O=Org Name/OU=OrgUnit Name/C=US/ST=NH/L=City/emailAddress=person@example.com'
|
||||
# and turn it into something like:
|
||||
# x509.Name([
|
||||
# x509.NameAttribute(x509.OID_COMMON_NAME, "Common Name"),
|
||||
# x509.NameAttribute(x509.OID_ORGANIZATION_NAME, "Org Name"),
|
||||
# x509.NameAttribute(x509.OID_ORGANIZATIONAL_UNIT_NAME, "OrgUnit Name"),
|
||||
# x509.NameAttribute(x509.OID_COUNTRY_NAME, "US"),
|
||||
# x509.NameAttribute(x509.OID_STATE_OR_PROVINCE_NAME, "NH"),
|
||||
# x509.NameAttribute(x509.OID_LOCALITY_NAME, "City"),
|
||||
# x509.NameAttribute(x509.OID_EMAIL_ADDRESS, "person@example.com")
|
||||
# ]
|
||||
# general_names.append(x509.DirectoryName(x509.Name(BLAH))))
|
||||
pass
|
||||
|
||||
elif name["nameType"] == "rfc822Name":
|
||||
general_names.append(x509.RFC822Name(name["value"]))
|
||||
|
||||
elif name["nameType"] == "registeredID":
|
||||
general_names.append(
|
||||
x509.RegisteredID(x509.ObjectIdentifier(name["value"]))
|
||||
)
|
||||
|
||||
elif name["nameType"] == "otherName":
|
||||
# This has two inputs (type and value), so it doesn't fit the mold of the rest of these GeneralName entities.
|
||||
# general_names.append(x509.OtherName(name['type'], bytes(name['value']), 'utf-8'))
|
||||
pass
|
||||
|
||||
elif name["nameType"] == "x400Address":
|
||||
# The Python Cryptography library doesn't support x400Address types (yet?)
|
||||
pass
|
||||
|
||||
elif name["nameType"] == "EDIPartyName":
|
||||
# The Python Cryptography library doesn't support EDIPartyName types (yet?)
|
||||
pass
|
||||
|
||||
else:
|
||||
current_app.logger.warning(
|
||||
"Unable to deserialize SubAltName with type: {name_type}".format(
|
||||
name_type=name["nameType"]
|
||||
)
|
||||
)
|
||||
|
||||
return x509.SubjectAlternativeName(general_names)
|
||||
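A sketch of the list-of-dicts input SubjectAlternativeNameExtension expects; it assumes an application context, since the DNSName branch calls validators.sensitive_domain, and the example domain is assumed not to be flagged as sensitive:

# Illustrative only; app context and a non-sensitive domain are assumed.
field = SubjectAlternativeNameExtension()
san_ext = field._deserialize(
    [
        {"nameType": "DNSName", "value": "www.example.com"},
        {"nameType": "IPAddress", "value": "192.0.2.10"},
    ],
    None,
    None,
)
# san_ext is an x509.SubjectAlternativeName wrapping one DNSName and one IPAddress.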
@ -1,16 +1,29 @@
|
||||
"""
|
||||
.. module: lemur.common.health
|
||||
:platform: Unix
|
||||
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
"""
|
||||
from flask import Blueprint
|
||||
from lemur.database import db
|
||||
from lemur.extensions import sentry
|
||||
|
||||
mod = Blueprint('healthCheck', __name__)
|
||||
mod = Blueprint("healthCheck", __name__)
|
||||
|
||||
|
||||
@mod.route('/healthcheck')
|
||||
@mod.route("/healthcheck")
|
||||
def health():
|
||||
return 'ok'
|
||||
try:
|
||||
if healthcheck(db):
|
||||
return "ok"
|
||||
except Exception:
|
||||
sentry.captureException()
|
||||
return "db check failed"
|
||||
|
||||
|
||||
def healthcheck(db):
|
||||
with db.engine.connect() as connection:
|
||||
connection.execute("SELECT 1;")
|
||||
return True
|
||||
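A sketch of exercising the endpoint with Flask's test client; it assumes create_app() from lemur.factory (imported elsewhere in this diff) and that the healthCheck blueprint is mounted without a URL prefix:

# Sketch only; application factory and blueprint mounting are assumptions.
from lemur.factory import create_app

app = create_app()
with app.test_client() as client:
    response = client.get("/healthcheck")
    print(response.data)  # b"ok" when "SELECT 1;" succeeds, b"db check failed" otherwise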
|
||||
@ -1,13 +1,15 @@
|
||||
"""
|
||||
.. module: lemur.common.managers
|
||||
:platform: Unix
|
||||
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
"""
|
||||
from flask import current_app
|
||||
|
||||
from lemur.exceptions import InvalidConfiguration
|
||||
|
||||
|
||||
# inspired by https://github.com/getsentry/sentry
|
||||
class InstanceManager(object):
|
||||
@ -22,7 +24,8 @@ class InstanceManager(object):
|
||||
|
||||
def add(self, class_path):
|
||||
self.cache = None
|
||||
self.class_list.append(class_path)
|
||||
if class_path not in self.class_list:
|
||||
self.class_list.append(class_path)
|
||||
|
||||
def remove(self, class_path):
|
||||
self.cache = None
|
||||
@ -49,7 +52,7 @@ class InstanceManager(object):
|
||||
|
||||
results = []
|
||||
for cls_path in class_list:
|
||||
module_name, class_name = cls_path.rsplit('.', 1)
|
||||
module_name, class_name = cls_path.rsplit(".", 1)
|
||||
try:
|
||||
module = __import__(module_name, {}, {}, class_name)
|
||||
cls = getattr(module, class_name)
|
||||
@ -57,9 +60,18 @@ class InstanceManager(object):
|
||||
results.append(cls())
|
||||
else:
|
||||
results.append(cls)
|
||||
except Exception:
|
||||
current_app.logger.exception('Unable to import %s', cls_path)
|
||||
|
||||
except InvalidConfiguration as e:
|
||||
current_app.logger.warning(
|
||||
"Plugin '{0}' may not work correctly. {1}".format(class_name, e)
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
current_app.logger.exception(
|
||||
"Unable to import {0}. Reason: {1}".format(cls_path, e)
|
||||
)
|
||||
continue
|
||||
|
||||
self.cache = results
|
||||
|
||||
return results
|
||||
|
||||
25
lemur/common/missing.py
Normal file
@ -0,0 +1,25 @@
|
||||
import arrow
|
||||
from flask import current_app
|
||||
|
||||
from lemur.common.utils import is_weekend
|
||||
|
||||
|
||||
def convert_validity_years(data):
|
||||
"""
|
||||
Convert validity years to validity_start and validity_end
|
||||
|
||||
:param data:
|
||||
:return:
|
||||
"""
|
||||
if data.get("validity_years"):
|
||||
now = arrow.utcnow()
|
||||
data["validity_start"] = now.isoformat()
|
||||
|
||||
end = now.shift(years=+int(data["validity_years"]))
|
||||
|
||||
if not current_app.config.get("LEMUR_ALLOW_WEEKEND_EXPIRATION", True):
|
||||
if is_weekend(end):
|
||||
end = end.shift(days=-2)
|
||||
|
||||
data["validity_end"] = end.isoformat()
|
||||
return data
|
||||
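A short sketch of what convert_validity_years does to its input; it needs an application context because of the LEMUR_ALLOW_WEEKEND_EXPIRATION lookup:

# Illustrative only; requires a Flask application context.
data = convert_validity_years({"validity_years": 2})
# data now also carries ISO 8601 "validity_start" (now) and "validity_end" (two years out);
# when weekend expiration is disallowed and the end date lands on a weekend,
# the end date is pulled back by two days.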
52
lemur/common/redis.py
Normal file
@ -0,0 +1,52 @@
|
||||
"""
|
||||
Helper Class for Redis
|
||||
|
||||
"""
|
||||
import redis
|
||||
import sys
|
||||
from flask import current_app
|
||||
from lemur.extensions import sentry
|
||||
from lemur.factory import create_app
|
||||
|
||||
if current_app:
|
||||
flask_app = current_app
|
||||
else:
|
||||
flask_app = create_app()
|
||||
|
||||
|
||||
class RedisHandler:
|
||||
def __init__(self, host=flask_app.config.get('REDIS_HOST', 'localhost'),
|
||||
port=flask_app.config.get('REDIS_PORT', 6379),
|
||||
db=flask_app.config.get('REDIS_DB', 0)):
|
||||
self.host = host
|
||||
self.port = port
|
||||
self.db = db
|
||||
|
||||
def redis(self, db=0):
|
||||
# The decode_responses flag here directs the client to convert the responses from Redis into Python strings
|
||||
# using the default encoding utf-8. This is client specific.
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
try:
|
||||
red = redis.StrictRedis(host=self.host, port=self.port, db=self.db, encoding="utf-8", decode_responses=True)
|
||||
red.set("test", 0)
|
||||
except redis.ConnectionError:
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "Redis Connection error",
|
||||
"host": self.host,
|
||||
"port": self.port
|
||||
}
|
||||
current_app.logger.error(log_data)
|
||||
sentry.captureException()
|
||||
return red
|
||||
|
||||
|
||||
def redis_get(key, default=None):
|
||||
red = RedisHandler().redis()
|
||||
try:
|
||||
v = red.get(key)
|
||||
except redis.exceptions.ConnectionError:
|
||||
v = None
|
||||
if not v:
|
||||
return default
|
||||
return v
|
||||
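A sketch of the convenience accessor; the key name is illustrative and a reachable Redis at REDIS_HOST/REDIS_PORT (localhost:6379 by default) is assumed:

# Illustrative only; "some-cached-key" is a made-up key.
value = redis_get("some-cached-key", default="unknown")
# Falls back to "unknown" when the key is missing or Redis is unreachable.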
181
lemur/common/schema.py
Normal file
@ -0,0 +1,181 @@
|
||||
"""
|
||||
.. module: lemur.common.schema
|
||||
:platform: unix
|
||||
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||
:license: Apache, see LICENSE for more details.
|
||||
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
|
||||
"""
|
||||
from functools import wraps
|
||||
from flask import request, current_app
|
||||
|
||||
from sqlalchemy.orm.collections import InstrumentedList
|
||||
|
||||
from inflection import camelize, underscore
|
||||
from marshmallow import Schema, post_dump, pre_load
|
||||
|
||||
from lemur.extensions import sentry
|
||||
|
||||
|
||||
class LemurSchema(Schema):
|
||||
"""
|
||||
Base schema from which all Lemur schemas inherit
|
||||
"""
|
||||
|
||||
__envelope__ = True
|
||||
|
||||
def under(self, data, many=None):
|
||||
items = []
|
||||
if many:
|
||||
for i in data:
|
||||
items.append({underscore(key): value for key, value in i.items()})
|
||||
return items
|
||||
return {underscore(key): value for key, value in data.items()}
|
||||
|
||||
def camel(self, data, many=None):
|
||||
items = []
|
||||
if many:
|
||||
for i in data:
|
||||
items.append(
|
||||
{
|
||||
camelize(key, uppercase_first_letter=False): value
|
||||
for key, value in i.items()
|
||||
}
|
||||
)
|
||||
return items
|
||||
return {
|
||||
camelize(key, uppercase_first_letter=False): value
|
||||
for key, value in data.items()
|
||||
}
|
||||
|
||||
def wrap_with_envelope(self, data, many):
|
||||
if many:
|
||||
if "total" in self.context.keys():
|
||||
return dict(total=self.context["total"], items=data)
|
||||
return data
|
||||
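The under/camel helpers lean on inflection for key conversion; a quick sketch of what those calls return:

# Behavior of the inflection helpers used above.
from inflection import camelize, underscore

camelize("not_after", uppercase_first_letter=False)  # -> 'notAfter'
underscore("notAfter")                               # -> 'not_after'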
|
||||
|
||||
class LemurInputSchema(LemurSchema):
|
||||
@pre_load(pass_many=True)
|
||||
def preprocess(self, data, many):
|
||||
if isinstance(data, dict) and data.get("owner"):
|
||||
data["owner"] = data["owner"].lower()
|
||||
return self.under(data, many=many)
|
||||
|
||||
|
||||
class LemurOutputSchema(LemurSchema):
|
||||
@pre_load(pass_many=True)
|
||||
def preprocess(self, data, many):
|
||||
if many:
|
||||
data = self.unwrap_envelope(data, many)
|
||||
return self.under(data, many=many)
|
||||
|
||||
def unwrap_envelope(self, data, many):
|
||||
if many:
|
||||
if data["items"]:
|
||||
if isinstance(data, InstrumentedList) or isinstance(data, list):
|
||||
self.context["total"] = len(data)
|
||||
return data
|
||||
else:
|
||||
self.context["total"] = data["total"]
|
||||
else:
|
||||
self.context["total"] = 0
|
||||
data = {"items": []}
|
||||
|
||||
return data["items"]
|
||||
|
||||
return data
|
||||
|
||||
@post_dump(pass_many=True)
|
||||
def post_process(self, data, many):
|
||||
if data:
|
||||
data = self.camel(data, many=many)
|
||||
if self.__envelope__:
|
||||
return self.wrap_with_envelope(data, many=many)
|
||||
else:
|
||||
return data
|
||||
|
||||
|
||||
def format_errors(messages):
|
||||
errors = {}
|
||||
for k, v in messages.items():
|
||||
key = camelize(k, uppercase_first_letter=False)
|
||||
if isinstance(v, dict):
|
||||
errors[key] = format_errors(v)
|
||||
elif isinstance(v, list):
|
||||
errors[key] = v[0]
|
||||
return errors
|
||||
|
||||
|
||||
def wrap_errors(messages):
|
||||
errors = dict(message="Validation Error.")
|
||||
if messages.get("_schema"):
|
||||
errors["reasons"] = {"Schema": {"rule": messages["_schema"]}}
|
||||
else:
|
||||
errors["reasons"] = format_errors(messages)
|
||||
return errors
|
||||
|
||||
|
||||
def unwrap_pagination(data, output_schema):
|
||||
if not output_schema:
|
||||
return data
|
||||
|
||||
if isinstance(data, dict):
|
||||
if "total" in data.keys():
|
||||
if data.get("total") == 0:
|
||||
return data
|
||||
|
||||
marshaled_data = {"total": data["total"]}
|
||||
marshaled_data["items"] = output_schema.dump(data["items"], many=True).data
|
||||
return marshaled_data
|
||||
|
||||
return output_schema.dump(data).data
|
||||
|
||||
elif isinstance(data, list):
|
||||
marshaled_data = {"total": len(data)}
|
||||
marshaled_data["items"] = output_schema.dump(data, many=True).data
|
||||
return marshaled_data
|
||||
return output_schema.dump(data).data
|
||||
|
||||
|
||||
def validate_schema(input_schema, output_schema):
|
||||
def decorator(f):
|
||||
@wraps(f)
|
||||
def decorated_function(*args, **kwargs):
|
||||
if input_schema:
|
||||
if request.get_json():
|
||||
request_data = request.get_json()
|
||||
else:
|
||||
request_data = request.args
|
||||
|
||||
data, errors = input_schema.load(request_data)
|
||||
|
||||
if errors:
|
||||
return wrap_errors(errors), 400
|
||||
|
||||
kwargs["data"] = data
|
||||
|
||||
try:
|
||||
resp = f(*args, **kwargs)
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
current_app.logger.exception(e)
|
||||
return dict(message=str(e)), 500
|
||||
|
||||
if isinstance(resp, tuple):
|
||||
return resp[0], resp[1]
|
||||
|
||||
if not resp:
|
||||
return dict(message="No data found"), 404
|
||||
|
||||
if callable(output_schema):
|
||||
output_schema_to_use = output_schema()
|
||||
else:
|
||||
output_schema_to_use = output_schema
|
||||
|
||||
return unwrap_pagination(resp, output_schema_to_use), 200
|
||||
|
||||
return decorated_function
|
||||
|
||||
return decorator
|
||||
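A sketch of wiring the decorator onto a Flask view; the blueprint, route, and schemas below are illustrative, not part of this diff:

# Illustrative only; ExampleInputSchema/ExampleOutputSchema are made-up schemas.
from flask import Blueprint
from marshmallow import fields

class ExampleInputSchema(LemurInputSchema):
    label = fields.String(required=True)

class ExampleOutputSchema(LemurOutputSchema):
    label = fields.String()

example = Blueprint("example_sketch", __name__)

@example.route("/sketch", methods=["POST"])
@validate_schema(ExampleInputSchema(), ExampleOutputSchema())
def create_sketch(data=None):
    # "data" is injected by the decorator after input validation;
    # a falsy return value becomes a 404 "No data found" response.
    return data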
Some files were not shown because too many files have changed in this diff.