Compare commits: pkg/dev/ri ... master (964 commits)
Author | SHA1 | Date |
---|---|---|
Emmanuel Garette | bb5b32a435 | |
Hossein Shafagh | da75d31fac | |
Hossein Shafagh | b295679cc3 | |
Jasmine Schladen | e3db887b07 | |
Hossein Shafagh | 5d61ed4d5b | |
Hossein Shafagh | ba1c549070 | |
Hossein Shafagh | f4178fefd2 | |
Jasmine Schladen | 3a757f8f94 | |
Jasmine Schladen | 2bb1d9ee21 | |
Jasmine Schladen | 28a4d21bcc | |
Hossein Shafagh | 49800bf9da | |
Hossein Shafagh | 2c081df06b | |
Hossein Shafagh | 1636847040 | |
Jasmine Schladen | c977826d62 | |
Hossein Shafagh | dbea35ba19 | |
Chad S | 91d0b36a6a | |
charhate | 890a016ee0 | |
Hossein Shafagh | 35a933ce9b | |
Jasmine Schladen | acf6ac1531 | |
Hossein Shafagh | ad742e6eee | |
Chad S | f7938bf226 | |
charhate | deb7586372 | |
Jasmine Schladen | 2da9754ffa | |
Hossein Shafagh | 4f409547a0 | |
Hossein Shafagh | c1168399a4 | |
Hossein Shafagh | a40298df08 | |
Jasmine Schladen | 0175df821c | |
Jasmine Schladen | b5c38c2854 | |
Jasmine Schladen | dc1f1c247a | |
dependabot-preview[bot] | 28b9a73a83 | |
dependabot-preview[bot] | d097da685a | |
dependabot-preview[bot] | 1c137e6596 | |
dependabot-preview[bot] | 0d388a85bb | |
Hossein Shafagh | a0a5e66cc3 | |
Hossein Shafagh | 1d486cf1fd | |
Hossein Shafagh | 5e5e72ca86 | |
Hossein Shafagh | 29b5c554d6 | |
Hossein Shafagh | d1c21fa23a | |
Hossein Shafagh | c2ec095532 | |
Hossein Shafagh | d7f413efe3 | |
Jasmine Schladen | 66e7acf773 | |
Hossein Shafagh | a630721a3a | |
Hossein Shafagh | 97bdb8a00a | |
Jasmine Schladen | 2b7c151426 | |
Jasmine Schladen | 00e0609547 | |
Hossein Shafagh | e5eba715b6 | |
Hossein Shafagh | 1fd6959403 | |
Chad S | 932a4489e7 | |
Hossein Shafagh | 6e4dd15790 | |
Chad S | bbc6c0dc5f | |
csine-nflx | 04b9df0a34 | |
Hossein Shafagh | e64f4028d4 | |
Hossein Shafagh | 8d44ab2124 | |
Hossein Shafagh | 9e37793cb5 | |
Hossein Shafagh | e48f4ffe77 | |
Hossein Shafagh | 13539814db | |
Hossein Shafagh | addaa3ab13 | |
Hossein Shafagh | 45712c582b | |
Hossein Shafagh | 4937c5dc2c | |
Hossein Shafagh | caa44c1531 | |
Hossein Shafagh | f2205b6025 | |
Hossein Shafagh | b2bfff341f | |
dependabot-preview[bot] | df17337075 | |
dependabot-preview[bot] | 03014ac194 | |
dependabot-preview[bot] | 6547e028b3 | |
dependabot-preview[bot] | d779c74e84 | |
dependabot-preview[bot] | 72852af3a9 | |
dependabot-preview[bot] | b319b335a7 | |
dependabot-preview[bot] | 39b7e10bfe | |
dependabot-preview[bot] | 31180fdca8 | |
dependabot-preview[bot] | 7fb13c7939 | |
dependabot-preview[bot] | 396cc5db40 | |
dependabot-preview[bot] | 0276d1a6f8 | |
dependabot-preview[bot] | 48aeb26b1a | |
Hossein Shafagh | 2e43211496 | |
Hossein Shafagh | 580506f605 | |
Hossein Shafagh | fdd6140995 | |
Hossein Shafagh | c579405805 | |
Hossein Shafagh | 8e5e8fdd03 | |
Hossein Shafagh | e530664da6 | |
Hossein Shafagh | cf7de0aa59 | |
charhate | d73b334958 | |
Hossein Shafagh | a724aa6c26 | |
Hossein Shafagh | a4e12f0d75 | |
Hossein Shafagh | 3062d31800 | |
Hossein Shafagh | b118fbbc3b | |
Hossein Shafagh | 6f197b255f | |
Hossein Shafagh | c3eb463c42 | |
Hossein Shafagh | a49570e5f9 | |
dependabot-preview[bot] | 576122393b | |
dependabot-preview[bot] | aabc7ea319 | |
dependabot-preview[bot] | 59149186cb | |
dependabot-preview[bot] | d0d888a7f1 | |
dependabot-preview[bot] | f8fb5754f0 | |
dependabot-preview[bot] | 735e7b10f9 | |
dependabot-preview[bot] | 5f0e61122d | |
dependabot-preview[bot] | 1b4d511db2 | |
dependabot-preview[bot] | 603d89bffe | |
dependabot-preview[bot] | cf7cf74bb0 | |
Jasmine Schladen | 6676dc245b | |
Jasmine Schladen | 30d5df339d | |
Jasmine Schladen | d823f5f767 | |
Jasmine Schladen | a7f8da91b8 | |
Jasmine Schladen | 0136366c83 | |
Jasmine Schladen | 8824156d4f | |
dependabot-preview[bot] | b3e8bb4bc2 | |
dependabot-preview[bot] | 72b4e93e06 | |
dependabot-preview[bot] | 1401b21674 | |
dependabot-preview[bot] | 358de8e25e | |
dependabot-preview[bot] | 2eb6fcf75b | |
dependabot-preview[bot] | 775a7b7da8 | |
dependabot-preview[bot] | 7293651c7c | |
dependabot-preview[bot] | 86dcac26a2 | |
dependabot-preview[bot] | d9b2e3559a | |
dependabot-preview[bot] | 6d9dca510e | |
dependabot-preview[bot] | 3f6248fb5a | |
dependabot-preview[bot] | 6579630ae7 | |
dependabot-preview[bot] | 3dc8f20e93 | |
Jasmine Schladen | 5c91e7431f | |
Jasmine Schladen | 1d6023e6d8 | |
dependabot-preview[bot] | 4c13d1a5a2 | |
dependabot-preview[bot] | 783676ce52 | |
Jasmine Schladen | d278c6e132 | |
dependabot-preview[bot] | d22bec1ec9 | |
Jasmine Schladen | de06f13bb1 | |
csine-nflx | 377ba25413 | |
Jasmine Schladen | 811ac1a970 | |
Jasmine Schladen | b3d0b7ce1b | |
Jasmine Schladen | fbba3034fc | |
Jasmine Schladen | ad5f7aef82 | |
Jasmine Schladen | 931dd26585 | |
Jasmine Schladen | bc260fabb1 | |
Jasmine Schladen | 0bc66be418 | |
Jasmine Schladen | cbbbe22bb1 | |
Jasmine Schladen | 85b053ed98 | |
Jasmine Schladen | 1918b911b3 | |
Jasmine Schladen | a153781335 | |
Jasmine Schladen | 46864f038c | |
Jasmine Schladen | f29abebf64 | |
Jasmine Schladen | 2fb595046f | |
Jasmine Schladen | 360e4c6154 | |
Jasmine Schladen | da9e949e89 | |
Jasmine Schladen | 00c64ba52f | |
Jasmine Schladen | 5f2e32ff92 | |
Jasmine Schladen | 24c1415983 | |
Jasmine Schladen | 40f62a0ad7 | |
Jasmine Schladen | e464e62d01 | |
Jasmine Schladen | 47121906f5 | |
Jasmine Schladen | 824a4b5910 | |
Jasmine Schladen | d4643d760a | |
Jasmine Schladen | c4a896ecf2 | |
Jasmine Schladen | dfad5ae968 | |
Jasmine Schladen | 8c666b7f0b | |
Jasmine Schladen | 40e5c60c39 | |
Jasmine Schladen | bfe3358b16 | |
Jasmine Schladen | 8086d7afc0 | |
Jasmine Schladen | 6aa6986a14 | |
Jasmine Schladen | e9e79309c5 | |
Jasmine Schladen | 1ab4fe278d | |
Jasmine Schladen | 5e46e2adf0 | |
Jasmine Schladen | 6aff89c1dc | |
Jasmine Schladen | bbdacaccf9 | |
Jasmine Schladen | c0c1022a5b | |
Jasmine Schladen | abdf544e06 | |
Jasmine Schladen | bfa1c067d9 | |
Jasmine Schladen | 91f6f752db | |
Jasmine Schladen | 938b962a32 | |
Jasmine Schladen | b265ecf588 | |
Jasmine Schladen | ec9e1c0dd0 | |
Jasmine Schladen | e29ebb4b61 | |
Jasmine Schladen | 45b84bd088 | |
Jasmine Schladen | 3049fa6585 | |
Jasmine Schladen | 058877d76b | |
Jasmine Schladen | 160ecd926d | |
Jasmine Schladen | 01b880a74b | |
Jasmine Schladen | 6d336ec79f | |
Jasmine Schladen | 697b1ad878 | |
Jasmine Schladen | e607210fe9 | |
Jasmine Schladen | 8cabffcb70 | |
Jasmine Schladen | 42044e99ae | |
Jasmine Schladen | e9860ee72a | |
dependabot-preview[bot] | 9f4e26a961 | |
dependabot-preview[bot] | 683d9cd769 | |
dependabot-preview[bot] | f29685b024 | |
dependabot-preview[bot] | 85d6cd1bfa | |
dependabot-preview[bot] | c4e283925b | |
dependabot-preview[bot] | 2c6abd7fe8 | |
dependabot-preview[bot] | c9beec7333 | |
dependabot-preview[bot] | 4c5fb1f34e | |
dependabot-preview[bot] | 983015ef04 | |
dependabot-preview[bot] | ecf0997778 | |
charhate | 7582eaeaf3 | |
charhate | ab960a533b | |
sayali | 51e90f6fb2 | |
charhate | fb219b2e94 | |
charhate | b3f6df6709 | |
sayali | 533985ca35 | |
charhate | 6fd7684d2d | |
sayali | c71f3bfb5c | |
sayali | a4f3ffa2d8 | |
sayali | 0c1701314a | |
Hossein Shafagh | 47d617f066 | |
Hossein Shafagh | 9ef7f4d549 | |
dependabot-preview[bot] | 52b3fa9c79 | |
dependabot-preview[bot] | 6c33cd0bf3 | |
dependabot-preview[bot] | 3138b99835 | |
dependabot-preview[bot] | 73b1edf096 | |
dependabot-preview[bot] | 08a3ef9f70 | |
dependabot-preview[bot] | c2fb41f817 | |
dependabot-preview[bot] | 62738c86ce | |
dependabot-preview[bot] | fb9986fefd | |
dependabot-preview[bot] | c42a2a75b9 | |
dependabot-preview[bot] | 2f318a3021 | |
dependabot-preview[bot] | f02ae5f3a3 | |
dependabot-preview[bot] | 72ef9f090d | |
Hossein Shafagh | db13d8c001 | |
Hossein Shafagh | 240c76b3cb | |
Jasmine Schladen | 8ea760337c | |
Jasmine Schladen | 9a0f000259 | |
Jasmine Schladen | 470068f03e | |
Jasmine Schladen | b2afb2b38a | |
Jasmine Schladen | 3a6c80df9a | |
Jasmine Schladen | 19d90b8f1c | |
charhate | 4459bbef65 | |
sayali | d9569a67ab | |
dependabot-preview[bot] | fae8a3c537 | |
Hossein Shafagh | feecba0c7a | |
Jasmine Schladen | 0089a45135 | |
Jasmine Schladen | 234f49cac6 | |
dependabot-preview[bot] | e242b4eaea | |
charhate | b699683fa3 | |
Hossein Shafagh | 9fe912892b | |
sayali | 002e73e184 | |
Hossein Shafagh | 6d367099be | |
Hossein Shafagh | 770a9539bb | |
Hossein Shafagh | 189adcb927 | |
dependabot-preview[bot] | 1fa6d82b71 | |
dependabot-preview[bot] | 9883173450 | |
dependabot-preview[bot] | 464077c52e | |
dependabot-preview[bot] | c9462ea8b7 | |
dependabot-preview[bot] | 67a8023d0e | |
dependabot-preview[bot] | 88dd4d61e2 | |
dependabot-preview[bot] | 712553f21f | |
dependabot-preview[bot] | 144d576d03 | |
dependabot-preview[bot] | 35abcf76f4 | |
dependabot-preview[bot] | 5fd167ad20 | |
Hossein Shafagh | 40dd9df617 | |
sergerdn | 83e6d153a5 | |
sergerdn | 50fe03bab4 | |
sergerdn | 69f21234f3 | |
sergerdn | c07af5aae2 | |
Hossein Shafagh | 8c2b084bb0 | |
Hossein Shafagh | 31b20e0a30 | |
Hossein Shafagh | 5fb98f747c | |
charhate | 2cef876ff2 | |
charhate | 244c8ff0ed | |
Hossein Shafagh | 8e93f4f0cf | |
Hossein Shafagh | 2b8af66202 | |
charhate | e542bb39de | |
Hossein Shafagh | 6df1b2985c | |
Hossein Shafagh | d5f678f70c | |
charhate | f4f970c4e9 | |
Hossein Shafagh | 93e9cfe8b3 | |
Hossein Shafagh | 5c5a34c206 | |
sergerdn | 7f16d44b48 | |
sergerdn | e1f5bfe41a | |
sergerdn | 9506898240 | |
sergerdn | fb2a352b13 | |
sayali | 38fc80fb47 | |
sayali | 77a50e0abf | |
Hossein Shafagh | 9b0cc3bbef | |
Hossein Shafagh | da8d3f42d2 | |
Hossein Shafagh | c3e0597ef1 | |
sayali | 20792dfe3a | |
Hossein Shafagh | 895d5e6ec7 | |
Hossein Shafagh | b80b6d0959 | |
Hossein Shafagh | 63e9fdd0e1 | |
Hossein Shafagh | 0d7e8d77e4 | |
Hossein Shafagh | e250a61344 | |
Hossein Shafagh | 089796a849 | |
Hossein Shafagh | 80f83efec2 | |
Hossein Shafagh | 92dabe5d43 | |
Hossein Shafagh | de762d2e53 | |
Hossein Shafagh | c26db968d2 | |
Hossein Shafagh | b9b3ae2286 | |
sayali | ed57a5a45a | |
sayali | fe2fc0eadd | |
sayali | f14a236739 | |
sayali | b9be18f281 | |
dependabot-preview[bot] | 6eeafcf56b | |
dependabot-preview[bot] | 859d4e5845 | |
dependabot-preview[bot] | 8bf9c975ac | |
dependabot-preview[bot] | c464130f90 | |
dependabot-preview[bot] | d6330e91dd | |
dependabot-preview[bot] | 643d94714b | |
dependabot-preview[bot] | 053d985647 | |
dependabot-preview[bot] | b3c06eb6c8 | |
dependabot-preview[bot] | d5ba065d4e | |
dependabot-preview[bot] | 0d13aa3c71 | |
dependabot-preview[bot] | 1f1c3eb9ac | |
dependabot-preview[bot] | 5c15e1690d | |
Jasmine Schladen | c6bcf0edf3 | |
manager | 7add8ab408 | |
charhate | f0fbc81370 | |
sayali | d5d89ec757 | |
Hossein Shafagh | f5899ea71b | |
Hossein Shafagh | ec96e08750 | |
Hossein Shafagh | 556e9fa70a | |
Hossein Shafagh | 3403ba89f1 | |
Hossein Shafagh | cf3f3fde3c | |
Hossein Shafagh | 2240ace825 | |
Hossein Shafagh | 0a37e8a275 | |
Hossein Shafagh | c86ad7d1fd | |
Hossein Shafagh | 74bb4f58a0 | |
Hossein Shafagh | e6a414a069 | |
dependabot-preview[bot] | 0f6af9f679 | |
dependabot-preview[bot] | 09a5b256fb | |
charhate | 8584b43d32 | |
charhate | 70c902954e | |
Hossein Shafagh | 3df63469e6 | |
Chad S | 6ff653cb2a | |
dependabot-preview[bot] | ca40dd4cd9 | |
Hossein Shafagh | a62a562a61 | |
Hossein Shafagh | d914d37e6b | |
Hossein Shafagh | 685cea4768 | |
Hossein Shafagh | fef7c7a907 | |
Hossein Shafagh | 45dfb1beb3 | |
Hossein Shafagh | 4afdc13b03 | |
Hossein Shafagh | f5cd7ecbd5 | |
sayali | 358f07b4c3 | |
Jasmine Schladen | 88e587af11 | |
Jasmine Schladen | 78d5fa7690 | |
Jasmine Schladen | 7343ea3433 | |
charhate | 4b9dee8a31 | |
Hossein Shafagh | be389c4876 | |
dependabot-preview[bot] | f93e49963e | |
dependabot-preview[bot] | dc2cbc055d | |
dependabot-preview[bot] | 2a5627112d | |
dependabot-preview[bot] | 3a5d9c212d | |
dependabot-preview[bot] | e8e085bfc1 | |
dependabot-preview[bot] | aa8d5f097e | |
dependabot-preview[bot] | 76280eb6cd | |
dependabot-preview[bot] | bca995b1ff | |
dependabot-preview[bot] | d83c07ce02 | |
dependabot-preview[bot] | 4eb5a26346 | |
dependabot-preview[bot] | 7d52ce2fad | |
dependabot-preview[bot] | b04dc06109 | |
dependabot-preview[bot] | 2531348d7e | |
dependabot-preview[bot] | 7de2257bf8 | |
dependabot-preview[bot] | c523bfc9f6 | |
charhate | d325d118cd | |
sayali | 3b254ac153 | |
dependabot-preview[bot] | 6311e7a283 | |
Hossein Shafagh | b0cf709bec | |
Hossein Shafagh | b4ec0a636f | |
Hossein Shafagh | b628949349 | |
Hossein Shafagh | ce693d5a1e | |
Hossein Shafagh | 65bd472b08 | |
Hossein Shafagh | 00c9d49f4b | |
Benjamin Greschbach | 8a9b729478 | |
charhate | 4c8edbb43e | |
charhate | a6354ef6f6 | |
sayali | ba050028ca | |
charhate | b3c5d23a3a | |
sayali | 396e3afdfa | |
sayali | 8f16402c0a | |
dependabot-preview[bot] | 9657098514 | |
dependabot-preview[bot] | 596aa8ca95 | |
dependabot-preview[bot] | bbf795faf1 | |
dependabot-preview[bot] | 0e04e79260 | |
dependabot-preview[bot] | 3742ab3a45 | |
dependabot-preview[bot] | 5f31706801 | |
dependabot-preview[bot] | 8e2e79e732 | |
dependabot-preview[bot] | 3bc6090963 | |
dependabot-preview[bot] | ac9fcada87 | |
dependabot-preview[bot] | 836591e165 | |
dependabot-preview[bot] | b9281bd3de | |
dependabot-preview[bot] | 9470f71e34 | |
dependabot-preview[bot] | 40b8b7c3a0 | |
dependabot-preview[bot] | 92312ff4f7 | |
Hossein Shafagh | 7e06d634d5 | |
dependabot-preview[bot] | 597a9e51d3 | |
dependabot-preview[bot] | ae49840add | |
Hossein Shafagh | 9a2056cedd | |
dependabot-preview[bot] | cbf388bde7 | |
Hossein Shafagh | aad48ab289 | |
dependabot-preview[bot] | d725a9fdcf | |
dependabot-preview[bot] | d8b769b5e2 | |
dependabot-preview[bot] | 7916a8ca90 | |
dependabot-preview[bot] | 6c2d5b84bb | |
dependabot-preview[bot] | f33f3e1af5 | |
dependabot-preview[bot] | 8b7b4529b3 | |
dependabot-preview[bot] | 53c5381d5b | |
dependabot-preview[bot] | d7f807c1ff | |
dependabot-preview[bot] | 6668592ffe | |
dependabot-preview[bot] | e946a37b6a | |
dependabot-preview[bot] | 84b5410075 | |
Hossein Shafagh | d71da1f4d8 | |
Hossein Shafagh | 7fb8469449 | |
Hossein Shafagh | 97ce836df9 | |
Hossein Shafagh | 7300ea1eb8 | |
Hossein Shafagh | 843249a84e | |
Hossein Shafagh | 1aa0e3dad6 | |
Hossein Shafagh | 63b5d24f39 | |
Jasmine Schladen | 3bb5c323ef | |
Jasmine Schladen | d2e9493397 | |
Tim Gates | 3598953512 | |
Hossein Shafagh | 8a63b175e3 | |
Hossein Shafagh | 3e53a43873 | |
Hossein Shafagh | 34553c2586 | |
Hossein Shafagh | 405aef3160 | |
Jasmine Schladen | 33e46d4a53 | |
Jasmine Schladen | de98586c13 | |
Jasmine Schladen | 787ca4f860 | |
dependabot-preview[bot] | 9d00464842 | |
dependabot-preview[bot] | 8ac3040396 | |
dependabot-preview[bot] | e659bf08ff | |
dependabot-preview[bot] | b89dc7dc6c | |
dependabot-preview[bot] | 5cfe88ef51 | |
dependabot-preview[bot] | ad5a773712 | |
dependabot-preview[bot] | f8e46da6c3 | |
dependabot-preview[bot] | aaba3aab7a | |
dependabot-preview[bot] | f511bb664a | |
dependabot-preview[bot] | 5f8a1b480e | |
dependabot-preview[bot] | 18b529a7f6 | |
dependabot-preview[bot] | bfbb2372c2 | |
dependabot-preview[bot] | 6e00e1b79b | |
dependabot-preview[bot] | 1165ab5eb7 | |
dependabot-preview[bot] | cc2f566bd6 | |
dependabot-preview[bot] | 58f40a7b86 | |
Hossein Shafagh | 0504919835 | |
csine-nflx | 4ac432ce87 | |
charhate | 90531e79f0 | |
charhate | 5d79cae0fa | |
Jasmine Schladen | f941383cd2 | |
Jasmine Schladen | 8ba32aa1e9 | |
sayali | 892a668bf5 | |
sayali | 34588c9513 | |
sayali | 575642fa49 | |
sayali | 42dd8e3f47 | |
charhate | 2ffa691fe4 | |
sayali | 785ef6adb5 | |
sayali | 1547cc3e60 | |
Hossein Shafagh | d921f53d08 | |
charhate | e0b1dbde98 | |
charhate | 8965cf1af5 | |
sayali | a9b983fac2 | |
sayali | 6e41f74908 | |
charhate | 456575b88b | |
charhate | d66af6d2bd | |
sayali | e62e07c929 | |
charhate | d643f32fbe | |
sayali | 856c2da15c | |
sayali | cf652952ff | |
sayali | 8bab89262c | |
sayali | 22d9ef7798 | |
Jasmine Schladen | 512b1acfdd | |
Hossein Shafagh | d3d186880b | |
Jasmine Schladen | 15d43d6c27 | |
Jasmine Schladen | 79c6b3fc00 | |
Jasmine Schladen | 065e4d3964 | |
Jasmine Schladen | eab5532397 | |
sirferl | 2f5b0fb91a | |
sirferl | b93c028215 | |
sirferl | 8a10b861be | |
sirferl | 9a38052868 | |
sirferl | b07af654e1 | |
sirferl | 5e33eee5c8 | |
Jasmine Schladen | affd077afe | |
Jasmine Schladen | 94ba1e77dd | |
dependabot-preview[bot] | 0ebaa78915 | |
dependabot-preview[bot] | f171e84a5e | |
dependabot-preview[bot] | d75f8f4149 | |
dependabot-preview[bot] | cdb5535ae0 | |
dependabot-preview[bot] | adf8483de1 | |
dependabot-preview[bot] | a3b7453886 | |
dependabot-preview[bot] | 6fd3e10ab1 | |
dependabot-preview[bot] | 9eca783c7e | |
dependabot-preview[bot] | fe01b8ee32 | |
dependabot-preview[bot] | 61ed4ea84d | |
dependabot-preview[bot] | e3824b9861 | |
dependabot-preview[bot] | be46623a7f | |
Hossein Shafagh | 9836d723d9 | |
Hossein Shafagh | 18573f00a9 | |
Jasmine Schladen | a9e10778a5 | |
sirferl | 9763f5c26a | |
sirferl | a02b378988 | |
sirferl | 2b65ab8972 | |
Jasmine Schladen | 29aeb9b298 | |
Jasmine Schladen | 1793859781 | |
Jasmine Schladen | 576c69c8e5 | |
Hossein Shafagh | ead8629928 | |
Jasmine Schladen | 42957cffc7 | |
Hossein Shafagh | 1184c219d4 | |
Hossein Shafagh | 4c2227f23c | |
Hossein Shafagh | 4afd425d9f | |
Jasmine Schladen | 148dcc7528 | |
Jasmine Schladen | 589df0e230 | |
sirferl | bc6c913a7a | |
sirferl | 2813186b13 | |
sirferl | 5651865347 | |
sirferl | c635e0f76e | |
sirferl | 6585bcaa55 | |
sirferl | f0f13ce97b | |
charhate | 101ee7317e | |
sayali | 9a1cb65853 | |
Hossein Shafagh | 6fe92a532b | |
sayali | ac038b59b7 | |
sayali | 1846402070 | |
charhate | 5616965637 | |
charhate | 535cf6103d | |
Hossein Shafagh | b1007234e5 | |
sayali | 03a758deca | |
Hossein Shafagh | 5e9e454293 | |
sayali | 5ca0c83a07 | |
sayali | 33bb17779d | |
charhate | aecb222de5 | |
Jasmine Schladen | a79df00a24 | |
Hossein Shafagh | 5d06a62ae9 | |
Hossein Shafagh | a951e7623c | |
Hossein Shafagh | 4437a0f870 | |
Hossein Shafagh | 1869f8a8b7 | |
charhate | 084b2b1830 | |
Jasmine Schladen | b40cb5562a | |
Jasmine Schladen | 4b93c81add | |
charhate | dbf8819034 | |
charhate | 5261281960 | |
Jasmine Schladen | 85d99ded73 | |
sirferl | a0517d26fa | |
sirferl | 9b2ac32d70 | |
sirferl | 3b19863a96 | |
Albert Tugushev | d2abe59e6e | |
sayali | f22f29c053 | |
sayali | 1f2b445215 | |
charhate | 87a4d7dc3b | |
sayali | fe213c85ae | |
sayali | 7a1f13dcb5 | |
charhate | f6472e6f6f | |
sayali | 817abb2ca8 | |
charhate | a2f731b710 | |
dependabot-preview[bot] | 0a23a91fdb | |
dependabot-preview[bot] | fa082145fe | |
dependabot-preview[bot] | 584f8e32f2 | |
dependabot-preview[bot] | 1eb0024b1f | |
dependabot-preview[bot] | ac6f2b8d14 | |
dependabot-preview[bot] | b00d97b6de | |
dependabot-preview[bot] | cb1d3a0883 | |
dependabot-preview[bot] | 8ad82bee53 | |
Hossein Shafagh | 7b5d6d3db1 | |
sirferl | 6d75007644 | |
sirferl | e9f7860816 | |
sirferl | 9949f9914e | |
Hossein Shafagh | b90ead36d7 | |
Hossein Shafagh | 6f3cc74721 | |
Hossein Shafagh | 2710bcc263 | |
Hossein Shafagh | 028f0c3330 | |
sirferl | 439e888d9e | |
sirferl | 56af628c68 | |
sirferl | 608f3dfb73 | |
sirferl | 0f3357ab46 | |
sirferl | eedd2e91ee | |
sirferl | 31bfcd8810 | |
sirferl | 4c88ffc9bc | |
Hossein Shafagh | b916aef81f | |
sayali | 563c754492 | |
sayali | 1207de8925 | |
Hossein Shafagh | c765eab2f0 | |
Hossein Shafagh | 6d1a630380 | |
dependabot-preview[bot] | 55aa19f878 | |
dependabot-preview[bot] | 1d247f3df4 | |
Hossein Shafagh | f63d73d0dc | |
dependabot-preview[bot] | 0138ed7564 | |
dependabot-preview[bot] | f6096c62cf | |
dependabot-preview[bot] | 1474a399b7 | |
dependabot-preview[bot] | e4b96f1ba4 | |
Hossein Shafagh | b703a4ff3c | |
dependabot-preview[bot] | 378e34d3d8 | |
dependabot-preview[bot] | 12b20693f8 | |
dependabot-preview[bot] | a0104dd026 | |
dependabot-preview[bot] | 3909032752 | |
Hossein Shafagh | 48e3eb25a0 | |
dependabot-preview[bot] | 320e0e7513 | |
Hossein Shafagh | 4fa7415e28 | |
Mathias Petermann | 72da149fde | |
Mathias Petermann | c342fb894d | |
Hossein Shafagh | a65f0c0fdd | |
Hossein Shafagh | cbdaa4e3e4 | |
Hossein Shafagh | 58ecb22f19 | |
Hossein Shafagh | 15b97dc2ef | |
Jasmine Schladen | 33e34d76b2 | |
Hossein Shafagh | 6163015e36 | |
Jasmine Schladen | 9aaf507dd6 | |
Hossein Shafagh | 3dc279ac29 | |
Hossein Shafagh | d22156337a | |
Hossein Shafagh | 13c2fafd46 | |
Hossein Shafagh | e4f89c93b5 | |
Hossein Shafagh | 24e74ab873 | |
sirferl | 0521624ccc | |
sirferl | 1b5f17d8b8 | |
sirferl | 48302b6acc | |
sirferl | 62230228a7 | |
sirferl | 40057262e1 | |
sirferl | ad1c6074ad | |
sirferl | 5f1978a4da | |
sirferl | 51bc7d0231 | |
sirferl | 334367376f | |
Jasmine Schladen | 347647da91 | |
Jasmine Schladen | db11f0c1b7 | |
Hossein Shafagh | d5714c8b5c | |
Hossein Shafagh | 31fde453a8 | |
Hossein Shafagh | 8a2621de44 | |
Hossein Shafagh | 95b24cbadc | |
Jasmine Schladen | afbd8b6930 | |
Hossein Shafagh | 50483c01da | |
Jasmine Schladen | bb11ff9f60 | |
Jasmine Schladen | 79094e782d | |
Jasmine Schladen | fc7db4a9b2 | |
Jasmine Schladen | 65676a84da | |
Jasmine Schladen | 8c56b74f07 | |
Hossein Shafagh | 2d2ecdeee2 | |
sirferl | 75107bcd69 | |
sirferl | ff540069e2 | |
sirferl | b191b32312 | |
sirferl | 50ffe6a29c | |
sirferl | 8738c4d893 | |
sirferl | 949a123cb9 | |
sirferl | e1926f2f3c | |
sirferl | 3a61308354 | |
sirferl | d4d51c702a | |
sirferl | 4b19a3ef78 | |
Hossein Shafagh | f4e0b3a90f | |
Hossein Shafagh | 02c7093b32 | |
Hossein Shafagh | 95854c6f68 | |
Jasmine Schladen | 1a65e09a99 | |
Mathias Petermann | 9fd3440cf6 | |
Mathias Petermann | 453826c59c | |
Mathias Petermann | 2b01bdb471 | |
Mathias Petermann | 648565d3e9 | |
Mathias Petermann | e12ee1d89c | |
Mathias Petermann | ae7a044b9c | |
Mathias Petermann | df11a03bde | |
Mathias Petermann | 6e5aa4e979 | |
Mathias Petermann | 5cdd88e033 | |
Mathias Petermann | 7b1beb62b6 | |
Mathias Petermann | 7a7f05ec9e | |
Hossein Shafagh | 252f84cf21 | |
Hossein Shafagh | ea77ef08aa | |
Hossein Shafagh | 8efa682858 | |
Jasmine Schladen | 2798692fa9 | |
Jasmine Schladen | dac6838c3b | |
Mathias Petermann | 31b5f3df86 | |
Mathias Petermann | fba1fdcc34 | |
Mathias Petermann | 9ebcdfc189 | |
Mathias Petermann | 6ffe7bc526 | |
Mathias Petermann | 960b8e78e3 | |
Mathias Petermann | 99ca0ac78d | |
Mathias Petermann | 4a181aff6e | |
Hossein Shafagh | f42d9539fc | |
Hossein Shafagh | 018f4a4b77 | |
dependabot-preview[bot] | a7a5a8fb72 | |
dependabot-preview[bot] | 65d9ac6a0f | |
dependabot-preview[bot] | 11386c6c7e | |
dependabot-preview[bot] | 7ec2860f88 | |
dependabot-preview[bot] | 76fb92d970 | |
dependabot-preview[bot] | 4c6645ca04 | |
dependabot-preview[bot] | 021f530810 | |
dependabot-preview[bot] | a74b8aed15 | |
dependabot-preview[bot] | 307e4693c6 | |
dependabot-preview[bot] | d3e8921731 | |
Hossein Shafagh | 40ef6d06d7 | |
Frederic Brin | 57208fe198 | |
Hossein Shafagh | 7c779d6283 | |
Hossein Shafagh | 519411b309 | |
charhate | 6fe855e824 | |
charhate | cafc2c1d80 | |
sayali | 320667935d | |
charhate | 894b74f523 | |
sayali | 206d010c9a | |
charhate | 92a555ba4b | |
sayali | 7d2ce61303 | |
charhate | 8990209411 | |
Jasmine Schladen | fb25d82eea | |
Jasmine Schladen | 4d32adb3bf | |
Jasmine Schladen | 4cc0f6bb60 | |
sayali | ab014873d0 | |
sayali | 003779a112 | |
sayali | dc7497e29d | |
Hossein Shafagh | 2a61206fdf | |
Mathias Petermann | c71dbcb0a0 | |
Mathias Petermann | bc564b574d | |
sayali | 3d64aa8d11 | |
sayali | 86b2cfbe4a | |
sayali | b75bd56546 | |
sayali | 6922d34825 | |
sayali | 825a001a8b | |
sayali | d88da028b1 | |
sayali | d821024e35 | |
sayali | 2dac95c6fb | |
sayali | 4ffced70f8 | |
sayali | 634339eac6 | |
charhate | 5569c9d8e1 | |
Jasmine Schladen | 8659504a8b | |
Jasmine Schladen | 8e8a89bdfb | |
Hossein Shafagh | 4659df42d5 | |
Hossein Shafagh | e9b0e2eca7 | |
dependabot-preview[bot] | 9ad9f349ba | |
dependabot-preview[bot] | fa620e539d | |
dependabot-preview[bot] | c4e3998715 | |
dependabot-preview[bot] | fc2fce6c0b | |
dependabot-preview[bot] | 3f46b0b6d7 | |
dependabot-preview[bot] | 2331638ed1 | |
dependabot-preview[bot] | 01f31772bb | |
dependabot-preview[bot] | 771c272895 | |
dependabot-preview[bot] | 3fb0aae43a | |
Hossein Shafagh | a15e1831d0 | |
dependabot-preview[bot] | a4d2f79a9b | |
Hossein Shafagh | 9c6856bcdd | |
Hossein Shafagh | 7bca42776b | |
Hossein Shafagh | 3dfafa0021 | |
Hossein Shafagh | add0960579 | |
Hossein Shafagh | e1ff89eb2d | |
Hossein Shafagh | cc2aa5c1de | |
Hossein Shafagh | ba8eb7a3f5 | |
Hossein Shafagh | c5769378cf | |
Hossein Shafagh | f90041353c | |
charhate | 8eba97fd14 | |
Hossein Shafagh | d41daeb4af | |
Chad S | 507e3caee5 | |
Chad S | cc05d21260 | |
csine-nflx | a4178ca113 | |
charhate | 69aa98c1c8 | |
sayali | 03dfbf535d | |
sayali | aec24ae132 | |
sayali | 4e44dd3d8f | |
charhate | 4330a42dd3 | |
Hossein Shafagh | 725eee549d | |
Hossein Shafagh | 497bd6a13c | |
Hossein Shafagh | 945ec0895b | |
Hossein Shafagh | 9aa2d2af76 | |
Chad S | 4bc5899e24 | |
csine-nflx | ccecb26816 | |
csine-nflx | ca465e3c9e | |
Jasmine Schladen | a3a02a8077 | |
Hossein Shafagh | 2aec317127 | |
Jasmine Schladen | 83d363fd9e | |
Jasmine Schladen | 1c59fb575c | |
Jasmine Schladen | 86207db93b | |
Hossein Shafagh | 15a7921bf4 | |
Jasmine Schladen | 84f8905cf1 | |
Hossein Shafagh | cca4670745 | |
Chad S | 14348a1f95 | |
Hossein Shafagh | 28c6f8583a | |
csine-nflx | a1f99c29c0 | |
Jasmine Schladen | aa2e0aa2f9 | |
csine-nflx | 2b91077d92 | |
Jasmine Schladen | 28686fcf5d | |
Jasmine Schladen | 45cc9528d2 | |
Jasmine Schladen | 78afc060ae | |
Hossein Shafagh | e967f2c676 | |
Hossein Shafagh | 2cea33cb11 | |
Chad S | af348b1012 | |
csine-nflx | 33a006bbeb | |
csine-nflx | b47667b73e | |
Jasmine Schladen | 3e492e6310 | |
charhate | ff83721720 | |
charhate | bbfc65813d | |
charhate | 166dfa89ad | |
charhate | 6adf94d28f | |
charhate | 43ebc5aac1 | |
charhate | 9fd61a37dc | |
Hossein Shafagh | 576302fdd5 | |
Hossein Shafagh | 54566ad4c3 | |
Jasmine Schladen | 5e696f36bf | |
Jasmine Schladen | acc95a4b66 | |
charhate | c25782468b | |
Hossein Shafagh | c0bf111cb9 | |
Hossein Shafagh | cc69b433ca | |
csine-nflx | d27f2a53af | |
charhate | 95b647ee1d | |
Hossein Shafagh | 84d30b5d50 | |
Jasmine Schladen | 31b9e2cd20 | |
Jasmine Schladen | 13e8421c78 | |
Jasmine Schladen | 16ce7970d0 | |
Hossein Shafagh | a9d3b7a676 | |
Hossein Shafagh | adca20ade1 | |
Mathias Petermann | 23e1700fad | |
Mathias Petermann | b656e0d75a | |
Jasmine Schladen | b7b7e9022f | |
Jasmine Schladen | 794e4d3855 | |
Hossein Shafagh | 61ef7f207d | |
Hossein Shafagh | 44e4100a39 | |
Hossein Shafagh | 2dd9ea3d01 | |
Hossein Shafagh | a1af7c89b1 | |
Hossein Shafagh | c6a8034890 | |
Hossein Shafagh | f77c262953 | |
Hossein Shafagh | 10aa02fd85 | |
Hossein Shafagh | 54c2245115 | |
Hossein Shafagh | d59a558d58 | |
Emmanuel Garette | e9824a6808 | |
Emmanuel Garette | 79647e3372 | |
Hossein Shafagh | 7f25e02589 | |
Hossein Shafagh | 729a6e69f5 | |
Hossein Shafagh | 46ec1798d3 | |
Hossein Shafagh | 437933c558 | |
Hossein Shafagh | 259a8808f1 | |
Hossein Shafagh | 56061a7e3b | |
Hossein Shafagh | 7a982a731a | |
dependabot-preview[bot] | dfdb26f994 | |
Hossein Shafagh | 97f66276ec | |
dependabot-preview[bot] | 196a311084 | |
dependabot-preview[bot] | 3551437d9c | |
Jasmine Schladen | 20b8c2fd93 | |
dependabot-preview[bot] | 5c3758731c | |
Mathias Petermann | ccf87986c0 | |
Mathias Petermann | 96fbcdaf70 | |
Mathias Petermann | 103e107668 | |
Mathias Petermann | 82bf8e2ac6 | |
Mathias Petermann | 2d98e71977 | |
Mathias Petermann | 30c10b93f8 | |
Mathias Petermann | 3b20a47603 | |
Mathias Petermann | 4464c5890d | |
Mathias Petermann | 812e1dee92 | |
Mathias Petermann | b91cebf245 | |
Mathias Petermann | 6c1be02bfa | |
Mathias Petermann | ef0fce2661 | |
Mathias Petermann | 235653b558 | |
Mathias Petermann | 81b078604c | |
Mathias Petermann | 215070b327 | |
Mathias Petermann | 41ea59d7e3 | |
Mathias Petermann | d24fae0bac | |
Mathias Petermann | 66cab6abd3 | |
Mathias Petermann | e3e5ef7d66 | |
Mathias Petermann | 76dcfbd528 | |
Mathias Petermann | d6719b729c | |
Mathias Petermann | b2de986652 | |
Mathias Petermann | b93d271f31 | |
Mathias Petermann | e06bdcf2a3 | |
Mathias Petermann | 3012995c76 | |
Mathias Petermann | 348d8477dd | |
Mathias Petermann | d00dd9d295 | |
Hossein Shafagh | faceee0f77 | |
Hossein Shafagh | 56a4200d2c | |
Hossein Shafagh | 645b45401d | |
Hossein Shafagh | 3b258447db | |
Hossein Shafagh | 2430507e55 | |
Hossein Shafagh | 1ef6139f9b | |
Hossein Shafagh | 6a1b4b4857 | |
Hossein Shafagh | 709a9808aa | |
Hossein Shafagh | cb4f814478 | |
Hossein Shafagh | 4fffb8ba5b | |
Hossein Shafagh | a87bf0d50a | |
Hossein Shafagh | 831e5619e1 | |
Hossein Shafagh | ab47db4cd4 | |
sayali | 392725ff30 | |
csine-nflx | 749aa772ba | |
Jasmine Schladen | 2d94b19c32 | |
Jasmine Schladen | 3f765b51ef | |
Hossein Shafagh | 6723e3c80d | |
Hossein Shafagh | 3290d6634b | |
Hossein Shafagh | fa62023b2d | |
dependabot-preview[bot] | 37f05a89f2 | |
Emmanuel Garette | d7478a5c5c | |
Hossein Shafagh | f6554a9a1e | |
Hossein Shafagh | 0e02abbb37 | |
Hossein Shafagh | 9957120a7f | |
Hossein Shafagh | 7e573d6d51 | |
Hossein Shafagh | 6891077501 | |
Hossein Shafagh | 75bc3a5b20 | |
Hossein Shafagh | d233490c8a | |
Hossein Shafagh | 2c1e7b19a2 | |
Hossein Shafagh | db0b245b6c | |
sayali | 2c22d42a57 | |
Hossein Shafagh | 0a05f99741 | |
Hossein Shafagh | d9bbf42480 | |
dependabot-preview[bot] | d58b32a19c | |
Hossein Shafagh | 3d83db6f8f | |
Hossein Shafagh | 2b7cb0d44f | |
Hossein Shafagh | fd16edb3e5 | |
Hossein Shafagh | 30915d30be | |
Hossein Shafagh | 5b523bb8ed | |
Hossein Shafagh | 584159c916 | |
Hossein Shafagh | 01bd357b1c | |
Jasmine Schladen | fd12d4848c | |
Hossein Shafagh | 582c7b0771 | |
Hossein Shafagh | 1495fb3595 | |
Hossein Shafagh | bc6fb02fc2 | |
Hossein Shafagh | e01863097b | |
Jasmine Schladen | a5cea4fb9a | |
Jasmine Schladen | 233f9768e8 | |
Hossein Shafagh | 5ccc99bbfa | |
Jasmine Schladen | 98962ae5f5 | |
Hossein Shafagh | 41ac43013d | |
Hossein Shafagh | 2ea39a51e3 | |
Hossein Shafagh | 2b274f723a | |
Hossein Shafagh | e87cf040f3 | |
Jasmine Schladen | 71df6b8560 | |
Hossein Shafagh | 8610af8b83 | |
Hossein Shafagh | 820106e333 | |
Hossein Shafagh | 9ce0010bf1 | |
Hossein Shafagh | cf87e178c8 | |
Hossein Shafagh | 97f80b79dc | |
Hossein Shafagh | 9acd974b74 | |
Hossein Shafagh | ae1e9d120b | |
Hossein Shafagh | 2e7652962c | |
Hossein Shafagh | 1c96ea9ab1 | |
Hossein Shafagh | 02c040865d | |
Hossein Shafagh | 8fa90a2ce5 | |
Hossein Shafagh | c60645bec4 | |
Hossein Shafagh | c2fe2b5e03 | |
Hossein Shafagh | 03d1af16e7 | |
Hossein Shafagh | 3e1e17998e | |
Hossein Shafagh | 2b876f22a5 | |
Hossein Shafagh | 2e7e3a82fa | |
Hossein Shafagh | c40ecd12cb | |
Hossein Shafagh | 2cc03088cd | |
Hossein Shafagh | a4dba0cb35 | |
Hossein Shafagh | 906b3b2337 | |
Hossein Shafagh | 92eec5cc9c | |
charhate | 55f219e97a | |
Hossein Shafagh | adf8f37718 | |
sayali | 43483cb1c7 | |
charhate | 2ccb7034b5 | |
Hossein Shafagh | 0986a7a3ff | |
sayali | 757e190b60 | |
Emmanuel Garette | 9374adaa46 | |
charhate | c1bf192bd8 | |
Hossein Shafagh | 18fdd420a7 | |
sayali | 4997165235 | |
charhate | 59a97cde1d | |
sayali | 01dddd2a55 | |
sayali | 788703ce12 | |
Jasmine Schladen | 1fc9cd2ff8 | |
Jasmine Schladen | 4f552cb636 | |
Jasmine Schladen | d6075ebc11 | |
Hossein Shafagh | 63ace016f9 | |
dependabot-preview[bot] | a3b90c1a6b | |
dependabot-preview[bot] | 58798fbc2e | |
charhate | ea33fe9979 | |
Hossein Shafagh | 5cf9ea4830 | |
sayali | 855baadfee | |
Jasmine Schladen | 669a4273c2 | |
Jasmine Schladen | ad07b41763 | |
Jasmine Schladen | b5f0fc5a19 | |
Jasmine Schladen | ecd4d6ebe3 | |
Hossein Shafagh | af3afe36e1 | |
Emmanuel Garette | 591c8cf524 | |
Jasmine Schladen | e90b08b363 | |
Jasmine Schladen | 6a1889787d | |
Jasmine Schladen | 2c92fc6eb9 | |
Jasmine Schladen | 072b337f37 | |
Jasmine Schladen | fe5d75c7f8 | |
Jasmine Schladen | 60bb0037f0 | |
Hossein Shafagh | dbdfa9eab8 | |
Jasmine Schladen | a04cce6044 | |
Hossein Shafagh | 503530e935 | |
Hossein Shafagh | 11ce540246 | |
Hossein Shafagh | 9c04a888d8 | |
Hossein Shafagh | 17e528b5dd | |
Hossein Shafagh | d705e3ae3b | |
Hossein Shafagh | 7d8eb1c61e | |
Hossein Shafagh | 6aad37e1f9 | |
Hossein Shafagh | d73db59d23 | |
Hossein Shafagh | bfe89e131e | |
sirferl | 723bf67957 |

@@ -0,0 +1,2 @@
# These owners will be the default owners for everything in the repo.
* @hosseinsh @csine-nflx @charhate @jtschladen

@@ -0,0 +1,15 @@
version: 2
updates:
  - directory: "/"
    schedule:
      interval: "weekly"
      day: "monday"
      time: "08:00"
      timezone: "America/Los_Angeles"
    package-ecosystem: "pip"
    reviewers:
      - "hosseinsh"
      - "csine-nflx"
      - "charhate"
      - "jtschladen"
    versioning-strategy: lockfile-only

@@ -0,0 +1,71 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches: [ master ]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [ master ]
  schedule:
    - cron: '15 16 * * 2'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        language: [ 'javascript', 'python' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
        # Learn more:
        # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed

    steps:
    - name: Checkout repository
      uses: actions/checkout@v2

    # Install prerequisites for python-ldap. See: https://www.python-ldap.org/en/python-ldap-3.3.0/installing.html#build-prerequisites
    - name: Install python-ldap prerequisites
      run: sudo apt-get install libldap2-dev libsasl2-dev

    # Initializes the CodeQL tools for scanning.
    - name: Initialize CodeQL
      uses: github/codeql-action/init@v1
      with:
        languages: ${{ matrix.language }}
        # If you wish to specify custom queries, you can do so here or in a config file.
        # By default, queries listed here will override any specified in a config file.
        # Prefix the list here with "+" to use these queries and those in the config file.
        # queries: ./path/to/local/query, your-org/your-repo/queries@main

    # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
    # If this step fails, then you should remove it and run the build manually (see below)
    - name: Autobuild
      uses: github/codeql-action/autobuild@v1

    # ℹ️ Command-line programs to run using the OS shell.
    # 📚 https://git.io/JvXDl

    # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
    #    and modify them (or add more) to build your code if your project
    #    uses a compiled language

    #- run: |
    #   make bootstrap
    #   make release

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v1

@@ -0,0 +1,14 @@
name: dependabot-auto-merge

on:
  pull_request:

jobs:
  auto-merge:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: ahmadnassri/action-dependabot-auto-merge@v2
        with:
          target: minor
          github-token: ${{ secrets.DEPENDABOT_GITHUB_TOKEN }}

@@ -0,0 +1,41 @@
# This workflow will upload a Python Package using Twine when a Lemur release is created via github
# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries

name: Publish Lemur's latest package to PyPI

on:
  release:
    types: [created]

jobs:
  deploy:

    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v2
    - name: Set up Python
      uses: actions/setup-python@v2
      with:
        python-version: '3.x'
    - name: Autobump version
      run: |
        # from refs/tags/v0.8.1 get 0.8.1
        VERSION=$(echo $GITHUB_REF | sed 's#.*/v##')
        PLACEHOLDER='__version__ = "develop"'
        VERSION_FILE='lemur/__about__.py'
        # in case placeholder is missing, exists with code 1 and github actions aborts the build
        grep "$PLACEHOLDER" "$VERSION_FILE"
        sed -i "s/$PLACEHOLDER/__version__ = \"${VERSION}\"/g" "$VERSION_FILE"
      shell: bash
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        pip install setuptools wheel twine
    - name: Build and publish
      env:
        TWINE_USERNAME: ${{ secrets.LEMUR_PYPI_API_USERNAME }}
        TWINE_PASSWORD: ${{ secrets.LEMUR_PYPI_API_TOKEN }}
      run: |
        python setup.py sdist bdist_wheel
        twine upload dist/*
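
(Aside, not part of the diff: the tag-to-version extraction in the Autobump step above can be checked locally. The ref value below is simply the example already given in the workflow's own comment.)

# sed strips everything up to and including the last "/v" from the ref name.
GITHUB_REF="refs/tags/v0.8.1"
echo "$GITHUB_REF" | sed 's#.*/v##'    # prints: 0.8.1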

@@ -39,3 +39,4 @@ lemur/tests/tmp

/lemur/plugins/lemur_email/tests/expiration-rendered.html
/lemur/plugins/lemur_email/tests/rotation-rendered.html
.celerybeat-schedule

@@ -0,0 +1,23 @@
# .readthedocs.yml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

# Required
version: 2

# Build documentation in the docs/ directory with Sphinx
sphinx:
  configuration: docs/conf.py
  fail_on_warning: true

# Build docs in all formats (html, pdf, epub)
formats: all

# Set the version of Python and requirements required to build the docs
python:
  version: 3.7
  install:
    - requirements: requirements-docs.txt
    - method: setuptools
      path: .
      system_packages: true

.travis.yml (71 changes)

@@ -1,16 +1,50 @@
language: python
dist: xenial

node_js:
- "6.2.0"
- "10"

addons:
postgresql: "9.4"

matrix:
jobs:
include:
- python: "3.7"
- name: "python3.7-postgresql-9.4-bionic"
dist: bionic
language: python
python: "3.7"
env: TOXENV=py37
addons:
postgresql: "9.4"
chrome: stable
services:
- xvfb
- name: "python3.7-postgresql-10-bionic"
dist: bionic
language: python
python: "3.7"
env: TOXENV=py37
addons:
postgresql: '10'
chrome: stable
apt:
packages:
- postgresql-10
- postgresql-client-10
- postgresql-server-dev-10
services:
- postgresql
- xvfb
- name: "python3.8-postgresql-12-focal"
dist: focal
language: python
python: "3.8"
env: TOXENV=py38
addons:
postgresql: '12'
chrome: stable
apt:
packages:
- postgresql-12
- postgresql-client-12
- postgresql-server-dev-12
services:
- postgresql
- xvfb

cache:
directories:

@@ -26,13 +60,23 @@ env:
# https://github.com/travis-ci/travis-ci/issues/5246#issuecomment-166460882
- BOTO_CONFIG=/doesnotexist

before_install:
- export CHROME_BIN=/usr/bin/google-chrome

before_script:
- sudo systemctl stop postgresql
# the port may have been auto-configured to use 5433 if it thought 5422 was already in use,
# for some reason it happens very often
# https://github.com/travis-ci/travis-build/blob/master/lib/travis/build/bash/travis_setup_postgresql.bash#L52
- sudo sed -i -e 's/5433/5432/' /etc/postgresql/*/main/postgresql.conf
- sudo systemctl restart postgresql
- psql -c "create database lemur;" -U postgres
- psql -c "create user lemur with password 'lemur;'" -U postgres
- psql lemur -c "create extension IF NOT EXISTS pg_trgm;" -U postgres
- npm config set registry https://registry.npmjs.org
- npm install -g bower
- npm install -g npm@latest bower
- pip install --upgrade setuptools
- export DISPLAY=:99.0

install:
- pip install coveralls

@@ -41,10 +85,15 @@ install:
script:
- make test
- bandit -r . -ll -ii -x lemur/tests/,docs
- make test-js

after_success:
- coveralls

notifications:
email:
ccastrapel@netflix.com
recipients:
- lemur@netflix.com
on_success: never
on_failure: always
on_cancel: never # Dependbot cancels Travis before rebase and triggers too many emails

CHANGELOG.rst (144 changes)

@@ -1,9 +1,128 @@
Changelog
=========

0.9.0 - `2021-03-17`
~~~~~~~~~~~~~~~~~~~~

This release fixes three critical vulnerabilities where an authenticated user could retrieve/access
unauthorized information. (Issue `#3463 <https://github.com/Netflix/lemur/issues/3463>`_)


0.8.1 - `2021-03-12`
~~~~~~~~~~~~~~~~~~~~

This release includes improvements on many fronts, such as:

- Notifications:
- Enhanced SNS flow
- Expiration Summary
- CA expiration email
- EC algorithm as the default
- Improved revocation flow
- Localized AWS STS option
- Improved Lemur doc building
- ACME:
- reduced failed attempts to 3x trials
- support for selecting the chain (Let's Encrypt X1 transition)
- revocation
- http01 documentation
- Entrust:
- Support for cross-signed intermediate CA
- Revised disclosure process
- Dependency updates and conflict resolutions

Special thanks to all who contributed to this release, notably:

- `peschmae <https://github.com/peschmae>`_
- `atugushev <https://github.com/atugushev>`_
- `sirferl <https://github.com/sirferl>`_


0.8.0 - `2020-11-13`
~~~~~~~~~~~~~~~~~~~~

This release comes after more than two years and contains many interesting new features and improvements.
In addition to multiple new plugins, such as ACME-http01, ADCS, PowerDNS, UltraDNS, Entrust, SNS, many of Lemur's existing
flows have improved.

In the future, we plan to do frequent releases.


Summary of notable changes:

- AWS S3 plugin: added delete, get methods, and support for uploading/deleting acme tokens
- ACME plugin:
- revamp of the plugin
- support for http01 domain validation, via S3 and SFTP as destination for the acme token
- support for CNAME delegated domain validation
- store-acme-account-details
- PowerDNS plugin
- UltraDNS plugin
- ADCS plugin
- SNS plugin
- Entrust plugin
- Rotation:
- respecting keyType and extensions
- region-by-region rotation option
- default to auto-rotate when cert attached to endpoint
- default to 1y validity during rotation for multi-year browser-trusted certs
- Certificate: search_by_name, and important performance improvements
- UI
- reducing the EC curve options to the relevant ones
- edit option for notifications, destinations and sources
- showing 13 month validity as default
- option to hide certs expired since 3month
- faster Permalink (no search involved)
- commonName Auto Added as DNS in the UI
- improved search and cert lookup
- celery tasks instead of crone, for better logging and monitoring
- countless bugfixes
- group-lookup-fix-referral
- url_context_path
- duplicate notification
- digicert-time-bug-fix
- improved-csr-support
- fix-cryptography-intermediate-ca
- enhanced logging
- vault-k8s-auth
- cfssl-key-fix
- cert-sync-endpoint-find-by-hash
- nlb-naming-bug
- fix_vault_api_v2_append
- aid_openid_roles_provider_integration
- rewrite-java-keystore-use-pyjks
- vault_kv2


To see the full list of changes, you can run

$ git log --merges --first-parent master --pretty=format:"%h %<(10,trunc)%aN %C(white)%<(15)%ar%Creset %C(red bold)%<(15)%D%Creset %s" | grep -v "depend"


Special thanks to all who contributed to this release, notably:

- `peschmae <https://github.com/peschmae>`_
- `sirferl <https://github.com/sirferl>`_
- `lukasmrtvy <https://github.com/lukasmrtvy>`_
- `intgr <https://github.com/intgr>`_
- `kush-bavishi <https://github.com/kush-bavishi>`_
- `alwaysjolley <https://github.com/alwaysjolley>`_
- `jplana <https://github.com/jplana>`_
- `explody <https://github.com/explody>`_
- `titouanc <https://github.com/titouanc>`_
- `jramosf <https://github.com/jramosf>`_


Upgrading
---------

.. note:: This release will need a migration change. Please follow the `documentation <https://lemur.readthedocs.io/en/latest/administration.html#upgrading-lemur>`_ to upgrade Lemur.


0.7 - `2018-05-07`
~~~~~~~~~~~~~~
~~~~~~~~~~~~~~~~~~

This release adds LetsEncrypt support with DNS providers Dyn, Route53, and Cloudflare, and expands on the pending certificate functionality.
The linux_dst plugin will also be deprecated and removed.

@@ -40,8 +159,7 @@ Happy Holidays! This is a big release with lots of bug fixes and features. Below

Features:

* Per-certificate rotation policies, requires a database migration. The default rotation policy for all certificates.
is 30 days. Every certificate will gain a policy regardless of if auto-rotation is used.
* Per-certificate rotation policies, requires a database migration. The default rotation policy for all certificates is 30 days. Every certificate will gain a policy regardless of if auto-rotation is used.
* Adds per-user API Keys, allows users to issue multiple long-lived API tokens with the same permission as the user creating them.
* Adds the ability to revoke certificates from the Lemur UI/API, this is currently only supported for the digicert CIS and cfssl plugins.
* Allow destinations to support an export function. Useful for file system destinations e.g. S3 to specify the export plugin you wish to run before being sent to the destination.

@@ -85,13 +203,9 @@ Big thanks to neilschelly for quite a lot of improvements to the `lemur-cryptogr

Other Highlights:

* Closed `#501 <https://github.com/Netflix/lemur/issues/501>`_ - Endpoint resource as now kept in sync via an
expiration mechanism. Such that non-existant endpoints gracefully fall out of Lemur. Certificates are never
removed from Lemur.
* Closed `#551 <https://github.com/Netflix/lemur/pull/551>`_ - Added the ability to create a 4096 bit key during certificate
creation. Closed `#528 <https://github.com/Netflix/lemur/pull/528>`_ to ensure that issuer plugins supported the new 4096 bit keys.
* Closed `#566 <https://github.com/Netflix/lemur/issues/566>`_ - Fixed an issue changing the notification status for certificates
without private keys.
* Closed `#501 <https://github.com/Netflix/lemur/issues/501>`_ - Endpoint resource as now kept in sync via an expiration mechanism. Such that non-existant endpoints gracefully fall out of Lemur. Certificates are never removed from Lemur.
* Closed `#551 <https://github.com/Netflix/lemur/pull/551>`_ - Added the ability to create a 4096 bit key during certificate creation. Closed `#528 <https://github.com/Netflix/lemur/pull/528>`_ to ensure that issuer plugins supported the new 4096 bit keys.
* Closed `#566 <https://github.com/Netflix/lemur/issues/566>`_ - Fixed an issue changing the notification status for certificates without private keys.
* Closed `#594 <https://github.com/Netflix/lemur/issues/594>`_ - Added `replaced` field indicating if a certificate has been superseded.
* Closed `#602 <https://github.com/Netflix/lemur/issues/602>`_ - AWS plugin added support for ALBs for endpoint tracking.


@@ -115,12 +229,8 @@ Upgrading

There have been quite a few issues closed in this release. Some notables:

* Closed `#284 <https://github.com/Netflix/lemur/issues/284>`_ - Created new models for `Endpoints` created associated
AWS ELB endpoint tracking code. This was the major stated goal of this milestone and should serve as the basis for
future enhancements of Lemur's certificate 'deployment' capabilities.

* Closed `#334 <https://github.com/Netflix/lemur/issues/334>`_ - Lemur not has the ability
to restrict certificate expiration dates to weekdays.
* Closed `#284 <https://github.com/Netflix/lemur/issues/284>`_ - Created new models for `Endpoints` created associated AWS ELB endpoint tracking code. This was the major stated goal of this milestone and should serve as the basis for future enhancements of Lemur's certificate 'deployment' capabilities.
* Closed `#334 <https://github.com/Netflix/lemur/issues/334>`_ - Lemur not has the ability to restrict certificate expiration dates to weekdays.

Several fixes/tweaks to Lemurs python3 support (thanks chadhendrie!)


@@ -175,7 +285,7 @@ these keys should be fairly trivial, additionally pull requests have been submit
should be easier to determine what authorities are available and when an authority has actually been selected.
* Closed `#254 <https://github.com/Netflix/lemur/issues/254>`_ - Forces certificate names to be generally unique. If a certificate name
(generated or otherwise) is found to be a duplicate we increment by appending a counter.
* Closed `#254 <https://github.com/Netflix/lemur/issues/275>`_ - Switched to using Fernet generated passphrases for exported items.
* Closed `#275 <https://github.com/Netflix/lemur/issues/275>`_ - Switched to using Fernet generated passphrases for exported items.
These are more sounds that pseudo random passphrases generated before and have the nice property of being in base64.
* Closed `#278 <https://github.com/Netflix/lemur/issues/278>`_ - Added ability to specify a custom name to certificate creation, previously
this was only available in the certificate import wizard.

Makefile (8 changes)

@@ -115,10 +115,10 @@ endif
@echo "--> Updating Python requirements"
pip install --upgrade pip
pip install --upgrade pip-tools
pip-compile --output-file requirements.txt requirements.in -U --no-index
pip-compile --output-file requirements-docs.txt requirements-docs.in -U --no-index
pip-compile --output-file requirements-dev.txt requirements-dev.in -U --no-index
pip-compile --output-file requirements-tests.txt requirements-tests.in -U --no-index
pip-compile --output-file requirements.txt requirements.in -U --no-emit-index-url
pip-compile --output-file requirements-docs.txt requirements-docs.in -U --no-emit-index-url
pip-compile --output-file requirements-dev.txt requirements-dev.in -U --no-emit-index-url
pip-compile --output-file requirements-tests.txt requirements-tests.in -U --no-emit-index-url
@echo "--> Done updating Python requirements"
@echo "--> Removing python-ldap from requirements-docs.txt"
grep -v "python-ldap" requirements-docs.txt > tempreqs && mv tempreqs requirements-docs.txt

@@ -29,7 +29,7 @@
"satellizer": "~0.13.4",
"angular-ui-router": "~0.2.15",
"font-awesome": "~4.5.0",
"lodash": "~4.0.1",
"lodash": "~4.17.20",
"underscore": "~1.8.3",
"angular-smart-table": "2.1.8",
"angular-strap": ">= 2.2.2",

@@ -1,4 +1,4 @@
FROM alpine:3.8
FROM python:3.7.9-alpine3.12

ARG VERSION
ENV VERSION master

@@ -12,7 +12,7 @@ ENV group lemur

RUN addgroup -S ${group} -g ${gid} && \
adduser -D -S ${user} -G ${group} -u ${uid} && \
apk --update add python3 libldap postgresql-client nginx supervisor curl tzdata openssl bash && \
apk add --no-cache --update python3 py-pip libldap postgresql-client nginx supervisor curl tzdata openssl bash && \
apk --update add --virtual build-dependencies \
git \
tar \

@@ -39,10 +39,12 @@ RUN addgroup -S ${group} -g ${gid} && \
pip3 install --upgrade setuptools && \
mkdir -p /run/nginx/ /etc/nginx/ssl/ && \
chown -R $user:$group /opt/lemur/ /home/lemur/.lemur/


WORKDIR /opt/lemur

RUN npm install --unsafe-perm && \
RUN echo "Running with python:" && python -c 'import platform; print(platform.python_version())' && \
echo "Running with nodejs:" && node -v && \
npm install --unsafe-perm && \
pip3 install -e . && \
node_modules/.bin/gulp build && \
node_modules/.bin/gulp package --urlContextPath=${URLCONTEXT} && \

@@ -1,9 +1,12 @@
version: '3'

volumes:
pg_data: { }

services:
postgres:
image: "postgres:10"
restart: always
image: "postgres:13.1-alpine"
restart: on-failure
volumes:
- pg_data:/var/lib/postgresql/data
env_file:

@@ -11,7 +14,9 @@ services:

lemur:
# image: "netlix-lemur:latest"
build: .
restart: on-failure
build:
context: .
depends_on:
- postgres
- redis

@@ -19,11 +24,9 @@ services:
- lemur-env
- pgsql-env
ports:
- 80:80
- 443:443
- 87:80
- 447:443

redis:
image: "redis:alpine"

volumes:
pg_data: {}
image: "redis:alpine3.12"
restart: on-failure
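
(Aside, not part of the diff: with the remapped ports above, a locally built compose stack would expose Lemur on host port 87 for HTTP and 447 for HTTPS. A hypothetical smoke test, assuming the stack is up and serving a locally generated, untrusted certificate:)

docker-compose up -d
curl -kI https://localhost:447    # -k skips verification of the locally generated certificate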

@@ -14,10 +14,10 @@ export LEMUR_ADMIN_PASSWORD="${LEMUR_ADMIN_PASSWORD:-admin}"
export SQLALCHEMY_DATABASE_URI="postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@$POSTGRES_HOST:$POSTGRES_PORT/$POSTGRES_DB"


PGPASSWORD=$POSTGRES_PASSWORD psql -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER -d $POSTGRES_DB --command 'select 1;'
PGPASSWORD=$POSTGRES_PASSWORD psql -h "$POSTGRES_HOST" -p "$POSTGRES_PORT" -U "$POSTGRES_USER" -d "$POSTGRES_DB" --command 'select 1;'

echo " # Create Postgres trgm extension"
PGPASSWORD=$POSTGRES_PASSWORD psql -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER -d $POSTGRES_DB --command 'CREATE EXTENSION IF NOT EXISTS pg_trgm;'
PGPASSWORD=$POSTGRES_PASSWORD psql -h "$POSTGRES_HOST" -p "$POSTGRES_PORT" -U "$POSTGRES_USER" -d "$POSTGRES_DB" --command 'CREATE EXTENSION IF NOT EXISTS pg_trgm;'
echo " # Done"

if [ -z "${SKIP_SSL}" ]; then
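
(Aside, not part of the diff: the quotes added around the psql connection variables above matter because an unquoted empty or whitespace-containing value changes the argument list the command receives. A minimal shell illustration with a hypothetical empty host value:)

POSTGRES_HOST=""
printf '<%s> ' -h $POSTGRES_HOST -p 5432; echo      # unquoted: <-h> <-p> <5432>
printf '<%s> ' -h "$POSTGRES_HOST" -p 5432; echo    # quoted:   <-h> <> <-p> <5432>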
@ -1,11 +1,18 @@
|
|||
import os
|
||||
import os.path
|
||||
import random
|
||||
import string
|
||||
from celery.schedules import crontab
|
||||
|
||||
import base64
|
||||
from ast import literal_eval
|
||||
|
||||
_basedir = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
# See the Lemur docs (https://lemur.readthedocs.org) for more information on configuration
|
||||
|
||||
LOG_LEVEL = str(os.environ.get('LOG_LEVEL', 'DEBUG'))
|
||||
LOG_FILE = str(os.environ.get('LOG_FILE', '/home/lemur/.lemur/lemur.log'))
|
||||
LOG_JSON = True
|
||||
|
||||
CORS = os.environ.get("CORS") == "True"
|
||||
debug = os.environ.get("DEBUG") == "True"
|
||||
|
||||
|
@@ -17,44 +24,214 @@ def get_random_secret(length):
return secret_key + ''.join(random.choice(string.digits) for x in range(round(length / 4)))


# This is the secret key used by Flask session management
SECRET_KEY = repr(os.environ.get('SECRET_KEY', get_random_secret(32).encode('utf8')))

# You should consider storing these separately from your config
LEMUR_TOKEN_SECRET = repr(os.environ.get('LEMUR_TOKEN_SECRET',
base64.b64encode(get_random_secret(32).encode('utf8'))))
# This must match the key for whichever DB the container is using - this could be a dump of dev or test, or a unique key
LEMUR_ENCRYPTION_KEYS = repr(os.environ.get('LEMUR_ENCRYPTION_KEYS',
base64.b64encode(get_random_secret(32).encode('utf8'))))
base64.b64encode(get_random_secret(32).encode('utf8')).decode('utf8')))

LEMUR_ALLOWED_DOMAINS = []
REDIS_HOST = 'redis'
REDIS_PORT = 6379
REDIS_DB = 0
CELERY_RESULT_BACKEND = f'redis://{REDIS_HOST}:{REDIS_PORT}'
CELERY_BROKER_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}'
CELERY_IMPORTS = ('lemur.common.celery')
CELERYBEAT_SCHEDULE = {
|
||||
# All tasks are disabled by default. Enable any tasks you wish to run.
|
||||
# 'fetch_all_pending_acme_certs': {
|
||||
# 'task': 'lemur.common.celery.fetch_all_pending_acme_certs',
|
||||
# 'options': {
|
||||
# 'expires': 180
|
||||
# },
|
||||
# 'schedule': crontab(minute="*"),
|
||||
# },
|
||||
# 'remove_old_acme_certs': {
|
||||
# 'task': 'lemur.common.celery.remove_old_acme_certs',
|
||||
# 'options': {
|
||||
# 'expires': 180
|
||||
# },
|
||||
# 'schedule': crontab(hour=8, minute=0, day_of_week=5),
|
||||
# },
|
||||
# 'clean_all_sources': {
|
||||
# 'task': 'lemur.common.celery.clean_all_sources',
|
||||
# 'options': {
|
||||
# 'expires': 180
|
||||
# },
|
||||
# 'schedule': crontab(hour=5, minute=0, day_of_week=5),
|
||||
# },
|
||||
# 'sync_all_sources': {
|
||||
# 'task': 'lemur.common.celery.sync_all_sources',
|
||||
# 'options': {
|
||||
# 'expires': 180
|
||||
# },
|
||||
# 'schedule': crontab(hour="*/2", minute=0),
|
||||
# # this job is running 30min before endpoints_expire which deletes endpoints which were not updated
|
||||
# },
|
||||
# 'sync_source_destination': {
|
||||
# 'task': 'lemur.common.celery.sync_source_destination',
|
||||
# 'options': {
|
||||
# 'expires': 180
|
||||
# },
|
||||
# 'schedule': crontab(hour="*/2", minute=15),
|
||||
# },
|
||||
# 'report_celery_last_success_metrics': {
|
||||
# 'task': 'lemur.common.celery.report_celery_last_success_metrics',
|
||||
# 'options': {
|
||||
# 'expires': 180
|
||||
# },
|
||||
# 'schedule': crontab(minute="*"),
|
||||
# },
|
||||
# 'certificate_reissue': {
|
||||
# 'task': 'lemur.common.celery.certificate_reissue',
|
||||
# 'options': {
|
||||
# 'expires': 180
|
||||
# },
|
||||
# 'schedule': crontab(hour=9, minute=0),
|
||||
# },
|
||||
# 'certificate_rotate': {
|
||||
# 'task': 'lemur.common.celery.certificate_rotate',
|
||||
# 'options': {
|
||||
# 'expires': 180
|
||||
# },
|
||||
# 'schedule': crontab(hour=10, minute=0),
|
||||
# },
|
||||
# 'endpoints_expire': {
|
||||
# 'task': 'lemur.common.celery.endpoints_expire',
|
||||
# 'options': {
|
||||
# 'expires': 180
|
||||
# },
|
||||
# 'schedule': crontab(hour="*/2", minute=30),
|
||||
# # this job is running 30min after sync_all_sources which updates endpoints
|
||||
# },
|
||||
# 'get_all_zones': {
|
||||
# 'task': 'lemur.common.celery.get_all_zones',
|
||||
# 'options': {
|
||||
# 'expires': 180
|
||||
# },
|
||||
# 'schedule': crontab(minute="*/30"),
|
||||
# },
|
||||
# 'check_revoked': {
|
||||
# 'task': 'lemur.common.celery.check_revoked',
|
||||
# 'options': {
|
||||
# 'expires': 180
|
||||
# },
|
||||
# 'schedule': crontab(hour=10, minute=0),
|
||||
# }
|
||||
# 'enable_autorotate_for_certs_attached_to_endpoint': {
|
||||
# 'task': 'lemur.common.celery.enable_autorotate_for_certs_attached_to_endpoint',
|
||||
# 'options': {
|
||||
# 'expires': 180
|
||||
# },
|
||||
# 'schedule': crontab(hour=10, minute=0),
|
||||
# }
|
||||
# 'notify_expirations': {
|
||||
# 'task': 'lemur.common.celery.notify_expirations',
|
||||
# 'options': {
|
||||
# 'expires': 180
|
||||
# },
|
||||
# 'schedule': crontab(hour=10, minute=0),
|
||||
# },
|
||||
# 'notify_authority_expirations': {
|
||||
# 'task': 'lemur.common.celery.notify_authority_expirations',
|
||||
# 'options': {
|
||||
# 'expires': 180
|
||||
# },
|
||||
# 'schedule': crontab(hour=10, minute=0),
|
||||
# },
|
||||
# 'send_security_expiration_summary': {
|
||||
# 'task': 'lemur.common.celery.send_security_expiration_summary',
|
||||
# 'options': {
|
||||
# 'expires': 180
|
||||
# },
|
||||
# 'schedule': crontab(hour=10, minute=0, day_of_week='mon-fri'),
|
||||
# }
|
||||
}
|
||||
CELERY_TIMEZONE = 'UTC'
|
||||
|
||||
LEMUR_EMAIL = ''
|
||||
LEMUR_SECURITY_TEAM_EMAIL = []
|
||||
SQLALCHEMY_ENABLE_FLASK_REPLICATED = False
|
||||
SQLALCHEMY_DATABASE_URI = os.environ.get('SQLALCHEMY_DATABASE_URI', 'postgresql://lemur:lemur@localhost:5432/lemur')
|
||||
|
||||
ALLOW_CERT_DELETION = os.environ.get('ALLOW_CERT_DELETION') == "True"
|
||||
SQLALCHEMY_TRACK_MODIFICATIONS = False
|
||||
SQLALCHEMY_ECHO = True
|
||||
SQLALCHEMY_POOL_RECYCLE = 499
|
||||
SQLALCHEMY_POOL_TIMEOUT = 20
|
||||
|
||||
LEMUR_DEFAULT_COUNTRY = str(os.environ.get('LEMUR_DEFAULT_COUNTRY',''))
|
||||
LEMUR_DEFAULT_STATE = str(os.environ.get('LEMUR_DEFAULT_STATE',''))
|
||||
LEMUR_DEFAULT_LOCATION = str(os.environ.get('LEMUR_DEFAULT_LOCATION',''))
|
||||
LEMUR_DEFAULT_ORGANIZATION = str(os.environ.get('LEMUR_DEFAULT_ORGANIZATION',''))
|
||||
LEMUR_DEFAULT_ORGANIZATIONAL_UNIT = str(os.environ.get('LEMUR_DEFAULT_ORGANIZATIONAL_UNIT',''))
|
||||
LEMUR_EMAIL = 'lemur@example.com'
|
||||
LEMUR_SECURITY_TEAM_EMAIL = ['security@example.com']
|
||||
LEMUR_SECURITY_TEAM_EMAIL_INTERVALS = [15, 2]
|
||||
LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS = [30, 15, 2]
|
||||
LEMUR_EMAIL_SENDER = 'smtp'
|
||||
|
||||
LEMUR_DEFAULT_ISSUER_PLUGIN = str(os.environ.get('LEMUR_DEFAULT_ISSUER_PLUGIN',''))
|
||||
LEMUR_DEFAULT_AUTHORITY = str(os.environ.get('LEMUR_DEFAULT_AUTHORITY',''))
|
||||
# mail configuration
|
||||
# MAIL_SERVER = 'mail.example.com'
|
||||
|
||||
PUBLIC_CA_MAX_VALIDITY_DAYS = 397
|
||||
DEFAULT_VALIDITY_DAYS = 365
|
||||
|
||||
LEMUR_OWNER_EMAIL_IN_SUBJECT = False
|
||||
|
||||
LEMUR_DEFAULT_COUNTRY = str(os.environ.get('LEMUR_DEFAULT_COUNTRY', 'US'))
|
||||
LEMUR_DEFAULT_STATE = str(os.environ.get('LEMUR_DEFAULT_STATE', 'California'))
|
||||
LEMUR_DEFAULT_LOCATION = str(os.environ.get('LEMUR_DEFAULT_LOCATION', 'Los Gatos'))
|
||||
LEMUR_DEFAULT_ORGANIZATION = str(os.environ.get('LEMUR_DEFAULT_ORGANIZATION', 'Example, Inc.'))
|
||||
LEMUR_DEFAULT_ORGANIZATIONAL_UNIT = str(os.environ.get('LEMUR_DEFAULT_ORGANIZATIONAL_UNIT', ''))
|
||||
|
||||
LEMUR_DEFAULT_AUTHORITY = str(os.environ.get('LEMUR_DEFAULT_AUTHORITY', 'ExampleCa'))
|
||||
|
||||
LEMUR_DEFAULT_ROLE = 'operator'
|
||||
|
||||
ACTIVE_PROVIDERS = []
|
||||
|
||||
METRIC_PROVIDERS = []
|
||||
|
||||
LOG_LEVEL = str(os.environ.get('LOG_LEVEL','DEBUG'))
|
||||
LOG_FILE = str(os.environ.get('LOG_FILE','/home/lemur/.lemur/lemur.log'))
|
||||
# Authority Settings - These will change depending on which authorities you are
|
||||
# using
|
||||
current_path = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
SQLALCHEMY_DATABASE_URI = os.environ.get('SQLALCHEMY_DATABASE_URI','postgresql://lemur:lemur@localhost:5432/lemur')
|
||||
# DNS Settings
|
||||
|
||||
LDAP_DEBUG = os.environ.get('LDAP_DEBUG') == "True"
|
||||
LDAP_AUTH = os.environ.get('LDAP_AUTH') == "True"
|
||||
LDAP_IS_ACTIVE_DIRECTORY = os.environ.get('LDAP_IS_ACTIVE_DIRECTORY') == "True"
|
||||
LDAP_BIND_URI = str(os.environ.get('LDAP_BIND_URI',''))
|
||||
LDAP_BASE_DN = str(os.environ.get('LDAP_BASE_DN',''))
|
||||
LDAP_EMAIL_DOMAIN = str(os.environ.get('LDAP_EMAIL_DOMAIN',''))
|
||||
LDAP_USE_TLS = str(os.environ.get('LDAP_USE_TLS',''))
|
||||
LDAP_REQUIRED_GROUP = str(os.environ.get('LDAP_REQUIRED_GROUP',''))
|
||||
LDAP_GROUPS_TO_ROLES = literal_eval(os.environ.get('LDAP_GROUPS_TO_ROLES') or "{}")
|
||||
# exclude logging missing SAN, since we can have certs from private CAs with only cn, prod parity
|
||||
LOG_SSL_SUBJ_ALT_NAME_ERRORS = False
|
||||
|
||||
ACME_DNS_PROVIDER_TYPES = {"items": [
|
||||
{
|
||||
'name': 'route53',
|
||||
'requirements': [
|
||||
{
|
||||
'name': 'account_id',
|
||||
'type': 'int',
|
||||
'required': True,
|
||||
'helpMessage': 'AWS Account number'
|
||||
},
|
||||
]
|
||||
},
|
||||
{
|
||||
'name': 'cloudflare',
|
||||
'requirements': [
|
||||
{
|
||||
'name': 'email',
|
||||
'type': 'str',
|
||||
'required': True,
|
||||
'helpMessage': 'Cloudflare Email'
|
||||
},
|
||||
{
|
||||
'name': 'key',
|
||||
'type': 'str',
|
||||
'required': True,
|
||||
'helpMessage': 'Cloudflare Key'
|
||||
},
|
||||
]
|
||||
},
|
||||
{
|
||||
'name': 'dyn',
|
||||
},
|
||||
{
|
||||
'name': 'ultradns',
|
||||
},
|
||||
]}
|
||||
|
||||
# Authority plugins which support revocation
|
||||
SUPPORTED_REVOCATION_AUTHORITY_PLUGINS = ['acme-issuer']
|
||||
|
|
|
@ -28,6 +28,13 @@ Basic Configuration
|
|||
|
||||
LOG_FILE = "/logs/lemur/lemur-test.log"
|
||||
|
||||
.. data:: LOG_UPGRADE_FILE
|
||||
:noindex:
|
||||
|
||||
::
|
||||
|
||||
LOG_UPGRADE_FILE = "/logs/lemur/db_upgrade.log"
|
||||
|
||||
.. data:: DEBUG
|
||||
:noindex:
|
||||
|
||||
|
@@ -71,13 +78,13 @@ Basic Configuration
The default connection pool size is 5 for sqlalchemy-managed connections. Depending on the number of Lemur instances,
please specify a per-instance connection pool size. Below is an example setting the connection pool size to 10.

::

SQLALCHEMY_POOL_SIZE = 10


.. warning::
This is an optional setting but important to review and set for optimal database connection usage and for overall database performance.

.. data:: SQLALCHEMY_MAX_OVERFLOW
:noindex:

@@ -92,7 +99,7 @@ This is an optional setting but important to review and set for optimal database

.. note::
Setting `SQLALCHEMY_MAX_OVERFLOW` to 0 enforces a limit so that no connections are created above the specified pool size.


.. data:: LEMUR_ALLOW_WEEKEND_EXPIRATION
@ -144,6 +151,15 @@ Specifying the `SQLALCHEMY_MAX_OVERFLOW` to 0 will enforce limit to not create c
|
|||
to start. Multiple keys can be provided to facilitate key rotation. The first key in the list is used for
|
||||
encryption and all keys are tried for decryption until one works. Each key must be 32 URL safe base-64 encoded bytes.
|
||||
|
||||
Only fields of type ``Vault`` will be encrypted. At present, only the following fields are encrypted:
|
||||
|
||||
* ``certificates.private_key``
|
||||
* ``pending_certificates.private_key``
|
||||
* ``dns_providers.credentials``
|
||||
* ``roles.password``
|
||||
|
||||
For implementation details, see ``Vault`` in ``utils.py``.
|
||||
|
||||
Running lemur create_config will securely generate a key for your configuration file.
|
||||
If you would like to generate your own, we recommend the following method:
|
||||
|
||||
|
@ -158,6 +174,7 @@ Specifying the `SQLALCHEMY_MAX_OVERFLOW` to 0 will enforce limit to not create c
|
|||
|
||||
.. data:: PUBLIC_CA_MAX_VALIDITY_DAYS
|
||||
:noindex:
|
||||
|
||||
Use this config to override the limit of 397 days of validity for certificates issued by CA/Browser compliant authorities.
|
||||
The authorities with cab_compliant option set to true will use this config. The example below overrides the default validity
|
||||
of 397 days and sets it to 365 days.
|
||||
|
@ -169,6 +186,7 @@ Specifying the `SQLALCHEMY_MAX_OVERFLOW` to 0 will enforce limit to not create c
|
|||
|
||||
.. data:: DEFAULT_VALIDITY_DAYS
|
||||
:noindex:
|
||||
|
||||
Use this config to override the default validity of 365 days for certificates offered through the Lemur UI. Any CA which
is not CA/Browser Forum compliant will use this value as the default validity displayed in the UI. Please
note that this config is used for cert issuance only through the Lemur UI. The example below overrides the default validity
|
||||
|
@ -191,6 +209,11 @@ Specifying the `SQLALCHEMY_MAX_OVERFLOW` to 0 will enforce limit to not create c
|
|||
in the UI. When set to False (the default), the certificate delete API will always return "405 method not allowed"
|
||||
and deleted certificates will always be visible in the UI. (default: `False`)
|
||||
|
||||
.. data:: LEMUR_AWS_REGION
|
||||
:noindex:
|
||||
|
||||
This is an optional config applicable for settings where Lemur is deployed in AWS. For accessing regionalized
|
||||
STS endpoints, LEMUR_AWS_REGION defines the region where Lemur is deployed.
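
For example, assuming a hypothetical deployment in the us-west-2 region (the region value here is purely illustrative), the setting might look like:

::

LEMUR_AWS_REGION = "us-west-2"
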
|
||||
|
||||
Certificate Default Options
|
||||
---------------------------
|
||||
|
@ -255,22 +278,123 @@ and are used when Lemur creates the CSR for your certificates.
|
|||
LEMUR_DEFAULT_AUTHORITY = "verisign"
|
||||
|
||||
|
||||
.. _NotificationOptions:
|
||||
|
||||
Notification Options
|
||||
--------------------
|
||||
|
||||
Lemur currently has very basic support for notifications. Currently only expiration notifications are supported. Actual notification
|
||||
is handled by the notification plugins that you have configured. Lemur ships with the 'Email' notification that allows expiration emails
|
||||
to be sent to subscribers.
|
||||
Lemur supports a small variety of notification types through a set of notification plugins.
|
||||
By default, Lemur configures a standard set of email notifications for all certificates.
|
||||
|
||||
Templates for expiration emails are located under `lemur/plugins/lemur_email/templates` and can be modified for your needs.
|
||||
Notifications are sent to the certificate creator, owner and security team as specified by the `LEMUR_SECURITY_TEAM_EMAIL` configuration parameter.
|
||||
**Plugin-capable notifications**
|
||||
|
||||
Certificates marked as inactive will **not** be notified of upcoming expiration. This enables a user to essentially
|
||||
silence the expiration. If a certificate is active and is expiring the above will be notified according to the `LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS` or
|
||||
30, 15, 2 days before expiration if no intervals are set.
|
||||
These notifications can be configured to use all available notification plugins.
|
||||
|
||||
Lemur supports sending certificate expiration notifications through SES and SMTP.
|
||||
Supported types:
|
||||
|
||||
* Certificate expiration
|
||||
|
||||
**Email-only notifications**
|
||||
|
||||
These notifications can only be sent via email and cannot use other notification plugins.
|
||||
|
||||
Supported types:
|
||||
|
||||
* CA certificate expiration
|
||||
* Pending ACME certificate failure
|
||||
* Certificate rotation
|
||||
* Security certificate expiration summary
|
||||
|
||||
**Default notifications**
|
||||
|
||||
When a certificate is created, the following email notifications are created for it if they do not exist.
|
||||
If these notifications already exist, they will be associated with the new certificate.
|
||||
|
||||
* ``DEFAULT_<OWNER>_X_DAY``, where X is the set of values specified in ``LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS`` and defaults to 30, 15, and 2 if not specified. The owner's username will replace ``<OWNER>``.
|
||||
* ``DEFAULT_SECURITY_X_DAY``, where X is the set of values specified in ``LEMUR_SECURITY_TEAM_EMAIL_INTERVALS`` and defaults to ``LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS`` if not specified (which also defaults to 30, 15, and 2 if not specified).
|
||||
|
||||
These notifications can be disabled if desired. They can also be unassociated with a specific certificate.
|
||||
|
||||
**Disabling notifications**
|
||||
|
||||
Notifications can be disabled either for an individual certificate (which disables all notifications for that certificate)
|
||||
or for an individual notification object (which disables that notification for all associated certificates).
|
||||
At present, disabling a notification object will only disable certificate expiration notifications, and not other types,
|
||||
since other notification types don't use notification objects.
|
||||
|
||||
**Certificate expiration**
|
||||
|
||||
Certificate expiration notifications are sent when the scheduled task to send certificate expiration notifications runs
|
||||
(see :ref:`PeriodicTasks`). Specific patterns of certificate names may be excluded using ``--exclude`` (when using
|
||||
cron; you may specify this multiple times for multiple patterns) or via the config option ``EXCLUDE_CN_FROM_NOTIFICATION``
|
||||
(when using celery; this is a list configuration option, meaning you specify multiple values, such as
|
||||
``['exclude', 'also exclude']``). The specified exclude pattern will match if found anywhere in the certificate name.
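
For illustration only (the pattern values below are hypothetical), a celery-based deployment might set:

::

EXCLUDE_CN_FROM_NOTIFICATION = ['test-', 'staging']
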
|
||||
|
||||
When the periodic task runs, Lemur checks for certificates meeting the following conditions:
|
||||
|
||||
* Certificate has notifications enabled
|
||||
* Certificate is not expired
|
||||
* Certificate is not revoked
|
||||
* Certificate name does not match the `exclude` parameter
|
||||
* Certificate has at least one associated notification object
|
||||
* That notification is active
|
||||
* That notification's configured interval and unit match the certificate's remaining lifespan
|
||||
|
||||
All eligible certificates are then grouped by owner and applicable notification. For each notification and certificate group,
|
||||
Lemur will send the expiration notification using whichever plugin was configured for that notification object.
|
||||
In addition, Lemur will send an email to the certificate owner and security team (as specified by the
|
||||
``LEMUR_SECURITY_TEAM_EMAIL`` configuration parameter).
|
||||
|
||||
**CA certificate expiration**
|
||||
|
||||
Certificate authority certificate expiration notifications are sent when the scheduled task to send authority certificate
|
||||
expiration notifications runs (see :ref:`PeriodicTasks`). Notifications are sent via the intervals configured in the
|
||||
configuration parameter ``LEMUR_AUTHORITY_CERT_EXPIRATION_EMAIL_INTERVALS``, with a default of 365 and 180 days.
|
||||
|
||||
When the periodic task runs, Lemur checks for certificates meeting the following conditions:
|
||||
|
||||
* Certificate has notifications enabled
|
||||
* Certificate is not expired
|
||||
* Certificate is not revoked
|
||||
* Certificate is associated with a CA
|
||||
* Certificate's remaining lifespan matches one of the configured intervals
|
||||
|
||||
All eligible certificates are then grouped by owner and expiration interval. For each interval and certificate group,
|
||||
Lemur will send the CA certificate expiration notification via email to the certificate owner and security team
|
||||
(as specified by the ``LEMUR_SECURITY_TEAM_EMAIL`` configuration parameter).
|
||||
|
||||
**Pending ACME certificate failure**
|
||||
|
||||
Whenever a pending ACME certificate fails to be issued, Lemur will send a notification via email to the certificate owner
|
||||
and security team (as specified by the ``LEMUR_SECURITY_TEAM_EMAIL`` configuration parameter). This email is not sent if
|
||||
the pending certificate had notifications disabled.
|
||||
|
||||
Lemur will attempt three times to resolve a pending certificate.
This can occasionally result in three duplicate certificates, if all certificate attempts get resolved.
|
||||
|
||||
**Certificate rotation**
|
||||
|
||||
Whenever a cert is rotated, Lemur will send a notification via email to the certificate owner. This notification is
|
||||
disabled by default; to enable it, you must set the option ``--notify`` (when using cron) or the configuration parameter
|
||||
``ENABLE_ROTATION_NOTIFICATION`` (when using celery).
|
||||
|
||||
**Security certificate expiration summary**
|
||||
|
||||
If you enable the Celery or cron task to send this notification type, Lemur will send a summary of all
|
||||
certificates with upcoming expiration date that occurs within the number of days specified by the
|
||||
``LEMUR_EXPIRATION_SUMMARY_EMAIL_THRESHOLD_DAYS`` configuration parameter (with a fallback of 14 days).
|
||||
Note that certificates will be included in this summary even if they do not have any associated notifications.
|
||||
|
||||
This notification type also supports the same ``--exclude`` and ``EXCLUDE_CN_FROM_NOTIFICATION`` options as expiration emails.
|
||||
|
||||
NOTE: At present, this summary email essentially duplicates the certificate expiration notifications, since all
|
||||
certificate expiration notifications are also sent to the security team. This issue will be fixed in the future.
|
||||
|
||||
**Email notifications**
|
||||
|
||||
Templates for emails are located under `lemur/plugins/lemur_email/templates` and can be modified for your needs.
|
||||
|
||||
The following configuration options are supported:
|
||||
|
||||
.. data:: LEMUR_EMAIL_SENDER
|
||||
:noindex:
|
||||
|
@ -285,6 +409,25 @@ Lemur supports sending certification expiration notifications through SES and SM
|
|||
you can send any mail. See: `Verifying Email Address in Amazon SES <http://docs.aws.amazon.com/ses/latest/DeveloperGuide/verify-email-addresses.html>`_
|
||||
|
||||
|
||||
.. data:: LEMUR_SES_SOURCE_ARN
|
||||
:noindex:
|
||||
|
||||
Specifies an ARN to use as the SourceArn when sending emails via SES.
|
||||
|
||||
.. note::
|
||||
This parameter is only required if you're using a sending authorization with SES.
|
||||
See: `Using sending authorization with Amazon SES <https://docs.aws.amazon.com/ses/latest/DeveloperGuide/sending-authorization.html>`_
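
For example (the account ID and identity below are placeholders, not real values):

::

LEMUR_SES_SOURCE_ARN = "arn:aws:ses:us-east-1:123456789012:identity/lemur.example.com"
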
|
||||
|
||||
|
||||
.. data:: LEMUR_SES_REGION
|
||||
:noindex:
|
||||
|
||||
Specifies a region for sending emails via SES.
|
||||
|
||||
.. note::
|
||||
This parameter defaults to us-east-1 and is only required if you wish to use a different region.
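
For example, to send via a hypothetical deployment's eu-west-1 SES endpoint:

::

LEMUR_SES_REGION = "eu-west-1"
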
|
||||
|
||||
|
||||
.. data:: LEMUR_EMAIL
|
||||
:noindex:
|
||||
|
||||
|
@ -292,7 +435,7 @@ Lemur supports sending certification expiration notifications through SES and SM
|
|||
|
||||
::
|
||||
|
||||
LEMUR_EMAIL = 'lemur.example.com'
|
||||
LEMUR_EMAIL = 'lemur@example.com'
|
||||
|
||||
|
||||
.. data:: LEMUR_SECURITY_TEAM_EMAIL
|
||||
|
@ -307,7 +450,7 @@ Lemur supports sending certification expiration notifications through SES and SM
|
|||
.. data:: LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS
|
||||
:noindex:
|
||||
|
||||
Lemur notification intervals
|
||||
Lemur notification intervals. If unspecified, the value [30, 15, 2] is used.
|
||||
|
||||
::
|
||||
|
||||
|
@ -322,6 +465,15 @@ Lemur supports sending certification expiration notifications through SES and SM
|
|||
|
||||
LEMUR_SECURITY_TEAM_EMAIL_INTERVALS = [15, 2]
|
||||
|
||||
.. data:: LEMUR_AUTHORITY_CERT_EXPIRATION_EMAIL_INTERVALS
|
||||
:noindex:
|
||||
|
||||
Notification interval set for CA certificate expiration notifications. If unspecified, the value [365, 180] is used (roughly one year and 6 months).
|
||||
|
||||
::
|
||||
|
||||
LEMUR_AUTHORITY_CERT_EXPIRATION_EMAIL_INTERVALS = [365, 180]
|
||||
|
||||
|
||||
Celery Options
|
||||
---------------
|
||||
|
@ -567,6 +719,33 @@ For more information about how to use social logins, see: `Satellizer <https://g
|
|||
|
||||
PING_AUTH_ENDPOINT = "https://<yourpingserver>/oauth2/authorize"
|
||||
|
||||
.. data:: PING_USER_MEMBERSHIP_URL
|
||||
:noindex:
|
||||
|
||||
An optional additional endpoint used to retrieve membership details after user validation.
|
||||
|
||||
::
|
||||
|
||||
PING_USER_MEMBERSHIP_URL = "https://<yourmembershipendpoint>"
|
||||
|
||||
.. data:: PING_USER_MEMBERSHIP_TLS_PROVIDER
|
||||
:noindex:
|
||||
|
||||
A custom TLS session provider plugin name
|
||||
|
||||
::
|
||||
|
||||
PING_USER_MEMBERSHIP_TLS_PROVIDER = "slug-name"
|
||||
|
||||
.. data:: PING_USER_MEMBERSHIP_SERVICE
|
||||
:noindex:
|
||||
|
||||
Membership service name used by PING_USER_MEMBERSHIP_TLS_PROVIDER to create a session
|
||||
|
||||
::
|
||||
|
||||
PING_USER_MEMBERSHIP_SERVICE = "yourmembershipservice"
|
||||
|
||||
.. data:: OAUTH2_SECRET
|
||||
:noindex:
|
||||
|
||||
|
@ -664,6 +843,45 @@ If you are not using a metric provider you do not need to configure any of these
|
|||
Plugin Specific Options
|
||||
-----------------------
|
||||
|
||||
ACME Plugin
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
.. data:: ACME_DNS_PROVIDER_TYPES
|
||||
:noindex:
|
||||
|
||||
Dictionary of ACME DNS Providers and their requirements.
|
||||
|
||||
.. data:: ACME_ENABLE_DELEGATED_CNAME
|
||||
:noindex:
|
||||
|
||||
Enables delegated DNS domain validation using CNAMES. When enabled, Lemur will attempt to follow CNAME records to authoritative DNS servers when creating DNS-01 challenges.
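
Assuming this behaves as a simple boolean flag, enabling it would look like:

::

ACME_ENABLE_DELEGATED_CNAME = True
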
|
||||
|
||||
|
||||
The following configuration properties are optional for the ACME plugin to use. They allow reusing an existing ACME
|
||||
account. See :ref:`Using a pre-existing ACME account <AcmeAccountReuse>` for more details.
|
||||
|
||||
|
||||
.. data:: ACME_PRIVATE_KEY
|
||||
:noindex:
|
||||
|
||||
This is the private key that the account was registered with (in JWK format).
|
||||
|
||||
.. data:: ACME_REGR
|
||||
:noindex:
|
||||
|
||||
This is the registration for the ACME account; the most important part is the uri attribute (in JSON).
|
||||
|
||||
.. data:: ACME_PREFERRED_ISSUER
|
||||
:noindex:
|
||||
|
||||
This is an optional parameter to indicate the preferred chain to retrieve from ACME when finalizing the order.
This is applicable to Let's Encrypt's recent `migration <https://letsencrypt.org/certificates/>`_ to their
own root, where they provide two distinct certificate chains (fullchain_pem vs. alternative_fullchains_pem);
the main chain will be the long chain that is rooted in the expiring DST root, whereas the alternative chain
is rooted in the X1 root CA.
Select "X1" to get the shorter chain (currently the alternative), or leave blank or set "DST Root CA X3" for the longer chain.
|
||||
|
||||
|
||||
Active Directory Certificate Services Plugin
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
@ -704,10 +922,12 @@ Active Directory Certificate Services Plugin
|
|||
|
||||
.. data:: ADCS_START
|
||||
:noindex:
|
||||
|
||||
Used by the ADCS source plugin. Minimum ID of the first certificate to be returned. The ID is increased by one until ADCS_STOP; missing cert IDs are ignored.
|
||||
|
||||
.. data:: ADCS_STOP
|
||||
:noindex:
|
||||
|
||||
Used by the ADCS source plugin. Maximum ID of the certificates returned.
|
||||
|
||||
|
||||
|
@ -785,6 +1005,26 @@ The following parameters have to be set in the configuration files.
|
|||
|
||||
If a config variable ENTRUST_PRODUCT_<upper(authority.name)> exists, its value is used as the certificate product name; otherwise the product name defaults to "STANDARD_SSL". Refer to the API documentation for valid product names.
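
For example, for a hypothetical authority named "EntrustProd", using the default product mentioned above:

::

ENTRUST_PRODUCT_ENTRUSTPROD = "STANDARD_SSL"
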
|
||||
|
||||
|
||||
.. data:: ENTRUST_CROSS_SIGNED_RSA_L1K
|
||||
:noindex:
|
||||
|
||||
This is optional. Entrust provides support for cross-signed sub-CAs. One can set ENTRUST_CROSS_SIGNED_RSA_L1K to the respective cross-signed RSA-based subCA PEM and Lemur will replace the retrieved subCA with ENTRUST_CROSS_SIGNED_RSA_L1K.
|
||||
|
||||
|
||||
.. data:: ENTRUST_CROSS_SIGNED_ECC_L1F
|
||||
:noindex:
|
||||
|
||||
This is optional. Entrust provides support for cross-signed sub-CAs. One can set ENTRUST_CROSS_SIGNED_ECC_L1F to the respective cross-signed EC-based subCA PEM and Lemur will replace the retrieved subCA with ENTRUST_CROSS_SIGNED_ECC_L1F.
|
||||
|
||||
|
||||
.. data:: ENTRUST_USE_DEFAULT_CLIENT_ID
|
||||
:noindex:
|
||||
|
||||
If set to True, Entrust will use the primary client ID of 1, which applies to most use cases.
Otherwise, Entrust will first look up the clientId before ordering the certificate.
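
For example, to skip the clientId lookup and always use the primary client ID:

::

ENTRUST_USE_DEFAULT_CLIENT_ID = True
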
|
||||
|
||||
|
||||
Verisign Issuer Plugin
|
||||
~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
@ -1166,23 +1406,6 @@ The following configuration properties are required to use the PowerDNS ACME Plu
|
|||
|
||||
File/Dir path to CA Bundle: Verifies the TLS certificate was issued by a Certificate Authority in the provided CA bundle.
|
||||
|
||||
ACME Plugin
|
||||
~~~~~~~~~~~~
|
||||
|
||||
The following configration properties are optional for the ACME plugin to use. They allow reusing an existing ACME
|
||||
account. See :ref:`Using a pre-existing ACME account <AcmeAccountReuse>` for more details.
|
||||
|
||||
|
||||
.. data:: ACME_PRIVATE_KEY
|
||||
:noindex:
|
||||
|
||||
This is the private key, the account was registered with (in JWK format)
|
||||
|
||||
.. data:: ACME_REGR
|
||||
:noindex:
|
||||
|
||||
This is the registration for the ACME account, the most important part is the uri attribute (in JSON)
|
||||
|
||||
.. _CommandLineInterface:
|
||||
|
||||
Command Line Interface
|
||||
|
@ -1436,8 +1659,8 @@ Slack
|
|||
Adds support for slack notifications.
|
||||
|
||||
|
||||
AWS
|
||||
----
|
||||
AWS (Source)
|
||||
------------
|
||||
|
||||
:Authors:
|
||||
Kevin Glisson <kglisson@netflix.com>,
|
||||
|
@ -1449,8 +1672,8 @@ AWS
|
|||
Uses AWS IAM as a source of certificates to manage. Supports a multi-account deployment.
|
||||
|
||||
|
||||
AWS
|
||||
----
|
||||
AWS (Destination)
|
||||
-----------------
|
||||
|
||||
:Authors:
|
||||
Kevin Glisson <kglisson@netflix.com>,
|
||||
|
@ -1462,6 +1685,19 @@ AWS
|
|||
Uses AWS IAM as a destination for Lemur-generated certificates. Supports a multi-account deployment.
|
||||
|
||||
|
||||
AWS (SNS Notification)
|
||||
----------------------
|
||||
|
||||
:Authors:
|
||||
Jasmine Schladen <jschladen@netflix.com>
|
||||
:Type:
|
||||
Notification
|
||||
:Description:
|
||||
Adds support for SNS notifications. SNS notifications (like other notification plugins) are currently only supported
|
||||
for certificate expiration. Configuration requires a region, account number, and SNS topic name; these elements
|
||||
are then combined to build the topic ARN. Lemur must have access to publish messages to the specified SNS topic.
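
As a rough sketch of how those three elements combine (the variable names and values here are illustrative; the real ones come from the notification's plugin options):

::

# illustrative values only
region = "us-east-1"
account_number = "123456789012"
topic_name = "lemur-expiration-notices"
topic_arn = f"arn:aws:sns:{region}:{account_number}:{topic_name}"
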
|
||||
|
||||
|
||||
Kubernetes
|
||||
----------
|
||||
|
||||
|
|
|
@ -32,6 +32,9 @@ if on_rtd:
|
|||
MOCK_MODULES = ["ldap"]
|
||||
sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
|
||||
|
||||
autodoc_mock_imports = ["python-ldap", "acme", "certsrv", "dnspython3", "dyn", "factory-boy", "flask_replicated",
|
||||
"josepy", "logmatic", "pem"]
|
||||
|
||||
# -- General configuration ------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
|
@ -146,7 +149,7 @@ if not on_rtd: # only import and set the theme if we're building docs locally
|
|||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ["_static"]
|
||||
# html_static_path = ["_static"]
|
||||
|
||||
# Add any extra paths that contain custom files (such as robots.txt or
|
||||
# .htaccess) here, relative to this directory. These files are copied
|
||||
|
|
|
@ -43,18 +43,35 @@ Building Documentation
|
|||
Inside the ``docs`` directory, you can run ``make`` to build the documentation.
|
||||
See ``make help`` for available options and the `Sphinx Documentation <http://sphinx-doc.org/contents.html>`_ for more information.
|
||||
|
||||
Adding New Modules to Documentation
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
When a new module is added, it will need to be added to the documentation.
|
||||
Ideally, we might rely on `sphinx-apidoc <https://www.sphinx-doc.org/en/master/man/sphinx-apidoc.html>`_ to autogenerate our documentation.
|
||||
Unfortunately, this causes some build problems.
|
||||
Instead, you'll need to add new modules by hand.
|
||||
|
||||
Developing Against HEAD
|
||||
-----------------------
|
||||
|
||||
We try to make it easy to get up and running in a development environment using a git checkout
|
||||
of Lemur. You'll want to make sure you have a few things on your local system first:
|
||||
of Lemur. There are two ways to run Lemur locally: directly on your development machine, or
|
||||
in a Docker container.
|
||||
|
||||
**Running in a Docker container**
|
||||
|
||||
Look at the `lemur-docker <https://github.com/Netflix/lemur-docker>`_ project.
|
||||
Usage instructions are self-contained in the README for that project.
|
||||
|
||||
**Running directly on your development machine**
|
||||
|
||||
You'll want to make sure you have a few things on your local system first:
|
||||
|
||||
* python-dev (if you're on OS X, you already have this)
|
||||
* pip
|
||||
* virtualenv (ideally virtualenvwrapper)
|
||||
* node.js (for npm and building css/javascript)
|
||||
+* `PostgreSQL <https://lemur.readthedocs.io/en/latest/quickstart/index.html#setup-postgres>`_
|
||||
* `PostgreSQL <https://lemur.readthedocs.io/en/latest/quickstart/index.html#setup-postgres>`_
|
||||
|
||||
Once you've got all that, the rest is simple:
|
||||
|
||||
|
@ -99,7 +116,9 @@ You'll likely want to make some changes to the default configuration (we recomme
|
|||
Running tests with Docker and docker-compose
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Alternatively you can use Docker and docker-compose for running the tests with ``docker-compose run test``.
|
||||
If you just want to run tests in a Docker container, you can use Docker and docker-compose for running the tests with ``docker-compose run test`` directly in the ``lemur`` project.
|
||||
|
||||
(For running the Lemur service in Docker, see `lemur-docker <https://github.com/Netflix/lemur-docker>`_.)
|
||||
|
||||
|
||||
Coding Standards
|
||||
|
@ -152,7 +171,7 @@ You'll notice that the test suite is structured based on where the code lives, a
|
|||
Static Media
|
||||
------------
|
||||
|
||||
Lemur uses a library that compiles it's static media assets (LESS and JS files) automatically. If you're developing using
|
||||
Lemur uses a library that compiles its static media assets (LESS and JS files) automatically. If you're developing using
|
||||
runserver you'll see changes happen not only in the original files, but also the minified or processed versions of the file.
|
||||
|
||||
If you've made changes and need to compile them by hand for any reason, you can do so by running:
|
||||
|
|
|
@ -0,0 +1,29 @@
|
|||
defaults Package
|
||||
================
|
||||
|
||||
:mod:`defaults` Module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: lemur.defaults
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`schemas` Module
|
||||
-----------------------------
|
||||
|
||||
.. automodule:: lemur.defaults.schemas
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`views` Module
|
||||
---------------------------
|
||||
|
||||
.. automodule:: lemur.defaults.views
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -0,0 +1,20 @@
|
|||
deployment Package
|
||||
===================
|
||||
|
||||
:mod:`deployment` Module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: lemur.deployment
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`service` Module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: lemur.deployment.service
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -0,0 +1,56 @@
|
|||
endpoints Package
|
||||
===================
|
||||
|
||||
:mod:`endpoints` Module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: lemur.endpoints
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`cli` Module
|
||||
--------------------------
|
||||
|
||||
.. automodule:: lemur.endpoints.cli
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`models` Module
|
||||
-----------------------------
|
||||
|
||||
.. automodule:: lemur.endpoints.models
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`schemas` Module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: lemur.endpoints.schemas
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`service` Module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: lemur.endpoints.service
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`views` Module
|
||||
----------------------------
|
||||
|
||||
.. automodule:: lemur.endpoints.views
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -0,0 +1,47 @@
|
|||
logs Package
|
||||
===================
|
||||
|
||||
:mod:`logs` Module
|
||||
--------------------
|
||||
|
||||
.. automodule:: lemur.logs
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`models` Module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: lemur.logs.models
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`schemas` Module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: lemur.logs.schemas
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`service` Module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: lemur.logs.service
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`views` Module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: lemur.logs.views
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -0,0 +1,83 @@
|
|||
lemur_acme package
|
||||
=================================
|
||||
|
||||
:mod:`lemur_acme` Module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_acme
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`acme_handlers` Module
|
||||
-----------------------------------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_acme.acme_handlers
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`challenge_types` Module
|
||||
-------------------------------------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_acme.challenge_types
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`cloudflare` Module
|
||||
-------------------------------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_acme.cloudflare
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`dyn` Module
|
||||
------------------------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_acme.dyn
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`plugin` Module
|
||||
---------------------------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_acme.plugin
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`powerdns` Module
|
||||
-----------------------------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_acme.powerdns
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`route53` Module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_acme.route53
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`ultradns` Module
|
||||
-----------------------------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_acme.ultradns
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -0,0 +1,20 @@
|
|||
lemur_atlas package
|
||||
==================================
|
||||
|
||||
:mod:`lemur_atlas` Module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_atlas
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`plugin` Module
|
||||
--------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_atlas.plugin
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -0,0 +1,20 @@
|
|||
lemur_cryptography package
|
||||
==================================
|
||||
|
||||
:mod:`lemur_cryptography` Module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_cryptography
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`plugin` Module
|
||||
--------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_cryptography.plugin
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -0,0 +1,20 @@
|
|||
lemur_digicert package
|
||||
==================================
|
||||
|
||||
:mod:`lemur_digicert` Module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_digicert
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`plugin` Module
|
||||
--------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_digicert.plugin
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -0,0 +1,20 @@
|
|||
lemur_jks package
|
||||
==================================
|
||||
|
||||
:mod:`lemur_jks` Module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_jks
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`plugin` Module
|
||||
--------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_jks.plugin
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -0,0 +1,20 @@
|
|||
lemur_kubernetes package
|
||||
==================================
|
||||
|
||||
:mod:`lemur_kubernetes` Module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_kubernetes
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`plugin` Module
|
||||
--------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_kubernetes.plugin
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -0,0 +1,20 @@
|
|||
lemur_openssl package
|
||||
==================================
|
||||
|
||||
:mod:`lemur_openssl` Module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_openssl
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`plugin` Module
|
||||
--------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_openssl.plugin
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -0,0 +1,20 @@
|
|||
lemur_slack package
|
||||
==================================
|
||||
|
||||
:mod:`lemur_slack` Module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_slack
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`plugin` Module
|
||||
--------------------
|
||||
|
||||
.. automodule:: lemur.plugins.lemur_slack.plugin
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -0,0 +1,38 @@
|
|||
reporting Package
|
||||
===================
|
||||
|
||||
:mod:`reporting` Module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: lemur.reporting
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`cli` Module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: lemur.reporting.cli
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`service` Module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: lemur.reporting.service
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`views` Module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: lemur.reporting.views
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -28,15 +28,6 @@ lemur Package
|
|||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`decorators` Module
|
||||
------------------------
|
||||
|
||||
.. automodule:: lemur.decorators
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`exceptions` Module
|
||||
------------------------
|
||||
|
||||
|
@ -108,7 +99,7 @@ Subpackages
|
|||
lemur.plugins.lemur_atlas
|
||||
lemur.plugins.lemur_cryptography
|
||||
lemur.plugins.lemur_digicert
|
||||
lemur.plugins.lemur_java
|
||||
lemur.plugins.lemur_jks
|
||||
lemur.plugins.lemur_kubernetes
|
||||
lemur.plugins.lemur_openssl
|
||||
lemur.plugins.lemur_slack
|
||||
|
|
|
@ -0,0 +1,56 @@
|
|||
sources Package
|
||||
===================
|
||||
|
||||
:mod:`sources` Module
|
||||
----------------------
|
||||
|
||||
.. automodule:: lemur.sources
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`cli` Module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: lemur.sources.cli
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`models` Module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: lemur.sources.models
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`schemas` Module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: lemur.sources.schemas
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`service` Module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: lemur.sources.service
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
:mod:`views` Module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: lemur.sources.views
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -0,0 +1,11 @@
|
|||
tests Package
|
||||
=============
|
||||
|
||||
:mod:`tests` Module
|
||||
--------------------
|
||||
|
||||
.. automodule:: lemur.tests
|
||||
:noindex:
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
|
@ -104,7 +104,7 @@ The `IssuerPlugin` exposes four functions functions::
|
|||
|
||||
def create_certificate(self, csr, issuer_options):
|
||||
# requests.get('a third party')
|
||||
def revoke_certificate(self, certificate, comments):
|
||||
def revoke_certificate(self, certificate, reason):
|
||||
# requests.put('a third party')
|
||||
def get_ordered_certificate(self, order_id):
|
||||
# requests.get('already existing certificate')
|
||||
|
@ -145,8 +145,7 @@ The `IssuerPlugin` doesn't have any options like Destination, Source, and Notifi
|
|||
any fields you might need to submit a request to a third party. If there are additional options you need
|
||||
in your plugin feel free to open an issue, or look into adding additional options to issuers yourself.
|
||||
|
||||
Asynchronous Certificates
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
**Asynchronous Certificates**
|
||||
An issuer may take some time to actually issue a certificate for an order. In this case, a `PendingCertificate` is returned, which holds information to recreate a `Certificate` object at a later time. Then, `get_ordered_certificate()` should be run periodically via `python manage.py pending_certs fetch -i all` to attempt to retrieve an ordered certificate::
|
||||
|
||||
def get_ordered_certificate(self, order_id):
|
||||
|
@ -154,9 +153,10 @@ An issuer may take some time to actually issue a certificate for an order. In t
|
|||
# retrieve an order, and check if there is an issued certificate attached to it
|
||||
|
||||
`cancel_ordered_certificate()` should be implemented to allow an ordered certificate to be canceled before it is issued::
|
||||
def cancel_ordered_certificate(self, pending_cert, **kwargs):
|
||||
# pending_cert should contain the necessary information to match an order
|
||||
# kwargs can be given to provide information to the issuer for canceling
|
||||
|
||||
def cancel_ordered_certificate(self, pending_cert, **kwargs):
|
||||
# pending_cert should contain the necessary information to match an order
|
||||
# kwargs can be given to provide information to the issuer for canceling
|
||||
|
||||
Destination
|
||||
-----------
|
||||
|
@ -215,18 +215,22 @@ Notification
|
|||
------------
|
||||
|
||||
Lemur includes the ability to create Email notifications by **default**. These notifications
|
||||
currently come in the form of expiration notices. Lemur periodically checks certifications expiration dates and
|
||||
currently come in the form of expiration and rotation notices for all certificates, expiration notices for CA certificates,
|
||||
and ACME certificate creation failure notices. Lemur periodically checks certificate expiration dates and
|
||||
determines if a given certificate is eligible for notification. There are currently only two parameters used to
|
||||
determine if a certificate is eligible; validity expiration (date the certificate is no longer valid) and the number
|
||||
of days the current date (UTC) is from that expiration date.
|
||||
|
||||
There are currently two objects that available for notification plugins the first is `NotficationPlugin`. This is the base object for
|
||||
any notification within Lemur. Currently the only support notification type is an certificate expiration notification. If you
|
||||
Certificate expiration notifications can also be configured for Slack or AWS SNS. Other notifications are not configurable.
|
||||
Notifications sent to a certificate owner and security team (`LEMUR_SECURITY_TEAM_EMAIL`) can currently only be sent via email.
|
||||
|
||||
There are currently two objects that are available for notification plugins. The first is `NotificationPlugin`, which is the base object for
|
||||
any notification within Lemur. Currently the only supported notification type is a certificate expiration notification. If you
|
||||
are trying to create a new notification type (audit, failed logins, etc.) this would be the object to base your plugin on.
|
||||
You would also then need to build additional code to trigger the new notification type.
|
||||
|
||||
The second is `ExpirationNotificationPlugin`, this object inherits from `NotificationPlugin` object.
|
||||
You will most likely want to base your plugin on, if you want to add new channels for expiration notices (Slack, HipChat, Jira, etc.). It adds default options that are required by
|
||||
The second is `ExpirationNotificationPlugin`, which inherits from the `NotificationPlugin` object.
|
||||
You will most likely want to base your plugin on this object if you want to add new channels for expiration notices (HipChat, Jira, etc.). It adds default options that are required by
|
||||
all expiration notifications (interval, unit). This interface expects the child to define the following function::
|
||||
|
||||
def send(self, notification_type, message, targets, options, **kwargs):
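
For instance, a minimal sketch of a new expiration channel (the class name, slug, option name, and import path are assumptions for illustration; only the `send` signature above comes from Lemur)::

import requests
from lemur.plugins.bases import ExpirationNotificationPlugin  # assumed import path

class ExampleWebhookNotificationPlugin(ExpirationNotificationPlugin):  # hypothetical plugin
    title = 'Example Webhook'
    slug = 'example-webhook-notification'
    description = 'Posts expiration notices to a hypothetical webhook'

    additional_options = [
        {
            'name': 'webhook_url',
            'type': 'str',
            'required': True,
            'helpMessage': 'URL to post expiration notices to',
        },
    ]

    def send(self, notification_type, message, targets, options, **kwargs):
        # message holds the rendered certificate data for this notification type;
        # targets and options come from the notification object's configuration
        webhook_url = self.get_option('webhook_url', options)
        requests.post(webhook_url, json={'type': notification_type, 'message': message, 'targets': targets})
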
|
||||
|
@ -281,6 +285,17 @@ The `ExportPlugin` object requires the implementation of one function::
|
|||
Support of various formats sometimes relies on external tools system calls. Always be mindful of sanitizing any input to these calls.
|
||||
|
||||
|
||||
Custom TLS Provider
|
||||
-------------------
|
||||
|
||||
Managing TLS at enterprise scale can be hard, and organizations often offer custom wrapper implementations. It can
be preferable to use those when making calls to internal services. The `TLSPlugin` helps achieve this. It requires the
implementation of one function, which creates a TLS session::
|
||||
|
||||
def session(self, server_application):
|
||||
# return active session
|
||||
|
||||
|
||||
Testing
|
||||
=======
|
||||
|
||||
|
|
|
@ -1,32 +1,65 @@
|
|||
Doing a release
|
||||
===============
|
||||
|
||||
Doing a release of ``lemur`` requires a few steps.
|
||||
Doing a release of ``lemur`` is now mostly automated and consists of the following steps:
|
||||
|
||||
Bumping the version number
|
||||
--------------------------
|
||||
* Raise a PR to add the release date and summary in the :doc:`/changelog`.
|
||||
* Merge the above PR and create a new `GitHub release <https://github.com/Netflix/lemur/releases>`_: set the tag starting with v, e.g., v0.9.0
|
||||
|
||||
The `publish workflow <https://github.com/Netflix/lemur/actions/workflows/lemur-publish-release-pypi.yml>`_ uses the git
|
||||
tag to set the release version.
|
||||
|
||||
The following describes the manual release steps, which are now obsolete:
|
||||
|
||||
Manually Bumping the version number
|
||||
-----------------------------------
|
||||
|
||||
The next step in doing a release is bumping the version number in the
|
||||
software.
|
||||
|
||||
* Update the version number in ``lemur/__about__.py``.
|
||||
* Set the release date in the :doc:`/changelog`.
|
||||
* Do a commit indicating this.
|
||||
* Send a pull request with this.
|
||||
* Do a commit indicating this, and raise a pull request with this.
|
||||
* Wait for it to be merged.
|
||||
|
||||
Performing the release
|
||||
----------------------
|
||||
Manually Performing the release
|
||||
-------------------------------
|
||||
|
||||
The commit that merged the version number bump is now the official release
|
||||
commit for this release. You will need to have ``gpg`` installed and a ``gpg``
|
||||
key in order to do a release. Once this has happened:
|
||||
commit for this release. You need an `API key <https://pypi.org/manage/account/#api-tokens>`_,
|
||||
which requires permissions to maintain the Lemur `project <https://pypi.org/project/lemur/>`_.
|
||||
|
||||
* Run ``invoke release {version}``.
|
||||
For creating the release, follow these steps (more details `here <https://packaging.python.org/tutorials/packaging-projects/#generating-distribution-archives>`_)
|
||||
|
||||
The release should now be available on PyPI and a tag should be available in
|
||||
* Make sure you have the latest versions of setuptools and wheel installed:
|
||||
|
||||
``python3 -m pip install --user --upgrade setuptools wheel``
|
||||
|
||||
* Now run this command from the same directory where setup.py is located:
|
||||
|
||||
``python3 setup.py sdist bdist_wheel``
|
||||
|
||||
* Once completed it should generate two files in the dist directory:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ ls dist/
|
||||
lemur-0.8.0-py2.py3-none-any.whl lemur-0.8.0.tar.gz
|
||||
|
||||
|
||||
* In this step, the distribution will be uploaded. You’ll need to install Twine:
|
||||
|
||||
``python3 -m pip install --user --upgrade twine``
|
||||
|
||||
* Once installed, run Twine to upload all of the archives under dist:
|
||||
|
||||
``python3 -m twine upload --repository pypi dist/*``
|
||||
|
||||
The release should now be available on `PyPI Lemur <https://pypi.org/project/lemur/>`_ and a tag should be available in
|
||||
the repository.
|
||||
|
||||
Make sure to also make a github `release <https://github.com/Netflix/lemur/releases>`_ which will pick up the latest version.
|
||||
|
||||
Verifying the release
|
||||
---------------------
|
||||
|
||||
|
|
Before Width: | Height: | Size: 125 KiB After Width: | Height: | Size: 112 KiB |
Before Width: | Height: | Size: 44 KiB After Width: | Height: | Size: 133 KiB |
Before Width: | Height: | Size: 57 KiB After Width: | Height: | Size: 48 KiB |
Before Width: | Height: | Size: 56 KiB After Width: | Height: | Size: 74 KiB |

@ -37,18 +37,20 @@ Create a New Certificate

.. figure:: create_certificate.png

Enter an owner, short description and the authority you wish to issue this certificate.
Enter a common name into the certificate; if no validity range is selected, two years is
the default.
Enter an owner, common name, short description and certificate authority you wish to issue this certificate.
Depending upon the selected CA, the UI displays the default validity of the certificate. You can select a different
validity by entering a custom date, if supported by the CA.

You can also add `Subject Alternate Names` or SAN for certificates that need to include more than one domain.
The first domain is the Common Name and all other domains are added here as DNSName entries.

You can add notification options and upload the created certificate to a destination; both
of these are editable features and can be changed after the certificate has been created.

.. figure:: certificate_extensions.png

These options are typically for advanced users; the one exception is the `Subject Alternate Names` or SAN.
For certificates that need to include more than one domain, the first domain is the Common Name and all
other domains are added here as DNSName entries.
These options are typically for advanced users. Lemur creates ECC-based certificates (ECCPRIME256V1 in particular)
by default. One can change the key type using the dropdown option listed here.

Import an Existing Certificate

@ -58,11 +60,12 @@ Import an Existing Certificate

Enter an owner, short description and public certificate. If there are intermediates and private keys,
Lemur will track them just as it does if the certificate were created through Lemur. Lemur generates
a certificate name but you can override that by passing a value to the `Custom Name` field.
a certificate name but you can override that by passing a value to the `Custom Certificate Name` field.

You can add notification options and upload the created certificate to a destination; both
of these are editable features and can be changed after the certificate has been created.

.. _CreateANewUser:

Create a New User
~~~~~~~~~~~~~~~~~

Before Width: | Height: | Size: 73 KiB After Width: | Height: | Size: 83 KiB |

@ -17,4 +17,5 @@ A list of additional contributors can be seen on `GitHub <https://github.com/net

Lemur License
-------------

.. include:: ../../LICENSE
.. include:: ../../LICENSE
   :literal:

@ -323,9 +323,9 @@ Periodic Tasks

Lemur contains a few tasks that are run on a scheduled basis; currently the recommended way to run these tasks is to create
celery tasks or cron jobs that run these commands.

There are currently three commands that could/should be run on a periodic basis:
The following commands could/should be run on a periodic basis:

- `notify`
- `notify expirations`, `notify authority_expirations`, and `notify security_expiration_summary` (see :ref:`NotificationOptions` for configuration info)
- `check_revoked`
- `sync`


@ -334,13 +334,16 @@ If you are using LetsEncrypt, you must also run the following:

- `fetch_all_pending_acme_certs`
- `remove_old_acme_certs`

How often you run these commands is largely up to the user. `notify` and `check_revoked` are typically run at least once a day.
How often you run these commands is largely up to the user. `notify` should be run once a day (more often will result in
duplicate notifications). `check_revoked` is typically run at least once a day.
`sync` is typically run every 15 minutes. `fetch_all_pending_acme_certs` should be run frequently (every minute is fine).
`remove_old_acme_certs` can be run less frequently, such as once a week.

Example cron entries::

    0 22 * * * lemuruser export LEMUR_CONF=/Users/me/.lemur/lemur.conf.py; /www/lemur/bin/lemur notify expirations
    0 22 * * * lemuruser export LEMUR_CONF=/Users/me/.lemur/lemur.conf.py; /www/lemur/bin/lemur notify authority_expirations
    0 22 * * * lemuruser export LEMUR_CONF=/Users/me/.lemur/lemur.conf.py; /www/lemur/bin/lemur notify security_expiration_summary
    */15 * * * * lemuruser export LEMUR_CONF=/Users/me/.lemur/lemur.conf.py; /www/lemur/bin/lemur source sync -s all
    0 22 * * * lemuruser export LEMUR_CONF=/Users/me/.lemur/lemur.conf.py; /www/lemur/bin/lemur certificate check_revoked

@ -382,6 +385,27 @@ Example Celery configuration (To be placed in your configuration file)::

            'expires': 180
        },
        'schedule': crontab(hour="*"),
    },
    'notify_expirations': {
        'task': 'lemur.common.celery.notify_expirations',
        'options': {
            'expires': 180
        },
        'schedule': crontab(hour=22, minute=0),
    },
    'notify_authority_expirations': {
        'task': 'lemur.common.celery.notify_authority_expirations',
        'options': {
            'expires': 180
        },
        'schedule': crontab(hour=22, minute=0),
    },
    'send_security_expiration_summary': {
        'task': 'lemur.common.celery.send_security_expiration_summary',
        'options': {
            'expires': 180
        },
        'schedule': crontab(hour=22, minute=0),
    }
}
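
The ``crontab`` schedule helper used above is Celery's; a minimal sketch of the import this configuration assumes near the top of the file:

.. code-block:: python

    from celery.schedules import crontab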

@ -415,8 +439,8 @@ And the worker can be started with desired options such as the following::

Supervisor or systemd configurations should be created for these in production environments as appropriate.

Add support for LetsEncrypt
===========================
Add support for LetsEncrypt/ACME
================================

LetsEncrypt is a free, limited-feature certificate authority that offers publicly trusted certificates that are valid
for 90 days. LetsEncrypt does not use organizational validation (OV), and instead relies on domain validation (DV).

@ -424,7 +448,10 @@ LetsEncrypt requires that we prove ownership of a domain before we're able to is

time we want a certificate.

The most common methods to prove ownership are HTTP validation and DNS validation. Lemur supports DNS validation
through the creation of DNS TXT records.
through the creation of DNS TXT records as well as HTTP validation, reusing the destination concept.

ACME DNS Challenge
------------------

In a nutshell, when we send a certificate request to LetsEncrypt, they generate a random token and ask us to put that
token in a DNS text record to prove ownership of a domain. If a certificate request has multiple domains, we must

@ -462,6 +489,24 @@ possible. To enable this functionality, periodically (or through Cron/Celery) ru

This command will traverse all DNS providers, determine which zones they control, and upload this list of zones to
Lemur's database (in the dns_providers table). Alternatively, you can manually input this data.

ACME HTTP Challenge
-------------------

The flow for requesting a certificate using the HTTP challenge is not that different from the one described for the DNS
challenge. The only difference is that, instead of creating a DNS TXT record, a file is uploaded to a web server, which
serves the file at `http://<domain>/.well-known/acme-challenge/<token>`
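
As a quick sanity check that a destination really serves uploaded files at that location, you can fetch a test file you placed there yourself; a sketch (the domain and file name are placeholders):

.. code-block:: bash

    curl http://example.net/.well-known/acme-challenge/test-file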

Currently the HTTP challenge also works without Celery, since it's done while creating the certificate, and doesn't
rely on Celery to create the DNS record. This will change when we implement mix & match of ACME challenge types.

To create an HTTP-compatible Authority, you first need to create a new destination that will be used to deploy the
challenge token. Visit `Admin` -> `Destination` and click `Create`. The path you provide for the destination needs to
be the exact path that is called when the ACME provider calls `http://<domain>/.well-known/acme-challenge/`. The
token part will be added dynamically by the acme_upload.
Currently only the SFTP and S3 Bucket destinations support the ACME HTTP challenge.

Afterwards you can create a new certificate authority as described in the DNS challenge, but you need to choose
`Acme HTTP-01` as the plugin type, and then the destination you created beforehand.

LetsEncrypt: pinning to cross-signed ICA
----------------------------------------

@ -554,4 +599,119 @@ Using `python-jwt` converting an existing private key in PEM format is quite eas

    {"body": {}, "uri": "https://acme-staging-v02.api.letsencrypt.org/acme/acct/<ACCOUNT_NUMBER>"}

The URI can be retrieved from the ACME create account endpoint when creating a new account, using the existing key.
The URI can be retrieved from the ACME create account endpoint when creating a new account, using the existing key.

OpenSSH
=======

OpenSSH (also known as OpenBSD Secure Shell) is a suite of secure networking utilities based on the Secure Shell (SSH) protocol, which provides a secure channel over an unsecured network in a client–server architecture.

Using a PKI with OpenSSH means you can sign a key for a user and it can log into any server that trusts the CA.

Using a CA avoids TOFU and the need to synchronize a list of server public keys to `known_hosts` files.

This is useful when you're managing a large number of machines or for an immutable infrastructure.

Add first OpenSSH authority
---------------------------

To start issuing OpenSSH certificates, you need to create an OpenSSH authority. To do this, visit
Authorities -> Create. Set the applicable attributes:

- Name: OpenSSH
- Common Name: example.net

Then click "More Options" and change the plugin value to "OpenSSH".

Then just click the "Create" button to add this authority.

.. note:: OpenSSH does not support the sub-CA feature.

Add a server certificate
-------------------------

Now visit Certificates -> Create to add a server certificate. Set the applicable attributes:

- Common Name: server.example.net

Then click "More Options" and set the Certificate Template to "Server Certificate".

This step is important; a certificate for a server and a certificate for a client are not exactly the same thing.
In this case "Common Name" and all Subject Alternate Names with type DNSName will be added to the certificate.

Finally, click the "Create" button.

Add a client certificate
------------------------

Now visit Certificates -> Create to add a client certificate. Set the applicable attributes:

- Common Name: example.net

Then click "More Options" and set the Certificate Template to "Client Certificate".

In this case, the name of the creator is used as the principal (in this documentation we assume that this certificate is created by the user "lemur").

Finally, click the "Create" button.

Configure OpenSSH server
------------------------

Connect to the server.example.net server to correctly configure the OpenSSH server with the CA created previously.

First of all, add the CA public key as well as the host's private key and public certificate:

- Create file `/etc/ssh/ca.pub` and copy the "CHAIN" content of the *server certificate* (everything in one line).
- Create file `/etc/ssh/ssh_host_key` and copy "PRIVATE KEY" content.
- Create file `/etc/ssh/ssh_host_key.pub` and copy "PUBLIC CERTIFICATE" content (everything in one line).

Set the appropriate permissions:

.. code-block:: bash

    chmod 600 /etc/ssh/ca.pub /etc/ssh/ssh_host_key
    chmod 644 /etc/ssh/ssh_host_key.pub
    chown root: /etc/ssh/ca.pub /etc/ssh/ssh_host_key /etc/ssh/ssh_host_key.pub

Then change the OpenSSH server configuration to use these files. Edit `/etc/ssh/sshd_config` and add::

    TrustedUserCAKeys /etc/ssh/ca.pub
    HostKey /etc/ssh/ssh_host_key
    HostCertificate /etc/ssh/ssh_host_key.pub

You can remove all other `HostKey` lines.

Finally, restart OpenSSH.
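
On a systemd-based system (such as the Ubuntu releases assumed elsewhere in these docs), that typically means:

.. code-block:: bash

    sudo systemctl restart ssh    # the unit may be named sshd on other distributions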

Configure the OpenSSH client
----------------------------

Now you can configure the user's computer.

First of all, add the private key and public certificate:

- Create file `~/.ssh/key` and copy "PRIVATE KEY" content.
- Create file `~/.ssh/key.pub` and copy "PUBLIC CERTIFICATE" content of the *client certificate* (everything in one line).

Set the appropriate permissions:

.. code-block:: bash

    chmod 600 ~/.ssh/key.pub ~/.ssh/key

To avoid TOFU, edit the `~/.ssh/known_hosts` file and add a new line (all in one line; an example follows this list):

- @cert-authority \*example.net
- the "CHAIN" content

Now you can connect to the server with (here 'lemur' is the principal name and must exist on the server):

.. code-block:: bash

    ssh lemur@server.example.net -i ~/.ssh/key

With this configuration you won't see any line like::

    Warning: Permanently added 'server.example.net,192.168.0.1' (RSA) to the list of known hosts.

And you don't have to enter any password.


@ -1,9 +1,10 @@

Quickstart
**********

This guide will step you through setting up a Python-based virtualenv, installing the required packages, and configuring the basic web service. This guide assumes a clean Ubuntu 14.04 instance, commands may differ based on the OS and configuration being used.
This guide will step you through setting up a Python-based virtualenv, installing the required packages, and configuring the basic web service.
This guide assumes a clean Ubuntu 18.04/20.04 instance; commands may differ based on the OS and configuration being used.

Pressed for time? See the Lemur docker file on `Github <https://github.com/Netflix/lemur-docker>`_.
For a quicker alternative, see the Lemur docker file on `Github <https://github.com/Netflix/lemur-docker>`_.


Dependencies

@ -11,12 +12,14 @@ Dependencies

Some basic prerequisites which you'll need in order to run Lemur (a quick way to check them is sketched after this list):

* A UNIX-based operating system (we test on Ubuntu, develop on OS X)
* A UNIX-based operating system (we test on Ubuntu, develop on macOS)
* Python 3.7 or greater
* PostgreSQL 9.4 or greater
* Nginx
* Node v10.x (LTS)
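
A minimal sketch of such a check:

.. code-block:: bash

    # confirm the prerequisites are installed; exact version output will differ
    python3 --version
    psql --version
    node --version
    nginx -v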

.. note:: Lemur was built with in AWS in mind. This means that things such as databases (RDS), mail (SES), and TLS (ELB), are largely handled for us. Lemur does **not** require AWS to function. Our guides and documentation try to be as generic as possible and are not intended to document every step of launching Lemur into a given environment.
.. note:: By default, Ubuntu 18.04 provides Python 3.6.x and Node v8.x
.. note:: Lemur was built with AWS in mind. This means that things such as databases (RDS), mail (SES), and TLS (ELB), are largely handled for us. Lemur does **not** require AWS to function. Our guides and documentation try to be as generic as possible and are not intended to document every step of launching Lemur into a given environment.

Installing Build Dependencies

@ -27,7 +30,7 @@ If installing Lemur on a bare Ubuntu OS you will need to grab the following pack

.. code-block:: bash

    sudo apt-get update
    sudo apt-get install nodejs nodejs-legacy python-pip python-dev python3-dev libpq-dev build-essential libssl-dev libffi-dev libsasl2-dev libldap2-dev nginx git supervisor npm postgresql
    sudo apt-get install nodejs npm python-pip python-dev python3-dev libpq-dev build-essential libssl-dev libffi-dev libsasl2-dev libldap2-dev nginx git supervisor postgresql

.. note:: PostgreSQL is only required if your database is going to be on the same host as the webserver. npm is needed if you're installing Lemur from the source (e.g., from git).

@ -130,7 +133,7 @@ Once created, you will need to update the configuration file with information ab

    vi ~/.lemur/lemur.conf.py

.. note:: If you are unfamiliar with the SQLALCHEMY_DATABASE_URI string it can be broken up like so:
    ``postgresql://userame:password@<database-fqdn>:<database-port>/<database-name>``
    ``postgresql://username:password@<database-fqdn>:<database-port>/<database-name>``
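
For example, a local setup matching this guide might use something like the following (the credentials and database name are placeholders):

.. code-block:: python

    # ~/.lemur/lemur.conf.py
    SQLALCHEMY_DATABASE_URI = "postgresql://lemur:lemur@localhost:5432/lemur"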

Before Lemur will run, you need to fill in a few required variables in the configuration file:

@ -145,7 +148,7 @@ Before Lemur will run you need to fill in a few required variables in the config

    LEMUR_DEFAULT_ORGANIZATIONAL_UNIT

Set Up Postgres
--------------
---------------

For production, a dedicated database is recommended; for this guide we will assume Postgres has been installed and is on the same machine that Lemur is installed on.

@ -183,11 +186,12 @@ In addition to creating a new user, Lemur also creates a few default email notif

Your database installation requires the pg_trgm extension. If you do not have this installed already, you can allow the script to install this for you by adding the SUPERUSER permission to the lemur database user.

.. code-block:: bash

    sudo -u postgres -i
    psql
    postgres=# ALTER USER lemur WITH SUPERUSER
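
If you prefer not to grant ``SUPERUSER`` at all, a sketch of creating the extension yourself (assuming the database is named ``lemur`` and the extension is available on the server):

.. code-block:: bash

    sudo -u postgres psql lemur -c "CREATE EXTENSION IF NOT EXISTS pg_trgm;"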

Additional notifications can be created through the UI or API. See :ref:`Creating Notifications <CreatingNotifications>` and :ref:`Command Line Interface <CommandLineInterface>` for details.
Additional notifications can be created through the UI or API. See :ref:`Notification Options <NotificationOptions>` and :ref:`Command Line Interface <CommandLineInterface>` for details.

**Make note of the password used as this will be used during first login to the Lemur UI.**

@ -199,15 +203,16 @@ Additional notifications can be created through the UI or API. See :ref:`Creati

.. note:: If you added the SUPERUSER permission to the lemur database user above, it is recommended you revoke that permission now.

.. code-block:: bash

    sudo -u postgres -i
    psql
    postgres=# ALTER USER lemur WITH NOSUPERUSER

.. note:: It is recommended that once the ``lemur`` user is created that you create individual users for everyday access. There is currently no way for a user to self-enroll for Lemur access; they must have an administrator create an account for them or be enrolled automatically through SSO. This can be done through the CLI or UI. See :ref:`Creating Users <CreatingUsers>` and :ref:`Command Line Interface <CommandLineInterface>` for details.
.. note:: It is recommended that once the ``lemur`` user is created that you create individual users for everyday access. There is currently no way for a user to self-enroll for Lemur access; they must have an administrator create an account for them or be enrolled automatically through SSO. This can be done through the CLI or UI. See :ref:`Creating a New User <CreateANewUser>` and :ref:`Command Line Interface <CommandLineInterface>` for details.

Set Up a Reverse Proxy
---------------------
----------------------

By default, Lemur runs on port 8000. Even if you change this, under normal conditions you won't be able to bind to port 80. To get around this (and to avoid running Lemur as a privileged user, which you shouldn't), we need to set up a simple web proxy. There are many different web servers you can use for this; we like and recommend Nginx.
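
A minimal sketch of such a proxy (the server name is a placeholder, and a real deployment will also want TLS):

.. code-block:: nginx

    server {
        listen 80;
        server_name lemur.example.com;

        location / {
            proxy_pass http://127.0.0.1:8000;
            proxy_set_header Host $host;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        }
    }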

@ -323,6 +328,12 @@ unlock

Decrypts sensitive key material - used to decrypt the secrets stored in source during deployment.

Automated celery tasks
~~~~~~~~~~~~~~~~~~~~~~

Please refer to :ref:`Periodic Tasks <PeriodicTasks>` to learn more about task scheduling in Lemur.

What's Next?
------------

@ -22,7 +22,7 @@ Supported Versions

------------------

At any given time, we will provide security support for the `master`_ branch
as well as the 2 most recent releases.
as well as the most recent release.

Disclosure Process
------------------

@ -30,20 +30,15 @@ Disclosure Process

Our process for taking a security issue from private discussion to public
disclosure involves multiple steps.

Approximately one week before full public disclosure, we will send advance
notification of the issue to a list of people and organizations, primarily
composed of operating-system vendors and other distributors of
``lemur``. This notification will consist of an email message
containing:
Approximately one week before full public disclosure, we will provide advance notification that a security issue exists. Depending on the severity of the issue, we may choose to either send a targeted email to known Lemur users and contributors or post an issue to the Lemur repository. In either case, the notification should contain the following.

* A full description of the issue and the affected versions of
  ``lemur``.
* A description of the potential impact.
* The affected versions of ``lemur``.
* The steps we will be taking to remedy the issue.
* The patches, if any, that will be applied to ``lemur``.
* The date on which the ``lemur`` team will apply these patches, issue
  new releases, and publicly disclose the issue.

Simultaneously, the reporter of the issue will receive notification of the date
If the issue was disclosed to us, the reporter will receive notification of the date
on which we plan to make the issue public.

On the day of disclosure, we will take the following steps:

@ -52,7 +47,7 @@ On the day of disclosure, we will take the following steps:

messages for these patches will indicate that they are for security issues,
but will not describe the issue in any detail; instead, they will warn of
upcoming disclosure.
* Issue the relevant releases.
* Issue an updated release.

If a reported issue is believed to be particularly time-sensitive – due to a
known exploit in the wild, for example – the time between advance notification
186
gulp/build.js
|
@ -7,7 +7,7 @@ var gulp = require('gulp'),
|
|||
gulpif = require('gulp-if'),
|
||||
gutil = require('gulp-util'),
|
||||
foreach = require('gulp-foreach'),
|
||||
path =require('path'),
|
||||
path = require('path'),
|
||||
merge = require('merge-stream'),
|
||||
del = require('del'),
|
||||
size = require('gulp-size'),
|
||||
|
@ -21,7 +21,6 @@ var gulp = require('gulp'),
|
|||
useref = require('gulp-useref'),
|
||||
filter = require('gulp-filter'),
|
||||
rev = require('gulp-rev'),
|
||||
revReplace = require('gulp-rev-replace'),
|
||||
imagemin = require('gulp-imagemin'),
|
||||
minifyHtml = require('gulp-minify-html'),
|
||||
bowerFiles = require('main-bower-files'),
|
||||
|
@ -29,52 +28,77 @@ var gulp = require('gulp'),
|
|||
replace = require('gulp-replace'),
|
||||
argv = require('yargs').argv;
|
||||
|
||||
gulp.task('default', ['clean'], function () {
|
||||
gulp.start('fonts', 'styles');
|
||||
// http://stackoverflow.com/questions/1144783/replacing-all-occurrences-of-a-string-in-javascript
|
||||
function escapeRegExp(string) {
|
||||
return string.replace(/([.*+?^=!:${}()|\[\]\/\\])/g, '\\$1');
|
||||
}
|
||||
|
||||
function replaceAll(string, find, replace) {
|
||||
return string.replace(new RegExp(escapeRegExp(find), 'g'), replace);
|
||||
}
|
||||
|
||||
function stringSrc(filename, string) {
|
||||
let src = require('stream').Readable({objectMode: true});
|
||||
src._read = function () {
|
||||
this.push(new gutil.File({cwd: '', base: '', path: filename, contents: Buffer.from(string)}));
|
||||
this.push(null);
|
||||
};
|
||||
return src;
|
||||
}
|
||||
|
||||
gulp.task('clean', function (done) {
|
||||
del(['.tmp', 'lemur/static/dist'], done);
|
||||
done();
|
||||
});
|
||||
|
||||
gulp.task('clean', function (cb) {
|
||||
del(['.tmp', 'lemur/static/dist'], cb);
|
||||
});
|
||||
gulp.task('default', gulp.series(['clean'], function () {
|
||||
gulp.start('fonts', 'styles');
|
||||
}));
|
||||
|
||||
gulp.task('test', function (done) {
|
||||
new karma.Server({
|
||||
configFile: __dirname + '/karma.conf.js',
|
||||
singleRun: true
|
||||
}, function() {
|
||||
done();
|
||||
}, function (err) {
|
||||
if (err === 0) {
|
||||
done();
|
||||
} else {
|
||||
// if karma server failed to start raise error
|
||||
done(new gutil.PluginError('karma', {
|
||||
message: 'Karma Tests failed'
|
||||
}));
|
||||
}
|
||||
}).start();
|
||||
});
|
||||
|
||||
gulp.task('dev:fonts', function () {
|
||||
var fileList = [
|
||||
let fileList = [
|
||||
'bower_components/bootstrap/dist/fonts/*',
|
||||
'bower_components/fontawesome/fonts/*'
|
||||
];
|
||||
|
||||
return gulp.src(fileList)
|
||||
.pipe(gulp.dest('.tmp/fonts'));
|
||||
.pipe(gulp.dest('.tmp/fonts')); // returns a stream making it async
|
||||
});
|
||||
|
||||
gulp.task('dev:styles', function () {
|
||||
var baseContent = '@import "bower_components/bootstrap/less/bootstrap.less";@import "bower_components/bootswatch/$theme$/variables.less";@import "bower_components/bootswatch/$theme$/bootswatch.less";@import "bower_components/bootstrap/less/utilities.less";';
|
||||
var isBootswatchFile = function (file) {
|
||||
let baseContent = '@import "bower_components/bootstrap/less/bootstrap.less";@import "bower_components/bootswatch/$theme$/variables.less";@import "bower_components/bootswatch/$theme$/bootswatch.less";@import "bower_components/bootstrap/less/utilities.less";';
|
||||
let isBootswatchFile = function (file) {
|
||||
|
||||
var suffix = 'bootswatch.less';
|
||||
let suffix = 'bootswatch.less';
|
||||
return file.path.indexOf(suffix, file.path.length - suffix.length) !== -1;
|
||||
};
|
||||
|
||||
var isBootstrapFile = function (file) {
|
||||
var suffix = 'bootstrap-',
|
||||
let isBootstrapFile = function (file) {
|
||||
let suffix = 'bootstrap-',
|
||||
fileName = path.basename(file.path);
|
||||
|
||||
return fileName.indexOf(suffix) === 0;
|
||||
};
|
||||
|
||||
var fileList = [
|
||||
let fileList = [
|
||||
'bower_components/bootswatch/sandstone/bootswatch.less',
|
||||
'bower_components/fontawesome/css/font-awesome.css',
|
||||
'bower_components/angular-spinkit/src/angular-spinkit.css',
|
||||
'bower_components/angular-chart.js/dist/angular-chart.css',
|
||||
'bower_components/angular-loading-bar/src/loading-bar.css',
|
||||
'bower_components/angular-ui-switch/angular-ui-switch.css',
|
||||
|
@ -87,20 +111,18 @@ gulp.task('dev:styles', function () {
|
|||
|
||||
return gulp.src(fileList)
|
||||
.pipe(gulpif(isBootswatchFile, foreach(function (stream, file) {
|
||||
var themeName = path.basename(path.dirname(file.path)),
|
||||
content = replaceAll(baseContent, '$theme$', themeName),
|
||||
file2 = string_src('bootstrap-' + themeName + '.less', content);
|
||||
|
||||
return file2;
|
||||
let themeName = path.basename(path.dirname(file.path)),
|
||||
content = replaceAll(baseContent, '$theme$', themeName);
|
||||
return stringSrc('bootstrap-' + themeName + '.less', content);
|
||||
})))
|
||||
.pipe(less())
|
||||
.pipe(gulpif(isBootstrapFile, foreach(function (stream, file) {
|
||||
var fileName = path.basename(file.path),
|
||||
let fileName = path.basename(file.path),
|
||||
themeName = fileName.substring(fileName.indexOf('-') + 1, fileName.indexOf('.'));
|
||||
|
||||
// http://stackoverflow.com/questions/21719833/gulp-how-to-add-src-files-in-the-middle-of-a-pipe
|
||||
// https://github.com/gulpjs/gulp/blob/master/docs/recipes/using-multiple-sources-in-one-task.md
|
||||
return merge(stream, gulp.src(['.tmp/styles/font-awesome.css', '.tmp/styles/lemur.css']))
|
||||
return merge(stream, gulp.src(['.tmp/styles/font-awesome.css', '.tmp/styles/lemur.css'], {allowEmpty: true}))
|
||||
.pipe(concat('style-' + themeName + '.css'));
|
||||
})))
|
||||
.pipe(plumber())
|
||||
|
@ -111,24 +133,6 @@ gulp.task('dev:styles', function () {
|
|||
.pipe(size());
|
||||
});
|
||||
|
||||
// http://stackoverflow.com/questions/1144783/replacing-all-occurrences-of-a-string-in-javascript
|
||||
function escapeRegExp(string) {
|
||||
return string.replace(/([.*+?^=!:${}()|\[\]\/\\])/g, '\\$1');
|
||||
}
|
||||
|
||||
function replaceAll(string, find, replace) {
|
||||
return string.replace(new RegExp(escapeRegExp(find), 'g'), replace);
|
||||
}
|
||||
|
||||
function string_src(filename, string) {
|
||||
var src = require('stream').Readable({ objectMode: true });
|
||||
src._read = function () {
|
||||
this.push(new gutil.File({ cwd: '', base: '', path: filename, contents: new Buffer(string) }));
|
||||
this.push(null);
|
||||
};
|
||||
return src;
|
||||
}
|
||||
|
||||
gulp.task('dev:scripts', function () {
|
||||
return gulp.src(['lemur/static/app/angular/**/*.js'])
|
||||
.pipe(jshint())
|
||||
|
@ -144,12 +148,12 @@ gulp.task('build:extras', function () {
|
|||
function injectHtml(isDev) {
|
||||
return gulp.src('lemur/static/app/index.html')
|
||||
.pipe(
|
||||
inject(gulp.src(bowerFiles({ base: 'app' })), {
|
||||
starttag: '<!-- inject:bower:{{ext}} -->',
|
||||
addRootSlash: false,
|
||||
ignorePath: isDev ? ['lemur/static/app/', '.tmp/'] : null
|
||||
})
|
||||
)
|
||||
inject(gulp.src(bowerFiles({base: 'app'})), {
|
||||
starttag: '<!-- inject:bower:{{ext}} -->',
|
||||
addRootSlash: false,
|
||||
ignorePath: isDev ? ['lemur/static/app/', '.tmp/'] : null
|
||||
})
|
||||
)
|
||||
.pipe(inject(gulp.src(['lemur/static/app/angular/**/*.js']), {
|
||||
starttag: '<!-- inject:{{ext}} -->',
|
||||
addRootSlash: false,
|
||||
|
@ -161,22 +165,18 @@ function injectHtml(isDev) {
|
|||
ignorePath: isDev ? ['lemur/static/app/', '.tmp/'] : null
|
||||
}))
|
||||
.pipe(
|
||||
gulpif(!isDev,
|
||||
inject(gulp.src('lemur/static/dist/ngviews/ngviews.min.js'), {
|
||||
starttag: '<!-- inject:ngviews -->',
|
||||
addRootSlash: false
|
||||
})
|
||||
)
|
||||
).pipe(gulp.dest('.tmp/'));
|
||||
gulpif(!isDev,
|
||||
inject(gulp.src('lemur/static/dist/ngviews/ngviews.min.js', {allowEmpty: true}), {
|
||||
starttag: '<!-- inject:ngviews -->',
|
||||
addRootSlash: false
|
||||
})
|
||||
)
|
||||
).pipe(gulp.dest('.tmp/'));
|
||||
}
|
||||
|
||||
gulp.task('dev:inject', ['dev:styles', 'dev:scripts'], function () {
|
||||
gulp.task('dev:inject', gulp.series(['dev:styles', 'dev:scripts'], function () {
|
||||
return injectHtml(true);
|
||||
});
|
||||
|
||||
gulp.task('build:inject', ['dev:styles', 'dev:scripts', 'build:ngviews'], function () {
|
||||
return injectHtml(false);
|
||||
});
|
||||
}));
|
||||
|
||||
gulp.task('build:ngviews', function () {
|
||||
return gulp.src(['lemur/static/app/angular/**/*.html'])
|
||||
|
@ -189,9 +189,13 @@ gulp.task('build:ngviews', function () {
|
|||
.pipe(size());
|
||||
});
|
||||
|
||||
gulp.task('build:html', ['dev:styles', 'dev:scripts', 'build:ngviews', 'build:inject'], function () {
|
||||
var jsFilter = filter(['**/*.js'], {'restore': true});
|
||||
var cssFilter = filter(['**/*.css'], {'restore': true});
|
||||
gulp.task('build:inject', gulp.series(['dev:styles', 'dev:scripts', 'build:ngviews'], function () {
|
||||
return injectHtml(false);
|
||||
}));
|
||||
|
||||
gulp.task('build:html', gulp.series(['build:inject'], function () {
|
||||
let jsFilter = filter(['**/*.js'], {'restore': true});
|
||||
let cssFilter = filter(['**/*.css'], {'restore': true});
|
||||
|
||||
return gulp.src('.tmp/index.html')
|
||||
.pipe(jsFilter)
|
||||
|
@ -203,12 +207,12 @@ gulp.task('build:html', ['dev:styles', 'dev:scripts', 'build:ngviews', 'build:in
|
|||
.pipe(useref())
|
||||
.pipe(gulp.dest('lemur/static/dist'))
|
||||
.pipe(size());
|
||||
});
|
||||
}));
|
||||
|
||||
gulp.task('build:fonts', ['dev:fonts'], function () {
|
||||
gulp.task('build:fonts', gulp.series(['dev:fonts'], function () {
|
||||
return gulp.src('.tmp/fonts/**/*')
|
||||
.pipe(gulp.dest('lemur/static/dist/fonts'));
|
||||
});
|
||||
}));
|
||||
|
||||
gulp.task('build:images', function () {
|
||||
return gulp.src('lemur/static/app/images/**/*')
|
||||
|
@ -230,36 +234,28 @@ gulp.task('package:strip', function () {
|
|||
.pipe(size());
|
||||
});
|
||||
|
||||
gulp.task('addUrlContextPath',['addUrlContextPath:revreplace'], function(){
|
||||
var urlContextPathExists = argv.urlContextPath ? true : false;
|
||||
['lemur/static/dist/scripts/main*.js',
|
||||
'lemur/static/dist/angular/**/*.html']
|
||||
.forEach(function(file){
|
||||
return gulp.src(file)
|
||||
.pipe(gulpif(urlContextPathExists, replace('api/', argv.urlContextPath + '/api/')))
|
||||
.pipe(gulpif(urlContextPathExists, replace('angular/', argv.urlContextPath + '/angular/')))
|
||||
.pipe(gulp.dest(function(file){
|
||||
return file.base;
|
||||
}))
|
||||
})
|
||||
});
|
||||
|
||||
gulp.task('addUrlContextPath:revision', function(){
|
||||
return gulp.src(['lemur/static/dist/**/*.css','lemur/static/dist/**/*.js'])
|
||||
gulp.task('addUrlContextPath:revision', function () {
|
||||
return gulp.src(['lemur/static/dist/**/*.css', 'lemur/static/dist/**/*.js'])
|
||||
.pipe(rev())
|
||||
.pipe(gulp.dest('lemur/static/dist'))
|
||||
.pipe(rev.manifest())
|
||||
.pipe(gulp.dest('lemur/static/dist'))
|
||||
})
|
||||
|
||||
gulp.task('addUrlContextPath:revreplace', ['addUrlContextPath:revision'], function(){
|
||||
var manifest = gulp.src("lemur/static/dist/rev-manifest.json");
|
||||
var urlContextPathExists = argv.urlContextPath ? true : false;
|
||||
return gulp.src( "lemur/static/dist/index.html")
|
||||
.pipe(gulpif(urlContextPathExists, revReplace({prefix: argv.urlContextPath + '/', manifest: manifest}, revReplace({manifest: manifest}))))
|
||||
.pipe(gulp.dest('lemur/static/dist'));
|
||||
})
|
||||
});
|
||||
|
||||
gulp.task('addUrlContextPath:revreplace', gulp.series(['addUrlContextPath:revision'], function () {
|
||||
return gulp.src('lemur/static/dist/index.html')
|
||||
.pipe(gulp.dest('lemur/static/dist'));
|
||||
}));
|
||||
|
||||
gulp.task('build', ['build:ngviews', 'build:inject', 'build:images', 'build:fonts', 'build:html', 'build:extras']);
|
||||
gulp.task('package', ['addUrlContextPath', 'package:strip']);
|
||||
gulp.task('addUrlContextPath', gulp.series(['addUrlContextPath:revreplace'], function () {
|
||||
let urlContextPathExists = !!argv.urlContextPath;
|
||||
return gulp.src(['lemur/static/dist/scripts/main*.js', 'lemur/static/dist/angular/**/*.html'])
|
||||
.pipe(gulpif(urlContextPathExists, replace('api/', argv.urlContextPath + '/api/')))
|
||||
.pipe(gulpif(urlContextPathExists, replace('/angular/', '/' + argv.urlContextPath + '/angular/')))
|
||||
.pipe(gulp.dest(function (file) {
|
||||
return file.base;
|
||||
}));
|
||||
}));
|
||||
|
||||
gulp.task('build', gulp.series(['build:images', 'build:fonts', 'build:html', 'build:extras']));
|
||||
gulp.task('package', gulp.series(['addUrlContextPath', 'package:strip']));
|
||||
|
|
|
@ -1,12 +1,14 @@
|
|||
// Contents of: config/karma.conf.js
|
||||
'use strict';
|
||||
|
||||
module.exports = function (config) {
|
||||
config.set({
|
||||
basePath : '../',
|
||||
basePath: '../',
|
||||
|
||||
// Fix for "JASMINE is not supported anymore" warning
|
||||
frameworks : ["jasmine"],
|
||||
frameworks: ['jasmine'],
|
||||
|
||||
files : [
|
||||
files: [
|
||||
'app/lib/angular/angular.js',
|
||||
'app/lib/angular/angular-*.js',
|
||||
'test/lib/angular/angular-mocks.js',
|
||||
|
@ -14,14 +16,22 @@ module.exports = function (config) {
|
|||
'test/unit/**/*.js'
|
||||
],
|
||||
|
||||
autoWatch : true,
|
||||
autoWatch: true,
|
||||
|
||||
browsers : ['Chrome'],
|
||||
browsers: [process.env.TRAVIS ? 'Chrome_travis_ci' : 'Chrome'],
|
||||
customLaunchers: {
|
||||
'Chrome_travis_ci': {
|
||||
base: 'Chrome',
|
||||
flags: ['--no-sandbox', '--disable-setuid-sandbox', '--disable-gpu',],
|
||||
},
|
||||
},
|
||||
|
||||
junitReporter : {
|
||||
outputFile : 'test_out/unit.xml',
|
||||
suite : 'unit'
|
||||
junitReporter: {
|
||||
outputFile: 'test_out/unit.xml',
|
||||
suite: 'unit'
|
||||
//...
|
||||
}
|
||||
},
|
||||
|
||||
failOnEmptyTestSuite: false,
|
||||
});
|
||||
};
|
||||
};
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
'use strict';
|
||||
|
||||
var gulp = require('gulp');
|
||||
const watch = require('./watch')
|
||||
|
||||
var browserSync = require('browser-sync');
|
||||
var httpProxy = require('http-proxy');
|
||||
|
@ -38,7 +39,7 @@ function browserSyncInit(baseDir, files, browser) {
|
|||
|
||||
}
|
||||
|
||||
gulp.task('serve', ['watch'], function () {
|
||||
gulp.task('serve', gulp.series(['watch'], function (done) {
|
||||
browserSyncInit([
|
||||
'.tmp',
|
||||
'lemur/static/app'
|
||||
|
@ -51,9 +52,12 @@ gulp.task('serve', ['watch'], function () {
|
|||
'lemur/static/app/angular/**/*',
|
||||
'lemur/static/app/index.html'
|
||||
]);
|
||||
});
|
||||
|
||||
done();
|
||||
}));
|
||||
|
||||
|
||||
gulp.task('serve:dist', ['build'], function () {
|
||||
gulp.task('serve:dist', gulp.series(['build'], function (done) {
|
||||
browserSyncInit('lemur/static/dist');
|
||||
});
|
||||
done();
|
||||
}));
|
||||
|
|
|
@ -3,10 +3,13 @@
|
|||
var gulp = require('gulp');
|
||||
|
||||
|
||||
gulp.task('watch', ['dev:styles', 'dev:scripts', 'dev:inject', 'dev:fonts'] ,function () {
|
||||
gulp.watch('app/styles/**/*.less', ['dev:styles']);
|
||||
gulp.watch('app/styles/**/*.css', ['dev:styles']);
|
||||
gulp.watch('app/**/*.js', ['dev:scripts']);
|
||||
gulp.watch('app/images/**/*', ['build:images']);
|
||||
gulp.watch('bower.json', ['dev:inject']);
|
||||
});
|
||||
const watch = gulp.task('watch', gulp.series(['dev:inject', 'dev:fonts'] ,function (done) {
|
||||
gulp.watch('app/styles/**/*.less', gulp.series('dev:styles'));
|
||||
gulp.watch('app/styles/**/*.css', gulp.series('dev:styles'));
|
||||
gulp.watch('app/**/*.js', gulp.series('dev:scripts'));
|
||||
gulp.watch('app/images/**/*', gulp.series('build:images'));
|
||||
gulp.watch('bower.json', gulp.series('dev:inject'));
|
||||
done();
|
||||
}));
|
||||
|
||||
module.exports = {watch:watch}
|
||||
|
|
|
@ -15,7 +15,7 @@ __title__ = "lemur"
|
|||
__summary__ = "Certificate management and orchestration service"
|
||||
__uri__ = "https://github.com/Netflix/lemur"
|
||||
|
||||
__version__ = "0.7.0"
|
||||
__version__ = "develop"
|
||||
|
||||
__author__ = "The Lemur developers"
|
||||
__email__ = "security@netflix.com"
|
||||
|
|
|
@ -1,12 +1,15 @@
|
|||
import time
|
||||
import json
|
||||
import arrow
|
||||
|
||||
from flask_script import Manager
|
||||
from flask import current_app
|
||||
|
||||
from lemur.extensions import sentry
|
||||
from lemur.constants import SUCCESS_METRIC_STATUS
|
||||
from lemur.plugins import plugins
|
||||
from lemur.plugins.lemur_acme.plugin import AcmeHandler
|
||||
from lemur.plugins.lemur_aws import s3
|
||||
|
||||
manager = Manager(
|
||||
usage="Handles all ACME related tasks"
|
||||
|
@ -84,3 +87,105 @@ def dnstest(domain, token):
|
|||
|
||||
status = SUCCESS_METRIC_STATUS
|
||||
print("[+] Done with ACME Tests.")
|
||||
|
||||
|
||||
@manager.option(
|
||||
"-t",
|
||||
"--token",
|
||||
dest="token",
|
||||
default="date: " + arrow.utcnow().format("YYYY-MM-DDTHH-mm-ss"),
|
||||
required=False,
|
||||
help="Value of the Token",
|
||||
)
|
||||
@manager.option(
|
||||
"-n",
|
||||
"--token_name",
|
||||
dest="token_name",
|
||||
default="Token-" + arrow.utcnow().format("YYYY-MM-DDTHH-mm-ss"),
|
||||
required=False,
|
||||
help="path",
|
||||
)
|
||||
@manager.option(
|
||||
"-p",
|
||||
"--prefix",
|
||||
dest="prefix",
|
||||
default="test/",
|
||||
required=False,
|
||||
help="S3 bucket prefix",
|
||||
)
|
||||
@manager.option(
|
||||
"-a",
|
||||
"--account_number",
|
||||
dest="account_number",
|
||||
required=True,
|
||||
help="AWS Account",
|
||||
)
|
||||
@manager.option(
|
||||
"-b",
|
||||
"--bucket_name",
|
||||
dest="bucket_name",
|
||||
required=True,
|
||||
help="Bucket Name",
|
||||
)
|
||||
def upload_acme_token_s3(token, token_name, prefix, account_number, bucket_name):
|
||||
"""
|
||||
This method serves for testing the upload_acme_token to S3, fetching the token to verify it, and then deleting it.
|
||||
It mainly serves for testing purposes.
|
||||
:param token:
|
||||
:param token_name:
|
||||
:param prefix:
|
||||
:param account_number:
|
||||
:param bucket_name:
|
||||
:return:
|
||||
"""
|
||||
additional_options = [
|
||||
{
|
||||
"name": "bucket",
|
||||
"value": bucket_name,
|
||||
"type": "str",
|
||||
"required": True,
|
||||
"validation": r"[0-9a-z.-]{3,63}",
|
||||
"helpMessage": "Must be a valid S3 bucket name!",
|
||||
},
|
||||
{
|
||||
"name": "accountNumber",
|
||||
"type": "str",
|
||||
"value": account_number,
|
||||
"required": True,
|
||||
"validation": r"[0-9]{12}",
|
||||
"helpMessage": "A valid AWS account number with permission to access S3",
|
||||
},
|
||||
{
|
||||
"name": "region",
|
||||
"type": "str",
|
||||
"default": "us-east-1",
|
||||
"required": False,
|
||||
"helpMessage": "Region bucket exists",
|
||||
"available": ["us-east-1", "us-west-2", "eu-west-1"],
|
||||
},
|
||||
{
|
||||
"name": "encrypt",
|
||||
"type": "bool",
|
||||
"value": False,
|
||||
"required": False,
|
||||
"helpMessage": "Enable server side encryption",
|
||||
"default": True,
|
||||
},
|
||||
{
|
||||
"name": "prefix",
|
||||
"type": "str",
|
||||
"value": prefix,
|
||||
"required": False,
|
||||
"helpMessage": "Must be a valid S3 object prefix!",
|
||||
},
|
||||
]
|
||||
|
||||
p = plugins.get("aws-s3")
|
||||
p.upload_acme_token(token_name, token, additional_options)
|
||||
|
||||
if not prefix.endswith("/"):
    prefix += "/"  # ensure the prefix ends with a slash before building the S3 object key
|
||||
|
||||
token_res = s3.get(bucket_name, prefix + token_name, account_number=account_number)
|
||||
assert(token_res == token)
|
||||
s3.delete(bucket_name, prefix + token_name, account_number=account_number)
|
||||
|
|
|
@ -7,6 +7,7 @@
|
|||
"""
|
||||
from lemur import database
|
||||
from lemur.api_keys.models import ApiKey
|
||||
from lemur.logs import service as log_service
|
||||
|
||||
|
||||
def get(aid):
|
||||
|
@ -24,6 +25,7 @@ def delete(access_key):
|
|||
:param access_key:
|
||||
:return:
|
||||
"""
|
||||
log_service.audit_log("delete_api_key", access_key.name, "Deleting the API key")
|
||||
database.delete(access_key)
|
||||
|
||||
|
||||
|
@ -34,8 +36,9 @@ def revoke(aid):
|
|||
:return:
|
||||
"""
|
||||
api_key = get(aid)
|
||||
setattr(api_key, "revoked", False)
|
||||
setattr(api_key, "revoked", True)
|
||||
|
||||
log_service.audit_log("revoke_api_key", api_key.name, "Revoking API key")
|
||||
return database.update(api_key)
|
||||
|
||||
|
||||
|
@ -55,6 +58,9 @@ def create(**kwargs):
|
|||
:return:
|
||||
"""
|
||||
api_key = ApiKey(**kwargs)
|
||||
# this logs only metadata about the api key
|
||||
log_service.audit_log("create_api_key", api_key.name, f"Creating the API key {api_key}")
|
||||
|
||||
database.create(api_key)
|
||||
return api_key
|
||||
|
||||
|
@ -69,6 +75,7 @@ def update(api_key, **kwargs):
|
|||
for key, value in kwargs.items():
|
||||
setattr(api_key, key, value)
|
||||
|
||||
log_service.audit_log("update_api_key", api_key.name, f"Update summary - {kwargs}")
|
||||
return database.update(api_key)
|
||||
|
||||
|
||||
|
|
|
@ -105,6 +105,7 @@ class ApiKeyList(AuthenticatedResource):
|
|||
POST /keys HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
Content-Type: application/json;charset=UTF-8
|
||||
|
||||
{
|
||||
"name": "my custom name",
|
||||
|
@ -225,6 +226,7 @@ class ApiKeyUserList(AuthenticatedResource):
|
|||
POST /users/1/keys HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
Content-Type: application/json;charset=UTF-8
|
||||
|
||||
{
|
||||
"name": "my custom name"
|
||||
|
@ -332,6 +334,7 @@ class ApiKeys(AuthenticatedResource):
|
|||
PUT /keys/1 HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
Content-Type: application/json;charset=UTF-8
|
||||
|
||||
{
|
||||
"name": "new_name",
|
||||
|
@ -474,6 +477,7 @@ class UserApiKeys(AuthenticatedResource):
|
|||
PUT /users/1/keys/1 HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
Content-Type: application/json;charset=UTF-8
|
||||
|
||||
{
|
||||
"name": "new_name",
|
||||
|
|
|
@ -210,7 +210,8 @@ class LdapPrincipal:
|
|||
self.ldap_groups = []
|
||||
for group in lgroups:
|
||||
(dn, values) = group
|
||||
self.ldap_groups.append(values["cn"][0].decode("ascii"))
|
||||
if type(values) == dict:
|
||||
self.ldap_groups.append(values["cn"][0].decode("utf-8"))
|
||||
else:
|
||||
lgroups = self.ldap_client.search_s(
|
||||
self.ldap_base_dn, ldap.SCOPE_SUBTREE, ldap_filter, self.ldap_attrs
|
||||
|
|
|
@ -75,9 +75,9 @@ def create_token(user, aid=None, ttl=None):
|
|||
if ttl == -1:
|
||||
del payload["exp"]
|
||||
else:
|
||||
payload["exp"] = ttl
|
||||
payload["exp"] = datetime.utcnow() + timedelta(days=ttl)
|
||||
token = jwt.encode(payload, current_app.config["LEMUR_TOKEN_SECRET"])
|
||||
return token.decode("unicode_escape")
|
||||
return token
|
||||
|
||||
|
||||
def login_required(f):
|
||||
|
@ -101,7 +101,8 @@ def login_required(f):
|
|||
return dict(message="Token is invalid"), 403
|
||||
|
||||
try:
|
||||
payload = jwt.decode(token, current_app.config["LEMUR_TOKEN_SECRET"])
|
||||
header_data = fetch_token_header(token)
|
||||
payload = jwt.decode(token, current_app.config["LEMUR_TOKEN_SECRET"], algorithms=[header_data["alg"]])
|
||||
except jwt.DecodeError:
|
||||
return dict(message="Token is invalid"), 403
|
||||
except jwt.ExpiredSignatureError:
|
||||
|
@ -115,9 +116,8 @@ def login_required(f):
|
|||
return dict(message="Token has been revoked"), 403
|
||||
if access_key.ttl != -1:
|
||||
current_time = datetime.utcnow()
|
||||
expired_time = datetime.fromtimestamp(
|
||||
access_key.issued_at + access_key.ttl
|
||||
)
|
||||
# API key uses days
|
||||
expired_time = datetime.fromtimestamp(access_key.issued_at) + timedelta(days=access_key.ttl)
|
||||
if current_time >= expired_time:
|
||||
return dict(message="Token has expired"), 403
|
||||
|
||||
|
|
|
@ -5,6 +5,8 @@
|
|||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
"""
|
||||
import json
|
||||
|
||||
import jwt
|
||||
import base64
|
||||
import requests
|
||||
|
@ -20,9 +22,10 @@ from lemur.common.utils import get_psuedo_random_string
|
|||
|
||||
from lemur.users import service as user_service
|
||||
from lemur.roles import service as role_service
|
||||
from lemur.logs import service as log_service
|
||||
from lemur.auth.service import create_token, fetch_token_header, get_rsa_public_key
|
||||
from lemur.auth import ldap
|
||||
|
||||
from lemur.plugins.base import plugins
|
||||
|
||||
mod = Blueprint("auth", __name__)
|
||||
api = Api(mod)
|
||||
|
@ -137,6 +140,47 @@ def retrieve_user(user_api_url, access_token):
|
|||
return user, profile
|
||||
|
||||
|
||||
def retrieve_user_memberships(user_api_url, user_membership_api_url, access_token):
|
||||
user, profile = retrieve_user(user_api_url, access_token)
|
||||
|
||||
if user_membership_api_url is None:
|
||||
return user, profile
|
||||
"""
|
||||
Potentially, below code can be made more generic i.e., plugin driven. Unaware of the usage of this
|
||||
code across the community, current implementation is config driven. Without user_membership_api_url
|
||||
configured, it is backward compatible.
|
||||
"""
|
||||
tls_provider = plugins.get(current_app.config.get("PING_USER_MEMBERSHIP_TLS_PROVIDER"))
|
||||
|
||||
# put user id in url
|
||||
user_membership_api_url = user_membership_api_url.replace("%user_id%", profile["userId"])
|
||||
|
||||
session = tls_provider.session(current_app.config.get("PING_USER_MEMBERSHIP_SERVICE"))
|
||||
headers = {"Content-Type": "application/json"}
|
||||
data = {"relation": "DIRECT_ONLY", "groupFilter": {"type": "GOOGLE"}, "size": 500}
|
||||
user_membership = {"email": profile["email"],
|
||||
"thumbnailPhotoUrl": profile["thumbnailPhotoUrl"],
|
||||
"googleGroups": []}
|
||||
while True:
|
||||
# retrieve information about the current user memberships
|
||||
r = session.post(user_membership_api_url, data=json.dumps(data), headers=headers)
|
||||
|
||||
if r.status_code == 200:
|
||||
response = r.json()
|
||||
membership_details = response["data"]
|
||||
for membership in membership_details:
|
||||
user_membership["googleGroups"].append(membership["membership"]["name"])
|
||||
|
||||
if "nextPageToken" in response and response["nextPageToken"]:
|
||||
data["nextPageToken"] = response["nextPageToken"]
|
||||
else:
|
||||
break
|
||||
else:
|
||||
current_app.logger.error(f"Response Code:{r.status_code} {r.text}")
|
||||
break
|
||||
return user, user_membership
|
||||
|
||||
|
||||
def create_user_roles(profile):
|
||||
"""Creates new roles based on profile information.
|
||||
|
||||
|
@ -155,7 +199,7 @@ def create_user_roles(profile):
|
|||
description="This is a google group based role created by Lemur",
|
||||
third_party=True,
|
||||
)
|
||||
if not role.third_party:
|
||||
if (group != 'admin') and (not role.third_party):
|
||||
role = role_service.set_third_party(role.id, third_party_status=True)
|
||||
roles.append(role)
|
||||
else:
|
||||
|
@ -198,7 +242,6 @@ def update_user(user, profile, roles):
|
|||
:param profile:
|
||||
:param roles:
|
||||
"""
|
||||
|
||||
# if we get an sso user create them an account
|
||||
if not user:
|
||||
user = user_service.create(
|
||||
|
@ -212,10 +255,16 @@ def update_user(user, profile, roles):
|
|||
|
||||
else:
|
||||
# we add 'lemur' specific roles, so they do not get marked as removed
|
||||
removed_roles = []
|
||||
for ur in user.roles:
|
||||
if not ur.third_party:
|
||||
roles.append(ur)
|
||||
elif ur not in roles:
|
||||
# This is a role assigned in lemur, but not returned by sso during current login
|
||||
removed_roles.append(ur.name)
|
||||
|
||||
if removed_roles:
|
||||
log_service.audit_log("unassign_role", user.username, f"Un-assigning roles {removed_roles}")
|
||||
# update any changes to the user
|
||||
user_service.update(
|
||||
user.id,
|
||||
|
@ -262,6 +311,7 @@ class Login(Resource):
|
|||
POST /auth/login HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
Content-Type: application/json;charset=UTF-8
|
||||
|
||||
{
|
||||
"username": "test",
|
||||
|
@ -368,7 +418,6 @@ class Ping(Resource):
|
|||
|
||||
# you can either discover these dynamically or simply configure them
|
||||
access_token_url = current_app.config.get("PING_ACCESS_TOKEN_URL")
|
||||
user_api_url = current_app.config.get("PING_USER_API_URL")
|
||||
|
||||
secret = current_app.config.get("PING_SECRET")
|
||||
|
||||
|
@ -384,7 +433,12 @@ class Ping(Resource):
|
|||
error_code = validate_id_token(id_token, args["clientId"], jwks_url)
|
||||
if error_code:
|
||||
return error_code
|
||||
user, profile = retrieve_user(user_api_url, access_token)
|
||||
|
||||
user, profile = retrieve_user_memberships(
|
||||
current_app.config.get("PING_USER_API_URL"),
|
||||
current_app.config.get("PING_USER_MEMBERSHIP_URL"),
|
||||
access_token
|
||||
)
|
||||
roles = create_user_roles(profile)
|
||||
update_user(user, profile, roles)
|
||||
|
||||
|
|
|
@ -18,7 +18,7 @@ from sqlalchemy import (
|
|||
func,
|
||||
ForeignKey,
|
||||
DateTime,
|
||||
PassiveDefault,
|
||||
DefaultClause,
|
||||
Boolean,
|
||||
)
|
||||
from sqlalchemy.dialects.postgresql import JSON
|
||||
|
@ -39,7 +39,7 @@ class Authority(db.Model):
|
|||
plugin_name = Column(String(64))
|
||||
description = Column(Text)
|
||||
options = Column(JSON)
|
||||
date_created = Column(DateTime, PassiveDefault(func.now()), nullable=False)
|
||||
date_created = Column(DateTime, DefaultClause(func.now()), nullable=False)
|
||||
roles = relationship(
|
||||
"Role",
|
||||
secondary=roles_authorities,
|
||||
|
@ -93,9 +93,11 @@ class Authority(db.Model):
|
|||
if not self.options:
|
||||
return None
|
||||
|
||||
for option in json.loads(self.options):
|
||||
if "name" in option and option["name"] == 'cab_compliant':
|
||||
return option["value"]
|
||||
options_array = json.loads(self.options)
|
||||
if isinstance(options_array, list):
|
||||
for option in options_array:
|
||||
if "name" in option and option["name"] == 'cab_compliant':
|
||||
return option["value"]
|
||||
|
||||
return None
|
||||
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
"""
|
||||
from flask import current_app
|
||||
|
||||
from marshmallow import fields, validates_schema, pre_load
|
||||
from marshmallow import fields, validates_schema, pre_load, post_dump
|
||||
from marshmallow import validate
|
||||
from marshmallow.exceptions import ValidationError
|
||||
|
||||
|
@ -24,6 +24,7 @@ from lemur.common import validators, missing
|
|||
|
||||
from lemur.common.fields import ArrowDateTime
|
||||
from lemur.constants import CERTIFICATE_KEY_TYPES
|
||||
from lemur.plugins.base import plugins
|
||||
|
||||
|
||||
class AuthorityInputSchema(LemurInputSchema):
|
||||
|
@ -129,6 +130,12 @@ class AuthorityOutputSchema(LemurOutputSchema):
|
|||
default_validity_days = fields.Integer()
|
||||
authority_certificate = fields.Nested(RootAuthorityCertificateOutputSchema)
|
||||
|
||||
@post_dump
|
||||
def handle_auth_certificate(self, cert):
|
||||
# Plugins may need to modify the cert object before returning it to the user
|
||||
plugin = plugins.get(cert['plugin']['slug'])
|
||||
plugin.wrap_auth_certificate(cert['authority_certificate'])
|
||||
|
||||
|
||||
class AuthorityNestedOutputSchema(LemurOutputSchema):
|
||||
__envelope__ = False
|
||||
|
|
|
@ -117,6 +117,12 @@ def create(**kwargs):
|
|||
"""
|
||||
Creates a new authority.
|
||||
"""
|
||||
ca_name = kwargs.get("name")
|
||||
if get_by_name(ca_name):
|
||||
raise Exception(f"Authority with name {ca_name} already exists")
|
||||
if role_service.get_by_name(f"{ca_name}_admin") or role_service.get_by_name(f"{ca_name}_operator"):
|
||||
raise Exception(f"Admin and/or operator roles for authority {ca_name} already exist")
|
||||
|
||||
body, private_key, chain, roles = mint(**kwargs)
|
||||
|
||||
kwargs["creator"].roles = list(set(list(kwargs["creator"].roles) + roles))
|
||||
|
|
|
@ -130,32 +130,33 @@ class AuthoritiesList(AuthenticatedResource):
|
|||
POST /authorities HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
Content-Type: application/json;charset=UTF-8
|
||||
|
||||
{
|
||||
"country": "US",
|
||||
"state": "California",
|
||||
"location": "Los Gatos",
|
||||
"organization": "Netflix",
|
||||
"organizationalUnit": "Operations",
|
||||
"type": "root",
|
||||
"signingAlgorithm": "sha256WithRSA",
|
||||
"sensitivity": "medium",
|
||||
"keyType": "RSA2048",
|
||||
"plugin": {
|
||||
"slug": "cloudca-issuer"
|
||||
},
|
||||
"name": "TimeTestAuthority5",
|
||||
"owner": "secure@example.com",
|
||||
"description": "test",
|
||||
"commonName": "AcommonName",
|
||||
"validityYears": "20",
|
||||
"extensions": {
|
||||
"subAltNames": {
|
||||
"names": []
|
||||
},
|
||||
"custom": []
|
||||
}
|
||||
}
|
||||
{
|
||||
"country": "US",
|
||||
"state": "California",
|
||||
"location": "Los Gatos",
|
||||
"organization": "Netflix",
|
||||
"organizationalUnit": "Operations",
|
||||
"type": "root",
|
||||
"signingAlgorithm": "sha256WithRSA",
|
||||
"sensitivity": "medium",
|
||||
"keyType": "RSA2048",
|
||||
"plugin": {
|
||||
"slug": "cloudca-issuer"
|
||||
},
|
||||
"name": "TimeTestAuthority5",
|
||||
"owner": "secure@example.com",
|
||||
"description": "test",
|
||||
"commonName": "AcommonName",
|
||||
"validityYears": "20",
|
||||
"extensions": {
|
||||
"subAltNames": {
|
||||
"names": []
|
||||
},
|
||||
"custom": []
|
||||
}
|
||||
}
|
||||
|
||||
**Example response**:
|
||||
|
||||
|
@ -217,8 +218,7 @@ class AuthoritiesList(AuthenticatedResource):
|
|||
:arg parent: the parent authority if this is to be a subca
|
||||
:arg signingAlgorithm: algorithm used to sign the authority
|
||||
:arg keyType: key type
|
||||
:arg sensitivity: the sensitivity of the root key, for CloudCA this determines if the root keys are stored
|
||||
in an HSM
|
||||
:arg sensitivity: the sensitivity of the root key, for CloudCA this determines if the root keys are stored in an HSM
|
||||
:arg keyName: name of the key to store in the HSM (CloudCA)
|
||||
:arg serialNumber: serial number of the authority
|
||||
:arg firstSerial: specifies the starting serial number for certificates issued off of this authority
|
||||
|
@ -301,6 +301,7 @@ class Authorities(AuthenticatedResource):
|
|||
PUT /authorities/1 HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
Content-Type: application/json;charset=UTF-8
|
||||
|
||||
{
|
||||
"name": "TestAuthority5",
|
||||
|
@ -492,23 +493,48 @@ class CertificateAuthority(AuthenticatedResource):
|
|||
class AuthorityVisualizations(AuthenticatedResource):
|
||||
def get(self, authority_id):
|
||||
"""
|
||||
{"name": "flare",
|
||||
"children": [
|
||||
{
|
||||
"name": "analytics",
|
||||
"children": [
|
||||
{
|
||||
"name": "cluster",
|
||||
"children": [
|
||||
{"name": "AgglomerativeCluster", "size": 3938},
|
||||
{"name": "CommunityStructure", "size": 3812},
|
||||
{"name": "HierarchicalCluster", "size": 6714},
|
||||
{"name": "MergeEdge", "size": 743}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]}
|
||||
.. http:get:: /authorities/1/visualize
|
||||
|
||||
Authority visualization
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
GET /certificates/1/visualize HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: text/javascript
|
||||
|
||||
{"name": "flare",
|
||||
"children": [
|
||||
{
|
||||
"name": "analytics",
|
||||
"children": [
|
||||
{
|
||||
"name": "cluster",
|
||||
"children": [
|
||||
{"name": "AgglomerativeCluster", "size": 3938},
|
||||
{"name": "CommunityStructure", "size": 3812},
|
||||
{"name": "HierarchicalCluster", "size": 6714},
|
||||
{"name": "MergeEdge", "size": 743}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
"""
|
||||
authority = service.get(authority_id)
|
||||
return dict(
|
||||
|
|
|
@ -5,13 +5,13 @@
|
|||
:license: Apache, see LICENSE for more details.
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
"""
|
||||
import multiprocessing
|
||||
import sys
|
||||
from flask import current_app
|
||||
from flask_principal import Identity, identity_changed
|
||||
from flask_script import Manager
|
||||
from sqlalchemy import or_
|
||||
from tabulate import tabulate
|
||||
from time import sleep
|
||||
|
||||
from lemur import database
|
||||
from lemur.authorities.models import Authority
|
||||
|
@ -26,9 +26,10 @@ from lemur.certificates.service import (
|
|||
get_all_valid_certs,
|
||||
get,
|
||||
get_all_certs_attached_to_endpoint_without_autorotate,
|
||||
revoke as revoke_certificate,
|
||||
)
|
||||
from lemur.certificates.verify import verify_string
|
||||
from lemur.constants import SUCCESS_METRIC_STATUS, FAILURE_METRIC_STATUS
|
||||
from lemur.constants import SUCCESS_METRIC_STATUS, FAILURE_METRIC_STATUS, CRLReason
|
||||
from lemur.deployment import service as deployment_service
|
||||
from lemur.domains.models import Domain
|
||||
from lemur.endpoints import service as endpoint_service
|
||||
|
@ -118,13 +119,20 @@ def request_rotation(endpoint, certificate, message, commit):
|
|||
status = SUCCESS_METRIC_STATUS
|
||||
|
||||
except Exception as e:
|
||||
sentry.captureException(extra={"certificate_name": str(certificate.name),
|
||||
"endpoint": str(endpoint.dnsname)})
|
||||
current_app.logger.exception(
|
||||
f"Error rotating certificate: {certificate.name}", exc_info=True
|
||||
)
|
||||
print(
|
||||
"[!] Failed to rotate endpoint {0} to certificate {1} reason: {2}".format(
|
||||
endpoint.name, certificate.name, e
|
||||
)
|
||||
)
|
||||
|
||||
metrics.send("endpoint_rotation", "counter", 1, metric_tags={"status": status})
|
||||
metrics.send("endpoint_rotation", "counter", 1, metric_tags={"status": status,
|
||||
"certificate_name": str(certificate.name),
|
||||
"endpoint": str(endpoint.dnsname)})
|
||||
|
||||
|
||||
def request_reissue(certificate, commit):
|
||||
|
@ -223,7 +231,7 @@ def rotate(endpoint_name, new_certificate_name, old_certificate_name, message, c
|
|||
print(
|
||||
f"[+] Rotating endpoint: {endpoint.name} to certificate {new_cert.name}"
|
||||
)
|
||||
log_data["message"] = "Rotating endpoint"
|
||||
log_data["message"] = "Rotating one endpoint"
|
||||
log_data["endpoint"] = endpoint.dnsname
|
||||
log_data["certificate"] = new_cert.name
|
||||
request_rotation(endpoint, new_cert, message, commit)
|
||||
|
@ -231,8 +239,6 @@ def rotate(endpoint_name, new_certificate_name, old_certificate_name, message, c
|
|||
|
||||
elif old_cert and new_cert:
|
||||
print(f"[+] Rotating all endpoints from {old_cert.name} to {new_cert.name}")
|
||||
|
||||
log_data["message"] = "Rotating all endpoints"
|
||||
log_data["certificate"] = new_cert.name
|
||||
log_data["certificate_old"] = old_cert.name
|
||||
log_data["message"] = "Rotating endpoint from old to new cert"
|
||||
|
@ -243,41 +249,23 @@ def rotate(endpoint_name, new_certificate_name, old_certificate_name, message, c
|
|||
current_app.logger.info(log_data)
|
||||
|
||||
else:
|
||||
# No certificate name or endpoint is provided. We will now fetch all endpoints,
|
||||
# which are associated with a certificate that has been replaced
|
||||
print("[+] Rotating all endpoints that have new certificates available")
|
||||
log_data["message"] = "Rotating all endpoints that have new certificates available"
|
||||
for endpoint in endpoint_service.get_all_pending_rotation():
|
||||
log_data["endpoint"] = endpoint.dnsname
|
||||
if len(endpoint.certificate.replaced) == 1:
|
||||
print(
|
||||
f"[+] Rotating {endpoint.name} to {endpoint.certificate.replaced[0].name}"
|
||||
)
|
||||
log_data["certificate"] = endpoint.certificate.replaced[0].name
|
||||
request_rotation(
|
||||
endpoint, endpoint.certificate.replaced[0], message, commit
|
||||
)
|
||||
current_app.logger.info(log_data)
|
||||
|
||||
else:
|
||||
log_data["message"] = "Failed to rotate endpoint due to Multiple replacement certificates found"
|
||||
print(log_data)
|
||||
metrics.send(
|
||||
"endpoint_rotation",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={
|
||||
"status": FAILURE_METRIC_STATUS,
|
||||
"old_certificate_name": str(old_cert),
|
||||
"new_certificate_name": str(
|
||||
endpoint.certificate.replaced[0].name
|
||||
),
|
||||
"endpoint_name": str(endpoint.name),
|
||||
"message": str(message),
|
||||
},
|
||||
)
|
||||
print(
|
||||
f"[!] Failed to rotate endpoint {endpoint.name} reason: "
|
||||
"Multiple replacement certificates found."
|
||||
)
|
||||
log_data["message"] = "Rotating endpoint from old to new cert"
|
||||
if len(endpoint.certificate.replaced) > 1:
|
||||
log_data["message"] = f"Multiple replacement certificates found, going with the first one out of " \
|
||||
f"{len(endpoint.certificate.replaced)}"
|
||||
|
||||
log_data["endpoint"] = endpoint.dnsname
|
||||
log_data["certificate"] = endpoint.certificate.replaced[0].name
|
||||
print(
|
||||
f"[+] Rotating {endpoint.name} to {endpoint.certificate.replaced[0].name}"
|
||||
)
|
||||
request_rotation(endpoint, endpoint.certificate.replaced[0], message, commit)
|
||||
current_app.logger.info(log_data)
|
||||
|
||||
status = SUCCESS_METRIC_STATUS
|
||||
print("[+] Done!")
|
||||
|
@ -368,6 +356,7 @@ def rotate_region(endpoint_name, new_certificate_name, old_certificate_name, mes
|
|||
:param message: Send a rotation notification to the certificates owner.
|
||||
:param commit: Persist changes.
|
||||
:param region: Region in which to rotate the endpoint.
|
||||
#todo: merge this method with rotate()
|
||||
"""
|
||||
if commit:
|
||||
print("[!] Running in COMMIT mode.")
|
||||
|
@ -417,24 +406,20 @@ def rotate_region(endpoint_name, new_certificate_name, old_certificate_name, mes
|
|||
1,
|
||||
metric_tags={
|
||||
"region": region,
|
||||
"old_certificate_name": str(old_cert),
|
||||
"new_certificate_name": str(endpoint.certificate.replaced[0].name),
|
||||
"endpoint_name": str(endpoint.dnsname),
|
||||
},
|
||||
)
|
||||
continue
|
||||
|
||||
if len(endpoint.certificate.replaced) == 1:
|
||||
log_data["certificate"] = endpoint.certificate.replaced[0].name
|
||||
log_data["message"] = "Rotating all endpoints in region"
|
||||
print(log_data)
|
||||
current_app.logger.info(log_data)
|
||||
request_rotation(endpoint, endpoint.certificate.replaced[0], message, commit)
|
||||
status = SUCCESS_METRIC_STATUS
|
||||
else:
|
||||
status = FAILURE_METRIC_STATUS
|
||||
log_data["message"] = "Failed to rotate endpoint due to Multiple replacement certificates found"
|
||||
print(log_data)
|
||||
current_app.logger.info(log_data)
|
||||
log_data["certificate"] = endpoint.certificate.replaced[0].name
|
||||
log_data["message"] = "Rotating all endpoints in region"
|
||||
if len(endpoint.certificate.replaced) > 1:
|
||||
log_data["message"] = f"Multiple replacement certificates found, going with the first one out of " \
|
||||
f"{len(endpoint.certificate.replaced)}"
|
||||
|
||||
request_rotation(endpoint, endpoint.certificate.replaced[0], message, commit)
|
||||
current_app.logger.info(log_data)
|
||||
|
||||
metrics.send(
|
||||
"endpoint_rotation_region",
|
||||
|
@ -442,8 +427,7 @@ def rotate_region(endpoint_name, new_certificate_name, old_certificate_name, mes
|
|||
1,
|
||||
metric_tags={
|
||||
"status": FAILURE_METRIC_STATUS,
|
||||
"old_certificate_name": str(old_cert),
|
||||
"new_certificate_name": str(endpoint.certificate.replaced[0].name),
|
||||
"new_certificate_name": str(log_data["certificate"]),
|
||||
"endpoint_name": str(endpoint.dnsname),
|
||||
"message": str(message),
|
||||
"region": str(region),
|
||||
|
@ -586,11 +570,10 @@ def worker(data, commit, reason):
|
|||
parts = [x for x in data.split(" ") if x]
|
||||
try:
|
||||
cert = get(int(parts[0].strip()))
|
||||
plugin = plugins.get(cert.authority.plugin_name)
|
||||
|
||||
print("[+] Revoking certificate. Id: {0} Name: {1}".format(cert.id, cert.name))
|
||||
if commit:
|
||||
plugin.revoke_certificate(cert, reason)
|
||||
revoke_certificate(cert, reason)
|
||||
|
||||
metrics.send(
|
||||
"certificate_revoke",
|
||||
|
@ -620,10 +603,10 @@ def clear_pending():
|
|||
v.clear_pending_certificates()
|
||||
|
||||
|
||||
@manager.option(
|
||||
"-p", "--path", dest="path", help="Absolute file path to a Lemur query csv."
|
||||
)
|
||||
@manager.option("-r", "--reason", dest="reason", help="Reason to revoke certificate.")
|
||||
@manager.option("-p", "--path", dest="path", help="Absolute file path to a Lemur query csv.")
|
||||
@manager.option("-id", "--certid", dest="cert_id", help="ID of the certificate to be revoked")
|
||||
@manager.option("-r", "--reason", dest="reason", default="unspecified", help="CRL Reason as per RFC 5280 section 5.3.1")
|
||||
@manager.option("-m", "--message", dest="message", help="Message explaining reason for revocation")
|
||||
@manager.option(
|
||||
"-c",
|
||||
"--commit",
|
||||
|
@ -632,20 +615,32 @@ def clear_pending():
|
|||
default=False,
|
||||
help="Persist changes.",
|
||||
)
|
||||
def revoke(path, reason, commit):
|
||||
def revoke(path, cert_id, reason, message, commit):
|
||||
"""
|
||||
Revokes given certificate.
|
||||
"""
|
||||
if not path and not cert_id:
|
||||
print("[!] No input certificates mentioned to revoke")
|
||||
return
|
||||
if path and cert_id:
|
||||
print("[!] Please mention single certificate id (-id) or input file (-p)")
|
||||
return
|
||||
|
||||
if commit:
|
||||
print("[!] Running in COMMIT mode.")
|
||||
|
||||
print("[+] Starting certificate revocation.")
|
||||
|
||||
with open(path, "r") as f:
|
||||
args = [[x, commit, reason] for x in f.readlines()[2:]]
|
||||
if reason not in CRLReason.__members__:
|
||||
reason = CRLReason.unspecified.name
|
||||
comments = {"comments": message, "crl_reason": reason}
|
||||
|
||||
with multiprocessing.Pool(processes=3) as pool:
|
||||
pool.starmap(worker, args)
|
||||
if cert_id:
|
||||
worker(cert_id, commit, comments)
|
||||
else:
|
||||
with open(path, "r") as f:
|
||||
for x in f.readlines()[2:]:
|
||||
worker(x, commit, comments)
|
||||
|
||||
|
||||
@manager.command
|
||||
|
@ -735,3 +730,48 @@ def automatically_enable_autorotate():
|
|||
})
|
||||
cert.rotation = True
|
||||
database.update(cert)
|
||||
|
||||
|
||||
@manager.command
|
||||
def deactivate_entrust_certificates():
|
||||
"""
|
||||
Attempt to deactivate test certificates issued by Entrust
|
||||
"""
|
||||
|
||||
log_data = {
|
||||
"function": f"{__name__}.{sys._getframe().f_code.co_name}",
|
||||
"message": "Deactivating Entrust certificates"
|
||||
}
|
||||
|
||||
certificates = get_all_valid_certs(['entrust-issuer'])
|
||||
entrust_plugin = plugins.get('entrust-issuer')
|
||||
for index, cert in enumerate(certificates):
|
||||
if (index % 10) == 0:
|
||||
# Entrust enforces a rate limit of 10 requests per 30s
|
||||
sleep(30)
|
||||
try:
|
||||
response = entrust_plugin.deactivate_certificate(cert)
|
||||
if response == 200:
|
||||
cert.status = "revoked"
|
||||
else:
|
||||
cert.status = "unknown"
|
||||
|
||||
log_data["valid"] = cert.status
|
||||
log_data["certificate_name"] = cert.name
|
||||
log_data["certificate_id"] = cert.id
|
||||
metrics.send(
|
||||
"certificate_deactivate",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={"status": log_data["valid"],
|
||||
"certificate_name": log_data["certificate_name"],
|
||||
"certificate_id": log_data["certificate_id"]},
|
||||
)
|
||||
current_app.logger.info(log_data)
|
||||
|
||||
database.update(cert)
|
||||
|
||||
except Exception as e:
|
||||
current_app.logger.info(log_data)
|
||||
sentry.captureException()
|
||||
current_app.logger.exception(e)
|
||||
|
|
|
@ -16,7 +16,7 @@ from sqlalchemy import (
|
|||
Integer,
|
||||
ForeignKey,
|
||||
String,
|
||||
PassiveDefault,
|
||||
DefaultClause,
|
||||
func,
|
||||
Column,
|
||||
Text,
|
||||
|
@ -138,7 +138,7 @@ class Certificate(db.Model):
|
|||
not_after = Column(ArrowType)
|
||||
not_after_ix = Index("ix_certificates_not_after", not_after.desc())
|
||||
|
||||
date_created = Column(ArrowType, PassiveDefault(func.now()), nullable=False)
|
||||
date_created = Column(ArrowType, DefaultClause(func.now()), nullable=False)
|
||||
|
||||
signing_algorithm = Column(String(128))
|
||||
status = Column(String(128))
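The PassiveDefault to DefaultClause swap is mechanical: PassiveDefault has long been a deprecated alias for DefaultClause in SQLAlchemy, so the column behaves the same. A minimal sketch of the equivalent, more common server_default spelling (the ArrowType import location is an assumption):

    from sqlalchemy import Column, func
    from sqlalchemy_utils import ArrowType  # assumption: the ArrowType used by the model

    # behaves the same as Column(ArrowType, DefaultClause(func.now()), nullable=False)
    date_created = Column(ArrowType, server_default=func.now(), nullable=False)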
|
||||
|
@ -184,7 +184,6 @@ class Certificate(db.Model):
|
|||
"PendingCertificate",
|
||||
secondary=pending_cert_replacement_associations,
|
||||
backref="pending_replace",
|
||||
viewonly=True,
|
||||
)
|
||||
|
||||
logs = relationship("Log", backref="certificate")
|
||||
|
|
|
@ -16,7 +16,7 @@ from lemur.certificates import utils as cert_utils
|
|||
from lemur.common import missing, utils, validators
|
||||
from lemur.common.fields import ArrowDateTime, Hex
|
||||
from lemur.common.schema import LemurInputSchema, LemurOutputSchema
|
||||
from lemur.constants import CERTIFICATE_KEY_TYPES
|
||||
from lemur.constants import CERTIFICATE_KEY_TYPES, CRLReason
|
||||
from lemur.destinations.schemas import DestinationNestedOutputSchema
|
||||
from lemur.dns_providers.schemas import DnsProvidersNestedOutputSchema
|
||||
from lemur.domains.schemas import DomainNestedOutputSchema
|
||||
|
@ -38,6 +38,7 @@ from lemur.schemas import (
|
|||
AssociatedRotationPolicySchema,
|
||||
)
|
||||
from lemur.users.schemas import UserNestedOutputSchema
|
||||
from lemur.plugins.base import plugins
|
||||
|
||||
|
||||
class CertificateSchema(LemurInputSchema):
|
||||
|
@ -89,7 +90,7 @@ class CertificateInputSchema(CertificateCreationSchema):
|
|||
csr = fields.String(allow_none=True, validate=validators.csr)
|
||||
|
||||
key_type = fields.String(
|
||||
validate=validate.OneOf(CERTIFICATE_KEY_TYPES), missing="RSA2048"
|
||||
validate=validate.OneOf(CERTIFICATE_KEY_TYPES), missing="ECCPRIME256V1"
|
||||
)
|
||||
|
||||
notify = fields.Boolean(default=True)
|
||||
|
@ -160,7 +161,7 @@ class CertificateInputSchema(CertificateCreationSchema):
|
|||
if data.get("body"):
|
||||
data["key_type"] = utils.get_key_type_from_certificate(data["body"])
|
||||
else:
|
||||
data["key_type"] = "RSA2048" # default value
|
||||
data["key_type"] = "ECCPRIME256V1" # default value
|
||||
|
||||
return missing.convert_validity_years(data)
|
||||
|
||||
|
@ -324,6 +325,8 @@ class CertificateOutputSchema(LemurOutputSchema):
|
|||
notifications = fields.Nested(NotificationNestedOutputSchema, many=True)
|
||||
replaces = fields.Nested(CertificateNestedOutputSchema, many=True)
|
||||
authority = fields.Nested(AuthorityNestedOutputSchema)
|
||||
# if this certificate is an authority, the authority information is in root_authority
|
||||
root_authority = fields.Nested(AuthorityNestedOutputSchema)
|
||||
dns_provider = fields.Nested(DnsProvidersNestedOutputSchema)
|
||||
roles = fields.Nested(RoleNestedOutputSchema, many=True)
|
||||
endpoints = fields.Nested(EndpointNestedOutputSchema, many=True, missing=[])
|
||||
|
@ -340,6 +343,8 @@ class CertificateOutputSchema(LemurOutputSchema):
|
|||
|
||||
@post_dump
|
||||
def handle_subject_details(self, data):
|
||||
subject_details = ["country", "state", "location", "organization", "organizational_unit"]
|
||||
|
||||
# Remove subject details if the authority is CA/Browser Forum compliant. The code will use a default set of values in that case.
|
||||
# If CA/Browser Forum compliance of an authority is unknown (None), it is safe to fall back to default values. Thus the below
|
||||
# condition checks for 'not False' ==> 'True or None'
|
||||
|
@ -347,11 +352,29 @@ class CertificateOutputSchema(LemurOutputSchema):
|
|||
is_cab_compliant = data.get("authority").get("isCabCompliant")
|
||||
|
||||
if is_cab_compliant is not False:
|
||||
data.pop("country", None)
|
||||
data.pop("state", None)
|
||||
data.pop("location", None)
|
||||
data.pop("organization", None)
|
||||
data.pop("organizational_unit", None)
|
||||
for field in subject_details:
|
||||
data.pop(field, None)
|
||||
|
||||
# Remove subject fields that are None, otherwise de-serialization complains
|
||||
for field in subject_details:
|
||||
if field in data and data[field] is None:
|
||||
data.pop(field)
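A tiny illustration of the two passes above, with made-up data: a compliant (or unknown-compliance) authority loses its subject overrides entirely, and any leftover None subject fields are dropped so de-serialization does not complain.

    # made-up serialized output
    data = {"authority": {"isCabCompliant": None}, "country": "US", "state": None}
    # pass 1: isCabCompliant is not False  -> "country" and "state" are popped
    # pass 2: any remaining subject field whose value is None would also be popped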
|
||||
|
||||
@post_dump
|
||||
def handle_certificate(self, cert):
|
||||
# Plugins may need to modify the cert object before returning it to the user
|
||||
if cert['authority'] is None:
|
||||
if cert['root_authority'] is None:
|
||||
plugin = None
|
||||
else:
|
||||
# this certificate is an authority
|
||||
plugin = plugins.get(cert['root_authority']['plugin']['slug'])
|
||||
else:
|
||||
plugin = plugins.get(cert['authority']['plugin']['slug'])
|
||||
if plugin:
|
||||
plugin.wrap_certificate(cert)
|
||||
if 'root_authority' in cert:
|
||||
del cert['root_authority']
|
||||
|
||||
|
||||
class CertificateShortOutputSchema(LemurOutputSchema):
|
||||
|
@ -437,6 +460,7 @@ class CertificateExportInputSchema(LemurInputSchema):
|
|||
|
||||
|
||||
class CertificateNotificationOutputSchema(LemurOutputSchema):
|
||||
id = fields.Integer()
|
||||
description = fields.String()
|
||||
issuer = fields.String()
|
||||
name = fields.String()
|
||||
|
@ -451,6 +475,7 @@ class CertificateNotificationOutputSchema(LemurOutputSchema):
|
|||
|
||||
class CertificateRevokeSchema(LemurInputSchema):
|
||||
comments = fields.String()
|
||||
crl_reason = fields.String(validate=validate.OneOf(CRLReason.__members__), missing="unspecified")
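A hedged sketch of a request body the new CertificateRevokeSchema accepts; the camelCase field name matches the example request shown later in the views. An unknown crlReason fails validation, and an omitted one falls back to "unspecified".

    # hypothetical payload for PUT /certificates/<id>/revoke
    payload = {
        "crlReason": "affiliationChanged",  # must be a CRLReason member name
        "comments": "requested by the certificate owner",
    }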
|
||||
|
||||
|
||||
certificates_list_request_parser = RequestParser()
|
||||
|
|
|
@ -6,11 +6,13 @@
|
|||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
"""
|
||||
import arrow
|
||||
import re
|
||||
from cryptography import x509
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from flask import current_app
|
||||
from sqlalchemy import func, or_, not_, cast, Integer
|
||||
from sqlalchemy.sql.expression import false, true
|
||||
|
||||
from lemur import database
|
||||
from lemur.authorities.models import Authority
|
||||
|
@ -19,6 +21,7 @@ from lemur.certificates.schemas import CertificateOutputSchema, CertificateInput
|
|||
from lemur.common.utils import generate_private_key, truthiness
|
||||
from lemur.destinations.models import Destination
|
||||
from lemur.domains.models import Domain
|
||||
from lemur.endpoints import service as endpoint_service
|
||||
from lemur.extensions import metrics, sentry, signals
|
||||
from lemur.models import certificate_associations
|
||||
from lemur.notifications.models import Notification
|
||||
|
@ -85,6 +88,16 @@ def get_by_attributes(conditions):
|
|||
return database.find_all(query, Certificate, conditions).all()
|
||||
|
||||
|
||||
def get_by_root_authority(id):
|
||||
"""
|
||||
Retrieves a certificate by its root_authority id.
|
||||
|
||||
:param id:
|
||||
:return:
|
||||
"""
|
||||
return database.get(Certificate, id, field="root_authority_id")
|
||||
|
||||
|
||||
def delete(cert_id):
|
||||
"""
|
||||
Deletes a certificate.
|
||||
|
@ -105,7 +118,7 @@ def get_all_certs():
|
|||
|
||||
def get_all_valid_certs(authority_plugin_name):
|
||||
"""
|
||||
Retrieves all valid (not expired) certificates within Lemur, for the given authority plugin names
|
||||
Retrieves all valid (not expired & not revoked) certificates within Lemur, for the given authority plugin names
|
||||
which is ignored if no authority_plugin_name is provided.
|
||||
|
||||
Note that depending on the DB size, retrieving all certificates might be an expensive operation
|
||||
|
@ -116,11 +129,12 @@ def get_all_valid_certs(authority_plugin_name):
|
|||
return (
|
||||
Certificate.query.outerjoin(Authority, Authority.id == Certificate.authority_id).filter(
|
||||
Certificate.not_after > arrow.now().format("YYYY-MM-DD")).filter(
|
||||
Authority.plugin_name.in_(authority_plugin_name)).all()
|
||||
Authority.plugin_name.in_(authority_plugin_name)).filter(Certificate.revoked.is_(False)).all()
|
||||
)
|
||||
else:
|
||||
return (
|
||||
Certificate.query.filter(Certificate.not_after > arrow.now().format("YYYY-MM-DD")).all()
|
||||
Certificate.query.filter(Certificate.not_after > arrow.now().format("YYYY-MM-DD")).filter(
|
||||
Certificate.revoked.is_(False)).all()
|
||||
)
|
||||
|
||||
|
||||
|
@ -148,7 +162,8 @@ def get_all_certs_attached_to_endpoint_without_autorotate():
|
|||
"""
|
||||
return (
|
||||
Certificate.query.filter(Certificate.endpoints.any())
|
||||
.filter(Certificate.rotation == False)
|
||||
.filter(Certificate.rotation == false())
|
||||
.filter(Certificate.revoked == false())
|
||||
.filter(Certificate.not_after >= arrow.now())
|
||||
.filter(not_(Certificate.replaced.any()))
|
||||
.all() # noqa
|
||||
|
@ -203,9 +218,9 @@ def get_all_pending_reissue():
|
|||
:return:
|
||||
"""
|
||||
return (
|
||||
Certificate.query.filter(Certificate.rotation == True)
|
||||
Certificate.query.filter(Certificate.rotation == true())
|
||||
.filter(not_(Certificate.replaced.any()))
|
||||
.filter(Certificate.in_rotation_window == True)
|
||||
.filter(Certificate.in_rotation_window == true())
|
||||
.all()
|
||||
) # noqa
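The == True / == False comparisons are swapped for SQLAlchemy's true() and false() constructs, which render as proper SQL boolean literals on every dialect and avoid the flake8 E712 noqa markers. A minimal before/after sketch, assuming a query over the Certificate model as above:

    from sqlalchemy.sql.expression import false, true

    # before: compares against Python literals and needs a lint suppression
    query.filter(Certificate.rotation == True)  # noqa: E712

    # after: explicit SQL boolean constructs
    query.filter(Certificate.rotation == true()).filter(Certificate.revoked == false())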
|
||||
|
||||
|
@ -359,7 +374,12 @@ def create(**kwargs):
|
|||
try:
|
||||
cert_body, private_key, cert_chain, external_id, csr = mint(**kwargs)
|
||||
except Exception:
|
||||
current_app.logger.error("Exception minting certificate", exc_info=True)
|
||||
log_data = {
|
||||
"message": "Exception minting certificate",
|
||||
"issuer": kwargs["authority"].name,
|
||||
"cn": kwargs["common_name"],
|
||||
}
|
||||
current_app.logger.error(log_data, exc_info=True)
|
||||
sentry.captureException()
|
||||
raise
|
||||
kwargs["body"] = cert_body
|
||||
|
@ -379,6 +399,7 @@ def create(**kwargs):
|
|||
cert = Certificate(**kwargs)
|
||||
kwargs["creator"].certificates.append(cert)
|
||||
else:
|
||||
# ACME path
|
||||
cert = PendingCertificate(**kwargs)
|
||||
kwargs["creator"].pending_certificates.append(cert)
|
||||
|
||||
|
@ -518,7 +539,7 @@ def render(args):
|
|||
)
|
||||
|
||||
if current_app.config.get("ALLOW_CERT_DELETION", False):
|
||||
query = query.filter(Certificate.deleted == False) # noqa
|
||||
query = query.filter(Certificate.deleted == false())
|
||||
|
||||
result = database.sort_and_page(query, Certificate, args)
|
||||
return result
|
||||
|
@ -554,20 +575,29 @@ def query_common_name(common_name, args):
|
|||
:return:
|
||||
"""
|
||||
owner = args.pop("owner")
|
||||
if not owner:
|
||||
owner = "%"
|
||||
page = args.pop("page")
|
||||
count = args.pop("count")
|
||||
|
||||
paginate = page and count
|
||||
query = database.session_query(Certificate) if paginate else Certificate.query
|
||||
|
||||
# only not expired certificates
|
||||
current_time = arrow.utcnow()
|
||||
query = query.filter(Certificate.not_after >= current_time.format("YYYY-MM-DD"))\
|
||||
.filter(not_(Certificate.revoked))\
|
||||
.filter(not_(Certificate.replaced.any())) # ignore rotated certificates to avoid duplicates
|
||||
|
||||
result = (
|
||||
Certificate.query.filter(Certificate.cn.ilike(common_name))
|
||||
.filter(Certificate.owner.ilike(owner))
|
||||
.filter(Certificate.not_after >= current_time.format("YYYY-MM-DD"))
|
||||
.all()
|
||||
)
|
||||
if owner:
|
||||
query = query.filter(Certificate.owner.ilike(owner))
|
||||
|
||||
return result
|
||||
if common_name != "%":
|
||||
# if common_name is a wildcard ('%'), no need to include it in the query
|
||||
query = query.filter(Certificate.cn.ilike(common_name))
|
||||
|
||||
if paginate:
|
||||
return database.paginate(query, page, count)
|
||||
|
||||
return query.all()
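A hedged sketch of how the reworked helper is called; the filter values are made up, and pagination only kicks in when both page and count are truthy:

    # unpaginated: returns a plain list of matching Certificate objects
    query_common_name("*.test.example.net", {"owner": "joe@example.com", "page": None, "count": None})

    # paginated: returns dict(items=[...], total=<n>) via database.paginate()
    query_common_name("*.test.example.net", {"owner": "joe@example.com", "page": 1, "count": 20})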
|
||||
|
||||
|
||||
def create_csr(**csr_config):
|
||||
|
@ -659,7 +689,16 @@ def stats(**kwargs):
|
|||
:param kwargs:
|
||||
:return:
|
||||
"""
|
||||
if kwargs.get("metric") == "not_after":
|
||||
|
||||
# Verify requested metric
|
||||
allow_list = ["bits", "issuer", "not_after", "signing_algorithm"]
|
||||
req_metric = kwargs.get("metric")
|
||||
if req_metric not in allow_list:
|
||||
raise Exception(
|
||||
f"Stats not available for requested metric: {req_metric}"
|
||||
)
|
||||
|
||||
if req_metric == "not_after":
|
||||
start = arrow.utcnow()
|
||||
end = start.shift(weeks=+32)
|
||||
items = (
|
||||
|
@ -671,7 +710,7 @@ def stats(**kwargs):
|
|||
)
|
||||
|
||||
else:
|
||||
attr = getattr(Certificate, kwargs.get("metric"))
|
||||
attr = getattr(Certificate, req_metric)
|
||||
query = database.db.session.query(attr, func.count(attr))
|
||||
|
||||
items = query.group_by(attr).all()
|
||||
|
@ -772,6 +811,103 @@ def reissue_certificate(certificate, replace=None, user=None):
|
|||
if replace:
|
||||
primitives["replaces"] = [certificate]
|
||||
|
||||
# Modify description to include the certificate ID being reissued and mention that this is created by Lemur
|
||||
# as part of reissue
|
||||
reissue_message_prefix = "Reissued by Lemur for cert ID "
|
||||
reissue_message = re.compile(f"{reissue_message_prefix}([0-9]+)")
|
||||
if primitives["description"]:
|
||||
match = reissue_message.search(primitives["description"])
|
||||
if match:
|
||||
primitives["description"] = primitives["description"].replace(match.group(1), str(certificate.id))
|
||||
else:
|
||||
primitives["description"] = f"{reissue_message_prefix}{certificate.id}, {primitives['description']}"
|
||||
else:
|
||||
primitives["description"] = f"{reissue_message_prefix}{certificate.id}"
|
||||
|
||||
# Rotate the certificate to ECCPRIME256V1 if cert owner is present in the configured list
|
||||
# This is a temporary change intending to rotate certificates to ECC, if opted in by certificate owners
|
||||
# Unless a use case is identified, this will be removed in mid-Q2 2021
|
||||
ecc_reissue_owner_list = current_app.config.get("ROTATE_TO_ECC_OWNER_LIST", [])
|
||||
ecc_reissue_exclude_cn_list = current_app.config.get("ECC_NON_COMPATIBLE_COMMON_NAMES", [])
|
||||
|
||||
if (certificate.owner in ecc_reissue_owner_list) and (certificate.cn not in ecc_reissue_exclude_cn_list):
|
||||
primitives["key_type"] = "ECCPRIME256V1"
|
||||
|
||||
new_cert = create(**primitives)
|
||||
|
||||
return new_cert
|
||||
|
||||
|
||||
def is_attached_to_endpoint(certificate_name, endpoint_name):
|
||||
"""
|
||||
Find whether the given certificate is attached to the endpoint. Both certificate and endpoint are identified by name.
|
||||
This method talks to the ELB and fetches the real-time information.
|
||||
:param certificate_name:
|
||||
:param endpoint_name:
|
||||
:return: True if certificate is attached to the given endpoint, False otherwise
|
||||
"""
|
||||
endpoint = endpoint_service.get_by_name(endpoint_name)
|
||||
attached_certificates = endpoint.source.plugin.get_endpoint_certificate_names(endpoint)
|
||||
return certificate_name in attached_certificates
|
||||
|
||||
|
||||
def remove_from_destination(certificate, destination):
|
||||
"""
|
||||
Remove the certificate from the given destination if clean() is implemented
|
||||
:param certificate:
|
||||
:param destination:
|
||||
:return:
|
||||
"""
|
||||
plugin = plugins.get(destination.plugin_name)
|
||||
if not hasattr(plugin, "clean"):
|
||||
info_text = f"Cannot clean certificate {certificate.name}, {destination.plugin_name} plugin does not implement 'clean()'"
|
||||
current_app.logger.warning(info_text)
|
||||
else:
|
||||
plugin.clean(certificate=certificate, options=destination.options)
|
||||
|
||||
|
||||
def revoke(certificate, reason):
|
||||
plugin = plugins.get(certificate.authority.plugin_name)
|
||||
plugin.revoke_certificate(certificate, reason)
|
||||
|
||||
# Perform cleanup after revoke
|
||||
return cleanup_after_revoke(certificate)
|
||||
|
||||
|
||||
def cleanup_after_revoke(certificate):
|
||||
"""
|
||||
Perform the needed cleanup for a revoked certificate. This includes -
|
||||
1. Disabling notification
|
||||
2. Disabling auto-rotation
|
||||
3. Updating the certificate status to 'revoked'
|
||||
4. Removing it from AWS
|
||||
:param certificate: Certificate object to modify and update in DB
|
||||
:return: None
|
||||
"""
|
||||
certificate.notify = False
|
||||
certificate.rotation = False
|
||||
certificate.status = 'revoked'
|
||||
|
||||
error_message = ""
|
||||
|
||||
for destination in list(certificate.destinations):
|
||||
try:
|
||||
remove_from_destination(certificate, destination)
|
||||
certificate.destinations.remove(destination)
|
||||
except Exception as e:
|
||||
# This cleanup is best-effort since the certificate is already revoked at this point.
|
||||
# We will capture the exception and move on to the next destination
|
||||
sentry.captureException()
|
||||
error_message = error_message + f"Failed to remove destination: {destination.label}. {str(e)}. "
|
||||
|
||||
database.update(certificate)
|
||||
return error_message
|
||||
|
||||
|
||||
def get_issued_cert_count_for_authority(authority):
|
||||
"""
|
||||
Returns the count of certs issued by the specified authority.
|
||||
|
||||
:return:
|
||||
"""
|
||||
return database.db.session.query(Certificate).filter(Certificate.authority_id == authority.id).count()
|
||||
|
|
|
@ -82,4 +82,4 @@ def get_key_type_from_csr(data):
|
|||
raise Exception("Unsupported key type")
|
||||
|
||||
except NotImplemented:
|
||||
raise NotImplemented()
|
||||
raise NotImplementedError
|
||||
|
|
|
@ -19,7 +19,7 @@ from lemur.auth.permissions import AuthorityPermission, CertificatePermission
|
|||
|
||||
from lemur.certificates import service
|
||||
from lemur.certificates.models import Certificate
|
||||
from lemur.plugins.base import plugins
|
||||
from lemur.extensions import sentry
|
||||
from lemur.certificates.schemas import (
|
||||
certificate_input_schema,
|
||||
certificate_output_schema,
|
||||
|
@ -28,10 +28,12 @@ from lemur.certificates.schemas import (
|
|||
certificate_export_input_schema,
|
||||
certificate_edit_input_schema,
|
||||
certificates_list_output_schema_factory,
|
||||
certificate_revoke_schema,
|
||||
)
|
||||
|
||||
from lemur.roles import service as role_service
|
||||
from lemur.logs import service as log_service
|
||||
from lemur.plugins.base import plugins
|
||||
|
||||
|
||||
mod = Blueprint("certificates", __name__)
|
||||
|
@ -50,17 +52,20 @@ class CertificatesListValid(AuthenticatedResource):
|
|||
"""
|
||||
.. http:get:: /certificates/valid/<query>
|
||||
|
||||
The current list of not-expired certificates for a given common name, and owner
|
||||
The current list of not-expired certificates for a given common name and owner. The API offers
|
||||
optional pagination. One can send a page number (>=1) and the desired count per page. The returned data
|
||||
contains the total number of certificates, which helps in determining the last page. Pagination
|
||||
is not applied if the page or count parameter is missing or zero.
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
GET /certificates/valid?filter=cn;*.test.example.net&owner=joe@example.com
|
||||
HTTP/1.1
|
||||
|
||||
GET /certificates/valid?filter=cn;*.test.example.net&owner=joe@example.com&page=1&count=20 HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
**Example response**:
|
||||
**Example response (with single cert to be concise)**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
|
@ -127,10 +132,15 @@ class CertificatesListValid(AuthenticatedResource):
|
|||
:statuscode 403: unauthenticated
|
||||
|
||||
"""
|
||||
parser = paginated_parser.copy()
|
||||
args = parser.parse_args()
|
||||
# using non-paginated parser to ensure backward compatibility
|
||||
self.reqparse.add_argument("filter", type=str, location="args")
|
||||
self.reqparse.add_argument("owner", type=str, location="args")
|
||||
self.reqparse.add_argument("count", type=int, location="args")
|
||||
self.reqparse.add_argument("page", type=int, location="args")
|
||||
|
||||
args = self.reqparse.parse_args()
|
||||
args["user"] = g.user
|
||||
common_name = args["filter"].split(";")[1]
|
||||
common_name = args.pop("filter").split(";")[1]
|
||||
return service.query_common_name(common_name, args)
|
||||
|
||||
|
||||
|
@ -368,6 +378,7 @@ class CertificatesList(AuthenticatedResource):
|
|||
POST /certificates HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
Content-Type: application/json;charset=UTF-8
|
||||
|
||||
{
|
||||
"owner": "secure@example.net",
|
||||
|
@ -517,6 +528,7 @@ class CertificatesUpload(AuthenticatedResource):
|
|||
POST /certificates/upload HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
Content-Type: application/json;charset=UTF-8
|
||||
|
||||
{
|
||||
"owner": "joe@example.com",
|
||||
|
@ -624,7 +636,12 @@ class CertificatesStats(AuthenticatedResource):
|
|||
|
||||
args = self.reqparse.parse_args()
|
||||
|
||||
items = service.stats(**args)
|
||||
try:
|
||||
items = service.stats(**args)
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
return dict(message=f"Failed to retrieve stats: {str(e)}"), 400
|
||||
|
||||
return dict(items=items, total=len(items))
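With the allow-list in the service layer and this try/except, an unsupported metric now surfaces as a 400 with a message instead of an unhandled 500. A hypothetical client-side sketch; the host, API prefix and token are assumptions:

    import requests

    resp = requests.get(
        "https://lemur.example.com/api/1/certificates/stats",
        params={"metric": "issuer"},  # one of: bits, issuer, not_after, signing_algorithm
        headers={"Authorization": "Bearer <token>"},
    )
    if resp.status_code == 400:
        print(resp.json()["message"])  # e.g. "Failed to retrieve stats: ..."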
|
||||
|
||||
|
||||
|
@ -675,6 +692,16 @@ class CertificatePrivateKey(AuthenticatedResource):
|
|||
return dict(message="You are not authorized to view this key"), 403
|
||||
|
||||
log_service.create(g.current_user, "key_view", certificate=cert)
|
||||
|
||||
# Plugins may need to modify the cert object before returning it to the user
|
||||
if cert.root_authority:
|
||||
# this certificate is an authority
|
||||
plugin_name = cert.root_authority.plugin_name
|
||||
else:
|
||||
plugin_name = cert.authority.plugin_name
|
||||
plugin = plugins.get(plugin_name)
|
||||
plugin.wrap_private_key(cert)
|
||||
|
||||
response = make_response(jsonify(key=cert.private_key), 200)
|
||||
response.headers["cache-control"] = "private, max-age=0, no-cache, no-store"
|
||||
response.headers["pragma"] = "no-cache"
|
||||
|
@ -783,6 +810,7 @@ class Certificates(AuthenticatedResource):
|
|||
PUT /certificates/1 HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
Content-Type: application/json;charset=UTF-8
|
||||
|
||||
{
|
||||
"owner": "jimbob@example.com",
|
||||
|
@ -888,8 +916,24 @@ class Certificates(AuthenticatedResource):
|
|||
if cert.owner != data["owner"]:
|
||||
service.cleanup_owner_roles_notification(cert.owner, data)
|
||||
|
||||
error_message = ""
|
||||
# if destination is removed, cleanup the certificate from AWS
|
||||
for destination in cert.destinations:
|
||||
if destination not in data["destinations"]:
|
||||
try:
|
||||
service.remove_from_destination(cert, destination)
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
# Add the removed destination back
|
||||
data["destinations"].append(destination)
|
||||
error_message = error_message + f"Failed to remove destination: {destination.label}. {str(e)}. "
|
||||
|
||||
# go ahead with DB update
|
||||
cert = service.update(certificate_id, **data)
|
||||
log_service.create(g.current_user, "update_cert", certificate=cert)
|
||||
|
||||
if error_message:
|
||||
return dict(message=f"Edit Successful except -\n\n {error_message}"), 400
|
||||
return cert
|
||||
|
||||
@validate_schema(certificate_edit_input_schema, certificate_output_schema)
|
||||
|
@ -906,6 +950,7 @@ class Certificates(AuthenticatedResource):
|
|||
POST /certificates/1/update/notify HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
Content-Type: application/json;charset=UTF-8
|
||||
|
||||
{
|
||||
"notify": false
|
||||
|
@ -1155,6 +1200,7 @@ class NotificationCertificatesList(AuthenticatedResource):
|
|||
)
|
||||
parser.add_argument("creator", type=str, location="args")
|
||||
parser.add_argument("show", type=str, location="args")
|
||||
parser.add_argument("showExpired", type=int, location="args")
|
||||
|
||||
args = parser.parse_args()
|
||||
args["notification_id"] = notification_id
|
||||
|
@ -1273,6 +1319,7 @@ class CertificateExport(AuthenticatedResource):
|
|||
PUT /certificates/1/export HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
Content-Type: application/json;charset=UTF-8
|
||||
|
||||
{
|
||||
"export": {
|
||||
|
@ -1380,7 +1427,7 @@ class CertificateRevoke(AuthenticatedResource):
|
|||
self.reqparse = reqparse.RequestParser()
|
||||
super(CertificateRevoke, self).__init__()
|
||||
|
||||
@validate_schema(None, None)
|
||||
@validate_schema(certificate_revoke_schema, None)
|
||||
def put(self, certificate_id, data=None):
|
||||
"""
|
||||
.. http:put:: /certificates/1/revoke
|
||||
|
@ -1394,6 +1441,12 @@ class CertificateRevoke(AuthenticatedResource):
|
|||
POST /certificates/1/revoke HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
Content-Type: application/json;charset=UTF-8
|
||||
|
||||
{
|
||||
"crlReason": "affiliationChanged",
|
||||
"comments": "Additional details if any"
|
||||
}
|
||||
|
||||
**Example response**:
|
||||
|
||||
|
@ -1404,12 +1457,13 @@ class CertificateRevoke(AuthenticatedResource):
|
|||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
'id': 1
|
||||
"id": 1
|
||||
}
|
||||
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
:statuscode 403: unauthenticated
|
||||
:statuscode 403: unauthenticated or cert attached to LB
|
||||
:statuscode 400: encountered error, more details in error message
|
||||
|
||||
"""
|
||||
cert = service.get(certificate_id)
|
||||
|
@ -1432,17 +1486,27 @@ class CertificateRevoke(AuthenticatedResource):
|
|||
return dict(message="Cannot revoke certificate. No external id found."), 400
|
||||
|
||||
if cert.endpoints:
|
||||
return (
|
||||
dict(
|
||||
message="Cannot revoke certificate. Endpoints are deployed with the given certificate."
|
||||
),
|
||||
403,
|
||||
)
|
||||
for endpoint in cert.endpoints:
|
||||
if service.is_attached_to_endpoint(cert.name, endpoint.name):
|
||||
return (
|
||||
dict(
|
||||
message="Cannot revoke certificate. Endpoints are deployed with the given certificate."
|
||||
),
|
||||
403,
|
||||
)
|
||||
|
||||
plugin = plugins.get(cert.authority.plugin_name)
|
||||
plugin.revoke_certificate(cert, data)
|
||||
log_service.create(g.current_user, "revoke_cert", certificate=cert)
|
||||
return dict(id=cert.id)
|
||||
try:
|
||||
error_message = service.revoke(cert, data)
|
||||
log_service.create(g.current_user, "revoke_cert", certificate=cert)
|
||||
|
||||
if error_message:
|
||||
return dict(message=f"Certificate (id:{cert.id}) is revoked - {error_message}"), 400
|
||||
return dict(id=cert.id)
|
||||
except NotImplementedError as ne:
|
||||
return dict(message="Revoke is not implemented for issuer of this certificate"), 400
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
return dict(message=f"Failed to revoke: {str(e)}"), 400
|
||||
|
||||
|
||||
api.add_resource(
|
||||
|
|
|
@ -20,6 +20,7 @@ from flask import current_app
|
|||
from lemur.authorities.service import get as get_authority
|
||||
from lemur.certificates import cli as cli_certificate
|
||||
from lemur.common.redis import RedisHandler
|
||||
from lemur.constants import ACME_ADDITIONAL_ATTEMPTS
|
||||
from lemur.destinations import service as destinations_service
|
||||
from lemur.dns_providers import cli as cli_dns_providers
|
||||
from lemur.endpoints import cli as cli_endpoints
|
||||
|
@ -273,7 +274,8 @@ def fetch_acme_cert(id):
|
|||
real_cert = cert.get("cert")
|
||||
# It's necessary to reload the pending cert due to detached instance: http://sqlalche.me/e/bhk3
|
||||
pending_cert = pending_certificate_service.get(cert.get("pending_cert").id)
|
||||
if not pending_cert:
|
||||
if not pending_cert or pending_cert.resolved:
|
||||
# pending_cert is cleared or it was resolved by another process
|
||||
log_data[
|
||||
"message"
|
||||
] = "Pending certificate doesn't exist anymore. Was it resolved by another process?"
|
||||
|
@ -301,7 +303,7 @@ def fetch_acme_cert(id):
|
|||
error_log["last_error"] = cert.get("last_error")
|
||||
error_log["cn"] = pending_cert.cn
|
||||
|
||||
if pending_cert.number_attempts > 4:
|
||||
if pending_cert.number_attempts > ACME_ADDITIONAL_ATTEMPTS:
|
||||
error_log["message"] = "Deleting pending certificate"
|
||||
send_pending_failure_notification(
|
||||
pending_cert, notify_owner=pending_cert.notify
|
||||
|
@ -656,11 +658,12 @@ def certificate_rotate(**kwargs):
|
|||
|
||||
current_app.logger.debug(log_data)
|
||||
try:
|
||||
notify = current_app.config.get("ENABLE_ROTATION_NOTIFICATION", None)
|
||||
if region:
|
||||
log_data["region"] = region
|
||||
cli_certificate.rotate_region(None, None, None, None, True, region)
|
||||
cli_certificate.rotate_region(None, None, None, notify, True, region)
|
||||
else:
|
||||
cli_certificate.rotate(None, None, None, None, True)
|
||||
cli_certificate.rotate(None, None, None, notify, True)
|
||||
except SoftTimeLimitExceeded:
|
||||
log_data["message"] = "Certificate rotate: Time limit exceeded."
|
||||
current_app.logger.error(log_data)
|
||||
|
@ -759,7 +762,7 @@ def check_revoked():
|
|||
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "check if any certificates are revoked revoked",
|
||||
"message": "check if any valid certificate is revoked",
|
||||
"task_id": task_id,
|
||||
}
|
||||
|
||||
|
@ -820,6 +823,78 @@ def notify_expirations():
|
|||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=3600)
|
||||
def notify_authority_expirations():
|
||||
"""
|
||||
This celery task notifies about expiring certificate authority certs
|
||||
:return:
|
||||
"""
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
task_id = None
|
||||
if celery.current_task:
|
||||
task_id = celery.current_task.request.id
|
||||
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "notify for certificate authority cert expiration",
|
||||
"task_id": task_id,
|
||||
}
|
||||
|
||||
if task_id and is_task_active(function, task_id, None):
|
||||
log_data["message"] = "Skipping task: Task is already active"
|
||||
current_app.logger.debug(log_data)
|
||||
return
|
||||
|
||||
current_app.logger.debug(log_data)
|
||||
try:
|
||||
cli_notification.authority_expirations()
|
||||
except SoftTimeLimitExceeded:
|
||||
log_data["message"] = "Notify expiring CA Time limit exceeded."
|
||||
current_app.logger.error(log_data)
|
||||
sentry.captureException()
|
||||
metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
|
||||
return
|
||||
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=3600)
|
||||
def send_security_expiration_summary():
|
||||
"""
|
||||
This celery task sends a summary about expiring certificates to the security team.
|
||||
:return:
|
||||
"""
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
task_id = None
|
||||
if celery.current_task:
|
||||
task_id = celery.current_task.request.id
|
||||
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "send summary for certificate expiration",
|
||||
"task_id": task_id,
|
||||
}
|
||||
|
||||
if task_id and is_task_active(function, task_id, None):
|
||||
log_data["message"] = "Skipping task: Task is already active"
|
||||
current_app.logger.debug(log_data)
|
||||
return
|
||||
|
||||
current_app.logger.debug(log_data)
|
||||
try:
|
||||
cli_notification.security_expiration_summary(current_app.config.get("EXCLUDE_CN_FROM_NOTIFICATION", []))
|
||||
except SoftTimeLimitExceeded:
|
||||
log_data["message"] = "Send summary for expiring certs Time limit exceeded."
|
||||
current_app.logger.error(log_data)
|
||||
sentry.captureException()
|
||||
metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
|
||||
return
|
||||
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=3600)
|
||||
def enable_autorotate_for_certs_attached_to_endpoint():
|
||||
"""
|
||||
|
@ -842,3 +917,39 @@ def enable_autorotate_for_certs_attached_to_endpoint():
|
|||
cli_certificate.automatically_enable_autorotate()
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=3600)
|
||||
def deactivate_entrust_test_certificates():
|
||||
"""
|
||||
This celery task attempts to deactivate all Entrust certificates that have not yet been deactivated, and should only run in TEST
|
||||
:return:
|
||||
"""
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
task_id = None
|
||||
if celery.current_task:
|
||||
task_id = celery.current_task.request.id
|
||||
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "deactivate entrust certificates",
|
||||
"task_id": task_id,
|
||||
}
|
||||
|
||||
if task_id and is_task_active(function, task_id, None):
|
||||
log_data["message"] = "Skipping task: Task is already active"
|
||||
current_app.logger.debug(log_data)
|
||||
return
|
||||
|
||||
current_app.logger.debug(log_data)
|
||||
try:
|
||||
cli_certificate.deactivate_entrust_certificates()
|
||||
except SoftTimeLimitExceeded:
|
||||
log_data["message"] = "Time limit exceeded."
|
||||
current_app.logger.error(log_data)
|
||||
sentry.captureException()
|
||||
metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
|
||||
return
|
||||
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
|
|
@ -95,9 +95,11 @@ def organization(cert):
|
|||
:return:
|
||||
"""
|
||||
try:
|
||||
return cert.subject.get_attributes_for_oid(x509.OID_ORGANIZATION_NAME)[
|
||||
0
|
||||
].value.strip()
|
||||
o = cert.subject.get_attributes_for_oid(x509.OID_ORGANIZATION_NAME)
|
||||
if not o:
|
||||
return None
|
||||
|
||||
return o[0].value.strip()
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
current_app.logger.error("Unable to get organization! {0}".format(e))
|
||||
|
@ -110,9 +112,11 @@ def organizational_unit(cert):
|
|||
:return:
|
||||
"""
|
||||
try:
|
||||
return cert.subject.get_attributes_for_oid(x509.OID_ORGANIZATIONAL_UNIT_NAME)[
|
||||
0
|
||||
].value.strip()
|
||||
ou = cert.subject.get_attributes_for_oid(x509.OID_ORGANIZATIONAL_UNIT_NAME)
|
||||
if not ou:
|
||||
return None
|
||||
|
||||
return ou[0].value.strip()
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
current_app.logger.error("Unable to get organizational unit! {0}".format(e))
|
||||
|
@ -125,9 +129,11 @@ def country(cert):
|
|||
:return:
|
||||
"""
|
||||
try:
|
||||
return cert.subject.get_attributes_for_oid(x509.OID_COUNTRY_NAME)[
|
||||
0
|
||||
].value.strip()
|
||||
c = cert.subject.get_attributes_for_oid(x509.OID_COUNTRY_NAME)
|
||||
if not c:
|
||||
return None
|
||||
|
||||
return c[0].value.strip()
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
current_app.logger.error("Unable to get country! {0}".format(e))
|
||||
|
@ -140,9 +146,11 @@ def state(cert):
|
|||
:return:
|
||||
"""
|
||||
try:
|
||||
return cert.subject.get_attributes_for_oid(x509.OID_STATE_OR_PROVINCE_NAME)[
|
||||
0
|
||||
].value.strip()
|
||||
s = cert.subject.get_attributes_for_oid(x509.OID_STATE_OR_PROVINCE_NAME)
|
||||
if not s:
|
||||
return None
|
||||
|
||||
return s[0].value.strip()
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
current_app.logger.error("Unable to get state! {0}".format(e))
|
||||
|
@ -155,9 +163,11 @@ def location(cert):
|
|||
:return:
|
||||
"""
|
||||
try:
|
||||
return cert.subject.get_attributes_for_oid(x509.OID_LOCALITY_NAME)[
|
||||
0
|
||||
].value.strip()
|
||||
loc = cert.subject.get_attributes_for_oid(x509.OID_LOCALITY_NAME)
|
||||
if not loc:
|
||||
return None
|
||||
|
||||
return loc[0].value.strip()
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
current_app.logger.error("Unable to get location! {0}".format(e))
|
||||
|
|
|
@ -10,6 +10,7 @@ import random
|
|||
import re
|
||||
import string
|
||||
import pem
|
||||
import base64
|
||||
|
||||
import sqlalchemy
|
||||
from cryptography import x509
|
||||
|
@ -34,6 +35,12 @@ paginated_parser.add_argument("filter", type=str, location="args")
|
|||
paginated_parser.add_argument("owner", type=str, location="args")
|
||||
|
||||
|
||||
def base64encode(string):
|
||||
# Performs string-to-string Base64 encoding using the base64.b64encode() function,
|
||||
# which encodes bytes to bytes.
|
||||
return base64.b64encode(string.encode()).decode()
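A quick usage note for the new helper: it round-trips str to bytes to base64 bytes and back to str, so callers never handle bytes themselves.

    assert base64encode("hello") == "aGVsbG8="  # uses the helper defined above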
|
||||
|
||||
|
||||
def get_psuedo_random_string():
|
||||
"""
|
||||
Create a random and strongish challenge.
|
||||
|
|
|
@ -3,6 +3,8 @@
|
|||
:copyright: (c) 2018 by Netflix Inc.
|
||||
:license: Apache, see LICENSE for more details.
|
||||
"""
|
||||
from enum import IntEnum
|
||||
|
||||
SAN_NAMING_TEMPLATE = "SAN-{subject}-{issuer}-{not_before}-{not_after}"
|
||||
DEFAULT_NAMING_TEMPLATE = "{subject}-{issuer}-{not_before}-{not_after}"
|
||||
NONSTANDARD_NAMING_TEMPLATE = "{issuer}-{not_before}-{not_after}"
|
||||
|
@ -10,6 +12,9 @@ NONSTANDARD_NAMING_TEMPLATE = "{issuer}-{not_before}-{not_after}"
|
|||
SUCCESS_METRIC_STATUS = "success"
|
||||
FAILURE_METRIC_STATUS = "failure"
|
||||
|
||||
# when ACME attempts to resolve a certificate, try 3 times in total
|
||||
ACME_ADDITIONAL_ATTEMPTS = 2
|
||||
|
||||
CERTIFICATE_KEY_TYPES = [
|
||||
"RSA2048",
|
||||
"RSA4096",
|
||||
|
@ -32,3 +37,17 @@ CERTIFICATE_KEY_TYPES = [
|
|||
"ECCSECT409R1",
|
||||
"ECCSECT571R2",
|
||||
]
|
||||
|
||||
|
||||
# As per RFC 5280 section 5.3.1 (https://tools.ietf.org/html/rfc5280#section-5.3.1)
|
||||
class CRLReason(IntEnum):
|
||||
unspecified = 0,
|
||||
keyCompromise = 1,
|
||||
cACompromise = 2,
|
||||
affiliationChanged = 3,
|
||||
superseded = 4,
|
||||
cessationOfOperation = 5,
|
||||
certificateHold = 6,
|
||||
removeFromCRL = 8,
|
||||
privilegeWithdrawn = 9,
|
||||
aACompromise = 10
|
||||
|
|
|
@ -9,6 +9,7 @@
|
|||
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
"""
|
||||
import math
|
||||
from inflection import underscore
|
||||
from sqlalchemy import exc, func, distinct
|
||||
from sqlalchemy.orm import make_transient, lazyload
|
||||
|
@ -219,13 +220,20 @@ def sort(query, model, field, direction):
|
|||
|
||||
def paginate(query, page, count):
|
||||
"""
|
||||
Returns the items given the count and page specified
|
||||
Returns the items for the specified page and count. The returned item list is empty
|
||||
if the page number exceeds the maximum page number derived from the count per page and the total number of records.
|
||||
|
||||
:param query:
|
||||
:param page:
|
||||
:param count:
|
||||
:param query: search query
|
||||
:param page: current page number
|
||||
:param count: results per page
|
||||
"""
|
||||
return query.paginate(page, count)
|
||||
total = get_count(query)
|
||||
# Check if the requested page is higher than the total number of pages based on count per page and total records
|
||||
# In such a case the Flask-SQLAlchemy pagination call results in a 404
|
||||
if math.ceil(total / count) < page:
|
||||
return dict(items=[], total=total)
|
||||
items = query.paginate(page, count).items
|
||||
return dict(items=items, total=total)
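A worked example of the guard above, with made-up numbers: 45 records at 20 per page gives ceil(45/20) = 3 pages, so asking for page 4 now returns an empty item list instead of letting Flask-SQLAlchemy abort with a 404.

    import math

    total, count, page = 45, 20, 4  # assumed example values
    math.ceil(total / count)        # -> 3 pages
    # page 4 > 3, so paginate() short-circuits to dict(items=[], total=45)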
|
||||
|
||||
|
||||
def update_list(model, model_attr, item_model, items):
|
||||
|
|
|
@ -31,6 +31,9 @@ class DestinationOutputSchema(LemurOutputSchema):
|
|||
def fill_object(self, data):
|
||||
if data:
|
||||
data["plugin"]["pluginOptions"] = data["options"]
|
||||
for option in data["plugin"]["pluginOptions"]:
|
||||
if "export-plugin" in option["type"]:
|
||||
option["value"]["pluginOptions"] = option["value"]["plugin_options"]
|
||||
return data
|
||||
|
||||
|
||||
|
|
|
@ -21,7 +21,7 @@ def create(label, plugin_name, options, description=None):
|
|||
|
||||
:param label: Destination common name
|
||||
:param description:
|
||||
:rtype : Destination
|
||||
:rtype: Destination
|
||||
:return: New destination
|
||||
"""
|
||||
# remove any sub-plugin objects before trying to save the json options
|
||||
|
@ -41,19 +41,26 @@ def create(label, plugin_name, options, description=None):
|
|||
return database.create(destination)
|
||||
|
||||
|
||||
def update(destination_id, label, options, description):
|
||||
def update(destination_id, label, plugin_name, options, description):
|
||||
"""
|
||||
Updates an existing destination.
|
||||
|
||||
:param destination_id: Lemur assigned ID
|
||||
:param label: Destination common name
|
||||
:param plugin_name:
|
||||
:param options:
|
||||
:param description:
|
||||
:rtype : Destination
|
||||
:rtype: Destination
|
||||
:return:
|
||||
"""
|
||||
destination = get(destination_id)
|
||||
|
||||
destination.label = label
|
||||
destination.plugin_name = plugin_name
|
||||
# remove any sub-plugin objects before trying to save the json options
|
||||
for option in options:
|
||||
if "plugin" in option["type"]:
|
||||
del option["value"]["plugin_object"]
|
||||
destination.options = options
|
||||
destination.description = description
|
||||
|
||||
|
@ -74,7 +81,7 @@ def get(destination_id):
|
|||
Retrieves a destination by its Lemur-assigned ID.
|
||||
|
||||
:param destination_id: Lemur assigned ID
|
||||
:rtype : Destination
|
||||
:rtype: Destination
|
||||
:return:
|
||||
"""
|
||||
return database.get(Destination, destination_id)
|
||||
|
|
|
@ -113,6 +113,7 @@ class DestinationsList(AuthenticatedResource):
|
|||
POST /destinations HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
Content-Type: application/json;charset=UTF-8
|
||||
|
||||
{
|
||||
"description": "test33",
|
||||
|
@ -264,6 +265,7 @@ class Destinations(AuthenticatedResource):
|
|||
POST /destinations/1 HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
Content-Type: application/json;charset=UTF-8
|
||||
|
||||
|
||||
{
|
||||
|
@ -338,6 +340,7 @@ class Destinations(AuthenticatedResource):
|
|||
return service.update(
|
||||
destination_id,
|
||||
data["label"],
|
||||
data["plugin"]["slug"],
|
||||
data["plugin"]["plugin_options"],
|
||||
data["description"],
|
||||
)
|
||||
|
@ -422,7 +425,7 @@ class CertificateDestinations(AuthenticatedResource):
|
|||
|
||||
|
||||
class DestinationsStats(AuthenticatedResource):
|
||||
""" Defines the 'certificates' stats endpoint """
|
||||
""" Defines the 'destinations' stats endpoint """
|
||||
|
||||
def __init__(self):
|
||||
self.reqparse = reqparse.RequestParser()
|
||||
|
|
|
@ -3,9 +3,9 @@ from flask_script import Manager
|
|||
import sys
|
||||
|
||||
from lemur.constants import SUCCESS_METRIC_STATUS
|
||||
from lemur.plugins.lemur_acme.acme_handlers import AcmeDnsHandler
|
||||
from lemur.dns_providers.service import get_all_dns_providers, set_domains
|
||||
from lemur.extensions import metrics, sentry
|
||||
from lemur.plugins.base import plugins
|
||||
|
||||
manager = Manager(
|
||||
usage="Iterates through all DNS providers and sets DNS zones in the database."
|
||||
|
@ -19,7 +19,7 @@ def get_all_zones():
|
|||
"""
|
||||
print("[+] Starting dns provider zone lookup and configuration.")
|
||||
dns_providers = get_all_dns_providers()
|
||||
acme_plugin = plugins.get("acme-issuer")
|
||||
acme_dns_handler = AcmeDnsHandler()
|
||||
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
log_data = {
|
||||
|
@ -29,7 +29,7 @@ def get_all_zones():
|
|||
|
||||
for dns_provider in dns_providers:
|
||||
try:
|
||||
zones = acme_plugin.get_all_zones(dns_provider)
|
||||
zones = acme_dns_handler.get_all_zones(dns_provider)
|
||||
set_domains(dns_provider, zones)
|
||||
except Exception as e:
|
||||
print("[+] Error with DNS Provider {}: {}".format(dns_provider.name, e))
|
||||
|
|
|
@@ -10,9 +10,9 @@ class DnsProvidersNestedOutputSchema(LemurOutputSchema):
    name = fields.String()
    provider_type = fields.String()
    description = fields.String()
    credentials = fields.String()
    api_endpoint = fields.String()
    date_created = ArrowDateTime()
    # credentials are intentionally omitted (they are input-only)


class DnsProvidersNestedInputSchema(LemurInputSchema):
|
|
|
@ -36,7 +36,7 @@ def get_friendly(dns_provider_id):
|
|||
Retrieves a dns provider by its lemur assigned ID.
|
||||
|
||||
:param dns_provider_id: Lemur assigned ID
|
||||
:rtype : DnsProvider
|
||||
:rtype: DnsProvider
|
||||
:return:
|
||||
"""
|
||||
dns_provider = get(dns_provider_id)
|
||||
|
|
|
@ -86,62 +86,79 @@ class DnsProvidersList(AuthenticatedResource):
|
|||
@admin_permission.require(http_exception=403)
|
||||
def post(self, data=None):
|
||||
"""
|
||||
Creates a DNS Provider
|
||||
.. http:post:: /dns_providers
|
||||
|
||||
**Example request**:
|
||||
{
|
||||
"providerType": {
|
||||
"name": "route53",
|
||||
"requirements": [
|
||||
{
|
||||
"name": "account_id",
|
||||
"type": "int",
|
||||
"required": true,
|
||||
"helpMessage": "AWS Account number",
|
||||
"value": 12345
|
||||
}
|
||||
],
|
||||
"route": "dns_provider_options",
|
||||
"reqParams": null,
|
||||
"restangularized": true,
|
||||
"fromServer": true,
|
||||
"parentResource": null,
|
||||
"restangularCollection": false
|
||||
},
|
||||
"name": "provider_name",
|
||||
"description": "provider_description"
|
||||
}
|
||||
Creates a DNS Provider
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
POST /dns_providers HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
{
|
||||
"providerType": {
|
||||
"name": "route53",
|
||||
"requirements": [
|
||||
{
|
||||
"name": "account_id",
|
||||
"type": "int",
|
||||
"required": true,
|
||||
"helpMessage": "AWS Account number",
|
||||
"value": 12345
|
||||
}
|
||||
],
|
||||
"route": "dns_provider_options",
|
||||
"reqParams": null,
|
||||
"restangularized": true,
|
||||
"fromServer": true,
|
||||
"parentResource": null,
|
||||
"restangularCollection": false
|
||||
},
|
||||
"name": "provider_name",
|
||||
"description": "provider_description"
|
||||
}
|
||||
|
||||
**Example request 2**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: text/javascript
|
||||
|
||||
{
|
||||
"providerType": {
|
||||
"name": "cloudflare",
|
||||
"requirements": [
|
||||
{
|
||||
"name": "email",
|
||||
"type": "str",
|
||||
"required": true,
|
||||
"helpMessage": "Cloudflare Email",
|
||||
"value": "test@example.com"
|
||||
},
|
||||
{
|
||||
"name": "key",
|
||||
"type": "str",
|
||||
"required": true,
|
||||
"helpMessage": "Cloudflare Key",
|
||||
"value": "secretkey"
|
||||
}
|
||||
],
|
||||
"route": "dns_provider_options",
|
||||
"reqParams": null,
|
||||
"restangularized": true,
|
||||
"fromServer": true,
|
||||
"parentResource": null,
|
||||
"restangularCollection": false
|
||||
},
|
||||
"name": "provider_name",
|
||||
"description": "provider_description"
|
||||
}
|
||||
|
||||
**Example request 2**
|
||||
{
|
||||
"providerType": {
|
||||
"name": "cloudflare",
|
||||
"requirements": [
|
||||
{
|
||||
"name": "email",
|
||||
"type": "str",
|
||||
"required": true,
|
||||
"helpMessage": "Cloudflare Email",
|
||||
"value": "test@example.com"
|
||||
},
|
||||
{
|
||||
"name": "key",
|
||||
"type": "str",
|
||||
"required": true,
|
||||
"helpMessage": "Cloudflare Key",
|
||||
"value": "secretkey"
|
||||
}
|
||||
],
|
||||
"route": "dns_provider_options",
|
||||
"reqParams": null,
|
||||
"restangularized": true,
|
||||
"fromServer": true,
|
||||
"parentResource": null,
|
||||
"restangularCollection": false
|
||||
},
|
||||
"name": "provider_name",
|
||||
"description": "provider_description"
|
||||
}
|
||||
:return:
|
||||
"""
|
||||
return service.create(data)
|
||||
|
|
|
@ -96,7 +96,7 @@ class DomainsList(AuthenticatedResource):
|
|||
|
||||
.. sourcecode:: http
|
||||
|
||||
GET /domains HTTP/1.1
|
||||
POST /domains HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
|
|
|
@@ -10,7 +10,7 @@

"""
import os
import imp
import importlib
import errno
import pkg_resources
import socket

@@ -73,8 +73,9 @@ def from_file(file_path, silent=False):
    :param file_path:
    :param silent:
    """
    d = imp.new_module("config")
    d.__file__ = file_path
    module_spec = importlib.util.spec_from_file_location("config", file_path)
    d = importlib.util.module_from_spec(module_spec)

    try:
        with open(file_path) as config_file:
            exec( # nosec: config file safe
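The lines replacing the deprecated imp calls follow the standard importlib recipe. As a standalone sketch (helper name hypothetical; shown with exec_module, whereas the patched function keeps its explicit exec of the file contents):

import importlib.util

def load_config_module(file_path):
    # Build a module object from a file path without importing it by name.
    spec = importlib.util.spec_from_file_location("config", file_path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)  # runs the file inside the new module's namespace
    return module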
|
|
|
@@ -7,7 +7,7 @@

.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
"""
from sqlalchemy import Column, Integer, ForeignKey, PassiveDefault, func, Enum
from sqlalchemy import Column, Integer, ForeignKey, DefaultClause, func, Enum

from sqlalchemy_utils.types.arrow import ArrowType

@@ -29,5 +29,5 @@ class Log(db.Model):
        ),
        nullable=False,
    )
    logged_at = Column(ArrowType(), PassiveDefault(func.now()), nullable=False)
    logged_at = Column(ArrowType(), DefaultClause(func.now()), nullable=False)
    user_id = Column(Integer, ForeignKey("users.id"), nullable=False)
|
|
|
@@ -7,7 +7,7 @@
:license: Apache, see LICENSE for more details.
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
"""
from flask import current_app
from flask import current_app, g

from lemur import database
from lemur.logs.models import Log

@@ -34,6 +34,20 @@ def create(user, type, certificate=None):
    database.commit()


def audit_log(action, entity, message):
    """
    Logs given action
    :param action: The action being logged e.g. assign_role, create_role etc
    :param entity: The entity undergoing the action e.g. name of the role
    :param message: Additional info e.g. Role being assigned to user X
    :return:
    """
    user = g.current_user.email if hasattr(g, 'current_user') else "LEMUR"
    current_app.logger.info(
        f"[lemur-audit] action: {action}, user: {user}, entity: {entity}, details: {message}"
    )

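As a usage sketch (argument values illustrative), a single call emits one grep-friendly line through the Flask application logger:

# Hypothetical call site:
audit_log("assign_role", "operator", "Role assigned to user jane@example.com")
# Resulting log line (user falls back to "LEMUR" when flask.g has no current_user):
# [lemur-audit] action: assign_role, user: LEMUR, entity: operator, details: Role assigned to user jane@example.com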
def get_all():
    """
    Retrieve all logs from the database.
|
|
|
@@ -120,6 +120,7 @@ METRIC_PROVIDERS = []

LOG_LEVEL = "DEBUG"
LOG_FILE = "lemur.log"
LOG_UPGRADE_FILE = "db_upgrade.log"


# Database
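Operators can point the new setting somewhere more durable in their local settings module; a sketch (path and values illustrative, not mandated by this change):

# lemur.conf.py
LOG_LEVEL = "INFO"
LOG_FILE = "/var/log/lemur/lemur.log"
LOG_UPGRADE_FILE = "/var/log/lemur/db_upgrade.log"  # where migration output is written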
|
|
|
@@ -10,11 +10,21 @@ Create Date: 2018-08-03 12:56:44.565230
revision = "1db4f82bc780"
down_revision = "3adfdd6598df"

import logging

from alembic import op

log = logging.getLogger(__name__)
from flask import current_app
from logging import Formatter, FileHandler, getLogger

log = getLogger(__name__)
handler = FileHandler(current_app.config.get("LOG_UPGRADE_FILE", "db_upgrade.log"))
handler.setFormatter(
    Formatter(
        "%(asctime)s %(levelname)s: %(message)s " "[in %(pathname)s:%(lineno)d]"
    )
)
handler.setLevel(current_app.config.get("LOG_LEVEL", "DEBUG"))
log.setLevel(current_app.config.get("LOG_LEVEL", "DEBUG"))
log.addHandler(handler)


def upgrade():
|
|
|
@ -7,8 +7,9 @@ the rest of the keys, the certificate body is parsed to determine
|
|||
the exact key_type information.
|
||||
|
||||
Each individual DB change is explicitly committed, and the respective
|
||||
log is added to a file named db_upgrade.log in the current working
|
||||
directory. Any error encountered while parsing a certificate will
|
||||
log is added to a file configured in LOG_UPGRADE_FILE or, by default,
|
||||
to a file named db_upgrade.log in the current working directory.
|
||||
Any error encountered while parsing a certificate will
|
||||
also be logged along with the certificate ID. If faced with any issue
|
||||
while running this upgrade, there is no harm in re-running the upgrade.
|
||||
Each run processes only rows for which key_type information is not yet
|
||||
|
@ -31,15 +32,28 @@ down_revision = '434c29e40511'
|
|||
|
||||
from alembic import op
|
||||
from sqlalchemy.sql import text
|
||||
from lemur.common import utils
|
||||
import time
|
||||
import datetime
|
||||
from flask import current_app
|
||||
|
||||
log_file = open('db_upgrade.log', 'a')
|
||||
from logging import Formatter, FileHandler, getLogger
|
||||
|
||||
from lemur.common import utils
|
||||
|
||||
log = getLogger(__name__)
|
||||
handler = FileHandler(current_app.config.get("LOG_UPGRADE_FILE", "db_upgrade.log"))
|
||||
handler.setFormatter(
|
||||
Formatter(
|
||||
"%(asctime)s %(levelname)s: %(message)s " "[in %(pathname)s:%(lineno)d]"
|
||||
)
|
||||
)
|
||||
handler.setLevel(current_app.config.get("LOG_LEVEL", "DEBUG"))
|
||||
log.setLevel(current_app.config.get("LOG_LEVEL", "DEBUG"))
|
||||
log.addHandler(handler)
|
||||
|
||||
|
||||
def upgrade():
|
||||
log_file.write("\n*** Starting new run(%s) ***\n" % datetime.datetime.now())
|
||||
log.info("\n*** Starting new run(%s) ***\n" % datetime.datetime.now())
|
||||
start_time = time.time()
|
||||
|
||||
# Update RSA keys using the key length information
|
||||
|
@ -50,8 +64,7 @@ def upgrade():
|
|||
# Process remaining certificates. Though below method does not make any assumptions, most of the remaining ones should be ECC certs.
|
||||
update_key_type()
|
||||
|
||||
log_file.write("--- Total %s seconds ---\n" % (time.time() - start_time))
|
||||
log_file.close()
|
||||
log.info("--- Total %s seconds ---\n" % (time.time() - start_time))
|
||||
|
||||
|
||||
def downgrade():
|
||||
|
@ -61,6 +74,7 @@ def downgrade():
|
|||
"update certificates set key_type=null where not_after > CURRENT_DATE - 32"
|
||||
)
|
||||
op.execute(stmt)
|
||||
commit()
|
||||
|
||||
|
||||
"""
|
||||
|
@ -69,18 +83,18 @@ def downgrade():
|
|||
|
||||
|
||||
def update_key_type_rsa(bits):
|
||||
log_file.write("Processing certificate with key type RSA %s\n" % bits)
|
||||
log.info("Processing certificate with key type RSA %s\n" % bits)
|
||||
|
||||
stmt = text(
|
||||
f"update certificates set key_type='RSA{bits}' where bits={bits} and not_after > CURRENT_DATE - 31 and key_type is null"
|
||||
)
|
||||
log_file.write("Query: %s\n" % stmt)
|
||||
log.info("Query: %s\n" % stmt)
|
||||
|
||||
start_time = time.time()
|
||||
op.execute(stmt)
|
||||
commit()
|
||||
|
||||
log_file.write("--- %s seconds ---\n" % (time.time() - start_time))
|
||||
log.info("--- %s seconds ---\n" % (time.time() - start_time))
|
||||
|
||||
|
||||
def update_key_type():
|
||||
|
@ -95,9 +109,9 @@ def update_key_type():
|
|||
try:
|
||||
cert_key_type = utils.get_key_type_from_certificate(body)
|
||||
except ValueError as e:
|
||||
log_file.write("Error in processing certificate - ID: %s Error: %s \n" % (cert_id, str(e)))
|
||||
log.error("Error in processing certificate - ID: %s Error: %s \n" % (cert_id, str(e)))
|
||||
else:
|
||||
log_file.write("Processing certificate - ID: %s key_type: %s\n" % (cert_id, cert_key_type))
|
||||
log.info("Processing certificate - ID: %s key_type: %s\n" % (cert_id, cert_key_type))
|
||||
stmt = text(
|
||||
"update certificates set key_type=:key_type where id=:id"
|
||||
)
|
||||
|
@ -106,7 +120,7 @@ def update_key_type():
|
|||
|
||||
commit()
|
||||
|
||||
log_file.write("--- %s seconds ---\n" % (time.time() - start_time))
|
||||
log.info("--- %s seconds ---\n" % (time.time() - start_time))
|
||||
|
||||
|
||||
def commit():
|
||||
|
|
|
@ -10,6 +10,8 @@ from flask_script import Manager
|
|||
from lemur.constants import SUCCESS_METRIC_STATUS, FAILURE_METRIC_STATUS
|
||||
from lemur.extensions import sentry, metrics
|
||||
from lemur.notifications.messaging import send_expiration_notifications
|
||||
from lemur.notifications.messaging import send_authority_expiration_notifications
|
||||
from lemur.notifications.messaging import send_security_expiration_summary
|
||||
|
||||
manager = Manager(usage="Handles notification related tasks.")
|
||||
|
||||
|
@ -24,7 +26,7 @@ manager = Manager(usage="Handles notification related tasks.")
|
|||
)
|
||||
def expirations(exclude):
|
||||
"""
|
||||
Runs Lemur's notification engine, that looks for expired certificates and sends
|
||||
Runs Lemur's notification engine, that looks for expiring certificates and sends
|
||||
notifications out to those that have subscribed to them.
|
||||
|
||||
Every certificate receives notifications by default. When expiration notifications are handled outside of Lemur
|
||||
|
@ -39,9 +41,7 @@ def expirations(exclude):
|
|||
print("Starting to notify subscribers about expiring certificates!")
|
||||
success, failed = send_expiration_notifications(exclude)
|
||||
print(
|
||||
"Finished notifying subscribers about expiring certificates! Sent: {success} Failed: {failed}".format(
|
||||
success=success, failed=failed
|
||||
)
|
||||
f"Finished notifying subscribers about expiring certificates! Sent: {success} Failed: {failed}"
|
||||
)
|
||||
status = SUCCESS_METRIC_STATUS
|
||||
except Exception as e:
|
||||
|
@ -50,3 +50,50 @@ def expirations(exclude):
|
|||
metrics.send(
|
||||
"expiration_notification_job", "counter", 1, metric_tags={"status": status}
|
||||
)
|
||||
|
||||
|
||||
def authority_expirations():
|
||||
"""
|
||||
Runs Lemur's notification engine, that looks for expiring certificate authority certificates and sends
|
||||
notifications out to the security team and owner.
|
||||
|
||||
:return:
|
||||
"""
|
||||
status = FAILURE_METRIC_STATUS
|
||||
try:
|
||||
print("Starting to notify subscribers about expiring certificate authority certificates!")
|
||||
success, failed = send_authority_expiration_notifications()
|
||||
print(
|
||||
"Finished notifying subscribers about expiring certificate authority certificates! "
|
||||
f"Sent: {success} Failed: {failed}"
|
||||
)
|
||||
status = SUCCESS_METRIC_STATUS
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
|
||||
metrics.send(
|
||||
"authority_expiration_notification_job", "counter", 1, metric_tags={"status": status}
|
||||
)
|
||||
|
||||
|
||||
def security_expiration_summary(exclude):
|
||||
"""
|
||||
Sends a summary email with info on all expiring certs (that match the configured expiry intervals).
|
||||
|
||||
:return:
|
||||
"""
|
||||
status = FAILURE_METRIC_STATUS
|
||||
try:
|
||||
print("Starting to notify security team about expiring certificates!")
|
||||
success = send_security_expiration_summary(exclude)
|
||||
print(
|
||||
f"Finished notifying security team about expiring certificates! Success: {success}"
|
||||
)
|
||||
if success:
|
||||
status = SUCCESS_METRIC_STATUS
|
||||
except Exception:
|
||||
sentry.captureException()
|
||||
|
||||
metrics.send(
|
||||
"security_expiration_notification_job", "counter", 1, metric_tags={"status": status}
|
||||
)
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
|
||||
"""
|
||||
import sys
|
||||
from collections import defaultdict
|
||||
from datetime import timedelta
|
||||
from itertools import groupby
|
||||
|
@ -15,11 +16,13 @@ from itertools import groupby
|
|||
import arrow
|
||||
from flask import current_app
|
||||
from sqlalchemy import and_
|
||||
from sqlalchemy.sql.expression import false, true
|
||||
|
||||
from lemur import database
|
||||
from lemur.certificates import service as certificates_service
|
||||
from lemur.certificates.models import Certificate
|
||||
from lemur.certificates.schemas import certificate_notification_output_schema
|
||||
from lemur.common.utils import windowed_query
|
||||
from lemur.common.utils import windowed_query, is_selfsigned
|
||||
from lemur.constants import FAILURE_METRIC_STATUS, SUCCESS_METRIC_STATUS
|
||||
from lemur.extensions import metrics, sentry
|
||||
from lemur.pending_certificates.schemas import pending_certificate_output_schema
|
||||
|
@ -29,7 +32,7 @@ from lemur.plugins.utils import get_plugin_option
|
|||
|
||||
def get_certificates(exclude=None):
|
||||
"""
|
||||
Finds all certificates that are eligible for notifications.
|
||||
Finds all certificates that are eligible for expiration notifications.
|
||||
:param exclude:
|
||||
:return:
|
||||
"""
|
||||
|
@@ -39,9 +42,10 @@ def get_certificates(exclude=None):
    q = (
        database.db.session.query(Certificate)
        .filter(Certificate.not_after <= max)
        .filter(Certificate.notify == True)
        .filter(Certificate.expired == False)
    ) # noqa
        .filter(Certificate.notify == true())
        .filter(Certificate.expired == false())
        .filter(Certificate.revoked == false())
    )

    exclude_conditions = []
    if exclude:
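Switching from `== True`/`== False` to SQLAlchemy's true()/false() constructs keeps the comparison as a real SQL boolean expression, which is the usual reason such comparisons otherwise carry a `# noqa`. A self-contained sketch of the same filter style (toy model, SQLAlchemy 1.4-style imports; not Lemur's actual schema):

from sqlalchemy import Boolean, Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base
from sqlalchemy.sql.expression import false, true

Base = declarative_base()

class Cert(Base):
    __tablename__ = "certs"
    id = Column(Integer, primary_key=True)
    notify = Column(Boolean, default=True)
    expired = Column(Boolean, default=False)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([Cert(notify=True, expired=False), Cert(notify=True, expired=True)])
    session.commit()
    eligible = (
        session.query(Cert)
        .filter(Cert.notify == true())
        .filter(Cert.expired == false())
        .all()
    )
    assert len(eligible) == 1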
@ -59,9 +63,71 @@ def get_certificates(exclude=None):
|
|||
return certs
|
||||
|
||||
|
||||
def get_certificates_for_security_summary_email(exclude=None):
|
||||
"""
|
||||
Finds all certificates that are eligible for expiration notifications for the security expiration summary.
|
||||
:param exclude:
|
||||
:return:
|
||||
"""
|
||||
now = arrow.utcnow()
|
||||
threshold_days = current_app.config.get("LEMUR_EXPIRATION_SUMMARY_EMAIL_THRESHOLD_DAYS", 14)
|
||||
max_not_after = now + timedelta(days=threshold_days + 1)
|
||||
|
||||
q = (
|
||||
database.db.session.query(Certificate)
|
||||
.filter(Certificate.not_after <= max_not_after)
|
||||
.filter(Certificate.notify == true())
|
||||
.filter(Certificate.expired == false())
|
||||
.filter(Certificate.revoked == false())
|
||||
)
|
||||
|
||||
exclude_conditions = []
|
||||
if exclude:
|
||||
for e in exclude:
|
||||
exclude_conditions.append(~Certificate.name.ilike("%{}%".format(e)))
|
||||
|
||||
q = q.filter(and_(*exclude_conditions))
|
||||
|
||||
certs = []
|
||||
for c in windowed_query(q, Certificate.id, 10000):
|
||||
days_remaining = (c.not_after - now).days
|
||||
if days_remaining <= threshold_days:
|
||||
certs.append(c)
|
||||
return certs
|
||||
|
||||
|
||||
def get_expiring_authority_certificates():
|
||||
"""
|
||||
Finds all certificate authority certificates that are eligible for expiration notifications.
|
||||
:return:
|
||||
"""
|
||||
now = arrow.utcnow()
|
||||
authority_expiration_intervals = current_app.config.get("LEMUR_AUTHORITY_CERT_EXPIRATION_EMAIL_INTERVALS",
|
||||
[365, 180])
|
||||
max_not_after = now + timedelta(days=max(authority_expiration_intervals) + 1)
|
||||
|
||||
q = (
|
||||
database.db.session.query(Certificate)
|
||||
.filter(Certificate.not_after < max_not_after)
|
||||
.filter(Certificate.notify == true())
|
||||
.filter(Certificate.expired == false())
|
||||
.filter(Certificate.revoked == false())
|
||||
.filter(Certificate.root_authority_id.isnot(None))
|
||||
.filter(Certificate.authority_id.is_(None))
|
||||
)
|
||||
|
||||
certs = []
|
||||
for c in windowed_query(q, Certificate.id, 10000):
|
||||
days_remaining = (c.not_after - now).days
|
||||
if days_remaining in authority_expiration_intervals:
|
||||
certs.append(c)
|
||||
return certs
|
||||
|
||||
|
||||
def get_eligible_certificates(exclude=None):
|
||||
"""
|
||||
Finds all certificates that are eligible for certificate expiration.
|
||||
Finds all certificates that are eligible for certificate expiration notification.
|
||||
Returns the set of all eligible certificates, grouped by owner, with a list of applicable notifications.
|
||||
:param exclude:
|
||||
:return:
|
||||
"""
|
||||
|
@ -86,28 +152,72 @@ def get_eligible_certificates(exclude=None):
|
|||
return certificates
|
||||
|
||||
|
||||
def send_notification(event_type, data, targets, notification):
|
||||
def get_eligible_security_summary_certs(exclude=None):
|
||||
certificates = defaultdict(list)
|
||||
all_certs = get_certificates_for_security_summary_email(exclude=exclude)
|
||||
now = arrow.utcnow()
|
||||
|
||||
# group by expiration interval
|
||||
for interval, interval_certs in groupby(all_certs, lambda x: (x.not_after - now).days):
|
||||
certificates[interval] = list(interval_certs)
|
||||
|
||||
return certificates
|
||||
|
||||
|
||||
def get_eligible_authority_certificates():
|
||||
"""
|
||||
Finds all certificate authority certificates that are eligible for certificate expiration notification.
|
||||
Returns the set of all eligible CA certificates, grouped by owner and interval, with a list of applicable certs.
|
||||
:return:
|
||||
"""
|
||||
certificates = defaultdict(dict)
|
||||
all_certs = get_expiring_authority_certificates()
|
||||
now = arrow.utcnow()
|
||||
|
||||
# group by owner
|
||||
for owner, owner_certs in groupby(all_certs, lambda x: x.owner):
|
||||
# group by expiration interval
|
||||
for interval, interval_certs in groupby(owner_certs, lambda x: (x.not_after - now).days):
|
||||
certificates[owner][interval] = list(interval_certs)
|
||||
|
||||
return certificates
|
||||
|
||||
|
||||
def send_plugin_notification(event_type, data, recipients, notification):
|
||||
"""
|
||||
Executes the plugin and handles failure.
|
||||
|
||||
:param event_type:
|
||||
:param data:
|
||||
:param targets:
|
||||
:param recipients:
|
||||
:param notification:
|
||||
:return:
|
||||
"""
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": f"Sending {event_type} notification for to recipients {recipients}",
|
||||
"notification_type": event_type,
|
||||
"notification_plugin": notification.plugin.slug,
|
||||
"certificate_targets": recipients,
|
||||
"plugin": notification.plugin.slug,
|
||||
"notification_id": notification.id,
|
||||
}
|
||||
status = FAILURE_METRIC_STATUS
|
||||
try:
|
||||
notification.plugin.send(event_type, data, targets, notification.options)
|
||||
current_app.logger.debug(log_data)
|
||||
notification.plugin.send(event_type, data, recipients, notification.options)
|
||||
status = SUCCESS_METRIC_STATUS
|
||||
except Exception as e:
|
||||
log_data["message"] = f"Unable to send {event_type} notification to recipients {recipients}"
|
||||
current_app.logger.error(log_data, exc_info=True)
|
||||
sentry.captureException()
|
||||
|
||||
metrics.send(
|
||||
"notification",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={"status": status, "event_type": event_type},
|
||||
metric_tags={"status": status, "event_type": event_type, "plugin": notification.plugin.slug},
|
||||
)
|
||||
|
||||
if status == SUCCESS_METRIC_STATUS:
|
||||
|
@ -124,7 +234,6 @@ def send_expiration_notifications(exclude):
|
|||
# security team gets all
|
||||
security_email = current_app.config.get("LEMUR_SECURITY_TEAM_EMAIL")
|
||||
|
||||
security_data = []
|
||||
for owner, notification_group in get_eligible_certificates(exclude=exclude).items():
|
||||
|
||||
for notification_label, certificates in notification_group.items():
|
||||
|
@ -138,147 +247,144 @@ def send_expiration_notifications(exclude):
|
|||
certificate
|
||||
).data
|
||||
notification_data.append(cert_data)
|
||||
security_data.append(cert_data)
|
||||
|
||||
if send_notification(
|
||||
"expiration", notification_data, [owner], notification
|
||||
email_recipients = notification.plugin.get_recipients(notification.options, security_email + [owner])
|
||||
# Plugin will ONLY use the provided recipients if it's email; any other notification plugin ignores them
|
||||
if send_plugin_notification(
|
||||
"expiration", notification_data, email_recipients, notification
|
||||
):
|
||||
success += 1
|
||||
success += len(email_recipients)
|
||||
else:
|
||||
failure += 1
|
||||
|
||||
notification_recipient = get_plugin_option(
|
||||
"recipients", notification.options
|
||||
)
|
||||
if notification_recipient:
|
||||
notification_recipient = notification_recipient.split(",")
|
||||
# removing owner and security_email from notification_recipient
|
||||
notification_recipient = [i for i in notification_recipient if i not in security_email and i != owner]
|
||||
|
||||
if (
|
||||
notification_recipient
|
||||
):
|
||||
if send_notification(
|
||||
"expiration",
|
||||
notification_data,
|
||||
notification_recipient,
|
||||
notification,
|
||||
failure += len(email_recipients)
|
||||
# If we're using an email plugin, we're done,
|
||||
# since "security_email + [owner]" were added as email_recipients.
|
||||
# If we're not using an email plugin, we also need to send an email to the security team and owner,
|
||||
# since the plugin notification didn't send anything to them.
|
||||
if notification.plugin.slug != "email-notification":
|
||||
if send_default_notification(
|
||||
"expiration", notification_data, email_recipients, notification.options
|
||||
):
|
||||
success += 1
|
||||
success = 1 + len(email_recipients)
|
||||
else:
|
||||
failure += 1
|
||||
|
||||
if send_notification(
|
||||
"expiration", security_data, security_email, notification
|
||||
):
|
||||
success += 1
|
||||
else:
|
||||
failure += 1
|
||||
failure = 1 + len(email_recipients)
|
||||
|
||||
return success, failure
|
||||
|
||||
|
||||
def send_rotation_notification(certificate, notification_plugin=None):
|
||||
def send_authority_expiration_notifications():
|
||||
"""
|
||||
Sends a report to certificate owners when their certificate has been
|
||||
rotated.
|
||||
This function will check for upcoming certificate authority certificate expiration,
|
||||
and send out notification emails at configured intervals.
|
||||
"""
|
||||
success = failure = 0
|
||||
|
||||
:param certificate:
|
||||
:param notification_plugin:
|
||||
# security team gets all
|
||||
security_email = current_app.config.get("LEMUR_SECURITY_TEAM_EMAIL")
|
||||
|
||||
for owner, owner_cert_groups in get_eligible_authority_certificates().items():
|
||||
for interval, certificates in owner_cert_groups.items():
|
||||
notification_data = []
|
||||
|
||||
for certificate in certificates:
|
||||
cert_data = certificate_notification_output_schema.dump(
|
||||
certificate
|
||||
).data
|
||||
cert_data['self_signed'] = is_selfsigned(certificate.parsed_cert)
|
||||
cert_data['issued_cert_count'] = certificates_service.get_issued_cert_count_for_authority(certificate.root_authority)
|
||||
notification_data.append(cert_data)
|
||||
|
||||
email_recipients = security_email + [owner]
|
||||
if send_default_notification(
|
||||
"authority_expiration", notification_data, email_recipients,
|
||||
notification_options=[{'name': 'interval', 'value': interval}]
|
||||
):
|
||||
success = len(email_recipients)
|
||||
else:
|
||||
failure = len(email_recipients)
|
||||
|
||||
return success, failure
|
||||
|
||||
|
||||
def send_default_notification(notification_type, data, targets, notification_options=None):
|
||||
"""
|
||||
Sends a report to the specified target via the default notification plugin. Applicable for any notification_type.
|
||||
At present, "default" means email, as the other notification plugins do not support dynamically configured targets.
|
||||
|
||||
:param notification_type:
|
||||
:param data:
|
||||
:param targets:
|
||||
:param notification_options:
|
||||
:return:
|
||||
"""
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
status = FAILURE_METRIC_STATUS
|
||||
if not notification_plugin:
|
||||
notification_plugin = plugins.get(
|
||||
current_app.config.get("LEMUR_DEFAULT_NOTIFICATION_PLUGIN")
|
||||
)
|
||||
|
||||
data = certificate_notification_output_schema.dump(certificate).data
|
||||
notification_plugin = plugins.get(
|
||||
current_app.config.get("LEMUR_DEFAULT_NOTIFICATION_PLUGIN", "email-notification")
|
||||
)
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": f"Sending {notification_type} notification for certificate data {data} to targets {targets}",
|
||||
"notification_type": notification_type,
|
||||
"notification_plugin": notification_plugin.slug,
|
||||
}
|
||||
|
||||
try:
|
||||
notification_plugin.send("rotation", data, [data["owner"]])
|
||||
current_app.logger.debug(log_data)
|
||||
# we need the notification.options here because the email templates utilize the interval/unit info
|
||||
notification_plugin.send(notification_type, data, targets, notification_options)
|
||||
status = SUCCESS_METRIC_STATUS
|
||||
except Exception as e:
|
||||
current_app.logger.error(
|
||||
"Unable to send notification to {}.".format(data["owner"]), exc_info=True
|
||||
)
|
||||
log_data["message"] = f"Unable to send {notification_type} notification for certificate data {data} " \
|
||||
f"to targets {targets}"
|
||||
current_app.logger.error(log_data, exc_info=True)
|
||||
sentry.captureException()
|
||||
|
||||
metrics.send(
|
||||
"notification",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={"status": status, "event_type": "rotation"},
|
||||
metric_tags={"status": status, "event_type": notification_type, "plugin": notification_plugin.slug},
|
||||
)
|
||||
|
||||
if status == SUCCESS_METRIC_STATUS:
|
||||
return True
|
||||
|
||||
|
||||
def send_rotation_notification(certificate):
|
||||
data = certificate_notification_output_schema.dump(certificate).data
|
||||
return send_default_notification("rotation", data, [data["owner"]])
|
||||
|
||||
|
||||
def send_pending_failure_notification(
|
||||
pending_cert, notify_owner=True, notify_security=True, notification_plugin=None
|
||||
pending_cert, notify_owner=True, notify_security=True
|
||||
):
|
||||
"""
|
||||
Sends a report to certificate owners when their pending certificate failed to be created.
|
||||
|
||||
:param pending_cert:
|
||||
:param notification_plugin:
|
||||
:param notify_owner:
|
||||
:param notify_security:
|
||||
:return:
|
||||
"""
|
||||
status = FAILURE_METRIC_STATUS
|
||||
|
||||
if not notification_plugin:
|
||||
notification_plugin = plugins.get(
|
||||
current_app.config.get(
|
||||
"LEMUR_DEFAULT_NOTIFICATION_PLUGIN", "email-notification"
|
||||
)
|
||||
)
|
||||
|
||||
data = pending_certificate_output_schema.dump(pending_cert).data
|
||||
data["security_email"] = current_app.config.get("LEMUR_SECURITY_TEAM_EMAIL")
|
||||
|
||||
email_recipients = []
|
||||
if notify_owner:
|
||||
try:
|
||||
notification_plugin.send("failed", data, [data["owner"]], pending_cert)
|
||||
status = SUCCESS_METRIC_STATUS
|
||||
except Exception as e:
|
||||
current_app.logger.error(
|
||||
"Unable to send pending failure notification to {}.".format(
|
||||
data["owner"]
|
||||
),
|
||||
exc_info=True,
|
||||
)
|
||||
sentry.captureException()
|
||||
email_recipients = email_recipients + [data["owner"]]
|
||||
|
||||
if notify_security:
|
||||
try:
|
||||
notification_plugin.send(
|
||||
"failed", data, data["security_email"], pending_cert
|
||||
)
|
||||
status = SUCCESS_METRIC_STATUS
|
||||
except Exception as e:
|
||||
current_app.logger.error(
|
||||
"Unable to send pending failure notification to "
|
||||
"{}.".format(data["security_email"]),
|
||||
exc_info=True,
|
||||
)
|
||||
sentry.captureException()
|
||||
email_recipients = email_recipients + data["security_email"]
|
||||
|
||||
metrics.send(
|
||||
"notification",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={"status": status, "event_type": "rotation"},
|
||||
)
|
||||
|
||||
if status == SUCCESS_METRIC_STATUS:
|
||||
return True
|
||||
return send_default_notification("failed", data, email_recipients, pending_cert)
|
||||
|
||||
|
||||
def needs_notification(certificate):
|
||||
"""
|
||||
Determine if notifications for a given certificate should
|
||||
currently be sent
|
||||
Determine if notifications for a given certificate should currently be sent.
|
||||
For each notification configured for the cert, verifies it is active, properly configured,
|
||||
and that the configured expiration period is currently met.
|
||||
|
||||
:param certificate:
|
||||
:return:
|
||||
|
@@ -290,7 +396,7 @@ def needs_notification(certificate):

    for notification in certificate.notifications:
        if not notification.active or not notification.options:
            return
            continue

        interval = get_plugin_option("interval", notification.options)
        unit = get_plugin_option("unit", notification.options)
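The one-word change above is behavioural: `return` abandoned the whole loop at the first inactive or misconfigured notification, while `continue` merely skips it and keeps evaluating the rest. A toy illustration of the difference:

def with_return(items):
    kept = []
    for item in items:
        if item is None:
            return kept        # old behaviour: later items never inspected
        kept.append(item)
    return kept

def with_continue(items):
    kept = []
    for item in items:
        if item is None:
            continue           # new behaviour: only the bad entry is skipped
        kept.append(item)
    return kept

assert with_return([None, "weekly-alert"]) == []
assert with_continue([None, "weekly-alert"]) == ["weekly-alert"]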
|
@ -306,9 +412,64 @@ def needs_notification(certificate):
|
|||
|
||||
else:
|
||||
raise Exception(
|
||||
"Invalid base unit for expiration interval: {0}".format(unit)
|
||||
f"Invalid base unit for expiration interval: {unit}"
|
||||
)
|
||||
|
||||
if days == interval:
|
||||
notifications.append(notification)
|
||||
return notifications
|
||||
|
||||
|
||||
def send_security_expiration_summary(exclude=None):
|
||||
"""
|
||||
Sends a report to the security team with a summary of all expiring certificates.
|
||||
All expiring certificates are included here, regardless of notification configuration.
|
||||
Certificates with notifications disabled are omitted.
|
||||
|
||||
:param exclude:
|
||||
:return:
|
||||
"""
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
status = FAILURE_METRIC_STATUS
|
||||
notification_plugin = plugins.get(
|
||||
current_app.config.get("LEMUR_DEFAULT_NOTIFICATION_PLUGIN", "email-notification")
|
||||
)
|
||||
notification_type = "expiration_summary"
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "Sending expiration summary notification for to security team",
|
||||
"notification_type": notification_type,
|
||||
"notification_plugin": notification_plugin.slug,
|
||||
}
|
||||
|
||||
intervals_and_certs = get_eligible_security_summary_certs(exclude)
|
||||
security_email = current_app.config.get("LEMUR_SECURITY_TEAM_EMAIL")
|
||||
|
||||
try:
|
||||
current_app.logger.debug(log_data)
|
||||
|
||||
message_data = []
|
||||
|
||||
for interval, certs in intervals_and_certs.items():
|
||||
cert_data = []
|
||||
for certificate in certs:
|
||||
cert_data.append(certificate_notification_output_schema.dump(certificate).data)
|
||||
interval_data = {"interval": interval, "certificates": cert_data}
|
||||
message_data.append(interval_data)
|
||||
|
||||
notification_plugin.send(notification_type, message_data, security_email, None)
|
||||
status = SUCCESS_METRIC_STATUS
|
||||
except Exception:
|
||||
log_data["message"] = f"Unable to send {notification_type} notification for certificates " \
|
||||
f"{intervals_and_certs} to targets {security_email}"
|
||||
current_app.logger.error(log_data, exc_info=True)
|
||||
sentry.captureException()
|
||||
|
||||
metrics.send(
|
||||
"notification",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={"status": status, "event_type": notification_type, "plugin": notification_plugin.slug},
|
||||
)
|
||||
|
||||
if status == SUCCESS_METRIC_STATUS:
|
||||
return True
|
||||
|
|
|
@ -21,6 +21,8 @@ class NotificationInputSchema(LemurInputSchema):
|
|||
active = fields.Boolean()
|
||||
plugin = fields.Nested(PluginInputSchema, required=True)
|
||||
certificates = fields.Nested(AssociatedCertificateSchema, many=True, missing=[])
|
||||
added_certificates = fields.Nested(AssociatedCertificateSchema, many=True, missing=[])
|
||||
removed_certificates = fields.Nested(AssociatedCertificateSchema, many=True, missing=[])
|
||||
|
||||
|
||||
class NotificationOutputSchema(LemurOutputSchema):
|
||||
|
|
|
@ -43,7 +43,7 @@ def create_default_expiration_notifications(name, recipients, intervals=None):
|
|||
"name": "recipients",
|
||||
"type": "str",
|
||||
"required": True,
|
||||
"validation": "^([\w+-.%]+@[\w-.]+\.[A-Za-z]{2,4},?)+$",
|
||||
"validation": r"^([\w+-.%]+@[\w-.]+\.[A-Za-z]{2,4},?)+$",
|
||||
"helpMessage": "Comma delimited list of email addresses",
|
||||
"value": ",".join(recipients),
|
||||
},
|
||||
|
@ -63,7 +63,7 @@ def create_default_expiration_notifications(name, recipients, intervals=None):
|
|||
"name": "interval",
|
||||
"type": "int",
|
||||
"required": True,
|
||||
"validation": "^\d+$",
|
||||
"validation": r"^\d+$",
|
||||
"helpMessage": "Number of days to be alert before expiration.",
|
||||
"value": i,
|
||||
}
|
||||
|
@ -94,7 +94,7 @@ def create(label, plugin_name, options, description, certificates):
|
|||
:param options:
|
||||
:param description:
|
||||
:param certificates:
|
||||
:rtype : Notification
|
||||
:rtype: Notification
|
||||
:return:
|
||||
"""
|
||||
notification = Notification(
|
||||
|
@ -104,26 +104,30 @@ def create(label, plugin_name, options, description, certificates):
|
|||
return database.create(notification)
|
||||
|
||||
|
||||
def update(notification_id, label, options, description, active, certificates):
|
||||
def update(notification_id, label, plugin_name, options, description, active, added_certificates, removed_certificates):
|
||||
"""
|
||||
Updates an existing notification.
|
||||
|
||||
:param notification_id:
|
||||
:param label: Notification label
|
||||
:param plugin_name:
|
||||
:param options:
|
||||
:param description:
|
||||
:param active:
|
||||
:param certificates:
|
||||
:rtype : Notification
|
||||
:param added_certificates:
|
||||
:param removed_certificates:
|
||||
:rtype: Notification
|
||||
:return:
|
||||
"""
|
||||
notification = get(notification_id)
|
||||
|
||||
notification.label = label
|
||||
notification.plugin_name = plugin_name
|
||||
notification.options = options
|
||||
notification.description = description
|
||||
notification.active = active
|
||||
notification.certificates = certificates
|
||||
notification.certificates = notification.certificates + added_certificates
|
||||
notification.certificates = [c for c in notification.certificates if c not in removed_certificates]
|
||||
|
||||
return database.update(notification)
|
||||
|
||||
|
@ -142,7 +146,7 @@ def get(notification_id):
|
|||
Retrieves a notification by its Lemur assigned ID.
|
||||
|
||||
:param notification_id: Lemur assigned ID
|
||||
:rtype : Notification
|
||||
:rtype: Notification
|
||||
:return:
|
||||
"""
|
||||
return database.get(Notification, notification_id)
|
||||
|
|
|
@ -117,7 +117,7 @@ class NotificationsList(AuthenticatedResource):
|
|||
"""
|
||||
.. http:post:: /notifications
|
||||
|
||||
Creates a new account
|
||||
Creates a new notification
|
||||
|
||||
**Example request**:
|
||||
|
||||
|
@ -126,6 +126,7 @@ class NotificationsList(AuthenticatedResource):
|
|||
POST /notifications HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
Content-Type: application/json;charset=UTF-8
|
||||
|
||||
{
|
||||
"description": "a test",
|
||||
|
@ -213,9 +214,12 @@ class NotificationsList(AuthenticatedResource):
|
|||
"id": 2
|
||||
}
|
||||
|
||||
:arg accountNumber: aws account number
|
||||
:arg label: human readable account label
|
||||
:arg comments: some description about the account
|
||||
:label label: notification name
|
||||
:label slug: notification plugin slug
|
||||
:label plugin_options: notification plugin options
|
||||
:label description: notification description
|
||||
:label active: whether or not the notification is active/enabled
|
||||
:label certificates: certificates to attach to notification
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
"""
|
||||
|
@ -238,7 +242,7 @@ class Notifications(AuthenticatedResource):
|
|||
"""
|
||||
.. http:get:: /notifications/1
|
||||
|
||||
Get a specific account
|
||||
Get a specific notification
|
||||
|
||||
**Example request**:
|
||||
|
||||
|
@ -305,15 +309,28 @@ class Notifications(AuthenticatedResource):
|
|||
"""
|
||||
.. http:put:: /notifications/1
|
||||
|
||||
Updates an account
|
||||
Updates a notification
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
POST /notifications/1 HTTP/1.1
|
||||
PUT /notifications/1 HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
Content-Type: application/json;charset=UTF-8
|
||||
|
||||
{
|
||||
"label": "labelChanged",
|
||||
"plugin": {
|
||||
"slug": "email-notification",
|
||||
"plugin_options": "???"
|
||||
},
|
||||
"description": "Sample notification",
|
||||
"active": "true",
|
||||
"added_certificates": "???",
|
||||
"removed_certificates": "???"
|
||||
}
|
||||
|
||||
|
||||
**Example response**:
|
||||
|
@ -326,24 +343,36 @@ class Notifications(AuthenticatedResource):
|
|||
|
||||
{
|
||||
"id": 1,
|
||||
"accountNumber": 11111111111,
|
||||
"label": "labelChanged",
|
||||
"comments": "this is a thing"
|
||||
"plugin": {
|
||||
"slug": "email-notification",
|
||||
"plugin_options": "???"
|
||||
},
|
||||
"description": "Sample notification",
|
||||
"active": "true",
|
||||
"added_certificates": "???",
|
||||
"removed_certificates": "???"
|
||||
}
|
||||
|
||||
:arg accountNumber: aws account number
|
||||
:arg label: human readable account label
|
||||
:arg comments: some description about the account
|
||||
:label label: notification name
|
||||
:label slug: notification plugin slug
|
||||
:label plugin_options: notification plugin options
|
||||
:label description: notification description
|
||||
:label active: whether or not the notification is active/enabled
|
||||
:label added_certificates: certificates to add
|
||||
:label removed_certificates: certificates to remove
|
||||
:reqheader Authorization: OAuth token to authenticate
|
||||
:statuscode 200: no error
|
||||
"""
|
||||
return service.update(
|
||||
notification_id,
|
||||
data["label"],
|
||||
data["plugin"]["slug"],
|
||||
data["plugin"]["plugin_options"],
|
||||
data["description"],
|
||||
data["active"],
|
||||
data["certificates"],
|
||||
data["added_certificates"],
|
||||
data["removed_certificates"],
|
||||
)
|
||||
|
||||
def delete(self, notification_id):
|
||||
|
|
|
@ -12,10 +12,12 @@ from flask import current_app
|
|||
from flask_script import Manager
|
||||
|
||||
from lemur.authorities.service import get as get_authority
|
||||
from lemur.constants import ACME_ADDITIONAL_ATTEMPTS
|
||||
from lemur.notifications.messaging import send_pending_failure_notification
|
||||
from lemur.pending_certificates import service as pending_certificate_service
|
||||
from lemur.plugins.base import plugins
|
||||
|
||||
|
||||
manager = Manager(usage="Handles pending certificate related tasks.")
|
||||
|
||||
|
||||
|
@@ -107,7 +109,7 @@ def fetch_all_acme():
        error_log["last_error"] = cert.get("last_error")
        error_log["cn"] = pending_cert.cn

        if pending_cert.number_attempts > 4:
        if pending_cert.number_attempts > ACME_ADDITIONAL_ATTEMPTS:
            error_log["message"] = "Marking pending certificate as resolved"
            send_pending_failure_notification(
                pending_cert, notify_owner=pending_cert.notify
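The retry ceiling now comes from lemur/constants.py (imported at the top of this file); this hunk only shows the import, so the definition sketched below is illustrative rather than quoted from the change:

# lemur/constants.py (value illustrative)
ACME_ADDITIONAL_ATTEMPTS = 5  # extra fetch attempts before a pending ACME cert is marked resolved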
|
|
|
@ -9,7 +9,7 @@ from sqlalchemy import (
|
|||
Integer,
|
||||
ForeignKey,
|
||||
String,
|
||||
PassiveDefault,
|
||||
DefaultClause,
|
||||
func,
|
||||
Column,
|
||||
Text,
|
||||
|
@ -76,14 +76,14 @@ class PendingCertificate(db.Model):
|
|||
chain = Column(Text())
|
||||
private_key = Column(Vault, nullable=True)
|
||||
|
||||
date_created = Column(ArrowType, PassiveDefault(func.now()), nullable=False)
|
||||
date_created = Column(ArrowType, DefaultClause(func.now()), nullable=False)
|
||||
dns_provider_id = Column(
|
||||
Integer, ForeignKey("dns_providers.id", ondelete="CASCADE")
|
||||
)
|
||||
|
||||
status = Column(Text(), nullable=True)
|
||||
last_updated = Column(
|
||||
ArrowType, PassiveDefault(func.now()), onupdate=func.now(), nullable=False
|
||||
ArrowType, DefaultClause(func.now()), onupdate=func.now(), nullable=False
|
||||
)
|
||||
|
||||
rotation = Column(Boolean, default=False)
|
||||
|
|
|
@ -93,11 +93,10 @@ def get_pending_certs(pending_ids):
|
|||
def create_certificate(pending_certificate, certificate, user):
|
||||
"""
|
||||
Create and store a certificate with pending certificate's info
|
||||
Args:
|
||||
pending_certificate: PendingCertificate which will populate the certificate
|
||||
certificate: dict from Authority, which contains the body, chain and external id
|
||||
user: User that called this function, used as 'creator' of the certificate if it does
|
||||
not have an owner
|
||||
|
||||
:arg pending_certificate: PendingCertificate which will populate the certificate
|
||||
:arg certificate: dict from Authority, which contains the body, chain and external id
|
||||
:arg user: User that called this function, used as 'creator' of the certificate if it does not have an owner
|
||||
"""
|
||||
certificate["owner"] = pending_certificate.owner
|
||||
data, errors = CertificateUploadInputSchema().load(certificate)
|
||||
|
@ -158,9 +157,9 @@ def cancel(pending_certificate, **kwargs):
|
|||
"""
|
||||
Cancel a pending certificate. A check should be done prior to this function to decide to
|
||||
revoke the certificate or just abort cancelling.
|
||||
Args:
|
||||
pending_certificate: PendingCertificate to be cancelled
|
||||
Returns: the pending certificate if successful, raises Exception if there was an issue
|
||||
|
||||
:arg pending_certificate: PendingCertificate to be cancelled
|
||||
:return: the pending certificate if successful, raises Exception if there was an issue
|
||||
"""
|
||||
plugin = plugins.get(pending_certificate.authority.plugin_name)
|
||||
plugin.cancel_ordered_certificate(pending_certificate, **kwargs)
|
||||
|
|
|
@ -221,9 +221,10 @@ class PendingCertificates(AuthenticatedResource):
|
|||
|
||||
.. sourcecode:: http
|
||||
|
||||
PUT /pending certificates/1 HTTP/1.1
|
||||
PUT /pending_certificates/1 HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
Content-Type: application/json;charset=UTF-8
|
||||
|
||||
{
|
||||
"owner": "jimbob@example.com",
|
||||
|
@ -337,7 +338,7 @@ class PendingCertificates(AuthenticatedResource):
|
|||
|
||||
.. sourcecode:: http
|
||||
|
||||
DELETE /pending certificates/1 HTTP/1.1
|
||||
DELETE /pending_certificates/1 HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
|
@ -465,6 +466,7 @@ class PendingCertificatesUpload(AuthenticatedResource):
|
|||
POST /certificates/1/upload HTTP/1.1
|
||||
Host: example.com
|
||||
Accept: application/json, text/javascript
|
||||
Content-Type: application/json;charset=UTF-8
|
||||
|
||||
{
|
||||
"body": "-----BEGIN CERTIFICATE-----...",
|
||||
|
|
|
@ -3,3 +3,4 @@ from .issuer import IssuerPlugin # noqa
|
|||
from .source import SourcePlugin # noqa
|
||||
from .notification import NotificationPlugin, ExpirationNotificationPlugin # noqa
|
||||
from .export import ExportPlugin # noqa
|
||||
from .tls import TLSPlugin # noqa
|
||||
|
|
|
@ -31,6 +31,11 @@ class ExportDestinationPlugin(DestinationPlugin):
|
|||
|
||||
@property
|
||||
def options(self):
|
||||
"""
|
||||
Gets/sets options for the plugin.
|
||||
|
||||
:return:
|
||||
"""
|
||||
return self.default_options + self.additional_options
|
||||
|
||||
def export(self, body, private_key, cert_chain, options):
|
||||
|
|
|
@ -23,7 +23,7 @@ class IssuerPlugin(Plugin):
|
|||
def create_authority(self, options):
|
||||
raise NotImplementedError
|
||||
|
||||
def revoke_certificate(self, certificate, comments):
|
||||
def revoke_certificate(self, certificate, reason):
|
||||
raise NotImplementedError
|
||||
|
||||
def get_ordered_certificate(self, certificate):
|
||||
|
@ -31,3 +31,12 @@ class IssuerPlugin(Plugin):
|
|||
|
||||
def cancel_ordered_certificate(self, pending_cert, **kwargs):
|
||||
raise NotImplementedError
|
||||
|
||||
def wrap_certificate(self, cert):
|
||||
pass
|
||||
|
||||
def wrap_auth_certificate(self, cert):
|
||||
pass
|
||||
|
||||
def wrap_private_key(self, cert):
|
||||
pass
|
||||
|
|
|
@@ -20,6 +20,15 @@ class NotificationPlugin(Plugin):
    def send(self, notification_type, message, targets, options, **kwargs):
        raise NotImplementedError

    def get_recipients(self, options, additional_recipients):
        """
        Given a set of options (which should include configured recipient info), returns the parsed list of recipients
        from those options plus the additional recipients specified. The returned value has no duplicates.

        For any notification types where recipients can't be dynamically modified, this returns only the additional recipients.
        """
        return additional_recipients

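A hedged sketch of how an email-style plugin could satisfy this contract, merging the configured recipients option with the extra addresses and dropping duplicates (class name illustrative; only the base class and get_plugin_option come from Lemur itself):

from lemur.plugins.bases import NotificationPlugin
from lemur.plugins.utils import get_plugin_option

class ExampleEmailNotificationPlugin(NotificationPlugin):
    def get_recipients(self, options, additional_recipients):
        configured = get_plugin_option("recipients", options) or ""
        combined = [r.strip() for r in configured.split(",") if r.strip()]
        combined += additional_recipients
        return list(set(combined))  # de-duplicated; ordering is not significant here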

class ExpirationNotificationPlugin(NotificationPlugin):
    """
@ -33,7 +42,7 @@ class ExpirationNotificationPlugin(NotificationPlugin):
|
|||
"name": "interval",
|
||||
"type": "int",
|
||||
"required": True,
|
||||
"validation": "^\d+$",
|
||||
"validation": r"^\d+$",
|
||||
"helpMessage": "Number of days to be alert before expiration.",
|
||||
},
|
||||
{
|
||||
|
@ -48,7 +57,12 @@ class ExpirationNotificationPlugin(NotificationPlugin):
|
|||
|
||||
@property
|
||||
def options(self):
|
||||
"""
|
||||
Gets/sets options for the plugin.
|
||||
|
||||
:return:
|
||||
"""
|
||||
return self.default_options + self.additional_options
|
||||
|
||||
def send(self, notification_type, message, targets, options, **kwargs):
|
||||
def send(self, notification_type, message, excluded_targets, options, **kwargs):
|
||||
raise NotImplementedError
|
||||
|
|
|
@@ -33,4 +33,9 @@ class SourcePlugin(Plugin):

    @property
    def options(self):
        """
        Gets/sets options for the plugin.

        :return:
        """
        return self.default_options + self.additional_options
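With the shared options property in place, a concrete plugin only needs to declare its extra fields; callers then see the base defaults and the plugin-specific entries as one merged list. A hedged sketch (plugin name and option entirely illustrative):

from lemur.plugins.bases import SourcePlugin

class ExampleSourcePlugin(SourcePlugin):
    title = "Example source"
    slug = "example-source"
    additional_options = [
        {
            "name": "endpoint",
            "type": "str",
            "required": True,
            "helpMessage": "Hypothetical API endpoint to poll for certificates.",
        }
    ]

    def get_certificates(self, options, **kwargs):
        return []

# ExampleSourcePlugin().options now yields default_options + additional_options.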