diff --git a/Pipfile b/Pipfile index f3c910f7e..ac753aa4b 100644 --- a/Pipfile +++ b/Pipfile @@ -21,7 +21,6 @@ django-rest-framework = "*" django-storages = "*" djangorestframework-guardian = "*" drf-yasg = "*" -elastic-apm = "*" facebook-sdk = "*" ldap3 = "*" lxml = "*" diff --git a/Pipfile.lock b/Pipfile.lock index 6a1a545da..0dbb95b72 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "b7ba5405c03bf3526eebb29817887744a3e31bca019ad2e566ea23096c6a5cfe" + "sha256": "616f5d355c42881b7ea70d4623bf885cff043d4c58913287960923df49c09909" }, "pipfile-spec": 6, "requires": { @@ -21,6 +21,7 @@ "sha256:70cdb10628468ff14e57ec2f751c7aa9e48e7e3651cfd62d431213c0c4e58f21", "sha256:aa7f313fb887c91f15474c1229907a04dac0b8135822d6603437803424c0aa59" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==2.6.1" }, "asgiref": { @@ -28,14 +29,16 @@ "sha256:7e51911ee147dd685c3c8b805c0ad0cb58d360987b56953878f8c06d2d1c6f1a", "sha256:9fc6fb5d39b8af147ba40765234fa822b39818b12cc80b35ad9b0cef3a476aed" ], + "markers": "python_version >= '3.5'", "version": "==3.2.10" }, "attrs": { "hashes": [ - "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", - "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" + "sha256:0ef97238856430dcf9228e07f316aefc17e8939fc8507e18c6501b761ef1a42a", + "sha256:2867b7b9f8326499ab5b0e2d12801fa5c98842d2cbd22b35112ae04bf85b4dff" ], - "version": "==19.3.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==20.1.0" }, "billiard": { "hashes": [ @@ -258,6 +261,7 @@ "sha256:6dd02d5a4bd2516fb93f80360673bf540c3b6641fec8766b1da2870a5aa00b32", "sha256:8b1ac62c581dbc5799b03e535854b92fc4053ecfe74bad3f9c05782063d4196b" ], + "markers": "python_version >= '3.5'", "version": "==3.11.1" }, "djangorestframework-guardian": { @@ -274,6 +278,7 @@ "sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827", "sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99" ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.15.2" }, "drf-yasg": { @@ -291,39 +296,6 @@ ], "version": "==1.0.0" }, - "elastic-apm": { - "hashes": [ - "sha256:0c766621a4d15ed4ff7dd195499df1af6d7eb8c13790a727bf05773de2952de0", - "sha256:2187a0fd080cac7ed65dabfd64d7693ff187ae9b5ad4a810772387dca6877160", - "sha256:2a0bb663d3f9388db233784356f218807b9cfe1f4d4fa4569f41b567c068b50f", - "sha256:317e2a897b2a81d79bce42688975cfe0ccf6a3dc8025540c47093ea8ac5f1771", - "sha256:3a91d2df89af564dbf0abccb3d370940083205247903fe6d708fa771b16fca38", - "sha256:44fe2ce3ea57f97fce5fb32e747f6a9c9b361f5055608d59747c39ae06d1c526", - "sha256:4ca9f42d4b841ce598819f2f3a4d516c549cd5c02ab43c8283ca406c3b92a2db", - "sha256:56b34b30420aebf9566eeee3ffd633131ce51d1e2a4da6061f143a2b547d1980", - "sha256:5a56d20734771a4f7823ec12492fcd17a15dac761ecf1452d034a9b9b8b83388", - "sha256:6279cc28bd2f2bc2da478cebd5ace711b52549f736d138f950ebe0fa8f706a6f", - "sha256:69bcac2cee8f16a093f57000128caab7d1d3d8ac1474e24ce45190264ffc5ebe", - "sha256:7021b931210140e02540f3e56fdc8be07542eed10de82c9e5464dbe449a4c9aa", - "sha256:70237e1242ae461500ed455f47a5518abdbdc565e47265eddf3ca1dad530a541", - "sha256:7545f27703151ce71d73271a95662735cffb537189c214f778195a6fdab58533", - "sha256:8525ba800fbd955b65af667c43889df2358c22b1ef66ee92a846f5f4bc8d7286", - "sha256:8ba4239862f0b043d191a19e021637a25c3490f677cb8b1dd752bc425bb382e0", - 
"sha256:8c98625cb825c404954763ca5a6f82e06b833a6e6a9e2035065dc9894b4dc6dc", - "sha256:b02394f4d55af4f39086aee7bacf8652fde703f7226c5a564cdae9f7e2bf3f71", - "sha256:b3b1815765638ce01f9dbd136822d79e887d8d09cd10bc8770d4cc1d530bb853", - "sha256:b7bce10060abd98198d8a96e7f3e2e0e169dbd860c76e2c09e6a8874384eebb7", - "sha256:b8f849202dffe97512843dd366c4104d07d3b319e42916e3e031cff3db7475db", - "sha256:bc677614c198486ca4ef1026bde0c4efd74b936598ff9d64ea109f978a6381bb", - "sha256:d19fe00915c60ceabee42ae8c0aa76c6a48c2ffa67c5ba7f0d0fbb856ac36c09", - "sha256:d5561eb57eaa43c721258797dfab67b13938fdc94b7daec7a6ccb56dc524fe02", - "sha256:dc04aa32c7a3a17c688e3cc4c6293f2176be2482d67efccc651ff1fbb5c00ed6", - "sha256:e0d2c3463061b0e50ca53530bd5317498517d208618d90cf6e9933e93f9c727e", - "sha256:e9a416418cb2f6deb7a18b68bd75dad0552b4fd85d3e72e59ae4add0e8739b1c" - ], - "index": "pypi", - "version": "==5.8.1" - }, "facebook-sdk": { "hashes": [ "sha256:2e987b3e0f466a6f4ee77b935eb023dba1384134f004a2af21f1cfff7fe0806e", @@ -336,6 +308,7 @@ "hashes": [ "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d" ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.18.2" }, "idna": { @@ -350,6 +323,7 @@ "sha256:88b101b2668a1d81d6d72d4c2018e53bc6c7fc544c987849da1c7f77545c3bc9", "sha256:f576e85132d34f5bf7df5183c2c6f94cfb32e528f53065345cf71329ba0b8924" ], + "markers": "python_version >= '3.5'", "version": "==0.5.0" }, "itypes": { @@ -364,6 +338,7 @@ "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0", "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==2.11.2" }, "jmespath": { @@ -371,6 +346,7 @@ "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9", "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f" ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.10.0" }, "jsonschema": { @@ -385,12 +361,16 @@ "sha256:be48cdffb54a2194d93ad6533d73f69408486483d189fe9f5990ee24255b0e0a", "sha256:ca1b45faac8c0b18493d02a8571792f3c40291cf2bcf1f55afed3d8f3aa7ba74" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==4.6.11" }, "ldap3": { "hashes": [ + "sha256:b399c39e80b6459e349b33fbe9787c1bcbf86de05994d41806a05c06f3e7574d", + "sha256:bdaf568cd30fc0006c8bb4f5e6014554afeb0c4bbea1677de9706e278a4057e7", + "sha256:df27407f4991f25bd669b5bb1bc8cb9ddf44a3e713ff6b3afeb3b3c26502f88f", "sha256:59d1adcd5ead263387039e2a37d7cd772a2006b1cdb3ecfcbaab5192a601c515", - "sha256:df27407f4991f25bd669b5bb1bc8cb9ddf44a3e713ff6b3afeb3b3c26502f88f" + "sha256:7abbb3e5f4522114e0230ec175b60ae968b938d1f8a7d8bce7789f78d871fb9f" ], "index": "pypi", "version": "==2.8" @@ -468,6 +448,7 @@ "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.1" }, "oauthlib": { @@ -475,6 +456,7 @@ "sha256:bee41cc35fcca6e988463cacc3bcb8a96224f470ca547e697b604cc697b2f889", "sha256:df884cd6cbe20e32633f1db1072e9356f53638e4361bef4e8b03c9127c9328ea" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==3.1.0" }, "packaging": { @@ -530,15 +512,37 @@ }, "pyasn1": { "hashes": [ + 
"sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7", + "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00", + "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576", + "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba", + "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8", + "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12", + "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3", + "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf", + "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359", "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d", - "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba" + "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2", + "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776", + "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86" ], "version": "==0.4.8" }, "pyasn1-modules": { "hashes": [ + "sha256:426edb7a5e8879f1ec54a1864f16b882c2837bfd06eee62f2c982315ee2473ed", + "sha256:f39edd8c4ecaa4556e989147ebf219227e2cd2e8a43c7e7fcb1f1c18c5fd6a3d", + "sha256:b80486a6c77252ea3a3e9b1e360bc9cf28eaac41263d173c032581ad2f20fe45", + "sha256:a99324196732f53093a84c4369c996713eb8c89d360a496b599fb1a9c47fc3eb", + "sha256:0845a5582f6a02bb3e1bde9ecfc4bfcae6ec3210dd270522fee602365430c3f8", "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e", - "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74" + "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74", + "sha256:65cebbaffc913f4fe9e4808735c95ea22d7a7775646ab690518c056784bc21b4", + "sha256:0fe1b68d1e486a1ed5473f1302bd991c1611d319bba158e98b106ff86e1d7199", + "sha256:cbac4bc38d117f2a49aeedec4407d23e8866ea4ac27ff2cf7fb3e5b570df19e0", + "sha256:c29a5e5cc7a3f05926aff34e097e84f8589cd790ce0ed41b67aed6857b26aafd", + "sha256:15b7c67fabc7fc240d87fb9aabf999cf82311a6d6fb2c70d00d3d0604878c811", + "sha256:fe0644d9ab041506b62782e92b06b8c68cca799e1a9636ec398675459e031405" ], "version": "==0.2.8" }, @@ -547,6 +551,7 @@ "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0", "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.20" }, "pycryptodome": { @@ -618,6 +623,7 @@ "sha256:ea4d4b58f9bc34e224ef4b4604a6be03d72ef1f8c486391f970205f6733dbc46", "sha256:f60b3484ce4be04f5da3777c51c5140d3fe21cdd6674f2b6568f41c8130bcdeb" ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==3.9.8" }, "pyjwkest": { @@ -639,6 +645,7 @@ "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.7" }, "pyrsistent": { @@ -652,6 +659,7 @@ "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.8.1" }, "pytz": { @@ -709,6 +717,7 @@ "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2", 
"sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==3.5.3" }, "requests": { @@ -716,12 +725,14 @@ "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b", "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==2.24.0" }, "requests-oauthlib": { "hashes": [ + "sha256:b4261601a71fd721a8bd6d7aa1cc1d6a8a93b4a9f5e96626f8e4d91e8beeaa6a", "sha256:7f71572defaecd16372f9006f33c2ec8c077c3cfa6f5911a9a90202beb513f3d", - "sha256:b4261601a71fd721a8bd6d7aa1cc1d6a8a93b4a9f5e96626f8e4d91e8beeaa6a" + "sha256:fa6c47b933f01060936d87ae9327fead68768b69c6c9ea2109c48be30f2d4dbc" ], "index": "pypi", "version": "==1.3.0" @@ -755,7 +766,7 @@ "sha256:ed5b3698a2bb241b7f5cbbe277eaa7fe48b07a58784fba4f75224fd066d253ad", "sha256:f9dcc1ae73f36e8059589b601e8e4776b9976effd76c21ad6a855a74318efd6e" ], - "markers": "platform_python_implementation == 'CPython' and python_version < '3.9'", + "markers": "python_version < '3.9' and platform_python_implementation == 'CPython'", "version": "==0.2.0" }, "s3transfer": { @@ -794,6 +805,7 @@ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, "sqlparse": { @@ -801,6 +813,7 @@ "sha256:022fb9c87b524d1f7862b3037e541f68597a730a8843245c349fc93e1643dc4e", "sha256:e162203737712307dfe78860cc56c8da8a852ab2ee33750e33aeadf38d12c548" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.3.1" }, "structlog": { @@ -824,6 +837,7 @@ "sha256:07620c3f3f8eed1f12600845892b0e036a2420acf513c53f7de0abd911a5894f", "sha256:5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==3.0.1" }, "urllib3": { @@ -835,7 +849,6 @@ "sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461" ], "index": "pypi", - "markers": null, "version": "==1.25.10" }, "vine": { @@ -843,6 +856,7 @@ "sha256:133ee6d7a9016f177ddeaf191c1f58421a1dcc6ee9a42c58b34bed40e1d2cd87", "sha256:ea4947cc56d1fd6f2095c8d543ee25dad966f78692528e68b4fada11ba3f98af" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.3.0" } }, @@ -859,6 +873,7 @@ "sha256:7e51911ee147dd685c3c8b805c0ad0cb58d360987b56953878f8c06d2d1c6f1a", "sha256:9fc6fb5d39b8af147ba40765234fa822b39818b12cc80b35ad9b0cef3a476aed" ], + "markers": "python_version >= '3.5'", "version": "==3.2.10" }, "astroid": { @@ -866,14 +881,16 @@ "sha256:4c17cea3e592c21b6e222f673868961bad77e1f985cb1694ed077475a89229c1", "sha256:d8506842a3faf734b81599c8b98dcc423de863adcc1999248480b18bd31a0f38" ], + "markers": "python_version >= '3.5'", "version": "==2.4.1" }, "attrs": { "hashes": [ - "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", - "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" + "sha256:0ef97238856430dcf9228e07f316aefc17e8939fc8507e18c6501b761ef1a42a", + "sha256:2867b7b9f8326499ab5b0e2d12801fa5c98842d2cbd22b35112ae04bf85b4dff" ], - "version": "==19.3.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": 
"==20.1.0" }, "autopep8": { "hashes": [ @@ -903,6 +920,7 @@ "sha256:477f0e18a0d58e50bb3dbc9af7fcda464fd0ebfc7a6151d8888602d7153171a0", "sha256:cd4f3a231305e405ed8944d8ff35bd742d9bc740ad62f483bd0ca21ce7131984" ], + "markers": "python_version >= '3.5'", "version": "==1.0.0" }, "bumpversion": { @@ -932,6 +950,7 @@ "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==7.1.2" }, "colorama": { @@ -1018,6 +1037,7 @@ "sha256:15e351d19611c887e482fb960eae4d44845013cc142d42896e9862f775d8cf5c", "sha256:f04b9fcbac03b0a3e58c0ab3a0ecc462e023a9faf046d57794184028123aa208" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==3.8.3" }, "flake8-polyfill": { @@ -1032,6 +1052,7 @@ "sha256:91f36bfb1ab7949b3b40e23736db18231bf7593edada2ba5c3a174a7b23657ac", "sha256:c9e1f2d0db7ddb9a704c2a0217be31214e91a4fe1dea1efad19ae42ba0c285c9" ], + "markers": "python_version >= '3.4'", "version": "==4.0.5" }, "gitpython": { @@ -1039,6 +1060,7 @@ "sha256:2db287d71a284e22e5c2846042d0602465c7434d910406990d5b74df4afb0858", "sha256:fa3b92da728a457dd75d62bb5f3eb2816d99a7fe6c67398e260637a40e3fafb5" ], + "markers": "python_version >= '3.4'", "version": "==3.1.7" }, "idna": { @@ -1053,6 +1075,7 @@ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1", "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==4.3.21" }, "lazy-object-proxy": { @@ -1079,6 +1102,7 @@ "sha256:efa1909120ce98bbb3777e8b6f92237f5d5c8ea6758efea36a473e1d38f7d3e4", "sha256:f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.4.3" }, "mccabe": { @@ -1121,6 +1145,7 @@ "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367", "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.6.0" }, "pydocstyle": { @@ -1128,6 +1153,7 @@ "sha256:da7831660b7355307b32778c4a0dbfb137d89254ef31a2b2978f50fc0b4d7586", "sha256:f4f5d210610c2d153fae39093d44224c17429e2ad7da12a8b419aba5c2f614b5" ], + "markers": "python_version >= '3.5'", "version": "==5.0.2" }, "pyflakes": { @@ -1135,6 +1161,7 @@ "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92", "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.2.0" }, "pylint": { @@ -1227,6 +1254,7 @@ "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b", "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==2.24.0" }, "requirements-detector": { @@ -1254,6 +1282,7 @@ "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.15.0" }, "smmap": { @@ -1261,6 +1290,7 @@ 
"sha256:54c44c197c819d5ef1991799a7e30b662d1e520f2ac75c9efbeb54a742214cf4", "sha256:9c98bbd1f9786d22f14b3d4126894d56befb835ec90cef151af566c7e19b5d24" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==3.0.4" }, "snowballstemmer": { @@ -1275,6 +1305,7 @@ "sha256:022fb9c87b524d1f7862b3037e541f68597a730a8843245c349fc93e1643dc4e", "sha256:e162203737712307dfe78860cc56c8da8a852ab2ee33750e33aeadf38d12c548" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.3.1" }, "stevedore": { @@ -1282,6 +1313,7 @@ "sha256:38791aa5bed922b0a844513c5f9ed37774b68edc609e5ab8ab8d8fe0ce4315e5", "sha256:c8f4f0ebbc394e52ddf49de8bcc3cf8ad2b4425ebac494106bbc5e3661ac7633" ], + "markers": "python_version >= '3.6'", "version": "==3.2.0" }, "toml": { @@ -1334,7 +1366,6 @@ "sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461" ], "index": "pypi", - "markers": null, "version": "==1.25.10" }, "websocket-client": { diff --git a/README.md b/README.md index 1e4ccf2ae..1b8cc91c7 100644 --- a/README.md +++ b/README.md @@ -61,7 +61,6 @@ postgresql: user: postgres log_level: debug -error_reporting: false ``` ## Security diff --git a/docs/installation/kubernetes.md b/docs/installation/kubernetes.md index 956d1b034..8e704e9a8 100644 --- a/docs/installation/kubernetes.md +++ b/docs/installation/kubernetes.md @@ -18,16 +18,11 @@ config: # Optionally specify fixed secret_key, otherwise generated automatically # secret_key: _k*@6h2u2@q-dku57hhgzb7tnx*ba9wodcb^s9g0j59@=y(@_o # Enable error reporting - error_reporting: false + error_reporting: + enabled: false # Log level used by web and worker # Can be either debug, info, warning, error log_level: warning - # Optionally enable Elastic APM Support - apm: - enabled: false - server_url: "" - secret_token: "" - verify_server_cert: true # This Helm chart ships with built-in Prometheus ServiceMonitors and Rules. # This requires the CoreOS Prometheus Operator. diff --git a/helm/templates/configmap.yaml b/helm/templates/configmap.yaml index 73a49abab..ef2a137b6 100644 --- a/helm/templates/configmap.yaml +++ b/helm/templates/configmap.yaml @@ -19,10 +19,7 @@ data: host: "{{ .Release.Name }}-redis-master" cache_db: 0 message_queue_db: 1 - error_reporting: {{ .Values.config.error_reporting }} + error_reporting: + enabled: {{ .Values.config.error_reporting.enabled }} + environment: {{ .Values.config.error_reporting.environment }} log_level: "{{ .Values.config.log_level }}" - apm: - enabled: {{ .Values.config.apm.enabled }} - server_url: "{{ .Values.config.apm.server_url }}" - secret_token: "{{ .Values.config.apm.secret_token }}" - verify_server_cert: {{ .Values.config.apm.verify_server_cert }} diff --git a/helm/values.yaml b/helm/values.yaml index bdfea8a14..585cc4e83 100644 --- a/helm/values.yaml +++ b/helm/values.yaml @@ -10,16 +10,12 @@ config: # Optionally specify fixed secret_key, otherwise generated automatically # secret_key: _k*@6h2u2@q-dku57hhgzb7tnx*ba9wodcb^s9g0j59@=y(@_o # Enable error reporting - error_reporting: false + error_reporting: + enabled: false + environment: customer # Log level used by web and worker # Can be either debug, info, warning, error log_level: warning - # Optionally enable Elastic APM Support - apm: - enabled: false - server_url: "" - secret_token: "" - verify_server_cert: true # This Helm chart ships with built-in Prometheus ServiceMonitors and Rules. # This requires the CoreOS Prometheus Operator. 
diff --git a/passbook/flows/planner.py b/passbook/flows/planner.py index 546417bd3..b32f8d666 100644 --- a/passbook/flows/planner.py +++ b/passbook/flows/planner.py @@ -1,13 +1,14 @@ """Flows Planner""" from dataclasses import dataclass, field -from time import time from typing import Any, Dict, List, Optional from django.core.cache import cache from django.http import HttpRequest -from elasticapm import capture_span +from sentry_sdk import start_span +from sentry_sdk.tracing import Span from structlog import get_logger +from passbook.audit.models import cleanse_dict from passbook.core.models import User from passbook.flows.exceptions import EmptyFlowException, FlowNonApplicableException from passbook.flows.markers import ReevaluateMarker, StageMarker @@ -90,47 +91,53 @@ class FlowPlanner: self.allow_empty_flows = False self.flow = flow - @capture_span(name="FlowPlanner", span_type="flow.planner.plan") def plan( self, request: HttpRequest, default_context: Optional[Dict[str, Any]] = None ) -> FlowPlan: """Check each of the flows' policies, check policies for each stage with PolicyBinding and return ordered list""" - LOGGER.debug("f(plan): Starting planning process", flow=self.flow) - # Bit of a workaround here, if there is a pending user set in the default context - # we use that user for our cache key - # to make sure they don't get the generic response - if default_context and PLAN_CONTEXT_PENDING_USER in default_context: - user = default_context[PLAN_CONTEXT_PENDING_USER] - else: - user = request.user - # First off, check the flow's direct policy bindings - # to make sure the user even has access to the flow - engine = PolicyEngine(self.flow, user, request) - if default_context: - engine.request.context = default_context - engine.build() - result = engine.result - if not result.passing: - raise FlowNonApplicableException(result.messages) - # User is passing so far, check if we have a cached plan - cached_plan_key = cache_key(self.flow, user) - cached_plan = cache.get(cached_plan_key, None) - if cached_plan and self.use_cache: - LOGGER.debug( - "f(plan): Taking plan from cache", flow=self.flow, key=cached_plan_key - ) - # Reset the context as this isn't factored into caching - cached_plan.context = default_context or {} - return cached_plan - LOGGER.debug("f(plan): building plan", flow=self.flow) - plan = self._build_plan(user, request, default_context) - cache.set(cache_key(self.flow, user), plan) - if not plan.stages and not self.allow_empty_flows: - raise EmptyFlowException() - return plan + with start_span(op="flow.planner.plan") as span: + span: Span + span.set_data("flow", self.flow) + span.set_data("request", request) + + LOGGER.debug("f(plan): Starting planning process", flow=self.flow) + # Bit of a workaround here, if there is a pending user set in the default context + # we use that user for our cache key + # to make sure they don't get the generic response + if default_context and PLAN_CONTEXT_PENDING_USER in default_context: + user = default_context[PLAN_CONTEXT_PENDING_USER] + else: + user = request.user + # First off, check the flow's direct policy bindings + # to make sure the user even has access to the flow + engine = PolicyEngine(self.flow, user, request) + if default_context: + span.set_data("default_context", cleanse_dict(default_context)) + engine.request.context = default_context + engine.build() + result = engine.result + if not result.passing: + raise FlowNonApplicableException(result.messages) + # User is passing so far, check if we have a cached plan + 
cached_plan_key = cache_key(self.flow, user) + cached_plan = cache.get(cached_plan_key, None) + if cached_plan and self.use_cache: + LOGGER.debug( + "f(plan): Taking plan from cache", + flow=self.flow, + key=cached_plan_key, + ) + # Reset the context as this isn't factored into caching + cached_plan.context = default_context or {} + return cached_plan + LOGGER.debug("f(plan): building plan", flow=self.flow) + plan = self._build_plan(user, request, default_context) + cache.set(cache_key(self.flow, user), plan) + if not plan.stages and not self.allow_empty_flows: + raise EmptyFlowException() + return plan - @capture_span(name="FlowPlanner", span_type="flow.planner.build_plan") def _build_plan( self, user: User, @@ -139,38 +146,40 @@ class FlowPlanner: ) -> FlowPlan: """Build flow plan by checking each stage in their respective order and checking the applied policies""" - start_time = time() - plan = FlowPlan(flow_pk=self.flow.pk.hex) - if default_context: - plan.context = default_context - # Check Flow policies - for stage in ( - self.flow.stages.order_by("flowstagebinding__order") - .select_subclasses() - .select_related() - ): - binding: FlowStageBinding = stage.flowstagebinding_set.get( - target__pk=self.flow.pk - ) - engine = PolicyEngine(binding, user, request) - engine.request.context = plan.context - engine.build() - if engine.passing: - LOGGER.debug("f(plan): Stage passing", stage=stage, flow=self.flow) - plan.stages.append(stage) - marker = StageMarker() - if binding.re_evaluate_policies: - LOGGER.debug( - "f(plan): Stage has re-evaluate marker", - stage=stage, - flow=self.flow, - ) - marker = ReevaluateMarker(binding=binding, user=user) - plan.markers.append(marker) - end_time = time() + with start_span(op="flow.planner.build_plan") as span: + span: Span + span.set_data("flow", self.flow) + span.set_data("user", user) + span.set_data("request", request) + + plan = FlowPlan(flow_pk=self.flow.pk.hex) + if default_context: + plan.context = default_context + # Check Flow policies + for stage in ( + self.flow.stages.order_by("flowstagebinding__order") + .select_subclasses() + .select_related() + ): + binding: FlowStageBinding = stage.flowstagebinding_set.get( + target__pk=self.flow.pk + ) + engine = PolicyEngine(binding, user, request) + engine.request.context = plan.context + engine.build() + if engine.passing: + LOGGER.debug("f(plan): Stage passing", stage=stage, flow=self.flow) + plan.stages.append(stage) + marker = StageMarker() + if binding.re_evaluate_policies: + LOGGER.debug( + "f(plan): Stage has re-evaluate marker", + stage=stage, + flow=self.flow, + ) + marker = ReevaluateMarker(binding=binding, user=user) + plan.markers.append(marker) LOGGER.debug( - "f(plan): Finished building", - flow=self.flow, - duration_s=end_time - start_time, + "f(plan): Finished building", flow=self.flow, duration_s=span.timestamp, ) return plan diff --git a/passbook/flows/tests/test_planner.py b/passbook/flows/tests/test_planner.py index 1f9a680ce..13db0db56 100644 --- a/passbook/flows/tests/test_planner.py +++ b/passbook/flows/tests/test_planner.py @@ -1,5 +1,5 @@ """flow planner tests""" -from unittest.mock import MagicMock, PropertyMock, patch +from unittest.mock import MagicMock, Mock, PropertyMock, patch from django.contrib.sessions.middleware import SessionMiddleware from django.core.cache import cache @@ -18,7 +18,7 @@ from passbook.policies.types import PolicyResult from passbook.stages.dummy.models import DummyStage POLICY_RETURN_FALSE = PropertyMock(return_value=PolicyResult(False)) 
-TIME_NOW_MOCK = MagicMock(return_value=3) +CACHE_MOCK = Mock(wraps=cache) POLICY_RETURN_TRUE = MagicMock(return_value=PolicyResult(True)) @@ -64,7 +64,7 @@ class TestFlowPlanner(TestCase): planner = FlowPlanner(flow) planner.plan(request) - @patch("passbook.flows.planner.time", TIME_NOW_MOCK) + @patch("passbook.flows.planner.cache", CACHE_MOCK) def test_planner_cache(self): """Test planner cache""" flow = Flow.objects.create( @@ -82,12 +82,15 @@ class TestFlowPlanner(TestCase): planner = FlowPlanner(flow) planner.plan(request) - self.assertEqual(TIME_NOW_MOCK.call_count, 2) # Start and end + self.assertEqual( + CACHE_MOCK.set.call_count, 1 + ) # Ensure plan is written to cache planner = FlowPlanner(flow) planner.plan(request) self.assertEqual( - TIME_NOW_MOCK.call_count, 2 - ) # When taking from cache, time is not measured + CACHE_MOCK.set.call_count, 1 + ) # Ensure nothing is written to cache + self.assertEqual(CACHE_MOCK.get.call_count, 2) # Get is called twice def test_planner_default_context(self): """Test planner with default_context""" diff --git a/passbook/lib/default.yml b/passbook/lib/default.yml index a6b74cb87..82a2a5805 100644 --- a/passbook/lib/default.yml +++ b/passbook/lib/default.yml @@ -15,7 +15,9 @@ debug: false log_level: warning # Error reporting, sends stacktrace to sentry.beryju.org -error_reporting: false +error_reporting: + enabled: false + environment: customer passbook: # Optionally add links to the footer on the login page diff --git a/passbook/lib/expression/evaluator.py b/passbook/lib/expression/evaluator.py index df954d92c..29973947a 100644 --- a/passbook/lib/expression/evaluator.py +++ b/passbook/lib/expression/evaluator.py @@ -4,8 +4,9 @@ from textwrap import indent from typing import Any, Dict, Iterable, Optional from django.core.exceptions import ValidationError -from elasticapm import capture_span from requests import Session +from sentry_sdk import start_span +from sentry_sdk.tracing import Span from structlog import get_logger from passbook.core.models import User @@ -71,27 +72,31 @@ class BaseEvaluator: full_expression += f"\nresult = handler({handler_signature})" return full_expression - @capture_span(name="BaseEvaluator", span_type="lib.evaluator.evaluate") def evaluate(self, expression_source: str) -> Any: """Parse and evaluate expression. If the syntax is incorrect, a SyntaxError is raised. If any exception is raised during execution, it is raised. The result is returned without any type-checking.""" - param_keys = self._context.keys() - ast_obj = compile( - self.wrap_expression(expression_source, param_keys), self._filename, "exec", - ) - try: - _locals = self._context - # Yes this is an exec, yes it is potentially bad. Since we limit what variables are - # available here, and these policies can only be edited by admins, this is a risk - # we're willing to take. - # pylint: disable=exec-used - exec(ast_obj, self._globals, _locals) # nosec # noqa - result = _locals["result"] - except Exception as exc: - LOGGER.warning("Expression error", exc=exc) - raise - return result + with start_span(op="lib.evaluator.evaluate") as span: + span: Span + span.set_data("expression", expression_source) + param_keys = self._context.keys() + ast_obj = compile( + self.wrap_expression(expression_source, param_keys), + self._filename, + "exec", + ) + try: + _locals = self._context + # Yes this is an exec, yes it is potentially bad. 
Since we limit what variables are + # available here, and these policies can only be edited by admins, this is a risk + # we're willing to take. + # pylint: disable=exec-used + exec(ast_obj, self._globals, _locals) # nosec # noqa + result = _locals["result"] + except Exception as exc: + LOGGER.warning("Expression error", exc=exc) + raise + return result def validate(self, expression: str) -> bool: """Validate expression's syntax, raise ValidationError if Syntax is invalid""" diff --git a/passbook/lib/sentry.py b/passbook/lib/sentry.py index 08d2bdebf..8aadcd385 100644 --- a/passbook/lib/sentry.py +++ b/passbook/lib/sentry.py @@ -4,7 +4,6 @@ from botocore.client import ClientError from django.core.exceptions import DisallowedHost, ValidationError from django.db import InternalError, OperationalError, ProgrammingError from django_redis.exceptions import ConnectionInterrupted -from elasticapm.transport.http import TransportException from redis.exceptions import RedisError from rest_framework.exceptions import APIException from structlog import get_logger @@ -34,7 +33,6 @@ def before_send(event, hint): OSError, RedisError, SentryIgnoredException, - TransportException, ) if "exc_info" in hint: _, exc_value, _ = hint["exc_info"] diff --git a/passbook/policies/engine.py b/passbook/policies/engine.py index 3671e239f..2da684862 100644 --- a/passbook/policies/engine.py +++ b/passbook/policies/engine.py @@ -5,7 +5,8 @@ from typing import List, Optional from django.core.cache import cache from django.http import HttpRequest -from elasticapm import capture_span +from sentry_sdk import start_span +from sentry_sdk.tracing import Span from structlog import get_logger from passbook.core.models import User @@ -70,36 +71,39 @@ class PolicyEngine: if policy.__class__ == Policy: raise TypeError(f"Policy '{policy}' is root type") - @capture_span(name="PolicyEngine", span_type="policy.engine.build") def build(self) -> "PolicyEngine": - """Build task group""" - for binding in self._iter_bindings(): - self._check_policy_type(binding.policy) - key = cache_key(binding, self.request) - cached_policy = cache.get(key, None) - if cached_policy and self.use_cache: - LOGGER.debug( - "P_ENG: Taking result from cache", - policy=binding.policy, - cache_key=key, + """Build wrapper which monitors performance""" + with start_span(op="policy.engine.build") as span: + span: Span + span.set_data("pbm", self.__pbm) + span.set_data("request", self.request) + for binding in self._iter_bindings(): + self._check_policy_type(binding.policy) + key = cache_key(binding, self.request) + cached_policy = cache.get(key, None) + if cached_policy and self.use_cache: + LOGGER.debug( + "P_ENG: Taking result from cache", + policy=binding.policy, + cache_key=key, + ) + self.__cached_policies.append(cached_policy) + continue + LOGGER.debug("P_ENG: Evaluating policy", policy=binding.policy) + our_end, task_end = Pipe(False) + task = PolicyProcess(binding, self.request, task_end) + LOGGER.debug("P_ENG: Starting Process", policy=binding.policy) + task.start() + self.__processes.append( + PolicyProcessInfo(process=task, connection=our_end, binding=binding) ) - self.__cached_policies.append(cached_policy) - continue - LOGGER.debug("P_ENG: Evaluating policy", policy=binding.policy) - our_end, task_end = Pipe(False) - task = PolicyProcess(binding, self.request, task_end) - LOGGER.debug("P_ENG: Starting Process", policy=binding.policy) - task.start() - self.__processes.append( - PolicyProcessInfo(process=task, connection=our_end, binding=binding) - ) - # If 
all policies are cached, we have an empty list here. - for proc_info in self.__processes: - proc_info.process.join(proc_info.binding.timeout) - # Only call .recv() if no result is saved, otherwise we just deadlock here - if not proc_info.result: - proc_info.result = proc_info.connection.recv() - return self + # If all policies are cached, we have an empty list here. + for proc_info in self.__processes: + proc_info.process.join(proc_info.binding.timeout) + # Only call .recv() if no result is saved, otherwise we just deadlock here + if not proc_info.result: + proc_info.result = proc_info.connection.recv() + return self @property def result(self) -> PolicyResult: diff --git a/passbook/policies/process.py b/passbook/policies/process.py index 0c8dbf87b..d925c249d 100644 --- a/passbook/policies/process.py +++ b/passbook/policies/process.py @@ -4,7 +4,8 @@ from multiprocessing.connection import Connection from typing import Optional from django.core.cache import cache -from elasticapm import capture_span +from sentry_sdk import start_span +from sentry_sdk.tracing import Span from structlog import get_logger from passbook.policies.exceptions import PolicyException @@ -45,35 +46,38 @@ class PolicyProcess(Process): if connection: self.connection = connection - @capture_span(name="PolicyEngine", span_type="policy.process.execute") def execute(self) -> PolicyResult: """Run actual policy, returns result""" - LOGGER.debug( - "P_ENG(proc): Running policy", - policy=self.binding.policy, - user=self.request.user, - process="PolicyProcess", - ) - try: - policy_result = self.binding.policy.passes(self.request) - except PolicyException as exc: - LOGGER.debug("P_ENG(proc): error", exc=exc) - policy_result = PolicyResult(False, str(exc)) - # Invert result if policy.negate is set - if self.binding.negate: - policy_result.passing = not policy_result.passing - LOGGER.debug( - "P_ENG(proc): Finished", - policy=self.binding.policy, - result=policy_result, - process="PolicyProcess", - passing=policy_result.passing, - user=self.request.user, - ) - key = cache_key(self.binding, self.request) - cache.set(key, policy_result) - LOGGER.debug("P_ENG(proc): Cached policy evaluation", key=key) - return policy_result + with start_span(op="policy.process.execute",) as span: + span: Span + span.set_data("policy", self.binding.policy) + span.set_data("request", self.request) + LOGGER.debug( + "P_ENG(proc): Running policy", + policy=self.binding.policy, + user=self.request.user, + process="PolicyProcess", + ) + try: + policy_result = self.binding.policy.passes(self.request) + except PolicyException as exc: + LOGGER.debug("P_ENG(proc): error", exc=exc) + policy_result = PolicyResult(False, str(exc)) + # Invert result if policy.negate is set + if self.binding.negate: + policy_result.passing = not policy_result.passing + LOGGER.debug( + "P_ENG(proc): Finished", + policy=self.binding.policy, + result=policy_result, + process="PolicyProcess", + passing=policy_result.passing, + user=self.request.user, + ) + key = cache_key(self.binding, self.request) + cache.set(key, policy_result) + LOGGER.debug("P_ENG(proc): Cached policy evaluation", key=key) + return policy_result def run(self): """Task wrapper to run policy checking""" diff --git a/passbook/providers/oauth2/views/authorize.py b/passbook/providers/oauth2/views/authorize.py index d80a036a5..d4ce95393 100644 --- a/passbook/providers/oauth2/views/authorize.py +++ b/passbook/providers/oauth2/views/authorize.py @@ -144,7 +144,11 @@ class OAuthAuthorizationParams: LOGGER.warning("Missing 
redirect uri.") raise RedirectUriError() if self.redirect_uri not in self.provider.redirect_uris.split(): - LOGGER.warning("Invalid redirect uri", redirect_uri=self.redirect_uri, excepted=self.provider.redirect_uris.split()) + LOGGER.warning( + "Invalid redirect uri", + redirect_uri=self.redirect_uri, + excepted=self.provider.redirect_uris.split(), + ) raise RedirectUriError() if not is_open_id and ( diff --git a/passbook/providers/oauth2/views/token.py b/passbook/providers/oauth2/views/token.py index 275604d13..6eaa325c7 100644 --- a/passbook/providers/oauth2/views/token.py +++ b/passbook/providers/oauth2/views/token.py @@ -110,7 +110,11 @@ class TokenParams: raise TokenError("invalid_grant") if self.redirect_uri not in self.provider.redirect_uris.split(): - LOGGER.warning("Invalid redirect uri", uri=self.redirect_uri, expected=self.provider.redirect_uris.split()) + LOGGER.warning( + "Invalid redirect uri", + uri=self.redirect_uri, + expected=self.provider.redirect_uris.split(), + ) raise TokenError("invalid_client") try: diff --git a/passbook/root/settings.py b/passbook/root/settings.py index ef69d3d5a..f6b924191 100644 --- a/passbook/root/settings.py +++ b/passbook/root/settings.py @@ -269,7 +269,7 @@ if CONFIG.y("postgresql.backup"): } # Sentry integration -_ERROR_REPORTING = CONFIG.y_bool("error_reporting", False) +_ERROR_REPORTING = CONFIG.y_bool("error_reporting.enabled", False) if not DEBUG and _ERROR_REPORTING: LOGGER.info("Error reporting is enabled.") sentry_init( @@ -278,21 +278,10 @@ if not DEBUG and _ERROR_REPORTING: send_default_pii=True, before_send=before_send, release="passbook@%s" % __version__, + traces_sample_rate=1.0, + environment=CONFIG.y("error_reporting.environment", "customer"), ) -_APM_ENABLED = CONFIG.y("apm.enabled", False) -if _APM_ENABLED: - INSTALLED_APPS.append("elasticapm.contrib.django") - ELASTIC_APM = { - "CLOUD_PROVIDER": False, - "DEBUG": DEBUG, - "SERVICE_NAME": "passbook", - "SERVICE_VERSION": __version__, - "SECRET_TOKEN": CONFIG.y("apm.secret_token", ""), - "SERVER_URL": CONFIG.y("apm.server_url", "http://localhost:8200"), - "VERIFY_SERVER_CERT": CONFIG.y_bool("apm.verify_server_cert", True), - } - # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.1/howto/static-files/ @@ -372,7 +361,6 @@ _LOGGING_HANDLER_MAP = { "grpc": LOG_LEVEL, "docker": "WARNING", "urllib3": "WARNING", - "elasticapm": "WARNING", } for handler_name, level in _LOGGING_HANDLER_MAP.items(): # pyright: reportGeneralTypeIssues=false