diff --git a/.gitignore b/.gitignore index 981265a803ef..b97cb687af4f 100644 --- a/.gitignore +++ b/.gitignore @@ -12,3 +12,4 @@ checkstyle.txt .env .direnv .envrc +.elasticbeanstalk/ \ No newline at end of file diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 20465af4a75e..5cf078b971c2 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -25,7 +25,7 @@ deploydevelop: - develop deployawsstaging: - image: twstuart/elasticbeanstalk-pipenv + image: bullettrain/elasticbeanstalk-pipenv stage: deploy-aws script: - export AWS_ACCESS_KEY_ID=$AWS_STAGING_ACCESS_KEY_ID @@ -45,7 +45,7 @@ deployawsstaging: - staging deployawsmaster: - image: twstuart/elasticbeanstalk-pipenv + image: bullettrain/elasticbeanstalk-pipenv stage: deploy-aws script: - export DATABASE_URL=$DATABASE_URL_PRODUCTION diff --git a/Pipfile b/Pipfile index 0cc2bb7a54a5..012bf70d8f5c 100644 --- a/Pipfile +++ b/Pipfile @@ -16,6 +16,7 @@ pylint = "*" "autopep8" = "*" pytest = "*" pytest-django = "*" +django-test-migrations = "*" [packages] appdirs = "*" @@ -33,9 +34,7 @@ sendgrid-django = "*" psycopg2-binary = "*" coreapi = "*" Django = "<3.0" -numpy = "*" django-simple-history = "*" -twisted = {version = "*",extras = ["tls"]} django-debug-toolbar = "*" google-api-python-client = "*" "oauth2client" = "*" @@ -46,8 +45,8 @@ chargebee = "*" python-http-client = "<3.2.0" # 3.2.0 is the latest but throws an error on installation saying that it's not found django-health-check = "*" django-storages = "*" -boto3 = "*" django-environ = "*" django-trench = "*" djoser = "*" influxdb-client = "*" +django-ordered-model = "*" diff --git a/Pipfile.lock b/Pipfile.lock index 8ff119aa0974..0fbc2a7df125 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "fec64739b3f80d9c137898778a1ff1adf96088bc06bc48739166954c2c17170f" + "sha256": "874f3d39c60f509470b7114a6175f82eeb3b02d29eda4d82053a3ca32d17de87" }, "pipfile-spec": 6, "requires": {}, @@ -16,87 +16,26 @@ "default": { "appdirs": { 
"hashes": [ - "sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92", - "sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e" + "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", + "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128" ], "index": "pypi", - "version": "==1.4.3" - }, - "attrs": { - "hashes": [ - "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", - "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" - ], - "version": "==19.3.0" - }, - "automat": { - "hashes": [ - "sha256:7979803c74610e11ef0c0d68a2942b152df52da55336e0c9d58daf1831cbdf33", - "sha256:b6feb6455337df834f6c9962d6ccf771515b7d939bca142b29c20c2376bc6111" - ], - "version": "==20.2.0" - }, - "boto3": { - "hashes": [ - "sha256:05f75d30aa10094eb96bba22b25b6005126de748188f196a5fffab8a76d821ac", - "sha256:f1ac7eb23ff8b1d7e314123668ff1e93b874dd396ac5424adc443d68bd8a6fbf" - ], - "index": "pypi", - "version": "==1.13.6" - }, - "botocore": { - "hashes": [ - "sha256:1f5e57f41f9f9400feffc62f17b517a601643ffec69f7ee927555604112cc012", - "sha256:b9c8e0aa07770b7b371d586db41eef46e70bfc4ab47f7a1ee1acd4e9c811c6c9" - ], - "version": "==1.16.6" + "version": "==1.4.4" }, "cachetools": { "hashes": [ - "sha256:1d057645db16ca7fe1f3bd953558897603d6f0b9c51ed9d11eb4d071ec4e2aab", - "sha256:de5d88f87781602201cde465d3afe837546663b168e8b39df67411b0bf10cefc" + "sha256:513d4ff98dd27f85743a8dc0e92f55ddb1b49e060c2d5961512855cda2c01a98", + "sha256:bbaa39c3dede00175df2dc2b03d0cf18dd2d32a7de7beb68072d13043c9edb20" ], - "version": "==4.1.0" + "markers": "python_version ~= '3.5'", + "version": "==4.1.1" }, "certifi": { "hashes": [ - "sha256:1d987a998c75633c40847cc966fcf5904906c920a7f17ef374f5aa4282abd304", - "sha256:51fcb31174be6e6664c5f69e3e1691a2d72a1a12e90f872cbdb1567eb47b6519" - ], - "version": "==2020.4.5.1" - }, - "cffi": { - "hashes": [ - 
"sha256:001bf3242a1bb04d985d63e138230802c6c8d4db3668fb545fb5005ddf5bb5ff", - "sha256:00789914be39dffba161cfc5be31b55775de5ba2235fe49aa28c148236c4e06b", - "sha256:028a579fc9aed3af38f4892bdcc7390508adabc30c6af4a6e4f611b0c680e6ac", - "sha256:14491a910663bf9f13ddf2bc8f60562d6bc5315c1f09c704937ef17293fb85b0", - "sha256:1cae98a7054b5c9391eb3249b86e0e99ab1e02bb0cc0575da191aedadbdf4384", - "sha256:2089ed025da3919d2e75a4d963d008330c96751127dd6f73c8dc0c65041b4c26", - "sha256:2d384f4a127a15ba701207f7639d94106693b6cd64173d6c8988e2c25f3ac2b6", - "sha256:337d448e5a725bba2d8293c48d9353fc68d0e9e4088d62a9571def317797522b", - "sha256:399aed636c7d3749bbed55bc907c3288cb43c65c4389964ad5ff849b6370603e", - "sha256:3b911c2dbd4f423b4c4fcca138cadde747abdb20d196c4a48708b8a2d32b16dd", - "sha256:3d311bcc4a41408cf5854f06ef2c5cab88f9fded37a3b95936c9879c1640d4c2", - "sha256:62ae9af2d069ea2698bf536dcfe1e4eed9090211dbaafeeedf5cb6c41b352f66", - "sha256:66e41db66b47d0d8672d8ed2708ba91b2f2524ece3dee48b5dfb36be8c2f21dc", - "sha256:675686925a9fb403edba0114db74e741d8181683dcf216be697d208857e04ca8", - "sha256:7e63cbcf2429a8dbfe48dcc2322d5f2220b77b2e17b7ba023d6166d84655da55", - "sha256:8a6c688fefb4e1cd56feb6c511984a6c4f7ec7d2a1ff31a10254f3c817054ae4", - "sha256:8c0ffc886aea5df6a1762d0019e9cb05f825d0eec1f520c51be9d198701daee5", - "sha256:95cd16d3dee553f882540c1ffe331d085c9e629499ceadfbda4d4fde635f4b7d", - "sha256:99f748a7e71ff382613b4e1acc0ac83bf7ad167fb3802e35e90d9763daba4d78", - "sha256:b8c78301cefcf5fd914aad35d3c04c2b21ce8629b5e4f4e45ae6812e461910fa", - "sha256:c420917b188a5582a56d8b93bdd8e0f6eca08c84ff623a4c16e809152cd35793", - "sha256:c43866529f2f06fe0edc6246eb4faa34f03fe88b64a0a9a942561c8e22f4b71f", - "sha256:cab50b8c2250b46fe738c77dbd25ce017d5e6fb35d3407606e7a4180656a5a6a", - "sha256:cef128cb4d5e0b3493f058f10ce32365972c554572ff821e175dbc6f8ff6924f", - "sha256:cf16e3cf6c0a5fdd9bc10c21687e19d29ad1fe863372b5543deaec1039581a30", - "sha256:e56c744aa6ff427a607763346e4170629caf7e48ead6921745986db3692f987f", 
- "sha256:e577934fc5f8779c554639376beeaa5657d54349096ef24abe8c74c5d9c117c3", - "sha256:f2b0fa0c01d8a0c7483afd9f31d7ecf2d71760ca24499c8697aeb5ca37dc090c" - ], - "version": "==1.14.0" + "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3", + "sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41" + ], + "version": "==2020.6.20" }, "chardet": { "hashes": [ @@ -118,13 +57,6 @@ ], "version": "==2.1.3" }, - "constantly": { - "hashes": [ - "sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35", - "sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d" - ], - "version": "==15.1.0" - }, "coreapi": { "hashes": [ "sha256:46145fcc1f7017c076a2ef684969b641d18a2991051fddec9458ad3f78ffc1cb", @@ -140,30 +72,6 @@ ], "version": "==0.0.4" }, - "cryptography": { - "hashes": [ - "sha256:091d31c42f444c6f519485ed528d8b451d1a0c7bf30e8ca583a0cac44b8a0df6", - "sha256:18452582a3c85b96014b45686af264563e3e5d99d226589f057ace56196ec78b", - "sha256:1dfa985f62b137909496e7fc182dac687206d8d089dd03eaeb28ae16eec8e7d5", - "sha256:1e4014639d3d73fbc5ceff206049c5a9a849cefd106a49fa7aaaa25cc0ce35cf", - "sha256:22e91636a51170df0ae4dcbd250d318fd28c9f491c4e50b625a49964b24fe46e", - "sha256:3b3eba865ea2754738616f87292b7f29448aec342a7c720956f8083d252bf28b", - "sha256:651448cd2e3a6bc2bb76c3663785133c40d5e1a8c1a9c5429e4354201c6024ae", - "sha256:726086c17f94747cedbee6efa77e99ae170caebeb1116353c6cf0ab67ea6829b", - "sha256:844a76bc04472e5135b909da6aed84360f522ff5dfa47f93e3dd2a0b84a89fa0", - "sha256:88c881dd5a147e08d1bdcf2315c04972381d026cdb803325c03fe2b4a8ed858b", - "sha256:96c080ae7118c10fcbe6229ab43eb8b090fccd31a09ef55f83f690d1ef619a1d", - "sha256:a0c30272fb4ddda5f5ffc1089d7405b7a71b0b0f51993cb4e5dbb4590b2fc229", - "sha256:bb1f0281887d89617b4c68e8db9a2c42b9efebf2702a3c5bf70599421a8623e3", - "sha256:c447cf087cf2dbddc1add6987bbe2f767ed5317adb2d08af940db517dd704365", - "sha256:c4fd17d92e9d55b84707f4fd09992081ba872d1a0c610c109c18e062e06a2e55", - 
"sha256:d0d5aeaedd29be304848f1c5059074a740fa9f6f26b84c5b63e8b29e73dfc270", - "sha256:daf54a4b07d67ad437ff239c8a4080cfd1cc7213df57d33c97de7b4738048d5e", - "sha256:e993468c859d084d5579e2ebee101de8f5a27ce8e2159959b6673b418fd8c785", - "sha256:f118a95c7480f5be0df8afeb9a11bd199aa20afab7a96bcf20409b411a3a85f0" - ], - "version": "==2.9.2" - }, "dj-database-url": { "hashes": [ "sha256:4aeaeb1f573c74835b0686a2b46b85990571159ffc21aa57ecd4d1e1cb334163", @@ -174,19 +82,19 @@ }, "django": { "hashes": [ - "sha256:69897097095f336d5aeef45b4103dceae51c00afa6d3ae198a2a18e519791b7a", - "sha256:6ecd229e1815d4fc5240fc98f1cca78c41e7a8cd3e3f2eefadc4735031077916" + "sha256:edf0ecf6657713b0435b6757e6069466925cae70d634a3283c96b80c01e06191", + "sha256:f2250bd35d0f6c23e930c544629934144e5dd39a4c06092e1050c731c1712ba8" ], "index": "pypi", - "version": "==2.2.12" + "version": "==2.2.14" }, "django-cors-headers": { "hashes": [ - "sha256:a5960addecc04527ab26617e51b8ed42f0adab4594b24bb0f3c33e2bd3857c3f", - "sha256:a785b5f446f6635810776d9f5f5d23e6a2a2f728ea982648370afaf0dfdf2627" + "sha256:5240062ef0b16668ce8a5f43324c388d65f5439e1a30e22c38684d5ddaff0d15", + "sha256:f5218f2f0bb1210563ff87687afbf10786e080d8494a248e705507ebd92d7153" ], "index": "pypi", - "version": "==3.2.1" + "version": "==3.4.0" }, "django-debug-toolbar": { "hashes": [ @@ -212,13 +120,21 @@ "index": "pypi", "version": "==3.12.1" }, + "django-ordered-model": { + "hashes": [ + "sha256:29af6624cf3505daaf0df00e2df1d0726dd777b95e08f304d5ad0264092aa934", + "sha256:d867166ed4dd12501139e119cbbc5b4d19798a3e72740aef0af4879ba97102cf" + ], + "index": "pypi", + "version": "==3.4.1" + }, "django-simple-history": { "hashes": [ - "sha256:1b970298e743270e5715c88b17209421c6954603d31da5cd9a11825b016ebd26", - "sha256:8585bd0d0145df816657348ad62f753444b3b9a970a2064fb92dc4cb876c5049" + "sha256:b46191e97bb59b82e0ef20ae316021f7337fec50e5acbbd5a757b37910759af0", + "sha256:d147d441165b802082647c86ca14776fe3574986053bbba90a9eaee1b315b826" ], "index": "pypi", - 
"version": "==2.10.0" + "version": "==2.11.0" }, "django-storages": { "hashes": [ @@ -266,14 +182,6 @@ "index": "pypi", "version": "==2.0.3" }, - "docutils": { - "hashes": [ - "sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0", - "sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827", - "sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99" - ], - "version": "==0.15.2" - }, "drf-nested-routers": { "hashes": [ "sha256:46e5c3abc15c782cafafd7d75028e8f9121bbc6228e3599bbb48a3daa4585034", @@ -292,25 +200,27 @@ }, "google-api-core": { "hashes": [ - "sha256:c0e430658ed6be902d7ba7095fb0a9cac810270d71bf7ac4484e76c300407aae", - "sha256:e4082a0b479dc2dee2f8d7b80ea8b5d0184885b773caab15ab1836277a01d689" + "sha256:7b65e8e5ee59bd7517eab2bf9b3008e7b50fd9fb591d4efd780ead6859cd904b", + "sha256:fea9a434068406ddabe2704988d24d6c5bde3ecfc40823a34f43892d017b14f6" ], - "version": "==1.17.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.21.0" }, "google-api-python-client": { "hashes": [ - "sha256:8dd35a3704650c2db44e6cf52abdaf9de71f409c93c56bbe48a321ab5e14ebad", - "sha256:bf482c13fb41a6d01770f9d62be6b33fdcd41d68c97f2beb9be02297bdd9e725" + "sha256:220349ce189a85229fc46875d467101318495a4a735c0ff2f165b9bdbc7511a0", + "sha256:f8e73dd6433f8218922c952e09adc4fc0dbc360f9959cf427565a16e8d4c5d25" ], "index": "pypi", - "version": "==1.8.2" + "version": "==1.9.3" }, "google-auth": { "hashes": [ - "sha256:2243db98475f7f2033c41af5185333cbf13780e8f5f96eaadd997c6f34181dcc", - "sha256:23cfeeb71d98b7f51cd33650779d35291aeb8b23384976d497805d12eefc6e9b" + "sha256:5e3f540b7b0b892000d542cea6b818b837c230e9a4db9337bb2973bcae0fc078", + "sha256:d6b390d3bb0969061ffec7e5766c45c1b39e13c302691e35029f1ad1ccd8ca3b" ], - "version": "==1.14.2" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.18.0" }, "google-auth-httplib2": { "hashes": [ @@ -321,9 
+231,11 @@ }, "googleapis-common-protos": { "hashes": [ - "sha256:013c91704279119150e44ef770086fdbba158c1f978a6402167d47d5409e226e" + "sha256:560716c807117394da12cecb0a54da5a451b5cf9866f1d37e9a5e2329a665351", + "sha256:c8961760f5aad9a711d37b675be103e0cc4e9a39327e0d6d857872f698403e24" ], - "version": "==1.51.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.52.0" }, "gunicorn": { "hashes": [ @@ -335,46 +247,34 @@ }, "httplib2": { "hashes": [ - "sha256:39dd15a333f67bfb70798faa9de8a6e99c819da6ad82b77f9a259a5c7b1225a2", - "sha256:6d9722decd2deacd486ef10c5dd5e2f120ca3ba8736842b90509afcdc16488b1" + "sha256:8af66c1c52c7ffe1aa5dc4bcd7c769885254b0756e6e69f953c7f0ab49a70ba3", + "sha256:ca2914b015b6247791c4866782fa6042f495b94401a0f0bd3e1d6e0ba2236782" ], - "version": "==0.17.3" - }, - "hyperlink": { - "hashes": [ - "sha256:4288e34705da077fada1111a24a0aa08bb1e76699c9ce49876af722441845654", - "sha256:ab4a308feb039b04f855a020a6eda3b18ca5a68e6d8f8c899cbe9e653721d04f" - ], - "version": "==19.0.0" + "version": "==0.18.1" }, "idna": { "hashes": [ - "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb", - "sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa" - ], - "version": "==2.9" - }, - "incremental": { - "hashes": [ - "sha256:717e12246dddf231a349175f48d74d93e2897244939173b01974ab6661406b9f", - "sha256:7b751696aaf36eebfab537e458929e194460051ccad279c72b755a167eebd4b3" + "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", + "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], - "version": "==17.5.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.10" }, "inflection": { "hashes": [ - "sha256:32a5c3341d9583ec319548b9015b7fbdf8c429cbcb575d326c33ae3a0e90d52c", - "sha256:9a15d3598f01220e93f2207c432cfede50daff53137ce660fb8be838ef1ca6cc" + 
"sha256:88b101b2668a1d81d6d72d4c2018e53bc6c7fc544c987849da1c7f77545c3bc9", + "sha256:f576e85132d34f5bf7df5183c2c6f94cfb32e528f53065345cf71329ba0b8924" ], - "version": "==0.4.0" + "markers": "python_version >= '3.5'", + "version": "==0.5.0" }, "influxdb-client": { "hashes": [ - "sha256:1ba837b941b7e1f3175f2751f1a7fd050106b57a577a76f87846457d25dd0a85", - "sha256:41a737032b74e25ee211f0599fc008db5fe9668f13008b672eef9c0fa00625e2" + "sha256:e5393a9caeeee6f9718aa3505eaf212199cab0567b693187823f8e3e97545e3a", + "sha256:ec2dd4911ada30ba2c88a1e2b7f1b24ec0e70934497e4b99b3ede6e6a8ed7e97" ], "index": "pypi", - "version": "==1.6.0" + "version": "==1.8.0" }, "itypes": { "hashes": [ @@ -388,15 +288,9 @@ "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0", "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==2.11.2" }, - "jmespath": { - "hashes": [ - "sha256:695cb76fa78a10663425d5b73ddc5714eb711157e52704d69be03b1a02ba4fec", - "sha256:cca55c8d153173e21baa59983015ad0daf603f9cb799904ff057bfb8ff8dc2d9" - ], - "version": "==0.9.5" - }, "markupsafe": { "hashes": [ "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", @@ -433,35 +327,9 @@ "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.1.1" }, - "numpy": { - "hashes": [ - "sha256:00d7b54c025601e28f468953d065b9b121ddca7fff30bed7be082d3656dd798d", - "sha256:02ec9582808c4e48be4e93cd629c855e644882faf704bc2bd6bbf58c08a2a897", - "sha256:0e6f72f7bb08f2f350ed4408bb7acdc0daba637e73bce9f5ea2b207039f3af88", - "sha256:1be2e96314a66f5f1ce7764274327fd4fb9da58584eaff00b5a5221edefee7d6", - "sha256:2466fbcf23711ebc5daa61d28ced319a6159b260a18839993d871096d66b93f7", - 
"sha256:2b573fcf6f9863ce746e4ad00ac18a948978bb3781cffa4305134d31801f3e26", - "sha256:3f0dae97e1126f529ebb66f3c63514a0f72a177b90d56e4bce8a0b5def34627a", - "sha256:50fb72bcbc2cf11e066579cb53c4ca8ac0227abb512b6cbc1faa02d1595a2a5d", - "sha256:57aea170fb23b1fd54fa537359d90d383d9bf5937ee54ae8045a723caa5e0961", - "sha256:709c2999b6bd36cdaf85cf888d8512da7433529f14a3689d6e37ab5242e7add5", - "sha256:7d59f21e43bbfd9a10953a7e26b35b6849d888fc5a331fa84a2d9c37bd9fe2a2", - "sha256:904b513ab8fbcbdb062bed1ce2f794ab20208a1b01ce9bd90776c6c7e7257032", - "sha256:96dd36f5cdde152fd6977d1bbc0f0561bccffecfde63cd397c8e6033eb66baba", - "sha256:9933b81fecbe935e6a7dc89cbd2b99fea1bf362f2790daf9422a7bb1dc3c3085", - "sha256:bbcc85aaf4cd84ba057decaead058f43191cc0e30d6bc5d44fe336dc3d3f4509", - "sha256:dccd380d8e025c867ddcb2f84b439722cf1f23f3a319381eac45fd077dee7170", - "sha256:e22cd0f72fc931d6abc69dc7764484ee20c6a60b0d0fee9ce0426029b1c1bdae", - "sha256:ed722aefb0ebffd10b32e67f48e8ac4c5c4cf5d3a785024fdf0e9eb17529cd9d", - "sha256:efb7ac5572c9a57159cf92c508aad9f856f1cb8e8302d7fdb99061dbe52d712c", - "sha256:efdba339fffb0e80fcc19524e4fdbda2e2b5772ea46720c44eaac28096d60720", - "sha256:f22273dd6a403ed870207b853a856ff6327d5cbce7a835dfa0645b3fc00273ec" - ], - "index": "pypi", - "version": "==1.18.4" - }, "oauth2client": { "hashes": [ "sha256:b8a81cc5d60e2d364f0b1b98f958dbd472887acaf1a5b05e21c28c31a2d6d3ac", @@ -472,34 +340,34 @@ }, "packaging": { "hashes": [ - "sha256:3c292b474fda1671ec57d46d739d072bfd495a4f51ad01a055121d81e952b7a3", - "sha256:82f77b9bee21c1bafbf35a84905d604d5d1223801d639cf3ed140bd651c08752" + "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8", + "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181" ], "index": "pypi", - "version": "==20.3" + "version": "==20.4" }, "protobuf": { "hashes": [ - "sha256:0bae429443cc4748be2aadfdaf9633297cfaeb24a9a02d0ab15849175ce90fab", - "sha256:24e3b6ad259544d717902777b33966a1a069208c885576254c112663e6a5bb0f", - 
"sha256:310a7aca6e7f257510d0c750364774034272538d51796ca31d42c3925d12a52a", - "sha256:52e586072612c1eec18e1174f8e3bb19d08f075fc2e3f91d3b16c919078469d0", - "sha256:73152776dc75f335c476d11d52ec6f0f6925774802cd48d6189f4d5d7fe753f4", - "sha256:7774bbbaac81d3ba86de646c39f154afc8156717972bf0450c9dbfa1dc8dbea2", - "sha256:82d7ac987715d8d1eb4068bf997f3053468e0ce0287e2729c30601feb6602fee", - "sha256:8eb9c93798b904f141d9de36a0ba9f9b73cc382869e67c9e642c0aba53b0fc07", - "sha256:adf0e4d57b33881d0c63bb11e7f9038f98ee0c3e334c221f0858f826e8fb0151", - "sha256:c40973a0aee65422d8cb4e7d7cbded95dfeee0199caab54d5ab25b63bce8135a", - "sha256:c77c974d1dadf246d789f6dad1c24426137c9091e930dbf50e0a29c1fcf00b1f", - "sha256:dd9aa4401c36785ea1b6fff0552c674bdd1b641319cb07ed1fe2392388e9b0d7", - "sha256:e11df1ac6905e81b815ab6fd518e79be0a58b5dc427a2cf7208980f30694b956", - "sha256:e2f8a75261c26b2f5f3442b0525d50fd79a71aeca04b5ec270fc123536188306", - "sha256:e512b7f3a4dd780f59f1bf22c302740e27b10b5c97e858a6061772668cd6f961", - "sha256:ef2c2e56aaf9ee914d3dccc3408d42661aaf7d9bb78eaa8f17b2e6282f214481", - "sha256:fac513a9dc2a74b99abd2e17109b53945e364649ca03d9f7a0b96aa8d1807d0a", - "sha256:fdfb6ad138dbbf92b5dbea3576d7c8ba7463173f7d2cb0ca1bd336ec88ddbd80" - ], - "version": "==3.11.3" + "sha256:304e08440c4a41a0f3592d2a38934aad6919d692bb0edfb355548786728f9a5e", + "sha256:49ef8ab4c27812a89a76fa894fe7a08f42f2147078392c0dee51d4a444ef6df5", + "sha256:50b5fee674878b14baea73b4568dc478c46a31dd50157a5b5d2f71138243b1a9", + "sha256:5524c7020eb1fb7319472cb75c4c3206ef18b34d6034d2ee420a60f99cddeb07", + "sha256:612bc97e42b22af10ba25e4140963fbaa4c5181487d163f4eb55b0b15b3dfcd2", + "sha256:6f349adabf1c004aba53f7b4633459f8ca8a09654bf7e69b509c95a454755776", + "sha256:85b94d2653b0fdf6d879e39d51018bf5ccd86c81c04e18a98e9888694b98226f", + "sha256:87535dc2d2ef007b9d44e309d2b8ea27a03d2fa09556a72364d706fcb7090828", + "sha256:a7ab28a8f1f043c58d157bceb64f80e4d2f7f1b934bc7ff5e7f7a55a337ea8b0", + 
"sha256:a96f8fc625e9ff568838e556f6f6ae8eca8b4837cdfb3f90efcb7c00e342a2eb", + "sha256:b5a114ea9b7fc90c2cc4867a866512672a47f66b154c6d7ee7e48ddb68b68122", + "sha256:be04fe14ceed7f8641e30f36077c1a654ff6f17d0c7a5283b699d057d150d82a", + "sha256:bff02030bab8b969f4de597543e55bd05e968567acb25c0a87495a31eb09e925", + "sha256:c9ca9f76805e5a637605f171f6c4772fc4a81eced4e2f708f79c75166a2c99ea", + "sha256:e1464a4a2cf12f58f662c8e6421772c07947266293fb701cb39cd9c1e183f63c", + "sha256:e72736dd822748b0721f41f9aaaf6a5b6d5cfc78f6c8690263aef8bba4457f0e", + "sha256:eafe9fa19fcefef424ee089fb01ac7177ff3691af7cc2ae8791ae523eb6ca907", + "sha256:f4b73736108a416c76c17a8a09bc73af3d91edaa26c682aaa460ef91a47168d3" + ], + "version": "==3.12.2" }, "psycopg2-binary": { "hashes": [ @@ -539,32 +407,40 @@ }, "pyasn1": { "hashes": [ + "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359", + "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576", + "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf", + "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7", "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d", - "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba" + "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00", + "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8", + "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86", + "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12", + "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776", + "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba", + "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2", + "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3" ], "version": "==0.4.8" }, "pyasn1-modules": { "hashes": [ + 
"sha256:0845a5582f6a02bb3e1bde9ecfc4bfcae6ec3210dd270522fee602365430c3f8", + "sha256:0fe1b68d1e486a1ed5473f1302bd991c1611d319bba158e98b106ff86e1d7199", + "sha256:15b7c67fabc7fc240d87fb9aabf999cf82311a6d6fb2c70d00d3d0604878c811", + "sha256:426edb7a5e8879f1ec54a1864f16b882c2837bfd06eee62f2c982315ee2473ed", + "sha256:65cebbaffc913f4fe9e4808735c95ea22d7a7775646ab690518c056784bc21b4", "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e", - "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74" + "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74", + "sha256:a99324196732f53093a84c4369c996713eb8c89d360a496b599fb1a9c47fc3eb", + "sha256:b80486a6c77252ea3a3e9b1e360bc9cf28eaac41263d173c032581ad2f20fe45", + "sha256:c29a5e5cc7a3f05926aff34e097e84f8589cd790ce0ed41b67aed6857b26aafd", + "sha256:cbac4bc38d117f2a49aeedec4407d23e8866ea4ac27ff2cf7fb3e5b570df19e0", + "sha256:f39edd8c4ecaa4556e989147ebf219227e2cd2e8a43c7e7fcb1f1c18c5fd6a3d", + "sha256:fe0644d9ab041506b62782e92b06b8c68cca799e1a9636ec398675459e031405" ], "version": "==0.2.8" }, - "pycparser": { - "hashes": [ - "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0", - "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705" - ], - "version": "==2.20" - }, - "pyhamcrest": { - "hashes": [ - "sha256:412e00137858f04bde0729913874a48485665f2d36fe9ee449f26be864af9316", - "sha256:7ead136e03655af85069b6f47b23eb7c3e5c221aa9f022a4fbb499f5b7308f29" - ], - "version": "==2.0.2" - }, "pyjwt": { "hashes": [ "sha256:5c6eca3c2940464d106b99ba83b00c6add741c9becaec087fb7ccdefea71350e", @@ -572,13 +448,6 @@ ], "version": "==1.7.1" }, - "pyopenssl": { - "hashes": [ - "sha256:621880965a720b8ece2f1b2f54ea2071966ab00e2970ad2ce11d596102063504", - "sha256:9a24494b2602aaf402be5c9e30a0b82d4a5c67528fe8fb475e3f3bc00dd69507" - ], - "version": "==19.1.0" - }, "pyotp": { "hashes": [ "sha256:c88f37fd47541a580b744b42136f387cdad481b560ef410c0d85c957eb2a2bc0", @@ 
-599,6 +468,7 @@ "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.8.1" }, "python-http-client": { @@ -619,18 +489,20 @@ }, "requests": { "hashes": [ - "sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee", - "sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6" + "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b", + "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898" ], "index": "pypi", - "version": "==2.23.0" + "version": "==2.24.0" }, "rsa": { "hashes": [ - "sha256:14ba45700ff1ec9eeb206a2ce76b32814958a98e372006c8fb76ba820211be66", - "sha256:1a836406405730121ae9823e19c6e806c62bbad73f890574fff50efa4122c487" + "sha256:109ea5a66744dd859bf16fe904b8d8b627adafb9408753161e766a92e7d681fa", + "sha256:6166864e23d6b5195a5cfed6cd9fed0fe774e226d8f854fcb23b7bbef0350233", + "sha256:23778f5523461cf86ae075f9482a99317f362bca752ae57cb118044066f4026f" ], - "version": "==4.0" + "markers": "python_version >= '3'", + "version": "==4.6" }, "ruamel.yaml": { "hashes": [ @@ -661,22 +533,16 @@ "sha256:ed5b3698a2bb241b7f5cbbe277eaa7fe48b07a58784fba4f75224fd066d253ad", "sha256:f9dcc1ae73f36e8059589b601e8e4776b9976effd76c21ad6a855a74318efd6e" ], - "markers": "platform_python_implementation == 'CPython' and python_version < '3.9'", + "markers": "python_version < '3.9' and platform_python_implementation == 'CPython'", "version": "==0.2.0" }, "rx": { "hashes": [ "sha256:aaf409848e24dd514926eb8467e2764762bfd258325717fca4628d32d8721252" ], + "markers": "python_full_version >= '3.6.0'", "version": "==3.1.0" }, - "s3transfer": { - "hashes": [ - "sha256:2482b4259524933a022d59da830f51bd746db62f047d6eb213f2f8855dcb8a13", - "sha256:921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db" - ], - "version": 
"==0.3.3" - }, "sendgrid": { "hashes": [ "sha256:9fba62068dd13922004b6a1676e21c6435709aaf7c2b978cdf1206e3d2196c60", @@ -691,13 +557,6 @@ "index": "pypi", "version": "==4.2.0" }, - "service-identity": { - "hashes": [ - "sha256:001c0707759cb3de7e49c078a7c0c9cd12594161d3bf06b9c254fdcb1a60dc36", - "sha256:0858a54aabc5b459d1aafa8a518ed2081a285087f349fe3e55197989232e2e2d" - ], - "version": "==18.1.0" - }, "shortuuid": { "hashes": [ "sha256:3c11d2007b915c43bee3e10625f068d8a349e04f0d81f08f5fa08507427ebf1f", @@ -708,11 +567,11 @@ }, "six": { "hashes": [ - "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", - "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" + "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", + "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], "index": "pypi", - "version": "==1.14.0" + "version": "==1.15.0" }, "smsapi-client": { "hashes": [ @@ -726,51 +585,21 @@ "sha256:022fb9c87b524d1f7862b3037e541f68597a730a8843245c349fc93e1643dc4e", "sha256:e162203737712307dfe78860cc56c8da8a852ab2ee33750e33aeadf38d12c548" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.3.1" }, "twilio": { "hashes": [ - "sha256:7ef6ad19251fee6a41f1184e97b4fcb62f4a8c0e6f4b78797e40e9c92aed006d" - ], - "version": "==6.39.0" - }, - "twisted": { - "extras": [ - "tls" - ], - "hashes": [ - "sha256:040eb6641125d2a9a09cf198ec7b83dd8858c6f51f6770325ed9959c00f5098f", - "sha256:147780b8caf21ba2aef3688628eaf13d7e7fe02a86747cd54bfaf2140538f042", - "sha256:158ddb80719a4813d292293ac44ba41d8b56555ed009d90994a278237ee63d2c", - "sha256:2182000d6ffc05d269e6c03bfcec8b57e20259ca1086180edaedec3f1e689292", - "sha256:25ffcf37944bdad4a99981bc74006d735a678d2b5c193781254fbbb6d69e3b22", - "sha256:3281d9ce889f7b21bdb73658e887141aa45a102baf3b2320eafcfba954fcefec", - "sha256:356e8d8dd3590e790e3dba4db139eb8a17aca64b46629c622e1b1597a4a92478", - 
"sha256:70952c56e4965b9f53b180daecf20a9595cf22b8d0935cd3bd664c90273c3ab2", - "sha256:7408c6635ee1b96587289283ebe90ee15dbf9614b05857b446055116bc822d29", - "sha256:7c547fd0215db9da8a1bc23182b309e84a232364cc26d829e9ee196ce840b114", - "sha256:894f6f3cfa57a15ea0d0714e4283913a5f2511dbd18653dd148eba53b3919797", - "sha256:94ac3d55a58c90e2075c5fe1853f2aa3892b73e3bf56395f743aefde8605eeaa", - "sha256:a58e61a2a01e5bcbe3b575c0099a2bcb8d70a75b1a087338e0c48dd6e01a5f15", - "sha256:c09c47ff9750a8e3aa60ad169c4b95006d455a29b80ad0901f031a103b2991cd", - "sha256:ca3a0b8c9110800e576d89b5337373e52018b41069bc879f12fa42b7eb2d0274", - "sha256:cd1dc5c85b58494138a3917752b54bb1daa0045d234b7c132c37a61d5483ebad", - "sha256:cdbc4c7f0cd7a2218b575844e970f05a1be1861c607b0e048c9bceca0c4d42f7", - "sha256:d267125cc0f1e8a0eed6319ba4ac7477da9b78a535601c49ecd20c875576433a", - "sha256:d72c55b5d56e176563b91d11952d13b01af8725c623e498db5507b6614fc1e10", - "sha256:d95803193561a243cb0401b0567c6b7987d3f2a67046770e1dccd1c9e49a9780", - "sha256:e92703bed0cc21d6cb5c61d66922b3b1564015ca8a51325bd164a5e33798d504", - "sha256:f058bd0168271de4dcdc39845b52dd0a4a2fecf5f1246335f13f5e96eaebb467", - "sha256:f3c19e5bd42bbe4bf345704ad7c326c74d3fd7a1b3844987853bef180be638d4" + "sha256:1ff3b66992ebb59411794f669eab7f11bcfaacc5549eec1afb47af1c755872ac" ], - "index": "pypi", - "version": "==20.3.0" + "version": "==6.43.0" }, "uritemplate": { "hashes": [ "sha256:07620c3f3f8eed1f12600845892b0e036a2420acf513c53f7de0abd911a5894f", "sha256:5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==3.0.1" }, "urllib3": { @@ -778,7 +607,7 @@ "sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527", "sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115" ], - "markers": "python_version != '3.4'", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and 
python_version < '4'", "version": "==1.25.9" }, "whitenoise": { @@ -791,92 +620,58 @@ }, "yubico-client": { "hashes": [ - "sha256:1d74c6341210c94b639f7c7c8930550e73d5c1be60402e418e9dc95e038f8527", - "sha256:c90c47ec4596f0508f2d202c9c216ca3854284f8c5833dc814c36089794e0aa2" - ], - "version": "==1.12.0" - }, - "zope.interface": { - "hashes": [ - "sha256:0103cba5ed09f27d2e3de7e48bb320338592e2fabc5ce1432cf33808eb2dfd8b", - "sha256:14415d6979356629f1c386c8c4249b4d0082f2ea7f75871ebad2e29584bd16c5", - "sha256:1ae4693ccee94c6e0c88a4568fb3b34af8871c60f5ba30cf9f94977ed0e53ddd", - "sha256:1b87ed2dc05cb835138f6a6e3595593fea3564d712cb2eb2de963a41fd35758c", - "sha256:269b27f60bcf45438e8683269f8ecd1235fa13e5411de93dae3b9ee4fe7f7bc7", - "sha256:27d287e61639d692563d9dab76bafe071fbeb26818dd6a32a0022f3f7ca884b5", - "sha256:39106649c3082972106f930766ae23d1464a73b7d30b3698c986f74bf1256a34", - "sha256:40e4c42bd27ed3c11b2c983fecfb03356fae1209de10686d03c02c8696a1d90e", - "sha256:461d4339b3b8f3335d7e2c90ce335eb275488c587b61aca4b305196dde2ff086", - "sha256:4f98f70328bc788c86a6a1a8a14b0ea979f81ae6015dd6c72978f1feff70ecda", - "sha256:558a20a0845d1a5dc6ff87cd0f63d7dac982d7c3be05d2ffb6322a87c17fa286", - "sha256:562dccd37acec149458c1791da459f130c6cf8902c94c93b8d47c6337b9fb826", - "sha256:5e86c66a6dea8ab6152e83b0facc856dc4d435fe0f872f01d66ce0a2131b7f1d", - "sha256:60a207efcd8c11d6bbeb7862e33418fba4e4ad79846d88d160d7231fcb42a5ee", - "sha256:645a7092b77fdbc3f68d3cc98f9d3e71510e419f54019d6e282328c0dd140dcd", - "sha256:6874367586c020705a44eecdad5d6b587c64b892e34305bb6ed87c9bbe22a5e9", - "sha256:74bf0a4f9091131de09286f9a605db449840e313753949fe07c8d0fe7659ad1e", - "sha256:7b726194f938791a6691c7592c8b9e805fc6d1b9632a833b9c0640828cd49cbc", - "sha256:8149ded7f90154fdc1a40e0c8975df58041a6f693b8f7edcd9348484e9dc17fe", - "sha256:8cccf7057c7d19064a9e27660f5aec4e5c4001ffcf653a47531bde19b5aa2a8a", - "sha256:911714b08b63d155f9c948da2b5534b223a1a4fc50bb67139ab68b277c938578", - 
"sha256:a5f8f85986197d1dd6444763c4a15c991bfed86d835a1f6f7d476f7198d5f56a", - "sha256:a744132d0abaa854d1aad50ba9bc64e79c6f835b3e92521db4235a1991176813", - "sha256:af2c14efc0bb0e91af63d00080ccc067866fb8cbbaca2b0438ab4105f5e0f08d", - "sha256:b054eb0a8aa712c8e9030065a59b5e6a5cf0746ecdb5f087cca5ec7685690c19", - "sha256:b0becb75418f8a130e9d465e718316cd17c7a8acce6fe8fe07adc72762bee425", - "sha256:b1d2ed1cbda2ae107283befd9284e650d840f8f7568cb9060b5466d25dc48975", - "sha256:ba4261c8ad00b49d48bbb3b5af388bb7576edfc0ca50a49c11dcb77caa1d897e", - "sha256:d1fe9d7d09bb07228650903d6a9dc48ea649e3b8c69b1d263419cc722b3938e8", - "sha256:d7804f6a71fc2dda888ef2de266727ec2f3915373d5a785ed4ddc603bbc91e08", - "sha256:da2844fba024dd58eaa712561da47dcd1e7ad544a257482392472eae1c86d5e5", - "sha256:dcefc97d1daf8d55199420e9162ab584ed0893a109f45e438b9794ced44c9fd0", - "sha256:dd98c436a1fc56f48c70882cc243df89ad036210d871c7427dc164b31500dc11", - "sha256:e74671e43ed4569fbd7989e5eecc7d06dc134b571872ab1d5a88f4a123814e9f", - "sha256:eb9b92f456ff3ec746cd4935b73c1117538d6124b8617bc0fe6fda0b3816e345", - "sha256:ebb4e637a1fb861c34e48a00d03cffa9234f42bef923aec44e5625ffb9a8e8f9", - "sha256:ef739fe89e7f43fb6494a43b1878a36273e5924869ba1d866f752c5812ae8d58", - "sha256:f40db0e02a8157d2b90857c24d89b6310f9b6c3642369852cdc3b5ac49b92afc", - "sha256:f68bf937f113b88c866d090fea0bc52a098695173fc613b055a17ff0cf9683b6", - "sha256:fb55c182a3f7b84c1a2d6de5fa7b1a05d4660d866b91dbf8d74549c57a1499e8" - ], - "version": "==5.1.0" + "sha256:59d818661f638e3f041fae44ba2c0569e4eb2a17865fa7cc9ad6577185c4d185", + "sha256:e3b86cd2a123105edfacad40551c7b26e9c1193d81ffe168ee704ebfd3d11162" + ], + "version": "==1.13.0" } }, "develop": { "astroid": { "hashes": [ - "sha256:4c17cea3e592c21b6e222f673868961bad77e1f985cb1694ed077475a89229c1", - "sha256:d8506842a3faf734b81599c8b98dcc423de863adcc1999248480b18bd31a0f38" + "sha256:2f4078c2a41bf377eea06d71c9d2ba4eb8f6b1af2135bec27bbbb7d8f12bb703", + 
"sha256:bc58d83eb610252fd8de6363e39d4f1d0619c894b0ed24603b881c02e64c7386" ], - "version": "==2.4.1" + "markers": "python_version >= '3.5'", + "version": "==2.4.2" }, "attrs": { "hashes": [ "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==19.3.0" }, "autopep8": { "hashes": [ - "sha256:152fd8fe47d02082be86e05001ec23d6f420086db56b17fc883f3f965fb34954" + "sha256:60fd8c4341bab59963dafd5d2a566e94f547e660b9b396f772afe67d8481dbf0" + ], + "index": "pypi", + "version": "==1.5.3" + }, + "django-test-migrations": { + "hashes": [ + "sha256:d120d0287e1dd82ed62fe083747a1e99c0398d56beda52594e8391b94a41bef5", + "sha256:e5747e2ad0b7e4d3b8d9ccd40d414b0f186316d3757af022b4bbdec700897521" ], "index": "pypi", - "version": "==1.5.2" + "version": "==1.0.0" }, "importlib-metadata": { "hashes": [ - "sha256:2a688cbaa90e0cc587f1df48bdc97a6eadccdcd9c35fb3f976a09e3b5016d90f", - "sha256:34513a8a0c4962bc66d35b359558fd8a5e10cd472d37aec5f66858addef32c1e" + "sha256:90bb658cdbbf6d1735b6341ce708fc7024a3e14e99ffdc5783edea9f9b077f83", + "sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070" ], "markers": "python_version < '3.8'", - "version": "==1.6.0" + "version": "==1.7.0" }, "isort": { "hashes": [ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1", "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==4.3.21" }, "lazy-object-proxy": { @@ -903,6 +698,7 @@ "sha256:efa1909120ce98bbb3777e8b6f92237f5d5c8ea6758efea36a473e1d38f7d3e4", "sha256:f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.4.3" }, "mccabe": { @@ -914,18 
+710,19 @@ }, "more-itertools": { "hashes": [ - "sha256:5dd8bcf33e5f9513ffa06d5ad33d78f31e1931ac9a18f33d37e77a180d393a7c", - "sha256:b1ddb932186d8a6ac451e1d95844b382f55e12686d51ca0c68b6f61f2ab7a507" + "sha256:68c70cc7167bdf5c7c9d8f6954a7837089c6a36bf565383919bb595efb8a17e5", + "sha256:b78134b2063dd214000685165d81c154522c3ee0a1c0d4d113c80361c234c5a2" ], - "version": "==8.2.0" + "markers": "python_version >= '3.5'", + "version": "==8.4.0" }, "packaging": { "hashes": [ - "sha256:3c292b474fda1671ec57d46d739d072bfd495a4f51ad01a055121d81e952b7a3", - "sha256:82f77b9bee21c1bafbf35a84905d604d5d1223801d639cf3ed140bd651c08752" + "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8", + "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181" ], "index": "pypi", - "version": "==20.3" + "version": "==20.4" }, "pep8": { "hashes": [ @@ -940,29 +737,32 @@ "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.13.1" }, "py": { "hashes": [ - "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa", - "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0" + "sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2", + "sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342" ], - "version": "==1.8.1" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.9.0" }, "pycodestyle": { "hashes": [ - "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56", - "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c" + "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367", + "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e" ], - "version": "==2.5.0" + "markers": 
"python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.6.0" }, "pylint": { "hashes": [ - "sha256:b95e31850f3af163c2283ed40432f053acbc8fc6eba6a069cb518d9dbf71848c", - "sha256:dd506acce0427e9e08fb87274bcaa953d38b50a58207170dbf5b36cf3e16957b" + "sha256:7dd78437f2d8d019717dbf287772d0b2dbdfd13fc016aa7faa08d67bccc46adc", + "sha256:d0ece7d223fe422088b0e8f13fa0a1e8eb745ebffcb8ed53d3e95394b6101a1c" ], "index": "pypi", - "version": "==2.5.2" + "version": "==2.5.3" }, "pyparsing": { "hashes": [ @@ -974,11 +774,11 @@ }, "pytest": { "hashes": [ - "sha256:95c710d0a72d91c13fae35dce195633c929c3792f54125919847fdcdf7caa0d3", - "sha256:eb2b5e935f6a019317e455b6da83dd8650ac9ffd2ee73a7b657a30873d67a698" + "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1", + "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8" ], "index": "pypi", - "version": "==5.4.2" + "version": "==5.4.3" }, "pytest-django": { "hashes": [ @@ -990,18 +790,18 @@ }, "six": { "hashes": [ - "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", - "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" + "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", + "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], "index": "pypi", - "version": "==1.14.0" + "version": "==1.15.0" }, "toml": { "hashes": [ - "sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c", - "sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e" + "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f", + "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88" ], - "version": "==0.10.0" + "version": "==0.10.1" }, "typed-ast": { "hashes": [ @@ -1027,15 +827,23 @@ "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4", "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7" ], - 
"markers": "implementation_name == 'cpython' and python_version < '3.8'", + "markers": "python_version < '3.8' and implementation_name == 'cpython'", "version": "==1.4.1" }, + "typing-extensions": { + "hashes": [ + "sha256:6e95524d8a547a91e08f404ae485bbb71962de46967e1b71a0cb89af24e761c5", + "sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae", + "sha256:f8d2bd89d25bc39dabe7d23df520442fa1d8969b82544370e03d88b5a591c392" + ], + "version": "==3.7.4.2" + }, "wcwidth": { "hashes": [ - "sha256:cafe2186b3c009a04067022ce1dcd79cb38d8d65ee4f4791b8888d6599d1bbe1", - "sha256:ee73862862a156bf77ff92b09034fc4825dd3af9cf81bc5b360668d425f3c5f1" + "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784", + "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83" ], - "version": "==0.1.9" + "version": "==0.2.5" }, "wrapt": { "hashes": [ @@ -1048,6 +856,7 @@ "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b", "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96" ], + "markers": "python_version >= '3.6'", "version": "==3.1.0" } } diff --git a/readme.md b/readme.md index 598a9d6bb2c8..b54eb10743ba 100644 --- a/readme.md +++ b/readme.md @@ -120,7 +120,6 @@ The application relies on the following environment variables to run: * `INFLUXDB_URL`: The URL for your InfluxDB database * `INFLUXDB_ORG`: The organisation string for your InfluxDB API call. * `GA_TABLE_ID`: GA table ID (view) to query when looking for organisation usage -* `USE_S3_STORAGE`: 'True' to store static files in s3 * `AWS_STORAGE_BUCKET_NAME`: bucket name to store static files. Required if `USE_S3_STORAGE' is true. * `AWS_S3_REGION_NAME`: region name of the static files bucket. Defaults to eu-west-2. * `ALLOWED_ADMIN_IP_ADDRESSES`: restrict access to the django admin console to a comma separated list of IP addresses (e.g. 
`127.0.0.1,127.0.0.2`) diff --git a/src/api/serializers.py b/src/api/serializers.py new file mode 100644 index 000000000000..015789dc3722 --- /dev/null +++ b/src/api/serializers.py @@ -0,0 +1,5 @@ +from rest_framework import serializers + + +class ErrorSerializer(serializers.Serializer): + message = serializers.CharField() \ No newline at end of file diff --git a/src/app/middleware.py b/src/app/middleware.py index b04984ad80fa..08968881b4fb 100644 --- a/src/app/middleware.py +++ b/src/app/middleware.py @@ -1,6 +1,10 @@ from django.conf import settings from django.core.exceptions import PermissionDenied +from util.logging import get_logger + +logger = get_logger(__name__) + class AdminWhitelistMiddleware: def __init__(self, get_response): @@ -12,6 +16,7 @@ def __call__(self, request): ip = x_forwarded_for.split(',')[0] if x_forwarded_for else request.META.get('REMOTE_ADDR') if settings.ALLOWED_ADMIN_IP_ADDRESSES and ip not in settings.ALLOWED_ADMIN_IP_ADDRESSES: # IP address not allowed! + logger.info('Denying access to admin for ip address %s' % ip) raise PermissionDenied() return self.get_response(request) diff --git a/src/app/settings/common.py b/src/app/settings/common.py index 0ff72d6cec1d..40d5f092be5c 100644 --- a/src/app/settings/common.py +++ b/src/app/settings/common.py @@ -9,7 +9,6 @@ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.9/ref/settings/ """ -import logging import os import warnings from importlib import reload @@ -29,6 +28,8 @@ PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) ENV = env('ENVIRONMENT', default='local') +if ENV not in ('local', 'dev', 'staging', 'production'): + warnings.warn('ENVIRONMENT env variable must be one of local, dev, staging or production') if 'DJANGO_SECRET_KEY' not in os.environ: secret_key_gen() @@ -106,6 +107,9 @@ # health check plugins 'health_check', 'health_check.db', + + # Used for ordering models (e.g. 
FeatureSegment) + 'ordered_model', ] if GOOGLE_ANALYTICS_KEY or INFLUXDB_TOKEN: @@ -125,7 +129,10 @@ ), 'PAGE_SIZE': 10, 'UNICODE_JSON': False, - 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination' + 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination', + 'DEFAULT_THROTTLE_RATES': { + 'login': '1/s' + } } MIDDLEWARE = [ @@ -147,7 +154,9 @@ if INFLUXDB_TOKEN: MIDDLEWARE.append('analytics.middleware.InfluxDBMiddleware') -if ENV != 'local': +ALLOWED_ADMIN_IP_ADDRESSES = env.list('ALLOWED_ADMIN_IP_ADDRESSES', default=list()) +if len(ALLOWED_ADMIN_IP_ADDRESSES) > 0: + warnings.warn('Restricting access to the admin site for ip addresses %s' % ', '.join(ALLOWED_ADMIN_IP_ADDRESSES)) MIDDLEWARE.append('app.middleware.AdminWhitelistMiddleware') ROOT_URLCONF = 'app.urls' @@ -320,16 +329,6 @@ } } -if env.bool('USE_S3_STORAGE', default=False): - STATICFILES_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage' - AWS_STORAGE_BUCKET_NAME = os.environ['AWS_STORAGE_BUCKET_NAME'] - AWS_S3_REGION_NAME = os.environ.get('AWS_S3_REGION_NAME', 'eu-west-2') - AWS_LOCATION = 'static' - AWS_DEFAULT_ACL = 'public-read' - AWS_S3_ADDRESSING_STYLE = 'virtual' - -ALLOWED_ADMIN_IP_ADDRESSES = env.list('ALLOWED_ADMIN_IP_ADDRESSES', default=list()) - LOG_LEVEL = env.str('LOG_LEVEL', 'WARNING') TRENCH_AUTH = { @@ -368,3 +367,8 @@ 'user_list': ['custom_auth.permissions.CurrentUser'], } } + + +# Github OAuth credentials +GITHUB_CLIENT_ID = env.str('GITHUB_CLIENT_ID', '') +GITHUB_CLIENT_SECRET = env.str('GITHUB_CLIENT_SECRET', '') diff --git a/src/app/settings/master.py b/src/app/settings/master.py index 3bea621f3d28..b88e1e9f59f6 100644 --- a/src/app/settings/master.py +++ b/src/app/settings/master.py @@ -40,4 +40,4 @@ REST_FRAMEWORK['PAGE_SIZE'] = 999 SECURE_SSL_REDIRECT = True -SECURE_REDIRECT_EXEMPT = [r'^/$', r'^$'] # root is exempt as it's used for EB health checks +SECURE_REDIRECT_EXEMPT = [r'^health$'] # /health is exempt as it's used for EB 
health checks diff --git a/src/app/settings/staging.py b/src/app/settings/staging.py index 3bea621f3d28..b88e1e9f59f6 100644 --- a/src/app/settings/staging.py +++ b/src/app/settings/staging.py @@ -40,4 +40,4 @@ REST_FRAMEWORK['PAGE_SIZE'] = 999 SECURE_SSL_REDIRECT = True -SECURE_REDIRECT_EXEMPT = [r'^/$', r'^$'] # root is exempt as it's used for EB health checks +SECURE_REDIRECT_EXEMPT = [r'^health$'] # /health is exempt as it's used for EB health checks diff --git a/src/app/urls.py b/src/app/urls.py index 2e854b1bfcf6..7d293b33f9c0 100644 --- a/src/app/urls.py +++ b/src/app/urls.py @@ -22,10 +22,5 @@ if settings.DEBUG: import debug_toolbar urlpatterns = [ - # Django 2 - # path('__debug__/', include(debug_toolbar.urls)), - - # For django versions before 2.0: url(r'^__debug__/', include(debug_toolbar.urls)), - ] + urlpatterns diff --git a/src/audit/models.py b/src/audit/models.py index 18a163913bdc..b18aa086ac5e 100644 --- a/src/audit/models.py +++ b/src/audit/models.py @@ -9,7 +9,7 @@ FEATURE_UPDATED_MESSAGE = "Flag / Remote Config updated: %s" SEGMENT_CREATED_MESSAGE = "New Segment created: %s" SEGMENT_UPDATED_MESSAGE = "Segment updated: %s" -FEATURE_SEGMENT_UPDATED_MESSAGE = "Segment rules updated for flag: %s" +FEATURE_SEGMENT_UPDATED_MESSAGE = "Segment rules updated for flag: %s in environment: %s" ENVIRONMENT_CREATED_MESSAGE = "New Environment created: %s" ENVIRONMENT_UPDATED_MESSAGE = "Environment updated: %s" FEATURE_STATE_UPDATED_MESSAGE = "Flag state / Remote Config value updated for feature: %s" @@ -45,3 +45,15 @@ class Meta: def __str__(self): return "Audit Log %s" % self.id + + @classmethod + def create_record(cls, obj, obj_type, log_message, author, project=None, environment=None): + cls.objects.create( + related_object_id=obj.id, + related_object_type=obj_type.name, + log=log_message, + author=author, + project=project, + environment=environment + ) + diff --git a/src/audit/signals.py b/src/audit/signals.py index 460d790794a3..f61670e6156e 100644 --- 
a/src/audit/signals.py +++ b/src/audit/signals.py @@ -5,10 +5,10 @@ from audit.models import AuditLog from audit.serializers import AuditLogSerializer +from util.logging import get_logger from webhooks.webhooks import call_organisation_webhooks, WebhookEventType -logger = logging.getLogger(__name__) -logger.setLevel(logging.INFO) +logger = get_logger(__name__) @receiver(post_save, sender=AuditLog) diff --git a/src/custom_auth/oauth/exceptions.py b/src/custom_auth/oauth/exceptions.py new file mode 100644 index 000000000000..150e93e45aec --- /dev/null +++ b/src/custom_auth/oauth/exceptions.py @@ -0,0 +1,10 @@ +class GithubError(Exception): + pass + + +class GoogleError(Exception): + pass + + +class OAuthError(Exception): + pass diff --git a/src/custom_auth/oauth/github.py b/src/custom_auth/oauth/github.py new file mode 100644 index 000000000000..ee0b36a6b192 --- /dev/null +++ b/src/custom_auth/oauth/github.py @@ -0,0 +1,78 @@ +import requests +from django.conf import settings +from requests import RequestException + +from custom_auth.oauth.exceptions import GithubError +from custom_auth.oauth.helpers.github_helpers import convert_response_data_to_dictionary, get_first_and_last_name +from util.logging import get_logger + +GITHUB_API_URL = "https://api.github.com" +GITHUB_OAUTH_URL = "https://github.com/login/oauth" + +NON_200_ERROR_MESSAGE = "Github returned {} status code when getting an access token." 
+ +logger = get_logger(__name__) + + +class GithubUser: + def __init__(self, code: str, client_id: str = None, client_secret: str = None): + self.client_id = client_id or settings.GITHUB_CLIENT_ID + self.client_secret = client_secret or settings.GITHUB_CLIENT_SECRET + + self.access_token = self._get_access_token(code) + self.headers = { + "Authorization": f"token {self.access_token}" + } + + def _get_access_token(self, code) -> str: + data = { + "code": code, + "client_id": self.client_id, + "client_secret": self.client_secret + } + response = requests.post(f"{GITHUB_OAUTH_URL}/access_token", data=data) + + if response.status_code != 200: + raise GithubError(NON_200_ERROR_MESSAGE.format(response.status_code)) + + response_json = convert_response_data_to_dictionary(response.text) + if "error" in response_json: + error_message = response_json["error_description"].replace("+", " ") + raise GithubError(error_message) + + return response_json["access_token"] + + def get_user_info(self) -> dict: + # TODO: use threads? + try: + return { + **self._get_user_name_and_id(), + "email": self._get_primary_email() + } + except RequestException: + raise GithubError("Failed to communicate with the Github API.") + + def _get_user_name_and_id(self): + user_response = requests.get(f"{GITHUB_API_URL}/user", headers=self.headers) + user_response_json = user_response.json() + full_name = user_response_json.get("name") + first_name, last_name = get_first_and_last_name(full_name) if full_name else ["", ""] + return { + "first_name": first_name, + "last_name": last_name, + "github_user_id": user_response_json.get("id") + } + + def _get_primary_email(self): + emails_response = requests.get(f"{GITHUB_API_URL}/user/emails", headers=self.headers) + + # response from github should be a list of dictionaries, this will find the first entry that is both verified + # and marked as primary (there should only be one). 
+ primary_email_data = next( + filter(lambda email_data: email_data["primary"] and email_data["verified"], emails_response.json()), None + ) + + if not primary_email_data: + raise GithubError("User does not have a verified email address with Github.") + + return primary_email_data["email"] diff --git a/src/custom_auth/oauth/google.py b/src/custom_auth/oauth/google.py index e4e9d670d1cd..1467ad073864 100644 --- a/src/custom_auth/oauth/google.py +++ b/src/custom_auth/oauth/google.py @@ -1,15 +1,27 @@ import requests +from requests import RequestException +from rest_framework import status + +from custom_auth.oauth.exceptions import GoogleError USER_INFO_URL = "https://www.googleapis.com/oauth2/v1/userinfo?alt=json&" +NON_200_ERROR_MESSAGE = "Google returned {} status code when getting an access token." def get_user_info(access_token): - headers = {"Authorization": f"Bearer {access_token}"} - response = requests.get(USER_INFO_URL, headers=headers) - response_json = response.json() - return { - "email": response_json["email"], - "first_name": response_json.get("given_name", ""), - "last_name": response_json.get("family_name", ""), - "google_user_id": response_json["id"] - } + try: + headers = {"Authorization": f"Bearer {access_token}"} + response = requests.get(USER_INFO_URL, headers=headers) + + if response.status_code != status.HTTP_200_OK: + raise GoogleError(NON_200_ERROR_MESSAGE.format(response.status_code)) + + response_json = response.json() + return { + "email": response_json["email"], + "first_name": response_json.get("given_name", ""), + "last_name": response_json.get("family_name", ""), + "google_user_id": response_json["id"] + } + except RequestException: + raise GoogleError("Failed to communicate with the Google API.") diff --git a/src/custom_auth/oauth/helpers/__init__.py b/src/custom_auth/oauth/helpers/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/custom_auth/oauth/helpers/github_helpers.py 
b/src/custom_auth/oauth/helpers/github_helpers.py new file mode 100644 index 000000000000..e853bc5ba038 --- /dev/null +++ b/src/custom_auth/oauth/helpers/github_helpers.py @@ -0,0 +1,23 @@ +from custom_auth.oauth.exceptions import GithubError +from util.logging import get_logger + +logger = get_logger(__name__) + + +def convert_response_data_to_dictionary(text: str) -> dict: + try: + response_data = {} + for key, value in [param.split("=") for param in text.split("&")]: + response_data[key] = value + return response_data + except ValueError: + logger.warning("Malformed data received from Github (%s)" % text) + raise GithubError("Malformed data received from Github") + + +def get_first_and_last_name(full_name: str) -> list: + if not full_name: + return ["", ""] + + names = full_name.strip().split(" ") + return names if len(names) == 2 else [full_name, ""] diff --git a/src/custom_auth/oauth/helpers/tests/test_unit_github_helpers.py b/src/custom_auth/oauth/helpers/tests/test_unit_github_helpers.py new file mode 100644 index 000000000000..a2f971118d22 --- /dev/null +++ b/src/custom_auth/oauth/helpers/tests/test_unit_github_helpers.py @@ -0,0 +1,66 @@ +import pytest + +from custom_auth.oauth.exceptions import GithubError +from custom_auth.oauth.helpers.github_helpers import convert_response_data_to_dictionary, get_first_and_last_name + + +def test_convert_response_data_to_dictionary_success(): + # Given + response_string = "key_1=value_1&key_2=value_2&key_3=value_3" + + # When + response_dict = convert_response_data_to_dictionary(response_string) + + # Then + assert response_dict == { + "key_1": "value_1", + "key_2": "value_2", + "key_3": "value_3", + } + + +def test_convert_response_data_to_dictionary_fail(): + # Given + response_string = "key_1value_1&key_2=value_2=value_2" + + # When + with pytest.raises(GithubError): + convert_response_data_to_dictionary(response_string) + + # Then - exception raised + + +def test_get_first_and_last_name_success(): + # Given + 
full_name = "tommy tester" + + # When + first_name, last_name = get_first_and_last_name(full_name) + + # Then + assert first_name == "tommy" + assert last_name == "tester" + + +def test_get_first_and_last_name_too_many_names(): + # Given + full_name = "tommy tester the third king among testers" + + # When + first_name, last_name = get_first_and_last_name(full_name) + + # Then + assert first_name == full_name + assert last_name == "" + + +def test_get_first_and_last_name_too_few_names(): + # Given + full_name = "wall-e" + + # When + first_name, last_name = get_first_and_last_name(full_name) + + # Then + assert first_name == full_name + assert last_name == "" diff --git a/src/custom_auth/oauth/serializers.py b/src/custom_auth/oauth/serializers.py index 3692892c7267..009b6795650d 100644 --- a/src/custom_auth/oauth/serializers.py +++ b/src/custom_auth/oauth/serializers.py @@ -2,22 +2,38 @@ from rest_framework import serializers from rest_framework.authtoken.models import Token +from custom_auth.oauth.github import GithubUser from custom_auth.oauth.google import get_user_info GOOGLE_URL = "https://www.googleapis.com/oauth2/v1/userinfo?alt=json&" UserModel = get_user_model() -class OAuthAccessTokenSerializer(serializers.Serializer): - access_token = serializers.CharField() +class OAuthLoginSerializer(serializers.Serializer): + access_token = serializers.CharField( + required=True, + help_text="Code or access token returned from the FE interaction with the third party login provider." 
+ ) - def create(self, validated_data): - """ - get or create a user and token based on the access token and return a DRF token + class Meta: + abstract = True - TODO: make this generic to allow for other oauth access methods - """ - user_data = get_user_info(validated_data["access_token"]) + def create(self, validated_data): + user_data = self.get_user_info() email = user_data.pop("email") user, _ = UserModel.objects.get_or_create(email=email, defaults=user_data) return Token.objects.get_or_create(user=user)[0] + + def get_user_info(self): + raise NotImplementedError("`get_user_info()` must be implemented.") + + +class GoogleLoginSerializer(OAuthLoginSerializer): + def get_user_info(self): + return get_user_info(self.validated_data["access_token"]) + + +class GithubLoginSerializer(OAuthLoginSerializer): + def get_user_info(self): + github_user = GithubUser(code=self.validated_data["access_token"]) + return github_user.get_user_info() diff --git a/src/custom_auth/oauth/tests/test_unit_github.py b/src/custom_auth/oauth/tests/test_unit_github.py new file mode 100644 index 000000000000..f63f1eda3728 --- /dev/null +++ b/src/custom_auth/oauth/tests/test_unit_github.py @@ -0,0 +1,117 @@ +from unittest import mock, TestCase + +import pytest + +from custom_auth.oauth.exceptions import GithubError +from custom_auth.oauth.github import NON_200_ERROR_MESSAGE, GithubUser + + +class GithubUserTestCase(TestCase): + def setUp(self) -> None: + self.test_client_id = "test-client-id" + self.test_client_secret = "test-client-secret" + + self.mock_requests = mock.patch("custom_auth.oauth.github.requests").start() + + def tearDown(self) -> None: + self.mock_requests.stop() + + def test_get_access_token_success(self): + # Given + test_code = "abc123" + expected_access_token = "access-token" + + self.mock_requests.post.return_value = mock.MagicMock( + text=f"access_token={expected_access_token}&scope=user&token_type=bearer", status_code=200 + ) + + # When + github_user = 
GithubUser(test_code, client_id=self.test_client_id, client_secret=self.test_client_secret) + + # Then + assert github_user.access_token == expected_access_token + + assert self.mock_requests.post.call_count == 1 + request_calls = self.mock_requests.post.call_args + assert request_calls[1]["data"]["code"] == test_code + + def test_get_access_token_fail_non_200(self): + # Given + invalid_code = "invalid" + status_code = 400 + self.mock_requests.post.return_value = mock.MagicMock(status_code=status_code) + + # When + with pytest.raises(GithubError) as e: + GithubUser(invalid_code, client_id=self.test_client_id, client_secret=self.test_client_secret) + + # Then - exception raised + assert NON_200_ERROR_MESSAGE.format(status_code) in str(e) + + def test_get_access_token_fail_token_expired(self): + # Given + invalid_code = "invalid" + + error_description = "there+was+an+error" + self.mock_requests.post.return_value = mock.MagicMock( + text=f"error=bad_verification_code&error_description={error_description}", status_code=200 + ) + + # When + with pytest.raises(GithubError) as e: + GithubUser(invalid_code, client_id=self.test_client_id, client_secret=self.test_client_secret) + + # Then + assert error_description.replace("+", " ") in str(e) + + def test_get_user_name_and_id(self): + # Given + # mock the post to get the access token + self.mock_requests.post.return_value = mock.MagicMock(status_code=200, text="access_token=123456") + + # mock the get to get the user info + mock_response = mock.MagicMock(status_code=200) + self.mock_requests.get.return_value = mock_response + mock_response.json.return_value = { + "name": "tommy tester", + "id": 123456 + } + + # When + github_user = GithubUser("test-code", client_id=self.test_client_id, client_secret=self.test_client_secret) + user_name_and_id = github_user._get_user_name_and_id() + + # Then + assert user_name_and_id == { + "first_name": "tommy", + "last_name": "tester", + "github_user_id": 123456 + } + + def 
test_get_primary_email(self): + # Given + # mock the post to get the access token + self.mock_requests.post.return_value = mock.MagicMock(status_code=200, text="access_token=123456") + + # mock the request to get the user info + mock_response = mock.MagicMock(status_code=200) + self.mock_requests.get.return_value = mock_response + + verified_emails = [{ + "email": f"tommy_tester@example_{i}.com", + "verified": True, + "visibility": None, + "primary": False + } for i in range(5)] + + # set one of the verified emails to be the primary + verified_emails[3]["primary"] = True + + mock_response.json.return_value = verified_emails + + # When + github_user = GithubUser("test-code", client_id=self.test_client_id, client_secret=self.test_client_secret) + primary_email = github_user._get_primary_email() + + # Then + assert primary_email == verified_emails[3]["email"] diff --git a/src/custom_auth/oauth/tests/test_unit_google.py b/src/custom_auth/oauth/tests/test_unit_google.py index eb710ebb06f8..08157f7895ef 100644 --- a/src/custom_auth/oauth/tests/test_unit_google.py +++ b/src/custom_auth/oauth/tests/test_unit_google.py @@ -1,5 +1,8 @@ from unittest import mock +import pytest + +from custom_auth.oauth.exceptions import GoogleError from custom_auth.oauth.google import get_user_info, USER_INFO_URL @@ -14,7 +17,7 @@ def test_get_user_info(mock_requests): "email": "testytester@example.com" } expected_headers = {"Authorization": f"Bearer {access_token}"} - mock_response = mock.MagicMock() + mock_response = mock.MagicMock(status_code=200) mock_requests.get.return_value = mock_response mock_response.json.return_value = mock_google_response_data @@ -28,4 +31,18 @@ def test_get_user_info(mock_requests): "first_name": mock_google_response_data["given_name"], "last_name": mock_google_response_data["family_name"], "google_user_id": mock_google_response_data["id"] - } \ No newline at end of file + } + + +@mock.patch("custom_auth.oauth.google.requests") +def 
test_get_user_info_non_200_status_code(mock_requests): + # Given + access_token = "access-token" + mock_response = mock.MagicMock(status_code=400) + mock_requests.get.return_value = mock_response + + # When + with pytest.raises(GoogleError): + get_user_info(access_token) + + # Then - exception raised diff --git a/src/custom_auth/oauth/tests/test_unit_serializers.py b/src/custom_auth/oauth/tests/test_unit_serializers.py index 12c25302744d..ef5fc95e8516 100644 --- a/src/custom_auth/oauth/tests/test_unit_serializers.py +++ b/src/custom_auth/oauth/tests/test_unit_serializers.py @@ -4,13 +4,13 @@ from django.contrib.auth import get_user_model from rest_framework.authtoken.models import Token -from custom_auth.oauth.serializers import OAuthAccessTokenSerializer +from custom_auth.oauth.serializers import GoogleLoginSerializer, OAuthLoginSerializer, GithubLoginSerializer UserModel = get_user_model() @pytest.mark.django_db -class OAuthAccessTokenSerializerTestCase(TestCase): +class OAuthLoginSerializerTestCase(TestCase): def setUp(self) -> None: self.test_email = "testytester@example.com" self.test_first_name = "testy" @@ -27,17 +27,53 @@ def setUp(self) -> None: def test_create(self, mock_get_user_info): # Given access_token = "access-token" - serializer = OAuthAccessTokenSerializer() data = { "access_token": access_token } + serializer = OAuthLoginSerializer(data=data) - mock_get_user_info.return_value = self.mock_user_data + # monkey patch the get_user_info method to return the mock user data + serializer.get_user_info = lambda: self.mock_user_data # When - response = serializer.create(validated_data=data) + serializer.is_valid() + response = serializer.save() # Then assert UserModel.objects.filter(email=self.test_email).exists() assert isinstance(response, Token) assert response.user.email == self.test_email + + +class GoogleLoginSerializerTestCase(TestCase): + @mock.patch("custom_auth.oauth.serializers.get_user_info") + def test_get_user_info(self, mock_get_user_info): 
+ # Given + access_token = "some-access-token" + serializer = GoogleLoginSerializer(data={"access_token": access_token}) + + # When + serializer.is_valid() + serializer.get_user_info() + + # Then + mock_get_user_info.assert_called_with(access_token) + + +class GithubLoginSerializerTestCase(TestCase): + @mock.patch("custom_auth.oauth.serializers.GithubUser") + def test_get_user_info(self, MockGithubUser): + # Given + access_token = "some-access-token" + serializer = GithubLoginSerializer(data={"access_token": access_token}) + + mock_github_user = mock.MagicMock() + MockGithubUser.return_value = mock_github_user + + # When + serializer.is_valid() + serializer.get_user_info() + + # Then + MockGithubUser.assert_called_with(code=access_token) + mock_github_user.get_user_info.assert_called() diff --git a/src/custom_auth/oauth/urls.py b/src/custom_auth/oauth/urls.py index 07843d00b9be..90c5bf3d40d6 100644 --- a/src/custom_auth/oauth/urls.py +++ b/src/custom_auth/oauth/urls.py @@ -1,9 +1,10 @@ from django.urls import path -from custom_auth.oauth.views import login_with_google +from custom_auth.oauth.views import login_with_google, login_with_github -app_name = 'oauth' +app_name = "oauth" urlpatterns = [ - path('google/', login_with_google), + path("google/", login_with_google), + path("github/", login_with_github) ] diff --git a/src/custom_auth/oauth/views.py b/src/custom_auth/oauth/views.py index 2c1ce94e4479..675758e7874b 100644 --- a/src/custom_auth/oauth/views.py +++ b/src/custom_auth/oauth/views.py @@ -1,17 +1,59 @@ from drf_yasg.utils import swagger_auto_schema +from rest_framework import status from rest_framework.decorators import api_view, permission_classes from rest_framework.permissions import AllowAny from rest_framework.response import Response -from custom_auth.oauth.serializers import OAuthAccessTokenSerializer +from api.serializers import ErrorSerializer +from custom_auth.oauth.exceptions import GithubError, GoogleError +from custom_auth.oauth.serializers 
import GoogleLoginSerializer, GithubLoginSerializer from custom_auth.serializers import CustomTokenSerializer +from util.logging import get_logger +logger = get_logger(__name__) -@swagger_auto_schema(method="post", request_body=OAuthAccessTokenSerializer, responses={200: CustomTokenSerializer}) +AUTH_ERROR_MESSAGE = "An error occurred authenticating with {}" +GITHUB_AUTH_ERROR_MESSAGE = AUTH_ERROR_MESSAGE.format("GITHUB") +GOOGLE_AUTH_ERROR_MESSAGE = AUTH_ERROR_MESSAGE.format("GOOGLE") + + +@swagger_auto_schema( + method="post", + request_body=GoogleLoginSerializer, + responses={200: CustomTokenSerializer, 502: ErrorSerializer}, +) @api_view(["POST"]) @permission_classes([AllowAny]) def login_with_google(request): - serializer = OAuthAccessTokenSerializer(data=request.data) - serializer.is_valid(raise_exception=True) - token = serializer.save() - return Response(data=CustomTokenSerializer(instance=token).data) + try: + serializer = GoogleLoginSerializer(data=request.data) + serializer.is_valid(raise_exception=True) + token = serializer.save() + return Response(data=CustomTokenSerializer(instance=token).data) + except GoogleError as e: + logger.warning("%s: %s" % (GOOGLE_AUTH_ERROR_MESSAGE, str(e))) + return Response( + data={"message": GOOGLE_AUTH_ERROR_MESSAGE}, + status=status.HTTP_502_BAD_GATEWAY, + ) + + +@swagger_auto_schema( + method="post", + request_body=GithubLoginSerializer, + responses={200: CustomTokenSerializer, 502: ErrorSerializer}, +) +@api_view(["POST"]) +@permission_classes([AllowAny]) +def login_with_github(request): + try: + serializer = GithubLoginSerializer(data=request.data) + serializer.is_valid(raise_exception=True) + token = serializer.save() + return Response(data=CustomTokenSerializer(instance=token).data) + except GithubError as e: + logger.warning("%s: %s" % (GITHUB_AUTH_ERROR_MESSAGE, str(e))) + return Response( + data={"message": GITHUB_AUTH_ERROR_MESSAGE}, + status=status.HTTP_502_BAD_GATEWAY, + ) diff --git 
a/src/custom_auth/tests/end_to_end/test_custom_auth_integration.py b/src/custom_auth/tests/end_to_end/test_custom_auth_integration.py index baa50f74d42f..05b9826ec1ff 100644 --- a/src/custom_auth/tests/end_to_end/test_custom_auth_integration.py +++ b/src/custom_auth/tests/end_to_end/test_custom_auth_integration.py @@ -1,5 +1,6 @@ import re +import time import pyotp from django.core import mail from django.urls import reverse @@ -41,6 +42,8 @@ def test_register_and_login_workflows(self): assert register_response_success.status_code == status.HTTP_201_CREATED assert register_response_success.json()["key"] + # add delay to avoid HTTP_429 as we have throttle in place for login + time.sleep(1) # now verify we can login with the same credentials new_login_data = { "email": self.test_email, @@ -78,6 +81,8 @@ def test_register_and_login_workflows(self): ) assert reset_password_confirm_response.status_code == status.HTTP_204_NO_CONTENT + # add delay to avoid HTTP_429 as we have throttle in place for login + time.sleep(1) # now check we can login with the new details new_login_data = { "email": self.test_email, @@ -145,3 +150,34 @@ def test_login_workflow_with_mfa_enabled(self): current_user_response = self.client.get(self.current_user_url) assert current_user_response.status_code == status.HTTP_200_OK assert current_user_response.json()["email"] == self.test_email + + def test_throttle_login_workflows(self): + # register the user + register_data = { + "email": self.test_email, + "password": self.password, + "re_password": self.password, + "first_name": "test", + "last_name": "user", + } + register_response = self.client.post( + self.register_url, data=register_data + ) + assert register_response.status_code == status.HTTP_201_CREATED + assert register_response.json()["key"] + + # since we're hitting login in other tests we need to ensure that the + # first login request doesn't fail with HTTP_429 + time.sleep(1) + # verify we can login with credentials + login_data = { + 
"email": self.test_email, + "password": self.password, + } + login_response = self.client.post(self.login_url, data=login_data) + assert login_response.status_code == status.HTTP_200_OK + assert login_response.json()["key"] + + # try logging in again, should deny, current limit 1 per second + login_response = self.client.post(self.login_url, data=login_data) + assert login_response.status_code == status.HTTP_429_TOO_MANY_REQUESTS diff --git a/src/custom_auth/urls.py b/src/custom_auth/urls.py index c665507ef149..d9a685bebe84 100644 --- a/src/custom_auth/urls.py +++ b/src/custom_auth/urls.py @@ -1,9 +1,12 @@ from django.urls import include, path +from custom_auth.views import CustomAuthTokenLoginOrRequestMFACode app_name = 'custom_auth' urlpatterns = [ + # Override auth/login endpoint for throttling login requests + path('login/', CustomAuthTokenLoginOrRequestMFACode.as_view(), name='custom-mfa-authtoken-login'), path('', include('djoser.urls')), path('', include('trench.urls')), # MFA path('', include('trench.urls.djoser')), # override necessary urls for MFA auth diff --git a/src/custom_auth/views.py b/src/custom_auth/views.py new file mode 100644 index 000000000000..376a5462a0ff --- /dev/null +++ b/src/custom_auth/views.py @@ -0,0 +1,10 @@ +from rest_framework.throttling import ScopedRateThrottle +from trench.views.authtoken import AuthTokenLoginOrRequestMFACode + + +class CustomAuthTokenLoginOrRequestMFACode(AuthTokenLoginOrRequestMFACode): + """ + Class to handle throttling for login requests + """ + throttle_classes = [ScopedRateThrottle] + throttle_scope = 'login' diff --git a/src/environments/admin.py b/src/environments/admin.py index a750191d8190..16fca4a9a4f2 100644 --- a/src/environments/admin.py +++ b/src/environments/admin.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- from __future__ import unicode_literals +from django.conf import settings from django.contrib import admin from simple_history.admin import SimpleHistoryAdmin @@ -21,7 +22,6 @@ class
EnvironmentAdmin(admin.ModelAdmin): inlines = (WebhookInline,) -@admin.register(Identity) class IdentityAdmin(admin.ModelAdmin): date_hierarchy = 'created_date' list_display = ('__str__', 'created_date', 'environment',) @@ -29,7 +29,6 @@ class IdentityAdmin(admin.ModelAdmin): search_fields = ('identifier',) -@admin.register(Trait) class TraitAdmin(SimpleHistoryAdmin): date_hierarchy = 'created_date' list_display = ('__str__', 'value_type', 'boolean_value', 'integer_value', 'string_value', @@ -37,3 +36,9 @@ class TraitAdmin(SimpleHistoryAdmin): list_filter = ('value_type', 'created_date', 'identity',) raw_id_fields = ('identity',) search_fields = ('string_value', 'trait_key', 'identity__identifier',) + + +if settings.ENV in ('local', 'dev'): + # these shouldn't be displayed in production environments but are useful in development environments + admin.site.register(Identity, IdentityAdmin) + admin.site.register(Trait, TraitAdmin) diff --git a/src/environments/models.py b/src/environments/models.py index 06a9ffadb743..be1d95525cb3 100644 --- a/src/environments/models.py +++ b/src/environments/models.py @@ -123,7 +123,9 @@ def get_all_feature_states(self): # define sub queries belongs_to_environment_query = Q(environment=self.environment) overridden_for_identity_query = Q(identity=self) - overridden_for_segment_query = Q(feature_segment__segment__in=segments) + overridden_for_segment_query = Q( + feature_segment__segment__in=segments, feature_segment__environment=self.environment + ) environment_default_query = Q(identity=None, feature_segment=None) # define the full query @@ -135,6 +137,8 @@ def get_all_feature_states(self): all_flags = FeatureState.objects.select_related(*select_related_args).filter(full_query) + # iterate over all the flags and build a dictionary keyed on feature with the highest priority flag + # for the given identity as the value. 
identity_flags = {} for flag in all_flags: if flag.feature_id not in identity_flags: diff --git a/src/environments/tests/test_models.py b/src/environments/tests/test_models.py index 1addc9413268..571d7ac81adb 100644 --- a/src/environments/tests/test_models.py +++ b/src/environments/tests/test_models.py @@ -6,7 +6,7 @@ from features.utils import INTEGER, STRING, BOOLEAN from organisations.models import Organisation from projects.models import Project -from segments.models import Segment, SegmentRule, Condition, EQUAL, GREATER_THAN_INCLUSIVE +from segments.models import Segment, SegmentRule, Condition, EQUAL, GREATER_THAN_INCLUSIVE, GREATER_THAN from util.tests import Helper @@ -190,9 +190,9 @@ def test_get_all_feature_states_for_identity_returns_correct_values_for_matching remote_config = Feature.objects.create(name='test-remote-config', project=self.project, initial_value='initial-value', type='CONFIG') - FeatureSegment.objects.create(feature=feature_flag, segment=segment, enabled=True) + FeatureSegment.objects.create(feature=feature_flag, segment=segment, environment=self.environment, enabled=True) overridden_value = 'overridden-value' - FeatureSegment.objects.create(feature=remote_config, segment=segment, + FeatureSegment.objects.create(feature=remote_config, segment=segment, environment=self.environment, value=overridden_value, value_type=STRING) # When @@ -221,9 +221,9 @@ def test_get_all_feature_states_for_identity_returns_correct_values_for_identity remote_config = Feature.objects.create(name='test-remote-config', project=self.project, initial_value=initial_value, type='CONFIG') - FeatureSegment.objects.create(feature=feature_flag, segment=segment, enabled=True) + FeatureSegment.objects.create(feature=feature_flag, segment=segment, environment=self.environment, enabled=True) overridden_value = 'overridden-value' - FeatureSegment.objects.create(feature=remote_config, segment=segment, + FeatureSegment.objects.create(feature=remote_config, segment=segment, 
environment=self.environment, value=overridden_value, value_type=STRING) # When @@ -252,7 +252,7 @@ def test_get_all_feature_states_for_identity_returns_correct_value_for_matching_ # Feature segment value is converted to string in the serializer so we set as a string value here to test # bool value overridden_value = '12' - FeatureSegment.objects.create(feature=remote_config, segment=segment, + FeatureSegment.objects.create(feature=remote_config, segment=segment, environment=self.environment, value=overridden_value, value_type=INTEGER) # When @@ -279,7 +279,7 @@ def test_get_all_feature_states_for_identity_returns_correct_value_for_matching_ # Feature segment value is converted to string in the serializer so we set as a string value here to test # bool value overridden_value = 'false' - FeatureSegment.objects.create(feature=remote_config, segment=segment, + FeatureSegment.objects.create(feature=remote_config, segment=segment, environment=self.environment, value=overridden_value, value_type=BOOLEAN) # When @@ -313,11 +313,11 @@ def test_get_all_feature_states_highest_value_of_highest_priority_segment(self): # which is overridden by both segments with different values overridden_value_1 = 'overridden-value-1' - FeatureSegment.objects.create(feature=remote_config, segment=segment_1, + FeatureSegment.objects.create(feature=remote_config, segment=segment_1, environment=self.environment, value=overridden_value_1, value_type=STRING, priority=1) overridden_value_2 = 'overridden-value-2' - FeatureSegment.objects.create(feature=remote_config, segment=segment_2, + FeatureSegment.objects.create(feature=remote_config, segment=segment_2, environment=self.environment, value=overridden_value_2, value_type=STRING, priority=2) # When - we get all feature states for an identity @@ -327,3 +327,38 @@ def test_get_all_feature_states_highest_value_of_highest_priority_segment(self): assert len(feature_states) == 1 remote_config_feature_state = next(filter(lambda fs: fs.feature == 
remote_config, feature_states)) assert remote_config_feature_state.get_feature_state_value() == overridden_value_1 + + def test_remote_config_override(self): + """specific test for bug raised following work to make feature segments unique to an environment""" + # GIVEN - an identity with a trait that has a value of 10 + identity = Identity.objects.create(identifier="test", environment=self.environment) + trait = Trait.objects.create(identity=identity, trait_key="my_trait", integer_value=10, value_type=INTEGER) + + # and a segment that matches users that have a value for this trait greater than 5 + segment = Segment.objects.create(name="Test segment", project=self.project) + segment_rule = SegmentRule.objects.create(segment=segment, type=SegmentRule.ALL_RULE) + condition = Condition.objects.create( + rule=segment_rule, operator=GREATER_THAN, value="5", property=trait.trait_key + ) + + # and a feature that has a segment override in the same environment as the identity + remote_config = Feature.objects.create(name="my_feature", initial_value="initial value", project=self.project) + feature_segment = FeatureSegment.objects.create( + feature=remote_config, + environment=self.environment, + segment=segment, + value="overridden value 1", + value_type=STRING + ) + + # WHEN - the value on the feature segment is updated and we get all the feature states for the identity + feature_segment.value = "overridden value 2" + feature_segment.save() + feature_states = identity.get_all_feature_states() + + # THEN - the feature state value is correctly set to the newly updated feature segment value + assert len(feature_states) == 1 + + overridden_feature_state = feature_states[0] + assert overridden_feature_state.get_feature_state_value() == feature_segment.value + diff --git a/src/environments/tests/test_views.py b/src/environments/tests/test_views.py index 0902699120f7..880442b2516b 100644 --- a/src/environments/tests/test_views.py +++ b/src/environments/tests/test_views.py @@ -511,7 
+511,9 @@ def test_identities_endpoint_returns_value_for_segment_if_identity_in_segment(se segment = Segment.objects.create(name='Test Segment', project=self.project) segment_rule = SegmentRule.objects.create(segment=segment, type=SegmentRule.ALL_RULE) Condition.objects.create(operator='EQUAL', property=trait_key, value=trait_value, rule=segment_rule) - FeatureSegment.objects.create(segment=segment, feature=self.feature_2, enabled=True, priority=1) + FeatureSegment.objects.create( + segment=segment, feature=self.feature_2, environment=self.environment, enabled=True, priority=1 + ) # When response = self.client.get(url) @@ -534,7 +536,9 @@ def test_identities_endpoint_returns_value_for_segment_if_identity_in_segment_an segment = Segment.objects.create(name='Test Segment', project=self.project) segment_rule = SegmentRule.objects.create(segment=segment, type=SegmentRule.ALL_RULE) Condition.objects.create(operator='EQUAL', property=trait_key, value=trait_value, rule=segment_rule) - FeatureSegment.objects.create(segment=segment, feature=self.feature_1, enabled=True, priority=1) + FeatureSegment.objects.create( + segment=segment, feature=self.feature_1, environment=self.environment, enabled=True, priority=1 + ) # When response = self.client.get(url) @@ -557,7 +561,9 @@ def test_identities_endpoint_returns_value_for_segment_if_rule_type_percentage_s Condition.objects.create(operator=models.PERCENTAGE_SPLIT, value=(identity_percentage_value + (1 - identity_percentage_value) / 2) * 100.0, rule=segment_rule) - FeatureSegment.objects.create(segment=segment, feature=self.feature_1, enabled=True, priority=1) + FeatureSegment.objects.create( + segment=segment, feature=self.feature_1, environment=self.environment, enabled=True, priority=1 + ) # When self.client.credentials(HTTP_X_ENVIRONMENT_KEY=self.environment.api_key) @@ -580,7 +586,9 @@ def test_identities_endpoint_returns_default_value_if_rule_type_percentage_split Condition.objects.create(operator=models.PERCENTAGE_SPLIT, 
value=identity_percentage_value / 2, rule=segment_rule) - FeatureSegment.objects.create(segment=segment, feature=self.feature_1, enabled=True, priority=1) + FeatureSegment.objects.create( + segment=segment, feature=self.feature_1, environment=self.environment, enabled=True, priority=1 + ) # When self.client.credentials(HTTP_X_ENVIRONMENT_KEY=self.environment.api_key) diff --git a/src/features/admin.py b/src/features/admin.py index aa03743f335f..9fdeab29d046 100644 --- a/src/features/admin.py +++ b/src/features/admin.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- from __future__ import unicode_literals +from django.conf import settings from django.contrib import admin from simple_history.admin import SimpleHistoryAdmin @@ -13,7 +14,6 @@ class FeatureStateValueInline(admin.StackedInline): show_change_link = True -@admin.register(Feature) class FeatureAdmin(SimpleHistoryAdmin): date_hierarchy = 'created_date' list_display = ('__str__', 'initial_value', @@ -28,7 +28,6 @@ class FeatureAdmin(SimpleHistoryAdmin): ) -@admin.register(FeatureSegment) class FeatureSegmentAdmin(admin.ModelAdmin): model = FeatureSegment @@ -41,7 +40,6 @@ def change_view(self, *args, **kwargs): return super(FeatureSegmentAdmin, self).change_view(*args, **kwargs) -@admin.register(FeatureState) class FeatureStateAdmin(SimpleHistoryAdmin): inlines = [ FeatureStateValueInline, @@ -58,7 +56,6 @@ class FeatureStateAdmin(SimpleHistoryAdmin): ) -@admin.register(FeatureStateValue) class FeatureStateValueAdmin(SimpleHistoryAdmin): list_display = ('feature_state', 'type', 'boolean_value', 'integer_value', 'string_value', ) @@ -72,3 +69,10 @@ class FeatureStateValueAdmin(SimpleHistoryAdmin): 'feature_state__environment__name', 'feature_state__identity__identifier', ) + + +if settings.ENV in ('local', 'dev'): + admin.site.register(Feature, FeatureAdmin) + admin.site.register(FeatureState, FeatureStateAdmin) + admin.site.register(FeatureSegment, FeatureSegmentAdmin) + admin.site.register(FeatureStateValue, 
FeatureStateValueAdmin) diff --git a/src/features/apps.py b/src/features/apps.py index 53c8e4511f06..eee08e740595 100644 --- a/src/features/apps.py +++ b/src/features/apps.py @@ -8,4 +8,5 @@ class FeaturesConfig(AppConfig): name = 'features' def ready(self): - pass + # noinspection PyUnresolvedReferences + import features.signals diff --git a/src/features/fields.py b/src/features/fields.py new file mode 100644 index 000000000000..651d32ae932f --- /dev/null +++ b/src/features/fields.py @@ -0,0 +1,19 @@ +from rest_framework import serializers + +from features.utils import INTEGER, BOOLEAN, STRING + + +class FeatureSegmentValueField(serializers.Field): + def to_internal_value(self, data): + if data is not None: + # grab the type of the value and set the context for use + # in the create / update methods on the serializer + value_type = type(data).__name__ + value_types = [STRING, BOOLEAN, INTEGER] + value_type = value_type if value_type in value_types else STRING + self.context['value_type'] = value_type + + return str(data) + + def to_representation(self, value): + return self.root.instance.get_value() diff --git a/src/features/helpers.py b/src/features/helpers.py new file mode 100644 index 000000000000..d2fa260bc178 --- /dev/null +++ b/src/features/helpers.py @@ -0,0 +1,13 @@ +import typing + +from features.utils import INTEGER, BOOLEAN + + +def get_correctly_typed_value(value_type: str, string_value: str) -> typing.Any: + if value_type == INTEGER: + return int(string_value) + elif value_type == BOOLEAN: + return string_value == 'True' + + return string_value + diff --git a/src/features/migrations/0017_auto_20200607_1005.py b/src/features/migrations/0017_auto_20200607_1005.py new file mode 100644 index 000000000000..dfff61dfc526 --- /dev/null +++ b/src/features/migrations/0017_auto_20200607_1005.py @@ -0,0 +1,25 @@ +# Generated by Django 2.2.12 on 2020-06-07 10:05 +from django.db import migrations, models +import django.db.models.deletion + + +class 
Migration(migrations.Migration): + + dependencies = [ + ('environments', '0012_auto_20200504_1322'), + ('segments', '0007_auto_20190906_1416'), + ('features', '0016_auto_20190916_1717'), + ] + + operations = [ + # first, add the field, allowing null values + migrations.AddField( + model_name='featuresegment', + name='environment', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='feature_segments', to='environments.Environment'), + ), + migrations.AlterUniqueTogether( + name='featuresegment', + unique_together={('feature', 'environment', 'segment')}, + ), + ] diff --git a/src/features/migrations/0018_auto_20200607_1057.py b/src/features/migrations/0018_auto_20200607_1057.py new file mode 100644 index 000000000000..c5a5b369b649 --- /dev/null +++ b/src/features/migrations/0018_auto_20200607_1057.py @@ -0,0 +1,75 @@ +# Generated by Django 2.2.13 on 2020-06-07 10:57 +import logging + +from django.db import migrations + +logger = logging.getLogger() +logger.setLevel(logging.INFO) + + +def migrate_feature_segments_forward(apps, schema_editor): + FeatureSegment = apps.get_model('features', 'FeatureSegment') + FeatureState = apps.get_model('features', 'FeatureState') + + # iterate over all current feature segments and ensure that one exists for all environments in its project + for feature_segment in FeatureSegment.objects.all(): + for idx, environment in enumerate(feature_segment.feature.project.environments.all()): + # update the existing feature segment with the first environment and then create new feature segments + # for the remaining environments + if idx == 0: + logger.info('Adding environment %d to feature segment %d' % (environment.id, feature_segment.id)) + feature_segment.environment = environment + feature_segment.save() + else: + logger.info('Creating new feature segment for feature %d, environment %d and segment %d' % ( + feature_segment.feature.id, environment.id, feature_segment.segment.id + )) + # create a
copy of the feature segment by just setting the pk to None + new_feature_segment = FeatureSegment.objects.create( + feature=feature_segment.feature, + environment=environment, + segment=feature_segment.segment, + priority=feature_segment.priority, + enabled=feature_segment.enabled, + value=feature_segment.value, + value_type=feature_segment.value_type, + ) + + # we now need to update the feature state to point to the correct feature segment + FeatureState.objects.filter( + environment=environment, feature=new_feature_segment.feature, feature_segment=feature_segment + ).update(feature_segment=new_feature_segment) + + assert not FeatureSegment.objects.filter(environment__isnull=True).exists() + + +def migrate_feature_segments_reverse(apps, schema_editor): + """ + Reverse the above by making feature segments unique to a feature again. + + NOTE: THIS WILL RESULT IN A LOSS OF DATA! + There is no way to determine which 'value' should be kept for a feature segment so we blindly just delete all but + one of the feature segments. This has to be done due to the uniqueness constraint to ensure that we can still + migrate backwards. + """ + FeatureSegment = apps.get_model('features', 'FeatureSegment') + Feature = apps.get_model('features', 'Feature') + + for feature in Feature.objects.filter(feature_segments__isnull=False).prefetch_related('feature_segments'): + # todo: this is deleting more than it should. 
It should only be deleting one per feature / segment but it's + # ignoring cases where there are more than one segment + first_feature_segment = feature.feature_segments.first() + FeatureSegment.objects.filter(feature=feature).exclude(pk=first_feature_segment.pk).delete() + + +class Migration(migrations.Migration): + + dependencies = [ + ('features', '0017_auto_20200607_1005'), + ] + + operations = [ + migrations.RunPython( + migrate_feature_segments_forward, reverse_code=migrate_feature_segments_reverse + ), + ] diff --git a/src/features/migrations/0019_auto_20200607_1059.py b/src/features/migrations/0019_auto_20200607_1059.py new file mode 100644 index 000000000000..fa13301f271e --- /dev/null +++ b/src/features/migrations/0019_auto_20200607_1059.py @@ -0,0 +1,19 @@ +# Generated by Django 2.2.13 on 2020-06-07 10:59 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('features', '0018_auto_20200607_1057'), + ] + + operations = [ + migrations.AlterField( + model_name='featuresegment', + name='environment', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='feature_segments', to='environments.Environment'), + ), + ] diff --git a/src/features/migrations/0020_auto_20200615_1300.py b/src/features/migrations/0020_auto_20200615_1300.py new file mode 100644 index 000000000000..83add93c506b --- /dev/null +++ b/src/features/migrations/0020_auto_20200615_1300.py @@ -0,0 +1,28 @@ +# Generated by Django 2.2.13 on 2020-06-15 13:00 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('environments', '0012_auto_20200504_1322'), + ('segments', '0007_auto_20190906_1416'), + ('features', '0019_auto_20200607_1059'), + ] + + operations = [ + migrations.AlterModelOptions( + name='featuresegment', + options={'ordering': ('priority',)}, + ), + migrations.AlterField( + model_name='featuresegment', + 
name='priority', + field=models.PositiveIntegerField(db_index=True, editable=False), + ), + migrations.AlterUniqueTogether( + name='featuresegment', + unique_together={('feature', 'environment', 'segment')}, + ), + ] diff --git a/src/features/migrations/0021_historicalfeaturesegment.py b/src/features/migrations/0021_historicalfeaturesegment.py new file mode 100644 index 000000000000..6afbf147586a --- /dev/null +++ b/src/features/migrations/0021_historicalfeaturesegment.py @@ -0,0 +1,43 @@ +# Generated by Django 2.2.13 on 2020-06-20 14:56 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import simple_history.models + + +class Migration(migrations.Migration): + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('environments', '0012_auto_20200504_1322'), + ('segments', '0007_auto_20190906_1416'), + ('features', '0020_auto_20200615_1300'), + ] + + operations = [ + migrations.CreateModel( + name='HistoricalFeatureSegment', + fields=[ + ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')), + ('enabled', models.BooleanField(default=False)), + ('value', models.CharField(blank=True, max_length=2000, null=True)), + ('value_type', models.CharField(blank=True, choices=[('int', 'Integer'), ('unicode', 'String'), ('bool', 'Boolean')], max_length=50, null=True)), + ('priority', models.PositiveIntegerField(db_index=True, editable=False)), + ('history_id', models.AutoField(primary_key=True, serialize=False)), + ('history_date', models.DateTimeField()), + ('history_change_reason', models.CharField(max_length=100, null=True)), + ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)), + ('environment', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='environments.Environment')), + ('feature', 
models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='features.Feature')), + ('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)), + ('segment', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='segments.Segment')), + ], + options={ + 'verbose_name': 'historical feature segment', + 'ordering': ('-history_date', '-history_id'), + 'get_latest_by': 'history_date', + }, + bases=(simple_history.models.HistoricalChanges, models.Model), + ), + ] diff --git a/src/features/migrations/0022_auto_20200630_2115.py b/src/features/migrations/0022_auto_20200630_2115.py new file mode 100644 index 000000000000..9c2f6ac7e4fe --- /dev/null +++ b/src/features/migrations/0022_auto_20200630_2115.py @@ -0,0 +1,19 @@ +# Generated by Django 2.2.13 on 2020-06-30 21:15 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('features', '0021_historicalfeaturesegment'), + ] + + operations = [ + # this migration should have no affect but should fix the issues on dev after + # screwing around with the migrations + migrations.AlterUniqueTogether( + name='featuresegment', + unique_together={('feature', 'environment', 'segment')}, + ), + ] diff --git a/src/features/models.py b/src/features/models.py index 337596854055..eac10b8d1124 100644 --- a/src/features/models.py +++ b/src/features/models.py @@ -5,8 +5,10 @@ from django.db import models from django.utils.encoding import python_2_unicode_compatible from django.utils.translation import ugettext_lazy as _ +from ordered_model.models import OrderedModelBase from simple_history.models import HistoricalRecords +from features.helpers import get_correctly_typed_value from features.tasks import trigger_feature_state_change_webhooks from features.utils import 
get_boolean_from_string, get_integer_from_string, INTEGER, STRING, BOOLEAN, get_value_type from projects.models import Project @@ -38,10 +40,10 @@ class Feature(models.Model): Project, related_name='features', help_text=_( - "Changing the project selected will remove previous Feature States for the previously " - "associated projects Environments that are related to this Feature. New default " - "Feature States will be created for the new selected projects Environments for this " - "Feature." + 'Changing the project selected will remove previous Feature States for the previously ' + 'associated projects Environments that are related to this Feature. New default ' + 'Feature States will be created for the new selected projects Environments for this ' + 'Feature.' ), on_delete=models.CASCADE ) @@ -54,12 +56,12 @@ class Feature(models.Model): class Meta: ordering = ['id'] # Note: uniqueness is changed to reference lowercase name in explicit SQL in the migrations - unique_together = ("name", "project") + unique_together = ('name', 'project') def save(self, *args, **kwargs): - """ + ''' Override save method to initialise feature states for all environments - """ + ''' if self.pk: # If the feature has moved to a new project, delete the feature states from the old project old_feature = Feature.objects.get(pk=self.pk) @@ -84,24 +86,24 @@ def save(self, *args, **kwargs): ) def validate_unique(self, *args, **kwargs): - """ + ''' Checks unique constraints on the model and raises ``ValidationError`` if any failed. - """ + ''' super(Feature, self).validate_unique(*args, **kwargs) if Feature.objects.filter(project=self.project, name__iexact=self.name).exists(): raise ValidationError( { NON_FIELD_ERRORS: [ - "Feature with that name already exists for this project. Note that feature " - "names are case insensitive.", + 'Feature with that name already exists for this project. 
Note that feature ' + 'names are case insensitive.', ], } ) def __str__(self): - return "Project %s - Feature %s" % (self.project.name, self.name) + return 'Project %s - Feature %s' % (self.project.name, self.name) def get_next_segment_priority(feature): @@ -113,45 +115,49 @@ def get_next_segment_priority(feature): @python_2_unicode_compatible -class FeatureSegment(models.Model): - feature = models.ForeignKey(Feature, on_delete=models.CASCADE, related_name="feature_segments") - segment = models.ForeignKey('segments.Segment', related_name="feature_segments", on_delete=models.CASCADE) - priority = models.IntegerField(blank=True, null=True) +class FeatureSegment(OrderedModelBase): + feature = models.ForeignKey(Feature, on_delete=models.CASCADE, related_name='feature_segments') + segment = models.ForeignKey('segments.Segment', related_name='feature_segments', on_delete=models.CASCADE) + environment = models.ForeignKey( + 'environments.Environment', on_delete=models.CASCADE, related_name='feature_segments' + ) + enabled = models.BooleanField(default=False) value = models.CharField(max_length=2000, blank=True, null=True) value_type = models.CharField(choices=FEATURE_STATE_VALUE_TYPES, max_length=50, blank=True, null=True) + # specific attributes for managing the order of feature segments + priority = models.PositiveIntegerField(editable=False, db_index=True) + order_field_name = 'priority' + order_with_respect_to = ('feature', 'environment') + + # used for audit purposes + history = HistoricalRecords() + class Meta: - unique_together = [('feature', 'segment'), ('feature', 'priority')] + unique_together = ('feature', 'environment', 'segment') + ordering = ('priority',) def save(self, *args, **kwargs): - if not self.pk and not self.priority: - # intialise priority field on object creation if not set - self.priority = get_next_segment_priority(self.feature) - super(FeatureSegment, self).save(*args, **kwargs) - # create feature states - for environment in 
self.feature.project.environments.all(): - fs, _ = FeatureState.objects.get_or_create(environment=environment, feature=self.feature, - feature_segment=self) - fs.enabled = self.enabled - fs.save() + # update or create feature state for environment + FeatureState.objects.update_or_create( + environment=self.environment, feature=self.feature, feature_segment=self, defaults={"enabled": self.enabled} + ) def __str__(self): - return "FeatureSegment for " + self.feature.name + " with priority " + str(self.priority) + return 'FeatureSegment for ' + self.feature.name + ' with priority ' + str(self.priority) + # noinspection PyTypeChecker def get_value(self): - return { - BOOLEAN: get_boolean_from_string(self.value), - INTEGER: get_boolean_from_string(self.value) - }.get(self.value_type, self.value) + return get_correctly_typed_value(self.value_type, self.value) def __lt__(self, other): - """ + ''' Kind of counter intuitive but since priority 1 is highest, we want to check if priority is GREATER than the priority of the other feature segment. - """ + ''' return other and self.priority > other.priority @@ -169,26 +175,26 @@ class FeatureState(models.Model): history = HistoricalRecords() class Meta: - unique_together = (("feature", "environment", "identity"), ("feature", "environment", "feature_segment")) + unique_together = (('feature', 'environment', 'identity'), ('feature', 'environment', 'feature_segment')) ordering = ['id'] def __gt__(self, other): - """ + ''' Checks if the current feature state is higher priority that the provided feature state. 
:param other: (FeatureState) the feature state to compare the priority of :return: True if self is higher priority than other - """ + ''' if self.environment != other.environment: - raise ValueError("Cannot compare feature states as they belong to different environments.") + raise ValueError('Cannot compare feature states as they belong to different environments.') if self.feature != other.feature: - raise ValueError("Cannot compare feature states as they belong to different features.") + raise ValueError('Cannot compare feature states as they belong to different features.') if self.identity: # identity is the highest priority so we can always return true if other.identity and self.identity != other.identity: - raise ValueError("Cannot compare feature states as they are for different identities.") + raise ValueError('Cannot compare feature states as they are for different identities.') return True if self.feature_segment: @@ -201,6 +207,9 @@ def __gt__(self, other): return not (other.feature_segment or other.identity) def get_feature_state_value(self): + if self.feature_segment: + return self.feature_segment.get_value() + try: value_type = self.feature_state_value.type except ObjectDoesNotExist: @@ -245,10 +254,12 @@ def save(self, *args, **kwargs): # create default feature state value for feature state # note: this is get_or_create since feature state values are updated separately, and hence if this is set to # update_or_create, it overwrites the FSV with the initial value again - FeatureStateValue.objects.get_or_create( - feature_state=self, - defaults=self._get_defaults() - ) + # Note: feature segments are handled differently as they have their own values + if not self.feature_segment: + FeatureStateValue.objects.get_or_create( + feature_state=self, + defaults=self._get_defaults() + ) # TODO: move this to an async call using celery or django-rq trigger_feature_state_change_webhooks(self) @@ -300,40 +311,40 @@ def 
_get_defaults_for_environment_feature_state(self): @staticmethod def _get_feature_state_key_name(fsv_type): return { - INTEGER: "integer_value", - BOOLEAN: "boolean_value", - STRING: "string_value", - }.get(fsv_type, "string_value") # The default was chosen for backwards compatibility + INTEGER: 'integer_value', + BOOLEAN: 'boolean_value', + STRING: 'string_value', + }.get(fsv_type, 'string_value') # The default was chosen for backwards compatibility def generate_feature_state_value_data(self, value): - """ + ''' Takes the value of a feature state to generate a feature state value and returns dictionary to use for passing into feature state value serializer :param value: feature state value of variable type :return: dictionary to pass directly into feature state value serializer - """ + ''' fsv_type = type(value).__name__ accepted_types = (STRING, INTEGER, BOOLEAN) return { # Default to string if not an anticipate type value to keep backwards compatibility. - "type": fsv_type if fsv_type in accepted_types else STRING, - "feature_state": self.id, + 'type': fsv_type if fsv_type in accepted_types else STRING, + 'feature_state': self.id, self._get_feature_state_key_name(fsv_type): value } def __str__(self): if self.environment is not None: - return "Project %s - Environment %s - Feature %s - Enabled: %r" % \ + return 'Project %s - Environment %s - Feature %s - Enabled: %r' % \ (self.environment.project.name, self.environment.name, self.feature.name, self.enabled) elif self.identity is not None: - return "Identity %s - Feature %s - Enabled: %r" % (self.identity.identifier, + return 'Identity %s - Feature %s - Enabled: %r' % (self.identity.identifier, self.feature.name, self.enabled) else: - return "Feature %s - Enabled: %r" % (self.feature.name, self.enabled) + return 'Feature %s - Enabled: %r' % (self.feature.name, self.enabled) class FeatureStateValue(models.Model): diff --git a/src/features/serializers.py b/src/features/serializers.py index 0f3fdcaaa998..b4ce14accdaf 
100644 --- a/src/features/serializers.py +++ b/src/features/serializers.py @@ -4,8 +4,8 @@ from audit.models import AuditLog, RelatedObjectType, FEATURE_CREATED_MESSAGE, FEATURE_UPDATED_MESSAGE, \ FEATURE_STATE_UPDATED_MESSAGE, IDENTITY_FEATURE_STATE_UPDATED_MESSAGE from environments.models import Identity -from features.utils import get_value_type, get_boolean_from_string, get_integer_from_string, BOOLEAN, INTEGER -from segments.serializers import SegmentSerializerBasic +from features.utils import BOOLEAN, INTEGER, STRING +from .fields import FeatureSegmentValueField from .models import Feature, FeatureState, FeatureStateValue, FeatureSegment @@ -46,46 +46,60 @@ def _create_audit_log(self, instance, created): class FeatureSegmentCreateSerializer(serializers.ModelSerializer): + value = FeatureSegmentValueField(required=False) + class Meta: model = FeatureSegment - fields = ('feature', 'segment', 'priority', 'enabled', 'value') + fields = ('id', 'feature', 'segment', 'environment', 'priority', 'enabled', 'value') + read_only_fields = ('id', 'priority',) def create(self, validated_data): - if validated_data.get('value') or validated_data.get('value') is False: - validated_data['value_type'] = get_value_type(validated_data['value']) + validated_data['value_type'] = self.context.get('value_type', STRING) return super(FeatureSegmentCreateSerializer, self).create(validated_data) - def to_internal_value(self, data): - if data.get('value') or data.get('value') is False: - data['value'] = str(data['value']) - return super(FeatureSegmentCreateSerializer, self).to_internal_value(data) + def update(self, instance, validated_data): + validated_data['value_type'] = self.context.get('value_type', STRING) + return super(FeatureSegmentCreateSerializer, self).update(instance, validated_data) + +class FeatureSegmentQuerySerializer(serializers.Serializer): + environment = serializers.IntegerField() + feature = serializers.IntegerField() -class 
FeatureSegmentSerializer(serializers.ModelSerializer): - segment = SegmentSerializerBasic() + +class FeatureSegmentListSerializer(serializers.ModelSerializer): value = serializers.SerializerMethodField() class Meta: model = FeatureSegment - fields = ('segment', 'priority', 'enabled', 'value') + fields = ('id', 'segment', 'priority', 'environment', 'enabled', 'value') + read_only_fields = ('id', 'segment', 'priority', 'environment', 'enabled', 'value') def get_value(self, instance): - if instance.value: - value_type = get_value_type(instance.value) - if value_type == BOOLEAN: - return get_boolean_from_string(instance.value) - elif value_type == INTEGER: - return get_integer_from_string(instance.value) + return instance.get_value() - return instance.value +class FeatureSegmentChangePrioritiesSerializer(serializers.Serializer): + priority = serializers.IntegerField(min_value=0, help_text="Value to change the feature segment's priority to.") + id = serializers.IntegerField() -class FeatureSerializer(serializers.ModelSerializer): - feature_segments = FeatureSegmentSerializer(many=True) + def create(self, validated_data): + try: + instance = FeatureSegment.objects.get(id=validated_data['id']) + return self.update(instance, validated_data) + except FeatureSegment.DoesNotExist: + raise ValidationError("No feature segment exists with id: %s" % validated_data['id']) + def update(self, instance, validated_data): + instance.to(validated_data['priority']) + return instance + + +class FeatureSerializer(serializers.ModelSerializer): class Meta: model = Feature - fields = "__all__" + fields = ('id', 'name', 'created_date', 'initial_value', 'description', 'default_enabled', 'type') + writeonly_fields = ('initial_value', 'default_enabled') class FeatureStateSerializerFull(serializers.ModelSerializer): @@ -146,10 +160,6 @@ def get_identity_identifier(self, instance): return instance.identity.identifier if instance.identity else None -class 
FeatureStateSerializerFullWithIdentityAndSegment(FeatureStateSerializerFullWithIdentity): - feature_segment = FeatureSegmentSerializer() - - class FeatureStateSerializerCreate(serializers.ModelSerializer): class Meta: model = FeatureState diff --git a/src/features/signals.py b/src/features/signals.py new file mode 100644 index 000000000000..fa515a57d09d --- /dev/null +++ b/src/features/signals.py @@ -0,0 +1,32 @@ +from django.db import transaction +from django.dispatch import receiver +from simple_history.signals import post_create_historical_record + +from audit.models import AuditLog, RelatedObjectType, FEATURE_SEGMENT_UPDATED_MESSAGE +from projects.models import Project +from util.logging import get_logger +# noinspection PyUnresolvedReferences +from .models import HistoricalFeatureSegment + +logger = get_logger(__name__) + + +@receiver(post_create_historical_record, sender=HistoricalFeatureSegment) +def create_feature_segment_audit_log(instance, history_user, history_instance, **kwargs): + deleted = history_instance.history_type == "-" + + # if the feature segment has been deleted, this could have been a cascade delete from the project being deleted + # if it is, then we can skip creating the audit log. 
+ project = instance.feature.project + with transaction.atomic(): + if deleted and not Project.objects.filter(id=project.id).exists(): + return + + message = FEATURE_SEGMENT_UPDATED_MESSAGE % (instance.feature.name, instance.environment.name) + AuditLog.create_record( + obj=instance.feature, + obj_type=RelatedObjectType.FEATURE, + log_message=message, + author=history_user, + project=instance.feature.project + ) diff --git a/src/features/tasks.py b/src/features/tasks.py index cf2cf1300a98..58ab9d00b8c7 100644 --- a/src/features/tasks.py +++ b/src/features/tasks.py @@ -69,10 +69,16 @@ def _get_feature_state_webhook_data(feature_state, previous=False): "description": feature.description, "initial_value": feature.initial_value, "name": feature.name, - "project": feature.project_id, + "project": { + "id": feature.project_id, + "name": feature.project.name, + }, "type": feature.type, }, - "environment": feature_state.environment_id, + "environment": { + "id": feature_state.environment_id, + "name": feature_state.environment.name, + }, "identity": feature_state.identity_id, "identity_identifier": identity_identifier, "feature_segment": None, # default to none, will be updated below if it exists diff --git a/src/features/tests/test_fields.py b/src/features/tests/test_fields.py new file mode 100644 index 000000000000..698f523f5aaf --- /dev/null +++ b/src/features/tests/test_fields.py @@ -0,0 +1,25 @@ +import pytest +from rest_framework import serializers + +from features.fields import FeatureSegmentValueField +from features.utils import STRING, BOOLEAN, INTEGER + + +@pytest.mark.parametrize("value, expected_type", [ + ["string", STRING], + [True, BOOLEAN], + [False, BOOLEAN], + [123, INTEGER], +]) +def test_feature_segment_field_to_representation(value, expected_type): + # Given + class MySerializer(serializers.Serializer): + my_field = FeatureSegmentValueField() + + # When + serializer = MySerializer() + internal_value = serializer.to_internal_value({"my_field": value}) + 
+ # Then + assert internal_value['my_field'] == str(value) + assert serializer.context['value_type'] == expected_type diff --git a/src/features/tests/test_helpers.py b/src/features/tests/test_helpers.py new file mode 100644 index 000000000000..afb6f38e1c44 --- /dev/null +++ b/src/features/tests/test_helpers.py @@ -0,0 +1,19 @@ +import pytest + +from features.helpers import get_correctly_typed_value +from features.utils import INTEGER, BOOLEAN, STRING + + +@pytest.mark.parametrize( + "value_type, string_value, expected_value", + ( + (INTEGER, "123", 123), + (BOOLEAN, "True", True), + (BOOLEAN, "False", False), + (STRING, "my_string", "my_string"), + (STRING, "True", "True"), + (STRING, "False", "False"), + ), +) +def test_get_correctly_typed_value(value_type, string_value, expected_value): + assert get_correctly_typed_value(value_type, string_value) == expected_value diff --git a/src/features/tests/test_migrations.py b/src/features/tests/test_migrations.py new file mode 100644 index 000000000000..c3f0e4238700 --- /dev/null +++ b/src/features/tests/test_migrations.py @@ -0,0 +1,94 @@ + + +def test_migrate_feature_segments_forward(migrator): + # Given - the migration state is at 0017 (before the migration we want to test) + old_state = migrator.apply_initial_migration(('features', '0017_auto_20200607_1005')) + OldFeatureSegment = old_state.apps.get_model('features', 'FeatureSegment') + OldFeatureState = old_state.apps.get_model('features', 'FeatureState') + + # use the migration state to get the classes we need for test data + Feature = old_state.apps.get_model('features', 'Feature') + Organisation = old_state.apps.get_model('organisations', 'Organisation') + Project = old_state.apps.get_model('projects', 'Project') + Segment = old_state.apps.get_model('segments', 'Segment') + Environment = old_state.apps.get_model('environments', 'Environment') + + # setup some test data + organisation = Organisation.objects.create(name='Test Organisation') + project = 
Project.objects.create(name='Test project', organisation=organisation) + feature = Feature.objects.create(name='Test feature', project=project) + segment_1 = Segment.objects.create(name='Test segment 1', project=project) + segment_2 = Segment.objects.create(name='Test segment 2', project=project) + environment_1 = Environment.objects.create(name='Test environment 1', project=project) + environment_2 = Environment.objects.create(name='Test environment 2', project=project) + + # create 2 feature segment without an environment and with enabled overridden to true + feature_segment_1 = OldFeatureSegment.objects.create(feature=feature, segment=segment_1, enabled=True, priority=0) + feature_segment_2 = OldFeatureSegment.objects.create(feature=feature, segment=segment_2, enabled=True, priority=1) + + # mimick the creation of the feature states that would have happened when save is called on the model (but doesn't + # happen because we're using the migrator models) + OldFeatureState.objects.create(feature=feature, environment=environment_1, feature_segment=feature_segment_1) + OldFeatureState.objects.create(feature=feature, environment=environment_2, feature_segment=feature_segment_1) + OldFeatureState.objects.create(feature=feature, environment=environment_1, feature_segment=feature_segment_2) + OldFeatureState.objects.create(feature=feature, environment=environment_2, feature_segment=feature_segment_2) + + # When + new_state = migrator.apply_tested_migration(('features', '0018_auto_20200607_1057')) + NewFeatureSegment = new_state.apps.get_model('features', 'FeatureSegment') + NewFeatureState = new_state.apps.get_model('features', 'FeatureState') + + # Then - there are 4 feature segments, for each feature segment, create 1 for each environment + assert NewFeatureSegment.objects.count() == 4 + assert NewFeatureSegment.objects.filter( + segment_id=segment_1.id, environment__pk=environment_1.pk, enabled=True + ).exists() + assert NewFeatureSegment.objects.filter( + 
segment_id=segment_1.id, environment__pk=environment_2.pk, enabled=True + ).exists() + assert NewFeatureSegment.objects.filter( + segment_id=segment_2.id, environment__pk=environment_1.pk, enabled=True + ).exists() + assert NewFeatureSegment.objects.filter( + segment_id=segment_2.id, environment__pk=environment_2.pk, enabled=True + ).exists() + assert not NewFeatureSegment.objects.filter(environment__isnull=True).exists() + + # verify that the feature states are created / updated with the new feature segments + assert NewFeatureState.objects.values('feature_segment').distinct().count() == 4 + + +def test_migrate_feature_segments_reverse(migrator): + # Given - migration state is at 0018, after the migration we want to test in reverse + old_state = migrator.apply_initial_migration(('features', '0018_auto_20200607_1057')) + OldFeatureSegment = old_state.apps.get_model('features', 'FeatureSegment') + + # use the migration state to get the classes we need for test data + Feature = old_state.apps.get_model('features', 'Feature') + Organisation = old_state.apps.get_model('organisations', 'Organisation') + Project = old_state.apps.get_model('projects', 'Project') + Segment = old_state.apps.get_model('segments', 'Segment') + Environment = old_state.apps.get_model('environments', 'Environment') + + # setup some test data + organisation = Organisation.objects.create(name='Test Organisation') + project = Project.objects.create(name='Test project', organisation=organisation) + feature = Feature.objects.create(name='Test feature', project=project) + segment = Segment.objects.create(name='Test segment', project=project) + environment_1 = Environment.objects.create(name='Test environment 1', project=project) + environment_2 = Environment.objects.create(name='Test environment 2', project=project) + + # create a feature segment for each environment + OldFeatureSegment.objects.create(feature=feature, segment=segment, environment=environment_1, enabled=True, priority=0) + 
OldFeatureSegment.objects.create(feature=feature, segment=segment, environment=environment_2, enabled=False, priority=0) + + # When + new_state = migrator.apply_tested_migration(('features', '0017_auto_20200607_1005')) + NewFeatureSegment = new_state.apps.get_model('features', 'FeatureSegment') + + # Then - there is only one feature segment left + assert NewFeatureSegment.objects.count() == 1 + # Note that it's not possible to determine which feature segment to keep so we can't test that it keeps the + # correct value. Just verify that the essential data is the same. + assert NewFeatureSegment.objects.first().feature.pk == feature.pk + assert NewFeatureSegment.objects.first().segment.pk == segment.pk diff --git a/src/features/tests/test_models.py b/src/features/tests/test_models.py index e60257aa3680..47fcfeb1292a 100644 --- a/src/features/tests/test_models.py +++ b/src/features/tests/test_models.py @@ -6,7 +6,7 @@ from django.test import TestCase from environments.models import Environment, Identity, Trait, STRING -from features.models import Feature, FeatureState, CONFIG, FeatureSegment, FeatureStateValue +from features.models import Feature, FeatureState, CONFIG, FeatureSegment, FeatureStateValue, FLAG from features.utils import INTEGER, BOOLEAN from organisations.models import Organisation from projects.models import Project @@ -121,51 +121,65 @@ def setUp(self) -> None: self.not_matching_identity = Identity.objects.create(identifier='user_2', environment=self.environment) - def test_can_create_segment_override_for_string_remote_config(self): + def test_feature_segment_save_updates_string_feature_state_value_for_environment(self): # Given overridden_value = 'overridden value' - feature_segment = FeatureSegment.objects.create(feature=self.remote_config, segment=self.segment, priority=1) - FeatureStateValue.objects.filter( - feature_state__feature_segment=feature_segment).update(type=STRING, string_value=overridden_value) + feature_segment = FeatureSegment( + 
feature=self.remote_config, + segment=self.segment, + environment=self.environment, + value=overridden_value, + value_type=STRING + ) # When - feature_states = self.matching_identity.get_all_feature_states() + feature_segment.save() # Then - feature_state = next(filter(lambda fs: fs.feature == self.remote_config, feature_states)) + feature_state = FeatureState.objects.get(feature_segment=feature_segment, environment=self.environment) assert feature_state.get_feature_state_value() == overridden_value - def test_can_create_segment_override_for_integer_remote_config(self): + def test_feature_segment_save_updates_integer_feature_state_value_for_environment(self): # Given overridden_value = 12 - feature_segment = FeatureSegment.objects.create(feature=self.remote_config, segment=self.segment, priority=1) - FeatureStateValue.objects.filter( - feature_state__feature_segment=feature_segment).update(type=INTEGER, integer_value=overridden_value) + feature_segment = FeatureSegment( + feature=self.remote_config, + segment=self.segment, + environment=self.environment, + value=str(overridden_value), + value_type=INTEGER + ) # When - feature_states = self.matching_identity.get_all_feature_states() + feature_segment.save() # Then - feature_state = next(filter(lambda fs: fs.feature == self.remote_config, feature_states)) + feature_state = FeatureState.objects.get(feature_segment=feature_segment, environment=self.environment) assert feature_state.get_feature_state_value() == overridden_value - def test_can_create_segment_override_for_boolean_remote_config(self): + def test_feature_segment_save_updates_boolean_feature_state_value_for_environment(self): # Given overridden_value = False - feature_segment = FeatureSegment.objects.create(feature=self.remote_config, segment=self.segment, priority=1) - FeatureStateValue.objects.filter( - feature_state__feature_segment=feature_segment).update(type=BOOLEAN, boolean_value=overridden_value) + feature_segment = FeatureSegment( + 
feature=self.remote_config, + segment=self.segment, + environment=self.environment, + value=str(overridden_value), + value_type=BOOLEAN + ) # When - feature_states = self.matching_identity.get_all_feature_states() + feature_segment.save() # Then - feature_state = next(filter(lambda fs: fs.feature == self.remote_config, feature_states)) + feature_state = FeatureState.objects.get(feature_segment=feature_segment, environment=self.environment) assert feature_state.get_feature_state_value() == overridden_value def test_feature_state_enabled_value_is_updated_when_feature_segment_updated(self): # Given - feature_segment = FeatureSegment.objects.create(feature=self.remote_config, segment=self.segment, priority=1) + feature_segment = FeatureSegment.objects.create( + feature=self.remote_config, segment=self.segment, environment=self.environment, priority=1 + ) feature_state = FeatureState.objects.get(feature_segment=feature_segment, enabled=False) # When @@ -178,11 +192,14 @@ def test_feature_state_enabled_value_is_updated_when_feature_segment_updated(sel def test_feature_segment_is_less_than_other_if_priority_lower(self): # Given - feature_segment_1 = FeatureSegment.objects.create(feature=self.remote_config, segment=self.segment, priority=1) + feature_segment_1 = FeatureSegment.objects.create( + feature=self.remote_config, segment=self.segment, environment=self.environment, priority=1 + ) another_segment = Segment.objects.create(name='Another segment', project=self.project) - feature_segment_2 = FeatureSegment.objects.create(feature=self.remote_config, segment=another_segment, - priority=2) + feature_segment_2 = FeatureSegment.objects.create( + feature=self.remote_config, segment=another_segment, environment=self.environment, priority=2 + ) # When result = feature_segment_2 < feature_segment_1 @@ -190,6 +207,62 @@ def test_feature_segment_is_less_than_other_if_priority_lower(self): # Then assert result + def test_feature_segments_are_created_with_correct_priority(self): + # 
Given - 5 feature segments + + # 2 with the same feature, environment but a different segment + another_segment = Segment.objects.create(name='Another segment', project=self.project) + feature_segment_1 = FeatureSegment.objects.create( + feature=self.remote_config, segment=self.segment, environment=self.environment + ) + + feature_segment_2 = FeatureSegment.objects.create( + feature=self.remote_config, segment=another_segment, environment=self.environment + ) + + # 1 with the same feature but a different environment + another_environment = Environment.objects.create(name='Another environment', project=self.project) + feature_segment_3 = FeatureSegment.objects.create( + feature=self.remote_config, segment=self.segment, environment=another_environment + ) + + # 1 with the same environment but a different feature + another_feature = Feature.objects.create(name='Another feature', project=self.project, type=FLAG) + feature_segment_4 = FeatureSegment.objects.create( + feature=another_feature, segment=self.segment, environment=self.environment + ) + + # 1 with a different feature and a different environment + feature_segment_5 = FeatureSegment.objects.create( + feature=another_feature, segment=self.segment, environment=another_environment + ) + + # Then + # the two with the same feature and environment are created with ascending priorities + assert feature_segment_1.priority == 0 + assert feature_segment_2.priority == 1 + + # the ones with different combinations of features and environments are all created with a priority of 0 + assert feature_segment_3.priority == 0 + assert feature_segment_4.priority == 0 + assert feature_segment_5.priority == 0 + + def test_feature_state_value_for_feature_segments(self): + # Given + segment = Segment.objects.create(name="Test Segment", project=self.project) + + # When + feature_segment = FeatureSegment.objects.create( + segment=segment, feature=self.remote_config, environment=self.environment, value="test", value_type=STRING + ) + + # 
Then + feature_state = FeatureState.objects.get(feature=self.remote_config, feature_segment=feature_segment) + assert not FeatureStateValue.objects.filter(feature_state=feature_state).exists() + + # and the feature_state value is correct + assert feature_state.get_feature_state_value() == feature_segment.get_value() + @pytest.mark.django_db class FeatureStateTest(TestCase): @@ -220,8 +293,12 @@ def test_feature_state_gt_operator(self): identity = Identity.objects.create(identifier='test_identity', environment=self.environment) segment_1 = Segment.objects.create(name='Test Segment 1', project=self.project) segment_2 = Segment.objects.create(name='Test Segment 2', project=self.project) - feature_segment_p1 = FeatureSegment.objects.create(segment=segment_1, feature=self.feature, priority=1) - feature_segment_p2 = FeatureSegment.objects.create(segment=segment_2, feature=self.feature, priority=2) + feature_segment_p1 = FeatureSegment.objects.create( + segment=segment_1, feature=self.feature, environment=self.environment, priority=1 + ) + feature_segment_p2 = FeatureSegment.objects.create( + segment=segment_2, feature=self.feature, environment=self.environment, priority=2 + ) # When identity_state = FeatureState.objects.create(identity=identity, feature=self.feature, @@ -296,3 +373,4 @@ def test_save_calls_trigger_webhooks(self, mock_trigger_webhooks): # Then mock_trigger_webhooks.assert_called_with(feature_state) + diff --git a/src/features/tests/test_views.py b/src/features/tests/test_views.py index 750ff553eed6..967929ffa48b 100644 --- a/src/features/tests/test_views.py +++ b/src/features/tests/test_views.py @@ -1,5 +1,5 @@ import json -from unittest import TestCase +from unittest import TestCase, mock import pytest from django.urls import reverse @@ -10,12 +10,16 @@ IDENTITY_FEATURE_STATE_DELETED_MESSAGE from environments.models import Environment, Identity from features.models import Feature, FeatureState, FeatureSegment, CONFIG, FeatureStateValue +from 
features.utils import INTEGER, BOOLEAN, STRING from organisations.models import Organisation, OrganisationRole from projects.models import Project from segments.models import Segment from users.models import FFAdminUser from util.tests import Helper +# patch this function as it's triggering extra threads and causing errors +mock.patch("features.models.trigger_feature_state_change_webhooks").start() + @pytest.mark.django_db class ProjectFeatureTestCase(TestCase): @@ -158,23 +162,6 @@ def test_audit_log_created_when_feature_updated(self): # Then assert AuditLog.objects.filter(related_object_type=RelatedObjectType.FEATURE.name).count() == 1 - def test_audit_log_created_when_feature_segments_updated(self): - # Given - segment = Segment.objects.create(name='Test segment', project=self.project) - feature = Feature.objects.create(name='Test feature', project=self.project) - url = reverse('api-v1:projects:project-features-segments', args=[self.project.id, feature.id]) - data = [{ - 'segment': segment.id, - 'priority': 1, - 'enabled': True - }] - - # When - self.client.post(url, data=json.dumps(data), content_type='application/json') - - # Then - assert AuditLog.objects.filter(related_object_type=RelatedObjectType.FEATURE.name).count() == 1 - def test_audit_log_created_when_feature_state_created_for_identity(self): # Given feature = Feature.objects.create(name='Test feature', project=self.project) @@ -259,79 +246,195 @@ def setUp(self) -> None: self.feature = Feature.objects.create(project=self.project, name='Test feature') self.segment = Segment.objects.create(project=self.project, name='Test segment') - def test_when_feature_segments_updated_then_feature_states_updated_for_each_environment(self): + def test_list_feature_segments(self): # Given - url = reverse('api-v1:projects:project-features-segments', args=[self.project.id, self.feature.id]) - FeatureSegment.objects.create(segment=self.segment, feature=self.feature, enabled=False) - data = [{ - 'segment': 
self.segment.id, - 'priority': 1, - 'enabled': True - }] + base_url = reverse('api-v1:features:feature-segment-list') + url = f"{base_url}?environment={self.environment_1.id}&feature={self.feature.id}" + segment_2 = Segment.objects.create(project=self.project, name='Segment 2') + segment_3 = Segment.objects.create(project=self.project, name='Segment 3') + + FeatureSegment.objects.create( + feature=self.feature, segment=self.segment, environment=self.environment_1, value="123", value_type=INTEGER + ) + FeatureSegment.objects.create( + feature=self.feature, segment=segment_2, environment=self.environment_1, value="True", value_type=BOOLEAN + ) + FeatureSegment.objects.create( + feature=self.feature, segment=segment_3, environment=self.environment_1, value="str", value_type=STRING + ) + FeatureSegment.objects.create(feature=self.feature, segment=self.segment, environment=self.environment_2) # When - self.client.post(url, data=json.dumps(data), content_type='application/json') + response = self.client.get(url) # Then - for env in Environment.objects.all(): - assert FeatureState.objects.get(environment=env, feature_segment__segment=self.segment).enabled + assert response.status_code == status.HTTP_200_OK + response_json = response.json() + assert response_json["count"] == 3 + for result in response_json["results"]: + assert result["environment"] == self.environment_1.id - def test_when_feature_segments_created_with_integer_value_then_feature_states_created_with_integer_value(self): + def test_create_feature_segment_with_integer_value(self): # Given - url = reverse('api-v1:projects:project-features-segments', args=[self.project.id, self.feature.id]) - value = 1 + data = { + "feature": self.feature.id, + "segment": self.segment.id, + "environment": self.environment_1.id, + "value": 123 + } + url = reverse("api-v1:features:feature-segment-list") - data = [{ - 'segment': self.segment.id, - 'priority': 1, - 'value': value - }] + # When + response = self.client.post(url, 
data=json.dumps(data), content_type='application/json') + + # Then + assert response.status_code == status.HTTP_201_CREATED + response_json = response.json() + assert response_json["id"] + assert response_json["value"] == 123 + + def test_create_feature_segment_with_boolean_value(self): + # Given + data = { + "feature": self.feature.id, + "segment": self.segment.id, + "environment": self.environment_1.id, + "value": True + } + url = reverse("api-v1:features:feature-segment-list") # When - self.client.post(url, data=json.dumps(data), content_type='application/json') + response = self.client.post(url, data=json.dumps(data), content_type='application/json') # Then - for env in Environment.objects.all(): - fs = FeatureState.objects.get(environment=env, feature_segment__segment=self.segment) - assert fs.get_feature_state_value() == value + assert response.status_code == status.HTTP_201_CREATED + response_json = response.json() + assert response_json["id"] + assert response_json["value"] is True - def test_when_feature_segments_created_with_boolean_value_then_feature_states_created_with_boolean_value(self): + def test_create_feature_segment_with_string_value(self): # Given - url = reverse('api-v1:projects:project-features-segments', args=[self.project.id, self.feature.id]) - value = False + data = { + "feature": self.feature.id, + "segment": self.segment.id, + "environment": self.environment_1.id, + "value": "string" + } + url = reverse("api-v1:features:feature-segment-list") - data = [{ - 'segment': self.segment.id, - 'priority': 1, - 'value': value - }] + # When + response = self.client.post(url, data=json.dumps(data), content_type='application/json') + + # Then + assert response.status_code == status.HTTP_201_CREATED + response_json = response.json() + assert response_json["id"] + assert response_json["value"] == "string" + + def test_create_feature_segment_without_value(self): + # Given + data = { + "feature": self.feature.id, + "segment": self.segment.id, + 
"environment": self.environment_1.id, + "enabled": True + } + url = reverse("api-v1:features:feature-segment-list") # When - self.client.post(url, data=json.dumps(data), content_type='application/json') + response = self.client.post(url, data=json.dumps(data), content_type='application/json') # Then - for env in Environment.objects.all(): - fs = FeatureState.objects.get(environment=env, feature_segment__segment=self.segment) - assert fs.get_feature_state_value() == value + assert response.status_code == status.HTTP_201_CREATED + response_json = response.json() + assert response_json["id"] + assert response_json["enabled"] is True + + def test_update_feature_segment(self): + # Given + feature_segment = FeatureSegment.objects.create( + feature=self.feature, + environment=self.environment_1, + segment=self.segment, + value="123", + value_type=INTEGER + ) + url = reverse("api-v1:features:feature-segment-detail", args=[feature_segment.id]) + data = { + "value": 456 + } + + # When + response = self.client.patch(url, data=json.dumps(data), content_type='application/json') + + # Then + assert response.status_code == status.HTTP_200_OK + response_json = response.json() + assert response_json["value"] == 456 - def test_when_feature_segments_created_with_string_value_then_feature_states_created_with_string_value(self): + def test_delete_feature_segment(self): # Given - url = reverse('api-v1:projects:project-features-segments', args=[self.project.id, self.feature.id]) - value = 'my_string' + feature_segment = FeatureSegment.objects.create( + feature=self.feature, environment=self.environment_1, segment=self.segment + ) + url = reverse("api-v1:features:feature-segment-detail", args=[feature_segment.id]) - data = [{ + # When + response = self.client.delete(url) + + # Then + assert response.status_code == status.HTTP_204_NO_CONTENT + assert not FeatureSegment.objects.filter(id=feature_segment.id).exists() + + def test_audit_log_created_when_feature_segment_created(self): + # 
Given + url = reverse('api-v1:features:feature-segment-list') + data = { 'segment': self.segment.id, - 'priority': 1, - 'value': value - }] + 'feature': self.feature.id, + 'environment': self.environment_1.id, + 'enabled': True + } # When - self.client.post(url, data=json.dumps(data), content_type='application/json') + response = self.client.post(url, data=data) # Then - for env in Environment.objects.all(): - fs = FeatureState.objects.get(environment=env, feature_segment__segment=self.segment) - assert fs.get_feature_state_value() == value + assert response.status_code == status.HTTP_201_CREATED + assert AuditLog.objects.filter(related_object_type=RelatedObjectType.FEATURE.name).count() == 1 + + def test_priority_of_multiple_feature_segments(self): + # Given + url = reverse('api-v1:features:feature-segment-update-priorities') + + # another segment and 2 feature segments for the same feature / the 2 segments + another_segment = Segment.objects.create(name='Another segment', project=self.project) + feature_segment_default_data = {"environment": self.environment_1, "feature": self.feature} + feature_segment_1 = FeatureSegment.objects.create(segment=self.segment, **feature_segment_default_data) + feature_segment_2 = FeatureSegment.objects.create(segment=another_segment, **feature_segment_default_data) + + # reorder the feature segments + assert feature_segment_1.priority == 0 + assert feature_segment_2.priority == 1 + data = [ + { + 'id': feature_segment_1.id, + 'priority': 1, + }, + { + 'id': feature_segment_2.id, + 'priority': 0, + }, + ] + + # When + response = self.client.post(url, data=json.dumps(data), content_type='application/json') + + # Then the segments are reordered + assert response.status_code == status.HTTP_200_OK + json_response = response.json() + assert json_response[0]['id'] == feature_segment_1.id + assert json_response[1]['id'] == feature_segment_2.id @pytest.mark.django_db() @@ -407,7 +510,7 @@ def setUp(self) -> None: self.environment = 
Environment.objects.create(name='Test environment', project=self.project) self.feature = Feature.objects.create(name='Test feature', project=self.project, type=CONFIG, initial_value=self.environment_fs_value) segment = Segment.objects.create(name='Test segment', project=self.project) - FeatureSegment.objects.create(segment=segment, feature=self.feature, value=self.segment_fs_value) + FeatureSegment.objects.create(segment=segment, feature=self.feature, value=self.segment_fs_value, environment=self.environment) identity = Identity.objects.create(identifier='test', environment=self.environment) identity_feature_state = FeatureState.objects.create(identity=identity, environment=self.environment, feature=self.feature) FeatureStateValue.objects.filter(feature_state=identity_feature_state).update(string_value=self.identity_fs_value) diff --git a/src/features/urls.py b/src/features/urls.py index 55c002ff3af6..9ad5b0821bba 100644 --- a/src/features/urls.py +++ b/src/features/urls.py @@ -1,14 +1,16 @@ from django.conf.urls import url, include +from django.urls import path from rest_framework_nested import routers -from features.views import FeatureStateCreateViewSet +from features.views import FeatureStateCreateViewSet, FeatureSegmentViewSet router = routers.DefaultRouter() -router.register(r'', FeatureStateCreateViewSet, basename="featurestates") +router.register(r'featurestates', FeatureStateCreateViewSet, basename='featurestates') +router.register(r'feature-segments', FeatureSegmentViewSet, basename='feature-segment') app_name = "features" urlpatterns = [ - url(r'^featurestates', include(router.urls)) + path('', include(router.urls)) ] diff --git a/src/features/views.py b/src/features/views.py index 014aef47d0c7..a69187aa2bba 100644 --- a/src/features/views.py +++ b/src/features/views.py @@ -3,7 +3,6 @@ import coreapi from django.conf import settings from django.core.cache import caches -from django.db import transaction from django.utils.decorators import 
method_decorator from drf_yasg import openapi from drf_yasg.utils import swagger_auto_schema @@ -14,9 +13,7 @@ from rest_framework.response import Response from rest_framework.schemas import AutoSchema -from analytics.track import track_event -from audit.models import AuditLog, RelatedObjectType, FEATURE_SEGMENT_UPDATED_MESSAGE, \ - IDENTITY_FEATURE_STATE_DELETED_MESSAGE +from audit.models import AuditLog, RelatedObjectType, IDENTITY_FEATURE_STATE_DELETED_MESSAGE from environments.authentication import EnvironmentKeyAuthentication from environments.models import Environment, Identity from environments.permissions import EnvironmentKeyPermissions, NestedEnvironmentPermissions @@ -25,7 +22,8 @@ from .permissions import FeaturePermissions, FeatureStatePermissions from .serializers import FeatureStateSerializerBasic, FeatureStateSerializerFull, \ FeatureStateSerializerCreate, CreateFeatureSerializer, FeatureSerializer, \ - FeatureStateValueSerializer, FeatureSegmentCreateSerializer, FeatureStateSerializerWithIdentity + FeatureStateValueSerializer, FeatureSegmentCreateSerializer, FeatureStateSerializerWithIdentity, \ + FeatureSegmentListSerializer, FeatureSegmentQuerySerializer, FeatureSegmentChangePrioritiesSerializer logger = logging.getLogger() logger.setLevel(logging.INFO) @@ -57,34 +55,6 @@ def create(self, request, *args, **kwargs): return super().create(request, *args, **kwargs) - @action(detail=True, methods=["POST"]) - @transaction.atomic - def segments(self, request, *args, **kwargs): - feature = self.get_object() - # delete existing segments to avoid priority clashes, note method is transactional so will roll back on error - FeatureSegment.objects.filter(feature=feature).delete() - - self._create_feature_segments(feature, request.data) - self._create_feature_segments_audit_log() - - return Response(data=FeatureSerializer(instance=feature).data, status=status.HTTP_200_OK) - - @staticmethod - def _create_feature_segments(feature, feature_segment_data): - for 
feature_segment in feature_segment_data: - feature_segment["feature"] = feature.id - fs_serializer = FeatureSegmentCreateSerializer(data=feature_segment) - if fs_serializer.is_valid(raise_exception=True): - fs_serializer.save() - - def _create_feature_segments_audit_log(self): - feature = self.get_object() - message = FEATURE_SEGMENT_UPDATED_MESSAGE % feature.name - AuditLog.objects.create(author=self.request.user, related_object_id=feature.id, - related_object_type=RelatedObjectType.FEATURE.name, - project=feature.project, - log=message) - @method_decorator(name='list', decorator=swagger_auto_schema( manual_parameters=[ @@ -385,3 +355,48 @@ def organisation_has_got_feature(request, organisation): organisation.has_requested_features = True organisation.save() return True + + +@method_decorator(name='list', decorator=swagger_auto_schema(query_serializer=FeatureSegmentQuerySerializer())) +@method_decorator( + name='update_priorities', decorator=swagger_auto_schema(responses={200: FeatureSegmentListSerializer(many=True)}) +) +class FeatureSegmentViewSet( + mixins.ListModelMixin, + mixins.CreateModelMixin, + mixins.UpdateModelMixin, + mixins.DestroyModelMixin, + viewsets.GenericViewSet +): + def get_queryset(self): + permitted_projects = self.request.user.get_permitted_projects(['VIEW_PROJECT']) + queryset = FeatureSegment.objects.filter(feature__project__in=permitted_projects) + + if self.action == 'list': + filter_serializer = FeatureSegmentQuerySerializer(data=self.request.query_params) + filter_serializer.is_valid(raise_exception=True) + return queryset.filter(**filter_serializer.data) + + return queryset + + def get_serializer_class(self): + if self.action in ['create', 'update', 'partial_update']: + return FeatureSegmentCreateSerializer + + if self.action == 'update_priorities': + return FeatureSegmentChangePrioritiesSerializer + + return FeatureSegmentListSerializer + + def get_serializer(self, *args, **kwargs): + if self.action == 'update_priorities': + # 
update the serializer kwargs to ensure docs here are correct + kwargs = {**kwargs, 'many': True, 'partial': True} + return super(FeatureSegmentViewSet, self).get_serializer(*args, **kwargs) + + @action(detail=False, methods=['POST'], url_path='update-priorities') + def update_priorities(self, request, *args, **kwargs): + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + updated_instances = serializer.save() + return Response(FeatureSegmentListSerializer(instance=updated_instances, many=True).data) diff --git a/src/organisations/migrations/0021_auto_20200619_1555.py b/src/organisations/migrations/0021_auto_20200619_1555.py new file mode 100644 index 000000000000..3dbf35701493 --- /dev/null +++ b/src/organisations/migrations/0021_auto_20200619_1555.py @@ -0,0 +1,19 @@ +# Generated by Django 2.2.12 on 2020-06-19 15:55 + +from django.conf import settings +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('organisations', '0020_auto_20200222_1159'), + ] + + operations = [ + migrations.AlterUniqueTogether( + name='userorganisation', + unique_together={('user', 'organisation')}, + ), + ] diff --git a/src/organisations/models.py b/src/organisations/models.py index 91d51e33ea9a..739aac3d3311 100644 --- a/src/organisations/models.py +++ b/src/organisations/models.py @@ -59,6 +59,9 @@ class UserOrganisation(models.Model): date_joined = models.DateTimeField(auto_now_add=True) role = models.CharField(max_length=50, choices=organisation_roles) + class Meta: + unique_together = ('user', 'organisation',) + class Subscription(models.Model): organisation = models.OneToOneField(Organisation, on_delete=models.CASCADE, related_name='subscription') diff --git a/src/organisations/tests/test_views.py b/src/organisations/tests/test_views.py index 981881a7cfef..186a1e90c508 100644 --- a/src/organisations/tests/test_views.py +++ 
b/src/organisations/tests/test_views.py @@ -10,7 +10,11 @@ from rest_framework import status from rest_framework.test import APIClient +from environments.models import Environment +from features.models import Feature, FeatureSegment from organisations.models import Organisation, OrganisationRole, Subscription +from projects.models import Project +from segments.models import Segment from users.models import Invite, FFAdminUser from util.tests import Helper @@ -254,6 +258,26 @@ def test_update_subscription_gets_subscription_data_from_chargebee(self, mock_ge assert organisation.has_subscription() and organisation.subscription.subscription_id == subscription_id and \ organisation.subscription.customer_id == customer_id + @pytest.mark.skip("Skip for now so we can release per env segment configuration.") + def test_delete_organisation(self): + # GIVEN an organisation with a project, environment, feature, segment and feature segment + organisation = Organisation.objects.create(name="Test organisation") + self.user.add_organisation(organisation, OrganisationRole.ADMIN) + project = Project.objects.create(name="Test project", organisation=organisation) + environment = Environment.objects.create(name="Test environment", project=project) + feature = Feature.objects.create(name="Test feature", project=project) + segment = Segment.objects.create(name="Test segment", project=project) + FeatureSegment.objects.create(feature=feature, segment=segment, environment=environment) + + from audit.models import AuditLog + + # WHEN + delete_organisation_url = reverse("api-v1:organisations:organisation-detail", args=[organisation.id]) + response = self.client.delete(delete_organisation_url) + + # THEN + assert response.status_code == status.HTTP_204_NO_CONTENT + @pytest.mark.django_db class ChargeBeeWebhookTestCase(TestCase): diff --git a/src/segments/admin.py b/src/segments/admin.py index e8ade74bbcb7..2e25b2119e10 100644 --- a/src/segments/admin.py +++ b/src/segments/admin.py @@ -1,3 +1,4 
@@ +from django.conf import settings from django.contrib import admin from segments.models import SegmentRule, Condition, Segment @@ -15,15 +16,18 @@ class ConditionsInline(admin.StackedInline): show_change_link = True -@admin.register(Segment) class SegmentAdmin(admin.ModelAdmin): inlines = [ RulesInline ] -@admin.register(SegmentRule) -class SegmentRule(admin.ModelAdmin): +class SegmentRuleAdmin(admin.ModelAdmin): inlines = [ ConditionsInline ] + + +if settings.ENV in ('local', 'dev'): + admin.site.register(Segment, SegmentAdmin) + admin.site.register(SegmentRule, SegmentRuleAdmin) diff --git a/src/users/auth_type.py b/src/users/auth_type.py index ec5a20f07deb..7824932665ae 100644 --- a/src/users/auth_type.py +++ b/src/users/auth_type.py @@ -3,4 +3,5 @@ class AuthType(Enum): GOOGLE = "GOOGLE" + GITHUB = "GITHUB" EMAIL = "EMAIL" diff --git a/src/users/migrations/0027_ffadminuser_github_user_id.py b/src/users/migrations/0027_ffadminuser_github_user_id.py new file mode 100644 index 000000000000..fd3a0439684a --- /dev/null +++ b/src/users/migrations/0027_ffadminuser_github_user_id.py @@ -0,0 +1,18 @@ +# Generated by Django 2.2.13 on 2020-06-20 20:53 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('users', '0026_ffadminuser_google_user_id'), + ] + + operations = [ + migrations.AddField( + model_name='ffadminuser', + name='github_user_id', + field=models.CharField(blank=True, max_length=50, null=True), + ), + ] diff --git a/src/users/models.py b/src/users/models.py index 6807c71f0f46..8f9fa8a444dc 100644 --- a/src/users/models.py +++ b/src/users/models.py @@ -69,6 +69,7 @@ class FFAdminUser(AbstractUser): first_name = models.CharField(_('first name'), max_length=30) last_name = models.CharField(_('last name'), max_length=150) google_user_id = models.CharField(max_length=50, null=True, blank=True) + github_user_id = models.CharField(max_length=50, null=True, blank=True) USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['first_name', 'last_name'] @@ -82,7 +83,13 @@ def __str__(self): @property def auth_type(self): - return AuthType.GOOGLE.value if self.google_user_id else AuthType.EMAIL.value + if self.google_user_id: + return AuthType.GOOGLE.value + + if self.github_user_id: + return AuthType.GITHUB.value + + return AuthType.EMAIL.value def get_full_name(self): if not self.first_name: diff --git a/src/users/tests/test_models.py b/src/users/tests/test_models.py index f4849029d2f5..e1d94a1c1b42 100644 --- a/src/users/tests/test_models.py +++ b/src/users/tests/test_models.py @@ -2,8 +2,9 @@ import pytest +from django.db.utils import IntegrityError from environments.models import UserEnvironmentPermission, EnvironmentPermissionModel, Environment -from organisations.models import Organisation, OrganisationRole +from organisations.models import Organisation, OrganisationRole, UserOrganisation from projects.models import Project, UserProjectPermission, ProjectPermissionModel from users.models import FFAdminUser @@ -82,3 +83,13 @@ def test_get_permitted_environments_for_user_returns_only_environments_matching_ # Then assert environments.count() == 1 + + def test_unique_user_organisation(self): + # Given organisation and user + + # When + self.user.add_organisation(self.organisation, OrganisationRole.ADMIN) + + # Then + with pytest.raises(IntegrityError): + self.user.add_organisation(self.organisation, OrganisationRole.USER) diff --git a/version.txt b/version.txt new file mode 100644 index 000000000000..e3a4f193364d --- /dev/null +++ b/version.txt @@ -0,0 +1 @@ +2.2.0 \ No newline at end of file