From e3a48d171c83f5b5f16aaa09dee776591cadd6ba Mon Sep 17 00:00:00 2001 From: Alex Parsons Date: Mon, 16 Dec 2024 20:53:46 +0000 Subject: [PATCH 1/7] Add StrEnum backport --- src/twfy_tools/common/enum_backport.py | 42 ++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 src/twfy_tools/common/enum_backport.py diff --git a/src/twfy_tools/common/enum_backport.py b/src/twfy_tools/common/enum_backport.py new file mode 100644 index 0000000000..7814e28a82 --- /dev/null +++ b/src/twfy_tools/common/enum_backport.py @@ -0,0 +1,42 @@ +from enum import Enum +from typing import List, Type, TypeVar + +_S = TypeVar("_S", bound="StrEnum") + + +class StrEnum(str, Enum): + """ + Enum where members are also (and must be) strings + """ + + def __new__(cls: Type[_S], *values: str) -> _S: + if len(values) > 3: + raise TypeError("too many arguments for str(): %r" % (values,)) + if len(values) == 1: + # it must be a string + if not isinstance(values[0], str): + raise TypeError("%r is not a string" % (values[0],)) + if len(values) >= 2: + # check that encoding argument is a string + if not isinstance(values[1], str): + raise TypeError("encoding must be a string, not %r" % (values[1],)) + if len(values) == 3: + # check that errors argument is a string + if not isinstance(values[2], str): + raise TypeError("errors must be a string, not %r" % (values[2])) + value = str(*values) + member = str.__new__(cls, value) + member._value_ = value + return member + + def __str__(self): + return self.value + + @staticmethod + def _generate_next_value_( + name: str, start: int, count: int, last_values: List[str] + ) -> str: + """ + Return the lower-cased version of the member name. + """ + return name.lower() From 990464d91b36583b48cd84ea0e5fd84b3ca575b7 Mon Sep 17 00:00:00 2001 From: Alex Parsons Date: Mon, 16 Dec 2024 20:55:42 +0000 Subject: [PATCH 2/7] Add MAILCHIMP_API_KEY variable --- .devcontainer/devcontainer.json | 3 +++ bin/docker-entrypoint.sh | 1 + conf/general-example | 2 ++ docker-compose.yml | 1 + src/twfy_tools/common/config.py | 1 + 5 files changed, 8 insertions(+) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index ad88b23aad..0ec69841e8 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -4,6 +4,9 @@ "TWFY_VOTES_URL": { "description": "URL to the TWFY Votes page" }, + "MAILCHIMP_API_KEY": { + "description": "Mailchimp API key" + }, "MAPIT_URL": { "description": "URL to the MapIt API - defaults to mapit.mysociety.org" }, diff --git a/bin/docker-entrypoint.sh b/bin/docker-entrypoint.sh index 3e4faee79b..22228e2826 100755 --- a/bin/docker-entrypoint.sh +++ b/bin/docker-entrypoint.sh @@ -15,6 +15,7 @@ sed -r \ -e 's!^(.*"OPTION_TWFY_DB_NAME", *)"[^"]*"!'"\\1'twfy'!" \ -e 's!^(.*"OPTION_TWFY_MEMCACHED_HOST", *)"[^"]*"!'"\\1'memcache'!" \ -e 's!^(.*"TWFY_VOTES_URL", *)"[^"]*"!'"\\1'$TWFY_VOTES_URL'!" \ + -e 's!^(.*"MAILCHIMP_API_KEY", *)"[^"]*"!'"\\1'$MAILCHIMP_API_KEY'!" \ -e 's!^(.*"OPTION_MAPIT_URL", *)"[^"]*"!'"\\1'$MAPIT_URL'!" \ -e 's!^(.*"OPTION_MAPIT_API_KEY", *)"[^"]*"!'"\\1'$MAPIT_API_KEY'!" \ -e 's!^(.*"OPTION_DEMOCRACYCLUB_TOKEN", *)"[^"]*"!'"\\1'$DEMOCRACYCLUB_TOKEN'!" 
\ diff --git a/conf/general-example b/conf/general-example index 8baa3dbd94..ab7e478de9 100644 --- a/conf/general-example +++ b/conf/general-example @@ -127,6 +127,8 @@ define ("RECESSFILE","https://www.theyworkforyou.com/pwdata/parl-recesses.txt"); define("TWFY_VOTES_URL", ""); +define("MAILCHIMP_API_KEY", ""); + define('ENTRIES_AFTER_LEFT', '{ "10170,2014-09-08": "2014-09-07", "11068,2008-09": "2008-08-13", diff --git a/docker-compose.yml b/docker-compose.yml index 02d3e418ae..dde50817d1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -24,6 +24,7 @@ services: TWFY_TEST_DB_USER: twfy TWFY_TEST_DB_PASS: password TWFY_VOTES_URL: ${TWFY_VOTES_URL} + MAILCHIMP_API_KEY: ${MAILCHIMP_API_KEY:-} MAPIT_URL: ${MAPIT_URL:-https://mapit.mysociety.org/} MAPIT_API_KEY: ${MAPIT_API_KEY:-} DEMOCRACYCLUB_TOKEN: ${DEMOCRACYCLUB_TOKEN:-} diff --git a/src/twfy_tools/common/config.py b/src/twfy_tools/common/config.py index 576675f6f2..d3e75003bc 100644 --- a/src/twfy_tools/common/config.py +++ b/src/twfy_tools/common/config.py @@ -22,6 +22,7 @@ class ConfigModel(BaseModel): TWFY_DB_PASS: str RAWDATA: Path PWMEMBERS: Path + MAILCHIMP_API_KEY: str @classmethod def from_php_config(cls, php_config_get: BaseConfigGet): From 3090ab3f658a2edda392de69c13eba38feb900ed Mon Sep 17 00:00:00 2001 From: Alex Parsons Date: Mon, 16 Dec 2024 20:55:56 +0000 Subject: [PATCH 3/7] Add TWFY_DB_PORT to config --- src/twfy_tools/common/config.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/twfy_tools/common/config.py b/src/twfy_tools/common/config.py index d3e75003bc..3256eb3167 100644 --- a/src/twfy_tools/common/config.py +++ b/src/twfy_tools/common/config.py @@ -20,6 +20,7 @@ class ConfigModel(BaseModel): TWFY_DB_NAME: str TWFY_DB_USER: str TWFY_DB_PASS: str + TWFY_DB_PORT: int RAWDATA: Path PWMEMBERS: Path MAILCHIMP_API_KEY: str From dc7813503a4a1d6fb0572fd615765d3aba3aa87c Mon Sep 17 00:00:00 2001 From: Alex Parsons Date: Mon, 16 Dec 2024 20:57:30 +0000 Subject: [PATCH 4/7] Add django, mailchimp, typer to requirements --- poetry.lock | 283 ++++++++++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 3 + 2 files changed, 284 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8d5feb1c1c..39ab08d022 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. [[package]] name = "annotated-types" @@ -11,6 +11,148 @@ files = [ {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] +[[package]] +name = "asgiref" +version = "3.8.1" +description = "ASGI specs, helper code, and adapters" +optional = false +python-versions = ">=3.8" +files = [ + {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, + {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} + +[package.extras] +tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] + +[[package]] +name = "certifi" +version = "2024.12.14" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, + {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = 
"charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", 
hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = 
"sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, +] + [[package]] name = "click" version = "8.1.7" @@ -52,6 +194,26 @@ pyyaml = "^6.0.1" type = "directory" url = "commonlib" +[[package]] +name = "django" +version = "4.2.17" +description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Django-4.2.17-py3-none-any.whl", hash = "sha256:3a93350214ba25f178d4045c0786c61573e7dbfa3c509b3551374f1e11ba8de0"}, + {file = "Django-4.2.17.tar.gz", hash = "sha256:6b56d834cc94c8b21a8f4e775064896be3b4a4ca387f2612d4406a5927cd2fdc"}, +] + +[package.dependencies] +asgiref = ">=3.6.0,<4" +sqlparse = ">=0.3.1" +tzdata = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +argon2 = ["argon2-cffi (>=19.1.0)"] +bcrypt = ["bcrypt"] + [[package]] name = "greenlet" version = "3.0.3" @@ -123,6 +285,42 @@ files = [ docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "mailchimp-marketing" +version = "3.0.80" +description = "Mailchimp Marketing API" +optional = false +python-versions = "*" +files = [] +develop = false + +[package.dependencies] +certifi = ">=2017.4.17" +python-dateutil = ">=2.1" +requests = ">=2.23" +six = ">=1.10" +urllib3 = ">=1.23" + +[package.source] +type = "git" +url = "https://github.com/mailchimp/mailchimp-marketing-python.git" +reference = "HEAD" +resolved_reference = "3305fa45b3f436767a539c5fba9cb2b0a083d761" + [[package]] name = "markdown-it-py" version = "3.0.0" @@ -564,6 +762,27 @@ files = [ {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + [[package]] name = "rich" version = "13.7.1" @@ -628,6 +847,17 @@ files = [ {file = "ruff-0.6.2.tar.gz", hash = "sha256:239ee6beb9e91feb8e0ec384204a763f36cb53fb895a1a364618c6abb076b3be"}, ] +[[package]] +name = "shellingham" +version = "1.5.4" +description = "Tool to Detect Surrounding Shell" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] + [[package]] name = "six" version = "1.16.0" @@ -726,6 +956,38 @@ postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] sqlcipher = ["sqlcipher3_binary"] +[[package]] +name = "sqlparse" +version = "0.5.3" +description = "A non-validating SQL parser." +optional = false +python-versions = ">=3.8" +files = [ + {file = "sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca"}, + {file = "sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272"}, +] + +[package.extras] +dev = ["build", "hatch"] +doc = ["sphinx"] + +[[package]] +name = "typer" +version = "0.15.1" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." +optional = false +python-versions = ">=3.7" +files = [ + {file = "typer-0.15.1-py3-none-any.whl", hash = "sha256:7994fb7b8155b64d3402518560648446072864beefd44aa2dc36972a5972e847"}, + {file = "typer-0.15.1.tar.gz", hash = "sha256:a0588c0a7fa68a1978a069818657778f86abe6ff5ea6abf472f940a08bfe4f0a"}, +] + +[package.dependencies] +click = ">=8.0.0" +rich = ">=10.11.0" +shellingham = ">=1.3.0" +typing-extensions = ">=3.7.4.3" + [[package]] name = "typing-extensions" version = "4.12.2" @@ -748,7 +1010,24 @@ files = [ {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] +[[package]] +name = "urllib3" +version = "2.2.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "1dafed89107d96e38dfb14ba659e5ed4666a4de947774cac8240e8f04973318d" +content-hash = "e60700446c474d5e984f160564b1e43f41d6056c6eb269fbf6cb166813c0fa73" diff --git a/pyproject.toml b/pyproject.toml index 281e4fc3ff..fe19e4f3bd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,6 +16,9 @@ pyyaml = "6.0.1" commonlib = {path = "commonlib"} pydantic = "^2.8.2" sqlalchemy = "^2.0.32" +django = ">=4.2,<5.0" +mailchimp-marketing = {git = "https://github.com/mailchimp/mailchimp-marketing-python.git"} +typer = "^0.15.1" [tool.poetry.group.dev.dependencies] From f474f09a478c81035d7afea0781a36480c9d40b8 Mon Sep 17 00:00:00 2001 From: Alex Parsons Date: Mon, 16 Dec 2024 20:58:52 +0000 Subject: [PATCH 5/7] Basic Django interface - Add supporting functions - Add basic User model --- src/twfy_tools/db/django_setup.py | 36 ++++++++++++ src/twfy_tools/db/model_helper.py | 59 ++++++++++++++++++++ src/twfy_tools/db/models.py | 92 +++++++++++++++++++++++++++++++ 3 files changed, 187 insertions(+) create mode 100644 src/twfy_tools/db/django_setup.py create mode 100644 src/twfy_tools/db/model_helper.py create mode 100644 src/twfy_tools/db/models.py diff --git a/src/twfy_tools/db/django_setup.py b/src/twfy_tools/db/django_setup.py new file mode 100644 index 0000000000..5e85882c8b --- /dev/null +++ b/src/twfy_tools/db/django_setup.py @@ -0,0 +1,36 @@ +""" +This is a simple minimal setup for using Django ORMs. +Import this when creating models and then the models can be used as normal. +""" + +import os + +import django +from django.conf import settings + +from twfy_tools.common.config import config + +# Allow use in notebooks +os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "true" + +if not settings.configured: + settings.configure( + DEBUG=True, + SECRET_KEY="your-secret-key", + ALLOWED_HOSTS=["*"], + INSTALLED_APPS=[ + "twfy_tools", + ], + DATABASES={ + "default": { + "ENGINE": "django.db.backends.mysql", + "NAME": config.TWFY_DB_NAME, + "USER": config.TWFY_DB_USER, + "PASSWORD": config.TWFY_DB_PASS, + "HOST": config.TWFY_DB_HOST, + "PORT": config.TWFY_DB_PORT, + } + }, + ) + +django.setup() diff --git a/src/twfy_tools/db/model_helper.py b/src/twfy_tools/db/model_helper.py new file mode 100644 index 0000000000..7945d2eef3 --- /dev/null +++ b/src/twfy_tools/db/model_helper.py @@ -0,0 +1,59 @@ +from typing import Any, Callable, TypeVar + +from django.db import models + +from typing_extensions import ParamSpec, dataclass_transform + +FieldType = TypeVar( + "FieldType", + bound=models.Field, +) +P = ParamSpec("P") + + +def field( + model_class: Callable[P, FieldType], + null: bool = False, + *args: P.args, + **kwargs: P.kwargs, +) -> Any: + """ + Helper function for basic field creation. + So the type checker doesn't complain about the return type + and you can specify the specify type of the item as a typehint. 
+ """ + if args: + raise ValueError("Positional arguments are not supported") + kwargs["null"] = null + if isinstance(model_class, type) and issubclass(model_class, models.Field): + return model_class(**kwargs) + else: + raise ValueError(f"Invalid model class {model_class}") + + +@dataclass_transform(kw_only_default=True, field_specifiers=(field,)) +class DataclassModelBase(models.base.ModelBase): + def __new__(cls, name: str, bases: tuple[type], dct: dict[str, Any], **kwargs: Any): + """ + Basic metaclass to make class keyword parameters into a Meta class. + + e.g. (as below) - abstract is passed in as a class keyword parameter + rather than a `class Meta: abstract = True` block. + + """ + if kwargs: + dct["Meta"] = type("Meta", (dct.get("Meta", type),), kwargs) + return super().__new__(cls, name, bases, dct) + + +class DataclassModel(models.Model, metaclass=DataclassModelBase, abstract=True): + """ + Basic wrapper that adds tidier metaclass config, and dataclass + prompting for IDEs. + """ + + +class UnmanagedDataclassModel(DataclassModel, managed=False, abstract=True): + """ + Dataclass model that is not managed by the django schema. + """ diff --git a/src/twfy_tools/db/models.py b/src/twfy_tools/db/models.py new file mode 100644 index 0000000000..85aecb0769 --- /dev/null +++ b/src/twfy_tools/db/models.py @@ -0,0 +1,92 @@ +""" +This is a simple one file setup for using django's ORM models. +""" + +from __future__ import annotations + +import datetime +from enum import IntEnum +from typing import Optional + +from django.db import models + +from twfy_tools.db import django_setup as django_setup + +from ..common.enum_backport import StrEnum +from .model_helper import UnmanagedDataclassModel, field + +datetime_min = datetime.datetime.min + + +class UserLevels(StrEnum): + VIEWER = "Viewer" + USER = "User" + MODERATOR = "Moderator" + ADMINISTRATOR = "Administrator" + SUPERUSER = "Superuser" + + +class OptinValues(IntEnum): + OPTIN_SERVICE = 1 + OPTIN_STREAM = 2 + OPTIN_ORG = 4 + + +class User(UnmanagedDataclassModel, db_table="users"): + user_id: Optional[int] = field(models.AutoField, primary_key=True) + firstname: str = field(models.CharField, max_length=255, default="") + lastname: str = field(models.CharField, max_length=255, default="") + email: str = field(models.CharField, max_length=255) + password: str = field(models.CharField, max_length=102, default="") + lastvisit: datetime.datetime = field(models.DateTimeField, default=datetime_min) + registrationtime: datetime.datetime = field( + models.DateTimeField, default=datetime_min + ) + registrationip: str = field(models.CharField, max_length=20, blank=True, null=True) + status: UserLevels = field( + models.CharField, + max_length=13, + blank=True, + null=True, + default=UserLevels.VIEWER, + ) + emailpublic: int = field(models.IntegerField, default=0) + optin: int = field(models.IntegerField, default=0) + deleted: int = field(models.IntegerField, default=0) + postcode: str = field(models.CharField, max_length=10, blank=True, null=True) + registrationtoken: str = field(models.CharField, max_length=24, default="") + confirmed: int = field(models.IntegerField, default=0) + url: str = field(models.CharField, max_length=255, blank=True, null=True) + api_key: str = field( + models.CharField, unique=True, max_length=24, blank=True, null=True + ) + facebook_id: str = field(models.CharField, max_length=24, blank=True, null=True) + facebook_token: str = field(models.CharField, max_length=200, blank=True, null=True) + + UserLevels = UserLevels + 
OptinValues = OptinValues + + def __str__(self): + return f"{self.status}: {self.email}" + + def get_optin_values(self) -> list[OptinValues]: + """ + Returns a list of OptinValues that match the user's optin value. + """ + matched_values: list[OptinValues] = [] + for value in OptinValues: + if self.optin & value: + matched_values.append(value) + return matched_values + + def add_optin(self, optin_value: OptinValues): + """ + Add an optin value to the user. + """ + self.optin |= optin_value + + def remove_optin(self, optin_value: OptinValues): + """ + Remove an optin value from the user. + """ + self.optin &= ~optin_value From e245a13dd554ceb30e521fe01fa4b717c8ecc6b0 Mon Sep 17 00:00:00 2001 From: Alex Parsons Date: Mon, 16 Dec 2024 20:59:09 +0000 Subject: [PATCH 6/7] Add mailchimp interface config --- src/twfy_tools/common/mailchimp.py | 686 +++++++++++++++++++++++++++++ 1 file changed, 686 insertions(+) create mode 100644 src/twfy_tools/common/mailchimp.py diff --git a/src/twfy_tools/common/mailchimp.py b/src/twfy_tools/common/mailchimp.py new file mode 100644 index 0000000000..63967a6be8 --- /dev/null +++ b/src/twfy_tools/common/mailchimp.py @@ -0,0 +1,686 @@ +import datetime +import hashlib +from functools import lru_cache +from typing import Any, NamedTuple, NewType, Optional, TypedDict + +import mailchimp_marketing +import numpy as np +import pandas as pd +import requests +from mailchimp_marketing.api_client import ApiClientError + +InternalListID = NewType("InternalListID", str) +InterestInternalId = NewType("InterestInternalId", str) + + +class MailChimpApiKey(NamedTuple): + api_key: str + server: str + + +class CategoryInfo(NamedTuple): + group_id: str + interest_name_to_id: dict[str, InterestInternalId] + + +class MemberAndInterests(NamedTuple): + email: str + interests: list[InterestInternalId] + + +def get_client(api_key: MailChimpApiKey) -> mailchimp_marketing.Client: # type: ignore + """ + Get the mailchimp api client + """ + client = mailchimp_marketing.Client() + client.set_config({"api_key": api_key.api_key, "server": api_key.server}) + return client # type: ignore + + +@lru_cache +def get_lists(api_key: MailChimpApiKey) -> pd.DataFrame: + """ + Get dataframe of all lists in the account. 
+ """ + client = get_client(api_key) + response: dict[str, Any] = client.lists.get_all_lists(count=1000) + df = pd.DataFrame(response["lists"]) + + # explode the list of dictionaries + # in the 'stats' column into columns in their own right + df = pd.concat([df.drop(["stats"], axis=1), df["stats"].apply(pd.Series)], axis=1) + df = df[["id", "web_id", "name", "member_count"]] + df = df.sort_values("name") + + return df + + +def get_recent_email_count( + api_key: MailChimpApiKey, list_web_id: str, segment_id: str, days: int = 7 +) -> int: + """ + Get the emails and sign up date for a list and segment + Get the count of the number in the last [x] days + """ + client = get_client(api_key) + + try: + list_id = int(list_web_id) + is_web_id = True + except ValueError: + is_web_id = False + if is_web_id: + list_id = list_web_id_to_unique_id(api_key, list_web_id) + else: + list_id = list_name_to_unique_id(api_key, list_web_id) + + dfs = [] + # paginate until we have all emails + offset = 0 + while True: + response: dict[str, Any] = client.lists.get_segment_members_list( + list_id, + segment_id, + count=1000, + offset=offset, + ) + df = pd.DataFrame(response["members"]) + dfs.append(df) + if len(df) < 1000: + break + offset += 1000 + df = pd.concat(dfs) # type: ignore + + # create new timestamp_joined from timestamp_signup and timestamp_opt if timestamp_signup is empty + df["timestamp_joined"] = np.where( + df["timestamp_signup"].isna() | df["timestamp_signup"].isin([None, ""]), + df["timestamp_opt"], # type: ignore + df["timestamp_signup"], # type: ignore + ) + df["timestamp_joined"] = pd.to_datetime(df["timestamp_joined"]).dt.date + # get the cutoff date as a date object + cutoff = (datetime.date.today() - datetime.timedelta(days=days)).isoformat() + mask: pd.Series[bool] = df["timestamp_joined"].apply( + lambda x: x.isoformat() > cutoff # type: ignore + ) + df = df[mask] + return len(df) + + +@lru_cache +def get_segments(api_key: MailChimpApiKey, list_web_id: str) -> pd.DataFrame: + """ + Get segements of a list as a dataframe + """ + client = get_client(api_key) + # if list_web_id can be converted to an int, it's a webid, otherwise it's a name + try: + list_id = int(list_web_id) + is_web_id = True + except ValueError: + is_web_id = False + if is_web_id: + list_id = list_web_id_to_unique_id(api_key, list_web_id) + else: + list_id = list_name_to_unique_id(api_key, list_web_id) + response: dict[str, Any] = client.lists.list_segments(list_id, count=1000) + df = pd.DataFrame(response["segments"]) # type: ignore + df = df[["id", "name", "member_count"]] + df["id"] = list_web_id + ":" + df["id"].astype(str) + return df + + +@lru_cache +def get_recent_campaigns(api_key: MailChimpApiKey, count: int = 20) -> pd.DataFrame: + """ + Get latest campaigns as a dataframe + """ + client = get_client(api_key) + response: dict[str, Any] = client.campaigns.list( + count=count, sort_field="create_time", sort_dir="DESC" + ) + df = pd.DataFrame(response["campaigns"]) + df["subject_line"] = df["settings"].apply(lambda x: x.get("subject_line", "")) # type: ignore + df["title"] = df["settings"].apply(lambda x: x["title"]) # type: ignore + df["recipient_count"] = df["recipients"].apply(lambda x: x["recipient_count"]) # type: ignore + df = df[ + [ + "id", + "web_id", + "type", + "content_type", + "title", + "status", + "send_time", + "recipient_count", + ] + ] + + return df + + +@lru_cache +def get_templates(api_key: MailChimpApiKey) -> pd.DataFrame: + """ + Get templates as a dataframe + """ + client = get_client(api_key) 
+    response: dict[str, Any] = client.templates.list(count=1000)
+    df = pd.DataFrame(response["templates"])
+    df = df[
+        [
+            "id",
+            "type",
+            "name",
+            "date_created",
+            "drag_and_drop",
+        ]
+    ]
+    # limit to type user
+    df = df[df["type"] == "user"]
+    return df
+
+
+def campaign_web_id_to_unique_id(api_key: MailChimpApiKey, web_id: str) -> str:
+    """
+    Convert a campaign web id to a campaign id
+    """
+    df = get_recent_campaigns(api_key, 1000)
+    # convert to web_id, id column dict
+    lookup = df.set_index("web_id")["id"].to_dict()
+    return lookup[int(web_id)]
+
+
+def list_web_id_to_unique_id(api_key: MailChimpApiKey, web_id: str) -> str:
+    """
+    Convert a list web id to a list id
+    """
+    df = get_lists(api_key)
+    # convert to web_id, id column dict
+    df["web_id"] = df["web_id"].astype(str)
+    lookup = df.set_index("web_id")["id"].to_dict()
+    return lookup[web_id]
+
+
+def list_name_to_unique_id(api_key: MailChimpApiKey, name: str) -> InternalListID:
+    """
+    Convert a list's human name to a unique list id
+    """
+    df = get_lists(api_key)
+    # convert to name, id column dict
+    df["name"] = df["name"].astype(str)
+    lookup = df.set_index("name")["id"].to_dict()
+    return lookup[name]
+
+
+def segment_name_to_unique_id(api_key: MailChimpApiKey, list_id: str, name: str) -> int:
+    """
+    Convert a segment's human name to a unique segment id
+    """
+    df = get_segments(api_key, list_id)
+    # convert to name, id column dict
+    df["name"] = df["name"].astype(str)
+    lookup = df.set_index("name")["id"].to_dict()
+    return lookup[name].split(":")[1]
+
+
+def template_name_to_unique_id(api_key: MailChimpApiKey, name: str) -> int:
+    """
+    Convert a template's human name to a unique template id
+    """
+    df = get_templates(api_key)
+    # convert to name, id column dict
+    df["name"] = df["name"].astype(str)
+    lookup = df.set_index("name")["id"].to_dict()
+    return lookup[name]
+
+
+def send_test_email(
+    api_key: MailChimpApiKey, campaign_web_id: str, emails: list[str]
+) -> bool:
+    """
+    Send a test email
+    """
+    client = get_client(api_key)
+    campaign_id = campaign_web_id_to_unique_id(api_key, campaign_web_id)
+    response: requests.models.Response = client.campaigns.send_test_email(
+        campaign_id, {"test_emails": emails, "send_type": "html"}
+    )
+    # if response code is 200 or 204
+    return response.ok
+
+
+def schedule_campaign(
+    api_key: MailChimpApiKey, campaign_web_id: str, schedule_time: datetime.datetime
+) -> bool:
+    """
+    Schedule a campaign
+    """
+    client = get_client(api_key)
+    campaign_id = campaign_web_id_to_unique_id(api_key, campaign_web_id)
+
+    # round up to the next quarter hour (:00, :15, :30, :45)
+ current_minute = schedule_time.minute + if current_minute % 15 != 0: + schedule_time += datetime.timedelta(minutes=15 - (current_minute % 15)) + # delete any seconds or microseconds + schedule_time = schedule_time.replace(second=0, microsecond=0) + + print(f"Scheduling for {schedule_time} for campaign {campaign_id}") + + str_time = schedule_time.isoformat() + response: requests.models.Response = client.campaigns.schedule( + campaign_id, + { + "schedule_time": str_time, + "batch_delivery": False, + }, + ) + return response.ok + + +def get_user_hash(email: str): + # Convert the email to lowercase and get its MD5 hash + return hashlib.md5(email.lower().encode("utf-8")).hexdigest() + + +@lru_cache +def get_interest_group( + api_key: MailChimpApiKey, list_id: InternalListID, interest_group_label: str +) -> CategoryInfo: + client = get_client(api_key) + options = client.lists.get_list_interest_categories(list_id)["categories"] + options = [option for option in options if option["title"] == interest_group_label][ + 0 + ] + category_id = options["id"] + # get the interests associated with the category + interests = client.lists.list_interest_category_interests( + list_id, + category_id, # type: ignore + )["interests"] + # make lookup from name to id + interests_by_name = {interest["name"]: interest["id"] for interest in interests} + return CategoryInfo(category_id, interests_by_name) + + +def get_member_from_email( + api_key: MailChimpApiKey, internal_list_id: InternalListID, email: str +) -> dict[str, Any]: + # Get the member from the list + client = get_client(api_key) + user_hash = get_user_hash(email) + return client.lists.get_list_member(internal_list_id, user_hash) + + +def get_donor_tags( + api_key: MailChimpApiKey, internal_list_id: InternalListID, email: str +) -> list[str]: + # Get the tags for the user + client = get_client(api_key) + user_hash = get_user_hash(email) + details = client.lists.get_list_member_tags(internal_list_id, user_hash) + return [x["name"] for x in details["tags"]] + + +def set_donor_tags( + api_key: MailChimpApiKey, + internal_list_id: InternalListID, + email: str, + tags_to_add: list[str] = [], + tags_to_remove: list[str] = [], + disable_automation: bool = False, +): + client = get_client(api_key) + # Set the donor status on the user + user_hash = get_user_hash(email) + + existing_tags = get_donor_tags(api_key, internal_list_id, email) + + tags_to_add = [x for x in tags_to_add if x not in existing_tags] + + to_add_dict = [{"name": tag, "status": "active"} for tag in tags_to_add] + to_remove_dict = [{"name": tag, "status": "inactive"} for tag in tags_to_remove] + to_change_list = to_add_dict + to_remove_dict + if len(to_change_list) == 0: + return + + details = { + "tags": to_change_list, + "is_syncing": disable_automation, + } + client.lists.update_list_member_tags(internal_list_id, user_hash, details) + + +def get_notes( + api_key: MailChimpApiKey, + internal_list_id: InternalListID, + email: str, +) -> list[str]: + client = get_client(api_key) + user_hash = get_user_hash(email) + data = client.lists.get_list_member_notes(internal_list_id, user_hash, count=1000) + return [x["note"] for x in data["notes"]] + + +def add_user_notes( + api_key: MailChimpApiKey, + internal_list_id: InternalListID, + email: str, + notes: list[str], + check_existing: bool = True, +): + client = get_client(api_key) + user_hash = get_user_hash(email) + + if check_existing: + existing_notes = get_notes(api_key, internal_list_id, email) + notes_to_add = [note for note in notes if note not 
in existing_notes] + else: + notes_to_add = notes + + for note in notes_to_add: + client.lists.create_list_member_note( + internal_list_id, user_hash, {"note": note} + ) + + +class MemberUpload(TypedDict): + email: str + merge_fields: dict[str, Any] + + +def batch_add_to_interest_group( + api_key: MailChimpApiKey, + internal_list_id: InternalListID, + interest_group_collection: str, + emails: list[str], + interests: list[str], +): + client = get_client(api_key) + + avaliable_list_ids = get_interest_group( + api_key, internal_list_id, interest_group_collection + ) + + interests_to_add = [ + avaliable_list_ids.interest_name_to_id[interest] for interest in interests + ] + interests_to_upload = {x: True for x in interests_to_add} + + # upload all emails in list to audience id + items = [ + { + "email_address": x, + "status": "subscribed", + "interests": interests_to_upload, + } + for x in emails + ] + client.lists.batch_list_members(internal_list_id, {"members": items}) + + +def batch_add_to_different_interest_groups( + api_key: MailChimpApiKey, + internal_list_id: InternalListID, + emails_and_interests: list[MemberAndInterests], + batch_size: int = 200, +): + """ + Specify *different* interest groups for different emails. + """ + client = get_client(api_key) + + items: list[dict[str, Any]] = [] + + for email, interests in emails_and_interests: + items.append( + { + "email_address": email, + "status": "subscribed", + "interests": {x: True for x in interests}, + } + ) + + # upload as batches + for i in range(0, len(items), batch_size): + client.lists.batch_list_members( + internal_list_id, {"members": items[i : i + 200]} + ) + + +def set_user_metadata( + api_key: MailChimpApiKey, + internal_list_id: InternalListID, + email: str, + merge_data: dict[str, Any] = {}, + tags: list[str] = [], + interest_group_collection: Optional[str] = None, + interests: list[str] = [], + notes: list[str] = [], +): + """ + A general purpose function to set metadata for a user. + If user doesn't exist - we're creating them! 
+    """
+    client = get_client(api_key)
+    user_hash = get_user_hash(email)
+
+    try:
+        current_person = client.lists.get_list_member(internal_list_id, user_hash)
+    except ApiClientError:
+        current_person = None
+
+    details: dict[str, Any] = {
+        "merge_fields": merge_data,
+    }
+
+    if interests:
+        available_list_ids = get_interest_group(
+            api_key, internal_list_id, interest_group_collection
+        )
+
+        interests_to_add = [
+            available_list_ids.interest_name_to_id[interest] for interest in interests
+        ]
+
+        details["interests"] = {x: True for x in interests_to_add}
+
+    if current_person:
+        try:
+            client.lists.update_list_member(internal_list_id, user_hash, details)
+        except ApiClientError as e:
+            print(e.text)
+            raise e
+        if tags:
+            set_donor_tags(api_key, internal_list_id, email, tags_to_add=tags)
+
+    else:
+        details["email_address"] = email
+        details["status"] = "subscribed"
+        if tags:
+            details["tags"] = tags
+        try:
+            client.lists.add_list_member(internal_list_id, details)
+        except ApiClientError as e:
+            allowed_problems = [
+                "looks fake or invalid",
+                "Forgotten Email Not Subscribed",
+                "Please provide a valid email address",
+            ]
+            for problem in allowed_problems:
+                if problem in e.text:
+                    return
+            print(e.text)
+            raise e
+
+    if notes:
+        add_user_notes(api_key, internal_list_id, email, notes=notes)
+
+
+def get_all_members(
+    api_key: MailChimpApiKey,
+    internal_list_id: InternalListID,
+    cut_off: Optional[int] = None,
+) -> list[dict[str, Any]]:
+    client = get_client(api_key)
+
+    # Get all the members of the list
+    member_count = 1
+    running_members: list[dict[str, Any]] = []
+    size = 1000 if not cut_off else cut_off
+    offset = 0
+    while member_count > 0:
+        reply = client.lists.get_list_members_info(
+            internal_list_id, count=size, offset=offset
+        )
+        running_members.extend(reply["members"])  # type: ignore
+        member_count = len(reply["members"])  # type: ignore
+        offset += size
+        if cut_off and len(running_members) > cut_off:
+            break
+    return running_members
+
+
+class MailChimpHandler:
+    """
+    Shortcut to the mailchimp api to avoid having to remember the api key
+    """
+
+    def __init__(self, api_key: str, server: str = "us9"):
+        self.api_settings = MailChimpApiKey(api_key, server)
+
+    def get_lists(self) -> pd.DataFrame:
+        return get_lists(self.api_settings)
+
+    def list_name_to_unique_id(self, name: str) -> InternalListID:
+        return list_name_to_unique_id(self.api_settings, name)
+
+    def segment_name_to_unique_id(self, list_id: str, name: str) -> int:
+        return segment_name_to_unique_id(self.api_settings, list_id, name)
+
+    def template_name_to_unique_id(self, name: str) -> int:
+        return template_name_to_unique_id(self.api_settings, name)
+
+    def get_segments(self, list_web_id: str) -> pd.DataFrame:
+        return get_segments(self.api_settings, list_web_id)
+
+    def get_recent_campaigns(self, count: int = 20) -> pd.DataFrame:
+        return get_recent_campaigns(self.api_settings, count)
+
+    def get_templates(self) -> pd.DataFrame:
+        return get_templates(self.api_settings)
+
+    def get_interest_group(
+        self, list_id: InternalListID, interest_group_label: str
+    ) -> CategoryInfo:
+        return get_interest_group(self.api_settings, list_id, interest_group_label)
+
+    def get_member_from_email(
+        self, internal_list_id: InternalListID, email: str
+    ) -> dict[str, Any]:
+        return get_member_from_email(self.api_settings, internal_list_id, email)
+
+    def get_donor_tags(self, internal_list_id: InternalListID, email: str) -> list[str]:
+        return get_donor_tags(self.api_settings, internal_list_id, email)
+
+    def get_notes(self,
internal_list_id: InternalListID, email: str) -> list[str]: + return get_notes(self.api_settings, internal_list_id, email) + + def get_all_members( + self, internal_list_id: InternalListID, cut_off: Optional[int] = None + ) -> list[dict[str, Any]]: + return get_all_members(self.api_settings, internal_list_id, cut_off) + + def get_user_metadata( + self, internal_list_id: InternalListID, email: str + ) -> dict[str, Any]: + return get_member_from_email(self.api_settings, internal_list_id, email) + + def set_user_metadata( + self, + internal_list_id: InternalListID, + email: str, + merge_data: dict[str, Any] = {}, + tags: list[str] = [], + interest_group_collection: Optional[str] = None, + interests: list[str] = [], + notes: list[str] = [], + ): + set_user_metadata( + self.api_settings, + internal_list_id, + email, + merge_data, + tags, + interest_group_collection, + interests, + notes, + ) + + def set_donor_tags( + self, + internal_list_id: InternalListID, + email: str, + tags_to_add: list[str] = [], + tags_to_remove: list[str] = [], + disable_automation: bool = False, + ): + set_donor_tags( + self.api_settings, + internal_list_id, + email, + tags_to_add, + tags_to_remove, + disable_automation, + ) + + def add_user_notes( + self, + internal_list_id: InternalListID, + email: str, + notes: list[str], + check_existing: bool = True, + ): + add_user_notes( + self.api_settings, internal_list_id, email, notes, check_existing + ) + + def send_test_email(self, campaign_web_id: str, emails: list[str]) -> bool: + return send_test_email(self.api_settings, campaign_web_id, emails) + + def schedule_campaign( + self, camapign_web_id: str, schedule_time: datetime.datetime + ) -> bool: + return schedule_campaign(self.api_settings, camapign_web_id, schedule_time) + + def batch_add_to_different_interest_groups( + self, + internal_list_id: InternalListID, + emails_and_interests: list[MemberAndInterests], + batch_size: int = 200, + ): + batch_add_to_different_interest_groups( + self.api_settings, internal_list_id, emails_and_interests, batch_size + ) + + def batch_add_to_interest_group( + self, + internal_list_id: InternalListID, + interest_group_collection: str, + emails: list[str], + interests: list[str], + ): + batch_add_to_interest_group( + self.api_settings, + internal_list_id, + interest_group_collection, + emails, + interests, + ) + + def get_user_hash(self, email: str) -> str: + return get_user_hash(email) + + def list_web_id_to_unique_id(self, web_id: str) -> str: + return list_web_id_to_unique_id(self.api_settings, web_id) From 71a3625ce9332297b7eb02e9d83f9281fe01bd93 Mon Sep 17 00:00:00 2001 From: Alex Parsons Date: Mon, 16 Dec 2024 21:01:21 +0000 Subject: [PATCH 7/7] Add contact_io interface - Upload optins to relevant mailchimp lists daily --- pyproject.toml | 4 +- scripts/dailyupdate | 3 + src/twfy_tools/utils/__init__.py | 0 src/twfy_tools/utils/contact_io.py | 98 ++++++++++++++++++++++++++++++ 4 files changed, 104 insertions(+), 1 deletion(-) create mode 100644 src/twfy_tools/utils/__init__.py create mode 100644 src/twfy_tools/utils/contact_io.py diff --git a/pyproject.toml b/pyproject.toml index fe19e4f3bd..d36989c3be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,11 +24,13 @@ typer = "^0.15.1" [tool.poetry.group.dev.dependencies] ruff = "^0.6.1" +[tool.poetry.scripts] +contact-io = "twfy_tools.utils.contact_io:app" + [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" - [tool.ruff] extend-exclude = ["migrations", "commonlib", "scripts/historic"] diff 
--git a/scripts/dailyupdate b/scripts/dailyupdate index 518238a9ec..66b98ad30b 100755 --- a/scripts/dailyupdate +++ b/scripts/dailyupdate @@ -30,3 +30,6 @@ system './mpinfoin.pl'; # update individual division votes system "./json2db.pl >> $cron_log"; + +# Add yesterdays optins to mailchimp +system "poetry run contact-io upload-yesterday >> $cron_log"; \ No newline at end of file diff --git a/src/twfy_tools/utils/__init__.py b/src/twfy_tools/utils/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/twfy_tools/utils/contact_io.py b/src/twfy_tools/utils/contact_io.py new file mode 100644 index 0000000000..953d17c727 --- /dev/null +++ b/src/twfy_tools/utils/contact_io.py @@ -0,0 +1,98 @@ +""" +Interface to upload user optin information to MailChimp. +""" + +from datetime import date, timedelta +from functools import lru_cache + +from typer import Typer + +from twfy_tools.common.config import config +from twfy_tools.common.mailchimp import ( + InterestInternalId, + MailChimpHandler, + MemberAndInterests, +) +from twfy_tools.db.models import OptinValues, User + +app = Typer() + +mailing_list_name = "mySociety Newsletters" + + +@lru_cache +def get_mailing_list_internal_id(): + client = MailChimpHandler(config.MAILCHIMP_API_KEY) + return client.list_name_to_unique_id(mailing_list_name) + + +def get_internal_optin_id( + interest_group: str, interest_name: str +) -> InterestInternalId: + client = MailChimpHandler(config.MAILCHIMP_API_KEY) + + # get internal id for the mailing list + mailing_list_id = get_mailing_list_internal_id() + interest_group_items = client.get_interest_group(mailing_list_id, interest_group) + return interest_group_items.interest_name_to_id[interest_name] + + +def upload_contacts(start_date: date, end_date: date): + """ + Given a start and end date - get the optin_values for new users. + And add them to the relevant mySociety lists. + """ + optin_interest_lookup = { + OptinValues.OPTIN_ORG: get_internal_optin_id( + interest_group="What are you interested in? Select all that apply", + interest_name="mySociety newsletter", + ), + OptinValues.OPTIN_STREAM: get_internal_optin_id( + interest_group="What are you interested in? Select all that apply", + interest_name="Democracy and Parliaments", + ), + OptinValues.OPTIN_SERVICE: get_internal_optin_id( + interest_group="Service interest", + interest_name="TheyWorkForYou", + ), + } + + new_users = User.objects.filter( + registrationtime__gte=start_date, registrationtime__lt=end_date + ) + + members_and_values: list[MemberAndInterests] = [] + + for user in new_users: + internal_ids = [optin_interest_lookup[x] for x in user.get_optin_values()] + if internal_ids: + members_and_values.append(MemberAndInterests(user.email, internal_ids)) + + client = MailChimpHandler(config.MAILCHIMP_API_KEY) + mailing_list_id = get_mailing_list_internal_id() + + client.batch_add_to_different_interest_groups(mailing_list_id, members_and_values) + + print(f"Uploaded {len(members_and_values)} users to MailChimp") + + +@app.command() +def upload_mailchimp_optins(start_date: str, end_date: str): + """ + Upload the optin values for new users to MailChimp. + """ + upload_contacts(date.fromisoformat(start_date), date.fromisoformat(end_date)) + + +@app.command() +def upload_yesterday(): + """ + Uploads the optin values for users who registered yesterday. + the end_date is a less than, so safe to say today. + """ + yesterday = date.today() - timedelta(days=1) + upload_contacts(yesterday, date.today()) + + +if __name__ == "__main__": + app()
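
A minimal usage sketch of the interface this series adds, assuming a configured
MAILCHIMP_API_KEY and database connection; the date window below is illustrative,
while the list and interest-group names are the ones hard-coded in contact_io.py:

    from datetime import date, timedelta

    from twfy_tools.common.config import config
    from twfy_tools.common.mailchimp import MailChimpHandler
    from twfy_tools.utils.contact_io import upload_contacts

    # The handler wraps the API key so individual calls don't need to pass it
    client = MailChimpHandler(config.MAILCHIMP_API_KEY)

    # Resolve the audience name used by contact_io to Mailchimp's internal list id
    list_id = client.list_name_to_unique_id("mySociety Newsletters")

    # Inspect the interest group that the OPTIN_SERVICE bit is mapped onto
    groups = client.get_interest_group(list_id, "Service interest")
    print(groups.interest_name_to_id)

    # Backfill a week of optins by hand, equivalent to running
    # poetry run contact-io upload-mailchimp-optins <start> <end>
    upload_contacts(date.today() - timedelta(days=7), date.today())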