diff --git a/ops/poetry.lock b/ops/poetry.lock
new file mode 100644
index 0000000..8b4c6fd
--- /dev/null
+++ b/ops/poetry.lock
@@ -0,0 +1,342 @@
+# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
+
+[[package]]
+name = "jinja2"
+version = "3.1.3"
+description = "A very fast and expressive template engine."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"},
+ {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.0"
+
+[package.extras]
+i18n = ["Babel (>=2.7)"]
+
+[[package]]
+name = "markupsafe"
+version = "2.1.4"
+description = "Safely add untrusted strings to HTML/XML markup."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84"},
+ {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b"},
+ {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8"},
+ {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e"},
+ {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d"},
+ {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858"},
+ {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e"},
+ {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26"},
+ {file = "MarkupSafe-2.1.4-cp310-cp310-win32.whl", hash = "sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0"},
+ {file = "MarkupSafe-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2"},
+ {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863"},
+ {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69"},
+ {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc"},
+ {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6"},
+ {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb"},
+ {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7"},
+ {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea"},
+ {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131"},
+ {file = "MarkupSafe-2.1.4-cp311-cp311-win32.whl", hash = "sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74"},
+ {file = "MarkupSafe-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56"},
+ {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e"},
+ {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc"},
+ {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc"},
+ {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250"},
+ {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f"},
+ {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74"},
+ {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6"},
+ {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d"},
+ {file = "MarkupSafe-2.1.4-cp312-cp312-win32.whl", hash = "sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475"},
+ {file = "MarkupSafe-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656"},
+ {file = "MarkupSafe-2.1.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a07f40ef8f0fbc5ef1000d0c78771f4d5ca03b4953fc162749772916b298fc4"},
+ {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d18b66fe626ac412d96c2ab536306c736c66cf2a31c243a45025156cc190dc8a"},
+ {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:698e84142f3f884114ea8cf83e7a67ca8f4ace8454e78fe960646c6c91c63bfa"},
+ {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a3b78a5af63ec10d8604180380c13dcd870aba7928c1fe04e881d5c792dc4e"},
+ {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:15866d7f2dc60cfdde12ebb4e75e41be862348b4728300c36cdf405e258415ec"},
+ {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6aa5e2e7fc9bc042ae82d8b79d795b9a62bd8f15ba1e7594e3db243f158b5565"},
+ {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:54635102ba3cf5da26eb6f96c4b8c53af8a9c0d97b64bdcb592596a6255d8518"},
+ {file = "MarkupSafe-2.1.4-cp37-cp37m-win32.whl", hash = "sha256:3583a3a3ab7958e354dc1d25be74aee6228938312ee875a22330c4dc2e41beb0"},
+ {file = "MarkupSafe-2.1.4-cp37-cp37m-win_amd64.whl", hash = "sha256:d6e427c7378c7f1b2bef6a344c925b8b63623d3321c09a237b7cc0e77dd98ceb"},
+ {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:bf1196dcc239e608605b716e7b166eb5faf4bc192f8a44b81e85251e62584bd2"},
+ {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df98d4a9cd6a88d6a585852f56f2155c9cdb6aec78361a19f938810aa020954"},
+ {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b835aba863195269ea358cecc21b400276747cc977492319fd7682b8cd2c253d"},
+ {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23984d1bdae01bee794267424af55eef4dfc038dc5d1272860669b2aa025c9e3"},
+ {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c98c33ffe20e9a489145d97070a435ea0679fddaabcafe19982fe9c971987d5"},
+ {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9896fca4a8eb246defc8b2a7ac77ef7553b638e04fbf170bff78a40fa8a91474"},
+ {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b0fe73bac2fed83839dbdbe6da84ae2a31c11cfc1c777a40dbd8ac8a6ed1560f"},
+ {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c7556bafeaa0a50e2fe7dc86e0382dea349ebcad8f010d5a7dc6ba568eaaa789"},
+ {file = "MarkupSafe-2.1.4-cp38-cp38-win32.whl", hash = "sha256:fc1a75aa8f11b87910ffd98de62b29d6520b6d6e8a3de69a70ca34dea85d2a8a"},
+ {file = "MarkupSafe-2.1.4-cp38-cp38-win_amd64.whl", hash = "sha256:3a66c36a3864df95e4f62f9167c734b3b1192cb0851b43d7cc08040c074c6279"},
+ {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:765f036a3d00395a326df2835d8f86b637dbaf9832f90f5d196c3b8a7a5080cb"},
+ {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:21e7af8091007bf4bebf4521184f4880a6acab8df0df52ef9e513d8e5db23411"},
+ {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c31fe855c77cad679b302aabc42d724ed87c043b1432d457f4976add1c2c3e"},
+ {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653fa39578957bc42e5ebc15cf4361d9e0ee4b702d7d5ec96cdac860953c5b4"},
+ {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47bb5f0142b8b64ed1399b6b60f700a580335c8e1c57f2f15587bd072012decc"},
+ {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fe8512ed897d5daf089e5bd010c3dc03bb1bdae00b35588c49b98268d4a01e00"},
+ {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:36d7626a8cca4d34216875aee5a1d3d654bb3dac201c1c003d182283e3205949"},
+ {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b6f14a9cd50c3cb100eb94b3273131c80d102e19bb20253ac7bd7336118a673a"},
+ {file = "MarkupSafe-2.1.4-cp39-cp39-win32.whl", hash = "sha256:c8f253a84dbd2c63c19590fa86a032ef3d8cc18923b8049d91bcdeeb2581fbf6"},
+ {file = "MarkupSafe-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:8b570a1537367b52396e53325769608f2a687ec9a4363647af1cded8928af959"},
+ {file = "MarkupSafe-2.1.4.tar.gz", hash = "sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f"},
+]
+
+[[package]]
+name = "omymodels"
+version = "0.15.1"
+description = "O! My Models (omymodels) is a library to generate Python Models for SQLAlchemy (ORM & Core), GinoORM, Pydantic, Pydal tables & Python Dataclasses from SQL DDL. And convert one models to another."
+optional = false
+python-versions = ">=3.7,<4.0"
+files = [
+ {file = "omymodels-0.15.1-py3-none-any.whl", hash = "sha256:d114da8ef4e9c10685a7f460cdd837666a765cd9df9e28daf0e9b143781c337d"},
+ {file = "omymodels-0.15.1.tar.gz", hash = "sha256:95acc2b706a4e5f8611d4d37f7b700e2afb58520f566a144364227ded63eaa99"},
+]
+
+[package.dependencies]
+Jinja2 = ">=3.0.1,<4.0.0"
+py-models-parser = ">=0.7.0,<0.8.0"
+pydantic = ">=1.8.2,<2.0.0"
+simple-ddl-parser = ">=1.0.0,<2.0.0"
+table-meta = ">=0.1.5,<0.2.0"
+
+[[package]]
+name = "parsimonious"
+version = "0.10.0"
+description = "(Soon to be) the fastest pure-Python PEG parser I could muster"
+optional = false
+python-versions = "*"
+files = [
+ {file = "parsimonious-0.10.0-py3-none-any.whl", hash = "sha256:982ab435fabe86519b57f6b35610aa4e4e977e9f02a14353edf4bbc75369fc0f"},
+ {file = "parsimonious-0.10.0.tar.gz", hash = "sha256:8281600da180ec8ae35427a4ab4f7b82bfec1e3d1e52f80cb60ea82b9512501c"},
+]
+
+[package.dependencies]
+regex = ">=2022.3.15"
+
+[[package]]
+name = "ply"
+version = "3.11"
+description = "Python Lex & Yacc"
+optional = false
+python-versions = "*"
+files = [
+ {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"},
+ {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"},
+]
+
+[[package]]
+name = "py-models-parser"
+version = "0.7.0"
+description = "Parser for Different Python Models (Pydantic, Enums, ORMs: Tortoise, SqlAlchemy, GinoORM, PonyORM, Pydal tables) to extract information about columns(attrs), model, table args,etc in one format."
+optional = false
+python-versions = ">=3.7,<4.0"
+files = [
+ {file = "py_models_parser-0.7.0-py3-none-any.whl", hash = "sha256:68929f903a8c70f1dfe429a49ee71852caf663d1560bb09012342bfbf925ba71"},
+ {file = "py_models_parser-0.7.0.tar.gz", hash = "sha256:ee34fb27f79e9158cb44c7df93fa99ac67475b16458eef9583d4ed39a5350608"},
+]
+
+[package.dependencies]
+parsimonious = ">=0.10.0,<0.11.0"
+
+[[package]]
+name = "pydantic"
+version = "1.10.14"
+description = "Data validation and settings management using python type hints"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"},
+ {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"},
+ {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"},
+ {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"},
+ {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"},
+ {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"},
+ {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"},
+ {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"},
+ {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"},
+ {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"},
+ {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"},
+ {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"},
+ {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"},
+ {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"},
+ {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"},
+ {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"},
+ {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"},
+ {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"},
+ {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"},
+ {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"},
+ {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"},
+ {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"},
+ {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"},
+ {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"},
+ {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"},
+ {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"},
+ {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"},
+ {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"},
+ {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"},
+ {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"},
+ {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"},
+ {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"},
+ {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"},
+ {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"},
+ {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"},
+ {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.2.0"
+
+[package.extras]
+dotenv = ["python-dotenv (>=0.10.4)"]
+email = ["email-validator (>=1.0.3)"]
+
+[[package]]
+name = "regex"
+version = "2023.12.25"
+description = "Alternative regular expression module, to replace re."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"},
+ {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"},
+ {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"},
+ {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"},
+ {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"},
+ {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"},
+ {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"},
+ {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"},
+ {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"},
+ {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"},
+ {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"},
+ {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"},
+ {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"},
+ {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"},
+ {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"},
+ {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"},
+ {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"},
+ {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"},
+ {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"},
+ {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"},
+ {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"},
+ {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"},
+ {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"},
+ {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"},
+ {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"},
+ {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"},
+ {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"},
+ {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"},
+ {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"},
+ {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"},
+ {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"},
+ {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"},
+ {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"},
+ {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"},
+ {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"},
+ {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"},
+ {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"},
+ {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"},
+ {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"},
+ {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"},
+ {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"},
+ {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"},
+ {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"},
+ {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"},
+ {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"},
+ {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"},
+ {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"},
+ {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"},
+ {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"},
+ {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"},
+ {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"},
+ {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"},
+ {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"},
+ {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"},
+ {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"},
+ {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"},
+ {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"},
+ {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"},
+ {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"},
+ {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"},
+ {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"},
+ {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"},
+ {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"},
+ {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"},
+ {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"},
+ {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"},
+ {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"},
+ {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"},
+ {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"},
+ {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"},
+ {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"},
+ {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"},
+ {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"},
+ {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"},
+ {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"},
+ {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"},
+ {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"},
+ {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"},
+ {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"},
+ {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"},
+ {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"},
+ {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"},
+ {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"},
+ {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"},
+ {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"},
+ {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"},
+ {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"},
+ {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"},
+ {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"},
+ {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"},
+ {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"},
+ {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"},
+ {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"},
+]
+
+[[package]]
+name = "simple-ddl-parser"
+version = "1.0.3"
+description = "Simple DDL Parser to parse SQL & dialects like HQL, TSQL (MSSQL), Oracle, AWS Redshift, Snowflake, MySQL, PostgreSQL, etc ddl files to json/python dict with full information about columns: types, defaults, primary keys, etc.; sequences, alters, custom types & other entities from ddl."
+optional = false
+python-versions = ">=3.6,<4.0"
+files = [
+ {file = "simple_ddl_parser-1.0.3-py3-none-any.whl", hash = "sha256:5ed36be082b9b567dcb1c3cc0e2dad41f1c84cf1e63a15228ce893a474e2d83d"},
+ {file = "simple_ddl_parser-1.0.3.tar.gz", hash = "sha256:4043de4bfd4befc9c0a27d0160b3f9c623bf3df9f3d27d3f5dd95e68d1b59343"},
+]
+
+[package.dependencies]
+ply = ">=3.11,<4.0"
+
+[[package]]
+name = "table-meta"
+version = "0.1.5"
+description = "Universal class that created to be a middleware, universal mapping for data from different parsers - simple-ddl-parser and py-models-parser"
+optional = false
+python-versions = ">=3.6.2,<4.0"
+files = [
+ {file = "table-meta-0.1.5.tar.gz", hash = "sha256:66de6770ead04198f4fbce202e0091bc24d1391429df37d644328e5d54d3bb87"},
+ {file = "table_meta-0.1.5-py3-none-any.whl", hash = "sha256:94166eea85db1e332f6860b7ca7f4361e51d66ce42d7596774a861d39029afb3"},
+]
+
+[package.dependencies]
+pydantic = ">=1.8.2,<2.0.0"
+
+[[package]]
+name = "typing-extensions"
+version = "4.9.0"
+description = "Backported and Experimental Type Hints for Python 3.8+"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"},
+ {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"},
+]
+
+[metadata]
+lock-version = "2.0"
+python-versions = "~3.10.6"
+content-hash = "8048c13c38cdb16a06db13c5b76e77189db245da2ecba914b62e334c95056747"
diff --git a/ops/pyproject.toml b/ops/pyproject.toml
new file mode 100644
index 0000000..e3a2e38
--- /dev/null
+++ b/ops/pyproject.toml
@@ -0,0 +1,18 @@
+[tool.poetry]
+name = "scripts"
+version = "0.1.0"
+description = ""
+authors = []
+
+[tool.poetry.dependencies]
+python = "~3.10.6"
+
+[tool.poetry.group.dev.dependencies]
+omymodels = "^0.15.1"
+
+[tool.poetry.scripts]
+generate-types = "scripts.generate_types:run"
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
\ No newline at end of file
diff --git a/ops/scripts/generate_types.py b/ops/scripts/generate_types.py
new file mode 100644
index 0000000..9ff73cb
--- /dev/null
+++ b/ops/scripts/generate_types.py
@@ -0,0 +1,92 @@
+import subprocess
+import sys
+import re
+from omymodels import create_models
+
+def add_class_config(input_text):
+ # Regex pattern to match class definitions
+ class_pattern = r"(class\s+\w+\(BaseModel\):)([\s\S]+?)(\n\n|\Z)"
+ replacement = r"\1\2\n\n model_config = ConfigDict(\n populate_by_name=True\n )\3"
+ return re.sub(class_pattern, replacement, input_text)
+
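+# Illustrative sketch of what add_class_config does (class/field names here
+# are made up): a generated class like
+#   class Run(BaseModel):
+#       id: str
+# gains a trailing
+#       model_config = ConfigDict(populate_by_name=True)
+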
+def snake_to_camel(snake_str):
+ components = snake_str.split('_')
+ # Capitalize the first letter of each component except the first one
+ # and join them together.
+ return components[0] + ''.join(x.title() for x in components[1:])
+
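+# e.g. snake_to_camel("step_name") -> "stepName"
+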
+def add_alias_no_default(input_text):
+ # Regex pattern to match properties without a default value
+ property_pattern = r"(\s+)(\w+_\w+)(:\s+\w+)\s*\n"
+ def edit(match):
+ name, type_def = match.group(2), match.group(3)
+ camel_case = snake_to_camel(name)
+ return f"{match.group(1)}{name}{type_def} = Field(..., alias=\"{camel_case}\")\n"
+ return re.sub(property_pattern, edit, input_text)
+
+def add_alias_with_default(input_text):
+ # Regex pattern to match properties with a default value
+ property_with_default_pattern = r"(\s+)(\w+_\w+)(:\s+Optional\[\w+\.?\w*\]\s*=\s*None)\n"
+ def edit(match):
+ name, type_def = match.group(2), match.group(3)
+ # Extract the type without Optional and the default value
+ type_only = re.search(r'Optional\[(\w+\.?\w*)\]', type_def).group(1)
+ camel_case = snake_to_camel(name)
+ return f"{match.group(1)}{name}: Optional[{type_only}] = Field(default=None, alias=\"{camel_case}\")\n"
+ return re.sub(property_with_default_pattern, edit, input_text)
+
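+# Taken together, the two alias passes above rewrite snake_case fields so the
+# generated models can also be populated from camelCase JSON, e.g. (field
+# names illustrative):
+#   run_id: str                    ->  run_id: str = Field(..., alias="runId")
+#   ended_at: Optional[str] = None ->  ended_at: Optional[str] = Field(default=None, alias="endedAt")
+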
+def run():
+ # Run `supabase db dump --local` to get the db schema
+ result = subprocess.run(
+ ["npx", "supabase", "db", "dump", "--local"],
+ capture_output=True,
+ cwd="../web"
+ )
+ if result.returncode != 0:
+ print("Failed to run 'supabase db dump --local'")
+ print(result.stderr.decode())
+ sys.exit(1)
+
+ db_schema = result.stdout.decode()
+
+ # Split the schema by statement (ending in ;)
+ statements = [stmt.strip() for stmt in db_schema.split(';')]
+ # Extract only the "CREATE TABLE" and "CREATE TYPE" statements
+ create_table_statements = [stmt + ';' for stmt in statements if (
+ stmt.strip().startswith('CREATE TABLE IF NOT EXISTS "public".') or
+ stmt.strip().startswith('CREATE TYPE "public".')
+ )]
+ create_table_statements = [stmt.replace('CREATE TABLE IF NOT EXISTS "public".', 'CREATE TABLE ') for stmt in create_table_statements]
+ create_table_statements = [stmt.replace('"public".', '') for stmt in create_table_statements]
+ # Remove some unsupported SQL features that break omymodels
+ create_table_statements = [stmt.replace('DEFAULT "gen_random_uuid"() NOT NULL', '') for stmt in create_table_statements]
+ create_table_statements = [stmt.replace('with time zone DEFAULT "now"() NOT NULL', '') for stmt in create_table_statements]
+ create_table_statements = [stmt.replace('with time zone', '') for stmt in create_table_statements]
+ create_table_statements = [re.sub(r'(?m)CONSTRAINT.*\n?', '', stmt) for stmt in create_table_statements]
+ db_schema = '\n\n'.join(create_table_statements)
+
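+ # For example, a dumped statement like
+ #   CREATE TABLE IF NOT EXISTS "public"."runs" ("id" "uuid" DEFAULT "gen_random_uuid"() NOT NULL);
+ # is reduced to roughly
+ #   CREATE TABLE "runs" ("id" "uuid" );
+ # before being handed to omymodels (column list shortened here for brevity).
+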
+ # Generate pydantic types using omymodels
+ types = create_models(
+ db_schema,
+ models_type="pydantic",
+ dump=False
+ )["code"]
+
+ # Convert "= false" and "= true" to proper Python
+ types = re.sub(r'= false', '= False', types)
+ types = re.sub(r'= true', '= True', types)
+
+ # Default Optional types = None
+ types = re.sub(r'Optional\[(.*?)\]', r'Optional[\1] = None', types)
+
+ # Add aliases for all snake case classes
+ types = add_class_config(types)
+ types = add_alias_no_default(types)
+ types = add_alias_with_default(types)
+ types = types.replace("from pydantic import BaseModel, Json", "from pydantic import BaseModel, Json, Field, ConfigDict")
+
+ # Write the types to a file
+ with open("../workers/fund_public_goods/db/entities.py", "w") as file:
+ file.write(types)
+
+ sys.exit(0)
diff --git a/package.json b/package.json
index c14b504..d09d97b 100644
--- a/package.json
+++ b/package.json
@@ -4,16 +4,25 @@
"web"
],
"scripts": {
- "postinstall": "cd workers && poetry install",
+ "postinstall": "yarn workers:install && yarn ops:install",
+ "codegen": "yarn web:codegen && yarn ops:codegen",
"build": "yarn web:build && yarn workers:build",
"dev": "npx concurrently \"yarn web:dev\" \"yarn workers:dev\" \"yarn events:dev\" -k -n web,workers,events",
+ "web:codegen": "cd web && yarn db:generate-types",
"web:build": "yarn web:env && cd web && yarn build",
"web:dev": "yarn web:env && cd web && yarn dev",
"web:env": "if [ \"$CICD\" != \"true\" ]; then cp .env ./web/.env; fi",
+ "db:start": "cd web && yarn db:start",
+ "db:reset": "cd web && yarn db:reset",
+ "db:stop": "cd web && yarn db:stop",
+ "workers:install": "cd workers && poetry install",
"workers:build": "cd workers && poetry run build-check",
"workers:dev": "yarn workers:env && cd workers && poetry run python -m uvicorn fund_public_goods.main:app --reload",
"workers:env": "if [ \"$CICD\" != \"true\" ]; then cp .env ./workers/.env; fi",
- "events:dev": "npx inngest-cli dev"
+ "workers:types": "cd ops && poetry run generate-types",
+ "events:dev": "npx inngest-cli dev",
+ "ops:install": "cd ops && poetry install",
+ "ops:codegen": "cd ops && poetry run generate-types"
},
"dependencies": {
"concurrently": "8.2.2",
diff --git a/web/app/actions/startWorker.ts b/web/app/actions/startWorker.ts
index e63413d..9cfb5f3 100644
--- a/web/app/actions/startWorker.ts
+++ b/web/app/actions/startWorker.ts
@@ -17,6 +17,10 @@ export const startWorker = async (
},
});
+ if (response.status !== 200) {
+ throw Error(`Error starting new worker. Status: ${response.status}\nMessage: ${response.statusText}`);
+ }
+
const result = await response.json();
if (!result.worker_id || !result.run_id) {
throw new Error("Error starting new worker");
diff --git a/web/app/strategy/[id]/page.tsx b/web/app/r/[id]/page.tsx
similarity index 88%
rename from web/app/strategy/[id]/page.tsx
rename to web/app/r/[id]/page.tsx
index 33b0022..6becc64 100644
--- a/web/app/strategy/[id]/page.tsx
+++ b/web/app/r/[id]/page.tsx
@@ -7,7 +7,6 @@ export default async function StrategyPage({
}: {
params: { id: string };
}) {
- const workerId = params.id;
const supabase = createSupabaseServerClient();
// Fetch the runs for this worker
@@ -25,13 +24,13 @@ export default async function StrategyPage({
)
`
)
- .eq("worker_id", workerId)
+ .eq("id", params.id)
.order("created_at", { ascending: false })
.single();
if (run.error || !run.data) {
console.error(run.error);
- throw Error(`Runs with worker_id ${workerId} not found.`);
+ throw Error(`Run with ID ${params.id} not found.`);
}
const data = run.data.strategy_entries as unknown as StrategyWithProjects;
diff --git a/web/app/r/[id]/progress/page.tsx b/web/app/r/[id]/progress/page.tsx
new file mode 100644
index 0000000..c4e5cbc
--- /dev/null
+++ b/web/app/r/[id]/progress/page.tsx
@@ -0,0 +1,40 @@
+import Logs from "@/components/Logs";
+import TextField from "@/components/TextField";
+import { createSupabaseServerClient } from "@/utils/supabase-server";
+
+async function PromptField(props: { runId: string }) {
+ const supabase = createSupabaseServerClient()
+
+ const { data: run } = await supabase.from('runs').select(`
+ id,
+ prompt
+ `).eq("id", props.runId).single()
+
+ if (!run) {
+ throw new Error(`Run with ID '${props.runId}' not found`)
+ }
+
+ // `label`/`value`/`disabled` are assumed TextField props
+ return <TextField label="Prompt" value={run.prompt} disabled />
+}
+
+export default function ProgressPage(props: {
+ params: {
+ id: string
+ }
+}) {
+ return (
+   <div>
+     <PromptField runId={props.params.id} />
+     <Logs runId={props.params.id} />
+   </div>
+ )
+}
\ No newline at end of file
diff --git a/web/app/strategy/[id]/transaction/page.tsx b/web/app/r/[id]/transaction/page.tsx
similarity index 80%
rename from web/app/strategy/[id]/transaction/page.tsx
rename to web/app/r/[id]/transaction/page.tsx
index 60973ba..df78cd4 100644
--- a/web/app/strategy/[id]/transaction/page.tsx
+++ b/web/app/r/[id]/transaction/page.tsx
@@ -3,16 +3,15 @@ import { FundingEntry } from "@/components/FundingTable";
import { createSupabaseServerClient } from "@/utils/supabase-server";
export default async function Page({ params }: { params: { id: string } }) {
- const workerId = params.id;
const supabase = createSupabaseServerClient();
const run = await supabase
.from("funding_entries_view")
.select("*")
- .eq("worker_id", workerId);
+ .eq("run_id", params.id);
if (run.error || !run.data) {
console.error(run.error);
- throw Error(`Runs with worker_id ${workerId} not found.`);
+ throw Error(`Run with ID ${params.id} not found.`);
}
return <FundingReview entries={run.data as unknown as FundingEntry[]} />;
diff --git a/web/components/FundingReview.tsx b/web/components/FundingReview.tsx
index ce380ab..912df41 100644
--- a/web/components/FundingReview.tsx
+++ b/web/components/FundingReview.tsx
@@ -25,11 +25,11 @@ export default function FundingReview(props: { entries: FundingEntry[] }) {
// TODO: Handle interaction of funding in multiple chains
const selectedNetwork = projects[0].network as NetworkId;
- const selectedToken = projects[0].token || "WETH"
+ const selectedToken = projects[0].token
- const networkIndex = Object.values(SUPPORTED_NETWORKS).indexOf(selectedNetwork)
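+ // TODO: network hardcoded to Sepolia (11155111) just for testing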
+ const networkIndex = Object.values(SUPPORTED_NETWORKS).indexOf(11155111)
const networkName = Object.keys(SUPPORTED_NETWORKS)[networkIndex] as NetworkName
- const token = getTokensForNetwork(networkName).find(t => t.name == selectedToken)
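+ // TODO: token hardcoded to WETH just for testing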
+ const token = getTokensForNetwork(networkName).find(t => t.name == "WETH")
if (!token) {
throw new Error(`Token with name: ${selectedToken} is not valid`)
@@ -39,7 +39,7 @@ export default function FundingReview(props: { entries: FundingEntry[] }) {
try {
await splitTransferFunds(
// TODO: Modify this with project.recipient; this is just for testing purposes
- projects.map((project) => "ADD_YOUR_ADDRESS"),
+ projects.map((project) => "0xAC39C85F4E54797e4909f70a302d9e11E428135D"),
amounts,
signer,
token.address,
diff --git a/web/components/LoadingCircle.tsx b/web/components/LoadingCircle.tsx
index 86ce915..c36a717 100644
--- a/web/components/LoadingCircle.tsx
+++ b/web/components/LoadingCircle.tsx
@@ -3,9 +3,10 @@ import clsx from "clsx";
interface LoadingCircleProps {
strokeWidth?: number;
className?: string;
+ hideText?: boolean;
}
-const LoadingCircle = ({ strokeWidth = 12, className }: LoadingCircleProps) => {
+const LoadingCircle = ({ strokeWidth = 12, className, hideText }: LoadingCircleProps) => {
return (
- <span>Loading...</span>
+ { hideText ? <></> : <span>Loading...</span> }
);
};
diff --git a/web/components/Logs.tsx b/web/components/Logs.tsx
new file mode 100644
index 0000000..6e48727
--- /dev/null
+++ b/web/components/Logs.tsx
@@ -0,0 +1,30 @@
+"use server"
+
+import { createSupabaseServerClient } from "@/utils/supabase-server";
+import RealtimeLogs from "./RealtimeLogs";
+
+export default async function Logs(props: { runId: string }) {
+ const supabase = createSupabaseServerClient()
+
+ const { data: run } = await supabase.from('runs').select(`
+ id,
+ prompt,
+ logs(
+ id,
+ run_id,
+ created_at,
+ value,
+ ended_at,
+ status,
+ step_name
+ )
+ `).eq("id", props.runId).single()
+
+ if (!run) {
+ throw new Error(`Run with ID '${props.runId}' not found`)
+ }
+
+ return (
+   <RealtimeLogs logs={run.logs} run={{ id: run.id, prompt: run.prompt }} />
+ )
+}
\ No newline at end of file
diff --git a/web/components/Prompt.tsx b/web/components/Prompt.tsx
index 69de7df..498f92e 100644
--- a/web/components/Prompt.tsx
+++ b/web/components/Prompt.tsx
@@ -1,13 +1,10 @@
"use client";
-import { ChangeEvent, useEffect, useState } from "react";
+import { ChangeEvent, useState } from "react";
import TextField from "./TextField";
import ChatInputButton from "./ChatInputButton";
import { SparkleIcon } from "./Icons";
-import Image from "next/image";
import { startWorker } from "@/app/actions";
-import { createSupabaseBrowserClient } from "@/utils/supabase-browser";
-import { Tables } from "@/supabase/dbTypes";
import { useRouter } from "next/navigation";
import LoadingCircle from "./LoadingCircle";
import PromptInput from "./PromptInput";
@@ -25,60 +22,27 @@ const PROMPT_SUGESTIONS = [
export default function Prompt() {
const [prompt, setPrompt] = useState("");
const [isWaiting, setIsWaiting] = useState(false);
- const [workerId, setWorkerId] = useState();
- const [runId, setRunId] = useState();
- const [status, setStatus] = useState();
const router = useRouter();
- const supabase = createSupabaseBrowserClient();
const sendPrompt = async (prompt: string) => {
setIsWaiting(true);
try {
const response = await startWorker(prompt);
- setWorkerId(response.workerId);
- setRunId(response.runId);
+ router.push(`/r/${response.runId}/progress`)
} finally {
setIsWaiting(false);
}
};
- useEffect(() => {
- if (runId) {
- const channel = supabase
- .channel("logs-added")
- .on(
- "postgres_changes",
- {
- event: "INSERT",
- table: "logs",
- schema: "public",
- filter: `run_id=eq.${runId}`,
- },
- (payload: { new: Tables<"logs"> }) => {
- if (payload.new.message === "STRATEGY_CREATED") {
- router.push(`strategy/${workerId}`);
- return;
- }
- setStatus(payload.new.message);
- }
- )
- .subscribe();
-
- return () => {
- supabase.removeChannel(channel);
- };
- }
- }, [workerId, supabase, runId, workerId]);
-
return (
<>
- {status ? (
+ {isWaiting ? (
) : (
diff --git a/web/components/RealtimeLogs.tsx b/web/components/RealtimeLogs.tsx
new file mode 100644
index 0000000..1030ba3
--- /dev/null
+++ b/web/components/RealtimeLogs.tsx
@@ -0,0 +1,142 @@
+"use client"
+
+import { Tables } from "@/supabase/dbTypes";
+import { createSupabaseBrowserClient } from "@/utils/supabase-browser";
+import clsx from "clsx";
+import { useRouter } from "next/navigation";
+import { useState, useEffect } from "react";
+import LoadingCircle from "./LoadingCircle";
+import Button from "./Button";
+
+const UNSTARTED_TEXTS: Record<Tables<"logs">["step_name"], string> = {
+ FETCH_PROJECTS: "Search for relevant projects",
+ EVALUATE_PROJECTS: "Evaluate proof of impact",
+ ANALYZE_FUNDING: "Analyze funding needs",
+ SYNTHESIZE_RESULTS: "Synthesize results",
+}
+
+const LOADING_TEXTS: Record<Tables<"logs">["step_name"], string> = {
+ FETCH_PROJECTS: "Searching for relevant projects...",
+ EVALUATE_PROJECTS: "Evaluating proof of impact...",
+ ANALYZE_FUNDING: "Analyzing funding needs...",
+ SYNTHESIZE_RESULTS: "Synthesizing results...",
+}
+
+const STEPS_ORDER: Record<Tables<"logs">["step_name"], number> = {
+ FETCH_PROJECTS: 1,
+ EVALUATE_PROJECTS: 2,
+ ANALYZE_FUNDING: 3,
+ SYNTHESIZE_RESULTS: 4,
+}
+
+const getLogMessage = (log: Tables<"logs">) => {
+ switch (log.status) {
+ case "NOT_STARTED": return UNSTARTED_TEXTS[log.step_name]
+ case "IN_PROGRESS": return LOADING_TEXTS[log.step_name]
+ case "COMPLETED": return log.value ?? `Completed: ${UNSTARTED_TEXTS[log.step_name]}`
+ case "ERRORED": return `Error while ${LOADING_TEXTS[log.step_name].toLowerCase()}`
+ }
+}
+
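+// A run counts as finished once the last step in STEPS_ORDER
+// (SYNTHESIZE_RESULTS) has reached COMPLETED status.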
+const checkIfFinished = (logs: Tables<"logs">[]) => {
+ const sortedLogs = [...logs].sort((a, b) => {
+ return STEPS_ORDER[a.step_name] - STEPS_ORDER[b.step_name]
+ })
+ const lastStep = sortedLogs.slice(-1)[0];
+ const isFinished = lastStep?.status === "COMPLETED" && lastStep.step_name === "SYNTHESIZE_RESULTS"
+
+ return isFinished
+}
+
+export default function RealtimeLogs(props: {
+ logs: Tables<"logs">[]
+ run: {
+ id: string;
+ prompt: string;
+ }
+}) {
+ const [logs, setLogs] = useState<Tables<"logs">[]>(props.logs)
+ const supabase = createSupabaseBrowserClient();
+ const router = useRouter()
+
+ const sortedLogsWithSteps = [...logs].sort((a, b) => {
+ return STEPS_ORDER[a.step_name] - STEPS_ORDER[b.step_name]
+ })
+
+ const isFinished = checkIfFinished(sortedLogsWithSteps)
+
+ const navigateToStrategy = () => {
+ router.push(`./`)
+ }
+
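+ // On any UPDATE to this run's logs, re-fetch the full log list so every
+ // step's status stays current, then redirect once the run has finished.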
+ useEffect(() => {
+ const channel = supabase
+ .channel("logs-added")
+ .on(
+ "postgres_changes",
+ {
+ event: "UPDATE",
+ table: "logs",
+ schema: "public",
+ filter: `run_id=eq.${props.run.id}`,
+ },
+ async () => {
+ const response = await supabase.from("logs").select(`
+ id,
+ run_id,
+ created_at,
+ value,
+ ended_at,
+ status,
+ step_name
+ `).eq("run_id", props.run.id)
+ const updatedLogs = response.data
+
+ if (!updatedLogs) {
+ throw new Error(`Logs for Run with ID '${props.run.id}' not found`)
+ }
+
+ setLogs([...updatedLogs])
+
+ if (checkIfFinished(updatedLogs)) {
+ navigateToStrategy()
+ return;
+ }
+ }
+ )
+ .subscribe()
+
+ return () => {
+ supabase.removeChannel(channel);
+ };
+ }, [supabase, props.run.id]);
+
+ return (
+   <>
+     {/* structure below is a sketch; exact wrappers, classes, and button label are assumed */}
+     <div>
+       <h2>Results:</h2>
+       <div>
+         { sortedLogsWithSteps.map(log => (
+           <div key={log.id}>
+             { log.status === "IN_PROGRESS" ? <LoadingCircle hideText={true} /> : <></> }
+             <p>{ getLogMessage(log) }</p>
+           </div>
+         )) }
+       </div>
+       <Button disabled={!isFinished} onClick={navigateToStrategy}>
+         View strategy
+       </Button>
+     </div>
+   </>
+ )
+}
\ No newline at end of file
diff --git a/web/supabase/dbTypes.ts b/web/supabase/dbTypes.ts
index 940eaa2..f402b3e 100644
--- a/web/supabase/dbTypes.ts
+++ b/web/supabase/dbTypes.ts
@@ -12,26 +12,39 @@ export interface Database {
applications: {
Row: {
answers: Json | null
+ created_at: number
id: string
+ network: number
project_id: string
recipient: string
round: string
}
Insert: {
answers?: Json | null
+ created_at: number
id: string
+ network: number
project_id: string
recipient: string
round: string
}
Update: {
answers?: Json | null
+ created_at?: number
id?: string
+ network?: number
project_id?: string
recipient?: string
round?: string
}
Relationships: [
+ {
+ foreignKeyName: "applications_project_id_fkey"
+ columns: ["project_id"]
+ isOneToOne: false
+ referencedRelation: "funding_entries_view"
+ referencedColumns: ["project_id"]
+ },
{
foreignKeyName: "applications_project_id_fkey"
columns: ["project_id"]
@@ -73,6 +86,13 @@ export interface Database {
weight?: number
}
Relationships: [
+ {
+ foreignKeyName: "funding_entries_project_id_fkey"
+ columns: ["project_id"]
+ isOneToOne: false
+ referencedRelation: "funding_entries_view"
+ referencedColumns: ["project_id"]
+ },
{
foreignKeyName: "funding_entries_project_id_fkey"
columns: ["project_id"]
@@ -80,6 +100,13 @@ export interface Database {
referencedRelation: "projects"
referencedColumns: ["id"]
},
+ {
+ foreignKeyName: "funding_entries_run_id_fkey"
+ columns: ["run_id"]
+ isOneToOne: false
+ referencedRelation: "funding_entries_view"
+ referencedColumns: ["run_id"]
+ },
{
foreignKeyName: "funding_entries_run_id_fkey"
columns: ["run_id"]
@@ -91,7 +118,7 @@ export interface Database {
}
gitcoin_applications: {
Row: {
- created_at: string
+ created_at: number
data: Json
id: string
pointer: string
@@ -100,7 +127,7 @@ export interface Database {
round_id: string
}
Insert: {
- created_at?: string
+ created_at: number
data: Json
id: string
pointer: string
@@ -109,7 +136,7 @@ export interface Database {
round_id: string
}
Update: {
- created_at?: string
+ created_at?: number
data?: Json
id?: string
pointer?: string
@@ -135,6 +162,7 @@ export interface Database {
is_failed: boolean
is_running: boolean
last_updated_at: string
+ network_id: number
skip_projects: number
skip_rounds: number
url: string
@@ -146,6 +174,7 @@ export interface Database {
is_failed?: boolean
is_running?: boolean
last_updated_at?: string
+ network_id: number
skip_projects?: number
skip_rounds?: number
url: string
@@ -157,6 +186,7 @@ export interface Database {
is_failed?: boolean
is_running?: boolean
last_updated_at?: string
+ network_id?: number
skip_projects?: number
skip_rounds?: number
url?: string
@@ -190,23 +220,39 @@ export interface Database {
logs: {
Row: {
created_at: string
+ ended_at: string | null
id: string
- message: string
run_id: string
+ status: Database["public"]["Enums"]["step_status"]
+ step_name: Database["public"]["Enums"]["step_name"]
+ value: string | null
}
Insert: {
created_at?: string
+ ended_at?: string | null
id?: string
- message: string
run_id: string
+ status: Database["public"]["Enums"]["step_status"]
+ step_name: Database["public"]["Enums"]["step_name"]
+ value?: string | null
}
Update: {
created_at?: string
+ ended_at?: string | null
id?: string
- message?: string
run_id?: string
+ status?: Database["public"]["Enums"]["step_status"]
+ step_name?: Database["public"]["Enums"]["step_name"]
+ value?: string | null
}
Relationships: [
+ {
+ foreignKeyName: "logs_run_id_fkey"
+ columns: ["run_id"]
+ isOneToOne: false
+ referencedRelation: "funding_entries_view"
+ referencedColumns: ["run_id"]
+ },
{
foreignKeyName: "logs_run_id_fkey"
columns: ["run_id"]
@@ -221,18 +267,21 @@ export interface Database {
description: string | null
id: string
title: string | null
+ updated_at: number
website: string | null
}
Insert: {
description?: string | null
id: string
title?: string | null
+ updated_at: number
website?: string | null
}
Update: {
description?: string | null
id?: string
title?: string | null
+ updated_at?: number
website?: string | null
}
Relationships: []
@@ -298,6 +347,13 @@ export interface Database {
weight?: number | null
}
Relationships: [
+ {
+ foreignKeyName: "strategy_entries_project_id_fkey"
+ columns: ["project_id"]
+ isOneToOne: false
+ referencedRelation: "funding_entries_view"
+ referencedColumns: ["project_id"]
+ },
{
foreignKeyName: "strategy_entries_project_id_fkey"
columns: ["project_id"]
@@ -305,6 +361,13 @@ export interface Database {
referencedRelation: "projects"
referencedColumns: ["id"]
},
+ {
+ foreignKeyName: "strategy_entries_run_id_fkey"
+ columns: ["run_id"]
+ isOneToOne: false
+ referencedRelation: "funding_entries_view"
+ referencedColumns: ["run_id"]
+ },
{
foreignKeyName: "strategy_entries_run_id_fkey"
columns: ["run_id"]
@@ -331,13 +394,30 @@ export interface Database {
}
}
Views: {
- [_ in never]: never
+ funding_entries_view: {
+ Row: {
+ amount: string | null
+ description: string | null
+ network: number | null
+ project_id: string | null
+ recipient: string | null
+ run_id: string | null
+ title: string | null
+ token: string | null
+ }
+ Relationships: []
+ }
}
Functions: {
[_ in never]: never
}
Enums: {
- [_ in never]: never
+ step_name:
+ | "FETCH_PROJECTS"
+ | "EVALUATE_PROJECTS"
+ | "ANALYZE_FUNDING"
+ | "SYNTHESIZE_RESULTS"
+ step_status: "NOT_STARTED" | "IN_PROGRESS" | "COMPLETED" | "ERRORED"
}
CompositeTypes: {
[_ in never]: never
diff --git a/web/supabase/migrations/20240118120046_init.sql b/web/supabase/migrations/20240118120046_init.sql
index 3897bbb..8017932 100644
--- a/web/supabase/migrations/20240118120046_init.sql
+++ b/web/supabase/migrations/20240118120046_init.sql
@@ -47,6 +47,7 @@ SELECT
CREATE TABLE "public"."projects" (
"id" text NOT NULL,
+ "updated_at" int NOT NULL,
"title" TEXT,
"description" TEXT,
"website" TEXT,
diff --git a/web/supabase/migrations/20240118150300_gitcoin.sql b/web/supabase/migrations/20240118150300_gitcoin.sql
index 8ba1b28..4a53cee 100644
--- a/web/supabase/migrations/20240118150300_gitcoin.sql
+++ b/web/supabase/migrations/20240118150300_gitcoin.sql
@@ -11,7 +11,7 @@ ALTER TABLE "public"."gitcoin_projects" enable row level security;
create table "public"."gitcoin_applications" (
"id" text not null,
- "created_at" timestamp with time zone not null default now(),
+ "created_at" int not null,
"data" json not null,
"protocol" int not null,
"pointer" text not null,
@@ -27,6 +27,7 @@ create table "public"."gitcoin_indexing_jobs" (
"id" uuid not null default gen_random_uuid(),
"created_at" timestamp with time zone not null default now(),
"url" text not null,
+ "network_id" int not null,
"is_running" boolean not null default false,
"skip_rounds" int not null default 0,
"skip_projects" int not null default 0,
@@ -38,14 +39,13 @@ create table "public"."gitcoin_indexing_jobs" (
ALTER TABLE "public"."gitcoin_indexing_jobs" OWNER TO "postgres";
ALTER TABLE "public"."gitcoin_indexing_jobs" enable row level security;
-insert into "public"."gitcoin_indexing_jobs" ("url") values
- ('https://api.thegraph.com/subgraphs/name/allo-protocol/grants-round-polygon'),
- ('https://api.thegraph.com/subgraphs/name/vacekj/allo-mainnet'),
- ('https://graph-gitcoin-mainnet.hirenodes.io/subgraphs/name/gitcoin/allo'),
- ('https://api.thegraph.com/subgraphs/name/gitcoinco/gitcoin-grants-arbitrum-one'),
- ('https://api.thegraph.com/subgraphs/name/gitcoinco/grants-round-optimism-mainnet'),
- ('https://api.studio.thegraph.com/query/45391/grants-round-base/v0.0.1'),
- ('https://api.studio.thegraph.com/query/45391/grants-round-zkera/v0.0.2'),
- ('https://api.thegraph.com/subgraphs/name/gitcoinco/grants-round-avalanche-mainnet'),
- ('https://api.thegraph.com/subgraphs/name/gitcoinco/grants-round-fantom-mainnet'),
- ('https://api.thegraph.com/subgraphs/name/gitcoinco/grants-round-optimism-mainnet');
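+-- network_id records the EVM chain ID of each subgraph's network
+-- (e.g. 1 = Ethereum mainnet, 137 = Polygon, 10 = Optimism).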
+insert into "public"."gitcoin_indexing_jobs" ("url", "network_id") values
+ ('https://api.thegraph.com/subgraphs/name/allo-protocol/grants-round-polygon', 137),
+ ('https://api.thegraph.com/subgraphs/name/vacekj/allo-mainnet', 1),
+ ('https://graph-gitcoin-mainnet.hirenodes.io/subgraphs/name/gitcoin/allo', 424),
+ ('https://api.thegraph.com/subgraphs/name/gitcoinco/gitcoin-grants-arbitrum-one', 42161),
+ ('https://api.thegraph.com/subgraphs/name/gitcoinco/grants-round-optimism-mainnet', 10),
+ ('https://api.studio.thegraph.com/query/45391/grants-round-base/v0.0.1', 8453),
+ ('https://api.studio.thegraph.com/query/45391/grants-round-zkera/v0.0.2', 324),
+ ('https://api.thegraph.com/subgraphs/name/gitcoinco/grants-round-avalanche-mainnet', 43114),
+ ('https://api.thegraph.com/subgraphs/name/gitcoinco/grants-round-fantom-mainnet', 250);
diff --git a/web/supabase/migrations/20240123163623_agent_workflow.sql b/web/supabase/migrations/20240123163623_agent_workflow.sql
index 647216d..a01f231 100644
--- a/web/supabase/migrations/20240123163623_agent_workflow.sql
+++ b/web/supabase/migrations/20240123163623_agent_workflow.sql
@@ -1,59 +1,14 @@
create table "public"."applications" (
"id" text not null,
+ "created_at" int not null,
"recipient" text not null,
+ "network" int not null,
"round" text not null,
"answers" json,
- "project_id" text not null
+ "project_id" text not null,
+ FOREIGN KEY ("project_id") REFERENCES "public"."projects"("id"),
+ PRIMARY KEY ("id")
);
-alter table "public"."applications" enable row level security;
-
-CREATE UNIQUE INDEX applications_pkey ON public.applications USING btree (id);
-
-alter table "public"."applications" add constraint "applications_pkey" PRIMARY KEY using index "applications_pkey";
-
-alter table "public"."applications" add constraint "applications_project_id_fkey" FOREIGN KEY (project_id) REFERENCES projects(id) not valid;
-
-alter table "public"."applications" validate constraint "applications_project_id_fkey";
-
-grant delete on table "public"."applications" to "anon";
-
-grant insert on table "public"."applications" to "anon";
-
-grant references on table "public"."applications" to "anon";
-
-grant select on table "public"."applications" to "anon";
-
-grant trigger on table "public"."applications" to "anon";
-
-grant truncate on table "public"."applications" to "anon";
-
-grant update on table "public"."applications" to "anon";
-
-grant delete on table "public"."applications" to "authenticated";
-
-grant insert on table "public"."applications" to "authenticated";
-
-grant references on table "public"."applications" to "authenticated";
-
-grant select on table "public"."applications" to "authenticated";
-
-grant trigger on table "public"."applications" to "authenticated";
-
-grant truncate on table "public"."applications" to "authenticated";
-
-grant update on table "public"."applications" to "authenticated";
-
-grant delete on table "public"."applications" to "service_role";
-
-grant insert on table "public"."applications" to "service_role";
-
-grant references on table "public"."applications" to "service_role";
-
-grant select on table "public"."applications" to "service_role";
-
-grant trigger on table "public"."applications" to "service_role";
-
-grant truncate on table "public"."applications" to "service_role";
-
-grant update on table "public"."applications" to "service_role";
\ No newline at end of file
+ALTER TABLE "public"."applications" OWNER TO "postgres";
+ALTER TABLE "public"."applications" enable row level security;
diff --git a/web/supabase/migrations/20240125221508_fundings.sql b/web/supabase/migrations/20240125221508_fundings.sql
index 37d4c2d..137427e 100644
--- a/web/supabase/migrations/20240125221508_fundings.sql
+++ b/web/supabase/migrations/20240125221508_fundings.sql
@@ -25,10 +25,10 @@ SELECT
FE.amount,
P.description,
P.title,
- R.worker_id,
- P.id,
+ P.id as project_id,
A.network,
- FE.token
+ FE.token,
+ R.id as run_id
FROM runs R
INNER JOIN funding_entries FE ON R.id = FE.run_id
INNER JOIN projects P ON FE.project_id = P.id
diff --git a/web/supabase/migrations/20240126113522_logs.sql b/web/supabase/migrations/20240126113522_logs.sql
new file mode 100644
index 0000000..a7cacc6
--- /dev/null
+++ b/web/supabase/migrations/20240126113522_logs.sql
@@ -0,0 +1,14 @@
+
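+-- Replace free-text log messages with structured step tracking: each log row
+-- now records a workflow step, its status, and an optional result value.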
+create type "public"."step_name" as enum ('FETCH_PROJECTS', 'EVALUATE_PROJECTS', 'ANALYZE_FUNDING', 'SYNTHESIZE_RESULTS');
+
+create type "public"."step_status" as enum ('NOT_STARTED', 'IN_PROGRESS', 'COMPLETED', 'ERRORED');
+
+alter table "public"."logs" drop column "message";
+
+alter table "public"."logs" add column "ended_at" timestamp with time zone;
+
+alter table "public"."logs" add column "status" step_status not null;
+
+alter table "public"."logs" add column "step_name" step_name not null;
+
+alter table "public"."logs" add column "value" text;
\ No newline at end of file
diff --git a/web/utils/ethereum/splitTransferFunds.ts b/web/utils/ethereum/splitTransferFunds.ts
index d0d0297..1e1b9c3 100644
--- a/web/utils/ethereum/splitTransferFunds.ts
+++ b/web/utils/ethereum/splitTransferFunds.ts
@@ -31,20 +31,17 @@ export async function splitTransferFunds(
if (!tokenAddress || tokenAddress === ethers.constants.AddressZero) {
// Ether transfer
- console.log("ether transfer");
await disperseContract.disperseEther(validAddresses, values, {
value: totalValue,
});
} else {
// ERC20 token transfer
- console.log("tokenAddress", tokenAddress);
const tokenContract = new ethers.Contract(tokenAddress, ERC20_ABI, signer);
const currentAllowance: BigNumber = await tokenContract.allowance(
await signer.getAddress(),
DISPERSE_CONTRACT_ADDRESS
);
- console.log("currentAllowance", currentAllowance);
if (currentAllowance.lt(totalValue)) {
const approveTx = await tokenContract.approve(DISPERSE_CONTRACT_ADDRESS, totalValue);
diff --git a/workers/fund_public_goods/api/runs.py b/workers/fund_public_goods/api/runs.py
index 22800a4..ee9673c 100644
--- a/workers/fund_public_goods/api/runs.py
+++ b/workers/fund_public_goods/api/runs.py
@@ -1,6 +1,6 @@
from fund_public_goods.inngest_client import inngest_client
from fund_public_goods.workflows.create_strategy.events import CreateStrategyEvent
-from fund_public_goods.db import client, tables
+from fund_public_goods.db import client, tables, entities
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
@@ -22,16 +22,19 @@ async def runs(worker_id: str, params: Params) -> Response:
if prompt == "":
raise HTTPException(status_code=400, detail="Prompt cannot be empty.")
- supabase = client.create_admin()
- worker_exists = tables.workers.exists(supabase, worker_id)
+ db = client.create_admin()
+ worker_exists = tables.workers.exists(db, worker_id)
if not worker_exists:
raise HTTPException(
status_code=400, detail=f"Worker with ID: {worker_id} is not valid"
)
- run_id = tables.runs.insert(supabase, worker_id, prompt)
+ run_id = tables.runs.insert(db, entities.Runs(
+ worker_id=worker_id,
+ prompt=prompt
+ ))
await inngest_client.send(
- CreateStrategyEvent.Data(prompt=prompt, run_id=run_id).to_event()
+ CreateStrategyEvent.Data(run_id=run_id).to_event()
)
return Response(run_id=run_id)
diff --git a/workers/fund_public_goods/api/workers.py b/workers/fund_public_goods/api/workers.py
index 6009acf..3c4fb66 100644
--- a/workers/fund_public_goods/api/workers.py
+++ b/workers/fund_public_goods/api/workers.py
@@ -2,7 +2,7 @@
from pydantic import BaseModel
from fund_public_goods.inngest_client import inngest_client
from fund_public_goods.workflows.create_strategy.events import CreateStrategyEvent
-from fund_public_goods.db import client, tables
+from fund_public_goods.db import client, tables, entities
router = APIRouter()
@@ -23,9 +23,12 @@ async def workers(params: Params) -> Response:
if prompt == "":
raise HTTPException(status_code=400, detail="Prompt cannot be empty.")
- supabase = client.create_admin()
- worker_id = tables.workers.insert(supabase)
- run_id = tables.runs.insert(supabase, worker_id, prompt)
+ db = client.create_admin()
+ worker_id = tables.workers.insert(db)
+ run_id = tables.runs.insert(db, entities.Runs(
+ worker_id=worker_id,
+ prompt=prompt
+ ))
await inngest_client.send(
CreateStrategyEvent.Data(
diff --git a/workers/fund_public_goods/db/__init__.py b/workers/fund_public_goods/db/__init__.py
index 4646060..23c8d7c 100644
--- a/workers/fund_public_goods/db/__init__.py
+++ b/workers/fund_public_goods/db/__init__.py
@@ -1,3 +1,4 @@
+from .tables import applications
from .tables import gitcoin
from .tables import logs
from .tables import projects
diff --git a/workers/fund_public_goods/db/entities.py b/workers/fund_public_goods/db/entities.py
new file mode 100644
index 0000000..f8fc0dc
--- /dev/null
+++ b/workers/fund_public_goods/db/entities.py
@@ -0,0 +1,147 @@
+from enum import Enum
+from uuid import UUID
+import datetime
+from typing import Optional
+from pydantic import BaseModel, Json, Field, ConfigDict
+
+
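+# Pydantic entities mirroring the Supabase tables. Fields carry camelCase
+# aliases, and populate_by_name=True also allows snake_case construction.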
+class StepName(str, Enum):
+
+ ANALYZE_FUNDING = 'ANALYZE_FUNDING'
+ EVALUATE_PROJECTS = 'EVALUATE_PROJECTS'
+ FETCH_PROJECTS = 'FETCH_PROJECTS'
+ SYNTHESIZE_RESULTS = 'SYNTHESIZE_RESULTS'
+
+
+class StepStatus(str, Enum):
+
+ COMPLETED = 'COMPLETED'
+ ERRORED = 'ERRORED'
+ IN_PROGRESS = 'IN_PROGRESS'
+ NOT_STARTED = 'NOT_STARTED'
+
+
+class Applications(BaseModel):
+
+ id: str
+ created_at: int = Field(..., alias="createdAt")
+ recipient: str
+ network: int
+ round: str
+ answers: Optional[Json] = None
+ project_id: str = Field(..., alias="projectId")
+ model_config = ConfigDict(
+ populate_by_name=True
+ )
+
+
+class GitcoinApplications(BaseModel):
+
+ id: str
+ created_at: int = Field(..., alias="createdAt")
+ data: Json
+ protocol: int
+ pointer: str
+ round_id: str = Field(..., alias="roundId")
+ project_id: str = Field(..., alias="projectId")
+ model_config = ConfigDict(
+ populate_by_name=True
+ )
+
+
+class GitcoinIndexingJobs(BaseModel):
+
+ id: Optional[UUID] = None
+ created_at: Optional[datetime.datetime] = Field(default=None, alias="createdAt")
+ url: str
+ network_id: int = Field(..., alias="networkId")
+ is_running: bool = False
+ skip_rounds: int = 0
+ skip_projects: int = 0
+ last_updated_at: Optional[datetime.datetime] = Field(default=None, alias="lastUpdatedAt")
+ is_failed: bool = False
+ error: Optional[str] = None
+
+ model_config = ConfigDict(
+ populate_by_name=True
+ )
+
+
+class GitcoinProjects(BaseModel):
+
+ id: str
+ created_at: Optional[datetime.datetime] = Field(default=None, alias="createdAt")
+ data: Json
+ protocol: int
+ pointer: str
+
+ model_config = ConfigDict(
+ populate_by_name=True
+ )
+
+
+class Logs(BaseModel):
+
+ id: Optional[UUID] = None
+ run_id: UUID = Field(..., alias="runId")
+ created_at: Optional[datetime.datetime] = Field(default=None, alias="createdAt")
+ ended_at: Optional[datetime.datetime] = Field(default=None, alias="endedAt")
+ status: StepStatus
+ step_name: StepName = Field(..., alias="stepName")
+ value: Optional[str] = None
+
+ model_config = ConfigDict(
+ populate_by_name=True
+ )
+
+
+class Projects(BaseModel):
+
+ id: str
+ updated_at: int = Field(..., alias="updatedAt")
+ title: Optional[str] = None
+ description: Optional[str] = None
+ website: Optional[str] = None
+
+ model_config = ConfigDict(
+ populate_by_name=True
+ )
+
+
+class Runs(BaseModel):
+
+ id: Optional[UUID] = None
+ worker_id: UUID = Field(..., alias="workerId")
+ created_at: Optional[datetime.datetime] = Field(default=None, alias="createdAt")
+ prompt: str
+
+ model_config = ConfigDict(
+ populate_by_name=True
+ )
+
+
+class StrategyEntries(BaseModel):
+
+ id: Optional[UUID] = None
+ run_id: UUID = Field(..., alias="runId")
+ project_id: str = Field(..., alias="projectId")
+ created_at: Optional[datetime.datetime] = Field(default=None, alias="createdAt")
+ reasoning: Optional[str] = None
+ impact: Optional[float] = None
+ interest: Optional[float] = None
+ weight: Optional[float] = None
+
+ model_config = ConfigDict(
+ populate_by_name=True
+ )
+
+
+class Workers(BaseModel):
+
+ id: Optional[UUID] = None
+ created_at: Optional[datetime.datetime] = Field(default=None, alias="createdAt")
+
+ model_config = ConfigDict(
+ populate_by_name=True
+ )
\ No newline at end of file
diff --git a/workers/fund_public_goods/db/tables/applications.py b/workers/fund_public_goods/db/tables/applications.py
new file mode 100644
index 0000000..629fd19
--- /dev/null
+++ b/workers/fund_public_goods/db/tables/applications.py
@@ -0,0 +1,43 @@
+from supabase import Client
+from fund_public_goods.db.entities import Applications
+
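+# Data-access helpers for the "applications" table.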
+def insert(
+ db: Client,
+ row: Applications
+):
+ db.table("applications").insert({
+ "id": row.id,
+ "created_at": row.created_at,
+ "recipient": row.recipient,
+ "network": row.network,
+ "round": row.round,
+ "answers": row.answers,
+ "project_id": row.project_id
+ }).execute()
+
+def get_applications(
+ db: Client,
+ project_id: str
+) -> list[Applications]:
+ result = (db.table("applications")
+ .select("id, created_at, recipient, network, round, answers, project_id")
+ .eq("project_id", project_id)
+ .execute())
+
+ if not result.data:
+ return []
+
+ applications = []
+
+ for item in result.data:
+ applications.append(Applications(
+ id=item["id"],
+ created_at=item["created_at"],
+ recipient=item["recipient"],
+ network=item["network"],
+ round=item["round"],
+ answers=item["answers"],
+ project_id=item["project_id"]
+ ))
+
+ return applications
diff --git a/workers/fund_public_goods/db/tables/gitcoin.py b/workers/fund_public_goods/db/tables/gitcoin.py
index b02778a..2625364 100644
--- a/workers/fund_public_goods/db/tables/gitcoin.py
+++ b/workers/fund_public_goods/db/tables/gitcoin.py
@@ -1,65 +1,83 @@
import datetime
import json
-from fund_public_goods.lib.gitcoin.models import GitcoinIndexingJob, ProjectApplicationInfo, ProjectInfo
+from fund_public_goods.db.entities import GitcoinProjects, GitcoinApplications, GitcoinIndexingJobs
from fund_public_goods.db.client import create_admin
+from fund_public_goods.db import tables, entities
-def upsert_project(app: ProjectInfo):
+def upsert_project(project: GitcoinProjects, created_at: int):
db = create_admin()
db.table("gitcoin_projects").upsert({
- "id": app.id,
- "protocol": app.protocol,
- "pointer": app.pointer,
- "data": app.data
- }).execute()
-
- db.table("projects").upsert({
- "id": app.id,
- "title": app.data["title"],
- "description": app.data["description"],
- "website": app.data["website"],
+ "id": project.id,
+ "protocol": project.protocol,
+ "pointer": project.pointer,
+ "data": project.data
}).execute()
-def save_application(app: ProjectApplicationInfo):
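+    # Skip refreshing the derived "projects" row when the stored copy is newer
+    # than this application's created_at timestamp.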
+ result = tables.projects.get(db, project.id)
+
+ if result and result.updated_at > created_at:
+ return
+
+ row = entities.Projects(
+ id=project.id,
+ updated_at=created_at,
+ title=project.data["title"],
+ description=project.data["description"],
+ website=project.data["website"]
+ )
+
+    if result is None:
+ tables.projects.insert(db, row)
+ else:
+ tables.projects.upsert(db, row)
+
+def save_application(app: GitcoinApplications, network: int):
db = create_admin()
-
+
db.table("gitcoin_applications").insert({
"id": app.id,
+ "created_at": app.created_at,
"protocol": app.protocol,
"pointer": app.pointer,
"round_id": app.round_id,
"project_id": app.project_id,
"data": app.data
}).execute()
-
- db.table("applications").insert({
- "id": app.id,
- "recipient": app.data["application"]["recipient"],
- "round": app.round_id,
- "project_id": app.project_id,
- "answers": app.data["application"]["answers"]
- }).execute()
-def get_non_running_job() -> GitcoinIndexingJob | None:
+ tables.applications.insert(db, entities.Applications(
+ id=app.id,
+ created_at=app.created_at,
+ recipient=app.data["application"]["recipient"],
+ network=network,
+ round=app.round_id,
+ answers=json.dumps(app.data["application"]["answers"]),
+ project_id=app.project_id
+ ))
+
+def get_non_running_job() -> GitcoinIndexingJobs | None:
db = create_admin()
result = (db.table("gitcoin_indexing_jobs")
- .select("id", "url", "is_running", "skip_rounds", "skip_projects")
+ .select("id", "url", "is_running", "skip_rounds", "skip_projects", "network_id")
.order("last_updated_at", desc=False)
.eq("is_running", False)
.eq("is_failed", False)
.limit(1)
.execute())
-
+
if not result.data:
return None
- return GitcoinIndexingJob (
- id = result.data[0]["id"],
- url = result.data[0]["url"],
- is_running = result.data[0]["is_running"],
- skip_rounds = result.data[0]["skip_rounds"],
- skip_projects = result.data[0]["skip_projects"]
+ data = result.data[0]
+
+    return GitcoinIndexingJobs(
+ id = data["id"],
+ url = data["url"],
+ network_id = data["network_id"],
+ is_running = data["is_running"],
+ skip_rounds = data["skip_rounds"],
+ skip_projects = data["skip_projects"]
)
def is_any_job_running() -> bool:
diff --git a/workers/fund_public_goods/db/tables/logs.py b/workers/fund_public_goods/db/tables/logs.py
index bac80f8..7db3e01 100644
--- a/workers/fund_public_goods/db/tables/logs.py
+++ b/workers/fund_public_goods/db/tables/logs.py
@@ -1,11 +1,31 @@
+from typing import Literal
from supabase import Client
+import datetime
+from fund_public_goods.db.entities import StepStatus, StepName
-def insert(
+def create(
db: Client,
run_id: str,
- message: str
+ step_name: StepName,
):
- db.table("logs").insert({
+ return db.table("logs").insert({
"run_id": run_id,
- "message": message
+ "step_name": step_name.value,
+ "status": StepStatus.NOT_STARTED.value
}).execute()
+
+def update(
+ db: Client,
+ log_id: str,
+ status: Literal[StepStatus.IN_PROGRESS, StepStatus.COMPLETED, StepStatus.ERRORED],
+ value: str | None
+):
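+    # Stamp ended_at only when the step reaches a terminal state.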
+ ended_at = None
+ if status == StepStatus.COMPLETED or status == StepStatus.ERRORED:
+        ended_at = datetime.datetime.now(datetime.timezone.utc).isoformat()
+
+ return db.table("logs").update({
+ "status": status.value,
+ "value": value,
+ "ended_at": ended_at
+ }).eq("id", log_id).execute()
diff --git a/workers/fund_public_goods/db/tables/projects.py b/workers/fund_public_goods/db/tables/projects.py
index dbd9b04..3ee6417 100644
--- a/workers/fund_public_goods/db/tables/projects.py
+++ b/workers/fund_public_goods/db/tables/projects.py
@@ -1,29 +1,88 @@
from typing import Any, Dict
+from fund_public_goods.lib.strategy.models.answer import Answer
+from fund_public_goods.lib.strategy.models.project import Project
from supabase import Client, PostgrestAPIResponse
-import uuid
+from fund_public_goods.db.entities import Projects
def insert(
- db: Client, title: str, recipient: str, description: str, website: str
-) -> str:
- id = str(uuid.uuid4())
- db.table("projects").insert(
- {
- "id": id,
- "title": title,
- "recipient": recipient,
- "description": description,
- "website": website,
- }
- ).execute()
- return id
+ db: Client,
+ row: Projects
+):
+ db.table("projects").insert({
+ "id": row.id,
+ "updated_at": row.updated_at,
+ "title": row.title,
+ "description": row.description,
+ "website": row.website,
+ }).execute()
+def upsert(
+ db: Client,
+ row: Projects
+):
+ db.table("projects").upsert({
+ "id": row.id,
+ "updated_at": row.updated_at,
+ "title": row.title,
+ "description": row.description,
+ "website": row.website,
+ }).execute()
+
+def get(
+ db: Client,
+ project_id: str
+) -> Projects | None:
+ result = (db.table("projects")
+ .select("id", "updated_at", "title", "description", "website")
+ .eq("id", project_id)
+ .execute())
+
+ if not result.data:
+ return None
+
+ data = result.data[0]
+
+ return Projects(
+ id=data["id"],
+ updated_at=data["updated_at"],
+ title=data["title"],
+ description=data["description"],
+ website=data["website"]
+ )
def get_projects(db: Client) -> PostgrestAPIResponse[Dict[str, Any]]:
return (
db.table("projects")
.select(
- "id, title, description, website, applications(id, recipient, round, answers)"
+ "id, updated_at, title, description, website, applications(id, recipient, round, answers)"
)
.execute()
)
+
+def fetch_projects_data(supabase: Client) -> list[Project]:
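+    # Map raw project rows (with nested applications) into Project models,
+    # flattening every application's answers.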
+ response = get_projects(supabase)
+
+ projects: list[Project] = []
+
+ for item in response.data:
+ answers: list[Answer] = []
+
+ for application in item.get("applications", []):
+ for answer in application.get("answers", []):
+ answers.append(Answer(
+ question=answer.get("question", ""),
+ answer=answer.get("answer", None)
+ ))
+
+ project = Project(
+ id=item.get("id", ""),
+ title=item.get("title", ""),
+ description=item.get("description", ""),
+ website=item.get("website", ""),
+ answers=answers,
+ )
+
+ projects.append(project)
+
+ return projects
\ No newline at end of file
diff --git a/workers/fund_public_goods/db/tables/runs.py b/workers/fund_public_goods/db/tables/runs.py
index 3a51458..67c9b1f 100644
--- a/workers/fund_public_goods/db/tables/runs.py
+++ b/workers/fund_public_goods/db/tables/runs.py
@@ -1,12 +1,15 @@
from supabase import Client
import uuid
+from fund_public_goods.db.entities import Runs
-def insert(db: Client, worker_id: str, prompt: str) -> str:
+def insert(db: Client, row: Runs) -> str:
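+    # The id is generated client-side so it can be returned without reading
+    # the inserted row back.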
id = str(uuid.uuid4())
- db.table("runs").insert(
- {"id": id, "worker_id": worker_id, "prompt": prompt}
- ).execute()
+ db.table("runs").insert({
+ "id": id,
+ "worker_id": str(row.worker_id),
+ "prompt": row.prompt
+ }).execute()
return id
@@ -18,7 +21,7 @@ def get_prompt(db: Client, run_id: str) -> str:
.limit(1)
.single()
.execute()
- .data
+ .data["prompt"]
)
diff --git a/workers/fund_public_goods/db/tables/strategy_entries.py b/workers/fund_public_goods/db/tables/strategy_entries.py
index 77dc4d6..28b2131 100644
--- a/workers/fund_public_goods/db/tables/strategy_entries.py
+++ b/workers/fund_public_goods/db/tables/strategy_entries.py
@@ -1,27 +1,20 @@
from supabase import Client
from fund_public_goods.lib.strategy.models.weighted_project import WeightedProject
+from fund_public_goods.db.entities import StrategyEntries
def insert(
db: Client,
- run_id: str,
- project_id: str,
- reasoning: str,
- impact: float,
- interest: float,
- weight: float,
+ row: StrategyEntries
):
- db.table("strategy_entries").insert(
- {
- "run_id": run_id,
- "project_id": project_id,
- "reasoning": reasoning,
- "impact": impact,
- "interest": interest,
- "weight": weight,
- }
- ).execute()
-
+ db.table("strategy_entries").insert({
+ "run_id": str(row.run_id),
+ "project_id": row.project_id,
+ "reasoning": row.reasoning,
+ "impact": row.impact,
+ "interest": row.interest,
+ "weight": row.weight,
+ }).execute()
def insert_multiple(db: Client, run_id: str, strategies: list[WeightedProject]) -> None:
db.table("strategy_entries").insert(
diff --git a/workers/fund_public_goods/lib/gitcoin/models.py b/workers/fund_public_goods/lib/gitcoin/models.py
index 0e57356..3fa2b63 100644
--- a/workers/fund_public_goods/lib/gitcoin/models.py
+++ b/workers/fund_public_goods/lib/gitcoin/models.py
@@ -1,21 +1,11 @@
from pydantic import BaseModel, ConfigDict, Field
-class GitcoinIndexingJob(BaseModel):
- id: str
- url: str
- is_running: bool = Field(..., alias="isRunning")
- skip_rounds: int = Field(..., alias="skipRounds")
- skip_projects: int = Field(..., alias="skipProjects")
-
- model_config = ConfigDict(
- populate_by_name=True,
- )
-
class RoundInfo(BaseModel):
id: str
class ApplicationInfo(BaseModel):
id: str
+ created_at: int = Field(..., alias="createdAt")
protocol: int
pointer: str
round_id: str = Field(..., alias="roundId")
@@ -23,19 +13,3 @@ class ApplicationInfo(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
-
-class ProjectApplicationInfo(BaseModel):
- model_config = ConfigDict(populate_by_name=True)
-
- id: str
- protocol: int
- pointer: str
- round_id: str = Field(..., alias="roundId")
- project_id: str = Field(..., alias="projectId")
- data: dict
-
-class ProjectInfo(BaseModel):
- id: str
- protocol: int
- pointer: str
- data: dict
diff --git a/workers/fund_public_goods/lib/gitcoin/utils.py b/workers/fund_public_goods/lib/gitcoin/utils.py
index c026282..cc5ca1a 100644
--- a/workers/fund_public_goods/lib/gitcoin/utils.py
+++ b/workers/fund_public_goods/lib/gitcoin/utils.py
@@ -49,6 +49,7 @@ def fetch_project_applications(url: str, round_id: str, skip: int, first: int) -
skip: $skip
) {
id
+ createdAt
metaPtr {
protocol
pointer
@@ -64,8 +65,6 @@ def fetch_project_applications(url: str, round_id: str, skip: int, first: int) -
}
}
- print(f"Fetching projects for round {round_id} ...")
-
response = requests.post(url, json=data)
if response.status_code == 200:
@@ -78,8 +77,9 @@ def fetch_project_applications(url: str, round_id: str, skip: int, first: int) -
apps = [
ApplicationInfo(
- id = project["id"],
- protocol = project['metaPtr']['protocol'],
+ id = project["id"],
+ created_at = project["createdAt"],
+ protocol = project['metaPtr']['protocol'],
pointer = project['metaPtr']['pointer'],
round_id = round_id,
)
diff --git a/workers/fund_public_goods/lib/strategy/utils/evaluate_projects.py b/workers/fund_public_goods/lib/strategy/utils/evaluate_projects.py
index 19a3623..c5959f2 100644
--- a/workers/fund_public_goods/lib/strategy/utils/evaluate_projects.py
+++ b/workers/fund_public_goods/lib/strategy/utils/evaluate_projects.py
@@ -1,76 +1,13 @@
-from chromadb import EphemeralClient
+from fund_public_goods.lib.strategy.utils.utils import stringify_projects
from langchain_openai import ChatOpenAI
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_core.output_parsers.json import JsonOutputParser
-from langchain_openai import OpenAIEmbeddings
-from langchain.vectorstores.chroma import Chroma
-from fund_public_goods.lib.strategy.utils.generate_queries import generate_queries
from fund_public_goods.lib.strategy.models.evaluated_project import EvaluatedProject
from fund_public_goods.lib.strategy.models.project import Project
from fund_public_goods.lib.strategy.models.project_evaluation import ProjectEvaluation
-def stringify_projects(projects: list[Project], separator: str) -> str:
- project_strings = []
-
- for project in projects:
- project_str = get_project_text(project=project)
- project_strings.append(project_str)
-
- return separator.join(project_strings)
-
-def get_project_text(project: Project) -> str:
- result = f"ID: {project.id} - Description: {project.description}\n"
-
- for answer in project.answers:
- result += f" Question: {answer.question}\n"
- result += f" Answer: {answer.answer}\n"
-
- return result
-
-def remove_duplicate_projects(projects: list[Project]) -> list[Project]:
- seen = {}
- unique_projects = []
-
- for project in projects:
- if project.id not in seen:
- unique_projects.append(project)
- seen[project.id] = True
-
- return unique_projects
-
-def get_top_matching_projects(prompt: str, projects: list[Project]) -> list[Project]:
- projects_by_id = {project.id: project for project in projects}
- queries = generate_queries(prompt=prompt, n=3)
- texts: list[str] = []
- metadatas: list[dict] = []
-
- for project in projects:
- project_text = get_project_text(project=project)
- texts.append(project_text)
- metadatas.append({ "id": project["id"] })
-
- db_client = EphemeralClient()
- collection = Chroma.from_texts(
- texts=texts,
- metadatas=metadatas,
- embedding=OpenAIEmbeddings(),
- client=db_client,
- collection_name="projects"
- )
-
- top_matches: list[Project] = []
-
- for query in queries:
- matches = collection.similarity_search(query, k=5)
-
- for match in matches:
- matched_project = projects_by_id[match.metadata["id"]]
- top_matches.append(matched_project)
-
- return remove_duplicate_projects(top_matches)
-
extract_evaluations_prompts_template = """
You will be given a list of project evaluations that measure how well each project
matches the user's interest, and its impact in regards to that interest.
@@ -139,7 +76,6 @@ def extract_project_evaluations(evaluation_report: str) -> list[ProjectEvaluatio
def evaluate_projects(prompt: str, projects: list[Project]) -> list[EvaluatedProject]:
projects_by_id = {project.id: project for project in projects}
- top_matching_projects = get_top_matching_projects(prompt=prompt, projects=projects)
evaluation_prompt = ChatPromptTemplate.from_messages([
("system", evaluation_prompt_template),
@@ -154,7 +90,7 @@ def evaluate_projects(prompt: str, projects: list[Project]) -> list[EvaluatedPro
evaluation_report = evaluation_chain.invoke({
"prompt": prompt,
"separator": separator,
- "projects": stringify_projects(projects=top_matching_projects, separator=separator)
+ "projects": stringify_projects(projects=projects, separator=separator)
})
evaluations = extract_project_evaluations(evaluation_report=evaluation_report)
diff --git a/workers/fund_public_goods/lib/strategy/utils/generate_queries.py b/workers/fund_public_goods/lib/strategy/utils/generate_queries.py
index bbd62ce..1d3c0c7 100644
--- a/workers/fund_public_goods/lib/strategy/utils/generate_queries.py
+++ b/workers/fund_public_goods/lib/strategy/utils/generate_queries.py
@@ -2,6 +2,7 @@
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import CommaSeparatedListOutputParser
+
queries_prompt_template = """
Your goal is to provide a list of queries that will be used to perform
-and embeddings search over different project descriptions and get the ones
+an embeddings search over different project descriptions and get the ones
diff --git a/workers/fund_public_goods/lib/strategy/utils/get_top_matching_projects.py b/workers/fund_public_goods/lib/strategy/utils/get_top_matching_projects.py
new file mode 100644
index 0000000..253dbda
--- /dev/null
+++ b/workers/fund_public_goods/lib/strategy/utils/get_top_matching_projects.py
@@ -0,0 +1,38 @@
+from chromadb import EphemeralClient
+from fund_public_goods.lib.strategy.models.project import Project
+from fund_public_goods.lib.strategy.utils.generate_queries import generate_queries
+from fund_public_goods.lib.strategy.utils.utils import get_project_text, remove_duplicate_projects
+from langchain_openai import OpenAIEmbeddings
+from langchain.vectorstores.chroma import Chroma
+
+
+def get_top_matching_projects(prompt: str, projects: list[Project]) -> list[Project]:
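+    # Embed every project's text into an in-memory Chroma collection, then
+    # collect the top-5 similarity hits for each generated query and
+    # de-duplicate the combined results.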
+ projects_by_id = {project.id: project for project in projects}
+ queries = generate_queries(prompt=prompt, n=3)
+ texts: list[str] = []
+ metadatas: list[dict] = []
+
+ for project in projects:
+ project_text = get_project_text(project=project)
+ texts.append(project_text)
+        metadatas.append({ "id": project.id })
+
+ db_client = EphemeralClient()
+ collection = Chroma.from_texts(
+ texts=texts,
+ metadatas=metadatas,
+ embedding=OpenAIEmbeddings(),
+ client=db_client,
+ collection_name="projects"
+ )
+
+ top_matches: list[Project] = []
+
+ for query in queries:
+ matches = collection.similarity_search(query, k=5)
+
+ for match in matches:
+ matched_project = projects_by_id[match.metadata["id"]]
+ top_matches.append(matched_project)
+
+ return remove_duplicate_projects(top_matches)
\ No newline at end of file
diff --git a/workers/fund_public_goods/lib/strategy/utils/utils.py b/workers/fund_public_goods/lib/strategy/utils/utils.py
new file mode 100644
index 0000000..4cd7a31
--- /dev/null
+++ b/workers/fund_public_goods/lib/strategy/utils/utils.py
@@ -0,0 +1,33 @@
+from fund_public_goods.lib.strategy.models.project import Project
+
+
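+# Shared helpers for rendering Project models as prompt-ready text.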
+def stringify_projects(projects: list[Project], separator: str) -> str:
+ project_strings = []
+
+ for project in projects:
+ project_str = get_project_text(project=project)
+ project_strings.append(project_str)
+
+ return separator.join(project_strings)
+
+
+def get_project_text(project: Project) -> str:
+ result = f"ID: {project.id} - Description: {project.description}\n"
+
+ for answer in project.answers:
+ result += f" Question: {answer.question}\n"
+ result += f" Answer: {answer.answer}\n"
+
+ return result
+
+
+def remove_duplicate_projects(projects: list[Project]) -> list[Project]:
+ seen = {}
+ unique_projects = []
+
+ for project in projects:
+ if project.id not in seen:
+ unique_projects.append(project)
+ seen[project.id] = True
+
+ return unique_projects
\ No newline at end of file
diff --git a/workers/fund_public_goods/workflows/create_strategy/functions/create_strategy.py b/workers/fund_public_goods/workflows/create_strategy/functions/create_strategy.py
index ff4e688..ee52f13 100644
--- a/workers/fund_public_goods/workflows/create_strategy/functions/create_strategy.py
+++ b/workers/fund_public_goods/workflows/create_strategy/functions/create_strategy.py
@@ -1,3 +1,6 @@
+import json
+from fund_public_goods.lib.strategy.utils.get_top_matching_projects import get_top_matching_projects
import inngest
from supabase import Client
from fund_public_goods.lib.strategy.utils.assign_weights import assign_weights
@@ -9,40 +12,33 @@
)
from fund_public_goods.lib.strategy.models.project import Project
from fund_public_goods.lib.strategy.models.weighted_project import WeightedProject
-from fund_public_goods.db.tables.projects import get_projects
+from fund_public_goods.db.tables.projects import fetch_projects_data
from fund_public_goods.db.tables.runs import get_prompt
from fund_public_goods.db.tables.strategy_entries import insert_multiple
from fund_public_goods.db import client, logs
+from fund_public_goods.db.entities import StepName, StepStatus
from fund_public_goods.workflows.create_strategy.events import CreateStrategyEvent
+def fetch_matching_projects(supabase: Client, prompt: str) -> list[dict]:
+ projects = fetch_projects_data(supabase)
+ matching_projects = get_top_matching_projects(prompt, projects)
+
+ return [project.model_dump() for project in matching_projects]
-def fetch_projects_data(supabase: Client) -> list[Project]:
- response = get_projects(supabase)
- projects = []
- for item in response.data:
- answers = []
+def initialize_logs(supabase: Client, run_id: str) -> str:
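+    # Create one NOT_STARTED log row per workflow step up front, returning the
+    # step-name -> log-id mapping as a JSON string so it survives step.run's
+    # serialized output.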
+ log_ids: dict[StepName, str] = {}
- for application in item.get("applications", []):
- for answer in application.get("answers", []):
- answers.append(
- {
- "question": answer.get("question", ""),
- "answer": answer.get("answer", None),
- }
- )
-
- project = Project(
- id=item.get("id", ""),
- title=item.get("title", ""),
- description=item.get("description", ""),
- website=item.get("website", ""),
- answers=answers,
- )
- projects.append(project)
-
- return projects
+ for step_name in StepName:
+ new_log = logs.create(
+ db=supabase,
+ run_id=run_id,
+ step_name=step_name,
+ ).data
+
+ log_ids[step_name] = new_log[0]["id"]
+ return json.dumps(log_ids)
@inngest.create_function(
fn_id="create_strategy",
@@ -54,32 +50,53 @@ async def create_strategy(
) -> str | None:
data = CreateStrategyEvent.Data.model_validate(ctx.event.data)
run_id = data.run_id
- supabase = client.create_admin()
+ db = client.create_admin()
- await step.run(
- "extracting_prompt",
- lambda: logs.insert(supabase, run_id, "Extracting prompt from run_id"),
+ prompt = await step.run(
+ "extract_prompt",
+ lambda: get_prompt(db, run_id),
)
-
- prompt = await step.run("extract_prompt", lambda: get_prompt(supabase, run_id))
-
+
+ log_ids_str = await step.run(
+ "initialize_logs",
+ lambda: initialize_logs(db, run_id),
+ )
+
+ log_ids: dict[StepName, str] = json.loads(log_ids_str)
+
await step.run(
- "fetching_projects_info",
- lambda: logs.insert(supabase, run_id, "Getting information from data sources"),
+ "start_fetch_projects_data",
+ lambda: logs.update(
+ db=db,
+ status=StepStatus.IN_PROGRESS,
+ log_id=log_ids[StepName.FETCH_PROJECTS],
+ value=None,
+ ),
)
json_projects = await step.run(
- "fetch_projects_data", lambda: fetch_projects_data(supabase)
+ "fetch_projects_data", lambda: fetch_matching_projects(db, prompt)
)
- projects: list[Project] = [Project(**json_project) for json_project in json_projects] # type: ignore
+ projects = [Project(**json_project) for json_project in json_projects]
await step.run(
- "assessing",
- lambda: logs.insert(
- supabase,
- run_id,
- "Assessing impact of each project related to the users interest",
+ "completed_fetch_projects_data",
+ lambda: logs.update(
+ db=db,
+ status=StepStatus.COMPLETED,
+ log_id=log_ids[StepName.FETCH_PROJECTS],
+ value=f"Found {len(projects)} projects related to '{prompt}'",
+ ),
+ )
+
+ await step.run(
+ "start_assess_projects",
+ lambda: logs.update(
+ db=db,
+ status=StepStatus.IN_PROGRESS,
+ log_id=log_ids[StepName.EVALUATE_PROJECTS],
+ value=None,
),
)
@@ -87,13 +104,24 @@ async def create_strategy(
"assess_projects", lambda: evaluate_projects(prompt, projects)
)
assessed_projects = [EvaluatedProject(**x) for x in json_asessed_projects] # type: ignore
-
+
await step.run(
- "determining_funding",
- lambda: logs.insert(
- supabase,
- run_id,
- "Determining the relative funding that the best matching projects need",
+ "completed_assess_projects",
+ lambda: logs.update(
+ db=db,
+ status=StepStatus.COMPLETED,
+ log_id=log_ids[StepName.EVALUATE_PROJECTS],
+ value=f"Evaluated {len(assessed_projects)} projects",
+ ),
+ )
+
+ await step.run(
+ "start_determine_funding",
+ lambda: logs.update(
+ db,
+ status=StepStatus.IN_PROGRESS,
+ log_id=log_ids[StepName.ANALYZE_FUNDING],
+ value=None,
),
)
@@ -101,16 +129,39 @@ async def create_strategy(
"determine_funding", lambda: assign_weights(assessed_projects)
)
weighted_projects = [WeightedProject(**x) for x in json_weighted_projects] # type: ignore
-
+
await step.run(
- "saving_results_to_db",
- lambda: logs.insert(supabase, run_id, "Generating results"),
+ "completed_determine_funding",
+ lambda: logs.update(
+ db,
+ status=StepStatus.COMPLETED,
+ log_id=log_ids[StepName.ANALYZE_FUNDING],
+ value="Determined the relative funding that the best matching projects need",
+ ),
)
-
+
await step.run(
- "save_strategy_to_db", lambda: insert_multiple(supabase, run_id, weighted_projects)
+ "start_synthesize_results",
+ lambda: logs.update(
+ db,
+ status=StepStatus.IN_PROGRESS,
+ log_id=log_ids[StepName.SYNTHESIZE_RESULTS],
+ value=None
+ ),
)
- await step.run("result", lambda: logs.insert(supabase, run_id, "STRATEGY_CREATED"))
+ await step.run(
+ "save_strategy_to_db", lambda: insert_multiple(db, run_id, weighted_projects)
+ )
+
+ await step.run(
+ "completed_synthesize_results",
+ lambda: logs.update(
+ db,
+ status=StepStatus.COMPLETED,
+ log_id=log_ids[StepName.SYNTHESIZE_RESULTS],
+ value="Results generated"
+ ),
+ )
return "done"
diff --git a/workers/fund_public_goods/workflows/index_gitcoin/events/index_gitcoin_page_event.py b/workers/fund_public_goods/workflows/index_gitcoin/events/index_gitcoin_page_event.py
index 13dc071..cce9747 100644
--- a/workers/fund_public_goods/workflows/index_gitcoin/events/index_gitcoin_page_event.py
+++ b/workers/fund_public_goods/workflows/index_gitcoin/events/index_gitcoin_page_event.py
@@ -7,6 +7,7 @@ class IndexGitcoinPageEvent():
class Data(BaseModel):
url: str
+ network_id: int
job_id: str
project_page_size: int
skip_rounds: int
diff --git a/workers/fund_public_goods/workflows/index_gitcoin/functions/index_gitcoin_page.py b/workers/fund_public_goods/workflows/index_gitcoin/functions/index_gitcoin_page.py
index 37ba6eb..1e65f42 100644
--- a/workers/fund_public_goods/workflows/index_gitcoin/functions/index_gitcoin_page.py
+++ b/workers/fund_public_goods/workflows/index_gitcoin/functions/index_gitcoin_page.py
@@ -1,8 +1,10 @@
from datetime import datetime
from typing import cast
+import json
import inngest
from pydantic import parse_obj_as
-from fund_public_goods.lib.gitcoin.models import ApplicationInfo, ProjectApplicationInfo, ProjectInfo, RoundInfo
+from fund_public_goods.lib.gitcoin.models import ApplicationInfo, RoundInfo
+from fund_public_goods.db.entities import GitcoinApplications, GitcoinProjects
from fund_public_goods.workflows.index_gitcoin.events import IndexGitcoinPageEvent
from fund_public_goods.lib.gitcoin.utils import fetch_json_from_ipfs, fetch_project_applications, fetch_rounds
from fund_public_goods.db.tables.gitcoin import save_application, stop_and_mark_job_as_failed, stop_job, update_job_progress, upsert_project
@@ -41,7 +43,7 @@ async def index_gitcoin_page(
round = rounds[0]
apps = await step.run("fetch_project_applications", lambda: fetch_project_applications(data.url, round.id, first=data.project_page_size, skip=data.skip_projects))
-
+
apps = parse_obj_as(list[ApplicationInfo], apps)
if not apps:
@@ -51,7 +53,8 @@ async def index_gitcoin_page(
"index_gitcoin_page",
IndexGitcoinPageEvent.Data(
job_id=data.job_id,
- url = data.url,
+ url = data.url,
+ network_id = data.network_id,
project_page_size = data.project_page_size,
skip_rounds = data.skip_rounds + 1,
skip_projects = 0
@@ -61,33 +64,34 @@ async def index_gitcoin_page(
return "Next round page: No projects"
else:
return "Next round page: No more projects"
-
+
for i in range(len(apps)):
app = apps[i]
app_data = await step.run("fetch_json_from_ipfs_" + str(i), lambda: fetch_json_from_ipfs(app.pointer))
project_id = app_data["application"]["project"]["id"]
- application = ProjectApplicationInfo(
- id = app.id,
- protocol = app.protocol,
- pointer = app.pointer,
+ application = GitcoinApplications(
+ id = app.id,
+ created_at = app.created_at,
+ protocol = app.protocol,
+ pointer = app.pointer,
round_id = app.round_id,
project_id = project_id,
- data = app_data
+ data = json.dumps(app_data)
)
project_pointer = app_data["application"]["project"]["metaPtr"]["pointer"]
-    project_data = await step.run("fetch_json_from_ipfs_" + str(i), lambda: fetch_json_from_ipfs(project_pointer))
+    project_data = await step.run("fetch_project_json_from_ipfs_" + str(i), lambda: fetch_json_from_ipfs(project_pointer))
- project = ProjectInfo(
- id = app_data["application"]["project"]["id"],
+ project = GitcoinProjects(
+ id = app_data["application"]["project"]["id"],
protocol = app_data["application"]["project"]["metaPtr"]["protocol"],
pointer = project_pointer,
- data = project_data,
+ data = json.dumps(project_data),
)
- await step.run("upsert_project_" + str(i), lambda: upsert_project(project))
+ await step.run("upsert_project_" + str(i), lambda: upsert_project(project, application.created_at))
- await step.run("save_application_" + str(i), lambda: save_application(application))
+ await step.run("save_application_" + str(i), lambda: save_application(application, data.network_id))
total_skip_rounds = 0
total_skip_projects = 0
@@ -105,7 +109,8 @@ async def index_gitcoin_page(
"index_gitcoin_page",
IndexGitcoinPageEvent.Data(
job_id=data.job_id,
- url = data.url,
+ url = data.url,
+ network_id = data.network_id,
project_page_size = data.project_page_size,
skip_rounds = total_skip_rounds,
skip_projects = total_skip_projects,
diff --git a/workers/fund_public_goods/workflows/index_gitcoin/functions/start_index_gitcoin.py b/workers/fund_public_goods/workflows/index_gitcoin/functions/start_index_gitcoin.py
index fc375a7..7db7f32 100644
--- a/workers/fund_public_goods/workflows/index_gitcoin/functions/start_index_gitcoin.py
+++ b/workers/fund_public_goods/workflows/index_gitcoin/functions/start_index_gitcoin.py
@@ -1,7 +1,7 @@
import inngest
from fund_public_goods.workflows.index_gitcoin.events import IndexGitcoinPageEvent
from fund_public_goods.db.tables.gitcoin import get_non_running_job, is_any_job_running, start_job
-from fund_public_goods.lib.gitcoin.models import GitcoinIndexingJob
+
@inngest.create_function(
fn_id="start_index_gitcoin",
@@ -22,26 +22,25 @@ def get_not_running_job_step():
if not job:
return None
else:
- return job.model_dump()
+        return job.model_dump(mode="json", by_alias=True)
- job_dto = await step.run("get_not_running_job", get_not_running_job_step)
+ job = await step.run("get_not_running_job", get_not_running_job_step)
- if not job_dto:
+ if not job:
return "No non-running job found"
- job = GitcoinIndexingJob.model_validate(job_dto)
-
- await step.run("start_job", lambda: start_job(job.id))
+ await step.run("start_job", lambda: start_job(job["id"]))
await step.send_event(
"index_gitcoin_page",
IndexGitcoinPageEvent.Data(
- url = job.url,
+ url = job["url"],
+ network_id = job["networkId"],
project_page_size = 100,
- skip_rounds = job.skip_rounds,
- skip_projects = job.skip_projects,
- job_id=job.id
+ skip_rounds = job["skipRounds"],
+ skip_projects = job["skipProjects"],
+ job_id=job["id"]
).to_event()
)
- return "Started job: ID=" + job.id + ", URL=" + job.url
+ return "Started job: ID=" + job["id"] + ", URL=" + job["url"]