From b9e2beae9d240bacb1f837135b5e01d8dbb199a8 Mon Sep 17 00:00:00 2001 From: mhdzumair Date: Tue, 19 Nov 2024 14:11:26 +0530 Subject: [PATCH 1/7] Migrate mongodb to postgres --- Pipfile | 3 + Pipfile.lock | 394 ++++++----- api/__init__.py | 1 - db/models.py | 4 +- db/new_models.py | 345 ++++++++++ db/schemas.py | 6 +- migrations/__init__.py | 0 migrations/mongo_to_postgres.py | 1134 +++++++++++++++++++++++++++++++ 8 files changed, 1714 insertions(+), 173 deletions(-) create mode 100644 db/new_models.py create mode 100644 migrations/__init__.py create mode 100644 migrations/mongo_to_postgres.py diff --git a/Pipfile b/Pipfile index 3d2cac2d..204558ce 100644 --- a/Pipfile +++ b/Pipfile @@ -47,6 +47,9 @@ tenacity = "*" ratelimit = "*" qrcode = "*" aioseedrcc = "*" +typer = "*" +sqlmodel = "*" +asyncpg = "*" [dev-packages] pysocks = "*" diff --git a/Pipfile.lock b/Pipfile.lock index 184b8768..1060af06 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "1bfd1eeb0ef28abd4907e5b1e1046ab6912f9b37f4b1b66001a91d41d2f2a115" + "sha256": "5f83ba895ec53953a40a258502afc31ad2217142fadf2549634cf7f5c980d76c" }, "pipfile-spec": 6, "requires": { @@ -41,85 +41,85 @@ }, "aiohttp": { "hashes": [ - "sha256:024409c1b1d6076d0ed933dcebd7e4fc6f3320a227bfa0c1b6b93a8b5a146f04", - "sha256:04b24497b3baf15035730de5f207ade88a67d4483a5f16ced7ece348933a5b47", - "sha256:08474e71772a516ba2e2167b4707af8361d2c452b3d8a5364c984f4867869499", - "sha256:0e7a0762cc29cd3acd01a4d2b547b3af7956ad230ebb80b529a8e4f3e4740fe8", - "sha256:104deb7873681273c5daa13c41924693df394043a118dae90387d35bc5531788", - "sha256:104ea21994b1403e4c1b398866f1187c1694fa291314ad7216ec1d8ec6b49f38", - "sha256:113bf06b029143e94a47c4f36e11a8b7e396e9d1f1fc8cea58e6b7e370cfed38", - "sha256:12071dd2cc95ba81e0f2737bebcb98b2a8656015e87772e84e8fb9e635b5da6e", - "sha256:170fb2324826bb9f08055a8291f42192ae5ee2f25b2966c8f0f4537c61d73a7b", - "sha256:21b4545e8d96870da9652930c5198366605ff8f982757030e2148cf341e5746b", - "sha256:229ae13959a5f499d90ffbb4b9eac2255d8599315027d6f7c22fa9803a94d5b1", - "sha256:2ec5efbc872b00ddd85e3904059d274f284cff314e13f48776050ca2c58f451d", - "sha256:31b91ff3a1fcb206a1fa76e0de1f08c9ffb1dc0deb7296fa2618adfe380fc676", - "sha256:329f5059e0bf6983dceebac8e6ed20e75eaff6163b3414f4a4cb59e0d7037672", - "sha256:37f8cf3c43f292d9bb3e6760476c2b55b9663a581fad682a586a410c43a7683e", - "sha256:3e1ed8d152cccceffb1ee7a2ac227c16372e453fb11b3aeaa56783049b85d3f6", - "sha256:3ed360d6672a9423aad39902a4e9fe305464d20ed7931dbdba30a4625782d875", - "sha256:40dc9446cff326672fcbf93efdb8ef7e949824de1097624efe4f61ac7f0d2c43", - "sha256:4d218d3eca40196384ad3b481309c56fd60e664128885d1734da0a8aa530d433", - "sha256:4e4e155968040e32c124a89852a1a5426d0e920a35f4331e1b3949037bfe93a3", - "sha256:4f698aa61879df64425191d41213dfd99efdc1627e6398e6d7aa5c312fac9702", - "sha256:508cfcc99534b1282595357592d8367b44392b21f6eb5d4dc021f8d0d809e94d", - "sha256:577c7429f8869fa30186fc2c9eee64d75a30b51b61f26aac9725866ae5985cfd", - "sha256:57e17c6d71f2dc857a8a1d09be1be7802e35d90fb4ba4b06cf1aab6414a57894", - "sha256:5ecc2fb1a0a9d48cf773add34196cddf7e488e48e9596e090849751bf43098f4", - "sha256:600b1d9f86a130131915e2f2127664311b33902c486b21a747d626f5144b4471", - "sha256:62502b8ffee8c6a4b5c6bf99d1de277d42bf51b2fb713975d9b63b560150b7ac", - "sha256:62a2f5268b672087c45b33479ba1bb1d5a48c6d76c133cfce3a4f77410c200d1", - "sha256:6362f50a6f0e5482c4330d2151cb682779230683da0e155c15ec9fc58cb50b6a", - "sha256:6533dd06df3d17d1756829b68b365b1583929b54082db8f65083a4184bf68322", - 
"sha256:6c5a6958f4366496004cf503d847093d464814543f157ef3b738bbf604232415", - "sha256:72cd984f7f14e8c01b3e38f18f39ea85dba84e52ea05e37116ba5e2a72eef396", - "sha256:76d6ee8bb132f8ee0fcb0e205b4708ddb6fba524eb515ee168113063d825131b", - "sha256:7867d0808614f04e78e0a8d5a2c1f8ac6bc626a0c0e2f62be48be6b749e2f8b2", - "sha256:7d664e5f937c08adb7908ea9f391fbf2928a9b09cb412ac0aba602bde9e499e4", - "sha256:85ae6f182be72c3531915e90625cc65afce4df8a0fc4988bd52d8a5d5faaeb68", - "sha256:89a96a0696dc67d548f69cb518c581a7a33cc1f26ab42229dea1709217c9d926", - "sha256:8b323b5d3aef7dd811424c269322eec58a977c0c8152e650159e47210d900504", - "sha256:8c47a0ba6c2b3d3e5715f8338d657badd21f778c6be16701922c65521c5ecfc9", - "sha256:8fef105113d56e817cb9bcc609667ee461321413a7b972b03f5b4939f40f307c", - "sha256:900ff74d78eb580ae4aa5883242893b123a0c442a46570902500f08d6a7e6696", - "sha256:9095580806d9ed07c0c29b23364a0b1fb78258ef9f4bddf7e55bac0e475d4edf", - "sha256:91d3991fad8b65e5dbc13cd95669ea689fe0a96ff63e4e64ac24ed724e4f8103", - "sha256:9231d610754724273a6ac05a1f177979490bfa6f84d49646df3928af2e88cfd5", - "sha256:97056d3422594e0787733ac4c45bef58722d452f4dc6615fee42f59fe51707dd", - "sha256:a896059b6937d1a22d8ee8377cdcd097bd26cd8c653b8f972051488b9baadee9", - "sha256:aabc4e92cb153636d6be54e84dad1b252ddb9aebe077942b6dcffe5e468d476a", - "sha256:ad14cdc0fba4df31c0f6e06c21928c5b924725cbf60d0ccc5f6e7132636250e9", - "sha256:ae36ae52b0c22fb69fb8b744eff82a20db512a29eafc6e3a4ab43b17215b219d", - "sha256:b3e4fb7f5354d39490d8209aefdf5830b208d01c7293a2164e404312c3d8bc55", - "sha256:b40c304ab01e89ad0aeeecf91bbaa6ae3b00e27b796c9e8d50b71a4a7e885cc8", - "sha256:b7349205bb163318dcc102329d30be59a647a3d24c82c3d91ed35b7e7301ea7e", - "sha256:b8b95a63a8e8b5f0464bd8b1b0d59d2bec98a59b6aacc71e9be23df6989b3dfb", - "sha256:bb2e82e515e268b965424ecabebd91834a41b36260b6ef5db015ee12ddb28ef3", - "sha256:c0315978b2a4569e03fb59100f6a7e7d23f718a4521491f5c13d946d37549f3d", - "sha256:c1828e10c3a49e2b234b87600ecb68a92b8a8dcf8b99bca9447f16c4baaa1630", - "sha256:c1c49bc393d854d4421ebc174a0a41f9261f50d3694d8ca277146cbbcfd24ee7", - "sha256:c415b9601ff50709d6050c8a9281733a9b042b9e589265ac40305b875cf9c463", - "sha256:c54c635d1f52490cde7ef3a423645167a8284e452a35405d5c7dc1242a8e75c9", - "sha256:c5e6a1f8b0268ffa1c84d7c3558724956002ba8361176e76406233e704bbcffb", - "sha256:c98a596ac20e8980cc6f34c0c92a113e98eb08f3997c150064d26d2aeb043e5a", - "sha256:cd0834e4260eab78671b81d34f110fbaac449563e48d419cec0030d9a8e58693", - "sha256:cdad66685fcf2ad14ce522cf849d4a025f4fd206d6cfc3f403d9873e4c243b03", - "sha256:d1ea006426edf7e1299c52a58b0443158012f7a56fed3515164b60bfcb1503a9", - "sha256:d33b4490026968bdc7f0729b9d87a3a6b1e09043557d2fc1c605c6072deb2f11", - "sha256:d5cae4cd271e20b7ab757e966cc919186b9f02535418ab36c471a5377ef4deaa", - "sha256:dd505a1121ad5b666191840b7bd1d8cb917df2647deeca6f3474331b72452362", - "sha256:e1668ef2f3a7ec9881f4b6a917e5f97c87a343fa6b0d5fc826b7b0297ddd0887", - "sha256:e7bcfcede95531589295f56e924702cef7f9685c9e4e5407592e04ded6a65bf3", - "sha256:ebf610c37df4f09c71c9bbf8309b4b459107e6fe889ac0d7e16f6e4ebd975f86", - "sha256:f3bf5c132eb48002bcc3825702d241d35b4e9585009e65e9dcf9c4635d0b7424", - "sha256:f40380c96dd407dfa84eb2d264e68aa47717b53bdbe210a59cc3c35a4635f195", - "sha256:f57a0de48dda792629e7952d34a0c7b81ea336bb9b721391c7c58145b237fe55", - "sha256:f6b925c7775ab857bdc1e52e1f5abcae7d18751c09b751aeb641a5276d9b990e", - "sha256:f8f0d79b923070f25674e4ea8f3d61c9d89d24d9598d50ff32c5b9b23c79a25b", - "sha256:feca9fafa4385aea6759c171cd25ea82f7375312fca04178dae35331be45e538" + 
"sha256:08ebe7a1d6c1e5ca766d68407280d69658f5f98821c2ba6c41c63cabfed159af", + "sha256:0a90a0dc4b054b5af299a900bf950fe8f9e3e54322bc405005f30aa5cacc5c98", + "sha256:0cba0b8d25aa2d450762f3dd6df85498f5e7c3ad0ddeb516ef2b03510f0eea32", + "sha256:0ebdf5087e2ce903d8220cc45dcece90c2199ae4395fd83ca616fcc81010db2c", + "sha256:10a5f91c319d9d4afba812f72984816b5fcd20742232ff7ecc1610ffbf3fc64d", + "sha256:122768e3ae9ce74f981b46edefea9c6e5a40aea38aba3ac50168e6370459bf20", + "sha256:14eb6c628432720e41b4fab1ada879d56cfe7034159849e083eb536b4c2afa99", + "sha256:177b000efaf8d2f7012c649e8aee5b0bf488677b1162be5e7511aa4f9d567607", + "sha256:1c2496182e577042e0e07a328d91c949da9e77a2047c7291071e734cd7a6e780", + "sha256:1e33a7eddcd07545ccf5c3ab230f60314a17dc33e285475e8405e26e21f02660", + "sha256:2793d3297f3e49015140e6d3ea26142c967e07998e2fb00b6ee8d041138fbc4e", + "sha256:2914061f5ca573f990ec14191e6998752fa8fe50d518e3405410353c3f44aa5d", + "sha256:2adb967454e10e69478ba4a8d8afbba48a7c7a8619216b7c807f8481cc66ddfb", + "sha256:2b02a68b9445c70d7f5c8b578c5f5e5866b1d67ca23eb9e8bc8658ae9e3e2c74", + "sha256:3129151378f858cdc4a0a4df355c9a0d060ab49e2eea7e62e9f085bac100551b", + "sha256:32334f35824811dd20a12cc90825d000e6b50faaeaa71408d42269151a66140d", + "sha256:33af11eca7bb0f5c6ffaf5e7d9d2336c2448f9c6279b93abdd6f3c35f9ee321f", + "sha256:34f37c59b12bc3afc52bab6fcd9cd3be82ff01c4598a84cbea934ccb3a9c54a0", + "sha256:3666c750b73ce463a413692e3a57c60f7089e2d9116a2aa5a0f0eaf2ae325148", + "sha256:374baefcb1b6275f350da605951f5f02487a9bc84a574a7d5b696439fabd49a3", + "sha256:382f853516664d2ebfc75dc01da4a10fdef5edcb335fe7b45cf471ce758ecb18", + "sha256:3b1f4844909321ef2c1cee50ddeccbd6018cd8c8d1ddddda3f553e94a5859497", + "sha256:3f617a48b70f4843d54f52440ea1e58da6bdab07b391a3a6aed8d3b311a4cc04", + "sha256:435f7a08d8aa42371a94e7c141205a9cb092ba551084b5e0c57492e6673601a3", + "sha256:44b69c69c194ffacbc50165911cf023a4b1b06422d1e1199d3aea82eac17004e", + "sha256:486273d3b5af75a80c31c311988931bdd2a4b96a74d5c7f422bad948f99988ef", + "sha256:4a23475d8d5c56e447b7752a1e2ac267c1f723f765e406c81feddcd16cdc97bc", + "sha256:4c979fc92aba66730b66099cd5becb42d869a26c0011119bc1c2478408a8bf7a", + "sha256:4d7fad8c456d180a6d2f44c41cfab4b80e2e81451815825097db48b8293f59d5", + "sha256:50e0aee4adc9abcd2109c618a8d1b2c93b85ac277b24a003ab147d91e068b06d", + "sha256:556564d89e2f4a6e8fe000894c03e4e84cf0b6cfa5674e425db122633ee244d1", + "sha256:5587da333b7d280a312715b843d43e734652aa382cba824a84a67c81f75b338b", + "sha256:57993f406ce3f114b2a6756d7809be3ffd0cc40f33e8f8b9a4aa1b027fd4e3eb", + "sha256:5d6e069b882c1fdcbe5577dc4be372eda705180197140577a4cddb648c29d22e", + "sha256:5d878a0186023ac391861958035174d0486f3259cabf8fd94e591985468da3ea", + "sha256:5d90b5a3b0f32a5fecf5dd83d828713986c019585f5cddf40d288ff77f366615", + "sha256:5e9a766c346b2ed7e88937919d84ed64b4ef489dad1d8939f806ee52901dc142", + "sha256:64e8f5178958a9954043bc8cd10a5ae97352c3f2fc99aa01f2aebb0026010910", + "sha256:66e58a2e8c7609a3545c4b38fb8b01a6b8346c4862e529534f7674c5265a97b8", + "sha256:68d1f46f9387db3785508f5225d3acbc5825ca13d9c29f2b5cce203d5863eb79", + "sha256:6ad9a7d2a3a0f235184426425f80bd3b26c66b24fd5fddecde66be30c01ebe6e", + "sha256:6e8e19a80ba194db5c06915a9df23c0c06e0e9ca9a4db9386a6056cca555a027", + "sha256:73a664478ae1ea011b5a710fb100b115ca8b2146864fa0ce4143ff944df714b8", + "sha256:766d0ebf8703d28f854f945982aa09224d5a27a29594c70d921c43c3930fe7ac", + "sha256:783741f534c14957fbe657d62a34b947ec06db23d45a2fd4a8aeb73d9c84d7e6", + "sha256:79efd1ee3827b2f16797e14b1e45021206c3271249b4d0025014466d416d7413", + 
"sha256:83a70e22e0f6222effe7f29fdeba6c6023f9595e59a0479edacfbd7de4b77bb7", + "sha256:85de9904bc360fd29a98885d2bfcbd4e02ab33c53353cb70607f2bea2cb92468", + "sha256:8d954ba0eae7f33884d27dc00629ca4389d249eb8d26ca07c30911257cae8c96", + "sha256:9075313f8e41b481e4cb10af405054564b0247dc335db5398ed05f8ec38787e2", + "sha256:97fba98fc5d9ccd3d33909e898d00f2494d6a9eec7cbda3d030632e2c8bb4d00", + "sha256:994cb893936dd2e1803655ae8667a45066bfd53360b148e22b4e3325cc5ea7a3", + "sha256:9aa4e68f1e4f303971ec42976fb170204fb5092de199034b57199a1747e78a2d", + "sha256:9b6d15adc9768ff167614ca853f7eeb6ee5f1d55d5660e3af85ce6744fed2b82", + "sha256:9bbb2dbc2701ab7e9307ca3a8fa4999c5b28246968e0a0202a5afabf48a42e22", + "sha256:9c8d1db4f65bbc9d75b7b271d68fb996f1c8c81a525263862477d93611856c2d", + "sha256:a7b0a1618060e3f5aa73d3526ca2108a16a1b6bf86612cd0bb2ddcbef9879d06", + "sha256:afa55e863224e664a782effa62245df73fdfc55aee539bed6efacf35f6d4e4b7", + "sha256:b339d91ac9060bd6ecdc595a82dc151045e5d74f566e0864ef3f2ba0887fec42", + "sha256:b470de64d17156c37e91effc109d3b032b39867000e2c126732fe01d034441f9", + "sha256:b4ec8afd362356b8798c8caa806e91deb3f0602d8ffae8e91d2d3ced2a90c35e", + "sha256:c28c1677ea33ccb8b14330560094cc44d3ff4fad617a544fd18beb90403fe0f1", + "sha256:c681f34e2814bc6e1eef49752b338061b94a42c92734d0be9513447d3f83718c", + "sha256:cccb2937bece1310c5c0163d0406aba170a2e5fb1f0444d7b0e7fdc9bd6bb713", + "sha256:cdc6f8dce09281ae534eaf08a54f0d38612398375f28dad733a8885f3bf9b978", + "sha256:d23854e5867650d40cba54d49956aad8081452aa80b2cf0d8c310633f4f48510", + "sha256:d2d942421cf3a1d1eceae8fa192f1fbfb74eb9d3e207d35ad2696bd2ce2c987c", + "sha256:d2f991c18132f3e505c108147925372ffe4549173b7c258cf227df1c5977a635", + "sha256:d3a2bcf6c81639a165da93469e1e0aff67c956721f3fa9c0560f07dd1e505116", + "sha256:d84930b4145991214602372edd7305fc76b700220db79ac0dd57d3afd0f0a1ca", + "sha256:de3b4d5fb5d69749104b880a157f38baeea7765c93d9cd3837cedd5b84729e10", + "sha256:e57a10aacedcf24666f4c90d03e599f71d172d1c5e00dcf48205c445806745b0", + "sha256:f1d06c8fd8b453c3e553c956bd3b8395100401060430572174bb7876dd95ad49", + "sha256:f833a80d9de9307d736b6af58c235b17ef7f90ebea7b9c49cd274dec7a66a2f1", + "sha256:fb0544a0e8294a5a5e20d3cacdaaa9a911d7c0a9150f5264aef36e7d8fdfa07e", + "sha256:ff5d22eece44528023254b595c670dfcf9733ac6af74c4b6cb4f6a784dc3870c" ], "markers": "python_version >= '3.9'", - "version": "==3.11.0" + "version": "==3.11.2" }, "aioqbt": { "git": "git+https://github.com/mhdzumair/aioqbt.git", @@ -927,11 +927,11 @@ }, "httpcore": { "hashes": [ - "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f", - "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f" + "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c", + "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd" ], "markers": "python_version >= '3.8'", - "version": "==1.0.6" + "version": "==1.0.7" }, "httptools": { "hashes": [ @@ -1227,6 +1227,14 @@ "markers": "python_version >= '3.6' and python_version < '4'", "version": "==0.2.11" }, + "markdown-it-py": { + "hashes": [ + "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", + "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb" + ], + "markers": "python_version >= '3.8'", + "version": "==3.0.0" + }, "markupsafe": { "hashes": [ "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", @@ -1294,6 +1302,14 @@ "markers": "python_version >= '3.9'", "version": "==3.0.2" }, + "mdurl": { + "hashes": [ + 
"sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", + "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba" + ], + "markers": "python_version >= '3.7'", + "version": "==0.1.2" + }, "motor": { "hashes": [ "sha256:0ef7f520213e852bf0eac306adf631aabe849227d8aec900a2612512fb9c5b8d", @@ -1419,12 +1435,12 @@ }, "parsett": { "hashes": [ - "sha256:50b17e1091171b6960c1621bdce9cbb42e1d2a749b24a6923898a765135f399f", - "sha256:dfcad77a07b1ef6221b220e0d8fd88a40f41f6d14aa18ec5afbb689cd0e3be74" + "sha256:2c53a757d81ca1de52042c4d320dae05b1013814a8950d7a63d81abf28a2e6dc", + "sha256:fb7d37b3697a51f3483096cb121b4598968b8f1ee6478880b3886386d938c7c4" ], "index": "pypi", "markers": "python_version >= '3.11' and python_version < '4.0'", - "version": "==1.4.1" + "version": "==1.5.0" }, "pikpakapi": { "git": "git+https://github.com/mhdzumair/PikPakAPI.git", @@ -1905,6 +1921,14 @@ "markers": "python_version >= '3.8'", "version": "==12.0.0" }, + "pygments": { + "hashes": [ + "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199", + "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a" + ], + "markers": "python_version >= '3.8'", + "version": "==2.18.0" + }, "pymongo": { "extras": [ "srv" @@ -2324,6 +2348,14 @@ ], "version": "==2.1.0" }, + "rich": { + "hashes": [ + "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098", + "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90" + ], + "markers": "python_full_version >= '3.8.0'", + "version": "==13.9.4" + }, "rpds-py": { "hashes": [ "sha256:031819f906bb146561af051c7cef4ba2003d28cff07efacef59da973ff7969ba", @@ -2454,6 +2486,14 @@ "markers": "python_version >= '3.9'", "version": "==75.5.0" }, + "shellingham": { + "hashes": [ + "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", + "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de" + ], + "markers": "python_version >= '3.7'", + "version": "==1.5.4" + }, "six": { "hashes": [ "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", @@ -2541,6 +2581,15 @@ "markers": "python_version >= '3.7'", "version": "==2.0.36" }, + "sqlmodel": { + "hashes": [ + "sha256:7d37c882a30c43464d143e35e9ecaf945d88035e20117bf5ec2834a23cbe505e", + "sha256:a1ed13e28a1f4057cbf4ff6cdb4fc09e85702621d3259ba17b3c230bfb2f941b" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==0.0.22" + }, "starlette": { "hashes": [ "sha256:9834fd799d1a87fd346deb76158668cfa0b0d56f85caefe8268e2d97c3468b62", @@ -2616,6 +2665,15 @@ "markers": "python_version >= '3.8'", "version": "==2.35" }, + "typer": { + "hashes": [ + "sha256:d85fe0b777b2517cc99c8055ed735452f2659cd45e451507c76f48ce5c1d00e2", + "sha256:f1c7198347939361eec90139ffa0fd8b3df3a2259d5852a0f7400e476d95985c" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==0.13.0" + }, "types-python-dateutil": { "hashes": [ "sha256:250e1d8e80e7bbc3a6c99b907762711d1a1cdd00e978ad39cb5940f6f0a87f3d", @@ -2629,7 +2687,7 @@ "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8" ], - "markers": "python_version >= '3.8'", + "markers": "python_version < '3.13'", "version": "==4.12.2" }, "tzlocal": { @@ -2916,91 +2974,91 @@ }, "yarl": { "hashes": [ - "sha256:06157fb3c58f2736a5e47c8fcbe1afc8b5de6fb28b14d25574af9e62150fcaac", - "sha256:067a63fcfda82da6b198fa73079b1ca40b7c9b7994995b6ee38acda728b64d47", - 
"sha256:0b1794853124e2f663f0ea54efb0340b457f08d40a1cef78edfa086576179c91", - "sha256:0bdff5e0995522706c53078f531fb586f56de9c4c81c243865dd5c66c132c3b5", - "sha256:117ed8b3732528a1e41af3aa6d4e08483c2f0f2e3d3d7dca7cf538b3516d93df", - "sha256:14bc88baa44e1f84164a392827b5defb4fa8e56b93fecac3d15315e7c8e5d8b3", - "sha256:1654ec814b18be1af2c857aa9000de7a601400bd4c9ca24629b18486c2e35463", - "sha256:16bca6678a83657dd48df84b51bd56a6c6bd401853aef6d09dc2506a78484c7b", - "sha256:1a3b91c44efa29e6c8ef8a9a2b583347998e2ba52c5d8280dbd5919c02dfc3b5", - "sha256:1a52a1ffdd824fb1835272e125385c32fd8b17fbdefeedcb4d543cc23b332d74", - "sha256:1ce36ded585f45b1e9bb36d0ae94765c6608b43bd2e7f5f88079f7a85c61a4d3", - "sha256:299f11b44d8d3a588234adbe01112126010bd96d9139c3ba7b3badd9829261c3", - "sha256:2b24ec55fad43e476905eceaf14f41f6478780b870eda5d08b4d6de9a60b65b4", - "sha256:2d374d70fdc36f5863b84e54775452f68639bc862918602d028f89310a034ab0", - "sha256:2d9f0606baaec5dd54cb99667fcf85183a7477f3766fbddbe3f385e7fc253299", - "sha256:2e7ba4c9377e48fb7b20dedbd473cbcbc13e72e1826917c185157a137dac9df2", - "sha256:2f0a6423295a0d282d00e8701fe763eeefba8037e984ad5de44aa349002562ac", - "sha256:327828786da2006085a4d1feb2594de6f6d26f8af48b81eb1ae950c788d97f61", - "sha256:380e6c38ef692b8fd5a0f6d1fa8774d81ebc08cfbd624b1bca62a4d4af2f9931", - "sha256:3b74ff4767d3ef47ffe0cd1d89379dc4d828d4873e5528976ced3b44fe5b0a21", - "sha256:3e844be8d536afa129366d9af76ed7cb8dfefec99f5f1c9e4f8ae542279a6dc3", - "sha256:459e81c2fb920b5f5df744262d1498ec2c8081acdcfe18181da44c50f51312f7", - "sha256:46ddf6e0b975cd680eb83318aa1d321cb2bf8d288d50f1754526230fcf59ba96", - "sha256:482c122b72e3c5ec98f11457aeb436ae4aecca75de19b3d1de7cf88bc40db82f", - "sha256:561c87fea99545ef7d692403c110b2f99dced6dff93056d6e04384ad3bc46243", - "sha256:578d00c9b7fccfa1745a44f4eddfdc99d723d157dad26764538fbdda37209857", - "sha256:58c8e9620eb82a189c6c40cb6b59b4e35b2ee68b1f2afa6597732a2b467d7e8f", - "sha256:5b29beab10211a746f9846baa39275e80034e065460d99eb51e45c9a9495bcca", - "sha256:5d1d42556b063d579cae59e37a38c61f4402b47d70c29f0ef15cee1acaa64488", - "sha256:5f236cb5999ccd23a0ab1bd219cfe0ee3e1c1b65aaf6dd3320e972f7ec3a39da", - "sha256:62a91aefff3d11bf60e5956d340eb507a983a7ec802b19072bb989ce120cd948", - "sha256:64cc6e97f14cf8a275d79c5002281f3040c12e2e4220623b5759ea7f9868d6a5", - "sha256:6f4c9156c4d1eb490fe374fb294deeb7bc7eaccda50e23775b2354b6a6739934", - "sha256:7294e38f9aa2e9f05f765b28ffdc5d81378508ce6dadbe93f6d464a8c9594473", - "sha256:7615058aabad54416ddac99ade09a5510cf77039a3b903e94e8922f25ed203d7", - "sha256:7e48cdb8226644e2fbd0bdb0a0f87906a3db07087f4de77a1b1b1ccfd9e93685", - "sha256:7f63d176a81555984e91f2c84c2a574a61cab7111cc907e176f0f01538e9ff6e", - "sha256:7f6595c852ca544aaeeb32d357e62c9c780eac69dcd34e40cae7b55bc4fb1147", - "sha256:7fac95714b09da9278a0b52e492466f773cfe37651cf467a83a1b659be24bf71", - "sha256:81713b70bea5c1386dc2f32a8f0dab4148a2928c7495c808c541ee0aae614d67", - "sha256:846dd2e1243407133d3195d2d7e4ceefcaa5f5bf7278f0a9bda00967e6326b04", - "sha256:84c063af19ef5130084db70ada40ce63a84f6c1ef4d3dbc34e5e8c4febb20822", - "sha256:881764d610e3269964fc4bb3c19bb6fce55422828e152b885609ec176b41cf11", - "sha256:8994b29c462de9a8fce2d591028b986dbbe1b32f3ad600b2d3e1c482c93abad6", - "sha256:8c79e9d7e3d8a32d4824250a9c6401194fb4c2ad9a0cec8f6a96e09a582c2cc0", - "sha256:8ee427208c675f1b6e344a1f89376a9613fc30b52646a04ac0c1f6587c7e46ec", - "sha256:949681f68e0e3c25377462be4b658500e85ca24323d9619fdc41f68d46a1ffda", - "sha256:9e275792097c9f7e80741c36de3b61917aebecc08a67ae62899b074566ff8556", - 
"sha256:9fb815155aac6bfa8d86184079652c9715c812d506b22cfa369196ef4e99d1b4", - "sha256:a2a64e62c7a0edd07c1c917b0586655f3362d2c2d37d474db1a509efb96fea1c", - "sha256:a7ac5b4984c468ce4f4a553df281450df0a34aefae02e58d77a0847be8d1e11f", - "sha256:aa46dce75078fceaf7cecac5817422febb4355fbdda440db55206e3bd288cfb8", - "sha256:ae3476e934b9d714aa8000d2e4c01eb2590eee10b9d8cd03e7983ad65dfbfcba", - "sha256:b0341e6d9a0c0e3cdc65857ef518bb05b410dbd70d749a0d33ac0f39e81a4258", - "sha256:b40d1bf6e6f74f7c0a567a9e5e778bbd4699d1d3d2c0fe46f4b717eef9e96b95", - "sha256:b5c4804e4039f487e942c13381e6c27b4b4e66066d94ef1fae3f6ba8b953f383", - "sha256:b5d6a6c9602fd4598fa07e0389e19fe199ae96449008d8304bf5d47cb745462e", - "sha256:b5f1ac7359e17efe0b6e5fec21de34145caef22b260e978336f325d5c84e6938", - "sha256:c0167540094838ee9093ef6cc2c69d0074bbf84a432b4995835e8e5a0d984374", - "sha256:c180ac742a083e109c1a18151f4dd8675f32679985a1c750d2ff806796165b55", - "sha256:c73df5b6e8fabe2ddb74876fb82d9dd44cbace0ca12e8861ce9155ad3c886139", - "sha256:c7e177c619342e407415d4f35dec63d2d134d951e24b5166afcdfd1362828e17", - "sha256:cbad927ea8ed814622305d842c93412cb47bd39a496ed0f96bfd42b922b4a217", - "sha256:cc353841428d56b683a123a813e6a686e07026d6b1c5757970a877195f880c2d", - "sha256:cc7c92c1baa629cb03ecb0c3d12564f172218fb1739f54bf5f3881844daadc6d", - "sha256:cc7d768260f4ba4ea01741c1b5fe3d3a6c70eb91c87f4c8761bbcce5181beafe", - "sha256:d0eea830b591dbc68e030c86a9569826145df485b2b4554874b07fea1275a199", - "sha256:d216e5d9b8749563c7f2c6f7a0831057ec844c68b4c11cb10fc62d4fd373c26d", - "sha256:d401f07261dc5aa36c2e4efc308548f6ae943bfff20fcadb0a07517a26b196d8", - "sha256:d6324274b4e0e2fa1b3eccb25997b1c9ed134ff61d296448ab8269f5ac068c4c", - "sha256:d8a8b74d843c2638f3864a17d97a4acda58e40d3e44b6303b8cc3d3c44ae2d29", - "sha256:d9b6b28a57feb51605d6ae5e61a9044a31742db557a3b851a74c13bc61de5172", - "sha256:de599af166970d6a61accde358ec9ded821234cbbc8c6413acfec06056b8e860", - "sha256:e594b22688d5747b06e957f1ef822060cb5cb35b493066e33ceac0cf882188b7", - "sha256:e5b078134f48552c4d9527db2f7da0b5359abd49393cdf9794017baec7506170", - "sha256:eb6dce402734575e1a8cc0bb1509afca508a400a57ce13d306ea2c663bad1138", - "sha256:f1790a4b1e8e8e028c391175433b9c8122c39b46e1663228158e61e6f915bf06", - "sha256:f5efe0661b9fcd6246f27957f6ae1c0eb29bc60552820f01e970b4996e016004", - "sha256:f9cbfbc5faca235fbdf531b93aa0f9f005ec7d267d9d738761a4d42b744ea159", - "sha256:fbea1751729afe607d84acfd01efd95e3b31db148a181a441984ce9b3d3469da", - "sha256:fca4b4307ebe9c3ec77a084da3a9d1999d164693d16492ca2b64594340999988", - "sha256:ff5c6771c7e3511a06555afa317879b7db8d640137ba55d6ab0d0c50425cab75" + "sha256:0c8e589379ef0407b10bed16cc26e7392ef8f86961a706ade0a22309a45414d7", + "sha256:0d41c684f286ce41fa05ab6af70f32d6da1b6f0457459a56cf9e393c1c0b2217", + "sha256:1056cadd5e850a1c026f28e0704ab0a94daaa8f887ece8dfed30f88befb87bb0", + "sha256:11d86c6145ac5c706c53d484784cf504d7d10fa407cb73b9d20f09ff986059ef", + "sha256:170ed4971bf9058582b01a8338605f4d8c849bd88834061e60e83b52d0c76870", + "sha256:17791acaa0c0f89323c57da7b9a79f2174e26d5debbc8c02d84ebd80c2b7bff8", + "sha256:17931dfbb84ae18b287279c1f92b76a3abcd9a49cd69b92e946035cff06bcd20", + "sha256:18662443c6c3707e2fc7fad184b4dc32dd428710bbe72e1bce7fe1988d4aa654", + "sha256:187df91395c11e9f9dc69b38d12406df85aa5865f1766a47907b1cc9855b6303", + "sha256:1fee66b32e79264f428dc8da18396ad59cc48eef3c9c13844adec890cd339db5", + "sha256:2270d590997445a0dc29afa92e5534bfea76ba3aea026289e811bf9ed4b65a7f", + "sha256:2654caaf5584449d49c94a6b382b3cb4a246c090e72453493ea168b931206a4d", + 
"sha256:26bfb6226e0c157af5da16d2d62258f1ac578d2899130a50433ffee4a5dfa673", + "sha256:2941756754a10e799e5b87e2319bbec481ed0957421fba0e7b9fb1c11e40509f", + "sha256:3294f787a437cb5d81846de3a6697f0c35ecff37a932d73b1fe62490bef69211", + "sha256:358dc7ddf25e79e1cc8ee16d970c23faee84d532b873519c5036dbb858965795", + "sha256:38bc4ed5cae853409cb193c87c86cd0bc8d3a70fd2268a9807217b9176093ac6", + "sha256:3a0baff7827a632204060f48dca9e63fbd6a5a0b8790c1a2adfb25dc2c9c0d50", + "sha256:3a3ede8c248f36b60227eb777eac1dbc2f1022dc4d741b177c4379ca8e75571a", + "sha256:3a58a2f2ca7aaf22b265388d40232f453f67a6def7355a840b98c2d547bd037f", + "sha256:4434b739a8a101a837caeaa0137e0e38cb4ea561f39cb8960f3b1e7f4967a3fc", + "sha256:460024cacfc3246cc4d9f47a7fc860e4fcea7d1dc651e1256510d8c3c9c7cde0", + "sha256:46c465ad06971abcf46dd532f77560181387b4eea59084434bdff97524444032", + "sha256:48e424347a45568413deec6f6ee2d720de2cc0385019bedf44cd93e8638aa0ed", + "sha256:4a8c83f6fcdc327783bdc737e8e45b2e909b7bd108c4da1892d3bc59c04a6d84", + "sha256:4c840cc11163d3c01a9d8aad227683c48cd3e5be5a785921bcc2a8b4b758c4f3", + "sha256:4d486ddcaca8c68455aa01cf53d28d413fb41a35afc9f6594a730c9779545876", + "sha256:4e76381be3d8ff96a4e6c77815653063e87555981329cf8f85e5be5abf449021", + "sha256:50d866f7b1a3f16f98603e095f24c0eeba25eb508c85a2c5939c8b3870ba2df8", + "sha256:52492b87d5877ec405542f43cd3da80bdcb2d0c2fbc73236526e5f2c28e6db28", + "sha256:56afb44a12b0864d17b597210d63a5b88915d680f6484d8d202ed68ade38673d", + "sha256:585ce7cd97be8f538345de47b279b879e091c8b86d9dbc6d98a96a7ad78876a3", + "sha256:5870d620b23b956f72bafed6a0ba9a62edb5f2ef78a8849b7615bd9433384171", + "sha256:5c6ea72fe619fee5e6b5d4040a451d45d8175f560b11b3d3e044cd24b2720526", + "sha256:688058e89f512fb7541cb85c2f149c292d3fa22f981d5a5453b40c5da49eb9e8", + "sha256:6a3f47930fbbed0f6377639503848134c4aa25426b08778d641491131351c2c8", + "sha256:6b981316fcd940f085f646b822c2ff2b8b813cbd61281acad229ea3cbaabeb6b", + "sha256:734144cd2bd633a1516948e477ff6c835041c0536cef1d5b9a823ae29899665b", + "sha256:736bb076f7299c5c55dfef3eb9e96071a795cb08052822c2bb349b06f4cb2e0a", + "sha256:752485cbbb50c1e20908450ff4f94217acba9358ebdce0d8106510859d6eb19a", + "sha256:753eaaa0c7195244c84b5cc159dc8204b7fd99f716f11198f999f2332a86b178", + "sha256:75ac158560dec3ed72f6d604c81090ec44529cfb8169b05ae6fcb3e986b325d9", + "sha256:76499469dcc24759399accd85ec27f237d52dec300daaca46a5352fcbebb1071", + "sha256:782ca9c58f5c491c7afa55518542b2b005caedaf4685ec814fadfcee51f02493", + "sha256:792155279dc093839e43f85ff7b9b6493a8eaa0af1f94f1f9c6e8f4de8c63500", + "sha256:7a1606ba68e311576bcb1672b2a1543417e7e0aa4c85e9e718ba6466952476c0", + "sha256:8281db240a1616af2f9c5f71d355057e73a1409c4648c8949901396dc0a3c151", + "sha256:871e1b47eec7b6df76b23c642a81db5dd6536cbef26b7e80e7c56c2fd371382e", + "sha256:8b9c4643e7d843a0dca9cd9d610a0876e90a1b2cbc4c5ba7930a0d90baf6903f", + "sha256:8c6d5fed96f0646bfdf698b0a1cebf32b8aae6892d1bec0c5d2d6e2df44e1e2d", + "sha256:8e1bf59e035534ba4077f5361d8d5d9194149f9ed4f823d1ee29ef3e8964ace3", + "sha256:8fd51299e21da709eabcd5b2dd60e39090804431292daacbee8d3dabe39a6bc0", + "sha256:91c012dceadc695ccf69301bfdccd1fc4472ad714fe2dd3c5ab4d2046afddf29", + "sha256:93771146ef048b34201bfa382c2bf74c524980870bb278e6df515efaf93699ff", + "sha256:93d1c8cc5bf5df401015c5e2a3ce75a5254a9839e5039c881365d2a9dcfc6dc2", + "sha256:9611b83810a74a46be88847e0ea616794c406dbcb4e25405e52bff8f4bee2d0a", + "sha256:9bc27dd5cfdbe3dc7f381b05e6260ca6da41931a6e582267d5ca540270afeeb2", + "sha256:ac8eda86cc75859093e9ce390d423aba968f50cf0e481e6c7d7d63f90bae5c9c", + 
"sha256:bc3003710e335e3f842ae3fd78efa55f11a863a89a72e9a07da214db3bf7e1f8", + "sha256:bc61b005f6521fcc00ca0d1243559a5850b9dd1e1fe07b891410ee8fe192d0c0", + "sha256:be4c7b1c49d9917c6e95258d3d07f43cfba2c69a6929816e77daf322aaba6628", + "sha256:c019abc2eca67dfa4d8fb72ba924871d764ec3c92b86d5b53b405ad3d6aa56b0", + "sha256:c42774d1d1508ec48c3ed29e7b110e33f5e74a20957ea16197dbcce8be6b52ba", + "sha256:c556fbc6820b6e2cda1ca675c5fa5589cf188f8da6b33e9fc05b002e603e44fa", + "sha256:c6e659b9a24d145e271c2faf3fa6dd1fcb3e5d3f4e17273d9e0350b6ab0fe6e2", + "sha256:c74f0b0472ac40b04e6d28532f55cac8090e34c3e81f118d12843e6df14d0909", + "sha256:cd7e35818d2328b679a13268d9ea505c85cd773572ebb7a0da7ccbca77b6a52e", + "sha256:d17832ba39374134c10e82d137e372b5f7478c4cceeb19d02ae3e3d1daed8721", + "sha256:d1fa68a3c921365c5745b4bd3af6221ae1f0ea1bf04b69e94eda60e57958907f", + "sha256:d63123bfd0dce5f91101e77c8a5427c3872501acece8c90df457b486bc1acd47", + "sha256:da9d3061e61e5ae3f753654813bc1cd1c70e02fb72cf871bd6daf78443e9e2b1", + "sha256:db5ac3871ed76340210fe028f535392f097fb31b875354bcb69162bba2632ef4", + "sha256:dd7abf4f717e33b7487121faf23560b3a50924f80e4bef62b22dab441ded8f3b", + "sha256:dd90238d3a77a0e07d4d6ffdebc0c21a9787c5953a508a2231b5f191455f31e9", + "sha256:ef6eee1a61638d29cd7c85f7fd3ac7b22b4c0fabc8fd00a712b727a3e73b0685", + "sha256:f11fd61d72d93ac23718d393d2a64469af40be2116b24da0a4ca6922df26807e", + "sha256:f1e7fedb09c059efee2533119666ca7e1a2610072076926fa028c2ba5dfeb78c", + "sha256:f25b7e93f5414b9a983e1a6c1820142c13e1782cc9ed354c25e933aebe97fcf2", + "sha256:f2f44a4247461965fed18b2573f3a9eb5e2c3cad225201ee858726cde610daca", + "sha256:f5ffc6b7ace5b22d9e73b2a4c7305740a339fbd55301d52735f73e21d9eb3130", + "sha256:ff6af03cac0d1a4c3c19e5dcc4c05252411bf44ccaa2485e20d0a7c77892ab6e", + "sha256:ff8d95e06546c3a8c188f68040e9d0360feb67ba8498baf018918f669f7bc39b" ], "markers": "python_version >= '3.9'", - "version": "==1.17.1" + "version": "==1.17.2" }, "zope.event": { "hashes": [ diff --git a/api/__init__.py b/api/__init__.py index dd1f770c..15c1f0cd 100644 --- a/api/__init__.py +++ b/api/__init__.py @@ -17,7 +17,6 @@ # Setup the broker and the middleware redis_broker = RedisBroker(url=settings.redis_url) redis_broker.middleware = [ - Prometheus(), AgeLimit(), TimeLimit(), ShutdownNotifications(), diff --git a/db/models.py b/db/models.py index 00a987de..9e0b54e1 100644 --- a/db/models.py +++ b/db/models.py @@ -101,6 +101,7 @@ class TVStreams(Document): source: str behaviorHints: dict[str, Any] | None = None created_at: datetime = Field(default_factory=datetime.now) + updated_at: datetime = Field(default_factory=datetime.now) country: str | None = None is_working: Optional[bool] = True test_failure_count: int = 0 @@ -152,6 +153,7 @@ class MediaFusionMetaData(Document): runtime: Optional[str] = None website: Optional[str] = None genres: Optional[list[str]] = Field(default_factory=list) + created_at: datetime = Field(default_factory=datetime.now) last_updated_at: datetime = Field(default_factory=datetime.now) class Settings: @@ -171,7 +173,7 @@ def validate_runtime(cls, v): class MediaFusionMovieMetaData(MediaFusionMetaData): type: str = "movie" imdb_rating: Optional[float] = None - parent_guide_nudity_status: Optional[str] = "None" + parent_guide_nudity_status: Optional[str] = "Unknown" parent_guide_certificates: Optional[list[str]] = Field(default_factory=list) stars: Optional[list[str]] = Field(default_factory=list) diff --git a/db/new_models.py b/db/new_models.py new file mode 100644 index 00000000..6719182a --- /dev/null +++ 
b/db/new_models.py @@ -0,0 +1,345 @@ +from datetime import datetime +from enum import Enum as PyEnum +from typing import ClassVar + +import pytz +from sqlalchemy import DateTime, BigInteger, UniqueConstraint, Index, JSON +from sqlmodel import SQLModel, Field + + +# Enums +class MediaType(str, PyEnum): + MOVIE = "movie" + SERIES = "series" + TV = "tv" + EVENTS = "events" + + +class IndexerType(str, PyEnum): + FREELEACH = "freeleech" + SEMI_PRIVATE = "semi-private" + PRIVATE = "private" + + +class NudityStatus(str, PyEnum): + NONE = "None" + MILD = "Mild" + MODERATE = "Moderate" + SEVERE = "Severe" + UNKNOWN = "Unknown" + + +# Base Models and Mixins +class TimestampMixin(SQLModel): + created_at: datetime = Field( + default_factory=lambda: datetime.now(pytz.UTC), + nullable=False, + sa_type=DateTime(timezone=True), + ) + updated_at: datetime | None = Field( + default=None, + sa_column_kwargs={"onupdate": datetime.now(pytz.UTC)}, + index=True, + sa_type=DateTime(timezone=True), + ) + + +class BaseMetadata(TimestampMixin, table=True): + """Base table for all metadata""" + + __tablename__ = "base_metadata" + __table_args__ = ( + Index("idx_base_meta_type_title", "type", "title"), + UniqueConstraint("title", "year"), + # Pattern matching index for partial title searches + Index( + "idx_base_title_search", + "title", + postgresql_using="gin", + postgresql_ops={"title": "gin_trgm_ops"}, + ), + ) + + id: str = Field(primary_key=True) + type: MediaType = Field(index=True) + title: str + year: int | None = Field(default=None) + poster: str | None + is_poster_working: bool = Field(default=True) + is_add_title_to_poster: bool = Field(default=False) + background: str | None + description: str | None + runtime: str | None + website: str | None + + +class MovieMetadata(TimestampMixin, table=True): + """Movie specific metadata table""" + + __tablename__ = "movie_metadata" + + id: str = Field( + primary_key=True, foreign_key="base_metadata.id", ondelete="CASCADE" + ) + imdb_rating: float | None = Field(default=None, index=True) + parent_guide_nudity_status: NudityStatus = Field( + default=NudityStatus.UNKNOWN, index=True + ) + type: ClassVar[MediaType] = MediaType.MOVIE + + +class SeriesMetadata(TimestampMixin, table=True): + """Series specific metadata table""" + + __tablename__ = "series_metadata" + + id: str = Field( + primary_key=True, foreign_key="base_metadata.id", ondelete="CASCADE" + ) + end_year: int | None = Field(default=None, index=True) + imdb_rating: float | None = Field(default=None, index=True) + parent_guide_nudity_status: NudityStatus = Field( + default=NudityStatus.UNKNOWN, index=True + ) + type: ClassVar[MediaType] = MediaType.SERIES + + +class TVMetadata(TimestampMixin, table=True): + """TV specific metadata table""" + + __tablename__ = "tv_metadata" + + id: str = Field( + primary_key=True, foreign_key="base_metadata.id", ondelete="CASCADE" + ) + country: str | None = Field(default=None, index=True) + tv_language: str | None = Field(default=None, index=True) + logo: str | None + type: ClassVar[MediaType] = MediaType.TV + + +# Supporting Models +class Genre(SQLModel, table=True): + __tablename__ = "genre" + + id: int | None = Field(default=None, primary_key=True) + name: str = Field(unique=True, index=True) + + +class MediaGenreLink(SQLModel, table=True): + __tablename__ = "media_genre_link" + + media_id: str = Field( + foreign_key="base_metadata.id", primary_key=True, ondelete="CASCADE" + ) + genre_id: int = Field(foreign_key="genre.id", primary_key=True, ondelete="CASCADE") + + +class 
Catalog(SQLModel, table=True): + __tablename__ = "catalog" + + id: int | None = Field(default=None, primary_key=True) + name: str = Field(unique=True) + + +class MediaCatalogLink(SQLModel, table=True): + __tablename__ = "media_catalog_link" + __table_args__ = {"postgresql_partition_by": "LIST (catalog_id)"} + + media_id: str = Field( + foreign_key="base_metadata.id", primary_key=True, ondelete="CASCADE" + ) + catalog_id: int = Field( + foreign_key="catalog.id", primary_key=True, ondelete="CASCADE" + ) + priority: int = Field(default=0, index=True) + + class Config: + arbitrary_types_allowed = True + + +class AkaTitle(SQLModel, table=True): + __tablename__ = "aka_title" + + id: int | None = Field(default=None, primary_key=True) + title: str = Field(index=True) + media_id: str = Field( + foreign_key="base_metadata.id", index=True, ondelete="CASCADE" + ) + + +class ParentalCertificate(SQLModel, table=True): + __tablename__ = "parental_certificate" + + id: int | None = Field(default=None, primary_key=True) + name: str = Field(unique=True, index=True) + + +class MediaParentalCertificateLink(SQLModel, table=True): + __tablename__ = "media_parental_certificate_link" + + media_id: str = Field( + foreign_key="base_metadata.id", primary_key=True, ondelete="CASCADE" + ) + certificate_id: int = Field( + foreign_key="parental_certificate.id", primary_key=True, ondelete="CASCADE" + ) + + +class Star(SQLModel, table=True): + __tablename__ = "star" + + id: int | None = Field(default=None, primary_key=True) + name: str = Field(index=True) + + +class MediaStarLink(SQLModel, table=True): + __tablename__ = "media_star_link" + + media_id: str = Field( + foreign_key="base_metadata.id", primary_key=True, ondelete="CASCADE" + ) + star_id: int = Field(foreign_key="star.id", primary_key=True, ondelete="CASCADE") + + +# Stream Models +class TorrentStream(TimestampMixin, table=True): + __tablename__ = "torrent_stream" + __table_args__ = ( + Index( + "idx_torrent_stream_meta_blocked", + "meta_id", + postgresql_where="NOT is_blocked", + ), + Index( + "idx_torrent_meta_created", + "meta_id", + "created_at", + postgresql_where="NOT is_blocked", + ), + Index( + "idx_torrent_meta_source", + "meta_id", + "source", + ), + ) + + id: str = Field(primary_key=True) + meta_id: str = Field(foreign_key="base_metadata.id", index=True, ondelete="CASCADE") + torrent_name: str + size: int = Field(sa_type=BigInteger, gt=0) + filename: str | None + file_index: int | None + source: str = Field(index=True) + resolution: str | None = Field(default=None) + codec: str | None + quality: str | None = Field(default=None) + audio: str | None + seeders: int | None = Field(default=None) + is_blocked: bool = Field(default=False, index=True) + indexer_flag: IndexerType = Field(default=IndexerType.FREELEACH) + + +class Season(SQLModel, table=True): + __tablename__ = "season" + __table_args__ = ( + Index("idx_season_torrent_number", "torrent_stream_id", "season_number"), + ) + + id: int | None = Field(default=None, primary_key=True) + torrent_stream_id: str = Field(foreign_key="torrent_stream.id", ondelete="CASCADE") + season_number: int + + +class Episode(SQLModel, table=True): + __tablename__ = "episode" + __table_args__ = (UniqueConstraint("season_id", "episode_number"),) + + id: int | None = Field(default=None, primary_key=True) + season_id: int = Field(foreign_key="season.id", ondelete="CASCADE") + episode_number: int = Field(index=True) + filename: str | None + size: int | None = Field(default=None, sa_type=BigInteger) + file_index: int | None + 
title: str | None + released: datetime | None = Field( + default=None, + sa_type=DateTime(timezone=True), + ) + + +class TVStream(TimestampMixin, table=True): + __tablename__ = "tv_stream" + __table_args__ = ( + UniqueConstraint("url", "ytId"), + Index("idx_tv_stream_meta_working", "meta_id", "is_working"), + ) + + id: int | None = Field(default=None, primary_key=True) + meta_id: str = Field(foreign_key="base_metadata.id", index=True, ondelete="CASCADE") + name: str + url: str | None = Field(default=None) + ytId: str | None = Field(default=None) + externalUrl: str | None + source: str = Field(index=True) + country: str | None = Field(default=None, index=True) + is_working: bool = Field(default=True, index=True) + test_failure_count: int = Field(default=0) + drm_key_id: str | None + drm_key: str | None + behaviorHints: dict | None = Field(default=None, sa_type=JSON) + + +# Stream Relationship Models +class Language(SQLModel, table=True): + __tablename__ = "language" + + id: int | None = Field(default=None, primary_key=True) + name: str = Field(unique=True) + + +class TorrentLanguageLink(SQLModel, table=True): + __tablename__ = "torrent_language_link" + + torrent_id: str = Field( + foreign_key="torrent_stream.id", primary_key=True, ondelete="CASCADE" + ) + language_id: int = Field( + foreign_key="language.id", primary_key=True, ondelete="CASCADE" + ) + + +class AnnounceURL(SQLModel, table=True): + __tablename__ = "announce_url" + + id: int | None = Field(default=None, primary_key=True) + name: str = Field(unique=True) + + +class TorrentAnnounceLink(SQLModel, table=True): + __tablename__ = "torrent_announce_link" + + torrent_id: str = Field( + foreign_key="torrent_stream.id", primary_key=True, ondelete="CASCADE" + ) + announce_id: int = Field( + foreign_key="announce_url.id", primary_key=True, ondelete="CASCADE" + ) + + +class Namespace(SQLModel, table=True): + __tablename__ = "namespace" + + id: int | None = Field(default=None, primary_key=True) + name: str = Field(unique=True) + + +class TVStreamNamespaceLink(SQLModel, table=True): + __tablename__ = "tv_stream_namespace_link" + + stream_id: int = Field( + foreign_key="tv_stream.id", primary_key=True, ondelete="CASCADE" + ) + namespace_id: int = Field( + foreign_key="namespace.id", primary_key=True, ondelete="CASCADE" + ) diff --git a/db/schemas.py b/db/schemas.py index 864b92be..c2a3d893 100644 --- a/db/schemas.py +++ b/db/schemas.py @@ -57,7 +57,7 @@ class MetaItem(BaseModel): class Metas(BaseModel): - metas: list[Meta] = [] + metas: list[Meta] = Field(default_factory=list) class StreamBehaviorHints(BaseModel): @@ -81,7 +81,7 @@ class Stream(BaseModel): class Streams(BaseModel): - streams: Optional[list[Stream]] = [] + streams: Optional[list[Stream]] = Field(default_factory=list) class QBittorrentConfig(BaseModel): @@ -292,7 +292,7 @@ class TVMetaData(BaseModel): country: str | None = None tv_language: str | None = None logo: Optional[str] = None - genres: list[str] = [] + genres: list[str] = Field(default_factory=list) streams: list[TVStreams] namespace: str = Field(default="mediafusion") diff --git a/migrations/__init__.py b/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/migrations/mongo_to_postgres.py b/migrations/mongo_to_postgres.py new file mode 100644 index 00000000..1cbc8cf6 --- /dev/null +++ b/migrations/mongo_to_postgres.py @@ -0,0 +1,1134 @@ +import asyncio +import logging +from datetime import timezone +from typing import Dict, List + +import sqlalchemy +import typer +from beanie import 
init_beanie +from motor.motor_asyncio import AsyncIOMotorClient +from sqlalchemy import make_url, func, text +from sqlalchemy.ext.asyncio import create_async_engine +from sqlmodel import select +from sqlmodel.ext.asyncio.session import AsyncSession +from tqdm import tqdm + +from db.models import ( + MediaFusionMetaData as OldMetaData, + MediaFusionMovieMetaData as OldMovieMetaData, + MediaFusionSeriesMetaData as OldSeriesMetaData, + MediaFusionTVMetaData as OldTVMetaData, + TorrentStreams as OldTorrentStreams, + TVStreams as OldTVStreams, +) +from db.new_models import * # Import all new models +from utils.validation_helper import is_video_file + +logging.basicConfig( + level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" +) +logger = logging.getLogger(__name__) + +app = typer.Typer() + + +class ResourceTracker: + """Track and manage resources across the migration""" + + def __init__(self): + self._resource_maps: Dict[str, Dict[str, int]] = { + "genre": {}, + "catalog": {}, + "language": {}, + "announce_url": {}, + "namespace": {}, + "star": {}, + } + self._pending_inserts: Dict[str, set] = { + key: set() for key in self._resource_maps + } + + async def initialize_from_db(self, session: AsyncSession): + """Load existing resource IDs from PostgreSQL""" + resource_models = { + "genre": Genre, + "catalog": Catalog, + "language": Language, + "announce_url": AnnounceURL, + "namespace": Namespace, + "star": Star, + } + + for resource_type, model in resource_models.items(): + result = await session.exec(select(model)) + existing_resources = result.all() + for resource in existing_resources: + self._resource_maps[resource_type][resource.name] = resource.id + + async def ensure_resources(self, session: AsyncSession): + """Ensure all pending resources are created in the database""" + resource_models = { + "genre": Genre, + "catalog": Catalog, + "language": Language, + "announce_url": AnnounceURL, + "namespace": Namespace, + "star": Star, + } + + for resource_type, model in resource_models.items(): + pending = self._pending_inserts[resource_type] + if not pending: + continue + + # Get existing resources + stmt = select(model).where(model.name.in_(pending)) + result = await session.exec(stmt) + existing = {r.name: r.id for r in result} + + # Create new resources + new_resources = pending - existing.keys() + if new_resources: + for name in new_resources: + new_resource = model(name=name) + session.add(new_resource) + + await session.commit() + + # Get IDs of newly created resources + stmt = select(model).where(model.name.in_(new_resources)) + result = await session.exec(stmt) + new_ids = {r.name: r.id for r in result} + + existing.update(new_ids) + + # Update resource map + self._resource_maps[resource_type].update(existing) + self._pending_inserts[resource_type].clear() + + def track_resource(self, resource_type: str, name: str): + """Track a resource for creation""" + if name and name not in self._resource_maps[resource_type]: + self._pending_inserts[resource_type].add(name) + + def get_resource_id(self, resource_type: str, name: str) -> int | None: + """Get ID of a tracked resource""" + return self._resource_maps[resource_type].get(name) + + +class VerificationResult: + """Stores verification results""" + + def __init__(self): + self.counts: Dict[str, tuple[int, int]] = {} # (mongo_count, pg_count) + self.sample_checks: Dict[str, List[str]] = {} # List of failed checks + self.relationship_checks: Dict[str, List[str]] = ( + {} + ) # List of failed relationships + + +class 
DatabaseMigration: + def __init__( + self, + mongo_uri: str, + postgres_uri: str, + batch_size: int = 1000, + ): + + self.pg_engine = None + self.mongo_client = None + self.mongo_uri = mongo_uri + self.postgres_uri = postgres_uri + self.batch_size = batch_size + self.resource_tracker = ResourceTracker() + self.verification_result = VerificationResult() + + async def init_connections(self, connect_mongo: bool = True): + """Initialize database connections""" + # Initialize MongoDB + if connect_mongo: + self.mongo_client = AsyncIOMotorClient(self.mongo_uri) + db = self.mongo_client.get_default_database() + await init_beanie( + database=db, + document_models=[ + OldMovieMetaData, + OldSeriesMetaData, + OldTVMetaData, + OldTorrentStreams, + OldTVStreams, + ], + ) + + # Create database if not exists + + postgres_url = make_url(self.postgres_uri) + database_name = postgres_url.database + # PostgreSQL connection for creating database + temp_engine = create_async_engine( + postgres_url.set(database="postgres"), echo=False + ) + + async with temp_engine.connect() as conn: + # Close any open transactions + await conn.execute(sqlalchemy.text("COMMIT")) + + result = await conn.execute( + sqlalchemy.text( + f"SELECT 1 FROM pg_database WHERE datname='{database_name}'" + ) + ) + + if not result.scalar(): + await conn.execute(sqlalchemy.text(f"CREATE DATABASE {database_name}")) + logger.info(f"Database '{database_name}' created.") + + await temp_engine.dispose() + + # Initialize PostgreSQL + self.pg_engine = create_async_engine( + self.postgres_uri, echo=False, pool_size=20, max_overflow=30 + ) + + # Create tables if not exists + async with self.pg_engine.begin() as conn: + # Create extensions first + await conn.execute(text("CREATE EXTENSION IF NOT EXISTS pg_trgm;")) + await conn.execute(text("CREATE EXTENSION IF NOT EXISTS btree_gin;")) + + await conn.run_sync(SQLModel.metadata.create_all) + + async def initialize_resources(self): + # Initialize resource tracker + async with AsyncSession(self.pg_engine) as session: + await self.resource_tracker.initialize_from_db(session) + + async def reset_database(self): + """Reset PostgreSQL database""" + async with self.pg_engine.begin() as conn: + await conn.run_sync(SQLModel.metadata.drop_all) + await conn.run_sync(SQLModel.metadata.create_all) + + async def close_connections(self): + """Close database connections""" + try: + if self.mongo_client: + self.mongo_client.close() + except Exception as e: + logger.error(f"Error closing MongoDB connection: {str(e)}") + + try: + if self.pg_engine: + await self.pg_engine.dispose() + except Exception as e: + logger.error(f"Error closing PostgreSQL connection: {str(e)}") + + async def migrate_metadata(self): + """Migrate metadata to separate tables using cursor-based pagination""" + collections = [ + (OldMovieMetaData, MovieMetadata, "movies"), + (OldSeriesMetaData, SeriesMetadata, "series"), + (OldTVMetaData, TVMetadata, "tv"), + ] + + for old_model, new_model_class, collection_name in collections: + total = await old_model.find().count() + if total == 0: + logger.info(f"No {collection_name} to migrate") + continue + + processed = 0 + cursor = old_model.find() + + with tqdm(total=total) as pbar: + pbar.set_description(f"Migrating {collection_name}") + + async for old_doc in cursor: + try: + async with AsyncSession(self.pg_engine) as session: + # Handle base metadata first + base_data = await self.transform_base_metadata( + old_doc, new_model_class.type + ) + stmt = select(BaseMetadata).where( + BaseMetadata.id == old_doc.id 
+ ) + result = await session.exec(stmt) + base_meta = result.first() + + if base_meta: + for key, value in base_data.items(): + setattr(base_meta, key, value) + else: + base_meta = BaseMetadata(**base_data) + session.add(base_meta) + + await session.commit() + + # Handle type-specific metadata + specific_data = await self.transform_specific_metadata( + old_doc, new_model_class.type + ) + stmt = select(new_model_class).where( + new_model_class.id == old_doc.id + ) + result = await session.exec(stmt) + specific_meta = result.first() + + if specific_meta: + for key, value in specific_data.items(): + if key != "id" and specific_meta.__fields__.get( + key + ): + setattr(specific_meta, key, value) + else: + specific_meta = new_model_class(**specific_data) + session.add(specific_meta) + + await session.commit() + + # Handle relationships + await self.migrate_metadata_relationships( + session, old_doc, old_doc.id, new_model_class.type + ) + + processed += 1 + pbar.update(1) + + except Exception as e: + logger.exception( + f"Error processing document {old_doc.id}: {str(e)}" + ) + await session.rollback() + continue + + @staticmethod + async def transform_base_metadata( + old_doc: OldMetaData, media_type: MediaType + ) -> dict: + """Transform metadata to base table format""" + # Ensure timezone-aware datetimes + created_at = ( + old_doc.created_at.replace(tzinfo=timezone.utc) + if old_doc.created_at + else None + ) + updated_at = ( + old_doc.last_updated_at.replace(tzinfo=timezone.utc) + if old_doc.last_updated_at + else None + ) + + return { + "id": old_doc.id, + "title": old_doc.title, + "year": old_doc.year, + "poster": old_doc.poster, + "is_poster_working": old_doc.is_poster_working, + "is_add_title_to_poster": old_doc.is_add_title_to_poster, + "background": old_doc.background, + "description": old_doc.description, + "runtime": old_doc.runtime, + "website": old_doc.website, + "type": media_type, + "created_at": created_at, + "updated_at": updated_at, + } + + @staticmethod + async def transform_specific_metadata( + old_doc: OldMetaData, media_type: MediaType + ) -> dict: + """Transform metadata to specific table format""" + # Ensure timezone-aware datetimes + created_at = ( + old_doc.created_at.replace(tzinfo=timezone.utc) + if old_doc.created_at + else None + ) + updated_at = ( + old_doc.last_updated_at.replace(tzinfo=timezone.utc) + if old_doc.last_updated_at + else None + ) + + data = { + "id": old_doc.id, + "title": old_doc.title, + "year": old_doc.year, + "poster": old_doc.poster, + "is_poster_working": old_doc.is_poster_working, + "is_add_title_to_poster": old_doc.is_add_title_to_poster, + "background": old_doc.background, + "description": old_doc.description, + "runtime": old_doc.runtime, + "website": old_doc.website, + "created_at": created_at, + "updated_at": updated_at, + } + + # Add type-specific fields + if media_type == MediaType.MOVIE: + data.update( + { + "imdb_rating": getattr(old_doc, "imdb_rating", None), + "parent_guide_nudity_status": getattr( + old_doc, "parent_guide_nudity_status" + ), + } + ) + elif media_type == MediaType.SERIES: + data.update( + { + "end_year": getattr(old_doc, "end_year", None), + "imdb_rating": getattr(old_doc, "imdb_rating", None), + "parent_guide_nudity_status": getattr( + old_doc, "parent_guide_nudity_status" + ), + } + ) + elif media_type == MediaType.TV: + data.update( + { + "country": getattr(old_doc, "country", None), + "tv_language": getattr(old_doc, "tv_language", None), + "logo": getattr(old_doc, "logo", None), + } + ) + + return data + + 
async def migrate_metadata_relationships( + self, + session: AsyncSession, + old_doc: OldMetaData, + media_id: str, + media_type: MediaType, + ): + """Migrate all relationships for a metadata record""" + try: + # Migrate genres + existing_genres_result = await session.exec( + select(MediaGenreLink.genre_id).where( + MediaGenreLink.media_id == media_id + ) + ) + existing_genre_ids = set(existing_genres_result.all()) + + for genre in old_doc.genres or []: + genre_id = self.resource_tracker.get_resource_id("genre", genre) + if genre_id and genre_id not in existing_genre_ids: + link = MediaGenreLink(media_id=media_id, genre_id=genre_id) + session.add(link) + + # Migrate AKA titles + stmt = select(AkaTitle.title).where(AkaTitle.media_id == media_id) + aka_title = await session.exec(stmt) + existing_aka_titles = set(aka_title.all()) + + for title in getattr(old_doc, "aka_titles", None) or []: + if title not in existing_aka_titles: + aka = AkaTitle(title=title, media_id=media_id) + session.add(aka) + + # Migrate stars + if hasattr(old_doc, "stars"): + existing_stars_result = await session.exec( + select(MediaStarLink.star_id).where( + MediaStarLink.media_id == media_id, + ) + ) + existing_star_ids = set(existing_stars_result.all()) + + for star_name in old_doc.stars or []: + star_id = self.resource_tracker.get_resource_id("star", star_name) + if star_id and star_id not in existing_star_ids: + link = MediaStarLink( + media_id=media_id, + star_id=star_id, + ) + session.add(link) + + # Migrate certificates + if hasattr(old_doc, "parent_guide_certificates"): + stmt = select(ParentalCertificate).where( + ParentalCertificate.name.in_( + old_doc.parent_guide_certificates or [] + ) + ) + existing_certs = await session.exec(stmt) + existing_certificates = {cert.name: cert.id for cert in existing_certs} + + for cert in old_doc.parent_guide_certificates or []: + if cert not in existing_certificates: + new_cert = ParentalCertificate(name=cert) + session.add(new_cert) + await session.commit() + await session.refresh(new_cert) + existing_certificates[cert] = new_cert.id + + stmt = select(MediaParentalCertificateLink).where( + MediaParentalCertificateLink.media_id == media_id, + MediaParentalCertificateLink.certificate_id + == existing_certificates[cert], + ) + existing_link = await session.exec(stmt) + if not existing_link.first(): + link = MediaParentalCertificateLink( + media_id=media_id, + certificate_id=existing_certificates[cert], + ) + session.add(link) + + # Migrate catalogs from torrent streams + existing_catalogs_result = await session.exec( + select(MediaCatalogLink.catalog_id).where( + MediaCatalogLink.media_id == media_id + ) + ) + existing_catalog_ids = set(existing_catalogs_result.all()) + + torrent_streams = await OldTorrentStreams.find( + {"meta_id": media_id} + ).to_list() + + for stream in torrent_streams: + catalogs = ( + [stream.catalog] + if isinstance(stream.catalog, str) + else (stream.catalog or []) + ) + for catalog in catalogs: + catalog_id = self.resource_tracker.get_resource_id( + "catalog", catalog + ) + if catalog_id and catalog_id not in existing_catalog_ids: + link = MediaCatalogLink( + media_id=media_id, catalog_id=catalog_id + ) + session.add(link) + existing_catalog_ids.add(catalog_id) + + await session.commit() + + except Exception as e: + logger.error(f"Error migrating relationships for {media_id}: {str(e)}") + await session.rollback() + raise + + async def migrate_torrent_streams(self): + """Migrate torrent streams using cursor-based pagination""" + total = await 
OldTorrentStreams.find().count() + if total == 0: + logger.info("No torrent streams to migrate") + return + + processed = 0 + cursor = OldTorrentStreams.find() + + with tqdm(total=total) as pbar: + pbar.set_description("Migrating torrent streams") + + async for old_stream in cursor: + try: + async with AsyncSession(self.pg_engine) as session: + # Track all resources first + for lang in old_stream.languages or []: + self.resource_tracker.track_resource("language", lang) + for url in old_stream.announce_list or []: + self.resource_tracker.track_resource("announce_url", url) + + # Ensure all resources exist + await self.resource_tracker.ensure_resources(session) + + # Validate metadata exists + result = await session.exec( + select(BaseMetadata).where( + BaseMetadata.id == old_stream.meta_id + ) + ) + if not result.first(): + logger.warning( + f"Skipping stream {old_stream.id} as metadata {old_stream.meta_id} does not exist" + ) + continue + + # Check if stream exists + stmt = select(TorrentStream).where( + TorrentStream.id == old_stream.id.lower() + ) + result = await session.exec(stmt) + existing_stream = result.first() + + # Transform stream data + stream_data = self.transform_torrent_stream(old_stream) + + if existing_stream: + for key, value in stream_data.items(): + setattr(existing_stream, key, value) + stream = existing_stream + else: + stream = TorrentStream(**stream_data) + session.add(stream) + + stream_id = stream.id + await session.commit() + + # Handle season and episodes if present + if old_stream.season: + # Delete existing season and episodes + existing_seasons = await session.exec( + select(Season).where( + Season.torrent_stream_id == stream_id + ) + ) + for existing_season in existing_seasons: + await session.delete(existing_season) + await session.commit() + + # Create new season and episodes + await self.migrate_season_and_episodes( + session, old_stream.season, stream_id + ) + + # Update relationships + await self.migrate_torrent_relationships( + session, old_stream, stream_id + ) + + processed += 1 + pbar.update(1) + + except Exception as e: + logger.exception( + f"Error processing torrent stream {old_stream.id}: {str(e)}" + ) + await session.rollback() + continue + + async def migrate_torrent_relationships( + self, session: AsyncSession, old_stream: OldTorrentStreams, stream_id: str + ): + """Migrate torrent stream relationships""" + try: + # Migrate languages + existing_languages = await session.exec( + select(TorrentLanguageLink.language_id).where( + TorrentLanguageLink.torrent_id == stream_id + ) + ) + existing_language_ids = set(existing_languages.all()) + added_languages = set() + for lang in old_stream.languages or []: + lang_id = self.resource_tracker.get_resource_id("language", lang) + if ( + lang_id + and lang_id not in existing_language_ids + and lang_id not in added_languages + ): + link = TorrentLanguageLink( + torrent_id=stream_id, language_id=lang_id + ) + session.add(link) + added_languages.add(lang_id) + + # Migrate announce URLs + existing_announces = await session.exec( + select(TorrentAnnounceLink.announce_id).where( + TorrentAnnounceLink.torrent_id == stream_id + ) + ) + existing_announce_ids = set(existing_announces.all()) + added_announces = set() + for url in set(old_stream.announce_list): + url_id = self.resource_tracker.get_resource_id("announce_url", url) + if ( + url_id + and url_id not in existing_announce_ids + and url_id not in added_announces + ): + link = TorrentAnnounceLink(torrent_id=stream_id, announce_id=url_id) + session.add(link) + 
+                    added_announces.add(url_id)
+
+            await session.commit()
+
+        except Exception as e:
+            logger.exception(
+                f"Error migrating relationships for stream {stream_id}: {str(e)}"
+            )
+            await session.rollback()
+            raise
+
+    async def migrate_season_and_episodes(
+        self, session: AsyncSession, old_season, stream_id: str
+    ):
+        """Migrate season and its episodes"""
+        try:
+            # Create season
+            season = Season(
+                torrent_stream_id=stream_id, season_number=old_season.season_number
+            )
+            session.add(season)
+            await session.commit()  # Commit to get season ID
+            await session.refresh(season, ["id"])
+
+            added_episodes = set()
+            # Create episodes
+            for old_ep in old_season.episodes or []:
+                if old_ep.filename and not is_video_file(old_ep.filename):
+                    logger.warning(
+                        f"Skipping non-video file {old_ep.filename} for episode in {stream_id}"
+                    )
+                    continue
+
+                if old_ep.episode_number in added_episodes:
+                    logger.warning(
+                        f"Skipping duplicate episode {old_ep.episode_number} for torrent {stream_id}"
+                    )
+                    continue
+                episode = Episode(
+                    season_id=season.id,
+                    episode_number=old_ep.episode_number,
+                    filename=old_ep.filename,
+                    size=old_ep.size,
+                    file_index=old_ep.file_index,
+                    title=old_ep.title,
+                    released=(
+                        old_ep.released.replace(tzinfo=timezone.utc)
+                        if old_ep.released
+                        else None
+                    ),
+                )
+                session.add(episode)
+                added_episodes.add(old_ep.episode_number)
+
+            await session.commit()
+        except Exception as e:
+            logger.error(
+                f"Error migrating season and episodes for stream {stream_id}: {str(e)}"
+            )
+            await session.rollback()
+            raise
+
+    async def migrate_tv_streams(self):
+        """Migrate TV streams using cursor-based pagination"""
+        total = await OldTVStreams.find().count()
+        if total == 0:
+            logger.info("No TV streams to migrate")
+            return
+
+        processed = 0
+        cursor = OldTVStreams.find()
+
+        with tqdm(total=total) as pbar:
+            pbar.set_description("Migrating TV streams")
+
+            async for old_stream in cursor:
+                try:
+                    async with AsyncSession(self.pg_engine) as session:
+                        # Track namespaces
+                        for namespace in old_stream.namespaces or ["mediafusion"]:
+                            self.resource_tracker.track_resource("namespace", namespace)
+
+                        await self.resource_tracker.ensure_resources(session)
+
+                        # Validate metadata exists
+                        result = await session.exec(
+                            select(BaseMetadata).where(
+                                BaseMetadata.id == old_stream.meta_id
+                            )
+                        )
+                        if not result.first():
+                            continue
+
+                        # Transform and insert TV stream
+                        stream_data = await self.transform_tv_stream(old_stream)
+
+                        # Check for existing stream with the same URL
+                        existing_stream = await session.exec(
+                            select(TVStream).where(
+                                TVStream.url == stream_data["url"],
+                                TVStream.ytId == stream_data["ytId"],
+                            )
+                        )
+                        existing_stream = existing_stream.first()
+
+                        if not existing_stream:
+                            stream = TVStream(**stream_data)
+                            session.add(stream)
+                            await session.commit()
+                            await session.refresh(stream, ["id"])
+                        else:
+                            stream = existing_stream
+
+                        # Add namespace relationships
+                        await self.migrate_tv_stream_namespaces(
+                            session, old_stream, stream.id
+                        )
+                        await session.commit()
+
+                        processed += 1
+                        pbar.update(1)
+
+                except Exception as e:
+                    logger.exception(f"Error processing TV stream: {str(e)}")
+                    await session.rollback()
+                    continue
+
+    async def migrate_tv_stream_namespaces(
+        self, session: AsyncSession, old_stream: OldTVStreams, stream_id: int
+    ):
+        """Migrate TV stream namespace relationships"""
+        # validate existing namespaces
+        stmt = select(TVStreamNamespaceLink.namespace_id).where(
+            TVStreamNamespaceLink.stream_id == stream_id
+        )
+        existing_namespaces = await session.exec(stmt)
+
existing_namespace_ids = set(existing_namespaces.all()) + + for namespace in old_stream.namespaces or ["mediafusion"]: + namespace_id = self.resource_tracker.get_resource_id("namespace", namespace) + if namespace_id and namespace_id not in existing_namespace_ids: + link = TVStreamNamespaceLink( + stream_id=stream_id, namespace_id=namespace_id + ) + session.add(link) + + await session.commit() + + @staticmethod + def transform_torrent_stream(old_stream: OldTorrentStreams) -> dict: + """Transform torrent stream to new format""" + return { + "id": old_stream.id.lower(), + "meta_id": old_stream.meta_id, + "torrent_name": old_stream.torrent_name, + "size": old_stream.size, + "filename": old_stream.filename, + "file_index": old_stream.file_index, + "source": old_stream.source, + "resolution": old_stream.resolution, + "codec": old_stream.codec, + "quality": old_stream.quality, + "audio": ( + old_stream.audio[0] + if isinstance(old_stream.audio, list) + else old_stream.audio + ), + "seeders": old_stream.seeders, + "is_blocked": old_stream.is_blocked, + "created_at": old_stream.created_at, + "updated_at": old_stream.updated_at, + "indexer_flag": ( + old_stream.indexer_flags[0] if old_stream.indexer_flags else "freeleech" + ), + } + + async def transform_tv_stream(self, old_stream: OldTVStreams) -> dict: + """Transform TV stream to new format""" + # Get next ID if not exists + if not hasattr(old_stream, "id") or not old_stream.id: + async with AsyncSession(self.pg_engine) as session: + result = await session.exec( + select(func.coalesce(func.max(TVStream.id), 0)) + ) + max_id = result.one() or 0 + stream_id = max_id + 1 + else: + try: + stream_id = int(old_stream.id) + except (ValueError, TypeError): + async with AsyncSession(self.pg_engine) as session: + result = await session.exec( + select(func.coalesce(func.max(TVStream.id), 0)) + ) + max_id = result.one() or 0 + stream_id = max_id + 1 + + return { + "id": stream_id, + "meta_id": old_stream.meta_id, + "name": old_stream.name, + "url": old_stream.url, + "ytId": old_stream.ytId, + "externalUrl": old_stream.externalUrl, + "source": old_stream.source, + "behaviorHints": old_stream.behaviorHints, + "country": old_stream.country, + "is_working": old_stream.is_working, + "test_failure_count": old_stream.test_failure_count, + "drm_key_id": old_stream.drm_key_id, + "drm_key": old_stream.drm_key, + "created_at": old_stream.created_at, + "updated_at": old_stream.updated_at, + } + + async def verify_migration(self) -> VerificationResult: + """Verify the migration by comparing document counts and sampling data""" + logger.info("Starting migration verification...") + + # Document count verification + await self.verify_counts() + + # Data sampling verification + await self.verify_samples() + + # Relationship verification + await self.verify_relationships() + + # Log verification results + self.log_verification_results() + + return self.verification_result + + async def verify_counts(self): + """Verify document counts between MongoDB and PostgreSQL""" + logger.info("Verifying document counts...") + + async with AsyncSession(self.pg_engine) as session: + # Metadata counts + mongo_movie_count = await OldMovieMetaData.count() + mongo_series_count = await OldSeriesMetaData.count() + mongo_tv_count = await OldTVMetaData.count() + + pg_movie_count = ( + await session.exec(select(func.count()).select_from(MovieMetadata)) + ).first() + pg_series_count = ( + await session.exec(select(func.count()).select_from(SeriesMetadata)) + ).first() + pg_tv_count = ( + await 
session.exec(select(func.count()).select_from(TVMetadata)) + ).first() + + # Stream counts + mongo_torrent_count = await OldTorrentStreams.count() + mongo_tv_streams_count = await OldTVStreams.count() + + pg_torrent_count = ( + await session.exec(select(func.count()).select_from(TorrentStream)) + ).first() + pg_tv_streams_count = ( + await session.exec(select(func.count()).select_from(TVStream)) + ).first() + + self.verification_result.counts.update( + { + "movies": (mongo_movie_count, pg_movie_count), + "series": (mongo_series_count, pg_series_count), + "tv": (mongo_tv_count, pg_tv_count), + "torrent_streams": (mongo_torrent_count, pg_torrent_count), + "tv_streams": (mongo_tv_streams_count, pg_tv_streams_count), + } + ) + + async def verify_samples(self, sample_size: int = 10): + """Verify data integrity by sampling records""" + logger.info("Verifying data samples...") + + collections = [ + (OldMovieMetaData, MovieMetadata, "movies"), + (OldSeriesMetaData, SeriesMetadata, "series"), + (OldTVMetaData, TVMetadata, "tv"), + ] + + for old_model, new_model, collection_name in collections: + failed_checks = [] + samples = await old_model.aggregate( + [{"$sample": {"size": sample_size}}], projection_model=old_model + ).to_list() + + async with AsyncSession(self.pg_engine) as session: + for sample in samples: + # Check base metadata + base_result = await session.exec( + select(BaseMetadata).where(BaseMetadata.id == sample.id) + ) + base_meta = base_result.first() + + # Check specific metadata + specific_result = await session.exec( + select(new_model).where(new_model.id == sample.id) + ) + specific_meta = specific_result.first() + + if not all([base_meta, specific_meta]): + failed_checks.append(f"Missing metadata for {sample.id}") + continue + + # Compare fields + if base_meta.title != sample.title or base_meta.year != sample.year: + failed_checks.append(f"Mismatch in base fields for {sample.id}") + + # Compare type-specific fields + if ( + hasattr(sample, "imdb_rating") + and specific_meta.imdb_rating != sample.imdb_rating + ): + failed_checks.append(f"Mismatch in imdb_rating for {sample.id}") + + self.verification_result.sample_checks[collection_name] = failed_checks + + async def verify_relationships(self): + """Verify relationship integrity""" + logger.info("Verifying relationships...") + + async with AsyncSession(self.pg_engine) as session: + # Verify genre relationships + genre_issues = [] + genre_links = await session.exec(select(MediaGenreLink)) + for link in genre_links: + meta = ( + await session.exec( + select(BaseMetadata).where(BaseMetadata.id == link.media_id) + ) + ).first() + genre = ( + await session.exec(select(Genre).where(Genre.id == link.genre_id)) + ).first() + if not all([meta, genre]): + genre_issues.append( + f"Invalid genre link: {link.media_id}-{link.genre_id}" + ) + + # Verify torrent stream relationships + stream_issues = [] + torrent_streams = await session.exec(select(TorrentStream)) + for stream in torrent_streams: + meta = ( + await session.exec( + select(BaseMetadata).where(BaseMetadata.id == stream.meta_id) + ) + ).first() + if not meta: + stream_issues.append(f"Invalid metadata reference: {stream.id}") + + self.verification_result.relationship_checks.update( + {"genres": genre_issues, "streams": stream_issues} + ) + + def log_verification_results(self): + """Log verification results""" + logger.info("\nVerification Results:") + logger.info("=" * 50) + + # Log count comparisons + logger.info("\nDocument Counts:") + for category, ( + mongo_count, + pg_count, + ) 
in self.verification_result.counts.items(): + status = "✅" if mongo_count == pg_count else "❌" + logger.info( + f"{status} {category}: MongoDB={mongo_count}, PostgreSQL={pg_count}" + ) + + # Log sample check results + logger.info("\nSample Checks:") + for category, issues in self.verification_result.sample_checks.items(): + status = "✅" if not issues else "❌" + logger.info(f"{status} {category}: {len(issues)} issues") + for issue in issues: + logger.info(f" - {issue}") + + # Log relationship check results + logger.info("\nRelationship Checks:") + for category, issues in self.verification_result.relationship_checks.items(): + status = "✅" if not issues else "❌" + logger.info(f"{status} {category}: {len(issues)} issues") + for issue in issues: + logger.info(f" - {issue}") + + +@app.command() +def migrate( + mongo_uri: str = typer.Option(..., help="MongoDB connection URI"), + postgres_uri: str = typer.Option(..., help="PostgreSQL connection URI"), + batch_size: int = typer.Option(1000, help="Batch size for processing documents"), + skip_verification: bool = typer.Option( + False, help="Skip verification after migration" + ), +): + """ + Migrate data from MongoDB to PostgreSQL + """ + + async def run_migration(): + migration = DatabaseMigration(mongo_uri, postgres_uri, batch_size) + try: + await migration.init_connections() + await migration.initialize_resources() + + # Migrate data + # await migration.migrate_metadata() + await migration.migrate_torrent_streams() + await migration.migrate_tv_streams() + + # Verify migration + if not skip_verification: + verification_result = await migration.verify_migration() + + # Check for critical issues + if any( + len(issues) > 0 + for issues in verification_result.relationship_checks.values() + ): + logger.error("Critical issues found during verification!") + raise typer.Exit(code=1) + + logger.info("Migration completed successfully!") + except Exception as e: + logger.error(f"Migration failed: {str(e)}") + logger.exception("Detailed error:") + raise typer.Exit(code=1) + finally: + await migration.close_connections() + + typer.echo("Starting migration...") + asyncio.run(run_migration()) + + +@app.command() +def verify( + mongo_uri: str = typer.Option(..., help="MongoDB connection URI"), + postgres_uri: str = typer.Option(..., help="PostgreSQL connection URI"), +): + """ + Verify migration between MongoDB and PostgreSQL + """ + + async def run_verification(): + migration = DatabaseMigration(mongo_uri, postgres_uri) + try: + await migration.init_connections() + await migration.verify_migration() + finally: + await migration.close_connections() + + typer.echo("Starting verification...") + asyncio.run(run_verification()) + + +@app.command() +def reset( + postgres_uri: str = typer.Option(..., help="PostgreSQL connection URI"), +): + """ + Reset PostgreSQL database + """ + + async def run_reset(): + migration = DatabaseMigration("", postgres_uri) + try: + await migration.init_connections(connect_mongo=False) + await migration.reset_database() + finally: + await migration.close_connections() + + typer.echo("Resetting database...") + asyncio.run(run_reset()) + + +if __name__ == "__main__": + app() From b49f28349c43488828cf5d5642ca4dba727b6ab8 Mon Sep 17 00:00:00 2001 From: mhdzumair Date: Tue, 19 Nov 2024 15:23:19 +0530 Subject: [PATCH 2/7] update dependencies --- Pipfile.lock | 234 +++++++++++++++++++++++++++++++-------------------- 1 file changed, 145 insertions(+), 89 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index 1060af06..02eba0f9 100644 --- 
a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "5f83ba895ec53953a40a258502afc31ad2217142fadf2549634cf7f5c980d76c" + "sha256": "05a828c1c95aadefe019688aaf19619d19bc69c89e92f9e88514f965c5b9f64b" }, "pipfile-spec": 6, "requires": { @@ -41,85 +41,85 @@ }, "aiohttp": { "hashes": [ - "sha256:08ebe7a1d6c1e5ca766d68407280d69658f5f98821c2ba6c41c63cabfed159af", - "sha256:0a90a0dc4b054b5af299a900bf950fe8f9e3e54322bc405005f30aa5cacc5c98", - "sha256:0cba0b8d25aa2d450762f3dd6df85498f5e7c3ad0ddeb516ef2b03510f0eea32", - "sha256:0ebdf5087e2ce903d8220cc45dcece90c2199ae4395fd83ca616fcc81010db2c", - "sha256:10a5f91c319d9d4afba812f72984816b5fcd20742232ff7ecc1610ffbf3fc64d", - "sha256:122768e3ae9ce74f981b46edefea9c6e5a40aea38aba3ac50168e6370459bf20", - "sha256:14eb6c628432720e41b4fab1ada879d56cfe7034159849e083eb536b4c2afa99", - "sha256:177b000efaf8d2f7012c649e8aee5b0bf488677b1162be5e7511aa4f9d567607", - "sha256:1c2496182e577042e0e07a328d91c949da9e77a2047c7291071e734cd7a6e780", - "sha256:1e33a7eddcd07545ccf5c3ab230f60314a17dc33e285475e8405e26e21f02660", - "sha256:2793d3297f3e49015140e6d3ea26142c967e07998e2fb00b6ee8d041138fbc4e", - "sha256:2914061f5ca573f990ec14191e6998752fa8fe50d518e3405410353c3f44aa5d", - "sha256:2adb967454e10e69478ba4a8d8afbba48a7c7a8619216b7c807f8481cc66ddfb", - "sha256:2b02a68b9445c70d7f5c8b578c5f5e5866b1d67ca23eb9e8bc8658ae9e3e2c74", - "sha256:3129151378f858cdc4a0a4df355c9a0d060ab49e2eea7e62e9f085bac100551b", - "sha256:32334f35824811dd20a12cc90825d000e6b50faaeaa71408d42269151a66140d", - "sha256:33af11eca7bb0f5c6ffaf5e7d9d2336c2448f9c6279b93abdd6f3c35f9ee321f", - "sha256:34f37c59b12bc3afc52bab6fcd9cd3be82ff01c4598a84cbea934ccb3a9c54a0", - "sha256:3666c750b73ce463a413692e3a57c60f7089e2d9116a2aa5a0f0eaf2ae325148", - "sha256:374baefcb1b6275f350da605951f5f02487a9bc84a574a7d5b696439fabd49a3", - "sha256:382f853516664d2ebfc75dc01da4a10fdef5edcb335fe7b45cf471ce758ecb18", - "sha256:3b1f4844909321ef2c1cee50ddeccbd6018cd8c8d1ddddda3f553e94a5859497", - "sha256:3f617a48b70f4843d54f52440ea1e58da6bdab07b391a3a6aed8d3b311a4cc04", - "sha256:435f7a08d8aa42371a94e7c141205a9cb092ba551084b5e0c57492e6673601a3", - "sha256:44b69c69c194ffacbc50165911cf023a4b1b06422d1e1199d3aea82eac17004e", - "sha256:486273d3b5af75a80c31c311988931bdd2a4b96a74d5c7f422bad948f99988ef", - "sha256:4a23475d8d5c56e447b7752a1e2ac267c1f723f765e406c81feddcd16cdc97bc", - "sha256:4c979fc92aba66730b66099cd5becb42d869a26c0011119bc1c2478408a8bf7a", - "sha256:4d7fad8c456d180a6d2f44c41cfab4b80e2e81451815825097db48b8293f59d5", - "sha256:50e0aee4adc9abcd2109c618a8d1b2c93b85ac277b24a003ab147d91e068b06d", - "sha256:556564d89e2f4a6e8fe000894c03e4e84cf0b6cfa5674e425db122633ee244d1", - "sha256:5587da333b7d280a312715b843d43e734652aa382cba824a84a67c81f75b338b", - "sha256:57993f406ce3f114b2a6756d7809be3ffd0cc40f33e8f8b9a4aa1b027fd4e3eb", - "sha256:5d6e069b882c1fdcbe5577dc4be372eda705180197140577a4cddb648c29d22e", - "sha256:5d878a0186023ac391861958035174d0486f3259cabf8fd94e591985468da3ea", - "sha256:5d90b5a3b0f32a5fecf5dd83d828713986c019585f5cddf40d288ff77f366615", - "sha256:5e9a766c346b2ed7e88937919d84ed64b4ef489dad1d8939f806ee52901dc142", - "sha256:64e8f5178958a9954043bc8cd10a5ae97352c3f2fc99aa01f2aebb0026010910", - "sha256:66e58a2e8c7609a3545c4b38fb8b01a6b8346c4862e529534f7674c5265a97b8", - "sha256:68d1f46f9387db3785508f5225d3acbc5825ca13d9c29f2b5cce203d5863eb79", - "sha256:6ad9a7d2a3a0f235184426425f80bd3b26c66b24fd5fddecde66be30c01ebe6e", - "sha256:6e8e19a80ba194db5c06915a9df23c0c06e0e9ca9a4db9386a6056cca555a027", - 
"sha256:73a664478ae1ea011b5a710fb100b115ca8b2146864fa0ce4143ff944df714b8", - "sha256:766d0ebf8703d28f854f945982aa09224d5a27a29594c70d921c43c3930fe7ac", - "sha256:783741f534c14957fbe657d62a34b947ec06db23d45a2fd4a8aeb73d9c84d7e6", - "sha256:79efd1ee3827b2f16797e14b1e45021206c3271249b4d0025014466d416d7413", - "sha256:83a70e22e0f6222effe7f29fdeba6c6023f9595e59a0479edacfbd7de4b77bb7", - "sha256:85de9904bc360fd29a98885d2bfcbd4e02ab33c53353cb70607f2bea2cb92468", - "sha256:8d954ba0eae7f33884d27dc00629ca4389d249eb8d26ca07c30911257cae8c96", - "sha256:9075313f8e41b481e4cb10af405054564b0247dc335db5398ed05f8ec38787e2", - "sha256:97fba98fc5d9ccd3d33909e898d00f2494d6a9eec7cbda3d030632e2c8bb4d00", - "sha256:994cb893936dd2e1803655ae8667a45066bfd53360b148e22b4e3325cc5ea7a3", - "sha256:9aa4e68f1e4f303971ec42976fb170204fb5092de199034b57199a1747e78a2d", - "sha256:9b6d15adc9768ff167614ca853f7eeb6ee5f1d55d5660e3af85ce6744fed2b82", - "sha256:9bbb2dbc2701ab7e9307ca3a8fa4999c5b28246968e0a0202a5afabf48a42e22", - "sha256:9c8d1db4f65bbc9d75b7b271d68fb996f1c8c81a525263862477d93611856c2d", - "sha256:a7b0a1618060e3f5aa73d3526ca2108a16a1b6bf86612cd0bb2ddcbef9879d06", - "sha256:afa55e863224e664a782effa62245df73fdfc55aee539bed6efacf35f6d4e4b7", - "sha256:b339d91ac9060bd6ecdc595a82dc151045e5d74f566e0864ef3f2ba0887fec42", - "sha256:b470de64d17156c37e91effc109d3b032b39867000e2c126732fe01d034441f9", - "sha256:b4ec8afd362356b8798c8caa806e91deb3f0602d8ffae8e91d2d3ced2a90c35e", - "sha256:c28c1677ea33ccb8b14330560094cc44d3ff4fad617a544fd18beb90403fe0f1", - "sha256:c681f34e2814bc6e1eef49752b338061b94a42c92734d0be9513447d3f83718c", - "sha256:cccb2937bece1310c5c0163d0406aba170a2e5fb1f0444d7b0e7fdc9bd6bb713", - "sha256:cdc6f8dce09281ae534eaf08a54f0d38612398375f28dad733a8885f3bf9b978", - "sha256:d23854e5867650d40cba54d49956aad8081452aa80b2cf0d8c310633f4f48510", - "sha256:d2d942421cf3a1d1eceae8fa192f1fbfb74eb9d3e207d35ad2696bd2ce2c987c", - "sha256:d2f991c18132f3e505c108147925372ffe4549173b7c258cf227df1c5977a635", - "sha256:d3a2bcf6c81639a165da93469e1e0aff67c956721f3fa9c0560f07dd1e505116", - "sha256:d84930b4145991214602372edd7305fc76b700220db79ac0dd57d3afd0f0a1ca", - "sha256:de3b4d5fb5d69749104b880a157f38baeea7765c93d9cd3837cedd5b84729e10", - "sha256:e57a10aacedcf24666f4c90d03e599f71d172d1c5e00dcf48205c445806745b0", - "sha256:f1d06c8fd8b453c3e553c956bd3b8395100401060430572174bb7876dd95ad49", - "sha256:f833a80d9de9307d736b6af58c235b17ef7f90ebea7b9c49cd274dec7a66a2f1", - "sha256:fb0544a0e8294a5a5e20d3cacdaaa9a911d7c0a9150f5264aef36e7d8fdfa07e", - "sha256:ff5d22eece44528023254b595c670dfcf9733ac6af74c4b6cb4f6a784dc3870c" + "sha256:010bc9606f798eda8ef071759c7b163893071502bcaedc7d5dc49f9d8f12e553", + "sha256:03d53b0888f984f4f0c216a37577ee7e7b1ed1dac89cdd2fde61bf2ccb32009b", + "sha256:06defa9017ab50d215446ebbee294e07eb2fcee72d9a909a08192cfacbd43a08", + "sha256:0ccbe8ece8a7796ef41b86a3240034c5918d9b324c2ae48fa0be33565e297c64", + "sha256:0d2cea21ec05b832e9f6a2390b23d32ce5575f6cf4812bd171d4493f59c101fe", + "sha256:12bf9c139dfa004b65d2d71906abc593dcafe78a508f33d56c1ca9d87b18337f", + "sha256:1dd5b7947e23a08c70d4c1924809b91211f14136ffd13d303dc487913cfebfeb", + "sha256:1e32517c01905e0f4e665c3f3a495868ad996a32c243fcd917587d740253d589", + "sha256:1ff7afc3c461bd9217e2b8a90ddbe5edd94687d5a331c4ae6166dca5876d1a4b", + "sha256:262e45dbd7f1829bcb024259f65b2cf69d1ef5b37626af6955a1c487613aeb3a", + "sha256:2d978a95e4b58ef1fd937fbe347ab397c79ba24e17912595b54faafb88b9b937", + "sha256:365df6cf2ad144479ba0e0b58abdc5276923676d34da4c1c45613a80d2aac130", + 
"sha256:3e9fd9c11299d6d230eb2669fd1ed0238d33970e36b495b0432ace7f157fc931", + "sha256:4275160583df18158e0d6789797ad314a14ae611b98933fbe7d7a1c3dcc6bad4", + "sha256:474f7266a61d1c3218ef4ec0325747884b2d5a13fab5bff5dd3b55d9c849406a", + "sha256:49eb5a0338f141ef32299d48f1415486f47953d37b0c7fa6d778b73b66f3a7e2", + "sha256:4bc936d10b8fa3f2aa66e59e034085208b588442263400ddb042703d0db99421", + "sha256:4c0b3378dc294ad6ec6c038ed57164165e0b83ef5f61eee72f6eefccd7df34b8", + "sha256:4ef6eb1367046fb22085f10c5c84ea2efd0d836ad8088306d652ab1d743faf9e", + "sha256:4ff6105856ae688b29d5daaede1256f5e02e9d5cb3059f8f5ef55d975c2e6992", + "sha256:518578d6821c942362daa14a56f26b739abeede6e408b0b83e27dfcde17730f7", + "sha256:542a4610571b0affc6e13dda9357235f5f1f2ad9859acc69b188eb53901292d6", + "sha256:5cd60673be31449c63f59886f3581478bbdfaddd87e7394a4d73ad134d9be9b9", + "sha256:5ecdf43ddde709c336a655c8b3858c56af8f7402de2572001a5a99f7bebf2f78", + "sha256:635397b5b4de2397f8136f8fd15c8ebee560e36473195c7aa992ffb8e46acdd3", + "sha256:66e83a9a1131f0060aaedcc57f1a7e489898b6c3607eededccc7a9f80b95bdb4", + "sha256:696adff3594bd449e0fe287441062bdc6f5300928426275b39ed27884ba083a7", + "sha256:6dd1411ecfc070af4df129e81fe42c799d95d81c29c22d2c3e4341d974c38f1a", + "sha256:6ec84106c8b7ff347be06bf579c298a23b6d1d2225c57273a8cd502f257125d4", + "sha256:769457243dc4bc902d376cd14c5c7ec234a4faadb4f283dc2738f004cce9a9e1", + "sha256:7a360c18b2cb391fec9585ba1efc55150e2fbc6100308113117dfea521e810d8", + "sha256:7be4efe60e9bddf78ee165a296e80170147282081e1366f0580cf4cc0fb1182f", + "sha256:821c9c640d3dc456c6a7b005e38bc5af05326b6a08ce91a068719934d108a1bb", + "sha256:822dedad93947fcb1096cc41ee8fd32e9f652777561a37c740e5335699f01cea", + "sha256:83bd5aa621b732a0ca1aa3490abd2b010247c4677371a804431935aeedf26e74", + "sha256:88e681c0d17bb285d2ccbb73ae77ef86339b632ee7967044c2284411120b9730", + "sha256:89261fee04715083ef3b5a0d222b094021793c1728b8ff21da361c79f6384095", + "sha256:8ae8480148d696dae49126e97568333fc01493069ad46a94b82f69c7a33197ea", + "sha256:8feffa8396724116be5bc05bf4fcba0c738cbe908c82a95f71371e32b28cd2ca", + "sha256:94acecf2eee13a45f627ed25a28f5a7f2db66b90ff94cd7a1e9cc1ad32cddd43", + "sha256:9788781f57fb732426ae74b9955b899e677ce42b848e60a11be29358fb20c976", + "sha256:9a8b6b3c788a8a6f88f5ce23d729cfde7a2ccebbeb09db0822ef266de0445a27", + "sha256:9b41e0fb3b415beccd6d0c6e5f3ee34b7952cd76120a1db3e45507b83dc5ef81", + "sha256:9d95cce8bb010597b3f2217155befe4708e0538d3548aa08d640ebf54e3f57cb", + "sha256:a34c30e1461da3a69c5bdcfce44418b6f969e1e68ebf367edfa5eaab380abf7a", + "sha256:a468b1b9d5499cbfd0411f5d28adbe651c90508540fdaefb4b7a2171a837a88d", + "sha256:a51f983d91edae7777b5a2af8e5d83224ba01284502c6874a17647ad6cbf0211", + "sha256:a54424050d1eb36edfef913b1bc8552d52a37864c0ea7df3e1e764663e11053a", + "sha256:a550b4ff70d06c15057d75ddad89a3e7c496e0609d28c567c20b61cd1265c0a6", + "sha256:a60f8206818e3582c999c999c799ab068e14f1870ade47d1fe8536dbfd88010b", + "sha256:a7986fb988314fd2225c1ecab45fd457e1f2c097dcc3c0aacd2a7aec7486beb6", + "sha256:ab5c6a521b156edef13a57a6d524903c547573ff8101e3d1bbe9ee1b97267973", + "sha256:aef239c307f3a3f830933d612c0aef4ad4b3aa9ce5233a0954262a00f5c379f1", + "sha256:b10b316413c80a4dcc5228c092a8d019e4b75d4efbca8988cb5b67ae9fa56881", + "sha256:b169507c98b924fd68b82ae366c285daf6d22456835294c329c3226d61e1f69d", + "sha256:b71aab89800fa2eaeb28923ee05e7e56c28dab4ebdba524db06e963431bf6192", + "sha256:beaed1b2d03033dd301a7b67430f03c8255d6856a269c20995a0292de596519e", + "sha256:cb2d5a24586b508f658ddd710f7d4b7e4f5656cb5d569aeb1f432c1c3704347a", + 
"sha256:cb4c676ab99ca2dd231928d481e19cd540155dff36e70e613179c4927bd520b8", + "sha256:cbe3e356523d0b336543996f92a0e65f760be82447db21c95c60392c8075ff5c", + "sha256:cc2d64b1747efa183ced57b6bce53c9ea8e16e53419e389051b2a214ad0ed051", + "sha256:cfe8646a24856624c1eb7649da99333f0d7e75d9cf7c155ea870957d24b7c63c", + "sha256:d0fd6510c6d67d08ec80d9ba10cd340a8cfb0dd33436c858ed38d4564abb27c7", + "sha256:d40d9a740053cb7fef72442fa7bd699060ff4c710971ebdb8dd7c8b36417570f", + "sha256:d60255f3ed71aa14a2e75383543ca31bd362fdc7f0d2eafc060d85a9051598df", + "sha256:d68bb99bc6a4b0a3eceb95a246f5a0262e600e094b5178c2b1ab0f4bcbae6729", + "sha256:d97668595bf03299148ea968fed2195cc76ad063aeec8161731aa6a5dbc2f675", + "sha256:e0898a77298dc24eef368511d98e551e0b2db293fa9b40c982f4d5ab4d8d2a3a", + "sha256:e1e09bc44a1abbd96f55d15330d6cab80459cb8b06a0b656efd712ce47a3710d", + "sha256:e5786e5926f888ce3a996d38d9c9b8f9306f399edb1f1ca3ce7760dab9b1043c", + "sha256:e69d9869df50dd591228c62fbb3923d6124517d6bfc47a804492813888b497be", + "sha256:e7d182164aebad4e2faf2742ee7486d4af73d933461adbd8f183ac9b1837323c", + "sha256:eb4f1fe110332651c00d2df160978cf1be70896ed9e612ff7c7e67955091b2c4", + "sha256:ee081375d10fa2f3f7b0d050c8b9c1ae23190e1d9be256035bf8a41059c4df3a", + "sha256:f307632f3eaa676f8c2f5df11e4c00ad47dfa79b06cb2fa39156a4e9c6821bdb", + "sha256:f37ece590451ecffc815f2eb41f07191d1a31a0404361d1ae2ed532e05c86da4" ], "markers": "python_version >= '3.9'", - "version": "==3.11.2" + "version": "==3.11.4" }, "aioqbt": { "git": "git+https://github.com/mhdzumair/aioqbt.git", @@ -185,6 +185,62 @@ "markers": "python_version >= '3.8'", "version": "==1.3.0" }, + "asyncpg": { + "hashes": [ + "sha256:04ff0785ae7eed6cc138e73fc67b8e51d54ee7a3ce9b63666ce55a0bf095f7ba", + "sha256:05b185ebb8083c8568ea8a40e896d5f7af4b8554b64d7719c0eaa1eb5a5c3a70", + "sha256:0b448f0150e1c3b96cb0438a0d0aa4871f1472e58de14a3ec320dbb2798fb0d4", + "sha256:0f5712350388d0cd0615caec629ad53c81e506b1abaaf8d14c93f54b35e3595a", + "sha256:1292b84ee06ac8a2ad8e51c7475aa309245874b61333d97411aab835c4a2f737", + "sha256:1b11a555a198b08f5c4baa8f8231c74a366d190755aa4f99aacec5970afe929a", + "sha256:1b982daf2441a0ed314bd10817f1606f1c28b1136abd9e4f11335358c2c631cb", + "sha256:1c06a3a50d014b303e5f6fc1e5f95eb28d2cee89cf58384b700da621e5d5e547", + "sha256:1c198a00cce9506fcd0bf219a799f38ac7a237745e1d27f0e1f66d3707c84a5a", + "sha256:26683d3b9a62836fad771a18ecf4659a30f348a561279d6227dab96182f46144", + "sha256:29ff1fc8b5bf724273782ff8b4f57b0f8220a1b2324184846b39d1ab4122031d", + "sha256:3152fef2e265c9c24eec4ee3d22b4f4d2703d30614b0b6753e9ed4115c8a146f", + "sha256:3326e6d7381799e9735ca2ec9fd7be4d5fef5dcbc3cb555d8a463d8460607956", + "sha256:3356637f0bd830407b5597317b3cb3571387ae52ddc3bca6233682be88bbbc1f", + "sha256:393af4e3214c8fa4c7b86da6364384c0d1b3298d45803375572f415b6f673f38", + "sha256:46973045b567972128a27d40001124fbc821c87a6cade040cfcd4fa8a30bcdc4", + "sha256:51da377487e249e35bd0859661f6ee2b81db11ad1f4fc036194bc9cb2ead5056", + "sha256:574156480df14f64c2d76450a3f3aaaf26105869cad3865041156b38459e935d", + "sha256:578445f09f45d1ad7abddbff2a3c7f7c291738fdae0abffbeb737d3fc3ab8b75", + "sha256:5b290f4726a887f75dcd1b3006f484252db37602313f806e9ffc4e5996cfe5cb", + "sha256:5df69d55add4efcd25ea2a3b02025b669a285b767bfbf06e356d68dbce4234ff", + "sha256:5e0511ad3dec5f6b4f7a9e063591d407eee66b88c14e2ea636f187da1dcfff6a", + "sha256:64e899bce0600871b55368b8483e5e3e7f1860c9482e7f12e0a771e747988168", + "sha256:68d71a1be3d83d0570049cd1654a9bdfe506e794ecc98ad0873304a9f35e411e", + 
"sha256:6c2a2ef565400234a633da0eafdce27e843836256d40705d83ab7ec42074efb3", + "sha256:6f4e83f067b35ab5e6371f8a4c93296e0439857b4569850b178a01385e82e9ad", + "sha256:8b684a3c858a83cd876f05958823b68e8d14ec01bb0c0d14a6704c5bf9711773", + "sha256:9110df111cabc2ed81aad2f35394a00cadf4f2e0635603db6ebbd0fc896f46a4", + "sha256:915aeb9f79316b43c3207363af12d0e6fd10776641a7de8a01212afd95bdf0ed", + "sha256:9a0292c6af5c500523949155ec17b7fe01a00ace33b68a476d6b5059f9630305", + "sha256:9b6fde867a74e8c76c71e2f64f80c64c0f3163e687f1763cfaf21633ec24ec33", + "sha256:a3479a0d9a852c7c84e822c073622baca862d1217b10a02dd57ee4a7a081f708", + "sha256:aa403147d3e07a267ada2ae34dfc9324e67ccc4cdca35261c8c22792ba2b10cf", + "sha256:aca1548e43bbb9f0f627a04666fedaca23db0a31a84136ad1f868cb15deb6e3a", + "sha256:ae374585f51c2b444510cdf3595b97ece4f233fde739aa14b50e0d64e8a7a590", + "sha256:bc6d84136f9c4d24d358f3b02be4b6ba358abd09f80737d1ac7c444f36108454", + "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e", + "sha256:c42f6bb65a277ce4d93f3fba46b91a265631c8df7250592dd4f11f8b0152150f", + "sha256:c47806b1a8cbb0a0db896f4cd34d89942effe353a5035c62734ab13b9f938da3", + "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851", + "sha256:c7255812ac85099a0e1ffb81b10dc477b9973345793776b128a23e60148dd1af", + "sha256:c902a60b52e506d38d7e80e0dd5399f657220f24635fee368117b8b5fce1142e", + "sha256:db9891e2d76e6f425746c5d2da01921e9a16b5a71a1c905b13f30e12a257c4af", + "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0", + "sha256:f23b836dd90bea21104f69547923a02b167d999ce053f3d502081acea2fba15b", + "sha256:f59b430b8e27557c3fb9869222559f7417ced18688375825f8f12302c34e915e", + "sha256:f86b0e2cd3f1249d6fe6fd6cfe0cd4538ba994e2d8249c0491925629b9104d0f", + "sha256:fb622c94db4e13137c4c7f98834185049cc50ee01d8f657ef898b6407c7b9c50", + "sha256:fd4406d09208d5b4a14db9a9dbb311b6d7aeeab57bded7ed2f8ea41aeef39b34" + ], + "index": "pypi", + "markers": "python_full_version >= '3.8.0'", + "version": "==0.30.0" + }, "attrs": { "hashes": [ "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346", @@ -2454,12 +2510,12 @@ }, "scrapy": { "hashes": [ - "sha256:4be353d6abbb942a9f7e7614ca8b5f3d9037381176ac8d8859c8cac676e74fa0", - "sha256:dfbd565384fc3fffeba121f5a3a2d0899ac1f756d41432ca0879933fbfb3401d" + "sha256:c33e2dc7da42e727390bacb32dd9938a54ac210fa71972b5c392754f478669cd", + "sha256:d66d6e76009b12447604196875a463b61d10721140032a8084a0a52df7f4788f" ], "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==2.11.2" + "markers": "python_version >= '3.9'", + "version": "==2.12.0" }, "scrapy-playwright": { "hashes": [ @@ -2592,11 +2648,11 @@ }, "starlette": { "hashes": [ - "sha256:9834fd799d1a87fd346deb76158668cfa0b0d56f85caefe8268e2d97c3468b62", - "sha256:fbc189474b4731cf30fcef52f18a8d070e3f3b46c6a04c97579e85e6ffca942d" + "sha256:0e4ab3d16522a255be6b28260b938eae2482f98ce5cc934cb08dce8dc3ba5835", + "sha256:44cedb2b7c77a9de33a8b74b2b90e9f50d11fcf25d8270ea525ad71a25374ff7" ], "markers": "python_version >= '3.8'", - "version": "==0.41.2" + "version": "==0.41.3" }, "tenacity": { "hashes": [ @@ -2667,12 +2723,12 @@ }, "typer": { "hashes": [ - "sha256:d85fe0b777b2517cc99c8055ed735452f2659cd45e451507c76f48ce5c1d00e2", - "sha256:f1c7198347939361eec90139ffa0fd8b3df3a2259d5852a0f7400e476d95985c" + "sha256:5b59580fd925e89463a29d363e0a43245ec02765bde9fb77d39e5d0f29dd7157", + "sha256:9d444cb96cc268ce6f8b94e13b4335084cef4c079998a9f4851a90229a3bd25c" ], "index": "pypi", "markers": "python_version >= '3.7'", 
- "version": "==0.13.0" + "version": "==0.13.1" }, "types-python-dateutil": { "hashes": [ @@ -2687,7 +2743,7 @@ "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8" ], - "markers": "python_version < '3.13'", + "markers": "python_version >= '3.8'", "version": "==4.12.2" }, "tzlocal": { From a75106c4c76f8fe597025047f834c41f3bbd2b54 Mon Sep 17 00:00:00 2001 From: mhdzumair Date: Wed, 20 Nov 2024 06:33:55 +0530 Subject: [PATCH 3/7] remove commented line --- migrations/mongo_to_postgres.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/migrations/mongo_to_postgres.py b/migrations/mongo_to_postgres.py index 1cbc8cf6..40f79852 100644 --- a/migrations/mongo_to_postgres.py +++ b/migrations/mongo_to_postgres.py @@ -1061,7 +1061,7 @@ async def run_migration(): await migration.initialize_resources() # Migrate data - # await migration.migrate_metadata() + await migration.migrate_metadata() await migration.migrate_torrent_streams() await migration.migrate_tv_streams() From 10d372bd1580fa33dbf09b71dd758d9a26e4d430 Mon Sep 17 00:00:00 2001 From: mhdzumair Date: Sat, 30 Nov 2024 22:41:04 +0530 Subject: [PATCH 4/7] Added new db structures and migration scripts & implement some crud functions --- api/main.py | 325 ++-- db/config.py | 1 + db/data_models.py | 186 +++ db/database.py | 14 +- db/enums.py | 23 + db/models.py | 2 +- db/public_schemas.py | 87 + db/sql_crud.py | 419 +++++ db/{new_models.py => sql_models.py} | 401 +++-- migrations/mongo_to_postgres.py | 2405 ++++++++++++++++++--------- scrapers/rpdb.py | 8 +- utils/network.py | 12 +- utils/runtime_const.py | 2 + 13 files changed, 2802 insertions(+), 1083 deletions(-) create mode 100644 db/data_models.py create mode 100644 db/enums.py create mode 100644 db/public_schemas.py create mode 100644 db/sql_crud.py rename db/{new_models.py => sql_models.py} (56%) diff --git a/api/main.py b/api/main.py index 03d4f2c1..c8e77f8b 100644 --- a/api/main.py +++ b/api/main.py @@ -1,9 +1,8 @@ import asyncio -import json import logging from contextlib import asynccontextmanager from io import BytesIO -from typing import Literal, Annotated +from typing import Literal, Annotated, Optional import aiohttp from apscheduler.schedulers.asyncio import AsyncIOScheduler @@ -19,17 +18,22 @@ from fastapi.responses import RedirectResponse, StreamingResponse from fastapi.staticfiles import StaticFiles from pydantic import ValidationError +from sqlmodel.ext.asyncio.session import AsyncSession from starlette.responses import HTMLResponse from api import middleware from api.scheduler import setup_scheduler -from db import crud, database, schemas +from db import crud, database, schemas, public_schemas, sql_crud from db.config import settings +from db.database import get_async_session +from db.enums import MediaType +from db.redis_database import REDIS_ASYNC_CLIENT from db.schemas import SortingOption +from db.schemas import UserData from kodi.routes import kodi_router from metrics.routes import metrics_router from scrapers.routes import router as scrapers_router -from scrapers.rpdb import update_rpdb_posters, update_rpdb_poster +from scrapers.rpdb import update_rpdb_posters from streaming_providers import mapper from streaming_providers.routes import router as streaming_provider_router from streaming_providers.validator import validate_provider_credentials @@ -40,13 +44,11 @@ release_scheduler_lock, ) from utils.network import get_request_namespace, get_user_public_ip, 
get_user_data -from utils.parser import generate_manifest +from utils.parser import generate_manifest, fetch_downloaded_info_hashes from utils.runtime_const import ( DELETE_ALL_META, - DELETE_ALL_META_ITEM, TEMPLATES, ) -from db.redis_database import REDIS_ASYNC_CLIENT from utils.validation_helper import ( validate_mediaflow_proxy_credentials, validate_rpdb_token, @@ -248,42 +250,49 @@ async def get_manifest( @app.get( "/{secret_str}/catalog/{catalog_type}/{catalog_id}.json", - response_model=schemas.Metas, + response_model=public_schemas.Metas, response_model_exclude_none=True, response_model_by_alias=False, tags=["catalog"], ) @app.get( "/catalog/{catalog_type}/{catalog_id}.json", - response_model=schemas.Metas, + response_model=public_schemas.Metas, response_model_exclude_none=True, response_model_by_alias=False, tags=["catalog"], ) @app.get( "/{secret_str}/catalog/{catalog_type}/{catalog_id}/skip={skip}.json", - response_model=schemas.Metas, + response_model=public_schemas.Metas, response_model_exclude_none=True, response_model_by_alias=False, tags=["catalog"], ) @app.get( "/catalog/{catalog_type}/{catalog_id}/skip={skip}.json", - response_model=schemas.Metas, + response_model=public_schemas.Metas, response_model_exclude_none=True, response_model_by_alias=False, tags=["catalog"], ) @app.get( "/{secret_str}/catalog/{catalog_type}/{catalog_id}/genre={genre}.json", - response_model=schemas.Metas, + response_model=public_schemas.Metas, response_model_exclude_none=True, response_model_by_alias=False, tags=["catalog"], ) @app.get( "/catalog/{catalog_type}/{catalog_id}/genre={genre}.json", - response_model=schemas.Metas, + response_model=public_schemas.Metas, + response_model_exclude_none=True, + response_model_by_alias=False, + tags=["catalog"], +) +@app.get( + "/{secret_str}/catalog/{catalog_type}/{catalog_id}/skip={skip}&genre={genre}.json", + response_model=public_schemas.Metas, response_model_exclude_none=True, response_model_by_alias=False, tags=["catalog"], @@ -293,163 +302,192 @@ async def get_manifest( async def get_catalog( response: Response, request: Request, - catalog_type: Literal["movie", "series", "tv", "events"], + catalog_type: MediaType, catalog_id: str, skip: int = 0, genre: str = None, - user_data: schemas.UserData = Depends(get_user_data), -): - skip, genre = parse_genre_and_skip(genre, skip) - cache_key, is_watchlist_catalog = get_cache_key( - catalog_type, catalog_id, skip, genre, user_data + user_data: UserData = Depends(get_user_data), + session: AsyncSession = Depends(get_async_session), +) -> public_schemas.Metas: + """ + Enhanced catalog endpoint with support for watchlists and external services + """ + is_watchlist_catalog = user_data.streaming_provider and catalog_id.startswith( + user_data.streaming_provider.service ) + # Handle watchlist info hashes + info_hashes = None + if is_watchlist_catalog: + info_hashes = await fetch_downloaded_info_hashes( + user_data, await get_user_public_ip(request, user_data) + ) + if not info_hashes: + return public_schemas.Metas(metas=[]) + + namespace = get_request_namespace(request) + # Cache handling + cache_key = get_cache_key( + catalog_type, + catalog_id, + skip, + genre, + user_data, + is_watchlist_catalog, + namespace, + ) if cache_key: response.headers.update(const.CACHE_HEADERS) - if cached_data := await REDIS_ASYNC_CLIENT.get(cache_key): + cached_data = await REDIS_ASYNC_CLIENT.get(cache_key) + if cached_data: try: - metas = schemas.Metas.model_validate_json(cached_data) + metas = 
public_schemas.Metas.model_validate_json(cached_data) return await update_rpdb_posters(metas, user_data, catalog_type) except ValidationError: pass else: response.headers.update(const.NO_CACHE_HEADERS) - metas = await fetch_metas( - catalog_type, catalog_id, genre, skip, user_data, request, is_watchlist_catalog + # Get metadata list + metas = await sql_crud.get_catalog_meta_list( + session=session, + catalog_type=catalog_type, + catalog_id=catalog_id, + user_data=user_data, + skip=skip, + genre=genre, + namespace=namespace, + is_watchlist_catalog=is_watchlist_catalog, + info_hashes=info_hashes, ) + # Handle watchlist special case + if ( + is_watchlist_catalog + and catalog_type == MediaType.MOVIE + and metas.metas + and mapper.DELETE_ALL_WATCHLIST_FUNCTIONS.get( + user_data.streaming_provider.service + ) + ): + delete_all_meta = DELETE_ALL_META.model_copy() + delete_all_meta.id = delete_all_meta.id.format( + user_data.streaming_provider.service + ) + metas.metas.insert(0, delete_all_meta) + + # Cache result if applicable if cache_key: await REDIS_ASYNC_CLIENT.set( cache_key, - metas.model_dump_json(exclude_none=True, by_alias=True), + metas.model_dump_json(exclude_none=True), ex=settings.meta_cache_ttl, ) return await update_rpdb_posters(metas, user_data, catalog_type) -def parse_genre_and_skip(genre: str, skip: int) -> tuple[int, str]: - if genre and "&" in genre: - genre, skip = genre.split("&") - skip = skip.split("=")[1] if "=" in skip else "0" - skip = int(skip) if skip and skip.isdigit() else 0 - return skip, genre - - def get_cache_key( - catalog_type: str, + catalog_type: MediaType, catalog_id: str, skip: int, - genre: str, - user_data: schemas.UserData, -) -> tuple[str, bool]: - cache_key = f"{catalog_type}_{catalog_id}_{skip}_{genre}_catalog" - is_watchlist_catalog = False + genre: Optional[str], + user_data: UserData, + is_watchlist: bool, + namespace: str, +) -> Optional[str]: + """Generate cache key for catalog queries""" + if is_watchlist or catalog_type == MediaType.EVENTS: + return None - if user_data.streaming_provider and catalog_id.startswith( - user_data.streaming_provider.service - ): - cache_key = None - is_watchlist_catalog = True - elif catalog_type == "events": - cache_key = None - elif catalog_type in ["movie", "series"]: - cache_key += "_" + "_".join( - user_data.nudity_filter + user_data.certification_filter - ) + key_parts = [catalog_type.value, catalog_id, str(skip), genre or ""] - return cache_key, is_watchlist_catalog + if catalog_type in [MediaType.MOVIE, MediaType.SERIES]: + key_parts.extend(user_data.nudity_filter + user_data.certification_filter) + if catalog_type == MediaType.TV: + key_parts.append(namespace) + return f"catalog:{':'.join(key_parts)}" -async def fetch_metas( - catalog_type: str, - catalog_id: str, - genre: str, - skip: int, - user_data: schemas.UserData, - request: Request, - is_watchlist_catalog: bool, -) -> schemas.Metas: - metas = schemas.Metas() - - if catalog_type == "tv": - metas.metas.extend( - await crud.get_tv_meta_list( - namespace=get_request_namespace(request), genre=genre, skip=skip - ) - ) - elif catalog_type == "events": - metas.metas.extend(await crud.get_events_meta_list(genre, skip)) - else: - user_ip = await get_user_public_ip(request, user_data) - metas.metas.extend( - await crud.get_meta_list( - user_data, - catalog_type, - catalog_id, - is_watchlist_catalog, - skip, - user_ip=user_ip, - genre=genre, - ) - ) - if ( - is_watchlist_catalog - and catalog_type == "movie" - and metas.metas - and 
mapper.DELETE_ALL_WATCHLIST_FUNCTIONS.get( - user_data.streaming_provider.service - ) - ): - delete_all_meta = DELETE_ALL_META.model_copy() - delete_all_meta.id = delete_all_meta.id.format( - user_data.streaming_provider.service - ) - metas.metas.insert(0, delete_all_meta) - - return metas +async def get_search_cache_key( + catalog_type: MediaType, + catalog_id: str, + search_query: str, + user_data: UserData, + namespace: str, +) -> str: + """Generate cache key for search results""" + key_parts = [catalog_type.value, catalog_id, search_query] + if catalog_type in [MediaType.MOVIE, MediaType.SERIES]: + key_parts.extend(user_data.nudity_filter + user_data.certification_filter) + if catalog_type == MediaType.TV: + key_parts.append(namespace) + return f"search:{':'.join(key_parts)}" @app.get( "/{secret_str}/catalog/{catalog_type}/{catalog_id}/search={search_query}.json", tags=["search"], - response_model=schemas.Metas, + response_model=public_schemas.Metas, response_model_exclude_none=True, response_model_by_alias=False, ) @app.get( "/catalog/{catalog_type}/{catalog_id}/search={search_query}.json", tags=["search"], - response_model=schemas.Metas, + response_model=public_schemas.Metas, response_model_exclude_none=True, response_model_by_alias=False, ) @wrappers.auth_required async def search_meta( request: Request, - catalog_type: Literal["movie", "series", "tv"], - catalog_id: Literal[ - "mediafusion_search_movies", - "mediafusion_search_series", - "mediafusion_search_tv", - ], + catalog_type: MediaType, + catalog_id: str, search_query: str, - user_data: schemas.UserData = Depends(get_user_data), -): - logging.debug("search for catalog_id: %s", catalog_id) + user_data: UserData = Depends(get_user_data), + session: AsyncSession = Depends(get_async_session), +) -> public_schemas.Metas: + """ + Enhanced search endpoint with caching and efficient text search + """ + if not search_query.strip(): + return public_schemas.Metas(metas=[]) + + namespace = get_request_namespace(request) + # Generate cache key + cache_key = await get_search_cache_key( + catalog_type, catalog_id, search_query, user_data, namespace + ) - if catalog_type == "tv": - return await crud.process_tv_search_query( - search_query, namespace=get_request_namespace(request) - ) + # Try to get from cache + cached_data = await REDIS_ASYNC_CLIENT.get(cache_key) + if cached_data: + try: + metas = public_schemas.Metas.model_validate_json(cached_data) + return await update_rpdb_posters(metas, user_data, catalog_type) + except ValidationError: + pass + + # Perform search + metas = await sql_crud.search_metadata( + session=session, + catalog_type=catalog_type, + search_query=search_query, + user_data=user_data, + namespace=namespace, + ) - metadata = await crud.process_search_query(search_query, catalog_type, user_data) - return await update_rpdb_posters( - schemas.Metas.model_validate(metadata), user_data, catalog_type + # Cache the results (5 minutes for search results) + await REDIS_ASYNC_CLIENT.set( + cache_key, + metas.model_dump_json(exclude_none=True), + ex=300, # 5 minutes cache ) + return await update_rpdb_posters(metas, user_data, catalog_type) + @app.get( "/{secret_str}/meta/{catalog_type}/{meta_id}.json", @@ -467,50 +505,25 @@ async def search_meta( ) @wrappers.auth_required async def get_meta( - catalog_type: Literal["movie", "series", "tv", "events"], + catalog_type: MediaType, meta_id: str, - user_data: schemas.UserData = Depends(get_user_data), -): - cache_key = f"{catalog_type}_{meta_id}_meta" - - if catalog_type in ["movie", 
"series"]: - cache_key += "_" + "_".join( - user_data.nudity_filter + user_data.certification_filter - ) - - # Try retrieving the cached data - cached_data = await REDIS_ASYNC_CLIENT.get(cache_key) - if cached_data: - try: - meta_data = schemas.MetaItem.model_validate_json(cached_data) - return await update_rpdb_poster(meta_data, user_data, catalog_type) - except ValidationError: - pass - - if catalog_type == "movie": - if meta_id.startswith("dl"): - delete_all_meta_item = DELETE_ALL_META_ITEM.copy() - delete_all_meta_item["meta"]["_id"] = meta_id - data = delete_all_meta_item - else: - data = await crud.get_movie_meta(meta_id, user_data) - elif catalog_type == "series": - data = await crud.get_series_meta(meta_id, user_data) - elif catalog_type == "events": - data = await crud.get_event_meta(meta_id) - else: - data = await crud.get_tv_meta(meta_id) - - # Cache the data with a TTL of 30 minutes - # If the data is not found, cached the empty data to avoid db query. - await REDIS_ASYNC_CLIENT.set(cache_key, json.dumps(data, default=str), ex=1800) + session: AsyncSession = Depends(get_async_session), +) -> schemas.MetaItem: + metadata = await sql_crud.get_metadata_by_type(session, catalog_type, meta_id) + if not metadata: + raise HTTPException(status_code=404, detail="Metadata not found") - if not data: - raise HTTPException(status_code=404, detail="Meta ID not found.") + if catalog_type == MediaType.SERIES: + # For series, also fetch episodes + seasons = await sql_crud.series_metadata.get_episodes_data(session, meta_id) + return { + "meta": { + **metadata.model_dump(), + "seasons": [season.model_dump() for season in seasons], + } + } - return await update_rpdb_poster( - schemas.MetaItem.model_validate(data), user_data, catalog_type - ) + return {"meta": metadata.model_dump()} @app.get( diff --git a/db/config.py b/db/config.py index 971154f6..dfab2751 100644 --- a/db/config.py +++ b/db/config.py @@ -42,6 +42,7 @@ class Settings(BaseSettings): # Database and Cache Settings mongo_uri: str + postgres_uri: str db_max_connections: int = 50 redis_url: str = "redis://redis-service:6379" redis_max_connections: int = 100 diff --git a/db/data_models.py b/db/data_models.py new file mode 100644 index 00000000..577ff9bf --- /dev/null +++ b/db/data_models.py @@ -0,0 +1,186 @@ +from datetime import datetime +from typing import List, Optional, Callable, Any +from pydantic import BaseModel, Field, field_validator + +from db.enums import MediaType, NudityStatus, IndexerType + + +def create_string_list_validator(attribute_name: str = "name") -> Callable: + """ + Creates a validator function for converting various inputs to a list of strings. + + Args: + attribute_name (str): The attribute to extract from dict/object (default: 'name') + + Returns: + Callable: A validator function that converts input to List[str] + """ + + def validator(v: Any) -> List[str]: + if not v: + return [] + + if isinstance(v, list): + return [ + # Handle dictionaries + ( + item.get(attribute_name) + if isinstance(item, dict) + # Handle objects + else ( + getattr(item, attribute_name) + if hasattr(item, attribute_name) + # Handle direct strings + else str(item) + ) + ) + for item in v + ] + + if isinstance(v, str): + return [v] + + raise ValueError( + f"Invalid input type. 
Expected list of strings, dicts with '{attribute_name}' key, " + f"or objects with '{attribute_name}' attribute" + ) + + return validator + + +class BaseMediaData(BaseModel): + """Base model for common metadata fields""" + + id: str + title: str + type: str + year: Optional[int] = None + poster: Optional[str] = None + is_poster_working: bool = True + is_add_title_to_poster: bool = False + background: Optional[str] = None + description: Optional[str] = None + runtime: Optional[str] = None + website: Optional[str] = None + created_at: datetime + updated_at: Optional[datetime] = None + + # Common relationship fields + genres: List[str] = Field(default_factory=list) + catalogs: List[str] = Field(default_factory=list) + alternate_titles: List[str] = Field(default_factory=list) + + # Validators using the helper function + _validate_genres = field_validator("genres", mode="before")( + create_string_list_validator() + ) + _validate_catalogs = field_validator("catalogs", mode="before")( + create_string_list_validator() + ) + _validate_alternate_titles = field_validator("alternate_titles", mode="before")( + create_string_list_validator("title") + ) + + class Config: + from_attributes = True + + +class MovieData(BaseModel): + """Movie metadata data model""" + + id: str + base_metadata: BaseMediaData | None = None + type: MediaType = MediaType.MOVIE + imdb_rating: Optional[float] = None + parent_guide_nudity_status: NudityStatus = NudityStatus.UNKNOWN + stars: List[str] = Field(default_factory=list) + parental_certificates: List[str] = Field(default_factory=list) + + _validate_stars = field_validator("stars", mode="before")( + create_string_list_validator() + ) + _validate_certificates = field_validator("parental_certificates", mode="before")( + create_string_list_validator() + ) + + class Config: + from_attributes = True + + +class SeriesData(BaseModel): + """Series metadata data model""" + + id: str + base_metadata: BaseMediaData | None = None + type: MediaType = MediaType.SERIES + end_year: Optional[int] = None + imdb_rating: Optional[float] = None + parent_guide_nudity_status: NudityStatus = NudityStatus.UNKNOWN + stars: List[str] = Field(default_factory=list) + parental_certificates: List[str] = Field(default_factory=list) + + # Validators using the helper function + _validate_stars = field_validator("stars", mode="before")( + create_string_list_validator() + ) + _validate_certificates = field_validator("parental_certificates", mode="before")( + create_string_list_validator() + ) + + class Config: + from_attributes = True + + +class TVData(BaseModel): + """TV metadata data model""" + + id: str + base_metadata: BaseMediaData | None = None + type: MediaType = MediaType.TV + country: Optional[str] = None + tv_language: Optional[str] = None + logo: Optional[str] = None + + class Config: + from_attributes = True + + +class TorrentStreamData(BaseModel): + """Torrent stream data model""" + + id: str + torrent_name: str + size: int + filename: Optional[str] = None + file_index: Optional[int] = None + source: str + resolution: Optional[str] = None + codec: Optional[str] = None + quality: Optional[str] = None + audio: Optional[str] = None + seeders: Optional[int] = None + is_blocked: bool = False + indexer_flag: IndexerType = IndexerType.FREELEACH + created_at: datetime + updated_at: Optional[datetime] = None + + class Config: + from_attributes = True + + +class TVStreamData(BaseModel): + """TV stream data model""" + + id: int + name: str + url: Optional[str] = None + ytId: Optional[str] = None + 
externalUrl: Optional[str] = None + source: str + country: Optional[str] = None + is_working: bool = True + test_failure_count: int = 0 + drm_key_id: Optional[str] = None + drm_key: Optional[str] = None + created_at: datetime + updated_at: Optional[datetime] = None diff --git a/db/database.py b/db/database.py index f42fbdd3..b96b5621 100644 --- a/db/database.py +++ b/db/database.py @@ -1,8 +1,10 @@ import asyncio import logging -from motor.motor_asyncio import AsyncIOMotorClient from beanie import init_beanie +from motor.motor_asyncio import AsyncIOMotorClient +from sqlalchemy.ext.asyncio import create_async_engine +from sqlmodel.ext.asyncio.session import AsyncSession from db.config import settings from db.models import ( @@ -48,3 +50,13 @@ async def init(): else: logging.error("Failed to initialize database after several attempts.") raise e + + +ASYNC_ENGINE = create_async_engine( + settings.postgres_uri, echo=True, pool_size=20, max_overflow=30 +) + + +async def get_async_session(): + async with AsyncSession(ASYNC_ENGINE, expire_on_commit=False) as session: + yield session diff --git a/db/enums.py b/db/enums.py new file mode 100644 index 00000000..22635d09 --- /dev/null +++ b/db/enums.py @@ -0,0 +1,23 @@ +from enum import StrEnum + + +# Enums +class MediaType(StrEnum): + MOVIE = "movie" + SERIES = "series" + TV = "tv" + EVENTS = "events" + + +class IndexerType(StrEnum): + FREELEACH = "freeleech" + SEMI_PRIVATE = "semi-private" + PRIVATE = "private" + + +class NudityStatus(StrEnum): + NONE = "None" + MILD = "Mild" + MODERATE = "Moderate" + SEVERE = "Severe" + UNKNOWN = "Unknown" diff --git a/db/models.py b/db/models.py index 9e0b54e1..93707394 100644 --- a/db/models.py +++ b/db/models.py @@ -35,7 +35,6 @@ class TorrentStreams(Document): announce_list: list[str] languages: list[str] source: str - catalog: list[str] created_at: datetime = Field(default_factory=datetime.now) updated_at: Optional[datetime] = None resolution: Optional[str] = None @@ -152,6 +151,7 @@ class MediaFusionMetaData(Document): description: Optional[str] = None runtime: Optional[str] = None website: Optional[str] = None + catalog: list[str] = Field(default_factory=list) genres: Optional[list[str]] = Field(default_factory=list) created_at: datetime = Field(default_factory=datetime.now) last_updated_at: datetime = Field(default_factory=datetime.now) diff --git a/db/public_schemas.py b/db/public_schemas.py new file mode 100644 index 00000000..c50841ac --- /dev/null +++ b/db/public_schemas.py @@ -0,0 +1,87 @@ +from typing import Literal, Optional + +from pydantic import BaseModel, Field, model_validator, ConfigDict + +from db.config import settings + + +class Catalog(BaseModel): + id: str + name: str + type: str + + +class Video(BaseModel): + id: str + title: str + released: str + season: int | None = None + episode: int | None = None + + +class Meta(BaseModel): + id: str + name: str = Field(alias="title") + type: str + poster: str | None = None + background: str | None = None + videos: list[Video] | None = None + country: str | None = None + language: str | None = Field(None, alias="tv_language") + logo: str | None = None + genres: list[str] | None = None + description: str | None = None + runtime: str | None = None + website: str | None = None + imdbRating: str | float | None = Field(None, alias="imdb_rating") + releaseInfo: str | int | None = Field(None, alias="year") + end_year: int | None = Field(None, exclude=True) + + model_config = ConfigDict(populate_by_name=True) + + @model_validator(mode="after") + def 
parse_meta(self) -> "Meta": + if self.releaseInfo: + self.releaseInfo = ( + f"{self.releaseInfo}-" + str(self.end_year) + if self.end_year + else "" if self.type == "series" else str(self.releaseInfo) + ) + if self.imdbRating: + self.imdbRating = str(self.imdbRating) + if self.poster is None: + self.poster = f"{settings.poster_host_url}/poster/{self.type}/{self.id}.jpg" + + return self + + +class MetaItem(BaseModel): + meta: Meta + + +class Metas(BaseModel): + metas: list[Meta] = Field(default_factory=list) + + +class StreamBehaviorHints(BaseModel): + notWebReady: Optional[bool] = None + bingeGroup: Optional[str] = None + proxyHeaders: Optional[dict[Literal["request", "response"], dict]] = None + filename: Optional[str] = None + videoSize: Optional[int] = None + + +class Stream(BaseModel): + name: str + description: str + infoHash: str | None = None + fileIdx: int | None = None + url: str | None = None + ytId: str | None = None + externalUrl: str | None = None + behaviorHints: StreamBehaviorHints | None = None + sources: list[str] | None = None + + +class Streams(BaseModel): + streams: Optional[list[Stream]] = Field(default_factory=list) diff --git a/db/sql_crud.py b/db/sql_crud.py new file mode 100644 index 00000000..3490f879 --- /dev/null +++ b/db/sql_crud.py @@ -0,0 +1,419 @@ +import logging + +from sqlalchemy import func +from sqlalchemy.orm import joinedload, selectinload +from sqlmodel import select, or_ +from sqlmodel.ext.asyncio.session import AsyncSession +from sqlmodel.sql._expression_select_cls import Select + +from db import data_models, public_schemas +from db.schemas import UserData +from db.sql_models import ( + BaseMetadata, + MovieMetadata, + SeriesMetadata, + TVMetadata, + TVStream, + MediaGenreLink, + Genre, + MediaCatalogLink, + Catalog, + MediaParentalCertificateLink, + ParentalCertificate, + TVStreamNamespaceLink, + Namespace, + MediaType, + AkaTitle, + TorrentStream, +) +from db.redis_database import REDIS_ASYNC_CLIENT + +logger = logging.getLogger(__name__) + +from abc import ABC +from typing import Optional, List, TypeVar, Generic, Type, Any + +B = TypeVar("B", bound="BaseQueryBuilder") + + +class CatalogBaseQueryBuilder(ABC, Generic[B]): + """Base class for query builders with common functionality""" + + def __init__( + self, + catalog_type: MediaType, + user_data: UserData, + ): + self.catalog_type = catalog_type + self.user_data = user_data + self.base_query = select(BaseMetadata.id, BaseMetadata.title) + + def add_type_filter(self) -> B: + """Add media type filter""" + self.base_query = self.base_query.where(BaseMetadata.type == self.catalog_type) + return self + + def add_content_filters(self) -> B: + """Add user preference based content filters""" + if self.catalog_type in [MediaType.MOVIE, MediaType.SERIES]: + specific_model = ( + MovieMetadata + if self.catalog_type == MediaType.MOVIE + else SeriesMetadata + ) + self.base_query = self.base_query.join(specific_model) + + if "Disable" not in self.user_data.nudity_filter: + self.base_query = self.base_query.where( + specific_model.parent_guide_nudity_status.notin_( + self.user_data.nudity_filter + ) + ) + + if "Disable" not in self.user_data.certification_filter: + self.base_query = ( + self.base_query.join(MediaParentalCertificateLink) + .join(ParentalCertificate) + .where( + ParentalCertificate.name.notin_( + self.user_data.certification_filter + ) + ) + ) + return self + + def add_tv_filters(self, namespace: str) -> B: + """Add TV-specific filters""" + if self.catalog_type == MediaType.TV: + self.base_query 
= ( + self.base_query.join(TVMetadata) + .join(TVStream) + .join(TVStreamNamespaceLink) + .join(Namespace) + .where( + TVStream.is_working == True, + Namespace.name.in_([namespace, "mediafusion", None]), + ) + ) + return self + + def add_pagination(self, skip: int = 0, limit: int = 25) -> B: + """Add pagination""" + self.base_query = self.base_query.offset(skip).limit(limit) + return self + + def build(self) -> Select: + """Build the final query""" + return self.base_query + + +class CatalogQueryBuilder(CatalogBaseQueryBuilder["CatalogQueryBuilder"]): + """Builder for constructing optimized catalog queries""" + + def __init__( + self, catalog_type: MediaType, user_data: UserData, is_watchlist: bool = False + ): + super().__init__(catalog_type, user_data) + self.is_watchlist = is_watchlist + + def add_watchlist_filter(self, info_hashes: List[str]) -> "CatalogQueryBuilder": + """Add watchlist-specific filters""" + if self.is_watchlist and info_hashes: + self.base_query = self.base_query.join(TorrentStream).where( + TorrentStream.id.in_(info_hashes) + ) + return self + + def add_catalog_filter(self, catalog_id: str) -> "CatalogQueryBuilder": + """Add catalog-specific filters""" + if not self.is_watchlist: + self.base_query = ( + self.base_query.join(MediaCatalogLink) + .join(Catalog) + .where(Catalog.name == catalog_id) + ) + return self + + def add_genre_filter(self, genre: Optional[str]) -> "CatalogQueryBuilder": + """Add genre-specific filters""" + if genre: + self.base_query = ( + self.base_query.join(MediaGenreLink) + .join(Genre) + .where(Genre.name == genre) + ) + return self + + def add_sorting(self) -> "CatalogQueryBuilder": + """Add default sorting""" + self.base_query = self.base_query.order_by( + BaseMetadata.last_stream_added.desc() + ) + return self + + +class SearchQueryBuilder(CatalogBaseQueryBuilder["SearchQueryBuilder"]): + """Builder for constructing optimized search queries""" + + def __init__( + self, + catalog_type: MediaType, + user_data: UserData, + search_query: str, + ): + super().__init__(catalog_type, user_data) + self.search_query = search_query.lower() + + def add_text_search(self) -> "SearchQueryBuilder": + """Optimized text search for all languages""" + search_vector = func.plainto_tsquery("simple", self.search_query) + + # Build efficient subqueries + base_matches = ( + select(BaseMetadata.id) + .where( + or_( + BaseMetadata.title_tsv.op("@@")(search_vector), + func.similarity(func.lower(BaseMetadata.title), self.search_query) + > 0.3, + ) + ) + .subquery() + ) + + aka_matches = ( + select(AkaTitle.media_id) + .where( + or_( + AkaTitle.title_tsv.op("@@")(search_vector), + func.similarity(func.lower(AkaTitle.title), self.search_query) + > 0.3, + ) + ) + .subquery() + ) + + # Combine results efficiently + self.base_query = self.base_query.filter( + or_(BaseMetadata.id.in_(base_matches), BaseMetadata.id.in_(aka_matches)) + ).order_by( + func.greatest( + func.ts_rank_cd(BaseMetadata.title_tsv, search_vector), + func.similarity(func.lower(BaseMetadata.title), self.search_query), + ).desc() + ) + return self + + def add_torrent_stream_filter(self) -> "SearchQueryBuilder": + """Add torrent stream specific filters""" + if self.catalog_type in [MediaType.MOVIE, MediaType.SERIES]: + self.base_query = self.base_query.join(TorrentStream).where( + TorrentStream.is_blocked != True, + TorrentStream.meta_id == BaseMetadata.id, + ) + return self + + +async def get_catalog_meta_list( + session: AsyncSession, + catalog_type: MediaType, + catalog_id: str, + user_data: UserData, + 
skip: int = 0, + limit: int = 25, + genre: Optional[str] = None, + namespace: Optional[str] = None, + is_watchlist_catalog: bool = False, + info_hashes: Optional[List[str]] = None, +) -> public_schemas.Metas: + """Get metadata list for catalog with efficient filtering and pagination""" + query = ( + CatalogQueryBuilder(catalog_type, user_data, is_watchlist_catalog) + .add_type_filter() + .add_content_filters() + .add_watchlist_filter(info_hashes) + .add_catalog_filter(catalog_id) + .add_genre_filter(genre) + .add_tv_filters(namespace) + .add_sorting() + .add_pagination(skip, limit) + .build() + ) + + result = await session.exec(query) + data = result.unique().all() + metas = [ + public_schemas.Meta(id=meta[0], name=meta[1], type=catalog_type) + for meta in data + ] + return public_schemas.Metas(metas=metas) + + +async def search_metadata( + session: AsyncSession, + catalog_type: MediaType, + search_query: str, + user_data: UserData, + namespace: Optional[str] = None, + limit: int = 50, +) -> public_schemas.Metas: + """Search metadata with efficient filtering and ranking""" + query = ( + SearchQueryBuilder(catalog_type, user_data, search_query) + .add_type_filter() + .add_text_search() + .add_content_filters() + .add_torrent_stream_filter() + .add_tv_filters(namespace) + .add_pagination(limit=limit) + .build() + ) + + result = await session.exec(query) + data = result.unique().all() + metas = [ + public_schemas.Meta(id=meta[0], name=meta[1], type=catalog_type) + for meta in data + ] + return public_schemas.Metas(metas=metas) + + +T = TypeVar("T", data_models.MovieData, data_models.SeriesData, data_models.TVData) +M = TypeVar("M", MovieMetadata, SeriesMetadata, TVMetadata) + + +class MetadataRetriever(Generic[T, M]): + """Generic class for retrieving metadata with caching support""" + + def __init__( + self, + data_model: Type[T], + sql_model: Type[M], + media_type: MediaType, + cache_prefix: str, + ): + self.data_model = data_model + self.sql_model = sql_model + self.media_type = media_type + self.cache_prefix = cache_prefix + + async def _get_from_cache(self, media_id: str) -> Optional[T]: + """Retrieve metadata from cache""" + cached_data = await REDIS_ASYNC_CLIENT.get(f"{self.cache_prefix}:{media_id}") + if cached_data: + try: + return self.data_model.model_validate_json(cached_data) + except Exception as e: + logger.error(f"Error deserializing cached data for {media_id}: {e}") + return None + + async def _set_cache(self, media_id: str, data: T) -> None: + """Store metadata in cache""" + try: + await REDIS_ASYNC_CLIENT.set( + f"{self.cache_prefix}:{media_id}", + data.model_dump_json(exclude_none=True), + ex=86400, # 24 hours + ) + except Exception as e: + logger.error(f"Error caching data for {media_id}: {e}") + + async def _fetch_metadata( + self, session: AsyncSession, media_id: str + ) -> Optional[M]: + """Fetch type-specific metadata""" + query = ( + select(self.sql_model) + .where(self.sql_model.id == media_id) + .options( + joinedload(self.sql_model.base_metadata).options( + selectinload(BaseMetadata.genres), + selectinload(BaseMetadata.catalogs), + selectinload(BaseMetadata.aka_titles), + ) + ) + ) + + if self.media_type in [MediaType.MOVIE, MediaType.SERIES]: + query = query.options( + selectinload(self.sql_model.parental_certificates), + selectinload(self.sql_model.stars), + ) + + result = await session.exec(query) + return result.one_or_none() + + async def get_metadata( + self, session: AsyncSession, media_id: str, bypass_cache: bool = False + ) -> Optional[T]: + """Main 
method to retrieve metadata with caching""" + if not bypass_cache: + cached_data = await self._get_from_cache(media_id) + if cached_data: + return cached_data + + metadata = await self._fetch_metadata(session, media_id) + if not metadata: + return None + + # Construct the full metadata object + metadata = self.data_model.model_validate(metadata) + + await self._set_cache(media_id, metadata) + return metadata + + +class SeriesMetadataRetriever( + MetadataRetriever[data_models.SeriesData, SeriesMetadata] +): + """Series-specific metadata retriever with episode information""" + + async def get_metadata( + self, session: AsyncSession, media_id: str, bypass_cache: bool = False + ) -> Optional[data_models.SeriesData]: + """Fetch series metadata with episodes""" + metadata = await super().get_metadata(session, media_id, bypass_cache) + if not metadata: + return None + + # Fetch episode data + season_data = await self.get_season_data(session, media_id) + metadata.seasons = season_data + return metadata + + async def get_season_data(self, session: AsyncSession, series_id: str) -> list: + """Fetch season data for series""" + # TODO: Implement season data retrieval + return [] + + +# Initialize retrievers +movie_metadata = MetadataRetriever( + data_models.MovieData, MovieMetadata, MediaType.MOVIE, "movie_data" +) + +series_metadata = SeriesMetadataRetriever( + data_models.SeriesData, SeriesMetadata, MediaType.SERIES, "series_data" +) + +tv_metadata = MetadataRetriever(data_models.TVData, TVMetadata, MediaType.TV, "tv_data") + + +async def get_metadata_by_type( + session: AsyncSession, + media_type: MediaType, + media_id: str, + bypass_cache: bool = False, +) -> Optional[Any]: + """Factory function to get metadata based on media type""" + retrievers = { + MediaType.MOVIE: movie_metadata, + MediaType.SERIES: series_metadata, + MediaType.TV: tv_metadata, + } + + retriever = retrievers.get(media_type) + if not retriever: + raise ValueError(f"Unsupported media type: {media_type}") + + return await retriever.get_metadata(session, media_id, bypass_cache) diff --git a/db/new_models.py b/db/sql_models.py similarity index 56% rename from db/new_models.py rename to db/sql_models.py index 6719182a..06c41c60 100644 --- a/db/new_models.py +++ b/db/sql_models.py @@ -1,32 +1,20 @@ from datetime import datetime -from enum import Enum as PyEnum -from typing import ClassVar +from typing import ClassVar, List, Optional import pytz -from sqlalchemy import DateTime, BigInteger, UniqueConstraint, Index, JSON -from sqlmodel import SQLModel, Field +from sqlalchemy import ( + DateTime, + BigInteger, + UniqueConstraint, + Index, + JSON, + Column, + Computed, +) +from sqlalchemy.dialects.postgresql import TSVECTOR +from sqlmodel import SQLModel, Field, Relationship - -# Enums -class MediaType(str, PyEnum): - MOVIE = "movie" - SERIES = "series" - TV = "tv" - EVENTS = "events" - - -class IndexerType(str, PyEnum): - FREELEACH = "freeleech" - SEMI_PRIVATE = "semi-private" - PRIVATE = "private" - - -class NudityStatus(str, PyEnum): - NONE = "None" - MILD = "Mild" - MODERATE = "Moderate" - SEVERE = "Severe" - UNKNOWN = "Unknown" +from db.enums import MediaType, IndexerType, NudityStatus # Base Models and Mixins @@ -44,6 +32,131 @@ class TimestampMixin(SQLModel): ) +class MediaGenreLink(SQLModel, table=True): + __tablename__ = "media_genre_link" + + media_id: str = Field( + foreign_key="base_metadata.id", primary_key=True, ondelete="CASCADE" + ) + genre_id: int = Field(foreign_key="genre.id", primary_key=True, ondelete="CASCADE") + + 
+class MediaCatalogLink(SQLModel, table=True): + __tablename__ = "media_catalog_link" + + media_id: str = Field( + foreign_key="base_metadata.id", primary_key=True, ondelete="CASCADE" + ) + catalog_id: int = Field( + foreign_key="catalog.id", primary_key=True, ondelete="CASCADE" + ) + + +class MediaStarLink(SQLModel, table=True): + __tablename__ = "media_star_link" + + media_id: str = Field( + foreign_key="base_metadata.id", primary_key=True, ondelete="CASCADE" + ) + star_id: int = Field(foreign_key="star.id", primary_key=True, ondelete="CASCADE") + + +class MediaParentalCertificateLink(SQLModel, table=True): + __tablename__ = "media_parental_certificate_link" + + media_id: str = Field( + foreign_key="base_metadata.id", primary_key=True, ondelete="CASCADE" + ) + certificate_id: int = Field( + foreign_key="parental_certificate.id", primary_key=True, ondelete="CASCADE" + ) + + +class TorrentLanguageLink(SQLModel, table=True): + __tablename__ = "torrent_language_link" + + torrent_id: str = Field( + foreign_key="torrent_stream.id", primary_key=True, ondelete="CASCADE" + ) + language_id: int = Field( + foreign_key="language.id", primary_key=True, ondelete="CASCADE" + ) + + +class TorrentAnnounceLink(SQLModel, table=True): + __tablename__ = "torrent_announce_link" + + torrent_id: str = Field( + foreign_key="torrent_stream.id", primary_key=True, ondelete="CASCADE" + ) + announce_id: int = Field( + foreign_key="announce_url.id", primary_key=True, ondelete="CASCADE" + ) + + +class TVStreamNamespaceLink(SQLModel, table=True): + __tablename__ = "tv_stream_namespace_link" + + stream_id: int = Field( + foreign_key="tv_stream.id", primary_key=True, ondelete="CASCADE" + ) + namespace_id: int = Field( + foreign_key="namespace.id", primary_key=True, ondelete="CASCADE" + ) + + +class GenreName(SQLModel): + name: str + + +class CatalogName(SQLModel): + name: str + + +class ParentalCertificateName(SQLModel): + name: str + + +class AkaTitleName(SQLModel): + title: str + + +class SeriesSeason(SQLModel, table=True): + """Series season - primarily for organizing episodes""" + + __tablename__ = "series_season" + __table_args__ = (UniqueConstraint("series_id", "season_number"),) + + id: int = Field(default=None, primary_key=True) + series_id: str = Field(foreign_key="series_metadata.id", index=True) + season_number: int = Field(index=True) + + # Relationships + series: "SeriesMetadata" = Relationship(back_populates="seasons") + episodes: List["SeriesEpisode"] = Relationship(back_populates="season") + + +class SeriesEpisode(SQLModel, table=True): + """Series episode metadata from IMDb""" + + __tablename__ = "series_episode" + __table_args__ = (UniqueConstraint("season_id", "episode_number"),) + + id: int = Field(default=None, primary_key=True) + season_id: int = Field(foreign_key="series_season.id", index=True) + episode_number: int = Field(index=True) + title: str + plot: Optional[str] = None + runtime: Optional[int] = None + air_date: Optional[datetime] = Field(default=None, sa_type=DateTime(timezone=True)) + imdb_rating: Optional[float] = None + poster: Optional[str] = None + is_poster_working: bool = Field(default=True) + + # Relationships + season: SeriesSeason = Relationship(back_populates="episodes") + + class BaseMetadata(TimestampMixin, table=True): """Base table for all metadata""" @@ -51,13 +164,25 @@ class BaseMetadata(TimestampMixin, table=True): __table_args__ = ( Index("idx_base_meta_type_title", "type", "title"), UniqueConstraint("title", "year"), - # Pattern matching index for partial title searches 
+ # Materialized tsvector columns for faster searches with multilingual titles + Column( + "title_tsv", + TSVECTOR, + Computed("to_tsvector('simple'::regconfig, title)"), + nullable=False, + ), + Index("idx_base_title_fts", "title_tsv", postgresql_using="gin"), Index( - "idx_base_title_search", + "idx_base_title_trgm", "title", postgresql_using="gin", postgresql_ops={"title": "gin_trgm_ops"}, ), + Index( + "idx_base_meta_last_stream_added", + "last_stream_added", + ), + Index("idx_last_stream_added", "last_stream_added", "type"), ) id: str = Field(primary_key=True) @@ -71,6 +196,25 @@ class BaseMetadata(TimestampMixin, table=True): description: str | None runtime: str | None website: str | None + last_stream_added: datetime = Field( + default_factory=lambda: datetime.now(pytz.UTC), + nullable=False, + index=True, + sa_type=DateTime(timezone=True), + ) + + # Relationships + genres: List["Genre"] = Relationship( + link_model=MediaGenreLink, + sa_relationship_kwargs={"cascade": "all, delete"}, + ) + catalogs: List["Catalog"] = Relationship( + link_model=MediaCatalogLink, + sa_relationship_kwargs={"cascade": "all, delete"}, + ) + aka_titles: List["AkaTitle"] = Relationship( + sa_relationship_kwargs={"cascade": "all, delete"} + ) class MovieMetadata(TimestampMixin, table=True): @@ -87,6 +231,29 @@ class MovieMetadata(TimestampMixin, table=True): ) type: ClassVar[MediaType] = MediaType.MOVIE + # Relationships + base_metadata: BaseMetadata = Relationship( + sa_relationship_kwargs={"uselist": False, "cascade": "all, delete"} + ) + + parental_certificates: List["ParentalCertificate"] = Relationship( + link_model=MediaParentalCertificateLink, + sa_relationship_kwargs={ + "cascade": "all, delete", + "primaryjoin": "MovieMetadata.id == MediaParentalCertificateLink.media_id", + "overlaps": "parental_certificates", + }, + ) + + stars: List["Star"] = Relationship( + link_model=MediaStarLink, + sa_relationship_kwargs={ + "cascade": "all, delete", + "primaryjoin": "MovieMetadata.id == MediaStarLink.media_id", + "overlaps": "stars", + }, + ) + class SeriesMetadata(TimestampMixin, table=True): """Series specific metadata table""" @@ -103,6 +270,28 @@ class SeriesMetadata(TimestampMixin, table=True): ) type: ClassVar[MediaType] = MediaType.SERIES + # Relationships + base_metadata: BaseMetadata = Relationship( + sa_relationship_kwargs={"uselist": False, "cascade": "all, delete"} + ) + seasons: List[SeriesSeason] = Relationship(back_populates="series") + parental_certificates: List["ParentalCertificate"] = Relationship( + link_model=MediaParentalCertificateLink, + sa_relationship_kwargs={ + "cascade": "all, delete", + "primaryjoin": "SeriesMetadata.id == MediaParentalCertificateLink.media_id", + "overlaps": "parental_certificates", + }, + ) + stars: List["Star"] = Relationship( + link_model=MediaStarLink, + sa_relationship_kwargs={ + "cascade": "all, delete", + "primaryjoin": "SeriesMetadata.id == MediaStarLink.media_id", + "overlaps": "stars", + }, + ) + class TVMetadata(TimestampMixin, table=True): """TV specific metadata table""" @@ -117,6 +306,10 @@ class TVMetadata(TimestampMixin, table=True): logo: str | None type: ClassVar[MediaType] = MediaType.TV + base_metadata: BaseMetadata = Relationship( + sa_relationship_kwargs={"uselist": False, "cascade": "all, delete"} + ) + # Supporting Models class Genre(SQLModel, table=True): @@ -126,15 +319,6 @@ class Genre(SQLModel, table=True): name: str = Field(unique=True, index=True) -class MediaGenreLink(SQLModel, table=True): - __tablename__ = "media_genre_link" - - 
media_id: str = Field( - foreign_key="base_metadata.id", primary_key=True, ondelete="CASCADE" - ) - genre_id: int = Field(foreign_key="genre.id", primary_key=True, ondelete="CASCADE") - - class Catalog(SQLModel, table=True): __tablename__ = "catalog" @@ -142,24 +326,24 @@ class Catalog(SQLModel, table=True): name: str = Field(unique=True) -class MediaCatalogLink(SQLModel, table=True): - __tablename__ = "media_catalog_link" - __table_args__ = {"postgresql_partition_by": "LIST (catalog_id)"} - - media_id: str = Field( - foreign_key="base_metadata.id", primary_key=True, ondelete="CASCADE" - ) - catalog_id: int = Field( - foreign_key="catalog.id", primary_key=True, ondelete="CASCADE" - ) - priority: int = Field(default=0, index=True) - - class Config: - arbitrary_types_allowed = True - - class AkaTitle(SQLModel, table=True): __tablename__ = "aka_title" + __table_args__ = ( + # Materialized tsvector columns for faster searches with multilingual titles + Column( + "title_tsv", + TSVECTOR, + Computed("to_tsvector('simple'::regconfig, title)"), + nullable=False, + ), + Index("idx_aka_title_fts", "title_tsv", postgresql_using="gin"), + Index( + "idx_aka_title_trgm", + "title", + postgresql_using="gin", + postgresql_ops={"title": "gin_trgm_ops"}, + ), + ) id: int | None = Field(default=None, primary_key=True) title: str = Field(index=True) @@ -175,17 +359,6 @@ class ParentalCertificate(SQLModel, table=True): name: str = Field(unique=True, index=True) -class MediaParentalCertificateLink(SQLModel, table=True): - __tablename__ = "media_parental_certificate_link" - - media_id: str = Field( - foreign_key="base_metadata.id", primary_key=True, ondelete="CASCADE" - ) - certificate_id: int = Field( - foreign_key="parental_certificate.id", primary_key=True, ondelete="CASCADE" - ) - - class Star(SQLModel, table=True): __tablename__ = "star" @@ -193,16 +366,30 @@ class Star(SQLModel, table=True): name: str = Field(index=True) -class MediaStarLink(SQLModel, table=True): - __tablename__ = "media_star_link" +class EpisodeFile(SQLModel, table=True): + """Episode file information within a torrent""" - media_id: str = Field( - foreign_key="base_metadata.id", primary_key=True, ondelete="CASCADE" + __tablename__ = "episode_file" + __table_args__ = ( + UniqueConstraint("torrent_stream_id", "season_number", "episode_number"), ) - star_id: int = Field(foreign_key="star.id", primary_key=True, ondelete="CASCADE") + + id: int = Field(default=None, primary_key=True) + torrent_stream_id: str = Field(foreign_key="torrent_stream.id", index=True) + season_number: int + episode_number: int + + # File details - nullable as they might not be known initially + file_index: Optional[int] = None + filename: Optional[str] = None + size: Optional[int] = Field(default=None, sa_type=BigInteger) + + episode_id: Optional[int] = Field(default=None, foreign_key="series_episode.id") + + # Relationships + torrent_stream: "TorrentStream" = Relationship(back_populates="episode_files") -# Stream Models class TorrentStream(TimestampMixin, table=True): __tablename__ = "torrent_stream" __table_args__ = ( @@ -228,8 +415,6 @@ class TorrentStream(TimestampMixin, table=True): meta_id: str = Field(foreign_key="base_metadata.id", index=True, ondelete="CASCADE") torrent_name: str size: int = Field(sa_type=BigInteger, gt=0) - filename: str | None - file_index: int | None source: str = Field(index=True) resolution: str | None = Field(default=None) codec: str | None @@ -239,32 +424,19 @@ class TorrentStream(TimestampMixin, table=True): is_blocked: bool = 
Field(default=False, index=True) indexer_flag: IndexerType = Field(default=IndexerType.FREELEACH) + # For movies only (nullable for series) + filename: Optional[str] = None + file_index: Optional[int] = None -class Season(SQLModel, table=True): - __tablename__ = "season" - __table_args__ = ( - Index("idx_season_torrent_number", "torrent_stream_id", "season_number"), + # Relationships + episode_files: List[EpisodeFile] = Relationship(back_populates="torrent_stream") + languages: List["Language"] = Relationship( + link_model=TorrentLanguageLink, + sa_relationship_kwargs={"cascade": "all, delete"}, ) - - id: int | None = Field(default=None, primary_key=True) - torrent_stream_id: str = Field(foreign_key="torrent_stream.id", ondelete="CASCADE") - season_number: int - - -class Episode(SQLModel, table=True): - __tablename__ = "episode" - __table_args__ = (UniqueConstraint("season_id", "episode_number"),) - - id: int | None = Field(default=None, primary_key=True) - season_id: int = Field(foreign_key="season.id", ondelete="CASCADE") - episode_number: int = Field(index=True) - filename: str | None - size: int | None = Field(default=None, sa_type=BigInteger) - file_index: int | None - title: str | None - released: datetime | None = Field( - default=None, - sa_type=DateTime(timezone=True), + announce_urls: List["AnnounceURL"] = Relationship( + link_model=TorrentAnnounceLink, + sa_relationship_kwargs={"cascade": "all, delete"}, ) @@ -289,6 +461,12 @@ class TVStream(TimestampMixin, table=True): drm_key: str | None behaviorHints: dict | None = Field(default=None, sa_type=JSON) + # Relationships + namespaces: List["Namespace"] = Relationship( + link_model=TVStreamNamespaceLink, + sa_relationship_kwargs={"cascade": "all, delete"}, + ) + # Stream Relationship Models class Language(SQLModel, table=True): @@ -298,17 +476,6 @@ class Language(SQLModel, table=True): name: str = Field(unique=True) -class TorrentLanguageLink(SQLModel, table=True): - __tablename__ = "torrent_language_link" - - torrent_id: str = Field( - foreign_key="torrent_stream.id", primary_key=True, ondelete="CASCADE" - ) - language_id: int = Field( - foreign_key="language.id", primary_key=True, ondelete="CASCADE" - ) - - class AnnounceURL(SQLModel, table=True): __tablename__ = "announce_url" @@ -316,30 +483,8 @@ class AnnounceURL(SQLModel, table=True): name: str = Field(unique=True) -class TorrentAnnounceLink(SQLModel, table=True): - __tablename__ = "torrent_announce_link" - - torrent_id: str = Field( - foreign_key="torrent_stream.id", primary_key=True, ondelete="CASCADE" - ) - announce_id: int = Field( - foreign_key="announce_url.id", primary_key=True, ondelete="CASCADE" - ) - - class Namespace(SQLModel, table=True): __tablename__ = "namespace" id: int | None = Field(default=None, primary_key=True) name: str = Field(unique=True) - - -class TVStreamNamespaceLink(SQLModel, table=True): - __tablename__ = "tv_stream_namespace_link" - - stream_id: int = Field( - foreign_key="tv_stream.id", primary_key=True, ondelete="CASCADE" - ) - namespace_id: int = Field( - foreign_key="namespace.id", primary_key=True, ondelete="CASCADE" - ) diff --git a/migrations/mongo_to_postgres.py b/migrations/mongo_to_postgres.py index 40f79852..c3110b4d 100644 --- a/migrations/mongo_to_postgres.py +++ b/migrations/mongo_to_postgres.py @@ -1,17 +1,18 @@ import asyncio import logging +from contextlib import asynccontextmanager +from dataclasses import dataclass from datetime import timezone -from typing import Dict, List +from typing import Dict, Set, TypeVar, Type -import 
sqlalchemy import typer from beanie import init_beanie from motor.motor_asyncio import AsyncIOMotorClient from sqlalchemy import make_url, func, text -from sqlalchemy.ext.asyncio import create_async_engine +from sqlalchemy.ext.asyncio import create_async_engine, AsyncEngine from sqlmodel import select from sqlmodel.ext.asyncio.session import AsyncSession -from tqdm import tqdm +from tqdm.asyncio import tqdm from db.models import ( MediaFusionMetaData as OldMetaData, @@ -21,19 +22,60 @@ TorrentStreams as OldTorrentStreams, TVStreams as OldTVStreams, ) -from db.new_models import * # Import all new models +from db.sql_models import * from utils.validation_helper import is_video_file +# Set up logging with more detailed format logging.basicConfig( - level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" + level=logging.INFO, + format="%(asctime)s - %(levelname)s - [%(name)s:%(lineno)d] - %(message)s", ) logger = logging.getLogger(__name__) +# Type variables for generic operations +T = TypeVar("T") +ModelType = TypeVar("ModelType", bound=SQLModel) + app = typer.Typer() +@dataclass +class MigrationStats: + """Track migration statistics and errors""" + + processed: int = 0 + successful: int = 0 + failed: int = 0 + errors: Dict[str, List[str]] = None + + def __post_init__(self): + self.errors = {} + + def add_error(self, category: str, error: str): + if category not in self.errors: + self.errors[category] = [] + self.errors[category].append(error) + + def log_summary(self): + logger.info( + f""" +Migration Summary: +---------------- +Total Processed: {self.processed} +Successful: {self.successful} +Failed: {self.failed} + """ + ) + if self.errors: + logger.exception("Errors by category:") + for category, errors in self.errors.items(): + logger.exception(f"\n{category}:") + for error in errors: + logger.exception(f" - {error}") + + class ResourceTracker: - """Track and manage resources across the migration""" + """Enhanced resource tracking with caching and batch operations""" def __init__(self): self._resource_maps: Dict[str, Dict[str, int]] = { @@ -43,171 +85,167 @@ def __init__(self): "announce_url": {}, "namespace": {}, "star": {}, + "parental_certificate": {}, } - self._pending_inserts: Dict[str, set] = { + self._pending_inserts: Dict[str, Set[str]] = { key: set() for key in self._resource_maps } - - async def initialize_from_db(self, session: AsyncSession): - """Load existing resource IDs from PostgreSQL""" - resource_models = { + self.resource_models = { "genre": Genre, "catalog": Catalog, "language": Language, "announce_url": AnnounceURL, "namespace": Namespace, "star": Star, + "parental_certificate": ParentalCertificate, } + self._batch_size = 100 - for resource_type, model in resource_models.items(): - result = await session.exec(select(model)) - existing_resources = result.all() - for resource in existing_resources: - self._resource_maps[resource_type][resource.name] = resource.id + async def initialize_from_db(self, session: AsyncSession): + """Load existing resources with optimized batch queries""" + for resource_type, model in self.resource_models.items(): + stmt = select(model) + result = await session.exec(stmt) + resources = result.all() + self._resource_maps[resource_type] = {r.name: r.id for r in resources} async def ensure_resources(self, session: AsyncSession): - """Ensure all pending resources are created in the database""" - resource_models = { - "genre": Genre, - "catalog": Catalog, - "language": Language, - "announce_url": AnnounceURL, - "namespace": 
Namespace, - "star": Star, - } - - for resource_type, model in resource_models.items(): - pending = self._pending_inserts[resource_type] + """Batch create pending resources efficiently""" + for resource_type, pending in self._pending_inserts.items(): if not pending: continue - # Get existing resources - stmt = select(model).where(model.name.in_(pending)) - result = await session.exec(stmt) - existing = {r.name: r.id for r in result} - - # Create new resources - new_resources = pending - existing.keys() - if new_resources: - for name in new_resources: - new_resource = model(name=name) - session.add(new_resource) - - await session.commit() + model = self.resource_models[resource_type] + # Process in batches + pending_list = list(pending) + for i in range(0, len(pending_list), self._batch_size): + batch = pending_list[i : i + self._batch_size] - # Get IDs of newly created resources - stmt = select(model).where(model.name.in_(new_resources)) + # Get existing resources in batch + stmt = select(model).where(model.name.in_(batch)) result = await session.exec(stmt) - new_ids = {r.name: r.id for r in result} + existing = {r.name: r.id for r in result} + + # Create new resources in batch + new_resources = [ + model(name=name) for name in batch if name not in existing + ] + if new_resources: + session.add_all(new_resources) + await session.commit() + + # Update cache with new IDs + stmt = select(model).where( + model.name.in_([r.name for r in new_resources]) + ) + result = await session.exec(stmt) + new_ids = {r.name: r.id for r in result} + existing.update(new_ids) - existing.update(new_ids) + self._resource_maps[resource_type].update(existing) - # Update resource map - self._resource_maps[resource_type].update(existing) self._pending_inserts[resource_type].clear() - def track_resource(self, resource_type: str, name: str): - """Track a resource for creation""" - if name and name not in self._resource_maps[resource_type]: - self._pending_inserts[resource_type].add(name) - - def get_resource_id(self, resource_type: str, name: str) -> int | None: - """Get ID of a tracked resource""" - return self._resource_maps[resource_type].get(name) - - -class VerificationResult: - """Stores verification results""" + async def get_resource_id( + self, session: AsyncSession, resource_type: str, name: str + ) -> Optional[int]: + """Get resource ID with efficient caching""" + if not name: + return None + + resource_id = self._resource_maps[resource_type].get(name) + if resource_id is None: + model = self.resource_models[resource_type] + new_resource = model(name=name) + session.add(new_resource) + await session.commit() + await session.refresh(new_resource) + resource_id = new_resource.id + self._resource_maps[resource_type][name] = resource_id - def __init__(self): - self.counts: Dict[str, tuple[int, int]] = {} # (mongo_count, pg_count) - self.sample_checks: Dict[str, List[str]] = {} # List of failed checks - self.relationship_checks: Dict[str, List[str]] = ( - {} - ) # List of failed relationships + return resource_id class DatabaseMigration: + """Enhanced database migration with connection pooling and batch processing""" + def __init__( self, mongo_uri: str, postgres_uri: str, batch_size: int = 1000, ): - - self.pg_engine = None - self.mongo_client = None self.mongo_uri = mongo_uri self.postgres_uri = postgres_uri self.batch_size = batch_size self.resource_tracker = ResourceTracker() - self.verification_result = VerificationResult() + self.stats = MigrationStats() + self.pg_engine: Optional[AsyncEngine] = None + 
self.mongo_client: Optional[AsyncIOMotorClient] = None + + @asynccontextmanager + async def get_session(self): + """Provide managed session context""" + async with AsyncSession(self.pg_engine, expire_on_commit=False) as session: + try: + yield session + except Exception as e: + await session.rollback() + raise e async def init_connections(self, connect_mongo: bool = True): - """Initialize database connections""" - # Initialize MongoDB - if connect_mongo: - self.mongo_client = AsyncIOMotorClient(self.mongo_uri) - db = self.mongo_client.get_default_database() - await init_beanie( - database=db, - document_models=[ - OldMovieMetaData, - OldSeriesMetaData, - OldTVMetaData, - OldTorrentStreams, - OldTVStreams, - ], - ) - - # Create database if not exists - - postgres_url = make_url(self.postgres_uri) - database_name = postgres_url.database - # PostgreSQL connection for creating database - temp_engine = create_async_engine( - postgres_url.set(database="postgres"), echo=False - ) + """Initialize database connections with improved error handling""" + try: + if connect_mongo: + self.mongo_client = AsyncIOMotorClient( + self.mongo_uri, maxPoolSize=50, minPoolSize=10 + ) + db = self.mongo_client.get_default_database() + await init_beanie( + database=db, + document_models=[ + OldMovieMetaData, + OldSeriesMetaData, + OldTVMetaData, + OldTorrentStreams, + OldTVStreams, + ], + ) - async with temp_engine.connect() as conn: - # Close any open transactions - await conn.execute(sqlalchemy.text("COMMIT")) + postgres_url = make_url(self.postgres_uri) + database_name = postgres_url.database - result = await conn.execute( - sqlalchemy.text( - f"SELECT 1 FROM pg_database WHERE datname='{database_name}'" + # Create database if not exists + async with create_async_engine( + postgres_url.set(database="postgres") + ).connect() as conn: + await conn.execute(text("COMMIT")) + result = await conn.execute( + text(f"SELECT 1 FROM pg_database WHERE datname='{database_name}'") ) + if not result.scalar(): + await conn.execute(text(f"CREATE DATABASE {database_name}")) + logger.info(f"Database '{database_name}' created.") + + # Initialize PostgreSQL with optimized connection pool + self.pg_engine = create_async_engine( + self.postgres_uri, + echo=False, + pool_size=20, + max_overflow=30, + pool_pre_ping=True, + pool_recycle=300, ) - if not result.scalar(): - await conn.execute(sqlalchemy.text(f"CREATE DATABASE {database_name}")) - logger.info(f"Database '{database_name}' created.") - - await temp_engine.dispose() - - # Initialize PostgreSQL - self.pg_engine = create_async_engine( - self.postgres_uri, echo=False, pool_size=20, max_overflow=30 - ) - - # Create tables if not exists - async with self.pg_engine.begin() as conn: - # Create extensions first - await conn.execute(text("CREATE EXTENSION IF NOT EXISTS pg_trgm;")) - await conn.execute(text("CREATE EXTENSION IF NOT EXISTS btree_gin;")) + # Create extensions and tables + async with self.pg_engine.begin() as conn: + await conn.execute(text("CREATE EXTENSION IF NOT EXISTS pg_trgm;")) + await conn.execute(text("CREATE EXTENSION IF NOT EXISTS btree_gin;")) + await conn.run_sync(SQLModel.metadata.create_all) - await conn.run_sync(SQLModel.metadata.create_all) - - async def initialize_resources(self): - # Initialize resource tracker - async with AsyncSession(self.pg_engine) as session: - await self.resource_tracker.initialize_from_db(session) - - async def reset_database(self): - """Reset PostgreSQL database""" - async with self.pg_engine.begin() as conn: - await 
conn.run_sync(SQLModel.metadata.drop_all) - await conn.run_sync(SQLModel.metadata.create_all) + except Exception as e: + logger.exception(f"Failed to initialize connections: {str(e)}") + raise async def close_connections(self): """Close database connections""" @@ -215,112 +253,315 @@ async def close_connections(self): if self.mongo_client: self.mongo_client.close() except Exception as e: - logger.error(f"Error closing MongoDB connection: {str(e)}") + logger.exception(f"Error closing MongoDB connection: {str(e)}") try: if self.pg_engine: await self.pg_engine.dispose() except Exception as e: - logger.error(f"Error closing PostgreSQL connection: {str(e)}") + logger.exception(f"Error closing PostgreSQL connection: {str(e)}") + + +class MetadataMigrator: + """Dedicated class for handling metadata migration with optimized batch processing""" + + def __init__(self, migration: DatabaseMigration): + self.migration = migration + self.batch_size = migration.batch_size + self.resource_tracker = migration.resource_tracker + self.stats = migration.stats async def migrate_metadata(self): - """Migrate metadata to separate tables using cursor-based pagination""" + """Migrate metadata with enhanced parallel processing and error handling""" collections = [ - (OldMovieMetaData, MovieMetadata, "movies"), - (OldSeriesMetaData, SeriesMetadata, "series"), - (OldTVMetaData, TVMetadata, "tv"), + (OldMovieMetaData, MovieMetadata, MediaType.MOVIE, "movies"), + (OldSeriesMetaData, SeriesMetadata, MediaType.SERIES, "series"), + (OldTVMetaData, TVMetadata, MediaType.TV, "tv"), ] - for old_model, new_model_class, collection_name in collections: - total = await old_model.find().count() - if total == 0: - logger.info(f"No {collection_name} to migrate") - continue + for old_model, new_model_class, media_type, collection_name in collections: + try: + total = await old_model.find().count() + if total == 0: + logger.info(f"No {collection_name} to migrate") + continue - processed = 0 - cursor = old_model.find() + logger.info(f"Starting migration of {total} {collection_name}") - with tqdm(total=total) as pbar: - pbar.set_description(f"Migrating {collection_name}") + async for batch in self._get_document_batches(old_model, total): + async with self.migration.get_session() as session: + await self._process_metadata_batch( + session, batch, new_model_class, media_type + ) - async for old_doc in cursor: - try: - async with AsyncSession(self.pg_engine) as session: - # Handle base metadata first - base_data = await self.transform_base_metadata( - old_doc, new_model_class.type - ) - stmt = select(BaseMetadata).where( - BaseMetadata.id == old_doc.id - ) - result = await session.exec(stmt) - base_meta = result.first() + except Exception as e: + logger.exception(f"Failed to migrate {collection_name}: {str(e)}") + self.stats.add_error(collection_name, str(e)) - if base_meta: - for key, value in base_data.items(): - setattr(base_meta, key, value) - else: - base_meta = BaseMetadata(**base_data) - session.add(base_meta) + async def _migrate_genres( + self, session: AsyncSession, genres: List[str], media_id: str + ): + """Batch migrate genres""" + existing = await session.exec( + select(MediaGenreLink.genre_id).where(MediaGenreLink.media_id == media_id) + ) + existing_ids = set(row for row in existing) - await session.commit() + for genre in genres: + genre_id = await self.resource_tracker.get_resource_id( + session, "genre", genre + ) + if genre_id and genre_id not in existing_ids: + session.add(MediaGenreLink(media_id=media_id, 
genre_id=genre_id)) - # Handle type-specific metadata - specific_data = await self.transform_specific_metadata( - old_doc, new_model_class.type - ) - stmt = select(new_model_class).where( - new_model_class.id == old_doc.id - ) - result = await session.exec(stmt) - specific_meta = result.first() - - if specific_meta: - for key, value in specific_data.items(): - if key != "id" and specific_meta.__fields__.get( - key - ): - setattr(specific_meta, key, value) - else: - specific_meta = new_model_class(**specific_data) - session.add(specific_meta) - - await session.commit() - - # Handle relationships - await self.migrate_metadata_relationships( - session, old_doc, old_doc.id, new_model_class.type - ) + async def _migrate_aka_titles( + self, session: AsyncSession, titles: List[str], media_id: str + ): + """Batch migrate AKA titles""" + existing = await session.exec( + select(AkaTitle.title).where(AkaTitle.media_id == media_id) + ) + existing_titles = set(row for row in existing) - processed += 1 - pbar.update(1) + new_titles = [ + AkaTitle(title=title, media_id=media_id) + for title in titles + if title not in existing_titles + ] + if new_titles: + session.add_all(new_titles) - except Exception as e: - logger.exception( - f"Error processing document {old_doc.id}: {str(e)}" - ) - await session.rollback() + async def _migrate_stars( + self, session: AsyncSession, stars: List[str], media_id: str + ): + """Batch migrate stars""" + existing = await session.exec( + select(MediaStarLink.star_id).where(MediaStarLink.media_id == media_id) + ) + existing_ids = set(row for row in existing) + + for star in stars: + star_id = await self.resource_tracker.get_resource_id(session, "star", star) + if star_id and star_id not in existing_ids: + session.add(MediaStarLink(media_id=media_id, star_id=star_id)) + + async def _migrate_certificates( + self, session: AsyncSession, certificates: List[str], media_id: str + ): + """Batch migrate certificates""" + existing = await session.exec( + select(MediaParentalCertificateLink.certificate_id).where( + MediaParentalCertificateLink.media_id == media_id + ) + ) + existing_ids = set(row for row in existing) + + for certificate in certificates: + cert_id = await self.resource_tracker.get_resource_id( + session, "parental_certificate", certificate + ) + if cert_id and cert_id not in existing_ids: + session.add( + MediaParentalCertificateLink( + media_id=media_id, certificate_id=cert_id + ) + ) + + async def _migrate_catalogs(self, session: AsyncSession, media_id: str): + """Migrate catalogs from torrent streams with efficient batch processing""" + try: + # Get existing catalogs for the media + existing_result = await session.exec( + select(MediaCatalogLink.catalog_id).where( + MediaCatalogLink.media_id == media_id + ) + ) + existing_catalog_ids = set(row for row in existing_result) + + # Get all torrent streams for this media + torrent_streams = await OldTorrentStreams.find( + {"meta_id": media_id} + ).to_list() + + # Process catalogs from all streams + new_catalog_links = [] + processed_catalogs = set() + + for stream in torrent_streams: + catalogs = ( + [stream.catalog] + if isinstance(stream.catalog, str) + else (stream.catalog or []) + ) + + for catalog in catalogs: + if not catalog or catalog in processed_catalogs: continue - @staticmethod - async def transform_base_metadata( - old_doc: OldMetaData, media_type: MediaType + catalog_id = await self.resource_tracker.get_resource_id( + session, "catalog", catalog + ) + + if ( + catalog_id + and catalog_id not in 
existing_catalog_ids + and catalog_id not in processed_catalogs + ): + new_catalog_links.append( + MediaCatalogLink(media_id=media_id, catalog_id=catalog_id) + ) + processed_catalogs.add(catalog_id) + + # Batch insert new catalog links + if new_catalog_links: + session.add_all(new_catalog_links) + await session.commit() + + except Exception as e: + logger.exception(f"Error migrating catalogs for media {media_id}: {str(e)}") + await session.rollback() + raise + + async def _migrate_metadata_relationships( + self, + session: AsyncSession, + old_doc: OldMetaData | OldSeriesMetaData | OldTVMetaData | OldMovieMetaData, + metadata: SQLModel, + media_type: MediaType, + ): + """Migrate all relationships for a metadata record with batch processing""" + try: + # Migrate genres + if old_doc.genres: + await self._migrate_genres(session, old_doc.genres, metadata.id) + + # Migrate AKA titles + if hasattr(old_doc, "aka_titles") and old_doc.aka_titles: + await self._migrate_aka_titles(session, old_doc.aka_titles, metadata.id) + + # Migrate stars + if hasattr(old_doc, "stars") and old_doc.stars: + await self._migrate_stars(session, old_doc.stars, metadata.id) + + # Migrate certificates + if hasattr(old_doc, "parent_guide_certificates"): + await self._migrate_certificates( + session, old_doc.parent_guide_certificates or [], metadata.id + ) + + # Migrate catalogs from torrent streams + await self._migrate_catalogs(session, metadata.id) + + await session.commit() + + except Exception as e: + logger.exception( + f"Error migrating relationships for {metadata.id}: {str(e)}" + ) + await session.rollback() + raise + + async def _get_document_batches(self, model, total): + """Efficiently yield batches of documents""" + cursor = model.find() + with tqdm(total=total) as pbar: + current_batch = [] + async for doc in cursor: + current_batch.append(doc) + if len(current_batch) >= self.batch_size: + yield current_batch + pbar.update(len(current_batch)) + current_batch = [] + + if current_batch: + yield current_batch + pbar.update(len(current_batch)) + + async def _process_metadata_batch( + self, + session: AsyncSession, + batch: List[OldMetaData], + new_model_class: Type[SQLModel], + media_type: MediaType, + ): + """Process a batch of metadata documents efficiently""" + for old_doc in batch: + try: + # Handle base metadata + base_data = await self._transform_base_metadata(old_doc, media_type) + base_meta = await self._upsert_base_metadata(session, base_data) + + # Handle type-specific metadata + specific_data = await self._transform_specific_metadata( + old_doc, media_type, new_model_class + ) + specific_meta = await self._upsert_specific_metadata( + session, specific_data, new_model_class + ) + + # Handle relationships + await self._migrate_metadata_relationships( + session, old_doc, specific_meta, media_type + ) + + self.stats.successful += 1 + + except Exception as e: + logger.exception(f"Error processing document {old_doc.id}: {str(e)}") + self.stats.add_error("metadata_processing", f"{old_doc.id}: {str(e)}") + self.stats.failed += 1 + continue + + finally: + self.stats.processed += 1 + + async def _upsert_base_metadata( + self, session: AsyncSession, base_data: dict + ) -> BaseMetadata: + """Upsert base metadata with optimized query""" + stmt = select(BaseMetadata).where(BaseMetadata.id == base_data["id"]) + result = await session.exec(stmt) + base_meta = result.one_or_none() + + if base_meta: + for key, value in base_data.items(): + setattr(base_meta, key, value) + else: + base_meta = BaseMetadata(**base_data) + 
session.add(base_meta) + + await session.commit() + return base_meta + + async def _upsert_specific_metadata( + self, session: AsyncSession, specific_data: dict, model_class: Type[SQLModel] + ) -> SQLModel: + """Upsert specific metadata with optimized query""" + stmt = select(model_class).where(model_class.id == specific_data["id"]) + result = await session.exec(stmt) + specific_meta = result.one_or_none() + + if specific_meta: + for key, value in specific_data.items(): + if key != "id" and key in specific_meta.model_fields: + setattr(specific_meta, key, value) + else: + specific_meta = model_class(**specific_data) + session.add(specific_meta) + + await session.commit() + return specific_meta + + async def _transform_base_metadata( + self, old_doc: OldMetaData, media_type: MediaType ) -> dict: - """Transform metadata to base table format""" - # Ensure timezone-aware datetimes - created_at = ( - old_doc.created_at.replace(tzinfo=timezone.utc) - if old_doc.created_at - else None - ) - updated_at = ( - old_doc.last_updated_at.replace(tzinfo=timezone.utc) - if old_doc.last_updated_at - else None - ) + """Transform base metadata with enhanced validation""" + created_at = self._ensure_timezone(old_doc.created_at) + updated_at = self._ensure_timezone(old_doc.last_updated_at) return { "id": old_doc.id, + "type": media_type, "title": old_doc.title, "year": old_doc.year, "poster": old_doc.poster, @@ -330,50 +571,23 @@ async def transform_base_metadata( "description": old_doc.description, "runtime": old_doc.runtime, "website": old_doc.website, - "type": media_type, "created_at": created_at, "updated_at": updated_at, + "last_stream_added": created_at, # Will be updated later } - @staticmethod - async def transform_specific_metadata( - old_doc: OldMetaData, media_type: MediaType + async def _transform_specific_metadata( + self, old_doc: OldMetaData, media_type: MediaType, model_class: Type[SQLModel] ) -> dict: - """Transform metadata to specific table format""" - # Ensure timezone-aware datetimes - created_at = ( - old_doc.created_at.replace(tzinfo=timezone.utc) - if old_doc.created_at - else None - ) - updated_at = ( - old_doc.last_updated_at.replace(tzinfo=timezone.utc) - if old_doc.last_updated_at - else None - ) - - data = { - "id": old_doc.id, - "title": old_doc.title, - "year": old_doc.year, - "poster": old_doc.poster, - "is_poster_working": old_doc.is_poster_working, - "is_add_title_to_poster": old_doc.is_add_title_to_poster, - "background": old_doc.background, - "description": old_doc.description, - "runtime": old_doc.runtime, - "website": old_doc.website, - "created_at": created_at, - "updated_at": updated_at, - } + """Transform specific metadata with type validation""" + data = {"id": old_doc.id} - # Add type-specific fields if media_type == MediaType.MOVIE: data.update( { "imdb_rating": getattr(old_doc, "imdb_rating", None), "parent_guide_nudity_status": getattr( - old_doc, "parent_guide_nudity_status" + old_doc, "parent_guide_nudity_status", NudityStatus.UNKNOWN ), } ) @@ -383,7 +597,7 @@ async def transform_specific_metadata( "end_year": getattr(old_doc, "end_year", None), "imdb_rating": getattr(old_doc, "imdb_rating", None), "parent_guide_nudity_status": getattr( - old_doc, "parent_guide_nudity_status" + old_doc, "parent_guide_nudity_status", NudityStatus.UNKNOWN ), } ) @@ -398,415 +612,556 @@ async def transform_specific_metadata( return data - async def migrate_metadata_relationships( + @staticmethod + def _ensure_timezone(dt: Optional[datetime]) -> Optional[datetime]: + """Ensure 
datetime is timezone-aware""" + if dt and dt.tzinfo is None: + return dt.replace(tzinfo=timezone.utc) + return dt + + +class SeriesDataMigrator: + """Handle series-specific data migration""" + + def __init__(self, migration: DatabaseMigration): + self.migration = migration + self.stats = migration.stats + + async def migrate_series_metadata(self): + """Migrate series metadata with enhanced parallel processing and error handling""" + logger.info("Migrating series data...") + # Get all series metadata + series_docs = await OldSeriesMetaData.find().to_list() + async with self.migration.get_session() as session: + for series_doc in tqdm( + series_docs, desc="Migrating series seasons and episodes" + ): + try: + # Get the corresponding series metadata from postgres + stmt = select(SeriesMetadata).where( + SeriesMetadata.id == series_doc.id + ) + series_meta = (await session.exec(stmt)).one_or_none() + + if series_meta: + await self.migrate_series_seasons_episodes( + session, series_doc, series_meta + ) + except Exception as e: + logger.error( + f"Error migrating series data for {series_doc.id}: {str(e)}" + ) + self.migration.stats.add_error( + "series_migration", f"Series {series_doc.id}: {str(e)}" + ) + await session.rollback() + continue + + async def migrate_series_seasons_episodes( self, session: AsyncSession, - old_doc: OldMetaData, - media_id: str, - media_type: MediaType, + old_doc: OldSeriesMetaData, + series_meta: SeriesMetadata, ): - """Migrate all relationships for a metadata record""" + """Migrate series seasons and episodes with efficient batch processing""" try: - # Migrate genres - existing_genres_result = await session.exec( - select(MediaGenreLink.genre_id).where( - MediaGenreLink.media_id == media_id - ) - ) - existing_genre_ids = set(existing_genres_result.all()) + # Get all existing seasons for the series + existing_seasons = await self._get_existing_seasons(session, series_meta.id) + existing_season_numbers = {s.season_number for s in existing_seasons} - for genre in old_doc.genres or []: - genre_id = self.resource_tracker.get_resource_id("genre", genre) - if genre_id and genre_id not in existing_genre_ids: - link = MediaGenreLink(media_id=media_id, genre_id=genre_id) - session.add(link) + # Get episodes from torrent streams + torrent_episodes = await self._gather_torrent_episodes(old_doc.id) - # Migrate AKA titles - stmt = select(AkaTitle.title).where(AkaTitle.media_id == media_id) - aka_title = await session.exec(stmt) - existing_aka_titles = set(aka_title.all()) - - for title in getattr(old_doc, "aka_titles", None) or []: - if title not in existing_aka_titles: - aka = AkaTitle(title=title, media_id=media_id) - session.add(aka) + # Group episodes by season + seasons_data = self._organize_episodes_by_season(torrent_episodes) - # Migrate stars - if hasattr(old_doc, "stars"): - existing_stars_result = await session.exec( - select(MediaStarLink.star_id).where( - MediaStarLink.media_id == media_id, + # Process each season + for season_number, episodes in seasons_data.items(): + if season_number not in existing_season_numbers: + season = await self._create_series_season( + session, series_meta.id, season_number ) - ) - existing_star_ids = set(existing_stars_result.all()) - - for star_name in old_doc.stars or []: - star_id = self.resource_tracker.get_resource_id("star", star_name) - if star_id and star_id not in existing_star_ids: - link = MediaStarLink( - media_id=media_id, - star_id=star_id, - ) - session.add(link) - - # Migrate certificates - if hasattr(old_doc, 
"parent_guide_certificates"): - stmt = select(ParentalCertificate).where( - ParentalCertificate.name.in_( - old_doc.parent_guide_certificates or [] + else: + season = next( + s for s in existing_seasons if s.season_number == season_number ) - ) - existing_certs = await session.exec(stmt) - existing_certificates = {cert.name: cert.id for cert in existing_certs} - for cert in old_doc.parent_guide_certificates or []: - if cert not in existing_certificates: - new_cert = ParentalCertificate(name=cert) - session.add(new_cert) - await session.commit() - await session.refresh(new_cert) - existing_certificates[cert] = new_cert.id + await self._process_season_episodes(session, season, episodes) - stmt = select(MediaParentalCertificateLink).where( - MediaParentalCertificateLink.media_id == media_id, - MediaParentalCertificateLink.certificate_id - == existing_certificates[cert], - ) - existing_link = await session.exec(stmt) - if not existing_link.first(): - link = MediaParentalCertificateLink( - media_id=media_id, - certificate_id=existing_certificates[cert], - ) - session.add(link) + except Exception as e: + logger.exception( + f"Failed to migrate seasons/episodes for series {old_doc.id}: {str(e)}" + ) + self.stats.add_error("series_migration", f"{old_doc.id}: {str(e)}") + raise - # Migrate catalogs from torrent streams - existing_catalogs_result = await session.exec( - select(MediaCatalogLink.catalog_id).where( - MediaCatalogLink.media_id == media_id - ) + async def fix_null_episode_ids(self, session: AsyncSession): + """Fix episode files with null episode_ids with enhanced matching and logging""" + try: + # Get all episode files with null episode_ids + stmt = select(EpisodeFile).where(EpisodeFile.episode_id.is_(None)) + null_episode_files = (await session.exec(stmt)).all() + + if not null_episode_files: + return + + logger.info( + f"Found {len(null_episode_files)} episode files with null episode_ids" ) - existing_catalog_ids = set(existing_catalogs_result.all()) - torrent_streams = await OldTorrentStreams.find( - {"meta_id": media_id} - ).to_list() + # Get all seasons and episodes with series information + stmt = select(SeriesSeason, SeriesMetadata).join( + SeriesMetadata, SeriesSeason.series_id == SeriesMetadata.id + ) + seasons_result = await session.exec(stmt) + seasons_with_meta = seasons_result.all() + + # Create comprehensive mapping structures + episodes_by_season = {} + meta_by_season = {} + for season, meta in seasons_with_meta: + # Get episodes for this season + stmt = select(SeriesEpisode).where(SeriesEpisode.season_id == season.id) + episodes = (await session.exec(stmt)).all() + episodes_by_season[season.season_number] = { + ep.episode_number: ep for ep in episodes + } + meta_by_season[season.season_number] = meta + + # Track unmatched files for analysis + unmatched_files = [] + fixed_count = 0 + + # Get torrent stream information for unmatched files + torrent_ids = {ef.torrent_stream_id for ef in null_episode_files} + stmt = select(TorrentStream).where(TorrentStream.id.in_(torrent_ids)) + torrent_streams = (await session.exec(stmt)).all() + torrent_meta_map = {ts.id: ts.meta_id for ts in torrent_streams} + + # Fix null episode_ids with enhanced matching + for ef in null_episode_files: + season_episodes = episodes_by_season.get(ef.season_number, {}) + matching_episode = season_episodes.get(ef.episode_number) + + if matching_episode: + ef.episode_id = matching_episode.id + fixed_count += 1 + else: + # Collect detailed information about unmatched files + meta_id = 
torrent_meta_map.get(ef.torrent_stream_id) + unmatched_files.append( + { + "torrent_id": ef.torrent_stream_id, + "meta_id": meta_id, + "season": ef.season_number, + "episode": ef.episode_number, + "filename": ef.filename, + } + ) - for stream in torrent_streams: - catalogs = ( - [stream.catalog] - if isinstance(stream.catalog, str) - else (stream.catalog or []) + if fixed_count: + await session.commit() + logger.info(f"Fixed {fixed_count} episode files with null episode_ids") + + if unmatched_files: + logger.warning( + f"\nRemaining {len(unmatched_files)} unmatched episode files:" ) - for catalog in catalogs: - catalog_id = self.resource_tracker.get_resource_id( - "catalog", catalog - ) - if catalog_id and catalog_id not in existing_catalog_ids: - link = MediaCatalogLink( - media_id=media_id, catalog_id=catalog_id - ) - session.add(link) - existing_catalog_ids.add(catalog_id) - await session.commit() + # Group unmatched files by meta_id for better analysis + unmatched_by_meta = {} + for uf in unmatched_files: + meta_id = uf["meta_id"] + if meta_id not in unmatched_by_meta: + unmatched_by_meta[meta_id] = [] + unmatched_by_meta[meta_id].append(uf) + + # Create missing seasons and episodes + await self._create_missing_episodes(session, unmatched_by_meta) except Exception as e: - logger.error(f"Error migrating relationships for {media_id}: {str(e)}") + logger.error(f"Error fixing null episode_ids: {str(e)}") await session.rollback() raise - async def migrate_torrent_streams(self): - """Migrate torrent streams using cursor-based pagination""" - total = await OldTorrentStreams.find().count() - if total == 0: - logger.info("No torrent streams to migrate") - return - - processed = 0 - cursor = OldTorrentStreams.find() + async def _create_missing_episodes( + self, session: AsyncSession, unmatched_by_meta: dict + ): + """Create missing seasons and episodes for unmatched files with duplicate handling""" + try: + created_seasons = 0 + created_episodes = 0 - with tqdm(total=total) as pbar: - pbar.set_description("Migrating torrent streams") + for meta_id, files in unmatched_by_meta.items(): + # Get series metadata + stmt = select(SeriesMetadata).where(SeriesMetadata.id == meta_id) + series_meta = await session.exec(stmt) + series_meta = series_meta.one_or_none() - async for old_stream in cursor: - try: - async with AsyncSession(self.pg_engine) as session: - # Track all resources first - for lang in old_stream.languages or []: - self.resource_tracker.track_resource("language", lang) - for url in old_stream.announce_list or []: - self.resource_tracker.track_resource("announce_url", url) - - # Ensure all resources exist - await self.resource_tracker.ensure_resources(session) - - # Validate metadata exists - result = await session.exec( - select(BaseMetadata).where( - BaseMetadata.id == old_stream.meta_id - ) - ) - if not result.first(): - logger.warning( - f"Skipping stream {old_stream.id} as metadata {old_stream.meta_id} does not exist" - ) - continue + if not series_meta: + logger.warning(f"Series metadata not found for {meta_id}") + continue - # Check if stream exists - stmt = select(TorrentStream).where( - TorrentStream.id == old_stream.id.lower() + # Get existing seasons + stmt = select(SeriesSeason).where(SeriesSeason.series_id == meta_id) + existing_seasons = await session.exec(stmt) + existing_seasons = {s.season_number: s for s in existing_seasons} + + # Group files by season + files_by_season = {} + for f in files: + season_num = f["season"] + if season_num not in files_by_season: + 
files_by_season[season_num] = ( + set() + ) # Use set to deduplicate episodes + files_by_season[season_num].add(f["episode"]) + + # Create missing seasons and episodes + for season_num, episode_numbers in files_by_season.items(): + # Get or create season + season = existing_seasons.get(season_num) + if not season: + season = SeriesSeason( + series_id=meta_id, season_number=season_num ) - result = await session.exec(stmt) - existing_stream = result.first() - - # Transform stream data - stream_data = self.transform_torrent_stream(old_stream) - - if existing_stream: - for key, value in stream_data.items(): - setattr(existing_stream, key, value) - stream = existing_stream - else: - stream = TorrentStream(**stream_data) - session.add(stream) - - stream_id = stream.id + session.add(season) await session.commit() + await session.refresh(season) + created_seasons += 1 - # Handle season and episodes if present - if old_stream.season: - # Delete existing season and episodes - existing_seasons = await session.exec( - select(Season).where( - Season.torrent_stream_id == stream_id + # Get existing episodes for this season + stmt = select(SeriesEpisode).where( + SeriesEpisode.season_id == season.id + ) + existing_episodes = await session.exec(stmt) + existing_episodes = {e.episode_number: e for e in existing_episodes} + + # Create missing episodes (deduplicated) + episodes_to_create = [] + for ep_num in episode_numbers: + if ep_num not in existing_episodes: + episodes_to_create.append( + SeriesEpisode( + season_id=season.id, + episode_number=ep_num, + title=f"Episode {ep_num}", ) ) - for existing_season in existing_seasons: - await session.delete(existing_season) - await session.commit() + created_episodes += 1 + + if episodes_to_create: + session.add_all(episodes_to_create) + await session.commit() - # Create new season and episodes - await self.migrate_season_and_episodes( - session, old_stream.season, stream_id + # Update episode files + for file in files: + if file["season"] == season_num: + # Refresh episode query to get newly created episodes + stmt = select(SeriesEpisode).where( + SeriesEpisode.season_id == season.id, + SeriesEpisode.episode_number == file["episode"], ) + episode = await session.exec(stmt) + episode = episode.one_or_none() + + if episode: + stmt = select(EpisodeFile).where( + EpisodeFile.torrent_stream_id == file["torrent_id"], + EpisodeFile.season_number == season_num, + EpisodeFile.episode_number == file["episode"], + ) + ep_file = await session.exec(stmt) + ep_file = ep_file.one_or_none() - # Update relationships - await self.migrate_torrent_relationships( - session, old_stream, stream_id - ) - - processed += 1 - pbar.update(1) - - except Exception as e: - logger.exception( - f"Error processing torrent stream {old_stream.id}: {str(e)}" - ) - await session.rollback() - continue + if ep_file and ep_file.episode_id is None: + ep_file.episode_id = episode.id + session.add(ep_file) - async def migrate_torrent_relationships( - self, session: AsyncSession, old_stream: OldTorrentStreams, stream_id: str - ): - """Migrate torrent stream relationships""" - try: - # Migrate languages - existing_languages = await session.exec( - select(TorrentLanguageLink.language_id).where( - TorrentLanguageLink.torrent_id == stream_id - ) - ) - existing_language_ids = set(existing_languages.all()) - added_languages = set() - for lang in old_stream.languages or []: - lang_id = self.resource_tracker.get_resource_id("language", lang) - if ( - lang_id - and lang_id not in existing_language_ids - and lang_id 
not in added_languages - ): - link = TorrentLanguageLink( - torrent_id=stream_id, language_id=lang_id - ) - session.add(link) - added_languages.add(lang_id) + await session.commit() - # Migrate announce URLs - existing_announces = await session.exec( - select(TorrentAnnounceLink.announce_id).where( - TorrentAnnounceLink.torrent_id == stream_id - ) + logger.info( + f"Created {created_seasons} missing seasons and " + f"{created_episodes} missing episodes" ) - existing_announce_ids = set(existing_announces.all()) - added_announces = set() - for url in set(old_stream.announce_list): - url_id = self.resource_tracker.get_resource_id("announce_url", url) - if ( - url_id - and url_id not in existing_announce_ids - and url_id not in added_announces - ): - link = TorrentAnnounceLink(torrent_id=stream_id, announce_id=url_id) - session.add(link) - added_announces.add(url_id) - - await session.commit() except Exception as e: - logger.exception( - f"Error migrating relationships for stream {stream_id}: {str(e)}" - ) + logger.error(f"Error creating missing episodes: {str(e)}") await session.rollback() raise - async def migrate_season_and_episodes( - self, session: AsyncSession, old_season, stream_id: str + async def _gather_torrent_episodes(self, series_id: str) -> List[dict]: + """Gather all episode information from torrent streams""" + torrent_streams = await OldTorrentStreams.find({"meta_id": series_id}).to_list() + + episodes = [] + for stream in torrent_streams: + if stream.season and stream.season.episodes: + for episode in stream.season.episodes: + if episode.filename and is_video_file(episode.filename): + episodes.append( + { + "season_number": stream.season.season_number, + "episode_number": episode.episode_number, + "filename": episode.filename, + "size": episode.size, + "file_index": episode.file_index, + "title": episode.title, + "released": episode.released, + "torrent_id": stream.id, + } + ) + + return episodes + + def _organize_episodes_by_season( + self, episodes: List[dict] + ) -> Dict[int, List[dict]]: + """Organize episodes by season with deduplication""" + seasons_data = {} + for episode in episodes: + season_num = episode["season_number"] + if season_num not in seasons_data: + seasons_data[season_num] = {} + + ep_num = episode["episode_number"] + # Keep the episode with the most complete information + if ep_num not in seasons_data[season_num] or self._is_better_episode( + episode, seasons_data[season_num][ep_num] + ): + seasons_data[season_num][ep_num] = episode + + return { + season: list(episodes.values()) for season, episodes in seasons_data.items() + } + + async def _get_existing_seasons( + self, session: AsyncSession, series_id: str + ) -> List[SeriesSeason]: + """Get existing seasons for a series""" + stmt = select(SeriesSeason).where(SeriesSeason.series_id == series_id) + result = await session.exec(stmt) + return result.all() + + async def _create_series_season( + self, session: AsyncSession, series_id: str, season_number: int + ) -> SeriesSeason: + """Create a new season""" + season = SeriesSeason(series_id=series_id, season_number=season_number) + session.add(season) + await session.commit() + await session.refresh(season) + return season + + async def _get_existing_episodes( + self, session: AsyncSession, season_id: int + ) -> List[SeriesEpisode]: + """Get existing episodes for a season""" + stmt = select(SeriesEpisode).where(SeriesEpisode.season_id == season_id) + result = await session.exec(stmt) + return result.all() + + async def _process_season_episodes( + self, 
session: AsyncSession, season: SeriesSeason, episodes: List[dict] ): - """Migrate season and its episodes""" + """Process episodes for a season with improved episode matching""" try: - # Create season - season = Season( - torrent_stream_id=stream_id, season_number=old_season.season_number + # Get existing episodes for this series and season + existing_episodes = await self._get_existing_episodes(session, season.id) + existing_ep_map = {ep.episode_number: ep for ep in existing_episodes} + + # Get ALL episodes for this series to avoid wrong season linking + stmt = select(SeriesSeason).where( + SeriesSeason.series_id == season.series_id + ) + all_seasons = (await session.exec(stmt)).all() + all_episodes = {} + for s in all_seasons: + s_episodes = await self._get_existing_episodes(session, s.id) + for ep in s_episodes: + all_episodes[(s.season_number, ep.episode_number)] = ep + + # Get existing episode files + existing_file_stmt = select(EpisodeFile).where( + EpisodeFile.season_number == season.season_number ) - session.add(season) - await session.commit() # Commit to get season ID - await session.refresh(season, ["id"]) - - added_episodes = set() - # Create episodes - for old_ep in old_season.episodes or []: - if old_ep.filename and not is_video_file(old_ep.filename): + existing_files = await session.exec(existing_file_stmt) + existing_file_map = { + (ef.torrent_stream_id, ef.season_number, ef.episode_number): ef + for ef in existing_files + } + + # First ensure all episodes exist in correct season + episodes_to_create = [] + episode_map = existing_ep_map.copy() + + for ep_data in episodes: + ep_num = ep_data["episode_number"] + + # Check if episode exists in ANY season + existing_wrong_season = all_episodes.get((season.season_number, ep_num)) + if ( + existing_wrong_season + and existing_wrong_season.season_id != season.id + ): logger.warning( - f"Skipping non-video file {old_ep.filename} for episode in {stream_id}" + f"Episode S{season.season_number}E{ep_num} exists in wrong season. " + f"Skipping creation." 
) continue - if old_ep.episode_number in added_episodes: - logger.warning( - f"Skipping duplicate episode {old_ep.episode_number} for torrent {stream_id}" + if ep_num not in episode_map: + episode = SeriesEpisode( + season_id=season.id, + episode_number=ep_num, + title=ep_data.get("title") or f"Episode {ep_num}", + air_date=self._ensure_timezone(ep_data.get("released")), ) + episodes_to_create.append(episode) + + # Batch create new episodes + if episodes_to_create: + session.add_all(episodes_to_create) + await session.commit() + for episode in episodes_to_create: + await session.refresh(episode) + episode_map[episode.episode_number] = episode + + # Now handle episode files + episode_files_to_create = [] + episode_files_to_update = [] + + for ep_data in episodes: + if not ep_data.get("torrent_id"): continue - episode = Episode( - season_id=season.id, - episode_number=old_ep.episode_number, - filename=old_ep.filename, - size=old_ep.size, - file_index=old_ep.file_index, - title=old_ep.title, - released=( - old_ep.released.replace(tzinfo=timezone.utc) - if old_ep.released - else None - ), - ) - session.add(episode) - added_episodes.add(old_ep.episode_number) - await session.commit() + ep_num = ep_data["episode_number"] + # Get correct episode from this season + episode = episode_map.get(ep_num) + + # If not found in current season, check other seasons + if not episode: + other_season_ep = all_episodes.get((season.season_number, ep_num)) + if other_season_ep: + episode = other_season_ep + else: + logger.warning( + f"No episode record found for S{season.season_number}E{ep_num}" + ) + continue + + file_key = (ep_data["torrent_id"], season.season_number, ep_num) + + if file_key not in existing_file_map: + episode_file = EpisodeFile( + torrent_stream_id=ep_data["torrent_id"], + season_number=season.season_number, + episode_number=ep_num, + file_index=ep_data.get("file_index"), + filename=ep_data.get("filename"), + size=ep_data.get("size"), + episode_id=episode.id, + ) + episode_files_to_create.append(episode_file) + else: + existing_file = existing_file_map[file_key] + if ( + existing_file.file_index != ep_data.get("file_index") + or existing_file.filename != ep_data.get("filename") + or existing_file.size != ep_data.get("size") + or existing_file.episode_id != episode.id + ): + existing_file.file_index = ep_data.get("file_index") + existing_file.filename = ep_data.get("filename") + existing_file.size = ep_data.get("size") + existing_file.episode_id = episode.id + episode_files_to_update.append(existing_file) + + # Batch create/update episode files + if episode_files_to_create: + session.add_all(episode_files_to_create) + await session.commit() + + if episode_files_to_update: + session.add_all(episode_files_to_update) + await session.commit() + except Exception as e: logger.error( - f"Error migrating season and episodes for stream {stream_id}: {str(e)}" + f"Error processing episodes for season {season.season_number}: {str(e)}" ) await session.rollback() raise - async def migrate_tv_streams(self): - """Migrate TV streams using cursor-based pagination""" - total = await OldTVStreams.find().count() - if total == 0: - logger.info("No TV streams to migrate") - return - - processed = 0 - cursor = OldTVStreams.find() - - with tqdm(total=total) as pbar: - pbar.set_description("Migrating TV streams") - - async for old_stream in cursor: - try: - async with AsyncSession(self.pg_engine) as session: - # Track namespaces - for namespace in old_stream.namespaces or ["mediafusion"]: - 
self.resource_tracker.track_resource("namespace", namespace) + @staticmethod + def _is_better_episode(new_ep: dict, existing_ep: dict) -> bool: + """Determine if new episode data is better than existing""" + score_new = sum(1 for k, v in new_ep.items() if v is not None) + score_existing = sum(1 for k, v in existing_ep.items() if v is not None) + return score_new > score_existing - await self.resource_tracker.ensure_resources(session) + @staticmethod + def _ensure_timezone(dt: Optional[datetime]) -> Optional[datetime]: + """Ensure datetime is timezone-aware""" + if dt and dt.tzinfo is None: + return dt.replace(tzinfo=timezone.utc) + return dt - # Validate metadata exists - result = await session.exec( - select(BaseMetadata).where( - BaseMetadata.id == old_stream.meta_id - ) - ) - if not result.first(): - continue - # Transform and insert TV stream - stream_data = await self.transform_tv_stream(old_stream) +class StreamMigrator: + """Handle stream-related migrations""" - # Check for existing stream with the same URL - existing_stream = await session.exec( - select(TVStream).where( - TVStream.url == stream_data["url"], - TVStream.ytId == stream_data["ytId"], - ) - ) - existing_stream = existing_stream.first() - - if not existing_stream: - stream = TVStream(**stream_data) - session.add(stream) - await session.commit() - await session.refresh(stream, ["id"]) - else: - stream = existing_stream - - # Add namespace relationships - await self.migrate_tv_stream_namespaces( - session, old_stream, stream.id - ) - await session.commit() + def __init__(self, migration: DatabaseMigration): + self.migration = migration + self.resource_tracker = migration.resource_tracker + self.stats = migration.stats + self.series_migrator = SeriesDataMigrator(migration) - processed += 1 - pbar.update(1) + async def migrate_torrent_streams(self): + """Migrate torrent streams with enhanced error handling and batching""" + total = await OldTorrentStreams.find().count() + if total == 0: + logger.info("No torrent streams to migrate") + return - except Exception as e: - logger.exception(f"Error processing TV stream: {str(e)}") - await session.rollback() - continue + async for batch in self._get_stream_batches(OldTorrentStreams, total): + async with self.migration.get_session() as session: + await self._process_torrent_batch(session, batch) - async def migrate_tv_stream_namespaces( - self, session: AsyncSession, old_stream: OldTVStreams, stream_id: int - ): - """Migrate TV stream namespace relationships""" - # validate existing namespaces - stmt = select(TVStreamNamespaceLink.namespace_id).where( - TVStreamNamespaceLink.stream_id == stream_id - ) - existing_namespaces = await session.exec(stmt) - existing_namespace_ids = set(existing_namespaces.all()) - - for namespace in old_stream.namespaces or ["mediafusion"]: - namespace_id = self.resource_tracker.get_resource_id("namespace", namespace) - if namespace_id and namespace_id not in existing_namespace_ids: - link = TVStreamNamespaceLink( - stream_id=stream_id, namespace_id=namespace_id - ) - session.add(link) + async def migrate_tv_streams(self): + """Migrate TV streams with enhanced error handling and batching""" + total = await OldTVStreams.find().count() + if total == 0: + logger.info("No TV streams to migrate") + return - await session.commit() + async for batch in self._get_stream_batches(OldTVStreams, total): + async with self.migration.get_session() as session: + await self._process_tv_stream_batch(session, batch) - @staticmethod - def 
transform_torrent_stream(old_stream: OldTorrentStreams) -> dict: - """Transform torrent stream to new format""" + async def _get_stream_batches(self, model, total): + """Yield stream batches efficiently""" + cursor = model.find() + with tqdm(total=total) as pbar: + current_batch = [] + async for stream in cursor: + current_batch.append(stream) + if len(current_batch) >= self.migration.batch_size: + yield current_batch + pbar.update(len(current_batch)) + current_batch = [] + + if current_batch: + yield current_batch + pbar.update(len(current_batch)) + + def _transform_torrent_stream(self, old_stream: OldTorrentStreams) -> dict: + """Transform torrent stream to new format with validation""" return { "id": old_stream.id.lower(), "meta_id": old_stream.meta_id, "torrent_name": old_stream.torrent_name, "size": old_stream.size, - "filename": old_stream.filename, - "file_index": old_stream.file_index, "source": old_stream.source, "resolution": old_stream.resolution, "codec": old_stream.codec, @@ -818,36 +1173,242 @@ def transform_torrent_stream(old_stream: OldTorrentStreams) -> dict: ), "seeders": old_stream.seeders, "is_blocked": old_stream.is_blocked, - "created_at": old_stream.created_at, - "updated_at": old_stream.updated_at, + "filename": (old_stream.filename if not old_stream.season else None), + "file_index": (old_stream.file_index if not old_stream.season else None), "indexer_flag": ( - old_stream.indexer_flags[0] if old_stream.indexer_flags else "freeleech" + old_stream.indexer_flags[0] + if old_stream.indexer_flags + else IndexerType.FREELEACH ), } - async def transform_tv_stream(self, old_stream: OldTVStreams) -> dict: - """Transform TV stream to new format""" - # Get next ID if not exists - if not hasattr(old_stream, "id") or not old_stream.id: - async with AsyncSession(self.pg_engine) as session: - result = await session.exec( - select(func.coalesce(func.max(TVStream.id), 0)) - ) - max_id = result.one() or 0 - stream_id = max_id + 1 + async def _upsert_torrent_stream( + self, session: AsyncSession, stream_data: dict + ) -> TorrentStream: + """Upsert torrent stream with optimized query""" + stmt = select(TorrentStream).where(TorrentStream.id == stream_data["id"]) + result = await session.exec(stmt) + stream = result.one_or_none() + + if stream: + for key, value in stream_data.items(): + setattr(stream, key, value) else: - try: - stream_id = int(old_stream.id) - except (ValueError, TypeError): - async with AsyncSession(self.pg_engine) as session: - result = await session.exec( - select(func.coalesce(func.max(TVStream.id), 0)) + stream = TorrentStream(**stream_data) + session.add(stream) + + await session.commit() + await session.refresh(stream) + return stream + + async def _migrate_torrent_relationships( + self, session: AsyncSession, old_stream: OldTorrentStreams, stream_id: str + ): + """Migrate torrent stream relationships with batch processing""" + try: + # Migrate languages + if old_stream.languages: + existing = await session.exec( + select(TorrentLanguageLink.language_id).where( + TorrentLanguageLink.torrent_id == stream_id + ) + ) + existing_ids = set(row for row in existing) + + for lang in old_stream.languages: + lang_id = await self.resource_tracker.get_resource_id( + session, "language", lang + ) + if lang_id and lang_id not in existing_ids: + session.add( + TorrentLanguageLink( + torrent_id=stream_id, language_id=lang_id + ) + ) + existing_ids.add(lang_id) + + # Migrate announce URLs + if old_stream.announce_list: + existing = await session.exec( + 
select(TorrentAnnounceLink.announce_id).where( + TorrentAnnounceLink.torrent_id == stream_id + ) + ) + existing_ids = set(row for row in existing) + + for url in set(old_stream.announce_list): + url_id = await self.resource_tracker.get_resource_id( + session, "announce_url", url ) - max_id = result.one() or 0 - stream_id = max_id + 1 + if url_id and url_id not in existing_ids: + session.add( + TorrentAnnounceLink( + torrent_id=stream_id, announce_id=url_id + ) + ) + existing_ids.add(url_id) + + await session.commit() + + except Exception as e: + logger.exception( + f"Error migrating relationships for stream {stream_id}: {str(e)}" + ) + await session.rollback() + raise + + async def _process_torrent_batch( + self, session: AsyncSession, batch: List[OldTorrentStreams] + ): + """Process a batch of torrent streams with optimized operations""" + try: + # Pre-fetch metadata types for the batch + meta_ids = {stream.meta_id for stream in batch} + stmt = select(BaseMetadata.id, BaseMetadata.type).where( + BaseMetadata.id.in_(meta_ids) + ) + result = await session.exec(stmt) + meta_types = {id_: type_ for id_, type_ in result} + + for old_stream in batch: + try: + if old_stream.meta_id not in meta_types: + continue + + # Transform and upsert torrent stream + stream_data = self._transform_torrent_stream(old_stream) + torrent_stream = await self._upsert_torrent_stream( + session, stream_data + ) + + # Handle episode files for series + if ( + meta_types[old_stream.meta_id] == MediaType.SERIES + and old_stream.season + ): + await self._migrate_episode_files( + session, old_stream, torrent_stream.id + ) + + # Migrate stream relationships + await self._migrate_torrent_relationships( + session, old_stream, torrent_stream.id + ) + + self.stats.successful += 1 + + except Exception as e: + logger.exception( + f"Error processing torrent stream {old_stream.id}: {str(e)}" + ) + self.stats.add_error( + "torrent_processing", f"{old_stream.id}: {str(e)}" + ) + self.stats.failed += 1 + await session.rollback() + + except Exception as e: + logger.exception(f"Batch processing failed: {str(e)}") + raise + + async def _migrate_episode_files( + self, session: AsyncSession, old_stream: OldTorrentStreams, stream_id: str + ): + """Migrate episode files with efficient batch processing and duplicate handling""" + try: + # First get all existing episode files for this torrent stream + existing_episodes = await session.exec( + select(EpisodeFile).where(EpisodeFile.torrent_stream_id == stream_id) + ) + existing_map = { + (ep.season_number, ep.episode_number): ep for ep in existing_episodes + } + + # Deduplicate episodes from source + deduplicated_episodes = {} + for episode in old_stream.season.episodes: + if episode.filename and not is_video_file(episode.filename): + continue + + key = (old_stream.season.season_number, episode.episode_number) + # If we already have this episode, keep the one with more complete information + if key in deduplicated_episodes: + existing = deduplicated_episodes[key] + if self._is_better_episode(episode, existing): + deduplicated_episodes[key] = episode + else: + deduplicated_episodes[key] = episode + + episode_files = [] + for key, episode in deduplicated_episodes.items(): + season_num, episode_num = key + + # Check if this episode already exists in the database + if key in existing_map: + # Update existing episode if needed + existing_ep = existing_map[key] + if ( + existing_ep.file_index != episode.file_index + or existing_ep.filename != episode.filename + or existing_ep.size != episode.size + ): 
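+                            # Fields differ from the source torrent data, so update the
+                            # session-tracked EpisodeFile in place; the `continue` below then
+                            # skips creating a duplicate row for this (torrent, season, episode) key.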
+ existing_ep.file_index = episode.file_index + existing_ep.filename = episode.filename + existing_ep.size = episode.size + continue + + # Create new episode file if it doesn't exist + episode_file = EpisodeFile( + torrent_stream_id=stream_id, + season_number=season_num, + episode_number=episode_num, + file_index=episode.file_index, + filename=episode.filename, + size=episode.size, + ) + episode_files.append(episode_file) + + # Batch insert new episode files + if episode_files: + session.add_all(episode_files) + await session.commit() + + except Exception as e: + logger.error( + f"Error migrating episode files for stream {stream_id}: {str(e)}" + ) + await session.rollback() + raise + + def _is_better_episode(self, new_ep, existing_ep) -> bool: + """ + Determine if the new episode data is better than the existing one + based on completeness of information + """ + + def score_episode(ep): + score = 0 + if ep.filename: + score += 1 + if ep.file_index is not None: + score += 1 + if ep.size: + score += 1 + return score + + new_score = score_episode(new_ep) + existing_score = score_episode(existing_ep) + + # If scores are equal, prefer the one with actual file information + if new_score == existing_score: + return bool(new_ep.filename and new_ep.size) + + return new_score > existing_score + + async def _transform_tv_stream(self, old_stream: OldTVStreams) -> dict: + """Transform TV stream""" return { - "id": stream_id, "meta_id": old_stream.meta_id, "name": old_stream.name, "url": old_stream.url, @@ -860,185 +1421,439 @@ async def transform_tv_stream(self, old_stream: OldTVStreams) -> dict: "test_failure_count": old_stream.test_failure_count, "drm_key_id": old_stream.drm_key_id, "drm_key": old_stream.drm_key, - "created_at": old_stream.created_at, - "updated_at": old_stream.updated_at, } - async def verify_migration(self) -> VerificationResult: - """Verify the migration by comparing document counts and sampling data""" + async def _upsert_tv_stream( + self, session: AsyncSession, stream_data: dict + ) -> TVStream: + """Upsert TV stream with constraint handling""" + stmt = select(TVStream).where( + TVStream.url == stream_data["url"], TVStream.ytId == stream_data["ytId"] + ) + result = await session.exec(stmt) + stream = result.one_or_none() + + if stream: + for key, value in stream_data.items(): + if key != "id": # Don't update ID for existing streams + setattr(stream, key, value) + else: + stream = TVStream(**stream_data) + session.add(stream) + + await session.commit() + await session.refresh(stream) + return stream + + async def _migrate_tv_stream_namespaces( + self, session: AsyncSession, old_stream: OldTVStreams, stream_id: int + ): + """Migrate TV stream namespaces with efficient querying""" + try: + existing = await session.exec( + select(TVStreamNamespaceLink.namespace_id).where( + TVStreamNamespaceLink.stream_id == stream_id + ) + ) + existing_ids = set(row for row in existing) + + namespaces = old_stream.namespaces or ["mediafusion"] + for namespace in namespaces: + namespace_id = await self.resource_tracker.get_resource_id( + session, "namespace", namespace + ) + if namespace_id and namespace_id not in existing_ids: + session.add( + TVStreamNamespaceLink( + stream_id=stream_id, namespace_id=namespace_id + ) + ) + + await session.commit() + + except Exception as e: + logger.exception( + f"Error migrating namespaces for stream {stream_id}: {str(e)}" + ) + await session.rollback() + raise + + async def _process_tv_stream_batch( + self, session: AsyncSession, batch: List[OldTVStreams] + ): 
+ """Process a batch of TV streams with optimized operations""" + try: + for old_stream in batch: + try: + # Transform and upsert TV stream + stream_data = await self._transform_tv_stream(old_stream) + tv_stream = await self._upsert_tv_stream(session, stream_data) + + # Migrate namespace relationships + await self._migrate_tv_stream_namespaces( + session, old_stream, tv_stream.id + ) + + self.stats.successful += 1 + + except Exception as e: + logger.exception(f"Error processing TV stream: {str(e)}") + self.stats.add_error("tv_stream_processing", str(e)) + self.stats.failed += 1 + + except Exception as e: + logger.exception(f"Batch processing failed: {str(e)}") + raise + + +class MigrationVerifier: + """Enhanced verification system with detailed checking""" + + def __init__(self, migration: DatabaseMigration): + self.migration = migration + self.verification_results = {} + + async def verify_migration(self): + """Comprehensive migration verification""" logger.info("Starting migration verification...") - # Document count verification - await self.verify_counts() + try: + # Verify document counts + await self._verify_counts() - # Data sampling verification - await self.verify_samples() + # Verify data integrity + await self._verify_data_integrity() - # Relationship verification - await self.verify_relationships() + # Verify relationships + await self._verify_relationships() - # Log verification results - self.log_verification_results() + # Verify series-specific data + await self._verify_series_data() - return self.verification_result + # Log verification results + self._log_verification_results() - async def verify_counts(self): + except Exception as e: + logger.exception(f"Verification failed: {str(e)}") + raise + + async def _verify_counts(self): """Verify document counts between MongoDB and PostgreSQL""" - logger.info("Verifying document counts...") - - async with AsyncSession(self.pg_engine) as session: - # Metadata counts - mongo_movie_count = await OldMovieMetaData.count() - mongo_series_count = await OldSeriesMetaData.count() - mongo_tv_count = await OldTVMetaData.count() - - pg_movie_count = ( - await session.exec(select(func.count()).select_from(MovieMetadata)) - ).first() - pg_series_count = ( - await session.exec(select(func.count()).select_from(SeriesMetadata)) - ).first() - pg_tv_count = ( - await session.exec(select(func.count()).select_from(TVMetadata)) - ).first() - - # Stream counts - mongo_torrent_count = await OldTorrentStreams.count() - mongo_tv_streams_count = await OldTVStreams.count() - - pg_torrent_count = ( - await session.exec(select(func.count()).select_from(TorrentStream)) - ).first() - pg_tv_streams_count = ( - await session.exec(select(func.count()).select_from(TVStream)) - ).first() - - self.verification_result.counts.update( - { - "movies": (mongo_movie_count, pg_movie_count), - "series": (mongo_series_count, pg_series_count), - "tv": (mongo_tv_count, pg_tv_count), - "torrent_streams": (mongo_torrent_count, pg_torrent_count), - "tv_streams": (mongo_tv_streams_count, pg_tv_streams_count), + async with self.migration.get_session() as session: + collections = [ + (OldMovieMetaData, MovieMetadata, "movies"), + (OldSeriesMetaData, SeriesMetadata, "series"), + (OldTVMetaData, TVMetadata, "tv"), + (OldTorrentStreams, TorrentStream, "torrent_streams"), + (OldTVStreams, TVStream, "tv_streams"), + ] + + for old_model, new_model, name in collections: + mongo_count = await old_model.find().count() + pg_count = await session.scalar( + 
select(func.count()).select_from(new_model) + ) + + self.verification_results[f"{name}_count"] = { + "mongo": mongo_count, + "postgres": pg_count, + "matched": mongo_count == pg_count, } - ) - async def verify_samples(self, sample_size: int = 10): - """Verify data integrity by sampling records""" - logger.info("Verifying data samples...") + def _log_verification_results(self): + """Log detailed verification results""" + logger.info("\nVerification Results:") + logger.info("=" * 50) - collections = [ - (OldMovieMetaData, MovieMetadata, "movies"), - (OldSeriesMetaData, SeriesMetadata, "series"), - (OldTVMetaData, TVMetadata, "tv"), - ] + # Document counts + logger.info("\nDocument Counts:") + for category, data in self.verification_results.items(): + if category.endswith("_count"): + status = "✅" if data["matched"] else "❌" + logger.info( + f"{status} {category.replace('_count', '')}: " + f"MongoDB={data['mongo']}, PostgreSQL={data['postgres']}" + ) - for old_model, new_model, collection_name in collections: - failed_checks = [] - samples = await old_model.aggregate( - [{"$sample": {"size": sample_size}}], projection_model=old_model - ).to_list() + # Data integrity + if "data_integrity" in self.verification_results: + logger.info("\nData Integrity Checks:") + for check, result in self.verification_results["data_integrity"].items(): + status = "✅" if result["passed"] else "❌" + logger.info(f"{status} {check}: {result['details']}") + + # Relationship integrity + if "relationships" in self.verification_results: + logger.info("\nRelationship Checks:") + for rel, result in self.verification_results["relationships"].items(): + status = "✅" if result["valid"] else "❌" + logger.info(f"{status} {rel}: {result['details']}") + logger.info("\n".join(result["issues"][:5])) + + # Series-specific checks + if "series_data" in self.verification_results: + logger.info("\nSeries Data Checks:") + for check, result in self.verification_results["series_data"].items(): + status = "✅" if result["valid"] else "❌" + logger.info(f"{status} {check}: {result['details']}") + logger.info("\n".join(result["issues"][:5])) + logger.info("=" * 50) - async with AsyncSession(self.pg_engine) as session: - for sample in samples: - # Check base metadata - base_result = await session.exec( - select(BaseMetadata).where(BaseMetadata.id == sample.id) - ) - base_meta = base_result.first() + async def _verify_data_integrity(self): + """Verify data integrity with comprehensive checks""" + async with self.migration.get_session() as session: + self.verification_results["data_integrity"] = {} + + # Sample size for each type + sample_size = 10 + + # Verify movies + await self._verify_media_type( + session, + OldMovieMetaData, + MovieMetadata, + "movies", + sample_size, + self._verify_movie_data, + ) - # Check specific metadata - specific_result = await session.exec( - select(new_model).where(new_model.id == sample.id) - ) - specific_meta = specific_result.first() + # Verify TV + await self._verify_media_type( + session, + OldTVMetaData, + TVMetadata, + "tv", + sample_size, + self._verify_tv_data, + ) - if not all([base_meta, specific_meta]): - failed_checks.append(f"Missing metadata for {sample.id}") - continue + async def _verify_media_type( + self, + session: AsyncSession, + old_model, + new_model, + type_name: str, + sample_size: int, + verify_func, + ): + """Verify specific media type with sampling""" + samples = await old_model.aggregate( + [{"$sample": {"size": sample_size}}] + ).to_list() + + mismatches = [] + for sample in samples: + 
sample = old_model.model_validate(sample) + stmt = select(new_model).where(new_model.id == sample.id) + result = await session.exec(stmt) + new_record = result.one_or_none() - # Compare fields - if base_meta.title != sample.title or base_meta.year != sample.year: - failed_checks.append(f"Mismatch in base fields for {sample.id}") + if not new_record: + mismatches.append(f"Missing record: {sample.id}") + continue - # Compare type-specific fields - if ( - hasattr(sample, "imdb_rating") - and specific_meta.imdb_rating != sample.imdb_rating - ): - failed_checks.append(f"Mismatch in imdb_rating for {sample.id}") + # Verify specific fields + field_mismatches = await verify_func(session, sample, new_record) + if field_mismatches: + mismatches.extend(field_mismatches) - self.verification_result.sample_checks[collection_name] = failed_checks + self.verification_results["data_integrity"][type_name] = { + "passed": len(mismatches) == 0, + "details": f"Found {len(mismatches)} issues", + "mismatches": mismatches, + } - async def verify_relationships(self): - """Verify relationship integrity""" - logger.info("Verifying relationships...") + async def _verify_relationships(self): + """Verify relationship integrity across all models""" + async with self.migration.get_session() as session: + self.verification_results["relationships"] = {} - async with AsyncSession(self.pg_engine) as session: # Verify genre relationships - genre_issues = [] - genre_links = await session.exec(select(MediaGenreLink)) - for link in genre_links: - meta = ( - await session.exec( - select(BaseMetadata).where(BaseMetadata.id == link.media_id) + await self._verify_genre_relationships(session) + + # Verify catalog relationships + await self._verify_catalog_relationships(session) + + async def _verify_genre_relationships(self, session: AsyncSession): + """Verify genre relationships integrity""" + # Sample some genres + stmt = select(Genre).limit(5) + genres = (await session.exec(stmt)).all() + + issues = [] + for genre in genres: + # Check media links + stmt = select(MediaGenreLink).where(MediaGenreLink.genre_id == genre.id) + links = (await session.exec(stmt)).all() + + for link in links: + # Verify media exists + stmt = select(BaseMetadata).where(BaseMetadata.id == link.media_id) + if not (await session.exec(stmt)).one_or_none(): + issues.append( + f"Genre {genre.name} linked to non-existent media {link.media_id}" ) - ).first() - genre = ( - await session.exec(select(Genre).where(Genre.id == link.genre_id)) - ).first() - if not all([meta, genre]): - genre_issues.append( - f"Invalid genre link: {link.media_id}-{link.genre_id}" + + self.verification_results["relationships"]["genres"] = { + "valid": len(issues) == 0, + "details": f"Found {len(issues)} issues", + "issues": issues, + } + + async def _verify_catalog_relationships(self, session: AsyncSession): + """Verify catalog relationships integrity""" + # Sample some catalogs + stmt = select(Catalog).limit(5) + catalogs = (await session.exec(stmt)).all() + + issues = [] + for catalog in catalogs: + # Check media links + stmt = select(MediaCatalogLink).where( + MediaCatalogLink.catalog_id == catalog.id + ) + links = (await session.exec(stmt)).all() + + for link in links: + # Verify media exists + stmt = select(BaseMetadata).where(BaseMetadata.id == link.media_id) + if not (await session.exec(stmt)).one_or_none(): + issues.append( + f"Catalog {catalog.name} linked to non-existent media {link.media_id}" ) - # Verify torrent stream relationships - stream_issues = [] - torrent_streams = 
await session.exec(select(TorrentStream)) - for stream in torrent_streams: - meta = ( - await session.exec( - select(BaseMetadata).where(BaseMetadata.id == stream.meta_id) + self.verification_results["relationships"]["catalogs"] = { + "valid": len(issues) == 0, + "details": f"Found {len(issues)} issues", + "issues": issues, + } + + async def _verify_series_data(self): + """Verify series-specific data including seasons and episodes""" + async with self.migration.get_session() as session: + self.verification_results["series_data"] = {} + + # Sample some series + series_samples = await OldSeriesMetaData.aggregate( + [{"$sample": {"size": 5}}] + ).to_list() + + season_issues = [] + episode_issues = [] + episode_file_issues = [] + + for old_series in series_samples: + old_series = OldSeriesMetaData.model_validate(old_series) + # Get new series record + stmt = select(SeriesMetadata).where(SeriesMetadata.id == old_series.id) + new_series = (await session.exec(stmt)).one_or_none() + + if not new_series: + continue + + # Get old torrent streams for series + old_torrents = await OldTorrentStreams.find( + {"meta_id": old_series.id} + ).to_list() + + # Verify seasons and episodes + for old_torrent in old_torrents: + if not old_torrent.season: + continue + + # Check season exists + stmt = select(SeriesSeason).where( + SeriesSeason.series_id == new_series.id, + SeriesSeason.season_number == old_torrent.season.season_number, ) - ).first() - if not meta: - stream_issues.append(f"Invalid metadata reference: {stream.id}") + season = (await session.exec(stmt)).one_or_none() + + if not season: + season_issues.append( + f"Missing season {old_torrent.season.season_number} " + f"for series {old_series.id}" + ) + continue + + # Check episode files + for old_episode in old_torrent.season.episodes: + if old_episode.filename and not is_video_file( + old_episode.filename + ): + continue + + stmt = select(EpisodeFile).where( + EpisodeFile.torrent_stream_id == old_torrent.id, + EpisodeFile.season_number + == old_torrent.season.season_number, + EpisodeFile.episode_number == old_episode.episode_number, + ) + episode_file = (await session.exec(stmt)).one_or_none() + + if not episode_file: + episode_file_issues.append( + f"Missing episode file for S{season.season_number}" + f"E{old_episode.episode_number} in torrent {old_torrent.id}" + ) - self.verification_result.relationship_checks.update( - {"genres": genre_issues, "streams": stream_issues} + self.verification_results["series_data"]["seasons"] = { + "valid": len(season_issues) == 0, + "details": f"Found {len(season_issues)} season issues", + "issues": season_issues, + } + + self.verification_results["series_data"]["episode_files"] = { + "valid": len(episode_file_issues) == 0, + "details": f"Found {len(episode_file_issues)} episode file issues", + "issues": episode_file_issues, + } + + @staticmethod + async def _verify_movie_data( + session: AsyncSession, old_movie: OldMovieMetaData, new_movie: MovieMetadata + ) -> List[str]: + """Verify movie-specific fields""" + mismatches = [] + + if old_movie.imdb_rating != new_movie.imdb_rating: + mismatches.append( + f"IMDb rating mismatch for {old_movie.id}: " + f"{old_movie.imdb_rating} vs {new_movie.imdb_rating}" ) - def log_verification_results(self): - """Log verification results""" - logger.info("\nVerification Results:") - logger.info("=" * 50) + if ( + getattr(old_movie, "parent_guide_nudity_status", None) + != new_movie.parent_guide_nudity_status + ): + mismatches.append(f"Nudity status mismatch for {old_movie.id}") - # 
Log count comparisons - logger.info("\nDocument Counts:") - for category, ( - mongo_count, - pg_count, - ) in self.verification_result.counts.items(): - status = "✅" if mongo_count == pg_count else "❌" - logger.info( - f"{status} {category}: MongoDB={mongo_count}, PostgreSQL={pg_count}" + return mismatches + + @staticmethod + async def _verify_tv_data( + session: AsyncSession, old_tv: OldTVMetaData, new_tv: TVMetadata + ) -> List[str]: + """Verify TV-specific fields""" + mismatches = [] + + if old_tv.country != new_tv.country: + mismatches.append( + f"Country mismatch for {old_tv.id}: " + f"{old_tv.country} vs {new_tv.country}" + ) + + if old_tv.tv_language != new_tv.tv_language: + mismatches.append( + f"Language mismatch for {old_tv.id}: " + f"{old_tv.tv_language} vs {new_tv.tv_language}" ) - # Log sample check results - logger.info("\nSample Checks:") - for category, issues in self.verification_result.sample_checks.items(): - status = "✅" if not issues else "❌" - logger.info(f"{status} {category}: {len(issues)} issues") - for issue in issues: - logger.info(f" - {issue}") + if old_tv.logo != new_tv.logo: + mismatches.append( + f"Logo mismatch for {old_tv.id}: " f"{old_tv.logo} vs {new_tv.logo}" + ) - # Log relationship check results - logger.info("\nRelationship Checks:") - for category, issues in self.verification_result.relationship_checks.items(): - status = "✅" if not issues else "❌" - logger.info(f"{status} {category}: {len(issues)} issues") - for issue in issues: - logger.info(f" - {issue}") + return mismatches @app.command() @@ -1049,39 +1864,56 @@ def migrate( skip_verification: bool = typer.Option( False, help="Skip verification after migration" ), + only_metadata: bool = typer.Option(False, help="Migrate only metadata"), + only_streams: bool = typer.Option(False, help="Migrate only streams"), ): - """ - Migrate data from MongoDB to PostgreSQL - """ + """Enhanced migration command with flexible options""" async def run_migration(): migration = DatabaseMigration(mongo_uri, postgres_uri, batch_size) try: + # Initialize connections and resources await migration.init_connections() - await migration.initialize_resources() + async with migration.get_session() as session: + await migration.resource_tracker.initialize_from_db(session) - # Migrate data - await migration.migrate_metadata() - await migration.migrate_torrent_streams() - await migration.migrate_tv_streams() + # Initialize migrators + metadata_migrator = MetadataMigrator(migration) + stream_migrator = StreamMigrator(migration) + series_migrator = SeriesDataMigrator(migration) + verifier = MigrationVerifier(migration) - # Verify migration - if not skip_verification: - verification_result = await migration.verify_migration() + # Execute migration based on options + if not only_streams: + logger.info("Migrating metadata...") + await metadata_migrator.migrate_metadata() - # Check for critical issues - if any( - len(issues) > 0 - for issues in verification_result.relationship_checks.values() - ): - logger.error("Critical issues found during verification!") - raise typer.Exit(code=1) + if not only_metadata: + logger.info("Migrating torrent streams...") + await stream_migrator.migrate_torrent_streams() + + logger.info("Migrating TV streams...") + await stream_migrator.migrate_tv_streams() + logger.info("Migrating series seasons and episodes...") + await series_migrator.migrate_series_metadata() + + # Fix null episode_ids + async with migration.get_session() as session: + await series_migrator.fix_null_episode_ids(session) + + # 
Verify migration if not skipped + if not skip_verification: + await verifier.verify_migration() + + # Log final statistics + migration.stats.log_summary() logger.info("Migration completed successfully!") + except Exception as e: - logger.error(f"Migration failed: {str(e)}") - logger.exception("Detailed error:") + logger.exception(f"Migration failed: {str(e)}") raise typer.Exit(code=1) + finally: await migration.close_connections() @@ -1094,40 +1926,29 @@ def verify( mongo_uri: str = typer.Option(..., help="MongoDB connection URI"), postgres_uri: str = typer.Option(..., help="PostgreSQL connection URI"), ): - """ - Verify migration between MongoDB and PostgreSQL - """ + """Verify migration data integrity and relationships""" async def run_verification(): migration = DatabaseMigration(mongo_uri, postgres_uri) try: + # Initialize connections and resources await migration.init_connections() - await migration.verify_migration() - finally: - await migration.close_connections() + async with migration.get_session() as session: + await migration.resource_tracker.initialize_from_db(session) - typer.echo("Starting verification...") - asyncio.run(run_verification()) + # Initialize verifier and run verification + verifier = MigrationVerifier(migration) + await verifier.verify_migration() + except Exception as e: + logger.exception(f"Verification failed: {str(e)}") + raise typer.Exit(code=1) -@app.command() -def reset( - postgres_uri: str = typer.Option(..., help="PostgreSQL connection URI"), -): - """ - Reset PostgreSQL database - """ - - async def run_reset(): - migration = DatabaseMigration("", postgres_uri) - try: - await migration.init_connections(connect_mongo=False) - await migration.reset_database() finally: await migration.close_connections() - typer.echo("Resetting database...") - asyncio.run(run_reset()) + typer.echo("Starting verification...") + asyncio.run(run_verification()) if __name__ == "__main__": diff --git a/scrapers/rpdb.py b/scrapers/rpdb.py index 1708c7ee..fa49ae78 100644 --- a/scrapers/rpdb.py +++ b/scrapers/rpdb.py @@ -6,6 +6,7 @@ from db import schemas from db.redis_database import REDIS_ASYNC_CLIENT +from db.enums import MediaType RPDB_SUPPORTED_SET = "rpdb_supported_ids" RPDB_UNSUPPORTED_HASH = "rpdb_unsupported_ids" @@ -78,9 +79,12 @@ async def update_rpdb_poster( async def update_rpdb_posters( - metas: schemas.Metas, user_data: schemas.UserData, catalog_type: str + metas: schemas.Metas, user_data: schemas.UserData, catalog_type: MediaType ) -> schemas.Metas: - if not user_data.rpdb_config or catalog_type not in ["movie", "series"]: + if not user_data.rpdb_config or catalog_type not in [ + MediaType.MOVIE, + MediaType.SERIES, + ]: return metas rpdb_poster_base = f"https://api.ratingposterdb.com/{user_data.rpdb_config.api_key}/imdb/poster-default/" diff --git a/utils/network.py b/utils/network.py index bf5bed3b..4800bcbd 100644 --- a/utils/network.py +++ b/utils/network.py @@ -10,7 +10,7 @@ from db.schemas import UserData from utils import crypto from utils.crypto import encrypt_data -from utils.runtime_const import PRIVATE_CIDR +from utils import runtime_const from db.redis_database import REDIS_ASYNC_CLIENT @@ -249,7 +249,7 @@ async def get_mediaflow_proxy_public_ip(mediaflow_config) -> str | None: return mediaflow_config.public_ip parsed_url = urlparse(mediaflow_config.proxy_url) - if PRIVATE_CIDR.match(parsed_url.netloc): + if runtime_const.PRIVATE_CIDR.match(parsed_url.netloc): # MediaFlow proxy URL is a private IP address return None @@ -296,7 +296,7 @@ async def 
get_user_public_ip( # Get the user's public IP address user_ip = get_client_ip(request) # check if the user's IP address is a private IP address - if PRIVATE_CIDR.match(user_ip): + if runtime_const.PRIVATE_CIDR.match(user_ip): # Use host public IP address. return None return user_ip @@ -306,17 +306,23 @@ def get_request_namespace(request: Request) -> str: """ Extract the namespace from the request URL. """ + if runtime_const.SERVER_NAMESPACE: + return runtime_const.SERVER_NAMESPACE + host = request.url.hostname if "elfhosted.com" not in host: + runtime_const.SERVER_NAMESPACE = "mediafusion" return "mediafusion" subdomain = host.split(".")[0] parts = subdomain.rsplit("-mediafusion") if len(parts) == 1: # public namespace + runtime_const.SERVER_NAMESPACE = "mediafusion" return "mediafusion" namespace = f"tenant-{parts[0]}" + runtime_const.SERVER_NAMESPACE = namespace return namespace diff --git a/utils/runtime_const.py b/utils/runtime_const.py index 2905fd59..33f472de 100644 --- a/utils/runtime_const.py +++ b/utils/runtime_const.py @@ -46,3 +46,5 @@ ZILEAN_SEARCH_TTL = int( timedelta(hours=settings.zilean_search_interval_hour).total_seconds() ) + +SERVER_NAMESPACE = None From da7814aaf8f835e4561ffa88eb001287d3b1d360 Mon Sep 17 00:00:00 2001 From: mhdzumair Date: Sat, 30 Nov 2024 22:41:27 +0530 Subject: [PATCH 5/7] Added alembic migrations --- alembic.ini | 115 ++++ migrations/README | 1 + migrations/env.py | 94 +++ migrations/script.py.mako | 26 + ...9e203ecaf_setting_up_mediafusion_tables.py | 534 ++++++++++++++++++ ...6e3631b327_restructure_seasons_episodes.py | 180 ++++++ .../bf75239e668e_add_title_search_index.py | 42 ++ ..._cleanup_catalog_add_stream_added_date_.py | 87 +++ 8 files changed, 1079 insertions(+) create mode 100644 alembic.ini create mode 100644 migrations/README create mode 100644 migrations/env.py create mode 100644 migrations/script.py.mako create mode 100644 migrations/versions/4829e203ecaf_setting_up_mediafusion_tables.py create mode 100644 migrations/versions/7f6e3631b327_restructure_seasons_episodes.py create mode 100644 migrations/versions/bf75239e668e_add_title_search_index.py create mode 100644 migrations/versions/c63392160ce7_cleanup_catalog_add_stream_added_date_.py diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 00000000..7c041832 --- /dev/null +++ b/alembic.ini @@ -0,0 +1,115 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts. +# Use forward slashes (/) also on windows to provide an os agnostic path +script_location = migrations + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library. 
+# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to migration/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:migration/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +# version_path_separator = newline +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = driver://user:pass@localhost/dbname + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 120 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARNING +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARNING +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/migrations/README b/migrations/README new file mode 100644 index 00000000..e0d0858f --- /dev/null +++ b/migrations/README @@ -0,0 +1 @@ +Generic single-database configuration with an async dbapi. 
\ No newline at end of file diff --git a/migrations/env.py b/migrations/env.py new file mode 100644 index 00000000..0f75fcd5 --- /dev/null +++ b/migrations/env.py @@ -0,0 +1,94 @@ +import asyncio +from logging.config import fileConfig + +from sqlalchemy import pool +from sqlalchemy.engine import Connection +from sqlalchemy.ext.asyncio import async_engine_from_config + +from alembic import context + +from db.config import settings +from db.sql_models import * +from sqlmodel import SQLModel + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +config.set_main_option("sqlalchemy.url", settings.postgres_uri) +target_metadata = SQLModel.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def do_run_migrations(connection: Connection) -> None: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +async def run_async_migrations() -> None: + """In this scenario we need to create an Engine + and associate a connection with the context. + + """ + + connectable = async_engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + async with connectable.connect() as connection: + await connection.run_sync(do_run_migrations) + + await connectable.dispose() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode.""" + + asyncio.run(run_async_migrations()) + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/migrations/script.py.mako b/migrations/script.py.mako new file mode 100644 index 00000000..fbc4b07d --- /dev/null +++ b/migrations/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/migrations/versions/4829e203ecaf_setting_up_mediafusion_tables.py b/migrations/versions/4829e203ecaf_setting_up_mediafusion_tables.py new file mode 100644 index 00000000..be6346e1 --- /dev/null +++ b/migrations/versions/4829e203ecaf_setting_up_mediafusion_tables.py @@ -0,0 +1,534 @@ +"""Setting up MediaFusion Tables + +Revision ID: 4829e203ecaf +Revises: +Create Date: 2024-11-22 15:21:37.757408 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel.sql.sqltypes + + +# revision identifiers, used by Alembic. +revision: str = "4829e203ecaf" +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + conn = op.get_bind() + conn.execute(sa.text("CREATE EXTENSION IF NOT EXISTS pg_trgm;")) + conn.execute(sa.text("CREATE EXTENSION IF NOT EXISTS btree_gin;")) + + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + "announce_url", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("name"), + ) + op.create_table( + "base_metadata", + sa.Column("created_at", sa.DateTime(timezone=True), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column( + "type", + sa.Enum("MOVIE", "SERIES", "TV", "EVENTS", name="mediatype"), + nullable=False, + ), + sa.Column("title", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("year", sa.Integer(), nullable=True), + sa.Column("poster", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("is_poster_working", sa.Boolean(), nullable=False), + sa.Column("is_add_title_to_poster", sa.Boolean(), nullable=False), + sa.Column("background", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("runtime", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("website", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("title", "year"), + ) + op.create_index( + "idx_base_meta_type_title", "base_metadata", ["type", "title"], unique=False + ) + op.create_index( + "idx_base_title_search", + "base_metadata", + ["title"], + unique=False, + postgresql_using="gin", + postgresql_ops={"title": "gin_trgm_ops"}, + ) + op.create_index( + op.f("ix_base_metadata_type"), "base_metadata", ["type"], unique=False + ) + op.create_index( + op.f("ix_base_metadata_updated_at"), + "base_metadata", + ["updated_at"], + unique=False, + ) + op.create_table( + "catalog", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("name"), + ) + op.create_table( + "genre", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + 
sa.PrimaryKeyConstraint("id"), + ) + op.create_index(op.f("ix_genre_name"), "genre", ["name"], unique=True) + op.create_table( + "language", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("name"), + ) + op.create_table( + "namespace", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("name"), + ) + op.create_table( + "parental_certificate", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + op.f("ix_parental_certificate_name"), + "parental_certificate", + ["name"], + unique=True, + ) + op.create_table( + "star", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index(op.f("ix_star_name"), "star", ["name"], unique=False) + op.create_table( + "aka_title", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("title", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("media_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.ForeignKeyConstraint(["media_id"], ["base_metadata.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + op.f("ix_aka_title_media_id"), "aka_title", ["media_id"], unique=False + ) + op.create_index(op.f("ix_aka_title_title"), "aka_title", ["title"], unique=False) + op.create_table( + "media_catalog_link", + sa.Column("media_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("catalog_id", sa.Integer(), nullable=False), + sa.Column("priority", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["catalog_id"], ["catalog.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["media_id"], ["base_metadata.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("media_id", "catalog_id"), + postgresql_partition_by="LIST (catalog_id)", + ) + op.create_index( + op.f("ix_media_catalog_link_priority"), + "media_catalog_link", + ["priority"], + unique=False, + ) + op.create_table( + "media_genre_link", + sa.Column("media_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("genre_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["genre_id"], ["genre.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["media_id"], ["base_metadata.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("media_id", "genre_id"), + ) + op.create_table( + "media_parental_certificate_link", + sa.Column("media_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("certificate_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ["certificate_id"], ["parental_certificate.id"], ondelete="CASCADE" + ), + sa.ForeignKeyConstraint(["media_id"], ["base_metadata.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("media_id", "certificate_id"), + ) + op.create_table( + "media_star_link", + sa.Column("media_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("star_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["media_id"], ["base_metadata.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["star_id"], ["star.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("media_id", "star_id"), + ) + op.create_table( + "movie_metadata", + 
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("imdb_rating", sa.Float(), nullable=True), + sa.Column( + "parent_guide_nudity_status", + sa.Enum( + "NONE", "MILD", "MODERATE", "SEVERE", "UNKNOWN", name="nuditystatus" + ), + nullable=False, + ), + sa.ForeignKeyConstraint(["id"], ["base_metadata.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + op.f("ix_movie_metadata_imdb_rating"), + "movie_metadata", + ["imdb_rating"], + unique=False, + ) + op.create_index( + op.f("ix_movie_metadata_parent_guide_nudity_status"), + "movie_metadata", + ["parent_guide_nudity_status"], + unique=False, + ) + op.create_index( + op.f("ix_movie_metadata_updated_at"), + "movie_metadata", + ["updated_at"], + unique=False, + ) + op.create_table( + "series_metadata", + sa.Column("created_at", sa.DateTime(timezone=True), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("end_year", sa.Integer(), nullable=True), + sa.Column("imdb_rating", sa.Float(), nullable=True), + sa.Column( + "parent_guide_nudity_status", + sa.Enum( + "NONE", "MILD", "MODERATE", "SEVERE", "UNKNOWN", name="nuditystatus" + ), + nullable=False, + ), + sa.ForeignKeyConstraint(["id"], ["base_metadata.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + op.f("ix_series_metadata_end_year"), + "series_metadata", + ["end_year"], + unique=False, + ) + op.create_index( + op.f("ix_series_metadata_imdb_rating"), + "series_metadata", + ["imdb_rating"], + unique=False, + ) + op.create_index( + op.f("ix_series_metadata_parent_guide_nudity_status"), + "series_metadata", + ["parent_guide_nudity_status"], + unique=False, + ) + op.create_index( + op.f("ix_series_metadata_updated_at"), + "series_metadata", + ["updated_at"], + unique=False, + ) + op.create_table( + "torrent_stream", + sa.Column("created_at", sa.DateTime(timezone=True), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("meta_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("torrent_name", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("size", sa.BigInteger(), nullable=False), + sa.Column("filename", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("file_index", sa.Integer(), nullable=True), + sa.Column("source", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("resolution", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("codec", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("quality", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("audio", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("seeders", sa.Integer(), nullable=True), + sa.Column("is_blocked", sa.Boolean(), nullable=False), + sa.Column( + "indexer_flag", + sa.Enum("FREELEACH", "SEMI_PRIVATE", "PRIVATE", name="indexertype"), + nullable=False, + ), + sa.ForeignKeyConstraint(["meta_id"], ["base_metadata.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + "idx_torrent_meta_created", + "torrent_stream", + ["meta_id", "created_at"], + unique=False, + postgresql_where="NOT is_blocked", + ) + op.create_index( + 
"idx_torrent_meta_source", "torrent_stream", ["meta_id", "source"], unique=False + ) + op.create_index( + "idx_torrent_stream_meta_blocked", + "torrent_stream", + ["meta_id"], + unique=False, + postgresql_where="NOT is_blocked", + ) + op.create_index( + op.f("ix_torrent_stream_is_blocked"), + "torrent_stream", + ["is_blocked"], + unique=False, + ) + op.create_index( + op.f("ix_torrent_stream_meta_id"), "torrent_stream", ["meta_id"], unique=False + ) + op.create_index( + op.f("ix_torrent_stream_source"), "torrent_stream", ["source"], unique=False + ) + op.create_index( + op.f("ix_torrent_stream_updated_at"), + "torrent_stream", + ["updated_at"], + unique=False, + ) + op.create_table( + "tv_metadata", + sa.Column("created_at", sa.DateTime(timezone=True), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("country", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("tv_language", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("logo", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.ForeignKeyConstraint(["id"], ["base_metadata.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + op.f("ix_tv_metadata_country"), "tv_metadata", ["country"], unique=False + ) + op.create_index( + op.f("ix_tv_metadata_tv_language"), "tv_metadata", ["tv_language"], unique=False + ) + op.create_index( + op.f("ix_tv_metadata_updated_at"), "tv_metadata", ["updated_at"], unique=False + ) + op.create_table( + "tv_stream", + sa.Column("created_at", sa.DateTime(timezone=True), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("meta_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("url", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("ytId", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("externalUrl", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("source", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("country", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("is_working", sa.Boolean(), nullable=False), + sa.Column("test_failure_count", sa.Integer(), nullable=False), + sa.Column("drm_key_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("drm_key", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("behaviorHints", sa.JSON(), nullable=True), + sa.ForeignKeyConstraint(["meta_id"], ["base_metadata.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("url", "ytId"), + ) + op.create_index( + "idx_tv_stream_meta_working", + "tv_stream", + ["meta_id", "is_working"], + unique=False, + ) + op.create_index( + op.f("ix_tv_stream_country"), "tv_stream", ["country"], unique=False + ) + op.create_index( + op.f("ix_tv_stream_is_working"), "tv_stream", ["is_working"], unique=False + ) + op.create_index( + op.f("ix_tv_stream_meta_id"), "tv_stream", ["meta_id"], unique=False + ) + op.create_index(op.f("ix_tv_stream_source"), "tv_stream", ["source"], unique=False) + op.create_index( + op.f("ix_tv_stream_updated_at"), "tv_stream", ["updated_at"], unique=False + ) + op.create_table( + "season", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column( + "torrent_stream_id", sqlmodel.sql.sqltypes.AutoString(), 
nullable=False + ), + sa.Column("season_number", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ["torrent_stream_id"], ["torrent_stream.id"], ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + "idx_season_torrent_number", + "season", + ["torrent_stream_id", "season_number"], + unique=False, + ) + op.create_table( + "torrent_announce_link", + sa.Column("torrent_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("announce_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ["announce_id"], ["announce_url.id"], ondelete="CASCADE" + ), + sa.ForeignKeyConstraint( + ["torrent_id"], ["torrent_stream.id"], ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("torrent_id", "announce_id"), + ) + op.create_table( + "torrent_language_link", + sa.Column("torrent_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("language_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["language_id"], ["language.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint( + ["torrent_id"], ["torrent_stream.id"], ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("torrent_id", "language_id"), + ) + op.create_table( + "tv_stream_namespace_link", + sa.Column("stream_id", sa.Integer(), nullable=False), + sa.Column("namespace_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["namespace_id"], ["namespace.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["stream_id"], ["tv_stream.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("stream_id", "namespace_id"), + ) + op.create_table( + "episode", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("season_id", sa.Integer(), nullable=False), + sa.Column("episode_number", sa.Integer(), nullable=False), + sa.Column("filename", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("size", sa.BigInteger(), nullable=True), + sa.Column("file_index", sa.Integer(), nullable=True), + sa.Column("title", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("released", sa.DateTime(timezone=True), nullable=True), + sa.ForeignKeyConstraint(["season_id"], ["season.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("season_id", "episode_number"), + ) + op.create_index( + op.f("ix_episode_episode_number"), "episode", ["episode_number"], unique=False + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_index(op.f("ix_episode_episode_number"), table_name="episode") + op.drop_table("episode") + op.drop_table("tv_stream_namespace_link") + op.drop_table("torrent_language_link") + op.drop_table("torrent_announce_link") + op.drop_index("idx_season_torrent_number", table_name="season") + op.drop_table("season") + op.drop_index(op.f("ix_tv_stream_updated_at"), table_name="tv_stream") + op.drop_index(op.f("ix_tv_stream_source"), table_name="tv_stream") + op.drop_index(op.f("ix_tv_stream_meta_id"), table_name="tv_stream") + op.drop_index(op.f("ix_tv_stream_is_working"), table_name="tv_stream") + op.drop_index(op.f("ix_tv_stream_country"), table_name="tv_stream") + op.drop_index("idx_tv_stream_meta_working", table_name="tv_stream") + op.drop_table("tv_stream") + op.drop_index(op.f("ix_tv_metadata_updated_at"), table_name="tv_metadata") + op.drop_index(op.f("ix_tv_metadata_tv_language"), table_name="tv_metadata") + op.drop_index(op.f("ix_tv_metadata_country"), table_name="tv_metadata") + op.drop_table("tv_metadata") + op.drop_index(op.f("ix_torrent_stream_updated_at"), table_name="torrent_stream") + op.drop_index(op.f("ix_torrent_stream_source"), table_name="torrent_stream") + op.drop_index(op.f("ix_torrent_stream_meta_id"), table_name="torrent_stream") + op.drop_index(op.f("ix_torrent_stream_is_blocked"), table_name="torrent_stream") + op.drop_index( + "idx_torrent_stream_meta_blocked", + table_name="torrent_stream", + postgresql_where="NOT is_blocked", + ) + op.drop_index("idx_torrent_meta_source", table_name="torrent_stream") + op.drop_index( + "idx_torrent_meta_created", + table_name="torrent_stream", + postgresql_where="NOT is_blocked", + ) + op.drop_table("torrent_stream") + op.drop_index(op.f("ix_series_metadata_updated_at"), table_name="series_metadata") + op.drop_index( + op.f("ix_series_metadata_parent_guide_nudity_status"), + table_name="series_metadata", + ) + op.drop_index(op.f("ix_series_metadata_imdb_rating"), table_name="series_metadata") + op.drop_index(op.f("ix_series_metadata_end_year"), table_name="series_metadata") + op.drop_table("series_metadata") + op.drop_index(op.f("ix_movie_metadata_updated_at"), table_name="movie_metadata") + op.drop_index( + op.f("ix_movie_metadata_parent_guide_nudity_status"), + table_name="movie_metadata", + ) + op.drop_index(op.f("ix_movie_metadata_imdb_rating"), table_name="movie_metadata") + op.drop_table("movie_metadata") + op.drop_table("media_star_link") + op.drop_table("media_parental_certificate_link") + op.drop_table("media_genre_link") + op.drop_index( + op.f("ix_media_catalog_link_priority"), table_name="media_catalog_link" + ) + op.drop_table("media_catalog_link") + op.drop_index(op.f("ix_aka_title_title"), table_name="aka_title") + op.drop_index(op.f("ix_aka_title_media_id"), table_name="aka_title") + op.drop_table("aka_title") + op.drop_index(op.f("ix_star_name"), table_name="star") + op.drop_table("star") + op.drop_index( + op.f("ix_parental_certificate_name"), table_name="parental_certificate" + ) + op.drop_table("parental_certificate") + op.drop_table("namespace") + op.drop_table("language") + op.drop_index(op.f("ix_genre_name"), table_name="genre") + op.drop_table("genre") + op.drop_table("catalog") + op.drop_index(op.f("ix_base_metadata_updated_at"), table_name="base_metadata") + op.drop_index(op.f("ix_base_metadata_type"), table_name="base_metadata") + op.drop_index( + "idx_base_title_search", + table_name="base_metadata", + postgresql_using="gin", + postgresql_ops={"title": "gin_trgm_ops"}, + ) + 
op.drop_index("idx_base_meta_type_title", table_name="base_metadata") + op.drop_table("base_metadata") + op.drop_table("announce_url") + + # drop enums + op.execute("DROP TYPE IF EXISTS mediatype;") + op.execute("DROP TYPE IF EXISTS nuditystatus;") + op.execute("DROP TYPE IF EXISTS indexertype;") + + # drop extensions + op.execute("DROP EXTENSION IF EXISTS pg_trgm;") + op.execute("DROP EXTENSION IF EXISTS btree_gin;") + # ### end Alembic commands ### diff --git a/migrations/versions/7f6e3631b327_restructure_seasons_episodes.py b/migrations/versions/7f6e3631b327_restructure_seasons_episodes.py new file mode 100644 index 00000000..f4d3e6de --- /dev/null +++ b/migrations/versions/7f6e3631b327_restructure_seasons_episodes.py @@ -0,0 +1,180 @@ +"""restructure seasons & episodes + +Revision ID: 7f6e3631b327 +Revises: bf75239e668e +Create Date: 2024-11-30 08:08:03.960297 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel.sql.sqltypes +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision: str = "7f6e3631b327" +down_revision: Union[str, None] = "bf75239e668e" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + "series_season", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("series_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("season_number", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ["series_id"], + ["series_metadata.id"], + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("series_id", "season_number"), + ) + op.create_index( + op.f("ix_series_season_season_number"), + "series_season", + ["season_number"], + unique=False, + ) + op.create_index( + op.f("ix_series_season_series_id"), "series_season", ["series_id"], unique=False + ) + op.create_table( + "series_episode", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("season_id", sa.Integer(), nullable=False), + sa.Column("episode_number", sa.Integer(), nullable=False), + sa.Column("title", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("plot", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("runtime", sa.Integer(), nullable=True), + sa.Column("air_date", sa.DateTime(timezone=True), nullable=True), + sa.Column("imdb_rating", sa.Float(), nullable=True), + sa.Column("poster", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("is_poster_working", sa.Boolean(), nullable=False), + sa.ForeignKeyConstraint( + ["season_id"], + ["series_season.id"], + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("season_id", "episode_number"), + ) + op.create_index( + op.f("ix_series_episode_episode_number"), + "series_episode", + ["episode_number"], + unique=False, + ) + op.create_index( + op.f("ix_series_episode_season_id"), + "series_episode", + ["season_id"], + unique=False, + ) + op.create_table( + "episode_file", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column( + "torrent_stream_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False + ), + sa.Column("season_number", sa.Integer(), nullable=False), + sa.Column("episode_number", sa.Integer(), nullable=False), + sa.Column("file_index", sa.Integer(), nullable=True), + sa.Column("filename", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("size", sa.BigInteger(), nullable=True), + 
sa.Column("episode_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint( + ["episode_id"], + ["series_episode.id"], + ), + sa.ForeignKeyConstraint( + ["torrent_stream_id"], + ["torrent_stream.id"], + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("torrent_stream_id", "season_number", "episode_number"), + ) + op.create_index( + op.f("ix_episode_file_torrent_stream_id"), + "episode_file", + ["torrent_stream_id"], + unique=False, + ) + op.drop_index("ix_episode_episode_number", table_name="episode") + op.drop_table("episode") + op.drop_index("idx_season_torrent_number", table_name="season") + op.drop_table("season") + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + "season", + sa.Column( + "id", + sa.INTEGER(), + server_default=sa.text("nextval('season_id_seq'::regclass)"), + autoincrement=True, + nullable=False, + ), + sa.Column( + "torrent_stream_id", sa.VARCHAR(), autoincrement=False, nullable=False + ), + sa.Column("season_number", sa.INTEGER(), autoincrement=False, nullable=False), + sa.ForeignKeyConstraint( + ["torrent_stream_id"], + ["torrent_stream.id"], + name="season_torrent_stream_id_fkey", + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint("id", name="season_pkey"), + postgresql_ignore_search_path=False, + ) + op.create_index( + "idx_season_torrent_number", + "season", + ["torrent_stream_id", "season_number"], + unique=False, + ) + op.create_table( + "episode", + sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), + sa.Column("season_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column("episode_number", sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column("filename", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column("size", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("file_index", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column("title", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column( + "released", + postgresql.TIMESTAMP(timezone=True), + autoincrement=False, + nullable=True, + ), + sa.ForeignKeyConstraint( + ["season_id"], + ["season.id"], + name="episode_season_id_fkey", + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint("id", name="episode_pkey"), + sa.UniqueConstraint( + "season_id", "episode_number", name="episode_season_id_episode_number_key" + ), + ) + op.create_index( + "ix_episode_episode_number", "episode", ["episode_number"], unique=False + ) + op.drop_index(op.f("ix_episode_file_torrent_stream_id"), table_name="episode_file") + op.drop_table("episode_file") + op.drop_index(op.f("ix_series_episode_season_id"), table_name="series_episode") + op.drop_index(op.f("ix_series_episode_episode_number"), table_name="series_episode") + op.drop_table("series_episode") + op.drop_index(op.f("ix_series_season_series_id"), table_name="series_season") + op.drop_index(op.f("ix_series_season_season_number"), table_name="series_season") + op.drop_table("series_season") + # ### end Alembic commands ### diff --git a/migrations/versions/bf75239e668e_add_title_search_index.py b/migrations/versions/bf75239e668e_add_title_search_index.py new file mode 100644 index 00000000..e10e8af2 --- /dev/null +++ b/migrations/versions/bf75239e668e_add_title_search_index.py @@ -0,0 +1,42 @@ +"""add title search index + +Revision ID: bf75239e668e +Revises: c63392160ce7 +Create Date: 2024-11-24 08:21:14.384259 + +""" +from typing import Sequence, Union + +from alembic import op +import 
sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision: str = 'bf75239e668e' +down_revision: Union[str, None] = 'c63392160ce7' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('aka_title', sa.Column('title_tsv', postgresql.TSVECTOR(), sa.Computed("to_tsvector('simple'::regconfig, title)", ), nullable=False)) + op.create_index('idx_aka_title_fts', 'aka_title', ['title_tsv'], unique=False, postgresql_using='gin') + op.create_index('idx_aka_title_trgm', 'aka_title', ['title'], unique=False, postgresql_using='gin', postgresql_ops={'title': 'gin_trgm_ops'}) + op.add_column('base_metadata', sa.Column('title_tsv', postgresql.TSVECTOR(), sa.Computed("to_tsvector('simple'::regconfig, title)", ), nullable=False)) + op.drop_index('idx_base_title_search', table_name='base_metadata', postgresql_using='gin') + op.create_index('idx_base_title_fts', 'base_metadata', ['title_tsv'], unique=False, postgresql_using='gin') + op.create_index('idx_base_title_trgm', 'base_metadata', ['title'], unique=False, postgresql_using='gin', postgresql_ops={'title': 'gin_trgm_ops'}) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index('idx_base_title_trgm', table_name='base_metadata', postgresql_using='gin', postgresql_ops={'title': 'gin_trgm_ops'}) + op.drop_index('idx_base_title_fts', table_name='base_metadata', postgresql_using='gin') + op.create_index('idx_base_title_search', 'base_metadata', ['title'], unique=False, postgresql_using='gin') + op.drop_column('base_metadata', 'title_tsv') + op.drop_index('idx_aka_title_trgm', table_name='aka_title', postgresql_using='gin', postgresql_ops={'title': 'gin_trgm_ops'}) + op.drop_index('idx_aka_title_fts', table_name='aka_title', postgresql_using='gin') + op.drop_column('aka_title', 'title_tsv') + # ### end Alembic commands ### diff --git a/migrations/versions/c63392160ce7_cleanup_catalog_add_stream_added_date_.py b/migrations/versions/c63392160ce7_cleanup_catalog_add_stream_added_date_.py new file mode 100644 index 00000000..a82ec416 --- /dev/null +++ b/migrations/versions/c63392160ce7_cleanup_catalog_add_stream_added_date_.py @@ -0,0 +1,87 @@ +"""cleanup catalog & add stream added date for sorting metadata + +Revision ID: c63392160ce7 +Revises: 4829e203ecaf +Create Date: 2024-11-23 14:05:46.061957 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlmodel.sql.sqltypes +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = "c63392160ce7" +down_revision: Union[str, None] = "4829e203ecaf" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column( + "base_metadata", + sa.Column("last_stream_added", sa.DateTime(timezone=True), nullable=False), + ) + op.create_index( + "idx_base_meta_last_stream_added", + "base_metadata", + ["last_stream_added"], + unique=False, + ) + op.create_index( + "idx_last_stream_added", + "base_metadata", + ["last_stream_added", "type"], + unique=False, + ) + op.create_index( + op.f("ix_base_metadata_last_stream_added"), + "base_metadata", + ["last_stream_added"], + unique=False, + ) + op.drop_index( + op.f("ix_media_catalog_link_priority"), table_name="media_catalog_link" + ) + op.drop_table("media_catalog_link") + op.create_table( + "media_catalog_link", + sa.Column("media_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("catalog_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["catalog_id"], ["catalog.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["media_id"], ["base_metadata.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("media_id", "catalog_id"), + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table("media_catalog_link") + op.create_table( + "media_catalog_link", + sa.Column("media_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("catalog_id", sa.Integer(), nullable=False), + sa.Column("priority", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["catalog_id"], ["catalog.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["media_id"], ["base_metadata.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("media_id", "catalog_id"), + postgresql_partition_by="LIST (catalog_id)", + ) + op.create_index( + op.f("ix_media_catalog_link_priority"), + "media_catalog_link", + ["priority"], + unique=False, + ) + op.drop_index( + op.f("ix_base_metadata_last_stream_added"), table_name="base_metadata" + ) + op.drop_index("idx_last_stream_added", table_name="base_metadata") + op.drop_index("idx_base_meta_last_stream_added", table_name="base_metadata") + op.drop_column("base_metadata", "last_stream_added") + # ### end Alembic commands ### From 6d2ca85782abafe738de8d3738b38fef60fa63af Mon Sep 17 00:00:00 2001 From: mhdzumair Date: Tue, 3 Dec 2024 07:41:40 +0530 Subject: [PATCH 6/7] update libs lock file --- Pipfile.lock | 841 +++++++++++++++++++++++++++++---------------------- 1 file changed, 475 insertions(+), 366 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index d1f22eee..7a992389 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "10c314fc13fb936a0433e9a5081840b3143c19ef66c54d21e501e14575f49b07" + "sha256": "11e1cee026366ea5870046e84b0226aa9a2cb39b0fc66ab69ba714b1e7a47f9a" }, "pipfile-spec": 6, "requires": { @@ -33,93 +33,93 @@ }, "aiohappyeyeballs": { "hashes": [ - "sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586", - "sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572" + "sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745", + "sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8" ], "markers": "python_version >= '3.8'", - "version": "==2.4.3" + "version": "==2.4.4" }, "aiohttp": { "hashes": [ - "sha256:00618c37a350884c08e87cf9a6532be274d564227ac49e0b474cf41f27e1f190", - "sha256:0983d0ce329f2f9dbeb355c3744bd6333f34e0dc56025b6b7d4f285b90acb51e", - "sha256:0e3b5bfef913d6be270c81976fbc0cbf66625cd92663bbb7e03b3adbd6aa4ac6", - 
"sha256:16bda233a7b159ab08107e8858fedca90a9de287057fab54cafde51bd83f9819", - "sha256:17e6b9d8e29e3bfc7f893f327e92c9769d3582cee2fb1652c1431ac3f60115a0", - "sha256:1c5838a68e31712354129add1b5fe32b06aa05275f835130edc650e6288af05f", - "sha256:220bbce18b3046973465be45415430f1cab39d7fdc40cbcf0a8c05485c6902fe", - "sha256:2a5afbd805e449048ecebb1a256176e953d4ca9e48bab387d4d1c8524f1c7a95", - "sha256:2b6f8716044ae5e5f2a3b4e4b6bfee48e97c8b2a92e56f43aadd728c7fd26b7d", - "sha256:2d2ca685c6a851ce64e511fbcb906e4dd97d13e567ca7ecb5cb30b184e15dc6d", - "sha256:2fa50ddc6b21cc1ae23e13524d6f75b27e279fdf5cf905b2df6fd171891ac4e2", - "sha256:3260c77cff4e35245bc517658bd54d7a64787f71f3c4f723877c82f22835b032", - "sha256:336bbf7a33dd8cb4a7afb98c70e9935a81e5e88f7ac595ba2e84b1fb5da190d6", - "sha256:35d4545e7684da7a954ffc2dce495462cb16a902dffdebe98572408f6aaaee83", - "sha256:35dafc70051b6cbd6dafb533b4e3f0df6225a4896be373ef86367b2987409331", - "sha256:3c5e4f1ba5059b85e05c551961a448ce2689c6249ed6a2e2174796842c191d10", - "sha256:3e6523f39071a01757048985e4cc22d04aa130bc40d9128503f3a61a3ee98328", - "sha256:4448c9c7f77bad48a6569062c0c16deb77fbb7363de1dc71ed087f66fb3b3c96", - "sha256:47c6663df9446aa848b478413219600da4b54bc0409e1ac4bc80fb1a81501363", - "sha256:481075a1949de79a8a6841e0086f2f5f464785c592cf527ed0db2c0cbd0e1ba2", - "sha256:4867008617bbf86e9fb5b00f72dd0e3a00a579b32233caff834320867f9b7cac", - "sha256:48be7cff468c9c0d86a02e6a826e1fe159094b16d5aa2c17703e7317f791b0f9", - "sha256:52913bb8a0a72a57479f54b281300c9d23036aa9aa3ebbc9a32a643484eadfc2", - "sha256:561b9596a9f90266673ef0b950c27e04ab597cdb53785e2ac91b83b33c31b509", - "sha256:58bd94ad48143e1d42e05fc055da41de0a9933f378ad87760595b8aec83d317b", - "sha256:5a20ddaa58fea717177fac9a4a1fb8b39be868aa4fed2af6de4313b7a08f0f71", - "sha256:64e6b14608a56a4c76c60daac730b0c0eeaf9d10dfc3231f7fc26521a0d628fd", - "sha256:658052941324edea3dee1f681375e70779f55e437e07bdfc4b5bbe65ad53cefb", - "sha256:65fd04f1fea668ad1af48ac31b752000e222dccffedcad3de8ccf9d34489ccd3", - "sha256:6c829471a9e2266da4a0666f8a9e215f19320f79778af379c1c7db324ac24ed2", - "sha256:72779bfb34d6d6b51e55a7f4901b410e416b5431738b367d49696928c91a2ca8", - "sha256:7565689e86a88c1d258351ebd14e343337b76a56ca5c0a2c1db96ec28149386f", - "sha256:7bc9d64a2350cbb29a9732334e1a0743cbb6844de1731cbdf5949b235653f3fd", - "sha256:7d141631a7348038fc7b5d1a81b3c9afa9aa056188ded7902fe754028fdea5c5", - "sha256:7d71d4ac0792ff89541179394d303be846a0b6cd3821ae67286ee69ecec16f9f", - "sha256:7f3be4961a5c2c670f31caab7641a37ea2a97031f0d8ae15bcfd36b6bf273200", - "sha256:80b3ac163145660ce660aed2f1005e6d4de840d39728990b7250525eeec4e4a8", - "sha256:832e58d9454fe501b0d092cdf660c0e34e16005f61acd06e1c79b0fc45019c94", - "sha256:85be3899e6860dd2cd3f4370ded6708e939d00d5ec922a8eb328d114db605a47", - "sha256:8ca580edc3ccd7f6ea76ad9cf59f5a8756d338e770b5eda7be26bcda8fa7ef53", - "sha256:8eeaac75203da1a54afe1faea3c855a1973026b54929112aa9b67bceadbcb0ca", - "sha256:98f596cf59292e779bc387f22378a3d2c5e052c9fe2bf822ac4f547c6fe57758", - "sha256:9bf52642b12d70d78c18882915201bc5345f7c8f0f2ab8919d99b886aa6475a7", - "sha256:9ce8eb6444bb6e862feca664ce365afa8e2e32db24dcf1a502719a8a002f9274", - "sha256:9f4aadfea6b48cfa17aef1a68ba6bee5a0246374f5a588e299a4f4ff5bd1c77b", - "sha256:a0ed9f1f2697713c48efc9ec483ad5d062e4aa91854f090a3eba0b19c002851d", - "sha256:a750ee5a177e0f873d6b2d7d0fa6e1e7c658fc0ca8ea56438dcba2ac94bedb09", - "sha256:a7def89a41fe32120d89cd4577f5efbab3c52234c5890066ced8a2f7202dff88", - "sha256:a81525430da5ca356fae6e889daeb6f5cc0d5f0cef88e59cdde48e2394ea1365", - 
"sha256:aa7deebb4bc5143745e6282139d7b9de50beb6d06609df64d2c993ef496bc7eb", - "sha256:afba47981ff73b1794c00dce774334dcfe62664b3b4f78f278b77d21ce9daf43", - "sha256:b64fa6b76b35b695cd3e5c42a4e568cbea8d41c9e59165e2a43da00976e2027e", - "sha256:beae08f900b2980af4353a0200eb162b39f276fd8a6e43079a540f83964671f4", - "sha256:c0dbae99737badf3f5e862088a118e28d3b36f03eb608a6382eddfd68178e05b", - "sha256:c3f397e0511a0ec4fe331e602fc057dfd336d352062deb9969ebd81e253a149c", - "sha256:c665ed4b52256614858b20711bbbd2755b0e19ec86870f8ff1645acf9ae9e760", - "sha256:c92e763cf641e10ad9342597d20060ba23de5e411aada96660e679e3f9371189", - "sha256:cb51a81cb637b9a072c9cfae1839e35c6579638861eb3479eb5d6e6ce8bc6782", - "sha256:cf8f05f4abe3288fe2e106e1461fd20d8abf6103886ddfb6d746a5b8fb830d2b", - "sha256:d0f9dbe9763c014c408ad51a027dc9582518e992dc63e2ffe359ac1b4840a560", - "sha256:d21951756690f5d86d0215da38eb0fd65def03b5e2a1c08a4a39718a6d0d48f2", - "sha256:d479c1fdcc920056a06d04059db52eb8590ecbbb3acdcaeeea26a88ff782e94a", - "sha256:d6f9e5fd1b3ecbaca3e04a15a02d1fa213248608caee99fd5bdddd4759959cf7", - "sha256:d96b93a46a3742880fa21bcb35c6c40cf27714ec0fb8ec85fe444d73b95131b9", - "sha256:da343903214bf9f9d314b913caa499fa19e26d73e6e23a3db7d4898ea6d47028", - "sha256:daea456b79ca2bacc7f062845bbb1139c3b3231fc83169da5a682cf385416dd1", - "sha256:dd2ca84e5f7a35f313a62eb7d6a50bac6760b60bafce34586750712731c0aeff", - "sha256:df9bf08eb93611b1d4d6245b6fecf88728e90eece00e00d554e1b0c445557d83", - "sha256:e9ac0cce897904b77e109e5403ed713187dbdf96832bfd061ac07164264be16c", - "sha256:e9f9fd5c672c962389429abd11ed32c9c93f7932fd58584cae1e43951b141c6b", - "sha256:ea68db69f2a4ddc24b28b8e754fc0b963ed7f9b9a76137f06fe44643d6821fbd", - "sha256:f4f1779c3142d913c509c2ed1de8b8f920e07a5cd65ac1f57c61cfb6bfded5a4", - "sha256:f7fd9c11ffad6b022bf02a41a70418cb2ab3b33f2c27842a5999e3ab78daf280", - "sha256:f8dd02b44555893adfe7cc4b3b454fee04f9dcec45cf66ef5bb53ebf393f0505", - "sha256:fe503a76b9e3a13b62e64545693c9463afe9d429e0909120f7bb66de91ed8bc2", - "sha256:fee12d8487b0df2b683424cca2a0d8fb7281d5607518d742e98119a74af01026" + "sha256:0411777249f25d11bd2964a230b3ffafcbed6cd65d0f2b132bc2b8f5b8c347c7", + "sha256:0a97d657f6cf8782a830bb476c13f7d777cfcab8428ac49dde15c22babceb361", + "sha256:0b5a5009b0159a8f707879dc102b139466d8ec6db05103ec1520394fdd8ea02c", + "sha256:0bcb7f6976dc0b6b56efde13294862adf68dd48854111b422a336fa729a82ea6", + "sha256:14624d96f0d69cf451deed3173079a68c322279be6030208b045ab77e1e8d550", + "sha256:15c4e489942d987d5dac0ba39e5772dcbed4cc9ae3710d1025d5ba95e4a5349c", + "sha256:176f8bb8931da0613bb0ed16326d01330066bb1e172dd97e1e02b1c27383277b", + "sha256:17af09d963fa1acd7e4c280e9354aeafd9e3d47eaa4a6bfbd2171ad7da49f0c5", + "sha256:1a8b13b9950d8b2f8f58b6e5842c4b842b5887e2c32e3f4644d6642f1659a530", + "sha256:202f40fb686e5f93908eee0c75d1e6fbe50a43e9bd4909bf3bf4a56b560ca180", + "sha256:21cbe97839b009826a61b143d3ca4964c8590d7aed33d6118125e5b71691ca46", + "sha256:27935716f8d62c1c73010428db310fd10136002cfc6d52b0ba7bdfa752d26066", + "sha256:282e0a7ddd36ebc411f156aeaa0491e8fe7f030e2a95da532cf0c84b0b70bc66", + "sha256:28f29bce89c3b401a53d6fd4bee401ee943083bf2bdc12ef297c1d63155070b0", + "sha256:2ac9fd83096df36728da8e2f4488ac3b5602238f602706606f3702f07a13a409", + "sha256:30f9f89ae625d412043f12ca3771b2ccec227cc93b93bb1f994db6e1af40a7d3", + "sha256:317251b9c9a2f1a9ff9cd093775b34c6861d1d7df9439ce3d32a88c275c995cd", + "sha256:31de2f10f63f96cc19e04bd2df9549559beadd0b2ee2da24a17e7ed877ca8c60", + "sha256:36df00e0541f264ce42d62280281541a47474dfda500bc5b7f24f70a7f87be7a", + 
"sha256:39625703540feb50b6b7f938b3856d1f4886d2e585d88274e62b1bd273fae09b", + "sha256:3f5461c77649358610fb9694e790956b4238ac5d9e697a17f63619c096469afe", + "sha256:4313f3bc901255b22f01663eeeae167468264fdae0d32c25fc631d5d6e15b502", + "sha256:442356e8924fe1a121f8c87866b0ecdc785757fd28924b17c20493961b3d6697", + "sha256:44cb1a1326a0264480a789e6100dc3e07122eb8cd1ad6b784a3d47d13ed1d89c", + "sha256:44d323aa80a867cb6db6bebb4bbec677c6478e38128847f2c6b0f70eae984d72", + "sha256:499368eb904566fbdf1a3836a1532000ef1308f34a1bcbf36e6351904cced771", + "sha256:4b01d9cfcb616eeb6d40f02e66bebfe7b06d9f2ef81641fdd50b8dd981166e0b", + "sha256:5720ebbc7a1b46c33a42d489d25d36c64c419f52159485e55589fbec648ea49a", + "sha256:5cc5e0d069c56645446c45a4b5010d4b33ac6c5ebfd369a791b5f097e46a3c08", + "sha256:618b18c3a2360ac940a5503da14fa4f880c5b9bc315ec20a830357bcc62e6bae", + "sha256:6435a66957cdba1a0b16f368bde03ce9c79c57306b39510da6ae5312a1a5b2c1", + "sha256:647ec5bee7e4ec9f1034ab48173b5fa970d9a991e565549b965e93331f1328fe", + "sha256:6e1e9e447856e9b7b3d38e1316ae9a8c92e7536ef48373de758ea055edfd5db5", + "sha256:6ef1550bb5f55f71b97a6a395286db07f7f2c01c8890e613556df9a51da91e8d", + "sha256:6ffa45cc55b18d4ac1396d1ddb029f139b1d3480f1594130e62bceadf2e1a838", + "sha256:77f31cebd8c27a36af6c7346055ac564946e562080ee1a838da724585c67474f", + "sha256:7a3b5b2c012d70c63d9d13c57ed1603709a4d9d7d473e4a9dfece0e4ea3d5f51", + "sha256:7a7ddf981a0b953ade1c2379052d47ccda2f58ab678fca0671c7c7ca2f67aac2", + "sha256:84de955314aa5e8d469b00b14d6d714b008087a0222b0f743e7ffac34ef56aff", + "sha256:8dcfd14c712aa9dd18049280bfb2f95700ff6a8bde645e09f17c3ed3f05a0130", + "sha256:928f92f80e2e8d6567b87d3316c1fd9860ccfe36e87a9a7f5237d4cda8baa1ba", + "sha256:9384b07cfd3045b37b05ed002d1c255db02fb96506ad65f0f9b776b762a7572e", + "sha256:96726839a42429318017e67a42cca75d4f0d5248a809b3cc2e125445edd7d50d", + "sha256:96bbec47beb131bbf4bae05d8ef99ad9e5738f12717cfbbf16648b78b0232e87", + "sha256:9bcf97b971289be69638d8b1b616f7e557e1342debc7fc86cf89d3f08960e411", + "sha256:a0cf4d814689e58f57ecd5d8c523e6538417ca2e72ff52c007c64065cef50fb2", + "sha256:a7c6147c6306f537cff59409609508a1d2eff81199f0302dd456bb9e7ea50c39", + "sha256:a9266644064779840feec0e34f10a89b3ff1d2d6b751fe90017abcad1864fa7c", + "sha256:afbe85b50ade42ddff5669947afde9e8a610e64d2c80be046d67ec4368e555fa", + "sha256:afcda759a69c6a8be3aae764ec6733155aa4a5ad9aad4f398b52ba4037942fe3", + "sha256:b2fab23003c4bb2249729a7290a76c1dda38c438300fdf97d4e42bf78b19c810", + "sha256:bd3f711f4c99da0091ced41dccdc1bcf8be0281dc314d6d9c6b6cf5df66f37a9", + "sha256:be0c7c98e38a1e3ad7a6ff64af8b6d6db34bf5a41b1478e24c3c74d9e7f8ed42", + "sha256:c1f2d7fd583fc79c240094b3e7237d88493814d4b300d013a42726c35a734bc9", + "sha256:c5bba6b83fde4ca233cfda04cbd4685ab88696b0c8eaf76f7148969eab5e248a", + "sha256:c6beeac698671baa558e82fa160be9761cf0eb25861943f4689ecf9000f8ebd0", + "sha256:c7333e7239415076d1418dbfb7fa4df48f3a5b00f8fdf854fca549080455bc14", + "sha256:c8a02f74ae419e3955af60f570d83187423e42e672a6433c5e292f1d23619269", + "sha256:c9c23e62f3545c2216100603614f9e019e41b9403c47dd85b8e7e5015bf1bde0", + "sha256:cca505829cdab58c2495ff418c96092d225a1bbd486f79017f6de915580d3c44", + "sha256:d3108f0ad5c6b6d78eec5273219a5bbd884b4aacec17883ceefaac988850ce6e", + "sha256:d4b8a1b6c7a68c73191f2ebd3bf66f7ce02f9c374e309bdb68ba886bbbf1b938", + "sha256:d6e274661c74195708fc4380a4ef64298926c5a50bb10fbae3d01627d7a075b7", + "sha256:db2914de2559809fdbcf3e48f41b17a493b58cb7988d3e211f6b63126c55fe82", + "sha256:e738aabff3586091221044b7a584865ddc4d6120346d12e28e788307cd731043", + 
"sha256:e7f6173302f8a329ca5d1ee592af9e628d3ade87816e9958dcf7cdae2841def7", + "sha256:e9d036a9a41fc78e8a3f10a86c2fc1098fca8fab8715ba9eb999ce4788d35df0", + "sha256:ea142255d4901b03f89cb6a94411ecec117786a76fc9ab043af8f51dd50b5313", + "sha256:ebd3e6b0c7d4954cca59d241970011f8d3327633d555051c430bd09ff49dc494", + "sha256:ec656680fc53a13f849c71afd0c84a55c536206d524cbc831cde80abbe80489e", + "sha256:ec8df0ff5a911c6d21957a9182402aad7bf060eaeffd77c9ea1c16aecab5adbf", + "sha256:ed95d66745f53e129e935ad726167d3a6cb18c5d33df3165974d54742c373868", + "sha256:ef2c9499b7bd1e24e473dc1a85de55d72fd084eea3d8bdeec7ee0720decb54fa", + "sha256:f5252ba8b43906f206048fa569debf2cd0da0316e8d5b4d25abe53307f573941", + "sha256:f737fef6e117856400afee4f17774cdea392b28ecf058833f5eca368a18cf1bf", + "sha256:fc726c3fa8f606d07bd2b500e5dc4c0fd664c59be7788a16b9e34352c50b6b6b" ], "markers": "python_version >= '3.9'", - "version": "==3.11.8" + "version": "==3.11.9" }, "aioqbt": { "git": "git+https://github.com/mhdzumair/aioqbt.git", @@ -185,6 +185,62 @@ "markers": "python_version >= '3.8'", "version": "==1.3.0" }, + "asyncpg": { + "hashes": [ + "sha256:04ff0785ae7eed6cc138e73fc67b8e51d54ee7a3ce9b63666ce55a0bf095f7ba", + "sha256:05b185ebb8083c8568ea8a40e896d5f7af4b8554b64d7719c0eaa1eb5a5c3a70", + "sha256:0b448f0150e1c3b96cb0438a0d0aa4871f1472e58de14a3ec320dbb2798fb0d4", + "sha256:0f5712350388d0cd0615caec629ad53c81e506b1abaaf8d14c93f54b35e3595a", + "sha256:1292b84ee06ac8a2ad8e51c7475aa309245874b61333d97411aab835c4a2f737", + "sha256:1b11a555a198b08f5c4baa8f8231c74a366d190755aa4f99aacec5970afe929a", + "sha256:1b982daf2441a0ed314bd10817f1606f1c28b1136abd9e4f11335358c2c631cb", + "sha256:1c06a3a50d014b303e5f6fc1e5f95eb28d2cee89cf58384b700da621e5d5e547", + "sha256:1c198a00cce9506fcd0bf219a799f38ac7a237745e1d27f0e1f66d3707c84a5a", + "sha256:26683d3b9a62836fad771a18ecf4659a30f348a561279d6227dab96182f46144", + "sha256:29ff1fc8b5bf724273782ff8b4f57b0f8220a1b2324184846b39d1ab4122031d", + "sha256:3152fef2e265c9c24eec4ee3d22b4f4d2703d30614b0b6753e9ed4115c8a146f", + "sha256:3326e6d7381799e9735ca2ec9fd7be4d5fef5dcbc3cb555d8a463d8460607956", + "sha256:3356637f0bd830407b5597317b3cb3571387ae52ddc3bca6233682be88bbbc1f", + "sha256:393af4e3214c8fa4c7b86da6364384c0d1b3298d45803375572f415b6f673f38", + "sha256:46973045b567972128a27d40001124fbc821c87a6cade040cfcd4fa8a30bcdc4", + "sha256:51da377487e249e35bd0859661f6ee2b81db11ad1f4fc036194bc9cb2ead5056", + "sha256:574156480df14f64c2d76450a3f3aaaf26105869cad3865041156b38459e935d", + "sha256:578445f09f45d1ad7abddbff2a3c7f7c291738fdae0abffbeb737d3fc3ab8b75", + "sha256:5b290f4726a887f75dcd1b3006f484252db37602313f806e9ffc4e5996cfe5cb", + "sha256:5df69d55add4efcd25ea2a3b02025b669a285b767bfbf06e356d68dbce4234ff", + "sha256:5e0511ad3dec5f6b4f7a9e063591d407eee66b88c14e2ea636f187da1dcfff6a", + "sha256:64e899bce0600871b55368b8483e5e3e7f1860c9482e7f12e0a771e747988168", + "sha256:68d71a1be3d83d0570049cd1654a9bdfe506e794ecc98ad0873304a9f35e411e", + "sha256:6c2a2ef565400234a633da0eafdce27e843836256d40705d83ab7ec42074efb3", + "sha256:6f4e83f067b35ab5e6371f8a4c93296e0439857b4569850b178a01385e82e9ad", + "sha256:8b684a3c858a83cd876f05958823b68e8d14ec01bb0c0d14a6704c5bf9711773", + "sha256:9110df111cabc2ed81aad2f35394a00cadf4f2e0635603db6ebbd0fc896f46a4", + "sha256:915aeb9f79316b43c3207363af12d0e6fd10776641a7de8a01212afd95bdf0ed", + "sha256:9a0292c6af5c500523949155ec17b7fe01a00ace33b68a476d6b5059f9630305", + "sha256:9b6fde867a74e8c76c71e2f64f80c64c0f3163e687f1763cfaf21633ec24ec33", + 
"sha256:a3479a0d9a852c7c84e822c073622baca862d1217b10a02dd57ee4a7a081f708", + "sha256:aa403147d3e07a267ada2ae34dfc9324e67ccc4cdca35261c8c22792ba2b10cf", + "sha256:aca1548e43bbb9f0f627a04666fedaca23db0a31a84136ad1f868cb15deb6e3a", + "sha256:ae374585f51c2b444510cdf3595b97ece4f233fde739aa14b50e0d64e8a7a590", + "sha256:bc6d84136f9c4d24d358f3b02be4b6ba358abd09f80737d1ac7c444f36108454", + "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e", + "sha256:c42f6bb65a277ce4d93f3fba46b91a265631c8df7250592dd4f11f8b0152150f", + "sha256:c47806b1a8cbb0a0db896f4cd34d89942effe353a5035c62734ab13b9f938da3", + "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851", + "sha256:c7255812ac85099a0e1ffb81b10dc477b9973345793776b128a23e60148dd1af", + "sha256:c902a60b52e506d38d7e80e0dd5399f657220f24635fee368117b8b5fce1142e", + "sha256:db9891e2d76e6f425746c5d2da01921e9a16b5a71a1c905b13f30e12a257c4af", + "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0", + "sha256:f23b836dd90bea21104f69547923a02b167d999ce053f3d502081acea2fba15b", + "sha256:f59b430b8e27557c3fb9869222559f7417ced18688375825f8f12302c34e915e", + "sha256:f86b0e2cd3f1249d6fe6fd6cfe0cd4538ba994e2d8249c0491925629b9104d0f", + "sha256:fb622c94db4e13137c4c7f98834185049cc50ee01d8f657ef898b6407c7b9c50", + "sha256:fd4406d09208d5b4a14db9a9dbb311b6d7aeeab57bded7ed2f8ea41aeef39b34" + ], + "index": "pypi", + "markers": "python_full_version >= '3.8.0'", + "version": "==0.30.0" + }, "attrs": { "hashes": [ "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346", @@ -438,11 +494,11 @@ }, "click-loglevel": { "hashes": [ - "sha256:897070fd4bf5b503edb5a1ecc0551448647901f4fac83a01c9f00aa28ad86d60", - "sha256:fcc98a136a96479b4768494df25017114ba1cb525dac0bed619209b3578fd4f3" + "sha256:519e90c7991cf4d0b9a70f037b74197eb3ee079923f590c786fb93dd61af820a", + "sha256:6bfcb27435e1b859cc08d479a1b32cd8518f83b4d313cfe7d7142ee62b841cf4" ], - "markers": "python_version >= '3.7'", - "version": "==0.5.0" + "markers": "python_version >= '3.8'", + "version": "==0.5.1" }, "colorlog": { "hashes": [ @@ -995,7 +1051,7 @@ "humanize": { "git": "git+https://github.com/python-humanize/humanize.git", "markers": "python_version >= '3.9'", - "ref": "6a705828cbdf2ba3f7b02901a54121bbf1b73b07" + "ref": "a0b1dd00d6c99a6567c0d3e103fb55bde43c331e" }, "hyperlink": { "hashes": [ @@ -1229,6 +1285,14 @@ "markers": "python_version >= '3.6' and python_version < '4'", "version": "==0.2.11" }, + "markdown-it-py": { + "hashes": [ + "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", + "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb" + ], + "markers": "python_version >= '3.8'", + "version": "==3.0.0" + }, "markupsafe": { "hashes": [ "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", @@ -1296,6 +1360,14 @@ "markers": "python_version >= '3.9'", "version": "==3.0.2" }, + "mdurl": { + "hashes": [ + "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", + "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba" + ], + "markers": "python_version >= '3.7'", + "version": "==0.1.2" + }, "motor": { "hashes": [ "sha256:0ef7f520213e852bf0eac306adf631aabe849227d8aec900a2612512fb9c5b8d", @@ -1539,107 +1611,91 @@ }, "propcache": { "hashes": [ - "sha256:00181262b17e517df2cd85656fcd6b4e70946fe62cd625b9d74ac9977b64d8d9", - "sha256:0e53cb83fdd61cbd67202735e6a6687a7b491c8742dfc39c9e01e80354956763", - 
"sha256:1235c01ddaa80da8235741e80815ce381c5267f96cc49b1477fdcf8c047ef325", - "sha256:140fbf08ab3588b3468932974a9331aff43c0ab8a2ec2c608b6d7d1756dbb6cb", - "sha256:191db28dc6dcd29d1a3e063c3be0b40688ed76434622c53a284e5427565bbd9b", - "sha256:1e41d67757ff4fbc8ef2af99b338bfb955010444b92929e9e55a6d4dcc3c4f09", - "sha256:1ec43d76b9677637a89d6ab86e1fef70d739217fefa208c65352ecf0282be957", - "sha256:20a617c776f520c3875cf4511e0d1db847a076d720714ae35ffe0df3e440be68", - "sha256:218db2a3c297a3768c11a34812e63b3ac1c3234c3a086def9c0fee50d35add1f", - "sha256:22aa8f2272d81d9317ff5756bb108021a056805ce63dd3630e27d042c8092798", - "sha256:25a1f88b471b3bc911d18b935ecb7115dff3a192b6fef46f0bfaf71ff4f12418", - "sha256:25c8d773a62ce0451b020c7b29a35cfbc05de8b291163a7a0f3b7904f27253e6", - "sha256:2a60ad3e2553a74168d275a0ef35e8c0a965448ffbc3b300ab3a5bb9956c2162", - "sha256:2a66df3d4992bc1d725b9aa803e8c5a66c010c65c741ad901e260ece77f58d2f", - "sha256:2ccc28197af5313706511fab3a8b66dcd6da067a1331372c82ea1cb74285e036", - "sha256:2e900bad2a8456d00a113cad8c13343f3b1f327534e3589acc2219729237a2e8", - "sha256:2ee7606193fb267be4b2e3b32714f2d58cad27217638db98a60f9efb5efeccc2", - "sha256:33ac8f098df0585c0b53009f039dfd913b38c1d2edafed0cedcc0c32a05aa110", - "sha256:3444cdba6628accf384e349014084b1cacd866fbb88433cd9d279d90a54e0b23", - "sha256:363ea8cd3c5cb6679f1c2f5f1f9669587361c062e4899fce56758efa928728f8", - "sha256:375a12d7556d462dc64d70475a9ee5982465fbb3d2b364f16b86ba9135793638", - "sha256:388f3217649d6d59292b722d940d4d2e1e6a7003259eb835724092a1cca0203a", - "sha256:3947483a381259c06921612550867b37d22e1df6d6d7e8361264b6d037595f44", - "sha256:39e104da444a34830751715f45ef9fc537475ba21b7f1f5b0f4d71a3b60d7fe2", - "sha256:3c997f8c44ec9b9b0bcbf2d422cc00a1d9b9c681f56efa6ca149a941e5560da2", - "sha256:3dfafb44f7bb35c0c06eda6b2ab4bfd58f02729e7c4045e179f9a861b07c9850", - "sha256:3ebbcf2a07621f29638799828b8d8668c421bfb94c6cb04269130d8de4fb7136", - "sha256:3f88a4095e913f98988f5b338c1d4d5d07dbb0b6bad19892fd447484e483ba6b", - "sha256:439e76255daa0f8151d3cb325f6dd4a3e93043e6403e6491813bcaaaa8733887", - "sha256:4569158070180c3855e9c0791c56be3ceeb192defa2cdf6a3f39e54319e56b89", - "sha256:466c219deee4536fbc83c08d09115249db301550625c7fef1c5563a584c9bc87", - "sha256:4a9d9b4d0a9b38d1c391bb4ad24aa65f306c6f01b512e10a8a34a2dc5675d348", - "sha256:4c7dde9e533c0a49d802b4f3f218fa9ad0a1ce21f2c2eb80d5216565202acab4", - "sha256:53d1bd3f979ed529f0805dd35ddaca330f80a9a6d90bc0121d2ff398f8ed8861", - "sha256:55346705687dbd7ef0d77883ab4f6fabc48232f587925bdaf95219bae072491e", - "sha256:56295eb1e5f3aecd516d91b00cfd8bf3a13991de5a479df9e27dd569ea23959c", - "sha256:56bb5c98f058a41bb58eead194b4db8c05b088c93d94d5161728515bd52b052b", - "sha256:5a5b3bb545ead161be780ee85a2b54fdf7092815995661947812dde94a40f6fb", - "sha256:5f2564ec89058ee7c7989a7b719115bdfe2a2fb8e7a4543b8d1c0cc4cf6478c1", - "sha256:608cce1da6f2672a56b24a015b42db4ac612ee709f3d29f27a00c943d9e851de", - "sha256:63f13bf09cc3336eb04a837490b8f332e0db41da66995c9fd1ba04552e516354", - "sha256:662dd62358bdeaca0aee5761de8727cfd6861432e3bb828dc2a693aa0471a563", - "sha256:676135dcf3262c9c5081cc8f19ad55c8a64e3f7282a21266d05544450bffc3a5", - "sha256:67aeb72e0f482709991aa91345a831d0b707d16b0257e8ef88a2ad246a7280bf", - "sha256:67b69535c870670c9f9b14a75d28baa32221d06f6b6fa6f77a0a13c5a7b0a5b9", - "sha256:682a7c79a2fbf40f5dbb1eb6bfe2cd865376deeac65acf9beb607505dced9e12", - "sha256:6994984550eaf25dd7fc7bd1b700ff45c894149341725bb4edc67f0ffa94efa4", - "sha256:69d3a98eebae99a420d4b28756c8ce6ea5a29291baf2dc9ff9414b42676f61d5", - 
"sha256:6e2e54267980349b723cff366d1e29b138b9a60fa376664a157a342689553f71", - "sha256:73e4b40ea0eda421b115248d7e79b59214411109a5bc47d0d48e4c73e3b8fcf9", - "sha256:74acd6e291f885678631b7ebc85d2d4aec458dd849b8c841b57ef04047833bed", - "sha256:7665f04d0c7f26ff8bb534e1c65068409bf4687aa2534faf7104d7182debb336", - "sha256:7735e82e3498c27bcb2d17cb65d62c14f1100b71723b68362872bca7d0913d90", - "sha256:77a86c261679ea5f3896ec060be9dc8e365788248cc1e049632a1be682442063", - "sha256:7cf18abf9764746b9c8704774d8b06714bcb0a63641518a3a89c7f85cc02c2ad", - "sha256:83928404adf8fb3d26793665633ea79b7361efa0287dfbd372a7e74311d51ee6", - "sha256:8e40876731f99b6f3c897b66b803c9e1c07a989b366c6b5b475fafd1f7ba3fb8", - "sha256:8f188cfcc64fb1266f4684206c9de0e80f54622c3f22a910cbd200478aeae61e", - "sha256:91997d9cb4a325b60d4e3f20967f8eb08dfcb32b22554d5ef78e6fd1dda743a2", - "sha256:91ee8fc02ca52e24bcb77b234f22afc03288e1dafbb1f88fe24db308910c4ac7", - "sha256:92fe151145a990c22cbccf9ae15cae8ae9eddabfc949a219c9f667877e40853d", - "sha256:945db8ee295d3af9dbdbb698cce9bbc5c59b5c3fe328bbc4387f59a8a35f998d", - "sha256:9517d5e9e0731957468c29dbfd0f976736a0e55afaea843726e887f36fe017df", - "sha256:952e0d9d07609d9c5be361f33b0d6d650cd2bae393aabb11d9b719364521984b", - "sha256:97a58a28bcf63284e8b4d7b460cbee1edaab24634e82059c7b8c09e65284f178", - "sha256:97e48e8875e6c13909c800fa344cd54cc4b2b0db1d5f911f840458a500fde2c2", - "sha256:9e0f07b42d2a50c7dd2d8675d50f7343d998c64008f1da5fef888396b7f84630", - "sha256:a3dc1a4b165283bd865e8f8cb5f0c64c05001e0718ed06250d8cac9bec115b48", - "sha256:a3ebe9a75be7ab0b7da2464a77bb27febcb4fab46a34f9288f39d74833db7f61", - "sha256:a64e32f8bd94c105cc27f42d3b658902b5bcc947ece3c8fe7bc1b05982f60e89", - "sha256:a6ed8db0a556343d566a5c124ee483ae113acc9a557a807d439bcecc44e7dfbb", - "sha256:ad9c9b99b05f163109466638bd30ada1722abb01bbb85c739c50b6dc11f92dc3", - "sha256:b33d7a286c0dc1a15f5fc864cc48ae92a846df287ceac2dd499926c3801054a6", - "sha256:bc092ba439d91df90aea38168e11f75c655880c12782facf5cf9c00f3d42b562", - "sha256:c436130cc779806bdf5d5fae0d848713105472b8566b75ff70048c47d3961c5b", - "sha256:c5869b8fd70b81835a6f187c5fdbe67917a04d7e52b6e7cc4e5fe39d55c39d58", - "sha256:c5ecca8f9bab618340c8e848d340baf68bcd8ad90a8ecd7a4524a81c1764b3db", - "sha256:cfac69017ef97db2438efb854edf24f5a29fd09a536ff3a992b75990720cdc99", - "sha256:d2f0d0f976985f85dfb5f3d685697ef769faa6b71993b46b295cdbbd6be8cc37", - "sha256:d5bed7f9805cc29c780f3aee05de3262ee7ce1f47083cfe9f77471e9d6777e83", - "sha256:d6a21ef516d36909931a2967621eecb256018aeb11fc48656e3257e73e2e247a", - "sha256:d9b6ddac6408194e934002a69bcaadbc88c10b5f38fb9307779d1c629181815d", - "sha256:db47514ffdbd91ccdc7e6f8407aac4ee94cc871b15b577c1c324236b013ddd04", - "sha256:df81779732feb9d01e5d513fad0122efb3d53bbc75f61b2a4f29a020bc985e70", - "sha256:e4a91d44379f45f5e540971d41e4626dacd7f01004826a18cb048e7da7e96544", - "sha256:e63e3e1e0271f374ed489ff5ee73d4b6e7c60710e1f76af5f0e1a6117cd26394", - "sha256:e70fac33e8b4ac63dfc4c956fd7d85a0b1139adcfc0d964ce288b7c527537fea", - "sha256:ecddc221a077a8132cf7c747d5352a15ed763b674c0448d811f408bf803d9ad7", - "sha256:f45eec587dafd4b2d41ac189c2156461ebd0c1082d2fe7013571598abb8505d1", - "sha256:f52a68c21363c45297aca15561812d542f8fc683c85201df0bebe209e349f793", - "sha256:f571aea50ba5623c308aa146eb650eebf7dbe0fd8c5d946e28343cb3b5aad577", - "sha256:f60f0ac7005b9f5a6091009b09a419ace1610e163fa5deaba5ce3484341840e7", - "sha256:f6475a1b2ecb310c98c28d271a30df74f9dd436ee46d09236a6b750a7599ce57", - "sha256:f6d5749fdd33d90e34c2efb174c7e236829147a2713334d708746e94c4bde40d", - 
"sha256:f902804113e032e2cdf8c71015651c97af6418363bea8d78dc0911d56c335032", - "sha256:fa1076244f54bb76e65e22cb6910365779d5c3d71d1f18b275f1dfc7b0d71b4d", - "sha256:fc2db02409338bf36590aa985a461b2c96fce91f8e7e0f14c50c5fcc4f229016", - "sha256:ffcad6c564fe6b9b8916c1aefbb37a362deebf9394bd2974e9d84232e3e08504" + "sha256:03ff9d3f665769b2a85e6157ac8b439644f2d7fd17615a82fa55739bc97863f4", + "sha256:049324ee97bb67285b49632132db351b41e77833678432be52bdd0289c0e05e4", + "sha256:081a430aa8d5e8876c6909b67bd2d937bfd531b0382d3fdedb82612c618bc41a", + "sha256:0f022d381747f0dfe27e99d928e31bc51a18b65bb9e481ae0af1380a6725dd1f", + "sha256:12d1083f001ace206fe34b6bdc2cb94be66d57a850866f0b908972f90996b3e9", + "sha256:14d86fe14b7e04fa306e0c43cdbeebe6b2c2156a0c9ce56b815faacc193e320d", + "sha256:160291c60081f23ee43d44b08a7e5fb76681221a8e10b3139618c5a9a291b84e", + "sha256:1672137af7c46662a1c2be1e8dc78cb6d224319aaa40271c9257d886be4363a6", + "sha256:19a0f89a7bb9d8048d9c4370c9c543c396e894c76be5525f5e1ad287f1750ddf", + "sha256:1ac2f5fe02fa75f56e1ad473f1175e11f475606ec9bd0be2e78e4734ad575034", + "sha256:1cd9a1d071158de1cc1c71a26014dcdfa7dd3d5f4f88c298c7f90ad6f27bb46d", + "sha256:1ffc3cca89bb438fb9c95c13fc874012f7b9466b89328c3c8b1aa93cdcfadd16", + "sha256:297878dc9d0a334358f9b608b56d02e72899f3b8499fc6044133f0d319e2ec30", + "sha256:2d3af2e79991102678f53e0dbf4c35de99b6b8b58f29a27ca0325816364caaba", + "sha256:30b43e74f1359353341a7adb783c8f1b1c676367b011709f466f42fda2045e95", + "sha256:3156628250f46a0895f1f36e1d4fbe062a1af8718ec3ebeb746f1d23f0c5dc4d", + "sha256:31f5af773530fd3c658b32b6bdc2d0838543de70eb9a2156c03e410f7b0d3aae", + "sha256:3935bfa5fede35fb202c4b569bb9c042f337ca4ff7bd540a0aa5e37131659348", + "sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2", + "sha256:3f77ce728b19cb537714499928fe800c3dda29e8d9428778fc7c186da4c09a64", + "sha256:4160d9283bd382fa6c0c2b5e017acc95bc183570cd70968b9202ad6d8fc48dce", + "sha256:4a571d97dbe66ef38e472703067021b1467025ec85707d57e78711c085984e54", + "sha256:4e6281aedfca15301c41f74d7005e6e3f4ca143584ba696ac69df4f02f40d629", + "sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54", + "sha256:556fc6c10989f19a179e4321e5d678db8eb2924131e64652a51fe83e4c3db0e1", + "sha256:574faa3b79e8ebac7cb1d7930f51184ba1ccf69adfdec53a12f319a06030a68b", + "sha256:58791550b27d5488b1bb52bc96328456095d96206a250d28d874fafe11b3dfaf", + "sha256:5b750a8e5a1262434fb1517ddf64b5de58327f1adc3524a5e44c2ca43305eb0b", + "sha256:5d97151bc92d2b2578ff7ce779cdb9174337390a535953cbb9452fb65164c587", + "sha256:5eee736daafa7af6d0a2dc15cc75e05c64f37fc37bafef2e00d77c14171c2097", + "sha256:6445804cf4ec763dc70de65a3b0d9954e868609e83850a47ca4f0cb64bd79fea", + "sha256:647894f5ae99c4cf6bb82a1bb3a796f6e06af3caa3d32e26d2350d0e3e3faf24", + "sha256:66d4cfda1d8ed687daa4bc0274fcfd5267873db9a5bc0418c2da19273040eeb7", + "sha256:6a9a8c34fb7bb609419a211e59da8887eeca40d300b5ea8e56af98f6fbbb1541", + "sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6", + "sha256:6b6fb63ae352e13748289f04f37868099e69dba4c2b3e271c46061e82c745634", + "sha256:70693319e0b8fd35dd863e3e29513875eb15c51945bf32519ef52927ca883bc3", + "sha256:781e65134efaf88feb447e8c97a51772aa75e48b794352f94cb7ea717dedda0d", + "sha256:819ce3b883b7576ca28da3861c7e1a88afd08cc8c96908e08a3f4dd64a228034", + "sha256:857112b22acd417c40fa4595db2fe28ab900c8c5fe4670c7989b1c0230955465", + "sha256:887d9b0a65404929641a9fabb6452b07fe4572b269d901d622d8a34a4e9043b2", + "sha256:8b3489ff1ed1e8315674d0775dc7d2195fb13ca17b3808721b54dbe9fd020faf", + 
"sha256:92fc4500fcb33899b05ba73276dfb684a20d31caa567b7cb5252d48f896a91b1", + "sha256:9403db39be1393618dd80c746cb22ccda168efce239c73af13c3763ef56ffc04", + "sha256:98110aa363f1bb4c073e8dcfaefd3a5cea0f0834c2aab23dda657e4dab2f53b5", + "sha256:999779addc413181912e984b942fbcc951be1f5b3663cd80b2687758f434c583", + "sha256:9caac6b54914bdf41bcc91e7eb9147d331d29235a7c967c150ef5df6464fd1bb", + "sha256:a7a078f5d37bee6690959c813977da5291b24286e7b962e62a94cec31aa5188b", + "sha256:a7e65eb5c003a303b94aa2c3852ef130230ec79e349632d030e9571b87c4698c", + "sha256:a96dc1fa45bd8c407a0af03b2d5218392729e1822b0c32e62c5bf7eeb5fb3958", + "sha256:aca405706e0b0a44cc6bfd41fbe89919a6a56999157f6de7e182a990c36e37bc", + "sha256:accb6150ce61c9c4b7738d45550806aa2b71c7668c6942f17b0ac182b6142fd4", + "sha256:ad1af54a62ffe39cf34db1aa6ed1a1873bd548f6401db39d8e7cd060b9211f82", + "sha256:ae1aa1cd222c6d205853b3013c69cd04515f9d6ab6de4b0603e2e1c33221303e", + "sha256:b2d0a12018b04f4cb820781ec0dffb5f7c7c1d2a5cd22bff7fb055a2cb19ebce", + "sha256:b480c6a4e1138e1aa137c0079b9b6305ec6dcc1098a8ca5196283e8a49df95a9", + "sha256:b74c261802d3d2b85c9df2dfb2fa81b6f90deeef63c2db9f0e029a3cac50b518", + "sha256:ba278acf14471d36316159c94a802933d10b6a1e117b8554fe0d0d9b75c9d536", + "sha256:bb6178c241278d5fe853b3de743087be7f5f4c6f7d6d22a3b524d323eecec505", + "sha256:bf72af5e0fb40e9babf594308911436c8efde3cb5e75b6f206c34ad18be5c052", + "sha256:bfd3223c15bebe26518d58ccf9a39b93948d3dcb3e57a20480dfdd315356baff", + "sha256:c214999039d4f2a5b2073ac506bba279945233da8c786e490d411dfc30f855c1", + "sha256:c2f992c07c0fca81655066705beae35fc95a2fa7366467366db627d9f2ee097f", + "sha256:cba4cfa1052819d16699e1d55d18c92b6e094d4517c41dd231a8b9f87b6fa681", + "sha256:cea7daf9fc7ae6687cf1e2c049752f19f146fdc37c2cc376e7d0032cf4f25347", + "sha256:cf6c4150f8c0e32d241436526f3c3f9cbd34429492abddbada2ffcff506c51af", + "sha256:d09c333d36c1409d56a9d29b3a1b800a42c76a57a5a8907eacdbce3f18768246", + "sha256:d27b84d5880f6d8aa9ae3edb253c59d9f6642ffbb2c889b78b60361eed449787", + "sha256:d2ccec9ac47cf4e04897619c0e0c1a48c54a71bdf045117d3a26f80d38ab1fb0", + "sha256:d71264a80f3fcf512eb4f18f59423fe82d6e346ee97b90625f283df56aee103f", + "sha256:d93f3307ad32a27bda2e88ec81134b823c240aa3abb55821a8da553eed8d9439", + "sha256:d9631c5e8b5b3a0fda99cb0d29c18133bca1e18aea9effe55adb3da1adef80d3", + "sha256:ddfab44e4489bd79bda09d84c430677fc7f0a4939a73d2bba3073036f487a0a6", + "sha256:e7048abd75fe40712005bcfc06bb44b9dfcd8e101dda2ecf2f5aa46115ad07ca", + "sha256:e73091191e4280403bde6c9a52a6999d69cdfde498f1fdf629105247599b57ec", + "sha256:e800776a79a5aabdb17dcc2346a7d66d0777e942e4cd251defeb084762ecd17d", + "sha256:edc9fc7051e3350643ad929df55c451899bb9ae6d24998a949d2e4c87fb596d3", + "sha256:f089118d584e859c62b3da0892b88a83d611c2033ac410e929cb6754eec0ed16", + "sha256:f174bbd484294ed9fdf09437f889f95807e5f229d5d93588d34e92106fbf6717", + "sha256:f508b0491767bb1f2b87fdfacaba5f7eddc2f867740ec69ece6d1946d29029a6", + "sha256:f7a31fc1e1bd362874863fdeed71aed92d348f5336fd84f2197ba40c59f061bd", + "sha256:f9479aa06a793c5aeba49ce5c5692ffb51fcd9a7016e017d555d5e2b0045d212" ], - "markers": "python_version >= '3.8'", - "version": "==0.2.0" + "markers": "python_version >= '3.9'", + "version": "==0.2.1" }, "protego": { "hashes": [ @@ -1938,6 +1994,14 @@ "markers": "python_version >= '3.8'", "version": "==12.0.0" }, + "pygments": { + "hashes": [ + "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199", + "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a" + ], + "markers": "python_version >= '3.8'", + 
"version": "==2.18.0" + }, "pymongo": { "extras": [ "srv" @@ -2033,12 +2097,12 @@ }, "python-multipart": { "hashes": [ - "sha256:7a68db60c8bfb82e460637fa4750727b45af1d5e2ed215593f917f64694d34fe", - "sha256:efe91480f485f6a361427a541db4796f9e1591afc0fb8e7a4ba06bfbc6708996" + "sha256:905502ef39050557b7a6af411f454bc19526529ca46ae6831508438890ce12cc", + "sha256:f8d5b0b9c618575bf9df01c684ded1d94a338839bdd8223838afacfb4bb2082d" ], "index": "pypi", "markers": "python_version >= '3.8'", - "version": "==0.0.18" + "version": "==0.0.19" }, "pytz": { "hashes": [ @@ -2358,101 +2422,120 @@ ], "version": "==2.1.0" }, + "rich": { + "hashes": [ + "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098", + "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90" + ], + "markers": "python_full_version >= '3.8.0'", + "version": "==13.9.4" + }, "rpds-py": { "hashes": [ - "sha256:031819f906bb146561af051c7cef4ba2003d28cff07efacef59da973ff7969ba", - "sha256:0626238a43152918f9e72ede9a3b6ccc9e299adc8ade0d67c5e142d564c9a83d", - "sha256:085ed25baac88953d4283e5b5bd094b155075bb40d07c29c4f073e10623f9f2e", - "sha256:0a9e0759e7be10109645a9fddaaad0619d58c9bf30a3f248a2ea57a7c417173a", - "sha256:0c025820b78817db6a76413fff6866790786c38f95ea3f3d3c93dbb73b632202", - "sha256:1ff2eba7f6c0cb523d7e9cff0903f2fe1feff8f0b2ceb6bd71c0e20a4dcee271", - "sha256:20cc1ed0bcc86d8e1a7e968cce15be45178fd16e2ff656a243145e0b439bd250", - "sha256:241e6c125568493f553c3d0fdbb38c74babf54b45cef86439d4cd97ff8feb34d", - "sha256:2c51d99c30091f72a3c5d126fad26236c3f75716b8b5e5cf8effb18889ced928", - "sha256:2d6129137f43f7fa02d41542ffff4871d4aefa724a5fe38e2c31a4e0fd343fb0", - "sha256:30b912c965b2aa76ba5168fd610087bad7fcde47f0a8367ee8f1876086ee6d1d", - "sha256:30bdc973f10d28e0337f71d202ff29345320f8bc49a31c90e6c257e1ccef4333", - "sha256:320c808df533695326610a1b6a0a6e98f033e49de55d7dc36a13c8a30cfa756e", - "sha256:32eb88c30b6a4f0605508023b7141d043a79b14acb3b969aa0b4f99b25bc7d4a", - "sha256:3b766a9f57663396e4f34f5140b3595b233a7b146e94777b97a8413a1da1be18", - "sha256:3b929c2bb6e29ab31f12a1117c39f7e6d6450419ab7464a4ea9b0b417174f044", - "sha256:3e30a69a706e8ea20444b98a49f386c17b26f860aa9245329bab0851ed100677", - "sha256:3e53861b29a13d5b70116ea4230b5f0f3547b2c222c5daa090eb7c9c82d7f664", - "sha256:40c91c6e34cf016fa8e6b59d75e3dbe354830777fcfd74c58b279dceb7975b75", - "sha256:4991ca61656e3160cdaca4851151fd3f4a92e9eba5c7a530ab030d6aee96ec89", - "sha256:4ab2c2a26d2f69cdf833174f4d9d86118edc781ad9a8fa13970b527bf8236027", - "sha256:4e8921a259f54bfbc755c5bbd60c82bb2339ae0324163f32868f63f0ebb873d9", - "sha256:4eb2de8a147ffe0626bfdc275fc6563aa7bf4b6db59cf0d44f0ccd6ca625a24e", - "sha256:5145282a7cd2ac16ea0dc46b82167754d5e103a05614b724457cffe614f25bd8", - "sha256:520ed8b99b0bf86a176271f6fe23024323862ac674b1ce5b02a72bfeff3fff44", - "sha256:52c041802a6efa625ea18027a0723676a778869481d16803481ef6cc02ea8cb3", - "sha256:5555db3e618a77034954b9dc547eae94166391a98eb867905ec8fcbce1308d95", - "sha256:58a0e345be4b18e6b8501d3b0aa540dad90caeed814c515e5206bb2ec26736fd", - "sha256:590ef88db231c9c1eece44dcfefd7515d8bf0d986d64d0caf06a81998a9e8cab", - "sha256:5afb5efde74c54724e1a01118c6e5c15e54e642c42a1ba588ab1f03544ac8c7a", - "sha256:688c93b77e468d72579351a84b95f976bd7b3e84aa6686be6497045ba84be560", - "sha256:6b4ef7725386dc0762857097f6b7266a6cdd62bfd209664da6712cb26acef035", - "sha256:6bc0e697d4d79ab1aacbf20ee5f0df80359ecf55db33ff41481cf3e24f206919", - "sha256:6dcc4949be728ede49e6244eabd04064336012b37f5c2200e8ec8eb2988b209c", - 
"sha256:6f54e7106f0001244a5f4cf810ba8d3f9c542e2730821b16e969d6887b664266", - "sha256:808f1ac7cf3b44f81c9475475ceb221f982ef548e44e024ad5f9e7060649540e", - "sha256:8404b3717da03cbf773a1d275d01fec84ea007754ed380f63dfc24fb76ce4592", - "sha256:878f6fea96621fda5303a2867887686d7a198d9e0f8a40be100a63f5d60c88c9", - "sha256:8a7ff941004d74d55a47f916afc38494bd1cfd4b53c482b77c03147c91ac0ac3", - "sha256:95a5bad1ac8a5c77b4e658671642e4af3707f095d2b78a1fdd08af0dfb647624", - "sha256:97ef67d9bbc3e15584c2f3c74bcf064af36336c10d2e21a2131e123ce0f924c9", - "sha256:98486337f7b4f3c324ab402e83453e25bb844f44418c066623db88e4c56b7c7b", - "sha256:98e4fe5db40db87ce1c65031463a760ec7906ab230ad2249b4572c2fc3ef1f9f", - "sha256:998a8080c4495e4f72132f3d66ff91f5997d799e86cec6ee05342f8f3cda7dca", - "sha256:9afe42102b40007f588666bc7de82451e10c6788f6f70984629db193849dced1", - "sha256:9e20da3957bdf7824afdd4b6eeb29510e83e026473e04952dca565170cd1ecc8", - "sha256:a017f813f24b9df929674d0332a374d40d7f0162b326562daae8066b502d0590", - "sha256:a429b99337062877d7875e4ff1a51fe788424d522bd64a8c0a20ef3021fdb6ed", - "sha256:a58ce66847711c4aa2ecfcfaff04cb0327f907fead8945ffc47d9407f41ff952", - "sha256:a78d8b634c9df7f8d175451cfeac3810a702ccb85f98ec95797fa98b942cea11", - "sha256:a89a8ce9e4e75aeb7fa5d8ad0f3fecdee813802592f4f46a15754dcb2fd6b061", - "sha256:a8eeec67590e94189f434c6d11c426892e396ae59e4801d17a93ac96b8c02a6c", - "sha256:aaeb25ccfb9b9014a10eaf70904ebf3f79faaa8e60e99e19eef9f478651b9b74", - "sha256:ad116dda078d0bc4886cb7840e19811562acdc7a8e296ea6ec37e70326c1b41c", - "sha256:af04ac89c738e0f0f1b913918024c3eab6e3ace989518ea838807177d38a2e94", - "sha256:af4a644bf890f56e41e74be7d34e9511e4954894d544ec6b8efe1e21a1a8da6c", - "sha256:b21747f79f360e790525e6f6438c7569ddbfb1b3197b9e65043f25c3c9b489d8", - "sha256:b229ce052ddf1a01c67d68166c19cb004fb3612424921b81c46e7ea7ccf7c3bf", - "sha256:b4de1da871b5c0fd5537b26a6fc6814c3cc05cabe0c941db6e9044ffbb12f04a", - "sha256:b80b4690bbff51a034bfde9c9f6bf9357f0a8c61f548942b80f7b66356508bf5", - "sha256:b876f2bc27ab5954e2fd88890c071bd0ed18b9c50f6ec3de3c50a5ece612f7a6", - "sha256:b8f107395f2f1d151181880b69a2869c69e87ec079c49c0016ab96860b6acbe5", - "sha256:b9b76e2afd585803c53c5b29e992ecd183f68285b62fe2668383a18e74abe7a3", - "sha256:c2b2f71c6ad6c2e4fc9ed9401080badd1469fa9889657ec3abea42a3d6b2e1ed", - "sha256:c3761f62fcfccf0864cc4665b6e7c3f0c626f0380b41b8bd1ce322103fa3ef87", - "sha256:c38dbf31c57032667dd5a2f0568ccde66e868e8f78d5a0d27dcc56d70f3fcd3b", - "sha256:ca9989d5d9b1b300bc18e1801c67b9f6d2c66b8fd9621b36072ed1df2c977f72", - "sha256:cbd7504a10b0955ea287114f003b7ad62330c9e65ba012c6223dba646f6ffd05", - "sha256:d167e4dbbdac48bd58893c7e446684ad5d425b407f9336e04ab52e8b9194e2ed", - "sha256:d2132377f9deef0c4db89e65e8bb28644ff75a18df5293e132a8d67748397b9f", - "sha256:da52d62a96e61c1c444f3998c434e8b263c384f6d68aca8274d2e08d1906325c", - "sha256:daa8efac2a1273eed2354397a51216ae1e198ecbce9036fba4e7610b308b6153", - "sha256:dc5695c321e518d9f03b7ea6abb5ea3af4567766f9852ad1560f501b17588c7b", - "sha256:de552f4a1916e520f2703ec474d2b4d3f86d41f353e7680b597512ffe7eac5d0", - "sha256:de609a6f1b682f70bb7163da745ee815d8f230d97276db049ab447767466a09d", - "sha256:e12bb09678f38b7597b8346983d2323a6482dcd59e423d9448108c1be37cac9d", - "sha256:e168afe6bf6ab7ab46c8c375606298784ecbe3ba31c0980b7dcbb9631dcba97e", - "sha256:e78868e98f34f34a88e23ee9ccaeeec460e4eaf6db16d51d7a9b883e5e785a5e", - "sha256:e860f065cc4ea6f256d6f411aba4b1251255366e48e972f8a347cf88077b24fd", - "sha256:ea3a6ac4d74820c98fcc9da4a57847ad2cc36475a8bd9683f32ab6d47a2bd682", - 
"sha256:ebf64e281a06c904a7636781d2e973d1f0926a5b8b480ac658dc0f556e7779f4", - "sha256:ed6378c9d66d0de903763e7706383d60c33829581f0adff47b6535f1802fa6db", - "sha256:ee1e4fc267b437bb89990b2f2abf6c25765b89b72dd4a11e21934df449e0c976", - "sha256:ee4eafd77cc98d355a0d02f263efc0d3ae3ce4a7c24740010a8b4012bbb24937", - "sha256:efec946f331349dfc4ae9d0e034c263ddde19414fe5128580f512619abed05f1", - "sha256:f414da5c51bf350e4b7960644617c130140423882305f7574b6cf65a3081cecb", - "sha256:f71009b0d5e94c0e86533c0b27ed7cacc1239cb51c178fd239c3cfefefb0400a", - "sha256:f983e4c2f603c95dde63df633eec42955508eefd8d0f0e6d236d31a044c882d7", - "sha256:faa5e8496c530f9c71f2b4e1c49758b06e5f4055e17144906245c99fa6d45356", - "sha256:fed5dfefdf384d6fe975cc026886aece4f292feaf69d0eeb716cfd3c5a4dd8be" + "sha256:034964ea0ea09645bdde13038b38abb14be0aa747f20fcfab6181207dd9e0483", + "sha256:0686f2c16eafdc2c6b4ce6e86e5b3092e87db09ae64be2787616444eb35b9756", + "sha256:0903ffdb5b9007e503203b6285e4ff0faf96d875c19f1d103b475acf7d9f7311", + "sha256:1212cb231f2002934cd8d71a0d718fdd9d9a2dd671e0feef8501038df3508026", + "sha256:1357c3092702078b7782b6ebd5ba9b22c1a291c34fbf9d8f1a48237466ac7758", + "sha256:1a6cc4eb1e86364331928acafb2bb41d8ab735ca3caf2d6019b9f6dac3f4f65d", + "sha256:208ce1d8e3af138d1d9b21d7206356b7f29b96675e0113aea652cf024e4ddfdc", + "sha256:2498ff422823be087b48bc82710deb87ac34f6b7c8034ee39920647647de1e60", + "sha256:24c28df05bd284879d0fac850ba697077d2a33b7ebcaea6318d6b6cdfdc86ddc", + "sha256:2a57300cc8b034c5707085249efd09f19116bb80278d0ec925d7f3710165c510", + "sha256:2d2fc3ab021be3e0b5aec6d4164f2689d231b8bfc5185cc454314746aa4aee72", + "sha256:2f513758e7cda8bc262e80299a8e3395d7ef7f4ae705be62632f229bc6c33208", + "sha256:306da3dfa174b489a3fc63b0872e2226a5ddf94c59875a770d72aff945d5ed96", + "sha256:326e42f2b49462e05f8527a1311ce98f9f97c484b3e443ec0ea4638bed3aebcf", + "sha256:32a0e24cab2daae0503b06666d516e90a080c1a95aff0406b9f03c6489177c4b", + "sha256:32de71c393f126d8203e9815557c7ff4d72ed1ad3aa3f52f6c7938413176750a", + "sha256:341a07a4b55126bfae68c9bf24220a73d456111e5eb3dcbdab9fd16de2341224", + "sha256:38cacf1f378571450576f2c8ce87da6f3fddc59d744de5c12b37acc23285b1e1", + "sha256:3b94b074dcce39976db22ea75c7aea8b22d95e6d3b62f76e20e1179a278521d8", + "sha256:3dc7c64b56b82428894f056e9ff6e8ee917ff74fc26b65211a33602c2372e928", + "sha256:3f7a048ec1ebc991331d709be4884dc318c9eaafa66dcde8be0933ac0e702149", + "sha256:41f65a97bf2c4b161c9f8f89bc37058346bec9b36e373c8ad00a16c957bff625", + "sha256:48c95997af9314f4034fe5ba2d837399e786586e220835a578d28fe8161e6ae5", + "sha256:49e084d47a66027ac72844f9f52f13d347a9a1f05d4f84381b420e47f836a7fd", + "sha256:4b5d17d8f5b885ce50e0cda85f99c0719e365e98b587338535fa566a48375afb", + "sha256:4c0321bc03a1c513eca1837e3bba948b975bcf3a172aebc197ab3573207f137a", + "sha256:4e7c9aa2353eb0b0d845323857197daa036c2ff8624df990b0d886d22a8f665e", + "sha256:4fc4824e38c1e91a73bc820e7caacaf19d0acd557465aceef0420ca59489b390", + "sha256:54d8f94dec5765a9edc19610fecf0fdf9cab36cbb9def1213188215f735a6f98", + "sha256:574c5c94213bc9990805bfd7e4ba3826d3c098516cbc19f0d0ef0433ad93fa06", + "sha256:59e63da174ff287db05ef7c21d75974a5bac727ed60452aeb3a14278477842a8", + "sha256:5ae7927cd2b869ca4dc645169d8af5494a29c99afd0ea0f24dd00c811ab1d8b8", + "sha256:5f21e1278c9456cd601832375c778ca44614d3433996488221a56572c223f04a", + "sha256:5fdf91a7c07f40e47b193f2acae0ed9da35d09325d7c3c3279f722b7cbf3d264", + "sha256:62ab12fe03ffc49978d29de9c31bbb216610157f7e5ca8e172fed6642aead3be", + "sha256:632d2fdddd9fbe3ac8896a119fd18a71fc95ca9c4cbe5223096c142d8c4a2b1d", + 
"sha256:64a0c965a1e299c9b280006bdb15c276c427c45360aed676305dc36bcaa4d13c", + "sha256:67e013a17a3db4d98cc228fd5aeb36a51b0f5cf7330b9102a552060f1fe4e560", + "sha256:6b639a19e1791b646d27f15d17530a51722cc728d43b2dff3aeb904f92d91bac", + "sha256:6b6e4bcfc32f831bfe3d6d8a5acedfbfd5e252a03c83fa24813b277a3a8a13ca", + "sha256:7539dbb8f705e13629ba6f23388976aad809e387f32a6e5c0712e4e8d9bfcce7", + "sha256:758098b38c344d9a7f279baf0689261777e601f620078ef5afdc9bd3339965c3", + "sha256:762206ba3bf1d6c8c9e0055871d3c0d5b074b7c3120193e6c067e7866f106ab1", + "sha256:771c9a3851beaa617d8c8115d65f834a2b52490f42ee2b88b13f1fc5529e9e0c", + "sha256:81e7a27365b02fe70a77f1365376879917235b3fec551d19b4c91b51d0bc1d07", + "sha256:8338db3c76833d02dc21c3e2c42534091341d26e4f7ba32c6032bb558a02e07b", + "sha256:8426f97117b914b9bfb2a7bd46edc148e8defda728a55a5df3a564abe70cd7a4", + "sha256:842855bbb113a19c393c6de5aa6ed9a26c6b13c2fead5e49114d39f0d08b94d8", + "sha256:87453d491369cd8018016d2714a13e8461975161703c18ee31eecf087a8ae5d4", + "sha256:875fe8dffb43c20f68379ee098b035a7038d7903c795d46715f66575a7050b19", + "sha256:8ad4dfda52e64af3202ceb2143a62deba97894b71c64a4405ee80f6b3ea77285", + "sha256:8c48fc7458fe3a74dcdf56ba3534ff41bd421f69436df09ff3497fdaac18b431", + "sha256:8cbb040fec8eddd5a6a75e737fd73c9ce37e51f94bacdd0b178d0174a4758395", + "sha256:92d28a608127b357da47c99e0d0e0655ca2060286540fe9f2a25a2e8ac666e05", + "sha256:931bf3d0705b2834fed29354f35170fa022fe22a95542b61b7c66aca5f8a224f", + "sha256:93bbd66f46dddc41e8c656130c97c0fb515e0fa44e1eebb2592769dbbd41b2f5", + "sha256:9ad4640a409bc2b7d22b7921e7660f0db96c5c8c69fbb2e8f3261d4f71d33983", + "sha256:a4366f264fa60d3c109f0b27af0cd9eb8d46746bd70bd3d9d425f035b6c7e286", + "sha256:a73ed43d64209e853bba567a543170267a5cd64f359540b0ca2d597e329ba172", + "sha256:a810a57ce5e8ecf8eac6ec4dab534ff80c34e5a2c31db60e992009cd20f58e0f", + "sha256:b4660943030406aaa40ec9f51960dd88049903d9536bc3c8ebb5cc4e1f119bbe", + "sha256:b8906f537978da3f7f0bd1ba37b69f6a877bb43312023b086582707d2835bf2f", + "sha256:b91bfef5daa2a5a4fe62f8d317fc91a626073639f951f851bd2cb252d01bc6c5", + "sha256:ba1fc34d0b2f6fd53377a4c954116251eba6d076bf64f903311f4a7d27d10acd", + "sha256:ba235e00e0878ba1080b0f2a761f143b2a2d1c354f3d8e507fbf2f3de401bf18", + "sha256:bb11809b0de643a292a82f728c494a2bbef0e30a7c42d37464abbd6bef7ca7b1", + "sha256:c17b43fe9c6da16885e3fe28922bcd1a029e61631fb771c7d501019b40bcc904", + "sha256:c1c21030ed494deb10226f90e2dbd84a012d59810c409832714a3dd576527be2", + "sha256:c398a5a8e258dfdc5ea2aa4e5aa2ca3207f654a8eb268693dd1a76939074a588", + "sha256:c637188b930175c256f13adbfc427b83ec7e64476d1ec9d6608f312bb84e06c3", + "sha256:c7b4450093c0c909299770226fb0285be47b0a57545bae25b5c4e51566b0e587", + "sha256:c8fd7a16f7a047e06c747cfcf2acef3ac316132df1c6077445b29ee6f3f3a70b", + "sha256:ca505fd3767a09a139737f3278bc8a485cb64043062da89bcba27e2f2ea78d33", + "sha256:d1522025cda9e57329aade769f56e5793b2a5da7759a21914ee10e67e17e601e", + "sha256:d276280649305c1da6cdd84585d48ae1f0efa67434d8b10d2df95228e59a05bb", + "sha256:d33622dc63c295788eed09dbb1d11bed178909d3267b02d873116ee6be368244", + "sha256:d4f2af3107fe4dc40c0d1a2409863f5249c6796398a1d83c1d99a0b3fa6cfb8d", + "sha256:d5469b347445d1c31105f33e7bfc9a8ba213d48e42641a610dda65bf9e3c83f5", + "sha256:d80fd710b3307a3c63809048b72c536689b9b0b31a2518339c3f1a4d29c73d7a", + "sha256:d9bb9242b38a664f307b3b897f093896f7ed51ef4fe25a0502e5a368de9151ea", + "sha256:d9ceca96df54cb1675a0b7f52f1c6d5d1df62c5b40741ba211780f1b05a282a2", + "sha256:dc2c00acdf68f1f69a476b770af311a7dc3955b7de228b04a40bcc51ac4d743b", + 
"sha256:dfdabdf8519c93908b2bf0f87c3f86f9e88bab279fb4acfd0907519ca5a1739f", + "sha256:e04919ffa9a728c446b27b6b625fa1d00ece221bdb9d633e978a7e0353a12c0e", + "sha256:e0abcce5e874474d3eab5ad53be03dae2abe651d248bdeaabe83708e82969e78", + "sha256:e1c04fb380bc8efaae2fdf17ed6cd5d223da78a8b0b18a610f53d4c5d6e31dfd", + "sha256:e23dcdd4b2ff9c6b3317ea7921b210d39592f8ca1cdea58ada25b202c65c0a69", + "sha256:e34a3e665d38d0749072e6565400c8ce9abae976e338919a0dfbfb0e1ba43068", + "sha256:e6da2e0500742e0f157f005924a0589f2e2dcbfdd6cd0cc0abce367433e989be", + "sha256:e9aa4af6b879bb75a3c7766fbf49d77f4097dd12b548ecbbd8b3f85caa833281", + "sha256:e9bbdba9e75b1a9ee1dd1335034dad998ef1acc08492226c6fd50aa773bdfa7d", + "sha256:e9d4293b21c69ee4f9e1a99ac4f772951d345611c614a0cfae2ec6b565279bc9", + "sha256:eadd2417e83a77ce3ae4a0efd08cb0ebdfd317b6406d11020354a53ad458ec84", + "sha256:ed0102146574e5e9f079b2e1a06e6b5b12a691f9c74a65b93b7f3d4feda566c6", + "sha256:f0fb8efc9e579acf1e556fd86277fecec320c21ca9b5d39db96433ad8c45bc4a", + "sha256:f4e9946c8c7def17e4fcb5eddb14c4eb6ebc7f6f309075e6c8d23b133c104607", + "sha256:f7649c8b8e4bd1ccc5fcbd51a855d57a617deeba19c66e3d04b1abecc61036b2", + "sha256:f980a0640599a74f27fd9d50c84c293f1cb7afc2046c5c6d3efaf8ec7cdbc326", + "sha256:f9dc2113e0cf0dd637751ca736186fca63664939ceb9f9f67e93ade88c69c0c9", + "sha256:fde778947304e55fc732bc8ea5c6063e74244ac1808471cb498983a210aaf62c", + "sha256:fe23687924b25a2dee52fab15976fd6577ed8518072bcda9ff2e2b88ab1f168b" ], "markers": "python_version >= '3.9'", - "version": "==0.21.0" + "version": "==0.22.0" }, "scrapy": { "hashes": [ @@ -2488,6 +2571,14 @@ "markers": "python_version >= '3.9'", "version": "==75.6.0" }, + "shellingham": { + "hashes": [ + "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", + "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de" + ], + "markers": "python_version >= '3.7'", + "version": "==1.5.4" + }, "six": { "hashes": [ "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", @@ -2575,6 +2666,15 @@ "markers": "python_version >= '3.7'", "version": "==2.0.36" }, + "sqlmodel": { + "hashes": [ + "sha256:7d37c882a30c43464d143e35e9ecaf945d88035e20117bf5ec2834a23cbe505e", + "sha256:a1ed13e28a1f4057cbf4ff6cdb4fc09e85702621d3259ba17b3c230bfb2f941b" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==0.0.22" + }, "starlette": { "hashes": [ "sha256:0e4ab3d16522a255be6b28260b938eae2482f98ce5cc934cb08dce8dc3ba5835", @@ -2636,11 +2736,11 @@ }, "twisted": { "hashes": [ - "sha256:02951299672595fea0f70fa2d5f7b5e3d56836157eda68859a6ad6492d36756e", - "sha256:67aa7c8aa94387385302acf44ade12967c747858c8bcce0f11d38077a11c5326" + "sha256:695d0556d5ec579dcc464d2856b634880ed1319f45b10d19043f2b57eb0115b5", + "sha256:fe403076c71f04d5d2d789a755b687c5637ec3bcd3b2b8252d76f2ba65f54261" ], "markers": "python_full_version >= '3.8.0'", - "version": "==24.10.0" + "version": "==24.11.0" }, "typedload": { "hashes": [ @@ -2650,6 +2750,15 @@ "markers": "python_version >= '3.9'", "version": "==2.37" }, + "typer": { + "hashes": [ + "sha256:af58f737f8d0c0c37b9f955a6d39000b9ff97813afcbeef56af5e37cf743b45a", + "sha256:f476233a25770ab3e7b2eebf7c68f3bc702031681a008b20167573a4b7018f09" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==0.14.0" + }, "types-python-dateutil": { "hashes": [ "sha256:250e1d8e80e7bbc3a6c99b907762711d1a1cdd00e978ad39cb5940f6f0a87f3d", @@ -2938,91 +3047,91 @@ }, "yarl": { "hashes": [ - 
"sha256:01be8688fc211dc237e628fcc209dda412d35de7642453059a0553747018d075", - "sha256:039c299a0864d1f43c3e31570045635034ea7021db41bf4842693a72aca8df3a", - "sha256:074fee89caab89a97e18ef5f29060ef61ba3cae6cd77673acc54bfdd3214b7b7", - "sha256:13aaf2bdbc8c86ddce48626b15f4987f22e80d898818d735b20bd58f17292ee8", - "sha256:14408cc4d34e202caba7b5ac9cc84700e3421a9e2d1b157d744d101b061a4a88", - "sha256:1db1537e9cb846eb0ff206eac667f627794be8b71368c1ab3207ec7b6f8c5afc", - "sha256:1ece25e2251c28bab737bdf0519c88189b3dd9492dc086a1d77336d940c28ced", - "sha256:1ff116f0285b5c8b3b9a2680aeca29a858b3b9e0402fc79fd850b32c2bcb9f8b", - "sha256:205de377bd23365cd85562c9c6c33844050a93661640fda38e0567d2826b50df", - "sha256:20d95535e7d833889982bfe7cc321b7f63bf8879788fee982c76ae2b24cfb715", - "sha256:20de4a8b04de70c49698dc2390b7fd2d18d424d3b876371f9b775e2b462d4b41", - "sha256:2d90f2e4d16a5b0915ee065218b435d2ef619dd228973b1b47d262a6f7cd8fa5", - "sha256:2e6b4466714a73f5251d84b471475850954f1fa6acce4d3f404da1d55d644c34", - "sha256:309f8d27d6f93ceeeb80aa6980e883aa57895270f7f41842b92247e65d7aeddf", - "sha256:32141e13a1d5a48525e519c9197d3f4d9744d818d5c7d6547524cc9eccc8971e", - "sha256:34176bfb082add67cb2a20abd85854165540891147f88b687a5ed0dc225750a0", - "sha256:38b39b7b3e692b6c92b986b00137a3891eddb66311b229d1940dcbd4f025083c", - "sha256:3a3709450a574d61be6ac53d582496014342ea34876af8dc17cc16da32826c9a", - "sha256:3adaaf9c6b1b4fc258584f4443f24d775a2086aee82d1387e48a8b4f3d6aecf6", - "sha256:3f576ed278860df2721a5d57da3381040176ef1d07def9688a385c8330db61a1", - "sha256:42ba84e2ac26a3f252715f8ec17e6fdc0cbf95b9617c5367579fafcd7fba50eb", - "sha256:454902dc1830d935c90b5b53c863ba2a98dcde0fbaa31ca2ed1ad33b2a7171c6", - "sha256:466d31fd043ef9af822ee3f1df8fdff4e8c199a7f4012c2642006af240eade17", - "sha256:49a98ecadc5a241c9ba06de08127ee4796e1009555efd791bac514207862b43d", - "sha256:4d26f1fa9fa2167bb238f6f4b20218eb4e88dd3ef21bb8f97439fa6b5313e30d", - "sha256:52c136f348605974c9b1c878addd6b7a60e3bf2245833e370862009b86fa4689", - "sha256:536a7a8a53b75b2e98ff96edb2dfb91a26b81c4fed82782035767db5a465be46", - "sha256:576d258b21c1db4c6449b1c572c75d03f16a482eb380be8003682bdbe7db2f28", - "sha256:609ffd44fed2ed88d9b4ef62ee860cf86446cf066333ad4ce4123505b819e581", - "sha256:67b336c15e564d76869c9a21316f90edf546809a5796a083b8f57c845056bc01", - "sha256:685cc37f3f307c6a8e879986c6d85328f4c637f002e219f50e2ef66f7e062c1d", - "sha256:6a49ad0102c0f0ba839628d0bf45973c86ce7b590cdedf7540d5b1833ddc6f00", - "sha256:6fb64dd45453225f57d82c4764818d7a205ee31ce193e9f0086e493916bd4f72", - "sha256:701bb4a8f4de191c8c0cc9a1e6d5142f4df880e9d1210e333b829ca9425570ed", - "sha256:73553bbeea7d6ec88c08ad8027f4e992798f0abc459361bf06641c71972794dc", - "sha256:7520e799b1f84e095cce919bd6c23c9d49472deeef25fe1ef960b04cca51c3fc", - "sha256:7609b8462351c4836b3edce4201acb6dd46187b207c589b30a87ffd1813b48dc", - "sha256:7db9584235895a1dffca17e1c634b13870852094f6389b68dcc6338086aa7b08", - "sha256:7fa7d37f2ada0f42e0723632993ed422f2a679af0e200874d9d861720a54f53e", - "sha256:80741ec5b471fbdfb997821b2842c59660a1c930ceb42f8a84ba8ca0f25a66aa", - "sha256:8254dbfce84ee5d1e81051ee7a0f1536c108ba294c0fdb5933476398df0654f3", - "sha256:8b8d3e4e014fb4274f1c5bf61511d2199e263909fb0b8bda2a7428b0894e8dc6", - "sha256:8e1c18890091aa3cc8a77967943476b729dc2016f4cfe11e45d89b12519d4a93", - "sha256:9106025c7f261f9f5144f9aa7681d43867eed06349a7cfb297a1bc804de2f0d1", - "sha256:91b8fb9427e33f83ca2ba9501221ffaac1ecf0407f758c4d2f283c523da185ee", - "sha256:96404e8d5e1bbe36bdaa84ef89dc36f0e75939e060ca5cd45451aba01db02902", - 
"sha256:9b4c90c5363c6b0a54188122b61edb919c2cd1119684999d08cd5e538813a28e", - "sha256:a0509475d714df8f6d498935b3f307cd122c4ca76f7d426c7e1bb791bcd87eda", - "sha256:a173401d7821a2a81c7b47d4e7d5c4021375a1441af0c58611c1957445055056", - "sha256:a45d94075ac0647621eaaf693c8751813a3eccac455d423f473ffed38c8ac5c9", - "sha256:a5f72421246c21af6a92fbc8c13b6d4c5427dfd949049b937c3b731f2f9076bd", - "sha256:a64619a9c47c25582190af38e9eb382279ad42e1f06034f14d794670796016c0", - "sha256:a7ee6884a8848792d58b854946b685521f41d8871afa65e0d4a774954e9c9e89", - "sha256:ae38bd86eae3ba3d2ce5636cc9e23c80c9db2e9cb557e40b98153ed102b5a736", - "sha256:b026cf2c32daf48d90c0c4e406815c3f8f4cfe0c6dfccb094a9add1ff6a0e41a", - "sha256:b0a2074a37285570d54b55820687de3d2f2b9ecf1b714e482e48c9e7c0402038", - "sha256:b1a3297b9cad594e1ff0c040d2881d7d3a74124a3c73e00c3c71526a1234a9f7", - "sha256:b212452b80cae26cb767aa045b051740e464c5129b7bd739c58fbb7deb339e7b", - "sha256:b234a4a9248a9f000b7a5dfe84b8cb6210ee5120ae70eb72a4dcbdb4c528f72f", - "sha256:b4095c5019bb889aa866bf12ed4c85c0daea5aafcb7c20d1519f02a1e738f07f", - "sha256:b8e8c516dc4e1a51d86ac975b0350735007e554c962281c432eaa5822aa9765c", - "sha256:bd80ed29761490c622edde5dd70537ca8c992c2952eb62ed46984f8eff66d6e8", - "sha256:c083f6dd6951b86e484ebfc9c3524b49bcaa9c420cb4b2a78ef9f7a512bfcc85", - "sha256:c0f4808644baf0a434a3442df5e0bedf8d05208f0719cedcd499e168b23bfdc4", - "sha256:c4cb992d8090d5ae5f7afa6754d7211c578be0c45f54d3d94f7781c495d56716", - "sha256:c60e547c0a375c4bfcdd60eef82e7e0e8698bf84c239d715f5c1278a73050393", - "sha256:c73a6bbc97ba1b5a0c3c992ae93d721c395bdbb120492759b94cc1ac71bc6350", - "sha256:c893f8c1a6d48b25961e00922724732d00b39de8bb0b451307482dc87bddcd74", - "sha256:cd6ab7d6776c186f544f893b45ee0c883542b35e8a493db74665d2e594d3ca75", - "sha256:d89ae7de94631b60d468412c18290d358a9d805182373d804ec839978b120422", - "sha256:d9d4f5e471e8dc49b593a80766c2328257e405f943c56a3dc985c125732bc4cf", - "sha256:da206d1ec78438a563c5429ab808a2b23ad7bc025c8adbf08540dde202be37d5", - "sha256:dbf53db46f7cf176ee01d8d98c39381440776fcda13779d269a8ba664f69bec0", - "sha256:dd21c0128e301851de51bc607b0a6da50e82dc34e9601f4b508d08cc89ee7929", - "sha256:e2580c1d7e66e6d29d6e11855e3b1c6381971e0edd9a5066e6c14d79bc8967af", - "sha256:e3818eabaefb90adeb5e0f62f047310079d426387991106d4fbf3519eec7d90a", - "sha256:ed69af4fe2a0949b1ea1d012bf065c77b4c7822bad4737f17807af2adb15a73c", - "sha256:f172b8b2c72a13a06ea49225a9c47079549036ad1b34afa12d5491b881f5b993", - "sha256:f275ede6199d0f1ed4ea5d55a7b7573ccd40d97aee7808559e1298fe6efc8dbd", - "sha256:f7edeb1dcc7f50a2c8e08b9dc13a413903b7817e72273f00878cb70e766bdb3b", - "sha256:fa2c9cb607e0f660d48c54a63de7a9b36fef62f6b8bd50ff592ce1137e73ac7d", - "sha256:fe94d1de77c4cd8caff1bd5480e22342dbd54c93929f5943495d9c1e8abe9f42" + "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba", + "sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193", + "sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318", + "sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee", + "sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e", + "sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1", + "sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a", + "sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186", + "sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1", + "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50", + 
"sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640", + "sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb", + "sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8", + "sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc", + "sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5", + "sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58", + "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2", + "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393", + "sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24", + "sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b", + "sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910", + "sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c", + "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272", + "sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed", + "sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1", + "sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04", + "sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d", + "sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5", + "sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d", + "sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889", + "sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae", + "sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b", + "sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c", + "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576", + "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34", + "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477", + "sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990", + "sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2", + "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512", + "sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069", + "sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a", + "sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6", + "sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0", + "sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8", + "sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb", + "sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa", + "sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8", + "sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e", + "sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e", + "sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985", + "sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8", + "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1", + "sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5", + "sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690", + "sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10", + "sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789", + 
"sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b", + "sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca", + "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e", + "sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5", + "sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59", + "sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9", + "sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8", + "sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db", + "sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde", + "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7", + "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb", + "sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3", + "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6", + "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285", + "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb", + "sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8", + "sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482", + "sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd", + "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75", + "sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760", + "sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782", + "sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53", + "sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2", + "sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1", + "sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719", + "sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62" ], "markers": "python_version >= '3.9'", - "version": "==1.18.0" + "version": "==1.18.3" }, "zope.event": { "hashes": [ From 7e56c7ffb170e17622c0195c1dcec8b2f2ae9750 Mon Sep 17 00:00:00 2001 From: mhdzumair Date: Sat, 7 Dec 2024 10:33:22 +0530 Subject: [PATCH 7/7] Add support for fetching metadata & migrate get_meta endpoint --- api/main.py | 46 +++++++++++---- db/data_models.py | 59 +++++++++++++------ db/public_schemas.py | 4 +- db/sql_crud.py | 136 +++++++++++++++++++++++++++++++------------ db/sql_models.py | 18 +++--- 5 files changed, 188 insertions(+), 75 deletions(-) diff --git a/api/main.py b/api/main.py index 33ae7047..6e4b054a 100644 --- a/api/main.py +++ b/api/main.py @@ -492,14 +492,14 @@ async def search_meta( @app.get( "/{secret_str}/meta/{catalog_type}/{meta_id}.json", tags=["meta"], - response_model=schemas.MetaItem, + response_model=public_schemas.MetaItem, response_model_exclude_none=True, response_model_by_alias=False, ) @app.get( "/meta/{catalog_type}/{meta_id}.json", tags=["meta"], - response_model=schemas.MetaItem, + response_model=public_schemas.MetaItem, response_model_exclude_none=True, response_model_by_alias=False, ) @@ -514,16 +514,40 @@ async def get_meta( raise HTTPException(status_code=404, detail="Metadata not found") if catalog_type == MediaType.SERIES: - # For series, also fetch episodes - seasons = await sql_crud.series_metadata.get_episodes_data(session, meta_id) - return { - "meta": { - **metadata.model_dump(), - "seasons": [season.model_dump() for season in seasons], - } - } + # For series, 
parse episodes and seasons + episodes = [ + public_schemas.Video( + id=f"{meta_id}:{season.season_number}:{episode.episode_number}", + title=episode.title, + released=str(episode.released) if episode.released else None, + description=episode.overview, + thumbnail=episode.thumbnail, + season=season.season_number, + episode=episode.episode_number, + ) + for season in metadata.seasons + for episode in season.episodes + ] + metadata = public_schemas.Meta( + imdbRating=metadata.imdb_rating, + end_year=metadata.end_year, + videos=episodes, + **metadata.base_metadata.model_dump(), + ) + elif catalog_type == MediaType.TV: + metadata = public_schemas.Meta( + language=metadata.tv_language, + country=metadata.country, + logo=metadata.logo, + **metadata.base_metadata.model_dump(), + ) + elif catalog_type == MediaType.MOVIE: + metadata = public_schemas.Meta( + imdbRating=metadata.imdb_rating, + **metadata.base_metadata.model_dump(), + ) - return {"meta": metadata.model_dump()} + return public_schemas.MetaItem(meta=metadata) @app.get( diff --git a/db/data_models.py b/db/data_models.py index 577ff9bf..9298c34d 100644 --- a/db/data_models.py +++ b/db/data_models.py @@ -1,6 +1,6 @@ from datetime import datetime from typing import List, Optional, Callable, Any -from pydantic import BaseModel, Field, field_validator +from pydantic import BaseModel, Field, field_validator, ConfigDict from db.enums import MediaType, NudityStatus, IndexerType @@ -48,7 +48,11 @@ def validator(v: Any) -> List[str]: return validator -class BaseMediaData(BaseModel): +class BasePydanticModel(BaseModel): + model_config = ConfigDict(from_attributes=True) + + +class BaseMediaData(BasePydanticModel): """Base model for common metadata fields""" id: str @@ -81,11 +85,8 @@ class BaseMediaData(BaseModel): create_string_list_validator("title") ) - class Config: - from_attributes = True - -class MovieData(BaseModel): +class MovieData(BasePydanticModel): """Movie metadata data model""" id: str @@ -103,11 +104,26 @@ class MovieData(BaseModel): create_string_list_validator() ) - class Config: - from_attributes = True +class SeriesEpisodeData(BasePydanticModel): + """Series episode data model""" + + episode_number: int + title: str + overview: Optional[str] = None + released: Optional[datetime] = None + imdb_rating: Optional[float] = None + thumbnail: Optional[str] = None + + +class SeriesSeasonData(BasePydanticModel): + """Series season data model""" -class SeriesData(BaseModel): + season_number: int + episodes: List[SeriesEpisodeData] = [] + + +class SeriesData(BasePydanticModel): """Series metadata data model""" id: str @@ -119,6 +135,8 @@ class SeriesData(BaseModel): stars: List[str] = Field(default_factory=list) parental_certificates: List[str] = Field(default_factory=list) + seasons: List[SeriesSeasonData] = [] + # Validators using the helper function _validate_stars = field_validator("stars", mode="before")( create_string_list_validator() @@ -127,11 +145,8 @@ class SeriesData(BaseModel): create_string_list_validator() ) - class Config: - from_attributes = True - -class TVData(BaseModel): +class TVData(BasePydanticModel): """TV metadata data model""" id: str @@ -141,11 +156,18 @@ class TVData(BaseModel): tv_language: Optional[str] = None logo: Optional[str] = None - class Config: - from_attributes = True + +class EpisodeFileData(BasePydanticModel): + """Episode file data model""" + + season_number: int + episode_number: int + file_index: Optional[int] = None + filename: Optional[str] = None + size: Optional[int] = None -class 
TorrentStreamData(BaseModel): +class TorrentStreamData(BasePydanticModel): """Torrent stream data model""" id: str @@ -164,11 +186,10 @@ class TorrentStreamData(BaseModel): created_at: datetime updated_at: Optional[datetime] = None - class Config: - from_attributes = True + episode_files: List[EpisodeFileData] = [] -class TVStreamData(BaseModel): +class TVStreamData(BasePydanticModel): """TV stream data model""" id: int diff --git a/db/public_schemas.py b/db/public_schemas.py index c50841ac..30963232 100644 --- a/db/public_schemas.py +++ b/db/public_schemas.py @@ -14,7 +14,9 @@ class Catalog(BaseModel): class Video(BaseModel): id: str title: str - released: str + released: str | None = None + description: str | None = None + thumbnail: str | None = None season: int | None = None episode: int | None = None diff --git a/db/sql_crud.py b/db/sql_crud.py index 3490f879..ccb8cd4c 100644 --- a/db/sql_crud.py +++ b/db/sql_crud.py @@ -1,12 +1,13 @@ import logging from sqlalchemy import func -from sqlalchemy.orm import joinedload, selectinload +from sqlalchemy.orm import selectinload, subqueryload from sqlmodel import select, or_ from sqlmodel.ext.asyncio.session import AsyncSession from sqlmodel.sql._expression_select_cls import Select -from db import data_models, public_schemas +from db import data_models, public_schemas, sql_models +from db.redis_database import REDIS_ASYNC_CLIENT from db.schemas import UserData from db.sql_models import ( BaseMetadata, @@ -26,7 +27,6 @@ AkaTitle, TorrentStream, ) -from db.redis_database import REDIS_ASYNC_CLIENT logger = logging.getLogger(__name__) @@ -318,26 +318,36 @@ async def _set_cache(self, media_id: str, data: T) -> None: except Exception as e: logger.error(f"Error caching data for {media_id}: {e}") - async def _fetch_metadata( + async def _fetch_base_metadata( self, session: AsyncSession, media_id: str - ) -> Optional[M]: - """Fetch type-specific metadata""" + ) -> Optional[BaseMetadata]: + """Fetch base metadata for a media item""" query = ( - select(self.sql_model) - .where(self.sql_model.id == media_id) + select(BaseMetadata) + .where(BaseMetadata.id == media_id) .options( - joinedload(self.sql_model.base_metadata).options( - selectinload(BaseMetadata.genres), - selectinload(BaseMetadata.catalogs), - selectinload(BaseMetadata.aka_titles), - ) + selectinload(BaseMetadata.genres), + selectinload(BaseMetadata.catalogs), + selectinload(BaseMetadata.aka_titles), ) ) + result = await session.exec(query) + return result.one_or_none() + + async def _fetch_media_type_metadata( + self, session: AsyncSession, media_id: str + ) -> Optional[M]: + """Fetch type-specific metadata""" + if self.media_type in [MediaType.MOVIE, MediaType.SERIES]: - query = query.options( - selectinload(self.sql_model.parental_certificates), - selectinload(self.sql_model.stars), + query = ( + select(self.sql_model) + .where(self.sql_model.id == media_id) + .options( + selectinload(self.sql_model.parental_certificates), + selectinload(self.sql_model.stars), + ) ) result = await session.exec(query) @@ -352,15 +362,20 @@ async def get_metadata( if cached_data: return cached_data - metadata = await self._fetch_metadata(session, media_id) - if not metadata: + # Fetch base metadata + base_metadata = await self._fetch_base_metadata(session, media_id) + if not base_metadata: + return None + + media_metadata = await self._fetch_media_type_metadata(session, media_id) + if not media_metadata: return None # Construct the full metadata object - metadata = self.data_model.model_validate(metadata) + 
model_metadata = self.data_model.model_validate(media_metadata) - await self._set_cache(media_id, metadata) - return metadata + await self._set_cache(media_id, model_metadata) + return model_metadata class SeriesMetadataRetriever( @@ -368,23 +383,70 @@ class SeriesMetadataRetriever( ): """Series-specific metadata retriever with episode information""" - async def get_metadata( - self, session: AsyncSession, media_id: str, bypass_cache: bool = False - ) -> Optional[data_models.SeriesData]: - """Fetch series metadata with episodes""" - metadata = await super().get_metadata(session, media_id, bypass_cache) - if not metadata: - return None + async def _fetch_media_type_metadata(self, session: AsyncSession, media_id: str): + """Fetch type-specific metadata with seasons and episodes""" + query = ( + select(self.sql_model) + .where(self.sql_model.id == media_id) + .options( + selectinload(self.sql_model.parental_certificates), + selectinload(self.sql_model.stars), + subqueryload(self.sql_model.seasons).options( + subqueryload(sql_models.SeriesSeason.episodes) + ), + ) + ) + + result = await session.exec(query) + return result.one_or_none() + + async def _fetch_seasons_with_episodes( + self, session: AsyncSession, series_id: str + ) -> List[data_models.SeriesSeasonData]: + """Fetch all seasons and episodes for a series with stream counts""" + # First, get all seasons with their episodes + season_query = ( + select(sql_models.SeriesSeason) + .where(sql_models.SeriesSeason.series_id == series_id) + .options(selectinload(sql_models.SeriesSeason.episodes)) + .order_by( + sql_models.SeriesSeason.season_number, + ) + ) - # Fetch episode data - season_data = await self.get_season_data(session, media_id) - metadata.seasons = season_data - return metadata + result = await session.exec(season_query) + return result.all() - async def get_season_data(self, session: AsyncSession, series_id: str) -> list: - """Fetch season data for series""" - # TODO: Implement season data retrieval - return [] + async def get_episode_streams( + self, + session: AsyncSession, + series_id: str, + season_number: int, + episode_number: int, + ) -> List[dict]: + """Get available streams for a specific episode""" + query = ( + select(TorrentStream) + .where( + TorrentStream.meta_id == series_id, TorrentStream.is_blocked == False + ) + .join(sql_models.EpisodeFile) + .where( + sql_models.EpisodeFile.season_number == season_number, + sql_models.EpisodeFile.episode_number == episode_number, + ) + .options( + selectinload(TorrentStream.languages), + selectinload(TorrentStream.episode_files).where( + sql_models.EpisodeFile.season_number == season_number, + sql_models.EpisodeFile.episode_number == episode_number, + ), + ) + ) + + result = await session.exec(query) + streams = result.unique().all() + return [stream.to_dict() for stream in streams] # Initialize retrievers @@ -404,7 +466,7 @@ async def get_metadata_by_type( media_type: MediaType, media_id: str, bypass_cache: bool = False, -) -> Optional[Any]: +) -> data_models.MovieData | data_models.SeriesData | data_models.TVData | None: """Factory function to get metadata based on media type""" retrievers = { MediaType.MOVIE: movie_metadata, diff --git a/db/sql_models.py b/db/sql_models.py index 06c41c60..a9953712 100644 --- a/db/sql_models.py +++ b/db/sql_models.py @@ -133,7 +133,10 @@ class SeriesSeason(SQLModel, table=True): # Relationships series: "SeriesMetadata" = Relationship(back_populates="seasons") - episodes: List["SeriesEpisode"] = Relationship(back_populates="season") + 
episodes: List["SeriesEpisode"] = Relationship( + back_populates="season", + sa_relationship_kwargs={"order_by": "SeriesEpisode.episode_number"}, + ) class SeriesEpisode(SQLModel, table=True): @@ -146,12 +149,10 @@ class SeriesEpisode(SQLModel, table=True): season_id: int = Field(foreign_key="series_season.id", index=True) episode_number: int = Field(index=True) title: str - plot: Optional[str] = None - runtime: Optional[int] = None - air_date: Optional[datetime] = Field(default=None, sa_type=DateTime(timezone=True)) + overview: Optional[str] = None + released: Optional[datetime] = Field(default=None, sa_type=DateTime(timezone=True)) imdb_rating: Optional[float] = None - poster: Optional[str] = None - is_poster_working: bool = Field(default=True) + thumbnail: Optional[str] = None # Relationships season: SeriesSeason = Relationship(back_populates="episodes") @@ -274,7 +275,10 @@ class SeriesMetadata(TimestampMixin, table=True): base_metadata: BaseMetadata = Relationship( sa_relationship_kwargs={"uselist": False, "cascade": "all, delete"} ) - seasons: List[SeriesSeason] = Relationship(back_populates="series") + seasons: List[SeriesSeason] = Relationship( + back_populates="series", + sa_relationship_kwargs={"order_by": "SeriesSeason.season_number"}, + ) parental_certificates: List["ParentalCertificate"] = Relationship( link_model=MediaParentalCertificateLink, sa_relationship_kwargs={