diff --git a/Pipfile b/Pipfile index 6f2d0b3..030d5e9 100644 --- a/Pipfile +++ b/Pipfile @@ -16,7 +16,6 @@ python-telegram-bot = {version = ">=20", extras = ["all"]} tortoise-orm = {extras = ["accel"], version = "*"} telegraph = {extras = ["aio"], version = "*"} uvicorn = "*" -asyncmy = "*" redis = "*" [dev-packages] @@ -24,4 +23,4 @@ pytest = "*" pytest-asyncio = "*" [requires] -python_version = "3.11" +python_version = "3.12" diff --git a/Pipfile.lock b/Pipfile.lock index 64e4e74..94f43cd 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,11 +1,11 @@ { "_meta": { "hash": { - "sha256": "a5780fe673b5796ac6c92e8698472a90db1fc0516731652391288a881254c75e" + "sha256": "0a80f38ff0c749d0faf67ffed142c27179200c70884e659ca1b489286cf0bc9e" }, "pipfile-spec": 6, "requires": { - "python_version": "3.11" + "python_version": "3.12" }, "sources": [ { @@ -42,11 +42,11 @@ }, "anyio": { "hashes": [ - "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8", - "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6" + "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94", + "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7" ], "markers": "python_version >= '3.8'", - "version": "==4.3.0" + "version": "==4.4.0" }, "apscheduler": { "hashes": [ @@ -56,78 +56,6 @@ "markers": "python_version >= '3.6'", "version": "==3.10.4" }, - "async-timeout": { - "hashes": [ - "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f", - "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028" - ], - "markers": "python_full_version < '3.12.0'", - "version": "==4.0.3" - }, - "asyncmy": { - "hashes": [ - "sha256:02caedc00035b2bd0be5555ef61d83ee9cb356ab488ac40072630ba224af02b0", - "sha256:0d56df7342f7b5467a9d09a854f0e5602c8da09afdad8181ba40b0434d66d8a4", - "sha256:1021796f1910a0c2ab2d878f8f5d56f939ef0681f9c1fe925b78161cad2f8297", - 
"sha256:1b1dd463bb054138bd1fd3fec9911eb618e92f54f61abb476658f863340394d1", - "sha256:1ef02186cc02cb767ee5d5cf9ab002d5c7910a1a9f4c16a666867a9325c9ec5e", - "sha256:20ae3acc326b4b104949cc5e3a728a927e671f671c6f26266ad4a44f57ea9a5b", - "sha256:2136b749ac489c25ab3aab4a81ae6e9dfb18fd0a5ebda96cd72788c5e4d46927", - "sha256:2a83e3895bed6d44aa334deb1c343d4ffc64b0def2215149f8df2e0e13499250", - "sha256:2b4a2a7cf0bd5051931756e765fefef3c9f9561550e0dd8b1e79308d048b710a", - "sha256:3ceb59b9307b5eb893f4d473fcbc43ac0321ffb0436e0115b20cc2e0baa44eb5", - "sha256:4025db2a27b1d84d3c68b5d5aacecac17258b69f25ec8a8c350c5f666003a778", - "sha256:4321c4cb4c691689aa26a56354e3fa723d89dc2cac82751e8671b2a4e6441778", - "sha256:47609d34e6b49fc5ad5bd2a2a593ca120e143e2a4f4206f27a543c5c598a18ca", - "sha256:49622dc4ec69b5a4cbddb3695a1e9249b31092c6f19604abb664b43dcb509b6f", - "sha256:4a664d58f9ebe4132f6cb3128206392be8ad71ad6fb09a5f4a990b04ec142024", - "sha256:4c4f1dc0acbaac8c3f046215031bbf3ca3d2cd7716244365325496e4f6222b78", - "sha256:4ecad6826086e47596c6aa65dcbe221305f3d9232f0d4de11b8562ee2c55464a", - "sha256:544d3736fd6682f0201a123e4f49335420b6abf6c245abe0487f5967021f1436", - "sha256:55e3bc41aa0d4ab410fc3a1d0c31b9cdb6688cd3b0cae6f2ee49c2e7f42968be", - "sha256:5b944d9cdf7ce25b396cd1e0c9319ba24c6583bde7a5dd31157614f3b9cc5b2f", - "sha256:625f96371d64769b94f7f7f699cfa5be56e669828aef3698cbf4f5bb0014ccb3", - "sha256:63c2a98f225560f9a52d5bd0d2e58517639e209e5d996e9ab7470e661b39394d", - "sha256:64bcd5110dca7a96cb411de85ab8f79fa867e864150939b8e76286a66eab28fc", - "sha256:696da0f71db0fe11e62fa58cd5a27d7c9d9a90699d13d82640755d0061da0624", - "sha256:7678d3641d5a19f20e7e19220c83405fe8616a3b437efbc494f34ad186cedcf0", - "sha256:8171a64888453423a17ae507cd97d256541ea880b314bba16376ab9deffef6e8", - "sha256:83cf951a44294626df43c5a85cf328297c3bac63f25ede216f9706514dabb322", - "sha256:84d20745bb187ced05bd4072ae8b0bff4b4622efa23b79935519edb717174584", - "sha256:8a1d63c1bb8e3a09c90767199954fd423c48084a1f6c0d956217bc2e48d37d6d", 
- "sha256:901aac048e5342acc62e1f68f6dec5aa3ed272cb2b138dca38d1c74fc414285d", - "sha256:a4aa17cc6ac0f7bc6b72e08d112566e69a36e2e1ebebad43d699757b7b4ff028", - "sha256:a8755248429f9bd3d7768c71494c9943fced18f9f526f768e96f5b9b3c727c84", - "sha256:ad06f3c02d455947e95087d29f7122411208f0eadaf8671772fe5bad97d9873a", - "sha256:b8412e825443ee876ef0d55ac4356b56173f5cb64ca8e4638974f8cf5c912a63", - "sha256:beb3d0e434ce0bd9e609cf5341c3b82433ef544f89055d3792186e11fa2433d9", - "sha256:bf18aef65ac98f5130ca588c55a83a56e74ae416cf0fe2c0757a2b597c4269d0", - "sha256:c19f27b7ff0e297f2981335a85599ffe1c9a8a35c97230203321d5d6e9e4cb30", - "sha256:c2d4ad8817f99d9734912c2ff91c42e419031441f512b4aecd7e40a167908c1c", - "sha256:c966de493928f26218e0bfaa284cfa609540e52841c423d7babf9ca97c9ff820", - "sha256:cd7cde6759dbbfcc467c2af4ef3d75de0b756dde39a3d176383d8c6d9f8a34f3", - "sha256:d077eaee9a126f36bbe95e0412baa89e93172dd46193ef7bf7650a686e458e50", - "sha256:d08fb8722150a9c0645665cf777916335687bddb5f37a8e02af772e330be777b", - "sha256:d2593717fa7a92a7d361444726292ce34edea76d5aa67d469b5efeee1c9b729e", - "sha256:da188be013291d1f831d63cdd3614567f4c63bfdcde73631ddff8df00c56d614", - "sha256:da7640f3357849b176364ed546908e28c8460701ddc0d23cc3fa7113ec52a076", - "sha256:dbee276a9c8750b522aaad86315a6ed1ffbcb9145ce89070db77831c00dd2da1", - "sha256:dc608ff331c5d1065e2d3566493d2d9e17f36e315bd5fad3c91c421eea306edb", - "sha256:e2b77f03a17a8db338d74311e38ca6dbd4ff9aacb07d2af6b9e0cac9cf1c7b87", - "sha256:e7e6f5205722e67c910510e294ad483bdafa7e29d5cf455d49ffa4b819e55fd8", - "sha256:e8f48d09adf3426e7a59066eaae3c7c84c318ec56cc2f20732d652056c7a3f62", - "sha256:e9f1ca623517552a637900b90d65b5bafc9c67bebf96e3427eecb9359ffa24b1", - "sha256:e9f22e13bd77277593b56de2e4b65c40c2e81b1a42c4845d062403c5c5bc52bc", - "sha256:ea242364523f6205c4426435272bd57cbf593c20d5e5551efb28d44cfbd595c2", - "sha256:ea44eefc965c62bcfebf34e9ef00f6e807edf51046046767c56914243e0737e4", - 
"sha256:eeeb53fdd54eef54b9793a7a5c849c5f7a2fb2540a637f21585a996ef9dd8845", - "sha256:f0c606a55625146e189534cc39038540f7a8f2c680ea82845c1f4315a9ad2914", - "sha256:f2bbd7b75e2d751216f48c3b1b5092b812d70c2cd0053f8d2f50ec3f76a525a8" - ], - "index": "pypi", - "markers": "python_version >= '3.7' and python_version < '4.0'", - "version": "==0.2.9" - }, "asyncpg": { "hashes": [ "sha256:0009a300cae37b8c525e5b449233d59cd9868fd35431abc470a3e364d2b85cb9", @@ -194,11 +122,11 @@ }, "certifi": { "hashes": [ - "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f", - "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1" + "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516", + "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56" ], "markers": "python_version >= '3.6'", - "version": "==2024.2.2" + "version": "==2024.6.2" }, "cffi": { "hashes": [ @@ -354,65 +282,6 @@ "markers": "python_full_version >= '3.7.0'", "version": "==3.3.2" }, - "ciso8601": { - "hashes": [ - "sha256:013410263cba46748d2de29e9894341ae41223356cde7970478c32bd0984d10c", - "sha256:024c52d5d0670f15ca3dc53eff7345b6eaee22fba929675f6a408f9d1e159d98", - "sha256:025859ec286a994aa3f2120c0f27d053b719cabc975398338374f2cc1f961125", - "sha256:02828107880848ff497971ebc98e6dc851ad7af8ec14a58089e0e11f3111cad6", - "sha256:02ecbd7c8336c4e1c6bb725b898e29414ee92bdc0be6c72fb07036836b1ac867", - "sha256:06941e2ee46701f083aeb21d13eb762d74d5ed6c46ff22119f27a42ed6edc8f9", - "sha256:070f568de3bc269268296cb9265704dc5fcb9d4c12b1f1c67536624174df5d09", - "sha256:0d980a2a88030d4d8b2434623c250866a75b4979d289eba69bec445c51ace99f", - "sha256:121d27c55f4455eaa27ba3bd602beca915df9a352f235e935636a4660321070e", - "sha256:21204d98496cf5c0511dc21533be55c2a2d34b8c65603946a116812ffbae3b2d", - "sha256:21cf83ca945bb26ecd95364ae2c9ed0276378e5fe35ce1b64d4c6d5b33038ea3", - "sha256:22128f0def36fa3c4cf0c482a216e8b8ad722def08bc11c07438eff82bdcd02a", - 
"sha256:2a64ff58904d4418d60fa9619014ae820ae21f7aef58da46df78a4c647f951ec", - "sha256:2c1ef17d1ea52a39b2dce6535583631ae4bfb65c76f0ee8c99413a6861a46c9e", - "sha256:2c690ac24ec3407f68cdfd5e032c6cb18126ef33d6c4b3db0669b9cbb8c96bd4", - "sha256:3212c7ffe5d8080270548b5f2692ffd2039683b6628a8d2ad456122cc5793c4c", - "sha256:364702e338212b6c1a8643d9399ada21560cf132f363853473560625cb4207f1", - "sha256:36525b1f380f4601533f4631c69911e44efb9cb50beab1da3248b0daa32bced4", - "sha256:3771049ba29bd1077588c0a24be1d53f7493e7cc686b2caa92f7cae129636a0e", - "sha256:46a3663c2cf838f0149e1cdb8e4bdc95716e03cf2d5f803a6eb755d825896ebe", - "sha256:473288cd63efe6a2cf3f4b5f90394e53095358ccb13d6128f87a2da85d0f389b", - "sha256:4ac00d293cdb3d1a5c78e09b3d75c7b0292ab45d5b26853b436ff5087eba2165", - "sha256:4e30501eed43eea7ef64f032c81cd1d8b2020035cbdcefad40db72e2f3bc97ff", - "sha256:55381365366dacb57207cec610d26c9a6c0d237cb65a0cf67a2baaa5299f2366", - "sha256:566b4a8b2f9717e54ffcdd732a7c8051a91da30a60a4f1dafb62e303a1dbac69", - "sha256:57db9a28e87f9e4fccba643fb70a9ba1515adc5e1325508eb2c10dd96620314c", - "sha256:58a749d63f28c2eda71416c9d6014113b0748abf5fd14c502b01bd515502fedf", - "sha256:6850889813f3135e0aa18f0aaec64249dd81d36a1b9bce60bb45182930c86663", - "sha256:695583810836a42945084b33621b22b0309701c6916689f6a3588fa44c5bc413", - "sha256:6a25da209193134842cd573464a5323f46fcc3ed781b633f15a34793ba7e1064", - "sha256:7533256af90724b8b7a707dcd1be4b67989447595c8e1e1c28399d4fd51dac50", - "sha256:7eb7b5ef8714d3d1fe9f3256b7a679ad783da899a0b7503a5ace78186735f840", - "sha256:874d20c6339e9096baaadfd1b9610bb8d5b373a0f2858cc06de8142b98d2129c", - "sha256:87721de54e008fb1c4c3978553b05a9c417aa25b76ddf5702d6f7e8d9b109288", - "sha256:8acb45545e6a654310c6ef788aacb2d73686646c414ceacdd9f5f78a83165af5", - "sha256:8c29ea2b03dee2dc0a5d3e4a0b7d7768c597781e9fa451fe1025600f7cb55a89", - "sha256:8c59646197ddbf84909b6c31d55f744cfeef51811e3910b61d0f58f2885823fd", - "sha256:9065053c034c80c0afd74c71a4906675d07078a05cfd1cb5ff70661378cdbe60", 
- "sha256:99addd8b113f85fac549167073f317a318cd2b5841552598ceb97b97c5708a38", - "sha256:9f107a4c051e7c0416824279264d94f4ed3da0fbd82bd96ec3c3293426826de4", - "sha256:9f25647803c9a5aaaed130c53bbec7ea06a4f95ba5c7016f59e444b4ef7ac39e", - "sha256:ad8f417c45eea973a694599b96f40d841215bfee352cb9963383e8d66b309981", - "sha256:b26935687ef1837b56997d8c61f1d789e698be58b261410e629eda9c89812141", - "sha256:b869396e9756a7c0696d8eb69ce1d8980bea5e25c86e5996b10d78c900a4362c", - "sha256:cb135de0e3b8feb7e74a4f7a234e8c8545957fe8d26316a1a549553f425c629d", - "sha256:d1f85c0b7fa742bbfd18177137ccbaa3f867dd06157f91595075bb959a733048", - "sha256:d4bc9d577c0d1e57532513fc2899f5231727e28981a426767f7fa13dacb18c06", - "sha256:e4ac59453664781dfddebee51f9a36e41819993823fdb09ddc0ce0e4bd3ff0c3", - "sha256:e7ae2c3442d042de5330672d0d28486ed92f9d7c6dc010943aa618fd361d4638", - "sha256:e8e76825f80ce313d75bbbef1d3b8bd9e0ce31dbc157d1981e9593922c9983e7", - "sha256:eaecca7e0c3ef9e8f5e963e212b083684e849f9a9bb25834d3042363223a73cd", - "sha256:ef44cb4dc83f37019a356c7a72692cbe17072456f4879ca6bc0339f67eee5d00", - "sha256:f39bb5936debf21c52e5d52b89f26857c303da80c43a72883946096a6ef5e561", - "sha256:f3ae83f4e60fc7e260a4188e4ec4ac1bdd40bdb382eeda92fc266c5aa2f0a1ee" - ], - "version": "==2.3.1" - }, "click": { "hashes": [ "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", @@ -421,43 +290,51 @@ "markers": "python_version >= '3.7'", "version": "==8.1.7" }, + "colorama": { + "hashes": [ + "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", + "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6" + ], + "markers": "sys_platform == 'win32'", + "version": "==0.4.6" + }, "cryptography": { "hashes": [ - "sha256:02c0eee2d7133bdbbc5e24441258d5d2244beb31da5ed19fbb80315f4bbbff55", - "sha256:0d563795db98b4cd57742a78a288cdbdc9daedac29f2239793071fe114f13785", - "sha256:16268d46086bb8ad5bf0a2b5544d8a9ed87a0e33f5e77dd3c3301e63d941a83b", - 
"sha256:1a58839984d9cb34c855197043eaae2c187d930ca6d644612843b4fe8513c886", - "sha256:2954fccea107026512b15afb4aa664a5640cd0af630e2ee3962f2602693f0c82", - "sha256:2e47577f9b18723fa294b0ea9a17d5e53a227867a0a4904a1a076d1646d45ca1", - "sha256:31adb7d06fe4383226c3e963471f6837742889b3c4caa55aac20ad951bc8ffda", - "sha256:3577d029bc3f4827dd5bf8bf7710cac13527b470bbf1820a3f394adb38ed7d5f", - "sha256:36017400817987670037fbb0324d71489b6ead6231c9604f8fc1f7d008087c68", - "sha256:362e7197754c231797ec45ee081f3088a27a47c6c01eff2ac83f60f85a50fe60", - "sha256:3de9a45d3b2b7d8088c3fbf1ed4395dfeff79d07842217b38df14ef09ce1d8d7", - "sha256:4f698edacf9c9e0371112792558d2f705b5645076cc0aaae02f816a0171770fd", - "sha256:5482e789294854c28237bba77c4c83be698be740e31a3ae5e879ee5444166582", - "sha256:5e44507bf8d14b36b8389b226665d597bc0f18ea035d75b4e53c7b1ea84583cc", - "sha256:779245e13b9a6638df14641d029add5dc17edbef6ec915688f3acb9e720a5858", - "sha256:789caea816c6704f63f6241a519bfa347f72fbd67ba28d04636b7c6b7da94b0b", - "sha256:7f8b25fa616d8b846aef64b15c606bb0828dbc35faf90566eb139aa9cff67af2", - "sha256:8cb8ce7c3347fcf9446f201dc30e2d5a3c898d009126010cbd1f443f28b52678", - "sha256:93a3209f6bb2b33e725ed08ee0991b92976dfdcf4e8b38646540674fc7508e13", - "sha256:a3a5ac8b56fe37f3125e5b72b61dcde43283e5370827f5233893d461b7360cd4", - "sha256:a47787a5e3649008a1102d3df55424e86606c9bae6fb77ac59afe06d234605f8", - "sha256:a79165431551042cc9d1d90e6145d5d0d3ab0f2d66326c201d9b0e7f5bf43604", - "sha256:a987f840718078212fdf4504d0fd4c6effe34a7e4740378e59d47696e8dfb477", - "sha256:a9bc127cdc4ecf87a5ea22a2556cab6c7eda2923f84e4f3cc588e8470ce4e42e", - "sha256:bd13b5e9b543532453de08bcdc3cc7cebec6f9883e886fd20a92f26940fd3e7a", - "sha256:c65f96dad14f8528a447414125e1fc8feb2ad5a272b8f68477abbcc1ea7d94b9", - "sha256:d8e3098721b84392ee45af2dd554c947c32cc52f862b6a3ae982dbb90f577f14", - "sha256:e6b79d0adb01aae87e8a44c2b64bc3f3fe59515280e00fb6d57a7267a2583cda", - "sha256:e6b8f1881dac458c34778d0a424ae5769de30544fc678eac51c1c8bb2183e9da", 
- "sha256:e9b2a6309f14c0497f348d08a065d52f3020656f675819fc405fb63bbcd26562", - "sha256:ecbfbc00bf55888edda9868a4cf927205de8499e7fabe6c050322298382953f2", - "sha256:efd0bf5205240182e0f13bcaea41be4fdf5c22c5129fc7ced4a0282ac86998c9" + "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad", + "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583", + "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b", + "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c", + "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1", + "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648", + "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949", + "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba", + "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c", + "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9", + "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d", + "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c", + "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e", + "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2", + "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d", + "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7", + "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70", + "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2", + "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7", + "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14", + "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe", + "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e", + 
"sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71", + "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961", + "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7", + "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c", + "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28", + "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842", + "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902", + "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801", + "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a", + "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e" ], "markers": "python_version >= '3.7'", - "version": "==42.0.7" + "version": "==42.0.8" }, "h11": { "hashes": [ @@ -807,6 +684,7 @@ "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27", "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a" ], + "index": "pypi", "markers": "python_version >= '3.8'", "version": "==10.3.0" }, @@ -820,96 +698,96 @@ }, "pydantic": { "hashes": [ - "sha256:e029badca45266732a9a79898a15ae2e8b14840b1eabbb25844be28f0b33f3d5", - "sha256:e9dbb5eada8abe4d9ae5f46b9939aead650cd2b68f249bb3a8139dbe125803cc" + "sha256:c46c76a40bb1296728d7a8b99aa73dd70a48c3510111ff290034f860c99c419e", + "sha256:ea91b002777bf643bb20dd717c028ec43216b24a6001a280f83877fd2655d0b4" ], "markers": "python_version >= '3.8'", - "version": "==2.7.1" + "version": "==2.7.3" }, "pydantic-core": { "hashes": [ - "sha256:0098300eebb1c837271d3d1a2cd2911e7c11b396eac9661655ee524a7f10587b", - "sha256:042473b6280246b1dbf530559246f6842b56119c2926d1e52b631bdc46075f2a", - "sha256:05b7133a6e6aeb8df37d6f413f7705a37ab4031597f64ab56384c94d98fa0e90", - "sha256:0680b1f1f11fda801397de52c36ce38ef1c1dc841a0927a94f226dea29c3ae3d", - "sha256:0d69b4c2f6bb3e130dba60d34c0845ba31b69babdd3f78f7c0c8fae5021a253e", - 
"sha256:1404c69d6a676245199767ba4f633cce5f4ad4181f9d0ccb0577e1f66cf4c46d", - "sha256:182245ff6b0039e82b6bb585ed55a64d7c81c560715d1bad0cbad6dfa07b4027", - "sha256:1a388a77e629b9ec814c1b1e6b3b595fe521d2cdc625fcca26fbc2d44c816804", - "sha256:1d90c3265ae107f91a4f279f4d6f6f1d4907ac76c6868b27dc7fb33688cfb347", - "sha256:20aca1e2298c56ececfd8ed159ae4dde2df0781988c97ef77d5c16ff4bd5b400", - "sha256:219da3f096d50a157f33645a1cf31c0ad1fe829a92181dd1311022f986e5fbe3", - "sha256:22057013c8c1e272eb8d0eebc796701167d8377441ec894a8fed1af64a0bf399", - "sha256:223ee893d77a310a0391dca6df00f70bbc2f36a71a895cecd9a0e762dc37b349", - "sha256:224c421235f6102e8737032483f43c1a8cfb1d2f45740c44166219599358c2cd", - "sha256:2334ce8c673ee93a1d6a65bd90327588387ba073c17e61bf19b4fd97d688d63c", - "sha256:269322dcc3d8bdb69f054681edff86276b2ff972447863cf34c8b860f5188e2e", - "sha256:2728b01246a3bba6de144f9e3115b532ee44bd6cf39795194fb75491824a1413", - "sha256:2b8ed04b3582771764538f7ee7001b02e1170223cf9b75dff0bc698fadb00cf3", - "sha256:2e29d20810dfc3043ee13ac7d9e25105799817683348823f305ab3f349b9386e", - "sha256:36789b70d613fbac0a25bb07ab3d9dba4d2e38af609c020cf4d888d165ee0bf3", - "sha256:390193c770399861d8df9670fb0d1874f330c79caaca4642332df7c682bf6b91", - "sha256:3a6515ebc6e69d85502b4951d89131ca4e036078ea35533bb76327f8424531ce", - "sha256:3f9a801e7c8f1ef8718da265bba008fa121243dfe37c1cea17840b0944dfd72c", - "sha256:43f0f463cf89ace478de71a318b1b4f05ebc456a9b9300d027b4b57c1a2064fb", - "sha256:4456f2dca97c425231d7315737d45239b2b51a50dc2b6f0c2bb181fce6207664", - "sha256:470b94480bb5ee929f5acba6995251ada5e059a5ef3e0dfc63cca287283ebfa6", - "sha256:4774f3184d2ef3e14e8693194f661dea5a4d6ca4e3dc8e39786d33a94865cefd", - "sha256:4b4356d3538c3649337df4074e81b85f0616b79731fe22dd11b99499b2ebbdf3", - "sha256:553ef617b6836fc7e4df130bb851e32fe357ce36336d897fd6646d6058d980af", - "sha256:6132dd3bd52838acddca05a72aafb6eab6536aa145e923bb50f45e78b7251043", - "sha256:6a46e22a707e7ad4484ac9ee9f290f9d501df45954184e23fc29408dfad61350", 
- "sha256:6e5c584d357c4e2baf0ff7baf44f4994be121e16a2c88918a5817331fc7599d7", - "sha256:75250dbc5290e3f1a0f4618db35e51a165186f9034eff158f3d490b3fed9f8a0", - "sha256:75f7e9488238e920ab6204399ded280dc4c307d034f3924cd7f90a38b1829563", - "sha256:78363590ef93d5d226ba21a90a03ea89a20738ee5b7da83d771d283fd8a56761", - "sha256:7ca4ae5a27ad7a4ee5170aebce1574b375de390bc01284f87b18d43a3984df72", - "sha256:800d60565aec896f25bc3cfa56d2277d52d5182af08162f7954f938c06dc4ee3", - "sha256:82d5d4d78e4448683cb467897fe24e2b74bb7b973a541ea1dcfec1d3cbce39fb", - "sha256:852e966fbd035a6468fc0a3496589b45e2208ec7ca95c26470a54daed82a0788", - "sha256:868649da93e5a3d5eacc2b5b3b9235c98ccdbfd443832f31e075f54419e1b96b", - "sha256:886eec03591b7cf058467a70a87733b35f44707bd86cf64a615584fd72488b7c", - "sha256:8b172601454f2d7701121bbec3425dd71efcb787a027edf49724c9cefc14c038", - "sha256:95b9d5e72481d3780ba3442eac863eae92ae43a5f3adb5b4d0a1de89d42bb250", - "sha256:98758d627ff397e752bc339272c14c98199c613f922d4a384ddc07526c86a2ec", - "sha256:997abc4df705d1295a42f95b4eec4950a37ad8ae46d913caeee117b6b198811c", - "sha256:9b5155ff768083cb1d62f3e143b49a8a3432e6789a3abee8acd005c3c7af1c74", - "sha256:9e08e867b306f525802df7cd16c44ff5ebbe747ff0ca6cf3fde7f36c05a59a81", - "sha256:9fdad8e35f278b2c3eb77cbdc5c0a49dada440657bf738d6905ce106dc1de439", - "sha256:a1874c6dd4113308bd0eb568418e6114b252afe44319ead2b4081e9b9521fe75", - "sha256:a8309f67285bdfe65c372ea3722b7a5642680f3dba538566340a9d36e920b5f0", - "sha256:ae0a8a797a5e56c053610fa7be147993fe50960fa43609ff2a9552b0e07013e8", - "sha256:b14d82cdb934e99dda6d9d60dc84a24379820176cc4a0d123f88df319ae9c150", - "sha256:b1bd7e47b1558ea872bd16c8502c414f9e90dcf12f1395129d7bb42a09a95438", - "sha256:b3ef08e20ec49e02d5c6717a91bb5af9b20f1805583cb0adfe9ba2c6b505b5ae", - "sha256:b89ed9eb7d616ef5714e5590e6cf7f23b02d0d539767d33561e3675d6f9e3857", - "sha256:c4fcf5cd9c4b655ad666ca332b9a081112cd7a58a8b5a6ca7a3104bc950f2038", - 
"sha256:c6fdc8627910eed0c01aed6a390a252fe3ea6d472ee70fdde56273f198938374", - "sha256:c9bd70772c720142be1020eac55f8143a34ec9f82d75a8e7a07852023e46617f", - "sha256:ca7b0c1f1c983e064caa85f3792dd2fe3526b3505378874afa84baf662e12241", - "sha256:cbca948f2d14b09d20268cda7b0367723d79063f26c4ffc523af9042cad95592", - "sha256:cc1cfd88a64e012b74e94cd00bbe0f9c6df57049c97f02bb07d39e9c852e19a4", - "sha256:ccdd111c03bfd3666bd2472b674c6899550e09e9f298954cfc896ab92b5b0e6d", - "sha256:cfeecd1ac6cc1fb2692c3d5110781c965aabd4ec5d32799773ca7b1456ac636b", - "sha256:d4d938ec0adf5167cb335acb25a4ee69a8107e4984f8fbd2e897021d9e4ca21b", - "sha256:d7d904828195733c183d20a54230c0df0eb46ec746ea1a666730787353e87182", - "sha256:d91cb5ea8b11607cc757675051f61b3d93f15eca3cefb3e6c704a5d6e8440f4e", - "sha256:d9319e499827271b09b4e411905b24a426b8fb69464dfa1696258f53a3334641", - "sha256:e0e8b1be28239fc64a88a8189d1df7fad8be8c1ae47fcc33e43d4be15f99cc70", - "sha256:e18609ceaa6eed63753037fc06ebb16041d17d28199ae5aba0052c51449650a9", - "sha256:e1b395e58b10b73b07b7cf740d728dd4ff9365ac46c18751bf8b3d8cca8f625a", - "sha256:e23ec367a948b6d812301afc1b13f8094ab7b2c280af66ef450efc357d2ae543", - "sha256:e25add29b8f3b233ae90ccef2d902d0ae0432eb0d45370fe315d1a5cf231004b", - "sha256:e6dac87ddb34aaec85f873d737e9d06a3555a1cc1a8e0c44b7f8d5daeb89d86f", - "sha256:ef26c9e94a8c04a1b2924149a9cb081836913818e55681722d7f29af88fe7b38", - "sha256:eff2de745698eb46eeb51193a9f41d67d834d50e424aef27df2fcdee1b153845", - "sha256:f0a21cbaa69900cbe1a2e7cad2aa74ac3cf21b10c3efb0fa0b80305274c0e8a2", - "sha256:f459a5ce8434614dfd39bbebf1041952ae01da6bed9855008cb33b875cb024c0", - "sha256:f93a8a2e3938ff656a7c1bc57193b1319960ac015b6e87d76c76bf14fe0244b4", - "sha256:fb2bd7be70c0fe4dfd32c951bc813d9fe6ebcbfdd15a07527796c8204bd36242" + "sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3", + "sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8", + "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8", 
+ "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30", + "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a", + "sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8", + "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d", + "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc", + "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2", + "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab", + "sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077", + "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e", + "sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9", + "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9", + "sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef", + "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1", + "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507", + "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528", + "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558", + "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b", + "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154", + "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724", + "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695", + "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9", + "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851", + "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805", + "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a", + "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5", + 
"sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94", + "sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c", + "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d", + "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef", + "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26", + "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2", + "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c", + "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0", + "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2", + "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4", + "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d", + "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2", + "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce", + "sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34", + "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f", + "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d", + "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b", + "sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07", + "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312", + "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057", + "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d", + "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af", + "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb", + "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd", + "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78", + "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b", 
+ "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223", + "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a", + "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4", + "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5", + "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23", + "sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a", + "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4", + "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8", + "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d", + "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443", + "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e", + "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f", + "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e", + "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d", + "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc", + "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443", + "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be", + "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2", + "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee", + "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f", + "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae", + "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864", + "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4", + "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951", + "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc" ], "markers": "python_version >= '3.8'", - "version": "==2.18.2" + 
"version": "==2.18.4" }, "pypika-tortoise": { "hashes": [ @@ -924,11 +802,11 @@ "all" ], "hashes": [ - "sha256:2ebb462a98f502727d108c00bb50c513a68ddaf9545298c42f13996a9acf8354", - "sha256:af0f45d61521126de98f5bdc8a75a9df8b93d0c35d18b018181ca7648a38b017" + "sha256:1be3c8b6f2b7354418109daa3f23c522e82ed22e7fc904346bee0c7b4aab52ae", + "sha256:8f575e6da903edd1e78967b5b481455ee6b27f2804d2384029177eab165f2e93" ], "markers": "python_version >= '3.8'", - "version": "==21.2" + "version": "==21.3" }, "pytz": { "hashes": [ @@ -939,20 +817,20 @@ }, "redis": { "hashes": [ - "sha256:7adc2835c7a9b5033b7ad8f8918d09b7344188228809c98df07af226d39dec91", - "sha256:ec31f2ed9675cc54c21ba854cfe0462e6faf1d83c8ce5944709db8a4700b9c61" + "sha256:30b47d4ebb6b7a0b9b40c1275a19b87bb6f46b3bed82a89012cf56dea4024ada", + "sha256:3417688621acf6ee368dec4a04dd95881be24efd34c79f00d31f62bb528800ae" ], "index": "pypi", "markers": "python_version >= '3.7'", - "version": "==5.0.4" + "version": "==5.0.5" }, "requests": { "hashes": [ - "sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289", - "sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c" + "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", + "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6" ], "markers": "python_version >= '3.8'", - "version": "==2.32.2" + "version": "==2.32.3" }, "six": { "hashes": [ @@ -997,38 +875,46 @@ }, "tornado": { "hashes": [ - "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0", - "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63", - "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263", - "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052", - "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f", - "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee", - 
"sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78", - "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579", - "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212", - "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e", - "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2" + "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8", + "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f", + "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4", + "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3", + "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14", + "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842", + "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9", + "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698", + "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7", + "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d", + "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4" ], - "version": "==6.4" + "version": "==6.4.1" }, "tortoise-orm": { "extras": [ "accel" ], "hashes": [ - "sha256:bf88bc1ba7495a8827565c071efba0a89c4b5f83ff1c16be3c837a4e6b672c21", - "sha256:c896c90a90d1213b822ac0d607b61659ad5fcd5ff72698a8ba2d9efbad9932f3" + "sha256:9b8f8f8ba23a51f3407bfdc76cf9b2e5bc901ff07c7bec71250a83fa7724dab4", + "sha256:d6e3a627915d4037d312f6ca0cb7d0bf6593630cf1da466df60c7c4c3128398e" ], "markers": "python_version >= '3.8' and python_version < '4.0'", - "version": "==0.20.1" + "version": "==0.21.3" }, "typing-extensions": { "hashes": [ - "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0", - "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a" + 
"sha256:6024b58b69089e5a89c347397254e35f1bf02a907728ec7fee9bf0fe837d203a", + "sha256:915f5e35ff76f56588223f15fdd5938f9a1cf9195c0de25130c627e4d597f6d1" ], "markers": "python_version >= '3.8'", - "version": "==4.11.0" + "version": "==4.12.1" + }, + "tzdata": { + "hashes": [ + "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd", + "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252" + ], + "markers": "platform_system == 'Windows'", + "version": "==2024.1" }, "tzlocal": { "hashes": [ @@ -1048,48 +934,12 @@ }, "uvicorn": { "hashes": [ - "sha256:2c2aac7ff4f4365c206fd773a39bf4ebd1047c238f8b8268ad996829323473de", - "sha256:6a69214c0b6a087462412670b3ef21224fa48cae0e452b5883e8e8bdfdd11dd0" + "sha256:cd17daa7f3b9d7a24de3617820e634d0933b69eed8e33a516071174427238c81", + "sha256:d46cd8e0fd80240baffbcd9ec1012a712938754afcf81bce56c024c1656aece8" ], "index": "pypi", "markers": "python_version >= '3.8'", - "version": "==0.29.0" - }, - "uvloop": { - "hashes": [ - "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd", - "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec", - "sha256:13dfdf492af0aa0a0edf66807d2b465607d11c4fa48f4a1fd41cbea5b18e8e8b", - "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc", - "sha256:271718e26b3e17906b28b67314c45d19106112067205119dddbd834c2b7ce797", - "sha256:2df95fca285a9f5bfe730e51945ffe2fa71ccbfdde3b0da5772b4ee4f2e770d5", - "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2", - "sha256:34175c9fd2a4bc3adc1380e1261f60306344e3407c20a4d684fd5f3be010fa3d", - "sha256:45bf4c24c19fb8a50902ae37c5de50da81de4922af65baf760f7c0c42e1088be", - "sha256:472d61143059c84947aa8bb74eabbace30d577a03a1805b77933d6bd13ddebbd", - "sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12", - "sha256:492e2c32c2af3f971473bc22f086513cedfc66a130756145a931a90c3958cb17", - 
"sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef", - "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24", - "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428", - "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1", - "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849", - "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593", - "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd", - "sha256:6e3d4e85ac060e2342ff85e90d0c04157acb210b9ce508e784a944f852a40e67", - "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6", - "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3", - "sha256:78ab247f0b5671cc887c31d33f9b3abfb88d2614b84e4303f1a63b46c046c8bd", - "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8", - "sha256:8ca4956c9ab567d87d59d49fa3704cf29e37109ad348f2d5223c9bf761a332e7", - "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533", - "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957", - "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650", - "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e", - "sha256:e27f100e1ff17f6feeb1f33968bc185bf8ce41ca557deee9d9bbbffeb72030b7", - "sha256:f467a5fd23b4fc43ed86342641f3936a68ded707f4627622fa3f82a120e18256" - ], - "version": "==0.19.0" + "version": "==0.30.1" }, "wand": { "hashes": [ @@ -1098,9 +948,25 @@ ], "index": "pypi", "version": "==0.6.13" + }, + "win32-setctime": { + "hashes": [ + "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2", + "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad" + ], + "markers": "sys_platform == 'win32'", + "version": "==1.1.0" } }, "develop": { + "colorama": { + "hashes": [ + "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", + 
"sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6" + ], + "markers": "sys_platform == 'win32'", + "version": "==0.4.6" + }, "iniconfig": { "hashes": [ "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", @@ -1127,12 +993,12 @@ }, "pytest": { "hashes": [ - "sha256:5046e5b46d8e4cac199c373041f26be56fdb81eb4e67dc11d4e10811fc3408fd", - "sha256:faccc5d332b8c3719f40283d0d44aa5cf101cec36f88cde9ed8f2bc0538612b1" + "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343", + "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977" ], "index": "pypi", "markers": "python_version >= '3.8'", - "version": "==8.2.1" + "version": "==8.2.2" }, "pytest-asyncio": { "hashes": [ diff --git a/biliparser/__init__.py b/biliparser/__init__.py index 4b77fd7..8537ef5 100644 --- a/biliparser/__init__.py +++ b/biliparser/__init__.py @@ -3,8 +3,7 @@ import httpx -from .model import Audio, Live, Opus, Read, Video -from .parser import parse_audio, parse_live, parse_opus, parse_read, parse_video +from .strategy import Audio, Live, Opus, Read, Video from .utils import ParserException, headers, logger, retry_catcher @@ -12,22 +11,22 @@ async def __feed_parser(client: httpx.AsyncClient, url: str): # bypass b23 short link if re.search(r"BV\w{10}|av\d+|ep\d+|ss\d+", url): - return await parse_video(client, url if "/" in url else f"b23.tv/{url}") + return await Video(url if "/" in url else f"b23.tv/{url}", client).handle() r = await client.get(url) url = str(r.url) logger.debug(f"URL: {url}") # main video if re.search(r"video|bangumi/play|festival", url): - return await parse_video(client, url) + return await Video(url, client).handle() # au audio elif "read" in url: - return await parse_read(client, url) + return await Read(url, client).handle() # au audio elif "audio" in url: - return await parse_audio(client, url) + return await Audio(url, client).handle() # live image elif "live" in url: - return await parse_live(client, 
url) + return await Live(url, client).handle() # API link blackboard link user space link elif re.search( r"^https?:\/\/(?:api|www\.bilibili\.com\/blackboard|space\.bilibili\.com)", url @@ -35,7 +34,7 @@ async def __feed_parser(client: httpx.AsyncClient, url: str): pass # dynamic opus elif re.search(r"^https?:\/\/[th]\.|dynamic|opus", url): - return await parse_opus(client, url) + return await Opus(url, client).handle() raise ParserException("URL错误", url) diff --git a/biliparser/model/audio.py b/biliparser/model/audio.py deleted file mode 100644 index 3bdc445..0000000 --- a/biliparser/model/audio.py +++ /dev/null @@ -1,14 +0,0 @@ -from functools import cached_property - -from .feed import Feed - - -class Audio(Feed): - infocontent: dict = {} - mediacontent: str = "" - audio_id: int = 0 - reply_type: int = 14 - - @cached_property - def url(self): - return f"https://www.bilibili.com/audio/au{self.audio_id}" diff --git a/biliparser/model/live.py b/biliparser/model/live.py deleted file mode 100644 index 6c3ece5..0000000 --- a/biliparser/model/live.py +++ /dev/null @@ -1,12 +0,0 @@ -from functools import cached_property - -from .feed import Feed - - -class Live(Feed): - rawcontent: dict = {} - room_id: int = 0 - - @cached_property - def url(self): - return f"https://live.bilibili.com/{self.room_id}" diff --git a/biliparser/model/opus.py b/biliparser/model/opus.py deleted file mode 100644 index 4e9bdda..0000000 --- a/biliparser/model/opus.py +++ /dev/null @@ -1,69 +0,0 @@ -from functools import cached_property, lru_cache - -from ..utils import escape_markdown -from .feed import Feed - - -class Opus(Feed): - detailcontent: dict = {} - dynamic_id: int = 0 - user: str = "" - __content: str = "" - forward_user: str = "" - forward_uid: int = 0 - forward_content: str = "" - has_forward: bool = False - - @cached_property - def reply_type(self): - if self.rtype == 2: - return 11 - if self.rtype == 16: - return 5 - if self.rtype == 64: - return 12 - if self.rtype == 256: - return 
14 - if self.rtype in [8, 512, *range(4000, 4200)]: - return 1 - if self.rtype in [1, 4, *range(4200, 4300), *range(2048, 2100)]: - return 17 - - @cached_property - def rtype(self): - return int(self.detailcontent["item"]["basic"]["rtype"]) - - @cached_property - def rid(self): - return int(self.detailcontent["item"]["basic"]["rid_str"]) - - @property - @lru_cache(maxsize=1) - def content(self): - content = self.__content - if self.has_forward: - if self.forward_user: - content += f"//@{self.forward_user}:\n" - content += self.forward_content - return self.shrink_line(content) - - @content.setter - def content(self, content): - self.__content = content - - @cached_property - def content_markdown(self): - content_markdown = escape_markdown(self.__content) - if self.has_forward: - if self.uid: - content_markdown += f"//{self.make_user_markdown(self.forward_user, self.forward_uid)}:\n" - elif self.user: - content_markdown += f"//@{escape_markdown(self.forward_user)}:\n" - content_markdown += escape_markdown(self.forward_content) - if not content_markdown.endswith("\n"): - content_markdown += "\n" - return self.shrink_line(content_markdown) - - @cached_property - def url(self): - return f"https://t.bilibili.com/{self.dynamic_id}" diff --git a/biliparser/model/read.py b/biliparser/model/read.py deleted file mode 100644 index ff5f50e..0000000 --- a/biliparser/model/read.py +++ /dev/null @@ -1,13 +0,0 @@ -from functools import cached_property - -from .feed import Feed - - -class Read(Feed): - rawcontent: str = "" - read_id: int = 0 - reply_type: int = 12 - - @cached_property - def url(self): - return f"https://www.bilibili.com/read/cv{self.read_id}" diff --git a/biliparser/model/video.py b/biliparser/model/video.py deleted file mode 100644 index 00bb508..0000000 --- a/biliparser/model/video.py +++ /dev/null @@ -1,56 +0,0 @@ -from functools import cached_property - -from .feed import Feed - - -class Video(Feed): - cidcontent: dict = {} - epcontent: dict = {} - infocontent: 
dict = {} - mediacontent: dict = {} - page = 1 - reply_type: int = 1 - - @cached_property - def cid(self): - if self.infocontent and self.infocontent.get("data"): - if self.page != 1 and self.infocontent["data"].get("pages"): - for item in self.infocontent["data"]["pages"]: - if item.get("page") == self.page: - return item.get("cid") - self.page = 1 - return self.infocontent["data"].get("cid") - - @cached_property - def bvid(self): - if self.infocontent and self.infocontent.get("data"): - return self.infocontent["data"].get("bvid") - - @cached_property - def aid(self): - if self.infocontent and self.infocontent.get("data"): - return self.infocontent["data"].get("aid") - elif self.epid and self.epcontent and self.epcontent.get("result"): - for episode in self.epcontent["result"].get("episodes"): - if str(episode.get("id")) == self.epid: - return episode.get("aid") - - @cached_property - def epid(self): - if ( - self.epcontent - and self.epcontent.get("result") - and self.epcontent["result"].get("episodes") - ): - if not self.aid: - self.aid = self.epcontent["result"]["episodes"][-1].get("aid") - return self.epcontent["result"]["episodes"][-1].get("id") - - @cached_property - def ssid(self): - if self.epcontent and self.epcontent.get("result"): - return self.epcontent["result"].get("season_id") - - @cached_property - def url(self): - return f"https://www.bilibili.com/video/av{self.aid}?p={self.page}" diff --git a/biliparser/parser/__init__.py b/biliparser/parser/__init__.py deleted file mode 100644 index 95c300e..0000000 --- a/biliparser/parser/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from .audio_parser import parse_audio -from .live_parser import parse_live -from .opus_parser import parse_opus -from .read_parser import parse_read -from .video_parser import parse_video \ No newline at end of file diff --git a/biliparser/parser/audio_parser.py b/biliparser/parser/audio_parser.py deleted file mode 100644 index ec29812..0000000 --- a/biliparser/parser/audio_parser.py 
+++ /dev/null @@ -1,122 +0,0 @@ -import re - -import httpx -import orjson -from telegram.constants import FileSizeLimit - -from ..cache import ( - CACHES_TIMER, - RedisCache, -) -from ..model import Audio -from ..utils import ( - BILI_API, - LOCAL_MODE, - ParserException, - escape_markdown, - logger, - retry_catcher, -) -from .reply_parser import parse_reply - - -@retry_catcher -async def parse_audio(client: httpx.AsyncClient, url: str): - logger.info(f"处理音频信息: 链接: {url}") - match = re.search(r"bilibili\.com\/audio\/au(\d+)", url) - if not match: - raise ParserException("音频链接错误", url) - f = Audio(url) - f.audio_id = int(match.group(1)) - # 1.获取缓存 - try: - cache = RedisCache().get(f"audio:info:{f.audio_id}") - except Exception as e: - logger.exception(f"拉取音频缓存错误: {e}") - cache = None - # 2.拉取音频 - if cache: - logger.info(f"拉取音频缓存: {f.audio_id}") - f.infocontent = orjson.loads(cache) # type: ignore - else: - try: - r = await client.get( - BILI_API + "/audio/music-service-c/songs/playing", - params={"song_id": f.audio_id}, - ) - f.infocontent = r.json() - except Exception as e: - raise ParserException(f"音频获取错误:{f.audio_id}", url, e) - # 3.解析音频 - if not f.infocontent or not f.infocontent.get("data"): - raise ParserException("音频解析错误", r.url, f.infocontent) - # 4.缓存音频 - try: - RedisCache().set( - f"audio:info:{f.audio_id}", - orjson.dumps(f.infocontent), - ex=CACHES_TIMER.get("audio"), - nx=True, - ) - except Exception as e: - logger.exception(f"缓存音频错误: {e}") - detail = f.infocontent["data"] - f.user = detail.get("author") - f.content = detail.get("intro") - f.extra_markdown = f"[{escape_markdown(detail.get('title'))}]({f.url})" - f.mediathumb = detail.get("cover_url") - f.mediatitle = detail.get("title") - f.mediaduration = detail.get("duration") - f.uid = detail.get("mid") - # 1.获取缓存 - try: - cache = RedisCache().get(f"audio:media:{f.audio_id}") - except Exception as e: - logger.exception(f"拉取音频缓存错误: {e}") - cache = None - # 2.拉取音频 - if cache: - logger.info(f"拉取音频缓存: 
{f.audio_id}") - f.mediacontent = orjson.loads(cache) # type: ignore - else: - try: - r = await client.get( - BILI_API + "/audio/music-service-c/url", - params={ - "songid": f.audio_id, - "mid": f.uid, - "privilege": 2, - "quality": 3, - "platform": "", - }, - ) - f.mediacontent = r.json() - except Exception as e: - raise ParserException(f"音频媒体获取错误:{f.audio_id}", url, e) - # 3.解析音频 - if not f.mediacontent or not f.mediacontent.get("data"): - raise ParserException("音频媒体解析错误", r.url, f.mediacontent) - # 4.缓存音频 - try: - RedisCache().set( - f"audio:media:{f.audio_id}", - orjson.dumps(f.mediacontent), - ex=CACHES_TIMER.get("audio"), - nx=True, - ) - except Exception as e: - logger.exception(f"缓存音频媒体错误: {e}") - f.mediaurls = f.mediacontent["data"].get("cdns") - f.mediatype = "audio" - f.mediaraws = ( - False - if f.mediacontent["data"].get("size") - < ( - FileSizeLimit.FILESIZE_DOWNLOAD_LOCAL_MODE - if LOCAL_MODE - else FileSizeLimit.FILESIZE_DOWNLOAD - ) - else True - ) - f.replycontent = await parse_reply(client, f.audio_id, f.reply_type) - return f diff --git a/biliparser/parser/live_parser.py b/biliparser/parser/live_parser.py deleted file mode 100644 index 30b7e28..0000000 --- a/biliparser/parser/live_parser.py +++ /dev/null @@ -1,67 +0,0 @@ -import re - -import httpx -import orjson - -from ..cache import ( - CACHES_TIMER, - RedisCache, -) -from ..model import Live -from ..utils import ( - ParserException, - escape_markdown, - logger, - retry_catcher, -) - - -@retry_catcher -async def parse_live(client: httpx.AsyncClient, url: str): - logger.info(f"处理直播信息: 链接: {url}") - match = re.search(r"live\.bilibili\.com[\/\w]*\/(\d+)", url) - if not match: - raise ParserException("直播链接错误", url) - f = Live(url) - f.room_id = int(match.group(1)) - # 1.获取缓存 - try: - cache = RedisCache().get(f"live:{f.room_id}") - except Exception as e: - logger.exception(f"拉取直播缓存错误: {e}") - cache = None - # 2.拉取直播 - if cache: - logger.info(f"拉取直播缓存: {f.room_id}") - f.rawcontent = 
orjson.loads(cache) # type: ignore - else: - try: - r = await client.get( - "https://api.live.bilibili.com/xlive/web-room/v1/index/getInfoByRoom", - params={"room_id": f.room_id}, - ) - f.rawcontent = r.json() - except Exception as e: - raise ParserException(f"直播获取错误:{f.room_id}", url, e) - # 3.解析直播 - if not f.rawcontent or not f.rawcontent.get("data"): - raise ParserException("直播解析错误", r.url, f.rawcontent) - # 4.缓存直播 - try: - RedisCache().set( - f"live:{f.room_id}", - orjson.dumps(f.rawcontent), - ex=CACHES_TIMER.get("live"), - nx=True, - ) - except Exception as e: - logger.exception(f"缓存直播错误: {e}") - detail = f.rawcontent.get("data") - f.user = detail["anchor_info"]["base_info"]["uname"] - roominfo = detail.get("room_info") - f.uid = roominfo.get("uid") - f.content = f"{roominfo.get('title')} - {roominfo.get('area_name')} - {roominfo.get('parent_area_name')}" - f.extra_markdown = f"[{escape_markdown(f.user)}的直播间]({f.url})" - f.mediaurls = roominfo.get("keyframe") or roominfo.get("cover") - f.mediatype = "image" - return f diff --git a/biliparser/parser/opus_parser.py b/biliparser/parser/opus_parser.py deleted file mode 100644 index 669d9a4..0000000 --- a/biliparser/parser/opus_parser.py +++ /dev/null @@ -1,118 +0,0 @@ -import re -from functools import reduce - -import httpx -import orjson - -from ..cache import ( - CACHES_TIMER, - RedisCache, -) -from ..model import Opus -from ..utils import ( - BILI_API, - ParserException, - escape_markdown, - logger, - retry_catcher, -) -from .reply_parser import parse_reply - - -def __list_dicts_to_dict(lists: list[dict]): - return reduce(lambda old, new: old.update(new) or old, lists, {}) - - -def __opus_handle_major(f: Opus, major: dict): - datapath_map = { - "MDL_DYN_TYPE_ARCHIVE": "dyn_archive", - "MDL_DYN_TYPE_PGC": "dyn_pgc", - "MDL_DYN_TYPE_ARTICLE": "dyn_article", - "MDL_DYN_TYPE_MUSIC": "dyn_music", - "MDL_DYN_TYPE_COMMON": "dyn_common", - "MDL_DYN_TYPE_LIVE": "dyn_live", - "MDL_DYN_TYPE_UGC_SEASON": "dyn_ugc_season", 
- "MDL_DYN_TYPE_DRAW": "dyn_draw", - "MDL_DYN_TYPE_OPUS": "dyn_opus", - "MDL_DYN_TYPE_FORWARD": "dyn_forward", - } - if not major: - return - target = datapath_map.get(major["type"]) - if major["type"] == "MDL_DYN_TYPE_FORWARD": - f.has_forward = True - majorcontent = __list_dicts_to_dict(major[target]["item"]["modules"]) - f.forward_user = majorcontent["module_author"]["user"]["name"] - f.forward_uid = majorcontent["module_author"]["user"]["mid"] - if majorcontent.get("module_desc"): - f.forward_content = __opus_handle_desc_text(majorcontent["module_desc"]) - if not f.mediatype and majorcontent.get("module_dynamic"): - __opus_handle_major(f, majorcontent["module_dynamic"]) - elif major["type"] == "MDL_DYN_TYPE_DRAW": - f.mediaurls = [item["src"] for item in major[target]["items"]] - f.mediatype = "image" - elif datapath_map.get(major["type"]): - if major[target].get("cover"): - f.mediaurls = major[target]["cover"] - f.mediatype = "image" - if major[target].get("aid") and major[target].get("title"): - f.extra_markdown = f"[{escape_markdown(major[target]['title'])}](https://www.bilibili.com/video/av{major[target]['aid']})" - - -def __opus_handle_desc_text(desc: dict): - if not desc: - return "" - return desc["text"] - - -@retry_catcher -async def parse_opus(client: httpx.AsyncClient, url: str): - logger.info(f"处理动态信息: 链接: {url}") - match = re.search(r"bilibili\.com[\/\w]*\/(\d+)", url) - if not match: - raise ParserException("动态链接错误", url) - f = Opus(url) - f.dynamic_id = int(match.group(1)) - # 1.获取缓存 - try: - cache = RedisCache().get(f"opus:dynamic_id:{f.dynamic_id}") - except Exception as e: - logger.exception(f"拉取动态缓存错误: {e}") - cache = None - # 2.拉取动态 - if cache: - logger.info(f"拉取动态缓存: {f.dynamic_id}") - f.detailcontent = orjson.loads(cache) # type: ignore - else: - try: - r = await client.get( - BILI_API + "/x/polymer/web-dynamic/desktop/v1/detail", - params={"id": f.dynamic_id}, - ) - response = r.json() - except Exception as e: - raise 
ParserException(f"动态获取错误:{f.dynamic_id}", url, e) - # 3.动态解析 - if not response or not response.get("data") or not response["data"].get("item"): - raise ParserException("动态解析错误", url, response) - f.detailcontent = response["data"] - # 4.缓存动态 - try: - RedisCache().set( - f"opus:dynamic_id:{f.dynamic_id}", - orjson.dumps(f.detailcontent), - ex=CACHES_TIMER.get("opus"), - nx=True, - ) - except Exception as e: - logger.exception(f"缓存动态错误: {e}") - detailcontent = __list_dicts_to_dict(f.detailcontent["item"]["modules"]) - f.user = detailcontent["module_author"]["user"]["name"] - f.uid = detailcontent["module_author"]["user"]["mid"] - if detailcontent.get("module_desc"): - f.content = __opus_handle_desc_text(detailcontent["module_desc"]) - if detailcontent.get("module_dynamic"): - __opus_handle_major(f, detailcontent["module_dynamic"]) - f.extra_markdown = f"[{escape_markdown(f.user)}的动态]({f.url})" - f.replycontent = await parse_reply(client, f.rid, f.reply_type) - return f diff --git a/biliparser/parser/read_parser.py b/biliparser/parser/read_parser.py deleted file mode 100644 index 2e50be8..0000000 --- a/biliparser/parser/read_parser.py +++ /dev/null @@ -1,180 +0,0 @@ -import asyncio -import os -import re -from io import BytesIO - -import httpx -import orjson -from bs4 import BeautifulSoup -from bs4.element import Tag -from telegraph.aio import Telegraph - -from ..cache import ( - CACHES_TIMER, - RedisCache, -) -from ..model import Read -from ..utils import ( - ParserException, - compress, - escape_markdown, - logger, - retry_catcher, -) -from .reply_parser import parse_reply - -telegraph = Telegraph(access_token=os.environ.get("TELEGRAPH_ACCESS_TOKEN", None)) - - -async def __relink(client: httpx.AsyncClient, img): - src = img.attrs.pop("data-src") - img.attrs = {"src": src} - logger.info(f"下载图片: {src}") - async with client.stream("GET", f"https:{src}") as response: - if response.status_code != 200: - logger.error(f"图片获取错误: {src}") - return - media = BytesIO(await 
response.aread()) - mediatype = response.headers.get("content-type") - if mediatype in ["image/jpeg", "image/png"]: - content_length = int(response.headers.get("content-length")) - logger.info(f"图片大小: {content_length} 压缩: {src} {mediatype}") - if content_length > 1024 * 1024 * 5: - media = compress(media, fix_ratio=True) - else: - media = compress(media, size=0, fix_ratio=True) - try: - resp = await telegraph.upload_file(media) - logger.info(f"图片上传: {resp}") - img.attrs["src"] = f"https://telegra.ph{resp[0].get('src')}" - except Exception as e: - logger.exception(f"图片上传错误: {e}") - - -@retry_catcher -async def parse_read(client: httpx.AsyncClient, url: str): - logger.info(f"处理文章信息: 链接: {url}") - match = re.search(r"bilibili\.com\/read\/(?:cv|mobile\/|mobile\?id=)(\d+)", url) - if not match: - raise ParserException("文章链接错误", url) - f = Read(url) - f.read_id = int(match.group(1)) - # 获取文章 - # 1.获取缓存 - try: - cache_base = RedisCache().get(f"read:page:{f.read_id}") - except Exception as e: - logger.exception(f"拉取文章页面缓存错误: {e}") - cache_base = None - # 2.拉取文章 - if cache_base: - logger.info(f"拉取文章页面缓存: {f.read_id}") - cv_content = orjson.loads(cache_base) # type: ignore - else: - try: - r = await client.get(f.url) - except Exception as e: - raise ParserException(f"文章页面获取错误:{f.read_id}", url, e) - # 3.解析文章 - cv_init = re.search(r"window\.__INITIAL_STATE__=(.*?);\(function\(\)", r.text) - if not cv_init: - raise ParserException(f"文章页面内容获取错误:{f.read_id}", url, cv_init) - cv_content = orjson.loads(cv_init.group(1)) - f.uid = cv_content.get("readInfo").get("author").get("mid") - f.user = cv_content.get("readInfo").get("author").get("name") - f.content = cv_content.get("readInfo").get("summary") - mediaurls = ( - cv_content.get("readInfo").get("banner_url") - if cv_content.get("readInfo").get("banner_url") - else cv_content.get("readInfo").get("image_urls") - ) - if mediaurls: - logger.info(f"文章mediaurls: {mediaurls}") - f.mediaurls = mediaurls - f.mediatype = "image" - title = 
cv_content.get("readInfo").get("title") - if not cache_base: - # 4.缓存文章 - try: - cache_base = RedisCache().set( - f"read:page:{f.read_id}", - orjson.dumps(cv_content), - ex=CACHES_TIMER.get("read"), - nx=True, - ) - except Exception as e: - logger.exception(f"缓存文章页面错误: {e}") - # 转存文章 - # 1.获取缓存 - try: - cache_graphurl = RedisCache().get(f"read:graphurl:{f.read_id}") - except Exception as e: - logger.exception(f"拉取文章链接缓存错误: {e}") - cache_graphurl = None - # 2.拉取文章 - if cache_graphurl: - logger.info(f"拉取文章链接缓存: {f.read_id}") - graphurl = cache_graphurl - else: - # 3.解析文章转为链接 - article_content = cv_content.get("readInfo").get("content") - if not telegraph.get_access_token(): - logger.info("creating_account") - result = await telegraph.create_account( - "bilifeedbot", "bilifeedbot", "https://t.me/bilifeedbot" - ) - logger.info(f"Telegraph create_account: {result}") - try: - article = orjson.loads(article_content) - result = article.get("ops")[0].get("insert").split("\n") - logger.info(result) - graphurl = ( - await telegraph.create_page( - title=title, - content=result, - author_name=f.user, - author_url=f"https://space.bilibili.com/{f.uid}", - ) - ).get("url") - except orjson.JSONDecodeError: - article = BeautifulSoup(article_content, "lxml") - if not isinstance(article, Tag): - raise ParserException("文章内容解析错误", url, cv_content) - imgs = article.find_all("img") - task = list(__relink(client, img) for img in imgs) ## data-src -> src - for _ in article.find_all("h1"): ## h1 -> h3 - _.name = "h3" - for item in ["span", "div"]: ## remove tags - for _ in article.find_all(item): - _.unwrap() - for item in ["p", "figure", "figcaption"]: ## clean tags - for _ in article.find_all(item): - _.attrs = {} - await asyncio.gather(*task) - result = "" - if isinstance(article.body, Tag): - result = "".join( - [str(i) for i in article.body.contents] - ) ## convert tags to string - graphurl = ( - await telegraph.create_page( - title=title, - html_content=result, - author_name=f.user, - 
author_url=f"https://space.bilibili.com/{f.uid}", - ) - ).get("url") - logger.info(f"生成页面: {graphurl}") - # 4.缓存文章 - try: - RedisCache().set( - f"read:graphurl:{f.read_id}", - orjson.dumps(graphurl), - ex=CACHES_TIMER.get("read"), - nx=True, - ) - except Exception as e: - logger.exception(f"缓存文章链接错误: {e}") - f.extra_markdown = f"[{escape_markdown(title)}]({graphurl})" - f.replycontent = await parse_reply(client, f.read_id, f.reply_type) - return f diff --git a/biliparser/parser/reply_parser.py b/biliparser/parser/reply_parser.py deleted file mode 100644 index 3faadc3..0000000 --- a/biliparser/parser/reply_parser.py +++ /dev/null @@ -1,54 +0,0 @@ -import httpx -import orjson - -from ..cache import ( - CACHES_TIMER, - RedisCache, -) -from ..utils import ( - BILI_API, - logger, - retry_catcher, -) - - -@retry_catcher -async def parse_reply(client: httpx.AsyncClient, oid, reply_type): - logger.info(f"处理评论信息: 评论ID: {oid} 评论类型: {reply_type}") - # 1.获取缓存 - try: - cache = RedisCache().get(f"reply:{oid}:{reply_type}") - except Exception as e: - logger.exception(f"拉取评论缓存错误: {e}") - cache = None - # 2.拉取评论 - if cache: - logger.info(f"拉取评论缓存: {oid}") - reply = orjson.loads(cache) # type: ignore - else: - try: - r = await client.get( - BILI_API + "/x/v2/reply/main", - params={"oid": oid, "type": reply_type}, - headers={"Referer": "https://www.bilibili.com/client"}, - ) - response = r.json() - except Exception as e: - logger.exception(f"评论获取错误: {oid}-{reply_type} {e}") - return {} - # 3.评论解析 - if not response or not response.get("data"): - logger.warning(f"评论解析错误: {oid}-{reply_type} {response}") - return {} - reply = response["data"] - # 4.缓存评论 - try: - RedisCache().set( - f"reply:{oid}:{reply_type}", - orjson.dumps(reply), - ex=CACHES_TIMER.get("reply"), - nx=True, - ) - except Exception as e: - logger.exception(f"缓存评论错误: {e}") - return reply diff --git a/biliparser/parser/video_parser.py b/biliparser/parser/video_parser.py deleted file mode 100644 index 9d0154c..0000000 --- 
a/biliparser/parser/video_parser.py +++ /dev/null @@ -1,240 +0,0 @@ -import os -import re - -import httpx -from telegram.constants import FileSizeLimit -import orjson - -from ..cache import ( - CACHES_TIMER, - RedisCache, -) -from ..model import Video -from ..utils import ( - BILI_API, - LOCAL_MODE, - ParserException, - escape_markdown, - headers, - logger, - retry_catcher, -) -from .reply_parser import parse_reply - -QN = [64, 32, 16] - - -async def __test_url_status_code(client, url, referer): - header = headers.copy() - header["Referer"] = referer - async with client.stream("GET", url, headers=header) as response: - if response.status_code != 200: - return False - return True - - -async def __get_video_result(client: httpx.AsyncClient, f: Video, detail, qn: int): - params = {"avid": f.aid, "cid": f.cid} - if qn: - params["qn"] = qn - r = await client.get( - BILI_API + "/x/player/playurl", - params=params, - ) - video_result = r.json() - logger.debug(f"视频内容: {video_result}") - if ( - video_result.get("code") == 0 - and video_result.get("data") - and video_result.get("data").get("durl") - and video_result.get("data").get("durl")[0].get("size") - < ( - int( - os.environ.get( - "VIDEO_SIZE_LIMIT", FileSizeLimit.FILESIZE_UPLOAD_LOCAL_MODE - ) - ) - if LOCAL_MODE - else FileSizeLimit.FILESIZE_UPLOAD - ) - ): - url = video_result["data"]["durl"][0]["url"] - result = await __test_url_status_code(client, url, f.url) - if not result and video_result["data"]["durl"][0].get("backup_url", None): - url = video_result["data"]["durl"][0]["backup_url"] - result = await __test_url_status_code(client, url, f.url) - if result: - f.mediacontent = video_result - f.mediathumb = detail.get("pic") - f.mediaduration = round(video_result["data"]["durl"][0]["length"] / 1000) - f.mediadimention = detail.get("pages")[0].get("dimension") - f.mediaurls = url - f.mediatype = "video" - f.mediaraws = ( - False - if video_result.get("data").get("durl")[0].get("size") - < ( - 
FileSizeLimit.FILESIZE_DOWNLOAD_LOCAL_MODE - if LOCAL_MODE - else FileSizeLimit.FILESIZE_DOWNLOAD - ) - else True - ) - return True - - -@retry_catcher -async def parse_video(client: httpx.AsyncClient, url: str): - logger.info(f"处理视频信息: 链接: {url}") - match = re.search( - r"(?:bilibili\.com/(?:video|bangumi/play)|b23\.tv|acg\.tv)/(?:(?PBV\w{10})|av(?P\d+)|ep(?P\d+)|ss(?P\d+)|)/?\??(?:p=(?P\d+))?", - url, - ) - match_fes = re.search( - r"bilibili\.com/festival/(?P\w+)\?(?:bvid=(?PBV\w{10}))", url - ) - if match_fes: - bvid = match_fes.group("bvid") - epid = None - aid = None - ssid = None - page = 1 - elif match: - bvid = match.group("bvid") - epid = match.group("epid") - aid = match.group("aid") - ssid = match.group("ssid") - page = match.group("page") - if page and page.isdigit(): - page = max(1, int(page)) - else: - page = 1 - else: - raise ParserException("视频链接错误", url) - if epid: - params = {"ep_id": epid} - elif bvid: - params = {"bvid": bvid} - elif aid: - params = {"aid": aid} - elif ssid: - params = {"season_id": ssid} - else: - raise ParserException("视频链接解析错误", url) - f = Video(url) - f.page = page - if epid: - f.epid = epid - if epid is not None or ssid is not None: - # 1.获取缓存 - try: - cache = ( - RedisCache().get(f"bangumi:ep:{epid}") - if epid - else RedisCache().get(f"bangumi:ss:{ssid}") - ) - except Exception as e: - logger.exception(f"拉取番剧缓存错误: {e}") - cache = None - # 2.拉取番剧 - if cache: - logger.info( - f"拉取番剧缓存:epid {epid}" if epid else f"拉取番剧缓存:ssid {ssid}" - ) - f.epcontent = orjson.loads(cache) # type: ignore - else: - try: - r = await client.get( - BILI_API + "/pgc/view/web/season", - params=params, - ) - f.epcontent = r.json() - except Exception as e: - raise ParserException(f"番剧获取错误:{epid if epid else ssid}", url, e) - # 3.番剧解析 - if not f.epcontent or not f.epcontent.get("result"): - # Anime detects non-China IP - raise ParserException( - f"番剧解析错误:{epid if epid else ssid} {f.epcontent}", - url, - f.epcontent, - ) - if not f.epid or not f.ssid 
or not f.aid: - raise ParserException( - f"番剧解析错误:{f.aid} {f.ssid} {f.aid}", url, f.epcontent - ) - # 4.缓存评论 - try: - for key in [f"bangumi:ep:{f.epid}", f"bangumi:ss:{f.ssid}"]: - RedisCache().set( - key, - orjson.dumps(f.epcontent), - ex=CACHES_TIMER.get("bangumi"), - nx=True, - ) - except Exception as e: - logger.exception(f"缓存番剧错误: {e}") - params = {"aid": f.aid} - aid = f.aid - # 1.获取缓存 - try: - cache = ( - RedisCache().get(f"video:aid:{aid}") - if aid - else RedisCache().get(f"video:bvid:{bvid}") - ) - except Exception as e: - logger.exception(f"拉取视频缓存错误: {e}") - cache = None - # 2.拉取视频 - if cache: - logger.info(f"拉取视频缓存:{aid if aid else bvid}") - f.infocontent = orjson.loads(cache) # type: ignore - else: - try: - r = await client.get( - BILI_API + "/x/web-interface/view", - params=params, - ) - f.infocontent = r.json() - except Exception as e: - raise ParserException(f"视频获取错误:{aid if aid else bvid}", url, e) - # 3.视频解析 - if not f.infocontent and not f.infocontent.get("data"): - # Video detects non-China IP - raise ParserException( - f"视频解析错误{aid if aid else bvid}", r.url, f.infocontent - ) - if not f.aid or not f.bvid or not f.cid: - raise ParserException( - f"视频解析错误:{f.aid} {f.bvid} {f.cid}", url, f.epcontent - ) - # 4.缓存视频 - try: - for key in [f"video:aid:{f.aid}", f"video:bvid:{f.bvid}"]: - RedisCache().set( - key, - orjson.dumps(f.infocontent), - ex=CACHES_TIMER.get("video"), - nx=True, - ) - except Exception as e: - logger.exception(f"缓存番剧错误: {e}") - detail = f.infocontent.get("data") - f.user = detail.get("owner").get("name") - f.uid = detail.get("owner").get("mid") - f.content = detail.get("tname", "发布视频") - if detail.get("pages") and len(detail["pages"]) > 1: - f.content += f" - 第{page}P/共{len(detail['pages'])}P" - if detail.get("dynamic") or detail.get("desc"): - f.content += f" - {detail.get('dynamic') or detail.get('desc')}" - f.extra_markdown = f"[{escape_markdown(detail.get('title'))}]({f.url})" - f.mediatitle = detail.get("title") - f.mediaurls 
class Audio(Feed):
    # Raw JSON payload of the song-info API response.
    infocontent: dict = {}
    audio_id: int = 0
    # Comment-area type for audio (bilibili reply API "type" parameter).
    reply_type: int = 14

    @cached_property
    def url(self):
        """Canonical web URL of this audio."""
        return f"https://www.bilibili.com/audio/au{self.audio_id}"

    async def handle(self):
        """Resolve a bilibili audio (auXXXX) link into a populated Audio feed.

        Raises:
            ParserException: when the URL does not match, or the info/media
                API payloads cannot be fetched or validated.
        """
        logger.info(f"处理音频信息: 链接: {self.rawurl}")
        match = re.search(r"bilibili\.com\/audio\/au(\d+)", self.rawurl)
        if not match:
            raise ParserException("音频链接错误", self.rawurl)
        self.audio_id = int(match.group(1))
        # 1. probe the song-info cache
        try:
            cache = RedisCache().get(f"audio:info:{self.audio_id}")
        except Exception as e:
            logger.exception(f"拉取音频缓存错误: {e}")
            cache = None
        # 2. fetch song info from the API on cache miss
        if cache:
            logger.info(f"拉取音频缓存: {self.audio_id}")
            self.infocontent = orjson.loads(cache)  # type: ignore
        else:
            try:
                r = await self.client.get(
                    BILI_API + "/audio/music-service-c/songs/playing",
                    params={"song_id": self.audio_id},
                )
                self.infocontent = r.json()
            except Exception as e:
                raise ParserException(f"音频获取错误:{self.audio_id}", self.rawurl, e)
        # 3. validate the payload
        # NOTE: report self.rawurl here -- `r` is unbound when the payload came
        # from cache, so referencing r.url could raise NameError instead of the
        # intended ParserException.
        if not self.infocontent or not self.infocontent.get("data"):
            raise ParserException("音频解析错误", self.rawurl, self.infocontent)
        # 4. cache the payload (nx=True: keep an earlier copy if one exists)
        try:
            RedisCache().set(
                f"audio:info:{self.audio_id}",
                orjson.dumps(self.infocontent),
                ex=CACHES_TIMER.get("audio"),
                nx=True,
            )
        except Exception as e:
            logger.exception(f"缓存音频错误: {e}")
        detail = self.infocontent["data"]
        self.user = detail.get("author")
        self.content = detail.get("intro")
        self.extra_markdown = f"[{escape_markdown(detail.get('title'))}]({self.rawurl})"
        self.mediathumb = detail.get("cover_url")
        self.mediatitle = detail.get("title")
        self.mediaduration = detail.get("duration")
        self.uid = detail.get("mid")
        # 1. probe the media-url cache
        try:
            cache = RedisCache().get(f"audio:media:{self.audio_id}")
        except Exception as e:
            logger.exception(f"拉取音频缓存错误: {e}")
            cache = None
        # 2. fetch the playable CDN urls on cache miss
        if cache:
            logger.info(f"拉取音频缓存: {self.audio_id}")
            self.mediacontent = orjson.loads(cache)  # type: ignore
        else:
            try:
                r = await self.client.get(
                    BILI_API + "/audio/music-service-c/url",
                    params={
                        "songid": self.audio_id,
                        "mid": self.uid,
                        "privilege": 2,
                        "quality": 3,
                        "platform": "",
                    },
                )
                self.mediacontent = r.json()
            except Exception as e:
                raise ParserException(f"音频媒体获取错误:{self.audio_id}", self.rawurl, e)
        # 3. validate the payload (self.rawurl for the same reason as above)
        if not self.mediacontent or not self.mediacontent.get("data"):
            raise ParserException("音频媒体解析错误", self.rawurl, self.mediacontent)
        # 4. cache the payload
        try:
            RedisCache().set(
                f"audio:media:{self.audio_id}",
                orjson.dumps(self.mediacontent),
                ex=CACHES_TIMER.get("audio"),
                nx=True,
            )
        except Exception as e:
            logger.exception(f"缓存音频媒体错误: {e}")
        self.mediaurls = self.mediacontent["data"].get("cdns")
        self.mediatype = "audio"
        # Download the raw file ourselves only when it exceeds what the bot
        # API will fetch by URL (limit depends on local bot-API mode).
        self.mediaraws = self.mediacontent["data"].get("size") >= (
            FileSizeLimit.FILESIZE_DOWNLOAD_LOCAL_MODE
            if LOCAL_MODE
            else FileSizeLimit.FILESIZE_DOWNLOAD
        )
        self.replycontent = await self.parse_reply(self.audio_id, self.reply_type)
        return self
biliparser/strategy/feed.py index 0405365..f8f40a5 100644 --- a/biliparser/model/feed.py +++ b/biliparser/strategy/feed.py @@ -1,15 +1,21 @@ import re +from abc import ABC, abstractmethod from functools import cached_property + +import httpx +import orjson from telegram.constants import MessageLimit -from ..utils import escape_markdown +from ..cache import CACHES_TIMER, RedisCache +from ..utils import BILI_API, escape_markdown, logger -class Feed: +class Feed(ABC): user: str = "" uid: str = "" __content: str = "" __mediaurls: list = [] + mediacontent: dict = {} mediaraws: bool = False mediatype: str = "" mediathumb: str = "" @@ -19,8 +25,9 @@ class Feed: extra_markdown: str = "" replycontent: dict = {} - def __init__(self, rawurl): + def __init__(self, rawurl: str, client: httpx.AsyncClient): self.rawurl = rawurl + self.client = client @staticmethod def get_filename(url) -> str: @@ -50,6 +57,13 @@ def shrink_line(text: str): else str() ) + @staticmethod + def clean_cn_tag_style(content: str) -> str: + if not content: + return "" + ## Refine cn tag style display: #abc# -> #abc + return re.sub(r"\\#((?:(?!\\#).)+)\\#", r"\\#\1 ", content) + @cached_property def user_markdown(self): return self.make_user_markdown(self.user, self.uid) @@ -120,13 +134,6 @@ def mediathumbfilename(self): def url(self): return self.rawurl - @staticmethod - def clean_cn_tag_style(content: str) -> str: - if not content: - return "" - ## Refine cn tag style display: #abc# -> #abc - return re.sub(r"\\#((?:(?!\\#).)+)\\#", r"\\#\1 ", content) - @cached_property def caption(self): caption = ( @@ -149,3 +156,47 @@ def caption(self): if len(caption) > MessageLimit.CAPTION_LENGTH: return prev_caption return caption + + async def parse_reply(self, oid, reply_type): + logger.info(f"处理评论信息: 评论ID: {oid} 评论类型: {reply_type}") + # 1.获取缓存 + try: + cache = RedisCache().get(f"reply:{oid}:{reply_type}") + except Exception as e: + logger.exception(f"拉取评论缓存错误: {e}") + cache = None + # 2.拉取评论 + if cache: + 
class Live(Feed):
    # Raw JSON payload of the getInfoByRoom API response.
    rawcontent: dict = {}
    room_id: int = 0

    @cached_property
    def url(self):
        """Canonical web URL of this live room."""
        return f"https://live.bilibili.com/{self.room_id}"

    async def handle(self):
        """Resolve a live.bilibili.com room link into a populated Live feed.

        Raises:
            ParserException: when the URL does not match, or the room-info
                payload cannot be fetched or validated.
        """
        logger.info(f"处理直播信息: 链接: {self.rawurl}")
        match = re.search(r"live\.bilibili\.com[\/\w]*\/(\d+)", self.rawurl)
        if not match:
            raise ParserException("直播链接错误", self.rawurl)
        self.room_id = int(match.group(1))
        # 1. probe the cache
        try:
            cache = RedisCache().get(f"live:{self.room_id}")
        except Exception as e:
            logger.exception(f"拉取直播缓存错误: {e}")
            cache = None
        # 2. fetch from the API on cache miss
        if cache:
            logger.info(f"拉取直播缓存: {self.room_id}")
            self.rawcontent = orjson.loads(cache)  # type: ignore
        else:
            try:
                r = await self.client.get(
                    "https://api.live.bilibili.com/xlive/web-room/v1/index/getInfoByRoom",
                    params={"room_id": self.room_id},
                )
                self.rawcontent = r.json()
            except Exception as e:
                raise ParserException(f"直播获取错误:{self.room_id}", self.rawurl, e)
        # 3. validate the payload
        # NOTE: report self.rawurl -- `r` is unbound when the payload came from
        # cache, so r.url would raise NameError instead of ParserException.
        if not self.rawcontent or not self.rawcontent.get("data"):
            raise ParserException("直播解析错误", self.rawurl, self.rawcontent)
        # 4. cache the payload (nx=True: keep an earlier copy if one exists)
        try:
            RedisCache().set(
                f"live:{self.room_id}",
                orjson.dumps(self.rawcontent),
                ex=CACHES_TIMER.get("live"),
                nx=True,
            )
        except Exception as e:
            logger.exception(f"缓存直播错误: {e}")
        detail = self.rawcontent["data"]
        self.user = detail["anchor_info"]["base_info"]["uname"]
        roominfo = detail.get("room_info")
        self.uid = roominfo.get("uid")
        self.content = f"{roominfo.get('title')} - {roominfo.get('area_name')} - {roominfo.get('parent_area_name')}"
        self.extra_markdown = f"[{escape_markdown(self.user)}的直播间]({self.url})"
        # Prefer the latest keyframe screenshot; fall back to the room cover.
        self.mediaurls = roominfo.get("keyframe") or roominfo.get("cover")
        self.mediatype = "image"
        return self
class Opus(Feed):
    # Raw "data" payload of the dynamic-detail API response.
    detailcontent: dict = {}
    dynamic_id: int = 0
    user: str = ""
    __content: str = ""
    # Fields describing a forwarded (reposted) dynamic, if any.
    forward_user: str = ""
    forward_uid: int = 0
    forward_content: str = ""
    has_forward: bool = False

    @cached_property
    def reply_type(self):
        """Map the dynamic rtype to the comment-area type.

        NOTE(review): falls through to None for unmapped rtypes -- presumably
        those dynamics have no comment area; confirm against parse_reply usage.
        """
        if self.rtype == 2:
            return 11
        if self.rtype == 16:
            return 5
        if self.rtype == 64:
            return 12
        if self.rtype == 256:
            return 14
        if self.rtype in [8, 512, *range(4000, 4200)]:
            return 1
        if self.rtype in [1, 4, *range(4200, 4300), *range(2048, 2100)]:
            return 17

    @cached_property
    def rtype(self):
        """Dynamic resource type from the detail payload."""
        return int(self.detailcontent["item"]["basic"]["rtype"])

    @cached_property
    def rid(self):
        """Resource id (comment oid) from the detail payload."""
        return int(self.detailcontent["item"]["basic"]["rid_str"])

    @property
    def content(self):
        """Own text plus, for reposts, the forwarded author and text.

        FIX: was @property stacked on @lru_cache(maxsize=1). lru_cache on a
        method keys on self (keeping the instance alive), shares one slot
        across all instances, and can serve a stale value after the setter
        runs. Recomputing is cheap and always consistent.
        """
        content = self.__content
        if self.has_forward:
            if self.forward_user:
                content += f"//@{self.forward_user}:\n"
            content += self.forward_content
        return self.shrink_line(content)

    @content.setter
    def content(self, content):
        self.__content = content

    @cached_property
    def content_markdown(self):
        """Markdown-escaped variant of `content` with linked forward author."""
        content_markdown = escape_markdown(self.__content)
        if self.has_forward:
            if self.uid:
                content_markdown += f"//{self.make_user_markdown(self.forward_user, self.forward_uid)}:\n"
            elif self.user:
                content_markdown += f"//@{escape_markdown(self.forward_user)}:\n"
            content_markdown += escape_markdown(self.forward_content)
        if not content_markdown.endswith("\n"):
            content_markdown += "\n"
        return self.shrink_line(content_markdown)

    @cached_property
    def url(self):
        """Canonical web URL of this dynamic."""
        return f"https://t.bilibili.com/{self.dynamic_id}"

    def __list_dicts_to_dict(self, lists: list[dict]):
        # Merge a list of single-key module dicts into one flat dict.
        return reduce(lambda old, new: old.update(new) or old, lists, {})

    def __opus_handle_major(self, major):
        """Extract media / title / forward info from a dynamic "major" module."""
        datapath_map = {
            "MDL_DYN_TYPE_ARCHIVE": "dyn_archive",
            "MDL_DYN_TYPE_PGC": "dyn_pgc",
            "MDL_DYN_TYPE_ARTICLE": "dyn_article",
            "MDL_DYN_TYPE_MUSIC": "dyn_music",
            "MDL_DYN_TYPE_COMMON": "dyn_common",
            "MDL_DYN_TYPE_LIVE": "dyn_live",
            "MDL_DYN_TYPE_UGC_SEASON": "dyn_ugc_season",
            "MDL_DYN_TYPE_DRAW": "dyn_draw",
            "MDL_DYN_TYPE_OPUS": "dyn_opus",
            "MDL_DYN_TYPE_FORWARD": "dyn_forward",
        }
        if not major:
            return
        target = datapath_map.get(major["type"])
        if major["type"] == "MDL_DYN_TYPE_FORWARD":
            # Repost: record the original author/text, then recurse into the
            # embedded dynamic for its media (unless media was already found).
            self.has_forward = True
            majorcontent = self.__list_dicts_to_dict(major[target]["item"]["modules"])
            self.forward_user = majorcontent["module_author"]["user"]["name"]
            self.forward_uid = majorcontent["module_author"]["user"]["mid"]
            if majorcontent.get("module_desc"):
                self.forward_content = self.__opus_handle_desc_text(
                    majorcontent["module_desc"]
                )
            if not self.mediatype and majorcontent.get("module_dynamic"):
                self.__opus_handle_major(majorcontent["module_dynamic"])
        elif major["type"] == "MDL_DYN_TYPE_DRAW":
            self.mediaurls = [item["src"] for item in major[target]["items"]]
            self.mediatype = "image"
        elif datapath_map.get(major["type"]):
            if major[target].get("cover"):
                self.mediaurls = major[target]["cover"]
                self.mediatype = "image"
            if major[target].get("aid") and major[target].get("title"):
                self.extra_markdown = f"[{escape_markdown(major[target]['title'])}](https://www.bilibili.com/video/av{major[target]['aid']})"

    def __opus_handle_desc_text(self, desc: dict):
        """Plain text of a module_desc block ('' when absent)."""
        if not desc:
            return ""
        return desc["text"]

    async def handle(self):
        """Resolve a t.bilibili.com dynamic link into a populated Opus feed.

        Raises:
            ParserException: when the URL does not match, or the detail
                payload cannot be fetched or validated.
        """
        logger.info(f"处理动态信息: 链接: {self.rawurl}")
        match = re.search(r"bilibili\.com[\/\w]*\/(\d+)", self.rawurl)
        if not match:
            raise ParserException("动态链接错误", self.rawurl)
        self.dynamic_id = int(match.group(1))
        # 1. probe the cache
        try:
            cache = RedisCache().get(f"opus:dynamic_id:{self.dynamic_id}")
        except Exception as e:
            logger.exception(f"拉取动态缓存错误: {e}")
            cache = None
        # 2. fetch from the API on cache miss
        if cache:
            logger.info(f"拉取动态缓存: {self.dynamic_id}")
            self.detailcontent = orjson.loads(cache)  # type: ignore
        else:
            try:
                r = await self.client.get(
                    BILI_API + "/x/polymer/web-dynamic/desktop/v1/detail",
                    params={"id": self.dynamic_id},
                )
                response = r.json()
            except Exception as e:
                raise ParserException(f"动态获取错误:{self.dynamic_id}", self.rawurl, e)
            # 3. validate (must stay inside this branch: `response` only
            # exists on the fetch path)
            if (
                not response
                or not response.get("data")
                or not response["data"].get("item")
            ):
                raise ParserException("动态解析错误", self.rawurl, response)
            self.detailcontent = response["data"]
        # 4. cache the payload (nx=True: no-op when already cached)
        try:
            RedisCache().set(
                f"opus:dynamic_id:{self.dynamic_id}",
                orjson.dumps(self.detailcontent),
                ex=CACHES_TIMER.get("opus"),
                nx=True,
            )
        except Exception as e:
            logger.exception(f"缓存动态错误: {e}")
        detailcontent = self.__list_dicts_to_dict(self.detailcontent["item"]["modules"])
        self.user = detailcontent["module_author"]["user"]["name"]
        self.uid = detailcontent["module_author"]["user"]["mid"]
        if detailcontent.get("module_desc"):
            self.content = self.__opus_handle_desc_text(detailcontent["module_desc"])
        if detailcontent.get("module_dynamic"):
            self.__opus_handle_major(detailcontent["module_dynamic"])
        self.extra_markdown = f"[{escape_markdown(self.user)}的动态]({self.url})"
        self.replycontent = await self.parse_reply(self.rid, self.reply_type)
        return self
class Read(Feed):
    rawcontent: str = ""
    read_id: int = 0
    # Comment-area type for articles (bilibili reply API "type" parameter).
    reply_type: int = 12

    @cached_property
    def url(self):
        """Canonical web URL of this article."""
        return f"https://www.bilibili.com/read/cv{self.read_id}"

    async def __relink(self, img):
        """Rehost one <img> (data-src) onto telegra.ph, rewriting src in place."""
        src = img.attrs.pop("data-src")
        img.attrs = {"src": src}
        logger.info(f"下载图片: {src}")
        async with self.client.stream("GET", f"https:{src}") as response:
            if response.status_code != 200:
                logger.error(f"图片获取错误: {src}")
                return
            media = BytesIO(await response.aread())
            mediatype = response.headers.get("content-type")
            if mediatype in ["image/jpeg", "image/png"]:
                # FIX: content-length may be absent; int(None) raised TypeError.
                content_length = int(response.headers.get("content-length") or 0)
                logger.info(f"图片大小: {content_length} 压缩: {src} {mediatype}")
                if content_length > 1024 * 1024 * 5:
                    media = compress(media, fix_ratio=True)
                else:
                    media = compress(media, size=0, fix_ratio=True)
            try:
                resp = await telegraph.upload_file(media)
                logger.info(f"图片上传: {resp}")
                img.attrs["src"] = f"https://telegra.ph{resp[0].get('src')}"
            except Exception as e:
                logger.exception(f"图片上传错误: {e}")

    async def handle(self):
        """Resolve a bilibili article (cvXXXX) link into a populated Read feed.

        Fetches the article page, mirrors its body to telegra.ph, and links
        the mirrored page in extra_markdown.

        Raises:
            ParserException: when the URL does not match or the page cannot
                be fetched/parsed.
        """
        logger.info(f"处理文章信息: 链接: {self.rawurl}")
        match = re.search(
            r"bilibili\.com\/read\/(?:cv|mobile\/|mobile\?id=)(\d+)", self.rawurl
        )
        if not match:
            raise ParserException("文章链接错误", self.rawurl)
        self.read_id = int(match.group(1))
        # -- article page --
        # 1. probe the page cache
        try:
            cache_base = RedisCache().get(f"read:page:{self.read_id}")
        except Exception as e:
            logger.exception(f"拉取文章页面缓存错误: {e}")
            cache_base = None
        # 2. fetch & parse on cache miss
        if cache_base:
            logger.info(f"拉取文章页面缓存: {self.read_id}")
            cv_content = orjson.loads(cache_base)  # type: ignore
        else:
            try:
                r = await self.client.get(self.rawurl)
            except Exception as e:
                raise ParserException(f"文章页面获取错误:{self.read_id}", self.rawurl, e)
            # 3. extract the embedded INITIAL_STATE JSON from the page
            cv_init = re.search(
                r"window\.__INITIAL_STATE__=(.*?);\(function\(\)", r.text
            )
            if not cv_init:
                raise ParserException(
                    f"文章页面内容获取错误:{self.read_id}", self.rawurl, cv_init
                )
            cv_content = orjson.loads(cv_init.group(1))
        self.uid = cv_content.get("readInfo").get("author").get("mid")
        self.user = cv_content.get("readInfo").get("author").get("name")
        self.content = cv_content.get("readInfo").get("summary")
        # Prefer the banner; fall back to inline image urls.
        mediaurls = (
            cv_content.get("readInfo").get("banner_url")
            if cv_content.get("readInfo").get("banner_url")
            else cv_content.get("readInfo").get("image_urls")
        )
        if mediaurls:
            logger.info(f"文章mediaurls: {mediaurls}")
            self.mediaurls = mediaurls
            self.mediatype = "image"
        title = cv_content.get("readInfo").get("title")
        if not cache_base:
            # 4. cache the parsed page
            try:
                cache_base = RedisCache().set(
                    f"read:page:{self.read_id}",
                    orjson.dumps(cv_content),
                    ex=CACHES_TIMER.get("read"),
                    nx=True,
                )
            except Exception as e:
                logger.exception(f"缓存文章页面错误: {e}")
        # -- mirrored telegraph page --
        # 1. probe the graphurl cache
        try:
            cache_graphurl = RedisCache().get(f"read:graphurl:{self.read_id}")
        except Exception as e:
            logger.exception(f"拉取文章链接缓存错误: {e}")
            cache_graphurl = None
        # 2. reuse or create the telegraph page
        if cache_graphurl:
            logger.info(f"拉取文章链接缓存: {self.read_id}")
            # FIX: the cache stores orjson.dumps(graphurl); the raw bytes were
            # used directly before, yielding a b'"..."' link in the markdown.
            graphurl = orjson.loads(cache_graphurl)
        else:
            # 3. convert the article body and publish it to telegra.ph
            article_content = cv_content.get("readInfo").get("content")
            if not telegraph.get_access_token():
                logger.info("creating_account")
                result = await telegraph.create_account(
                    "bilifeedbot", "bilifeedbot", "https://t.me/bilifeedbot"
                )
                logger.info(f"Telegraph create_account: {result}")
            try:
                # New-style articles store a JSON delta ("ops") body.
                article = orjson.loads(article_content)
                result = article.get("ops")[0].get("insert").split("\n")
                logger.info(result)
                graphurl = (
                    await telegraph.create_page(
                        title=title,
                        content=result,
                        author_name=self.user,
                        author_url=f"https://space.bilibili.com/{self.uid}",
                    )
                ).get("url")
            except orjson.JSONDecodeError:
                # Legacy articles store HTML: clean it up and rehost images.
                article = BeautifulSoup(article_content, "lxml")
                if not isinstance(article, Tag):
                    raise ParserException("文章内容解析错误", self.rawurl, cv_content)
                imgs = article.find_all("img")
                task = list(self.__relink(img) for img in imgs)  ## data-src -> src
                for _ in article.find_all("h1"):  ## h1 -> h3
                    _.name = "h3"
                for item in ["span", "div"]:  ## remove tags
                    for _ in article.find_all(item):
                        _.unwrap()
                for item in ["p", "figure", "figcaption"]:  ## clean tags
                    for _ in article.find_all(item):
                        _.attrs = {}
                await asyncio.gather(*task)
                result = ""
                if isinstance(article.body, Tag):
                    result = "".join(
                        [str(i) for i in article.body.contents]
                    )  ## convert tags to string
                graphurl = (
                    await telegraph.create_page(
                        title=title,
                        html_content=result,
                        author_name=self.user,
                        author_url=f"https://space.bilibili.com/{self.uid}",
                    )
                ).get("url")
            logger.info(f"生成页面: {graphurl}")
            # 4. cache the telegraph url
            try:
                RedisCache().set(
                    f"read:graphurl:{self.read_id}",
                    orjson.dumps(graphurl),
                    ex=CACHES_TIMER.get("read"),
                    nx=True,
                )
            except Exception as e:
                logger.exception(f"缓存文章链接错误: {e}")
        self.extra_markdown = f"[{escape_markdown(title)}]({graphurl})"
        self.replycontent = await self.parse_reply(self.read_id, self.reply_type)
        return self
class Video(Feed):
    cidcontent: dict = {}
    # Season/episode (bangumi) payload; empty for plain videos.
    epcontent: dict = {}
    # Video-view API payload.
    infocontent: dict = {}
    page = 1
    # Comment-area type for videos (bilibili reply API "type" parameter).
    reply_type: int = 1

    @cached_property
    def cid(self):
        """cid of the requested page, falling back to the default cid."""
        if self.infocontent and self.infocontent.get("data"):
            if self.page != 1 and self.infocontent["data"].get("pages"):
                for item in self.infocontent["data"]["pages"]:
                    if item.get("page") == self.page:
                        return item.get("cid")
                # requested page not found -> fall back to P1
                self.page = 1
            return self.infocontent["data"].get("cid")

    @cached_property
    def bvid(self):
        if self.infocontent and self.infocontent.get("data"):
            return self.infocontent["data"].get("bvid")

    @cached_property
    def aid(self):
        if self.infocontent and self.infocontent.get("data"):
            return self.infocontent["data"].get("aid")
        elif self.epid and self.epcontent and self.epcontent.get("result"):
            # Resolve the aid of the requested episode from the season list.
            for episode in self.epcontent["result"].get("episodes"):
                if str(episode.get("id")) == self.epid:
                    return episode.get("aid")

    @cached_property
    def epid(self):
        if (
            self.epcontent
            and self.epcontent.get("result")
            and self.epcontent["result"].get("episodes")
        ):
            # Default to the latest episode when none was requested.
            if not self.aid:
                self.aid = self.epcontent["result"]["episodes"][-1].get("aid")
            return self.epcontent["result"]["episodes"][-1].get("id")

    @cached_property
    def ssid(self):
        if self.epcontent and self.epcontent.get("result"):
            return self.epcontent["result"].get("season_id")

    @cached_property
    def url(self):
        """Canonical web URL of this video page."""
        return f"https://www.bilibili.com/video/av{self.aid}?p={self.page}"

    async def __test_url_status_code(self, url, referer):
        """True when *url* answers 200 with the given Referer header."""
        header = headers.copy()
        header["Referer"] = referer
        async with self.client.stream("GET", url, headers=header) as response:
            if response.status_code != 200:
                return False
            return True

    async def __get_video_result(self, detail, qn: int):
        """Try to obtain a playable video url at quality *qn*.

        Populates the media fields and returns True on success; returns
        None/False when the stream is too large or unreachable.
        """
        params = {"avid": self.aid, "cid": self.cid}
        if qn:
            params["qn"] = qn
        r = await self.client.get(
            BILI_API + "/x/player/playurl",
            params=params,
        )
        video_result = r.json()
        logger.debug(f"视频内容: {video_result}")
        if (
            video_result.get("code") == 0
            and video_result.get("data")
            and video_result.get("data").get("durl")
            and video_result.get("data").get("durl")[0].get("size")
            < (
                int(
                    os.environ.get(
                        "VIDEO_SIZE_LIMIT", FileSizeLimit.FILESIZE_UPLOAD_LOCAL_MODE
                    )
                )
                if LOCAL_MODE
                else FileSizeLimit.FILESIZE_UPLOAD
            )
        ):
            url = video_result["data"]["durl"][0]["url"]
            result = await self.__test_url_status_code(url, self.url)
            # Primary CDN unreachable -> try the backup url if present.
            if not result and video_result["data"]["durl"][0].get("backup_url", None):
                url = video_result["data"]["durl"][0]["backup_url"]
                result = await self.__test_url_status_code(url, self.url)
            if result:
                self.mediacontent = video_result
                self.mediathumb = detail.get("pic")
                self.mediaduration = round(
                    video_result["data"]["durl"][0]["length"] / 1000
                )
                self.mediadimention = detail.get("pages")[0].get("dimension")
                self.mediaurls = url
                self.mediatype = "video"
                # Download the raw file ourselves only when it exceeds what
                # the bot API will fetch by URL.
                self.mediaraws = video_result.get("data").get("durl")[0].get(
                    "size"
                ) >= (
                    FileSizeLimit.FILESIZE_DOWNLOAD_LOCAL_MODE
                    if LOCAL_MODE
                    else FileSizeLimit.FILESIZE_DOWNLOAD
                )
                return True

    async def handle(self):
        """Resolve a video/bangumi link into a populated Video feed.

        Supports av/BV/ep/ss ids, b23.tv short links and festival pages.

        Raises:
            ParserException: when the URL does not match, or a payload cannot
                be fetched or validated.
        """
        logger.info(f"处理视频信息: 链接: {self.rawurl}")
        match = re.search(
            r"(?:bilibili\.com/(?:video|bangumi/play)|b23\.tv|acg\.tv)/(?:(?P<bvid>BV\w{10})|av(?P<aid>\d+)|ep(?P<epid>\d+)|ss(?P<ssid>\d+)|)/?\??(?:p=(?P<page>\d+))?",
            self.rawurl,
        )
        match_fes = re.search(
            r"bilibili\.com/festival/(?P<festivalid>\w+)\?(?:bvid=(?P<bvid>BV\w{10}))",
            self.rawurl,
        )
        if match_fes:
            bvid = match_fes.group("bvid")
            epid = None
            aid = None
            ssid = None
            page = 1
        elif match:
            bvid = match.group("bvid")
            epid = match.group("epid")
            aid = match.group("aid")
            ssid = match.group("ssid")
            page = match.group("page")
            if page and page.isdigit():
                page = max(1, int(page))
            else:
                page = 1
        else:
            raise ParserException("视频链接错误", self.rawurl)
        if epid:
            params = {"ep_id": epid}
        elif bvid:
            params = {"bvid": bvid}
        elif aid:
            params = {"aid": aid}
        elif ssid:
            params = {"season_id": ssid}
        else:
            raise ParserException("视频链接解析错误", self.rawurl)
        self.page = page
        if epid:
            self.epid = epid
        if epid is not None or ssid is not None:
            # 1. probe the bangumi cache
            try:
                cache = (
                    RedisCache().get(f"bangumi:ep:{epid}")
                    if epid
                    else RedisCache().get(f"bangumi:ss:{ssid}")
                )
            except Exception as e:
                logger.exception(f"拉取番剧缓存错误: {e}")
                cache = None
            # 2. fetch the season payload on cache miss
            if cache:
                logger.info(
                    f"拉取番剧缓存:epid {epid}" if epid else f"拉取番剧缓存:ssid {ssid}"
                )
                self.epcontent = orjson.loads(cache)  # type: ignore
            else:
                try:
                    r = await self.client.get(
                        BILI_API + "/pgc/view/web/season",
                        params=params,
                    )
                    self.epcontent = r.json()
                except Exception as e:
                    raise ParserException(
                        f"番剧获取错误:{epid if epid else ssid}", self.rawurl, e
                    )
            # 3. validate the season payload
            if not self.epcontent or not self.epcontent.get("result"):
                # Anime detects non-China IP
                raise ParserException(
                    f"番剧解析错误:{epid if epid else ssid} {self.epcontent}",
                    self.rawurl,
                    self.epcontent,
                )
            if not self.epid or not self.ssid or not self.aid:
                # FIX: the message interpolated self.aid twice; report epid too.
                raise ParserException(
                    f"番剧解析错误:{self.epid} {self.ssid} {self.aid}",
                    self.rawurl,
                    self.epcontent,
                )
            # 4. cache the season payload under both ep and ss keys
            try:
                for key in [f"bangumi:ep:{self.epid}", f"bangumi:ss:{self.ssid}"]:
                    RedisCache().set(
                        key,
                        orjson.dumps(self.epcontent),
                        ex=CACHES_TIMER.get("bangumi"),
                        nx=True,
                    )
            except Exception as e:
                logger.exception(f"缓存番剧错误: {e}")
            params = {"aid": self.aid}
            aid = self.aid
        # 1. probe the video cache
        try:
            cache = (
                RedisCache().get(f"video:aid:{aid}")
                if aid
                else RedisCache().get(f"video:bvid:{bvid}")
            )
        except Exception as e:
            logger.exception(f"拉取视频缓存错误: {e}")
            cache = None
        # 2. fetch the view payload on cache miss
        if cache:
            logger.info(f"拉取视频缓存:{aid if aid else bvid}")
            self.infocontent = orjson.loads(cache)  # type: ignore
        else:
            try:
                r = await self.client.get(
                    BILI_API + "/x/web-interface/view",
                    params=params,
                )
                self.infocontent = r.json()
            except Exception as e:
                raise ParserException(
                    f"视频获取错误:{aid if aid else bvid}", self.rawurl, e
                )
        # 3. validate the view payload
        # FIX: the condition used `and`, which never caught a payload missing
        # "data" (non-China IP case). Also report self.rawurl -- `r` is
        # unbound when the payload came from cache.
        if not self.infocontent or not self.infocontent.get("data"):
            # Video detects non-China IP
            raise ParserException(
                f"视频解析错误{aid if aid else bvid}", self.rawurl, self.infocontent
            )
        if not self.aid or not self.bvid or not self.cid:
            # FIX: attach infocontent (was epcontent, a copy-paste slip).
            raise ParserException(
                f"视频解析错误:{self.aid} {self.bvid} {self.cid}",
                self.rawurl,
                self.infocontent,
            )
        # 4. cache the view payload under both aid and bvid keys
        try:
            for key in [f"video:aid:{self.aid}", f"video:bvid:{self.bvid}"]:
                RedisCache().set(
                    key,
                    orjson.dumps(self.infocontent),
                    ex=CACHES_TIMER.get("video"),
                    nx=True,
                )
        except Exception as e:
            # FIX: log message said 番剧 (copy-paste from the bangumi section).
            logger.exception(f"缓存视频错误: {e}")
        detail = self.infocontent["data"]
        self.user = detail.get("owner").get("name")
        self.uid = detail.get("owner").get("mid")
        self.content = detail.get("tname", "发布视频")
        if detail.get("pages") and len(detail["pages"]) > 1:
            self.content += f" - 第{page}P/共{len(detail['pages'])}P"
        if detail.get("dynamic") or detail.get("desc"):
            self.content += f" - {detail.get('dynamic') or detail.get('desc')}"
        self.extra_markdown = f"[{escape_markdown(detail.get('title'))}]({self.url})"
        self.mediatitle = detail.get("title")
        self.mediaurls = detail.get("pic")
        self.mediatype = "image"
        self.replycontent = await self.parse_reply(self.aid, self.reply_type)

        # Try qualities best-first; the first stream that fits and answers wins.
        for qn in QN:
            if await self.__get_video_result(detail, qn):
                break
        return self
"image" + self.replycontent = await self.parse_reply(self.aid, self.reply_type) + + for qn in QN: + if await self.__get_video_result(detail, qn): + break + return self diff --git a/main.py b/main.py index fe48caa..20a15bf 100644 --- a/main.py +++ b/main.py @@ -8,7 +8,6 @@ import httpx import pytz -import uvloop from telegram import ( InlineKeyboardButton, InlineKeyboardMarkup, @@ -56,8 +55,6 @@ ) from database import db_close, db_init, file_cache -asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) - BILIBILI_URL_REGEX = r"(?i)(?:https?://)?[\w\.]*?(?:bilibili(?:bb)?\.com|(?:b23(?:bb)?|acg)\.tv)\S+|BV\w{10}" BILIBILI_SHARE_URL_REGEX = r"(?i)【.*】 https://[\w\.]*?(?:bilibili\.com|b23\.tv)\S+" diff --git a/test/test_biliparser.py b/test/test_biliparser.py index dfd2cd8..45dc253 100644 --- a/test/test_biliparser.py +++ b/test/test_biliparser.py @@ -1,12 +1,15 @@ import pytest +from biliparser import biliparser +from biliparser.strategy.audio import Audio +from biliparser.strategy.live import Live +from biliparser.strategy.opus import Opus +from biliparser.strategy.read import Read +from biliparser.strategy.video import Video + @pytest.mark.asyncio async def test_dynamic_parser(): - from biliparser import biliparser - from biliparser.database import db_init - - await db_init() urls = [ "https://t.bilibili.com/379593676394065939?tab=2", # 动态带图非转发 "https://t.bilibili.com/371426091702577219?tab=2", # 引用带视频 @@ -32,7 +35,6 @@ async def test_dynamic_parser(): "https://www.bilibili.com/video/BV1bW411n7fY/", # 视频(活动) "https://b23.tv/BV1bW411n7fY", # 视频(活动) "av912905698", # 视频(短链) - "BV1bW411n7fY", # 视频(短链) ] for i in urls: result = await biliparser(i) @@ -40,13 +42,27 @@ async def test_dynamic_parser(): @pytest.mark.asyncio -async def test_cache(): - import orjson - - from biliparser.cache import RedisCache - req = ["2"] - RedisCache().set("1", orjson.dumps(req)) - result = RedisCache().get("1") - # result = RedisCache().get("2") - result = orjson.loads(result) - assert 
result +async def test_video_parser(): + result: list[Video] = await biliparser("BV1bW411n7fY") # type: ignore + assert result[0].aid == 19390801 + assert result[0].bvid == "BV1bW411n7fY" + assert ( + result[0].caption + == "[【春晚鬼畜】赵本山:我就是念诗之王!【改革春风吹满地】](https://www.bilibili.com/video/av19390801?p=1)\n[@UP\\-Sings](https://space.bilibili.com/353246678):\n鬼畜调教 \\- 不管今年春晚有没有本山叔,鬼畜区总归是有的!\n" + ) + assert result[0].cid == 31621681 + assert result[0].cidcontent == {} + assert result[0].comment == "" + assert result[0].comment_markdown == "" + assert ( + result[0].content == "鬼畜调教 - 不管今年春晚有没有本山叔,鬼畜区总归是有的!" + ) + assert ( + result[0].content_markdown + == "鬼畜调教 \\- 不管今年春晚有没有本山叔,鬼畜区总归是有的!" + ) + assert ( + result[0].extra_markdown + == "[【春晚鬼畜】赵本山:我就是念诗之王!【改革春风吹满地】](https://www.bilibili.com/video/av19390801?p=1)" + ) + assert result[0].url == "https://www.bilibili.com/video/av19390801?p=1"