2023-06-01 07:00:37 -07:00
|
|
|
import asyncio
|
|
|
|
import datetime
|
2023-06-01 08:36:21 -07:00
|
|
|
import os
|
|
|
|
import sys
|
|
|
|
from bilix.sites.bilibili.downloader import DownloaderBilibili
|
2023-06-01 07:00:37 -07:00
|
|
|
from _biliup_archive_bvid import archive_bvid
|
|
|
|
import argparse
|
2023-06-01 08:53:24 -07:00
|
|
|
import uvloop
|
|
|
|
|
|
|
|
# Install uvloop as the asyncio event-loop implementation (faster drop-in
# replacement for the default loop); must run before any loop is created.
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
|
2023-06-01 07:00:37 -07:00
|
|
|
|
|
|
|
def parse_args(argv=None):
    """Parse command-line options for the archiving script.

    Args:
        argv: optional list of argument strings; defaults to sys.argv[1:]
              (added for testability; passing nothing preserves old behavior).

    Returns:
        argparse.Namespace with:
            sess_data: bilibili SESSDATA cookie string; when --sess-data is
                       not given, it is read lazily from sess_data.txt.
            bvids: path to a newline-separated bvid list file; defaults to
                   today's bvids/bvids-YYYY-MM-DD.txt.
    """
    parser = argparse.ArgumentParser()
    today = datetime.date.today()
    # default=None so sess_data.txt is only read when the flag is absent.
    # The original evaluated get_sess_data() unconditionally, reading (and
    # requiring) the file even when --sess-data was supplied on the CLI.
    parser.add_argument('--sess-data', type=str, default=None)
    parser.add_argument('--bvids', type=str, default=f'bvids/bvids-{today.isoformat()}.txt')
    args = parser.parse_args(argv)
    if args.sess_data is None:
        args.sess_data = get_sess_data()
    return args
|
|
|
|
|
|
|
|
def main():
    """Archive every bvid listed in the input file.

    Reads the newline-separated bvid list named by --bvids, then downloads
    each video via archive_bvid() with a bilix DownloaderBilibili, awaiting
    completed work in batches of 6 so at most 6 archive tasks are in flight.
    """
    args = parse_args()
    with open(args.bvids, 'r', encoding='utf-8') as f:
        bvids = f.read().splitlines()

    async def do():
        # video_concurrency=6 matches the batch size used below.
        d = DownloaderBilibili(video_concurrency=6, part_concurrency=1,
                               hierarchy=True, sess_data=args.sess_data)
        d.progress.start()
        try:
            tasks = []
            for bvid in bvids:
                # Schedule immediately on the running loop; the original's
                # asyncio.gather(cor) around a single task was redundant.
                tasks.append(asyncio.create_task(archive_bvid(d=d, bvid=bvid)))
                if len(tasks) == 6:
                    await asyncio.gather(*tasks)
                    tasks = []
            # Drain any final partial batch.
            if tasks:
                await asyncio.gather(*tasks)
        finally:
            # Ensure the progress display is stopped even if a task raises.
            d.progress.stop()

    # BUG FIX: the original executed do() twice — once via asyncio.run(do())
    # and again via loop.run_until_complete(do()) — archiving everything
    # twice. Run it exactly once with the modern asyncio.run entry point.
    asyncio.run(do())
|
2023-06-01 07:00:37 -07:00
|
|
|
|
|
|
|
def get_sess_data():
    """Return the SESSDATA cookie value stored in sess_data.txt.

    Surrounding whitespace (including the trailing newline) is stripped.
    """
    with open('sess_data.txt', encoding='utf-8') as fp:
        return fp.read().strip()
|
|
|
|
|
|
|
|
|
|
|
|
# Script entry point: only run when executed directly, not when imported.
if __name__ == '__main__':
    main()
|