TheTechRobo 2022-03-11 22:31:17 -05:00
commit cfa125884f
1 changed file with 32 additions and 0 deletions

app.py Normal file

@@ -0,0 +1,32 @@
import requests
from cprint import cprint
cprint.info("Please wait...")
# Reference URLs kept from the original script (not used by the functions below).
ALBUM = "https://space.bilibili.com/233193626/album"
POST = "https://t.bilibili.com/?spm_id_from=333.999.0.0"
# API endpoint template for fetching a single dynamic (post) by its dynamic_id.
base = "https://api.vc.bilibili.com/dynamic_svr/v1/dynamic_svr/get_dynamic_detail?dynamic_id=%s"
def userScraper(id):
    # Yield (json, explanation) pairs for each of the user's profile endpoints.
    urls = [
        (f"https://api.bilibili.com/x/space/acc/info?mid={id}&jsonp=jsonp", "USER_INFO"),
        (f"https://space.bilibili.com/ajax/settings/getSettings?mid={id}", "USER_SETTINGS"),
        (f"https://api.bilibili.com/x/relation/stat?vmid={id}&jsonp=jsonp", "USER_STATS"),
        (f"https://api.bilibili.com/x/space/navnum?mid={id}", "NAV_NUMS"),
    ]
    for url, explanation in urls:
        yield requests.get(url).json(), explanation
def albumScraper(id):
    page = 0
    size = 30
    # Number of album pages, derived from the user's total album count.
    maxpage = requests.get(f"https://api.bilibili.com/x/space/navnum?mid={id}").json()["data"]["album"] / size
    while page <= maxpage:
        yield requests.get(f"https://api.bilibili.com/x/dynamic/feed/draw/doc_list?uid={id}&page_num={page}&page_size={size}&biz=all&jsonp=jsonp").json(), page
        cprint.ok(f"Scraping, {page * size} results so far")
        page += 1
def postScraper(id):
    # Fetch a single dynamic (post) by its dynamic_id.
    return requests.get(base % id).json()
# Drive the album scraper for the hardcoded user id (responses are discarded).
list(albumScraper("233193626"))
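
# Hypothetical usage sketch (not part of the original commit): consume the
# scrapers for one user id and persist each JSON payload to disk. The
# save_json helper and the output filenames are assumptions for illustration;
# note that postScraper expects a dynamic (post) id, not a user id, so it is
# not called here.
import json

def save_json(payload, name):
    # Write one API response to a pretty-printed JSON file.
    with open(f"{name}.json", "w", encoding="utf-8") as f:
        json.dump(payload, f, ensure_ascii=False, indent=2)

def scrapeUser(uid):
    for payload, explanation in userScraper(uid):
        save_json(payload, f"user_{uid}_{explanation}")
    for payload, page in albumScraper(uid):
        save_json(payload, f"album_{uid}_page_{page}")

# scrapeUser("233193626")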