|
| 1 | +import os |
| 2 | + |
| 3 | +import requests |
| 4 | +from bs4 import BeautifulSoup |
| 5 | +from fake_useragent import UserAgent |
| 6 | + |
# HTTP headers sent with every request. The standard header name is
# "User-Agent" (with a hyphen) — the original key "UserAgent" is not a real
# header, so the random UA string was silently ignored by the server.
# A randomized user agent helps avoid naive bot blocking on the target site.
headers = {"User-Agent": UserAgent().random}
URL = "https://www.mywaifulist.moe/random"
| 9 | + |
| 10 | + |
def save_image(image_url: str, image_title: str) -> None:
    """
    Download the image at ``image_url`` and write it to the file ``image_title``.

    Raises:
        requests.HTTPError: if the server responds with an error status.
        requests.Timeout: if the download stalls past the timeout.
    """
    # A timeout prevents the script from hanging forever on a dead connection.
    image = requests.get(image_url, headers=headers, timeout=10)
    # Fail loudly rather than saving an HTML error page as the "image".
    image.raise_for_status()
    with open(image_title, "wb") as file:
        file.write(image.content)
| 18 | + |
| 19 | + |
def random_anime_character() -> tuple[str, str, str]:
    """
    Return the title, description, and saved image filename of a random
    anime character scraped from mywaifulist.moe.

    The character's image is downloaded as a side effect (via ``save_image``)
    into the current working directory.

    Raises:
        requests.HTTPError: if the page request fails.
        ValueError: if the page lacks the expected metadata tags.
    """
    # A timeout prevents the script from hanging forever on a dead connection.
    response = requests.get(URL, headers=headers, timeout=10)
    response.raise_for_status()
    soup = BeautifulSoup(response.text, "html.parser")

    title_tag = soup.find("meta", attrs={"property": "og:title"})
    image_tag = soup.find("meta", attrs={"property": "og:image"})
    description_tag = soup.find("p", id="description")
    # Guard against page-layout changes: raise a clear error instead of an
    # opaque AttributeError on None.
    if title_tag is None or image_tag is None or description_tag is None:
        raise ValueError("Page did not contain the expected character metadata")

    title = title_tag.attrs["content"]
    image_url = image_tag.attrs["content"]
    description = description_tag.get_text()

    # Build a filesystem-friendly filename: keep the remote image's extension
    # and replace spaces in the title with underscores.
    _, image_extension = os.path.splitext(os.path.basename(image_url))
    image_title = title.strip().replace(" ", "_") + image_extension
    save_image(image_url, image_title)
    return (title, description, image_title)
| 33 | + |
| 34 | + |
if __name__ == "__main__":
    # Fetch one random character, report it, and note where the image landed.
    character_title, character_description, saved_file = random_anime_character()
    print(f"{character_title}\n\n{character_description}\n\nImage saved : {saved_file}")
0 commit comments