
Cleaned up reddit cog. Also error catching may have been broken, but I learned how to do except for multiple errors now.

tags/v1.8.0
Roxie Gibson 6 years ago
parent commit 88b8d6af67
1 changed file with 5 additions and 13 deletions

roxbot/cogs/reddit.py (+5 -13)

 import random
-from asyncio import TimeoutError, sleep
-import roxbot
 from html import unescape
 from bs4 import BeautifulSoup

-import discord
+from discord.ext import commands

+import roxbot
 from roxbot import guild_settings
-from discord.ext import commands


 async def _imgur_removed(url):
     page = await roxbot.http.get_page(url)
     soup = BeautifulSoup(page, 'html.parser')
-    if "removed.png" in soup.img["src"]:
-        return True
-    else:
-        return False
+    return bool("removed.png" in soup.img["src"])


 async def imgur_get(url):
     try:
         posts = r["data"]
         return posts
-    except KeyError or TypeError:
+    except (KeyError, TypeError):
         return {}

@@ … @@

     return False


-class Reddit():
+class Reddit:
     def __init__(self, bot_client):
         self.bot = bot_client
         self.post_cache = {}

@@ … @@

         url = ""
         x = 0
-        # TODO: Test possible crashing here.
         # While loop here to make sure that we check if there is any image posts in the links we have. If so, just take the first one.
         # Choosing a while loop here because, for some reason, the for loop would never exit till the end. Leading to slow times.
         while not url and x <= 20:
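The fix the commit message alludes to is the except clause in imgur_get. In Python, `except KeyError or TypeError:` first evaluates `KeyError or TypeError` as a boolean expression; a class object is truthy, so it short-circuits to plain `KeyError`, and a TypeError sails straight through the handler. Catching several exception types takes a tuple. A minimal sketch of the difference (the lookup helper and its sample inputs are illustrative, not code from the cog):

def lookup(data):
    # data["data"] raises KeyError if the key is missing,
    # and TypeError if data is not a mapping at all (e.g. None).
    try:
        return data["data"]
    except (KeyError, TypeError):  # tuple form: catches either error
        return {}

# The broken form, `except KeyError or TypeError:`, evaluates the
# expression first. A class is truthy, so it short-circuits to
# `except KeyError:` and a TypeError would propagate uncaught:
assert (KeyError or TypeError) is KeyError

print(lookup({"data": [1, 2, 3]}))  # [1, 2, 3]
print(lookup({}))                   # {} -- KeyError caught
print(lookup(None))                 # {} -- TypeError caught by the tuple form

The `return bool(...)` change in _imgur_removed is purely cosmetic by comparison: `in` already evaluates to True or False, so the four-line if/else collapses to a single return with no change in behavior.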

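The comments kept in the last hunk describe a bounded random scan: draw posts until one is an image, but give up after 21 tries (`x <= 20`) so a subreddit with no image posts cannot stall the command. A rough sketch of that pattern under assumed data, where `posts`, the "url" key, and the extension check stand in for whatever the cog actually pulls from Reddit:

import random

IMAGE_EXTENSIONS = (".jpg", ".jpeg", ".png", ".gif")

def pick_image_url(posts):
    """Pick a random image post, giving up after a bounded number of draws."""
    url = ""
    x = 0
    # Bounded loop: stop as soon as an image URL turns up, or after
    # 21 draws, instead of scanning every post in the listing.
    while not url and x <= 20:
        candidate = random.choice(posts)["url"]
        if candidate.lower().endswith(IMAGE_EXTENSIONS):
            url = candidate
        x += 1
    return url  # empty string if nothing suitable turned up

# Example: only the .png post can be returned; a listing with no
# images would make the function return "" after 21 draws.
posts = [{"url": "https://example.com/a.png"},
         {"url": "https://example.com/thread"}]
print(pick_image_url(posts))

Capping the draw count trades completeness for latency, which matches the committed comment about the for loop never exiting until the end: a full scan of a large listing blocks the bot for longer than a handful of random probes.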