import re
import webbrowser
from os import getenv
from typing import List

import discum
import requests
from bs4 import BeautifulSoup
from dotenv import load_dotenv
# Matches http(s)/ftp URLs as well as bare "domain.tld/..." forms.
# NOTE: the pattern must be a raw string — in a plain string literal "\b"
# is a backspace character (0x08), which silently breaks the word-boundary
# branch used to match schemeless domains.
URL_REGEX = re.compile(
    r"(?:(?:https?|ftp):\/\/|\b(?:[a-z\d]+\.))(?:(?:[^\s()<>]+|\((?:[^\s()<>]+|(?:\([^\s()<>]+\)))?\))+(?:\((?:[^\s()<>]+|(?:\(?:[^\s()<>]+\)))?\)|[^\s`!()\[\]{};:'.,<>?«»“”‘’]))?")
# ---------------------------------------------------------------------------
# Discord channel IDs watched for stock-alert messages.
# ---------------------------------------------------------------------------

# consoles
PS5_CHANNELS = [
    713321461124694056, 721009876893040682,
    781965674838753331, 802674800786145311,
    780190141599580161, 780190104357961759,
]

# gpus — every GPU list also includes the Founders Edition channel(s)
FOUNDERS_CHANNELS = [802674384120446996]

RTX3060_CHANNELS = [809731473669619730] + FOUNDERS_CHANNELS

RTX3060TI_CHANNELS = [783682409635250187, 783425011116539964,
                      802674527850725377, 796451904401834015] + FOUNDERS_CHANNELS

RTX3070_CHANNELS = [755728127069519913, 761002102804709448,
                    802674552541806662, 796451880224948274] + FOUNDERS_CHANNELS

RTX3070TI_CHANNELS = [839507735531749446,
                      849924994686910504] + FOUNDERS_CHANNELS

RTX3080_CHANNELS = [755727814912901170, 758224323843850250,
                    802674584473567303, 796451854808121414] + FOUNDERS_CHANNELS

RTX3080TI_CHANNELS = [833700313819119676,
                      849924965842419722] + FOUNDERS_CHANNELS

RTX3090_CHANNELS = [755728368413966387, 760577787332788315,
                    796451832968642561] + FOUNDERS_CHANNELS

# Every channel the bot reacts to; a set for O(1) membership tests.
MONITORED_CHANNELS = set(PS5_CHANNELS + RTX3060_CHANNELS + RTX3060TI_CHANNELS + RTX3070_CHANNELS +
                         RTX3070TI_CHANNELS + RTX3080_CHANNELS + RTX3080TI_CHANNELS + RTX3090_CHANNELS)
# load env vars from .env file
load_dotenv()

token = getenv("TOKEN")

if not token:
    print("Could not load environmental variables. Make sure TOKEN is set in .env")
    # Exit non-zero so shells/supervisors can detect the failure
    # (the original exited with 0, which signals success).
    exit(1)

# discum self-bot client; log=False silences gateway debug output
bot = discum.Client(token=token, log=False)
########################################
# Callbacks
########################################
def get_soup(url: str) -> BeautifulSoup:
    """Fetch *url* and return its HTML parsed with the lxml backend.

    Raises requests.RequestException (incl. Timeout) on network failure.
    """
    # timeout added: requests.get has no default timeout and can hang forever
    r = requests.get(url, timeout=10)
    return BeautifulSoup(r.text, features="lxml")
def get_stockinformer_url(url: str) -> str:
    """Resolve a stockinformer alert page to the product URL it points at.

    Scans the page for the first "View at ..." anchor and returns:
    an absolute stockinformer URL for site-relative hrefs, an Amazon URL
    with its query string stripped (drops tag/referral parameters), or the
    raw href otherwise.  Returns None when no "view at" link is found.
    """
    bs = get_soup(url)

    for a in bs.find_all("a"):
        if "view at" in a.text.lower():
            url = a.get("href")

            if "stockinformer" in url:
                # hrefs on the site are relative — make them absolute
                return f"https://stockinformer.co.uk/{url}"
            elif "amazon" in url:
                # strip query string (tags and referrals)
                return f"{url.split('?')[0]}"

            return url

    return None
def get_partalert_url(url: str) -> str:
    """Extract the Amazon product link from a partalert.net alert page.

    Returns the last Amazon anchor found on the page, with the query
    string stripped (drops tag/referral parameters), or None when the
    page contains no Amazon link.
    """
    ret_url = None
    bs = get_soup(url)

    for a in bs.find_all("a"):
        if "amazon" in a.text.lower():
            # remove tags and referrals
            amazon_url = a.get("href").split("?")[0]
            ret_url = amazon_url

    return ret_url
def check_urls(urls: List[str]):
    """Resolve each alert URL to a shop link and open it in the browser.

    partalert/stockinformer links are first dereferenced to the underlying
    product page; URLs that cannot be resolved are skipped.
    """
    for url in urls:
        if "partalert" in url:
            url = get_partalert_url(url)
        elif "stockinformer" in url:
            url = get_stockinformer_url(url)

        # Resolver found nothing to open (raw regex matches are never empty,
        # so this only triggers after a failed resolution).
        if not url:
            continue

        print(f'Opening {url}')
        webbrowser.open(url)
@bot.gateway.command
def on_message(resp):
    """Gateway callback: collect URLs from messages in monitored channels.

    On READY_SUPPLEMENTAL, subscribes to guild events so message events
    arrive.  On message events, scans the message text and all embed
    fields for URLs and hands any matches to check_urls().
    """
    urls = []

    if resp.event.ready_supplemental:
        bot.gateway.subscribeToGuildEvents(wait=1)

    if resp.event.message:
        m = resp.parsed.auto()

        channel_id = int(m['channel_id'])
        # .get defaults: embed-only messages may omit 'content'; embeds
        # without 'fields' are common — plain indexing raised KeyError.
        content = m.get('content', '')
        embeds = m.get('embeds', [])

        if channel_id in MONITORED_CHANNELS:
            # search for urls in message text
            urls.extend(URL_REGEX.findall(content))

            # search for urls in embeds
            for e in embeds:
                for f in [x['value'] for x in e.get('fields', [])]:
                    urls.extend(URL_REGEX.findall(f))

        if urls:
            check_urls(urls)
print("Initialized.")
# Blocks forever, reconnecting automatically if the gateway drops.
bot.gateway.run(auto_reconnect=True)