Dockerized and fixed errors

This commit is contained in:
Remy Moll
2022-01-15 22:14:12 +01:00
parent 3bbe3e6cc6
commit 54b52f78bf
48 changed files with 35 additions and 11 deletions

12
app/bot/api/__init__.py Normal file
View File

@@ -0,0 +1,12 @@
from . import reddit
from . import weather
from . import search
from . import metmuseum

import os

# Inside the docker container the API keys are mounted at /keys instead of
# living next to the package, so extend sys.path and import them from there.
if os.getenv("dockerized", "") == "true":
    import sys
    sys.path.append("/keys")
    import api_keys as keys
else:
    from . import keys

39
app/bot/api/metmuseum.py Normal file
View File

@@ -0,0 +1,39 @@
import requests
import random
from PIL import Image
import io
class ArtFetch:
    """Fetch random artworks from the Metropolitan Museum of Art open-access API."""

    def __init__(self):
        self.base_url = "https://collectionapi.metmuseum.org/"
        # Pre-fetch the pool of object ids once; get_random_art() draws from it.
        self.objects = self.fetch_objects()

    def fetch_objects(self):
        """Return object ids of artworks restricted to a few chosen departments."""
        # fetch all departments
        t = requests.get(self.base_url + "public/collection/v1/departments").json()
        deps = t["departments"]
        # keep only a hand-picked subset of departments
        wanted = {
            "American Decorative Arts",
            "Arts of Africa, Oceania, and the Americas",
            "Asian Art",
            "European Paintings",
        }
        keep_id = [str(d["departmentId"]) for d in deps if d["displayName"] in wanted]
        # fetch artworks listed under these departments
        data = {"departmentIds": "|".join(keep_id)}
        t = requests.get(self.base_url + "public/collection/v1/objects", params=data).json()
        return t["objectIDs"]

    def get_random_art(self):
        """Return a PIL Image of a randomly selected artwork."""
        # random.choice fixes the off-by-one of the previous
        # random.randint(0, len(self.objects)): randint's upper bound is
        # inclusive, so it could produce an out-of-range index.
        r_id = random.choice(self.objects)
        t = requests.get(self.base_url + "public/collection/v1/objects/" + str(r_id)).json()
        im_url = t["primaryImageSmall"]
        # download the image and decode it entirely in memory
        resp = requests.get(im_url)
        return Image.open(io.BytesIO(resp.content))

56
app/bot/api/reddit.py Normal file
View File

@@ -0,0 +1,56 @@
import praw
class RedditFetch():
    """Thin wrapper around a praw.Reddit client for fetching posts.

    On any fetch error the public methods return an empty list rather
    than raising, matching the original best-effort contract.
    """

    def __init__(self, key):
        # key: dict with "id", "secret" and "user_agent" entries
        self.stream = praw.Reddit(client_id=key["id"], client_secret=key["secret"], user_agent=key["user_agent"])

    def _collect(self, submissions, return_type):
        # Shared extraction logic for get_top/get_random_rising:
        # non-stickied submissions become text dicts or image dicts.
        if return_type == "text":
            return [
                {"title": s.title, "content": s.selftext}
                for s in submissions
                if not s.stickied
            ]
        return [
            {"image": s.url, "caption": s.title}
            for s in submissions
            if not s.stickied
        ]

    def get_top(self, subreddit, number, return_type="text"):
        """Return up to *number* top posts of *subreddit* (text or image dicts)."""
        try:
            return self._collect(self.stream.subreddit(subreddit).top(limit=number), return_type)
        except Exception:  # narrowed from bare except: keep best-effort, don't swallow SystemExit
            return []

    def get_random_rising(self, subreddit, number, return_type="text"):
        """Return up to *number* random-rising posts of *subreddit* (text or image dicts)."""
        try:
            return self._collect(self.stream.subreddit(subreddit).random_rising(limit=number), return_type)
        except Exception:  # narrowed from bare except
            return []

21
app/bot/api/search.py Normal file
View File

@@ -0,0 +1,21 @@
import duckduckpy
class WebSearch():
    """Minimal DuckDuckGo instant-answer search wrapper."""

    def __init__(self):
        self.search = duckduckpy.query

    def get_result(self, query):
        """Return a list of {"text", "url"} dicts for *query*.

        On any failure the single-element list ["Connection error"] is
        returned instead (kept for backward compatibility with callers).
        """
        res = []  # bound before the try so it always exists
        try:
            response = self.search(query, container="dict")["related_topics"]
            for r in response:
                # some related topics are category groupings without text/first_url
                if "text" in r:
                    res.append({
                        "text": r["text"],
                        "url": r["first_url"],
                    })
        except Exception:  # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
            res = ["Connection error"]
        return res
# TODO: this api has more potential. Extract images or quick facts!

50
app/bot/api/weather.py Normal file
View File

@@ -0,0 +1,50 @@
import requests
import datetime
import logging
logger = logging.getLogger(__name__)
class WeatherFetch():
    """Caching client for the OpenWeatherMap one-call API."""

    def __init__(self, key):
        # epoch start guarantees the first show_weather() call fetches
        self.last_fetch = datetime.datetime.fromtimestamp(0)
        self.last_fetch_location = []
        self.last_weather = []
        self.calls = 0  # number of real API calls made (quota awareness)
        self.url = "https://api.openweathermap.org/data/2.5/onecall?"
        self.key = key

    def show_weather(self, location):
        """Return current + daily weather for *location* ([lat, lon]).

        Results are cached for one hour per location; an empty list is
        returned when the API call fails or the payload is malformed.
        """
        delta = datetime.datetime.now() - self.last_fetch
        # refetch if 1 hour passed, the previous fetch failed, or the location changed
        if delta.total_seconds() > 3600 \
                or not self.last_weather \
                or self.last_fetch_location != location:
            data = {"lat": location[0], "lon": location[1], "exclude": "minutely,hourly", "appid": self.key, "units": "metric"}
            self.calls += 1
            # lazy %-style args: the message is only formatted if the level is enabled
            logger.info("Just fetched weather. (%sth time)", self.calls)
            try:
                weather = requests.get(self.url, params=data).json()
                now = weather["current"]
                ret_weather = [{
                    "short": now["weather"][0]["main"],
                    "temps": [int(now["temp"])],
                }]
                for day in weather["daily"]:  # unused enumerate index removed
                    ret_weather.append({
                        "short": day["weather"][0]["main"],
                        "temps": [int(day["temp"]["min"]), int(day["temp"]["max"])],
                    })
                # mark the cache valid only after a fully successful parse
                self.last_fetch_location = location
                self.last_weather = ret_weather
                self.last_fetch = datetime.datetime.now()
            except Exception:  # narrowed from bare except: network error or unexpected payload
                ret_weather = []
        else:
            ret_weather = self.last_weather
        return ret_weather