Better web search. Re-added reddit to bot commands
@@ -1,4 +1,5 @@
-from . import google
 from . import keys
-from . import reddit
-from . import weather
+from . import weather
+from . import reddit
+from . import search
@@ -1,20 +0,0 @@
import googlesearch


def query(params):
    param_string = ""
    for word in params:
        param_string += word + "+"
    param_string = param_string[:-1]
    search_url = "https://google.com/search?q=" + param_string

    try:
        res = googlesearch.search(param_string.replace("+", " "), num=5, start=0, stop=5)
        send_string = "Results for <b>" + param_string.replace("+", " ") + "</b>:\n\n"
        for url in res:
            send_string += url + "\n\n"
        send_string += "Search url:\n" + search_url
    except:
        send_string = "Search url:\n" + search_url

    return send_string
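A minimal usage sketch of the removed helper, assuming it was invoked from a bot command handler with the command's argument list (the bot.api module path is an assumption, taken from the fallback import in the reddit module):

# --- usage sketch, not part of this commit ---
# Hypothetical call site; module path and argument list are illustrative only.
from bot.api import google

reply = google.query(["python", "asyncio", "tutorial"])
# reply is an HTML-formatted string ("Results for <b>python asyncio tutorial</b>:"
# followed by up to five result URLs), or just the raw search URL if the lookup fails.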
@@ -1,50 +1,56 @@
import praw
try:
    import bot.api.keys as keys
except:
    import keys

stream = praw.Reddit(client_id=keys.reddit_id, client_secret=keys.reddit_secret, user_agent=keys.reddit_user_agent)


def get_top(subreddit, number, return_type="text"):
    if return_type == "text":
        message = ""
        try:
            for submission in stream.subreddit(subreddit).top(limit=number):
                if not submission.stickied:
                    message += "<b>" + submission.title + "</b>" + "\n" + submission.selftext + "\n\n\n"
            return message
        except:
            return "Api call failed, sorry"
    else:
        images = []
        try:
            for submission in stream.subreddit(subreddit).top(limit=number):
                if not submission.stickied:
                    t = {"image": submission.url, "caption": submission.title}
                    images.append(t)
            return images
        except:
            return ["Api call failed, sorry"]


def get_random_rising(subreddit, number, return_type="text"):
    if return_type == "text":
        message = ""
        try:
            for submission in stream.subreddit(subreddit).random_rising(limit=number):
                if not submission.stickied:
                    message += "<b>" + submission.title + "</b>" + "\n" + submission.selftext + "\n\n\n"
            return message
        except:
            return "Api call failed, sorry"
    else:
        images = []
        try:
            for submission in stream.subreddit(subreddit).random_rising(limit=number):
                if not submission.stickied:
                    t = {"image": submission.url, "caption": submission.title}
                    images.append(t)
            return images
        except:
            return ["Api call failed, sorry"]


class RedditFetch():
    def __init__(self, key):
        self.stream = praw.Reddit(client_id=key["id"], client_secret=key["secret"], user_agent=key["user_agent"])

    def get_top(self, subreddit, number, return_type="text"):
        if return_type == "text":
            posts = []
            try:
                for submission in self.stream.subreddit(subreddit).top(limit=number):
                    p = {}
                    if not submission.stickied:
                        p["title"] = submission.title
                        p["content"] = submission.selftext
                        posts.append(p)
                return posts
            except:
                return []
        else:
            images = []
            try:
                for submission in self.stream.subreddit(subreddit).top(limit=number):
                    if not submission.stickied:
                        t = {"image": submission.url, "caption": submission.title}
                        images.append(t)
                return images
            except:
                return []

    def get_random_rising(self, subreddit, number, return_type="text"):
        if return_type == "text":
            posts = []
            try:
                for submission in self.stream.subreddit(subreddit).random_rising(limit=number):
                    p = {}
                    if not submission.stickied:
                        p["title"] = submission.title
                        p["content"] = submission.selftext
                        posts.append(p)
                return posts
            except:
                return []
        else:
            images = []
            try:
                for submission in self.stream.subreddit(subreddit).random_rising(limit=number):
                    if not submission.stickied:
                        t = {"image": submission.url, "caption": submission.title}
                        images.append(t)
                return images
            except:
                return []
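The new RedditFetch class takes its credentials as a dict instead of importing the keys module itself. A minimal usage sketch, assuming the dict carries the same three fields the module-level code read from keys; the import path and key values are placeholders:

# --- usage sketch, not part of this commit ---
from bot.api.reddit import RedditFetch  # assumed module path

reddit = RedditFetch({
    "id": "<reddit client id>",
    "secret": "<reddit client secret>",
    "user_agent": "<descriptive user agent>",
})

# Text posts arrive as [{"title": ..., "content": ...}, ...]; image posts as
# [{"image": url, "caption": title}, ...]. Both variants return [] on failure.
for post in reddit.get_top("python", 5):
    print(post["title"])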
bot2/api/search.py (new file, 21 lines)
@@ -0,0 +1,21 @@
import duckduckpy


class WebSearch():
    def __init__(self):
        self.search = duckduckpy.query

    def get_result(self, query):
        try:
            res = []
            response = self.search(query, container="dict")["related_topics"]
            for r in response:
                if "text" in r:
                    res.append({
                        "text": r["text"],
                        "url": r["first_url"]
                    })
        except:
            res = ["Connection error"]
        return res

# TODO: this api has more potential. Extract images or quick facts!
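Toward that TODO, a rough sketch of how the same duckduckpy response could be mined for a quick fact or thumbnail. The key names (answer, abstract_text, image, heading) are assumed to mirror the DuckDuckGo Instant Answer API fields as snake_cased by duckduckpy's dict container, and should be verified before use:

# --- extension sketch, not part of this commit ---
import duckduckpy


def get_quick_fact(query):
    # Key names are assumptions based on the Instant Answer API
    # (Answer, AbstractText, Image, Heading); verify against duckduckpy.
    try:
        response = duckduckpy.query(query, container="dict")
    except Exception:
        return None
    fact = response.get("answer") or response.get("abstract_text")
    if not fact:
        return None
    return {
        "heading": response.get("heading", ""),
        "fact": fact,
        "image": response.get("image", ""),
    }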