RedditTwitterBot.py
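"""Reddit-to-Twitter bot.

Polls the 'hot' listing of a subreddit via PRAW, downloads each new post's
image, and tweets the post title, image, and permalink via Twython. Posts that
have already been tweeted are recorded in a local cache file so they are not
repeated, and stickied posts are skipped.
"""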
import os
import time
import urllib.parse
from glob import glob

import praw
import requests
from twython import Twython

# Twitter API credentials (fill these in before running).
CONSUMER_KEY = ' '
CONSUMER_SECRET = ' '
ACCESS_TOKEN = ' '
ACCESS_SECRET = ' '

# Reddit API credentials (fill these in before running).
CLIENT_ID = ' '
CLIENT_SECRET = ' '
USER_AGENT = ' '

# Bot configuration.
SUBREDDIT = 'dankmemes'
IMAGE_DIR = ' '           # directory where downloaded images are stored
POSTED_CACHE = ' '        # file that records the IDs of posts already tweeted
MAX_CHARACTER_LENGTH = 280
T_CO_LINKS_LEN = 24       # characters Twitter reserves for a t.co-wrapped link

# IDs of stickied posts, which are skipped when tweeting.
stickied_posts = []

def setup_reddit_connection(subreddit):
    """Create a read-only PRAW client and return the requested subreddit."""
    print('!!setting up reddit connection!!')
    reddit = praw.Reddit(client_id=CLIENT_ID, client_secret=CLIENT_SECRET, user_agent=USER_AGENT)
    return reddit.subreddit(subreddit)

def tweet_creator(subreddit_info):
    """Collect untweeted, non-stickied hot posts and download their images."""
    post_dict = {}
    post_ids = []
    print('!!getting posts!!')
    # Fetch one extra post per sticky so at least one regular post is returned.
    for submission in subreddit_info.hot(limit=1 + len(stickied_posts)):
        if not already_tweeted(submission.id) and submission.id not in stickied_posts:
            post_dict[submission.title] = {}
            post = post_dict[submission.title]
            post['link'] = submission.permalink
            post['img_path'] = get_image(submission.url)
            post_ids.append(submission.id)
        else:
            print('!!skipping (already tweeted or stickied): {}'.format(str(submission)))
    return post_dict, post_ids

def strip_title(title, character_amt):
    """Truncate the title so it fits within the given character budget."""
    if len(title) <= character_amt:
        return title
    return title[:character_amt - 3] + '...'

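# Example: strip_title('a long submission title...', 20) returns the first 17
# characters of the title followed by '...', so the result stays within 20
# characters; shorter titles are returned unchanged.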
def get_stickies(subreddit):
    """Record the IDs of currently stickied posts so they are never tweeted."""
    for sticky in subreddit.hot(limit=10):
        if sticky.stickied:
            stickied_posts.append(sticky.id)

def get_image(img_url):
    """Download the submission's image and return its local path, or '' on failure."""
    if img_url:
        filename = os.path.basename(urllib.parse.urlsplit(img_url).path)
        img_path = os.path.join(IMAGE_DIR, filename)
        print('!!!downloading img url ' + img_url + ' to ' + img_path + ' !!!')
        resp = requests.get(img_url, stream=True)
        if resp.status_code == 200:
            with open(img_path, 'wb') as image_file:
                for chunk in resp:
                    image_file.write(chunk)
            return img_path
        print('!!! image failed to download. status code: ' + str(resp.status_code) + ' !!!')
    else:
        print('!!!No Image!!!')
    return ''

def tweeter(post_dict, post_ids):
    """Tweet each collected post: title, 'www.reddit.com' + permalink, and image."""
    api = Twython(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_TOKEN, ACCESS_SECRET)
    for post, post_id in zip(post_dict, post_ids):
        img_path = post_dict[post]['img_path']
        extra_text = post_dict[post]['link']
        # Budget one character for the space plus one t.co link for the permalink,
        # and another t.co link's worth of characters if an image is attached.
        extra_text_len = 1 + T_CO_LINKS_LEN
        if img_path:
            extra_text_len += T_CO_LINKS_LEN
        post_text = strip_title(post, MAX_CHARACTER_LENGTH - extra_text_len) + ' www.reddit.com' + extra_text
        print('!!! posting link on Twitter!!!')
        print(post_text)
        if img_path:
            print('!!! w/ image ' + img_path + ' !!!')
            with open(img_path, 'rb') as img:
                response = api.upload_media(media=img)
            api.update_status(status=post_text, media_ids=[response['media_id']])
        else:
            api.update_status(status=post_text)
        log_tweet(post_id)

def log_tweet(post_id):
    with open(POSTED_CACHE, 'a') as out_file:
        out_file.write(str(post_id) + '\n')

def already_tweeted(post_id):
    found = False
    with open(POSTED_CACHE, 'r') as in_file:
        for line in in_file:
            if post_id in line:
                found = True
                break
    return found

def main():
    subreddit = setup_reddit_connection(SUBREDDIT)
    get_stickies(subreddit)
    while True:
        # Make sure the cache file and image directory exist before each pass.
        if not os.path.exists(POSTED_CACHE):
            with open(POSTED_CACHE, 'w'):
                pass
        if not os.path.exists(IMAGE_DIR):
            os.makedirs(IMAGE_DIR)
        post_dict, post_ids = tweet_creator(subreddit)
        tweeter(post_dict, post_ids)
        # Clean up downloaded images, then wait five minutes before the next check.
        for filename in glob(IMAGE_DIR + '/*'):
            os.remove(filename)
        time.sleep(300)

if __name__ == '__main__':
    main()
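# To run the bot: fill in the Twitter and Reddit credentials plus IMAGE_DIR and
# POSTED_CACHE above, then start it with, for example:
#     python RedditTwitterBot.py
# The loop checks the subreddit every 300 seconds and deletes downloaded images
# after each pass.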