scrape.py — 76 lines (50 loc) · 1.67 KB
"""
Author: Yusuf Ozkan
Twitter API
Scrapes Trends in Turkey
"""
# NOTE(review): numpy is imported but never used anywhere visible in this
# file — candidate for removal once confirmed unused elsewhere.
import numpy as np
import requests
import base64
import json
# NOTE(review): 'sayi' (Turkish: "number/count") is assigned but never read in
# the visible code — presumably a leftover counter; confirm before deleting.
sayi = 0
def get_tweets():
    """Fetch the current top Twitter trends for Istanbul and write them to
    ``public/data.json`` as a JSON array of trend names (also printed to stdout).

    Flow: exchange the consumer key/secret for an application-only bearer
    token (OAuth2 client_credentials), then query the v1.1
    ``trends/place.json`` endpoint for WOEID 2344116 (Istanbul, TR).

    Raises:
        requests.HTTPError: if the auth or trends request returns an error
            status (previously a failed auth surfaced as an opaque KeyError).
        OSError: if ``public/data.json`` cannot be written.
    """
    consumer_key = 'YOUR CONSUMER KEY'
    consumer_secret = 'YOUR CONSUMER SECRET KEY'

    # Application-only auth: base64("key:secret") is exchanged for a bearer token.
    key_secret = '{}:{}'.format(consumer_key, consumer_secret).encode('ascii')
    b64_encoded_key = base64.b64encode(key_secret).decode('ascii')

    base_url = 'https://api.twitter.com/'
    auth_url = '{}oauth2/token'.format(base_url)
    auth_headers = {
        'Authorization': 'Basic {}'.format(b64_encoded_key),
        'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8'
    }
    auth_data = {
        'grant_type': 'client_credentials'
    }
    auth_resp = requests.post(auth_url, headers=auth_headers, data=auth_data)
    # Fail loudly on a rejected auth instead of KeyError on the missing token.
    auth_resp.raise_for_status()
    access_token = auth_resp.json()['access_token']

    trend_headers = {
        'Authorization': 'Bearer {}'.format(access_token)
    }
    # woeid of TR Istanbul
    trend_params = {
        'id': 2344116,
    }
    trend_url = 'https://api.twitter.com/1.1/trends/place.json'
    trend_resp = requests.get(trend_url, headers=trend_headers, params=trend_params)
    trend_resp.raise_for_status()
    tweet_data = trend_resp.json()

    # Take at most the first 20 trend names. The original hard-coded
    # range(0, 20) and raised IndexError when fewer than 20 trends existed.
    data = [trend['name'] for trend in tweet_data[0]['trends'][:20]]
    print(data)

    with open('public/data.json', 'w', encoding='utf-8') as f:
        json.dump(data, f, ensure_ascii=False, indent=4)
if __name__ == "__main__":
    # Run a single scrape when invoked as a script.
    # (An earlier revision polled on a schedule via the `schedule` package.)
    get_tweets()