-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathserver.py
91 lines (69 loc) · 2.51 KB
/
server.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
from tornado import ioloop, web, escape, gen, httpclient, httpserver #install
import json
import time
import datetime
import dateutil.parser as parser #install
from multiprocessing.pool import ThreadPool
# One background thread is enough for the single crawler loop; 3 is headroom.
_workers = ThreadPool(3)

# FB Graph access token, kept out of the source tree in token.txt.
# fix: the original left the file handle open; use a context manager.
with open("token.txt", "r") as _token_file:
    token = _token_file.read().strip()
#print(token)
print("restarted")  # parenthesized so it runs on both Python 2 and 3
def checkGroup(id, name):
    """Fetch one Facebook group's feed and attach parsed datetimes.

    id   -- Facebook group id (string), interpolated into the Graph URL.
    name -- human-readable group name, printed as a progress marker.

    Returns the list of post dicts from the feed's 'data' array, each
    augmented with a 'datetime' key (parsed from 'updated_time') so the
    caller can sort chronologically.  Raises on HTTP/JSON errors; the
    caller (crawlGroups) treats any exception as "retry later".
    """
    client = httpclient.HTTPClient()
    try:
        url = ("https://graph.facebook.com/" + id + "/feed?access_token=" +
               token + "&format=json&method=get&suppress_http_code=1")
        resp = client.fetch(url).body
    finally:
        # fix: the HTTPClient was previously never closed, leaking a
        # client (and its underlying resources) on every poll.
        client.close()
    posts = json.loads(resp)['data']
    print(name)
    for post in posts:
        # Parse into a real datetime; crawlGroups sorts on this key.
        post['datetime'] = parser.parse(post['updated_time'])
    return posts
@gen.engine
def crawlGroups():
    """Background loop: crawl every group, write feed.json, sleep, repeat.

    Runs forever on a ThreadPool worker.  On any per-group failure it
    backs off 5 seconds and restarts the whole crawl from scratch --
    the same retry semantics as before, but implemented as a loop.

    fix: the original recursed into crawlGroups() both on error and
    after every 5-minute cycle, so the call stack grew without bound
    and would eventually hit the recursion limit.
    """
    while True:
        print("here")
        posts = []
        failed = False
        for group in groups:
            try:
                # Accumulate every group's posts into one flat list.
                posts.extend(checkGroup(group['id'], group['name']))
            except Exception:
                # Transient fetch/parse error: back off briefly, then
                # restart the entire crawl (original behaviour).
                time.sleep(5)
                failed = True
                break
        if failed:
            continue
        print("done fetching")
        # Newest first, using the datetimes checkGroup attached.
        newest = sorted(posts, key=lambda p: p['datetime'], reverse=True)
        for post in newest:
            # datetime objects are not JSON-serializable; strip them.
            del post['datetime']
        # fix: write through a context manager so the handle is closed.
        with open("feed.json", "w") as f:
            f.write(json.dumps(newest))
        print("waiting")
        time.sleep(300)  # refresh interval: 5 minutes
class APIHandler(web.RequestHandler):
    """JSON API endpoint serving the crawler's cached feed.json.

    Query parameters:
      limit  -- optional; truncate the feed to the first N posts.
                Non-numeric values are ignored (best effort).
      pretty -- if present with any value, pretty-print the JSON.
    """

    def get(self, *args, **kwargs):
        self.set_header("Content-Type", "application/json")
        # fix: read through a context manager; the handle was leaked.
        with open("feed.json", "r") as f:
            body = f.read()
        limit = self.get_argument("limit", None)
        if limit is not None:  # fix: identity comparison per PEP 8
            try:
                body = json.dumps(json.loads(body)[:int(limit)])
            except (ValueError, TypeError):
                # Bad limit or unexpected JSON shape: serve the full
                # feed unchanged, matching the original best-effort.
                pass
        if self.get_argument("pretty", None) is not None:
            pretty = json.dumps(json.loads(body), indent=4,
                                separators=(',', ': '))
            self.write(pretty)
        else:
            self.write(body)
class IndexHandler(web.RequestHandler):
    """Serves the landing page template for the site root."""

    def get(self, *args, **kwargs):
        # Render the static index page; all dynamic data comes from /api.
        self.render("templates/index.html")
# Group list consumed by crawlGroups; each entry carries 'id' and 'name'.
# fix: read through a context manager; the handle was previously leaked.
with open("groups.json", "r") as _groups_file:
    groups = json.loads(_groups_file.read())

app = web.Application([
    (r'/', IndexHandler),
    # NOTE(review): the empty pattern below can never match a real
    # request path (paths always start with '/'); kept for parity but
    # it is almost certainly dead -- confirm and remove.
    (r'', IndexHandler),
    (r'/api', APIHandler),
    (r'/static/(.*)', web.StaticFileHandler, {'path': "static"}),
], debug=True)
if __name__ == '__main__':
    # Launch the crawler on a worker thread so it never blocks the
    # IOLoop, then serve HTTP on port 9009 until interrupted.
    _workers.apply_async(crawlGroups)
    server = httpserver.HTTPServer(app)
    server.listen(9009)
    ioloop.IOLoop.instance().start()