path: root/wikibania/ban/BanDB.py
author      Pacien TRAN-GIRARD    2014-10-24 18:05:48 +0000
committer   Pacien TRAN-GIRARD    2014-10-24 18:05:48 +0000
commit      4403fda939ef42aeffeccb343d74f3dc3b840f91 (patch)
tree        63fd704f15f3030f1455aad0ef92403c5d093c70 /wikibania/ban/BanDB.py
parent      16529a0d212e1387eacd590c0e5e1b1a13dc2641 (diff)
parent      bdf9099df8c2a4636b0ad0e710b73330877eef37 (diff)
download    wikistats-4403fda939ef42aeffeccb343d74f3dc3b840f91.tar.gz
Merge branch 'refactor' into 'master'
Refactor

See merge request !1
Diffstat (limited to 'wikibania/ban/BanDB.py')
-rw-r--r--    wikibania/ban/BanDB.py    50
1 file changed, 50 insertions, 0 deletions
diff --git a/wikibania/ban/BanDB.py b/wikibania/ban/BanDB.py
new file mode 100644
index 0000000..e83aa3c
--- /dev/null
+++ b/wikibania/ban/BanDB.py
@@ -0,0 +1,50 @@
+import json
+
+from wikibania.ban.Ban import Ban
+from wikibania.wapi.WikipediaQuery import BlockQuery
+
+
+class BanDB:
+    def __init__(self, geoip_looker):
+        self.geoip_looker = geoip_looker
+        self.bans = []
+
+    def list(self):
+        return self.bans
+
+    def load(self, ban_list):
+        for entry in ban_list:
+            ban = Ban(self.geoip_looker)
+            ban.hydrate(entry)
+            self.bans.append(ban)
+
+    def load_file(self, file_name):
+        with open(file_name, "r") as file:
+            entries = json.load(file)
+        self.load(entries)
+
+    def dump(self):
+        return [ban.items() for ban in self.bans]
+
+    def dump_file(self, file_name):
+        with open(file_name, "w") as file:
+            ban_list = self.dump()
+            json.dump(ban_list, file)
+
+    def fetch(self, nb_samples, query_limit=500, continue_token=None):
+        fetch = min(nb_samples, query_limit)
+
+        query = BlockQuery(
+            properties=["user", "timestamp", "expiry"],
+            show=["temp", "ip"],
+            limit=fetch,
+            continue_token=continue_token,
+        )
+        results = query.fetch_result()
+
+        entries = results["query"]["blocks"]
+        self.load(entries)
+
+        if nb_samples - fetch > 0:
+            continue_token = results["query-continue"]["blocks"]["bkcontinue"]
+            self.fetch(nb_samples - fetch, query_limit, continue_token)
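
For context, a minimal usage sketch of the BanDB class introduced by this commit. It is not part of the patch: it exercises only the methods visible in the diff above, the geoip_looker value is a hypothetical placeholder for whatever GeoIP resolver the rest of wikistats passes to Ban, and the file name is illustrative. Note that fetch() queries the live Wikipedia API, so it needs network access.

# Usage sketch (not part of the commit). Placeholder names are assumptions,
# not project API.
from wikibania.ban.BanDB import BanDB

geoip_looker = None  # placeholder: the project's GeoIP lookup object goes here

db = BanDB(geoip_looker)

# Fetch 1200 block entries from the Wikipedia API. fetch() requests at most
# query_limit entries per call (500 by default) and recurses with the
# "bkcontinue" continuation token until nb_samples entries have been loaded.
db.fetch(1200)

# Serialise the collected bans to JSON, then reload them into a new instance.
db.dump_file("bans.json")  # illustrative file name

db2 = BanDB(geoip_looker)
db2.load_file("bans.json")
print(len(db2.list()))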