-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path wow_fetch.py
151 lines (108 loc) · 4.33 KB
/
wow_fetch.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
import ssl
import urllib.request
from abc import ABC
from bs4 import BeautifulSoup as Bs
from calculate_price import OrderRecord
class WowFetch(ABC):
    """Scraper base class for a WoW gold marketplace.

    Subclasses provide the site-specific URLs and identifiers through the
    ``get_*`` hooks; :meth:`fetch_orders` then crawls every selected realm for
    both factions and returns a list of ``OrderRecord``.
    """

    # Maximum number of listings inspected per realm/faction page.
    MAX_FETCH = 20
    # Minimum gold stock a listing must offer to be considered the "cheapest".
    MIN_GOLD_STOCK = 300

    # ---- subclass hooks --------------------------------------------------
    def get_servers_url(self):
        """URL of the page containing the realm <select id='server'> element."""
        pass

    def get_order_url(self):
        """Format-string URL for a realm's order page: ``.format(realm_id, race_id)``."""
        pass

    def get_my_account(self):
        """Seller name of our own account, used to flag our own listings."""
        pass

    def get_alliance_id(self):
        """Site-specific identifier for the Alliance faction."""
        pass

    def get_horde_id(self):
        """Site-specific identifier for the Horde faction."""
        pass

    def get_realms_name(self):
        pass

    def get_keyword(self):
        return ""

    def is_keyword_include(self):
        pass

    def get_batch_size(self):
        pass

    def should_process(self, realm: str):
        """Return True if the given realm should be crawled (default: skip all)."""
        return False

    # ---- crawling --------------------------------------------------------
    def fetch_orders(self):
        """Crawl every selected realm for both factions.

        Returns:
            list[OrderRecord]: one record per (realm, faction) pair with the
            cheapest eligible price, its gold stock, and whether the seller
            is our own account.
        """
        soup = self.__fetch_realms_info()
        realms_id, realms_name = self.__process_realms_info(soup)
        races_ids = [self.get_alliance_id(), self.get_horde_id()]
        order_list = []
        for realm_id, realm_name in zip(realms_id, realms_name):
            # 'all' is the aggregate option of the server <select>; skip it.
            if realm_id.lower() == 'all':
                continue
            if not self.should_process(realm_name):
                continue
            print(realm_name + ',', end='')
            for race_id in races_ids:
                order_record = OrderRecord()
                order_record.realm_name = realm_name
                soup = self.__fetch_order_info(realm_id, race_id)
                cheapest_price, gold_stock, my_account = self.__process_order_info(soup)
                order_record.server_price = cheapest_price
                order_record.gold_stock = gold_stock
                order_record.is_my_account = my_account
                order_record.set_alliance(race_id == self.get_alliance_id())
                order_list.append(order_record)
        return order_list

    def __process_order_info(self, soup):
        """Extract ``(cheapest_price, gold_stock, is_my_account)`` from an order page.

        Picks the first listing whose stock is >= MIN_GOLD_STOCK, falling back
        to the last fetched listing; returns ``(0.0, 0.0, False)`` when the
        page has no usable listing.
        """
        desired_index = -1
        gold_stock = 0.0
        # stock - total gold offered per listing
        tags = soup.findAll("span", {"class": "products__statistic-amount"},
                            limit=WowFetch.MAX_FETCH)
        for t, tag in enumerate(tags):
            value = (str(tag.text).strip().replace(" ", "")
                     .replace('K', '').replace('Gold', '').strip().replace(',', ''))
            gold_stock = float(value)
            # BUGFIX: fall back to the LAST available listing; the original
            # compared against MAX_FETCH - 1, which never matched on pages
            # with fewer than MAX_FETCH listings.
            if gold_stock >= WowFetch.MIN_GOLD_STOCK or t == len(tags) - 1:
                desired_index = t
                break
        if desired_index < 0:
            return 0.0, 0.0, False
        # price per gold, e.g. "1 Gold = 0.05 US$"
        tags = soup.findAll("span", {"class": "products__exch-rate"},
                            limit=WowFetch.MAX_FETCH)
        # BUGFIX: findAll returns a list, never None; the real hazard is the
        # price column having fewer entries than the stock column. The
        # original returned a bare 0 here, crashing the 3-value unpack in
        # fetch_orders.
        if desired_index >= len(tags):
            return 0.0, 0.0, False
        value = (str(tags[desired_index].text).strip().replace(" ", "")
                 .replace('K', '').replace('1Gold=', '').strip())
        cheapest_price = float(value.replace('US$', ''))
        # is the cheapest eligible seller our own account?
        tags = soup.findAll("a", {"class": "seller__name"}, limit=WowFetch.MAX_FETCH)
        if desired_index >= len(tags):
            # Price/stock are valid; we simply could not identify the seller.
            return cheapest_price, gold_stock, False
        value = str(tags[desired_index].text).strip().replace(" ", "")
        my_account = value == self.get_my_account()
        return cheapest_price, gold_stock, my_account

    def __fetch_order_info(self, realm_id, race_id):
        """Download and parse the order page for one realm/faction pair."""
        realm_url = self.get_order_url().format(realm_id, race_id)
        # Certificate verification is deliberately disabled (matches
        # __fetch_realms_info); use the public API instead of the private
        # ssl._create_unverified_context().
        context = ssl.create_default_context()
        context.check_hostname = False
        context.verify_mode = ssl.CERT_NONE
        ssl_handler = urllib.request.HTTPSHandler(context=context)
        opener = urllib.request.build_opener(ssl_handler)
        # cookie forces the site to quote prices in USD
        opener.addheaders.append(('Cookie', 'g2g_regional=%7B%22currency%22%3A%22USD%22%2C%22language%22%3A%22en%22%7D'))
        html = opener.open(realm_url)
        return Bs(html, features="html.parser")

    @staticmethod
    def __process_realms_info(soup):
        """Return ``(ids, names)`` of all realms from the server <select> element."""
        options = soup.find(id='server').find_all('option')
        realms_id = [str(o['value']) for o in options]
        realms_name = [str(o.text) for o in options]
        return realms_id, realms_name

    def __fetch_realms_info(self):
        """Download and parse the realm-list page (TLS verification disabled)."""
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
        html = urllib.request.urlopen(self.get_servers_url(), context=ctx)
        return Bs(html, features="html.parser")