-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathmyntra_link_scraper.py
80 lines (63 loc) · 2.08 KB
/
myntra_link_scraper.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
#! /usr/bin/python3
# myntra_scraper by Architrixs, 15 May 2021
import sys
import time

import bs4
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.firefox.options import Options
def help():
    """Print command-line usage for this script and terminate the process.

    NOTE: the name shadows the builtin ``help``; it is kept because the
    argument-parsing code below calls it by this name. Exits via
    ``sys.exit()`` (status 0) rather than the site-module ``exit()``,
    which is not guaranteed to exist in all interpreters.
    """
    print("""Usage :-
$ ./myntra_link_scraper.py [Arg1: n, Number of pages to look upto] [Arg2: outputFileName.txt]
$ ./myntra_link_scraper.py --help or -h # Show usage
Example: $ ./myntra_link_scraper.py 20 output_links.txt
\n""")
    sys.exit()
#example_url = 'https://www.myntra.com/men-tshirts?p=1&rows=100'

# Global accumulator for scraped product URLs; a set deduplicates links
# that appear on more than one listing page.
product_links = set()

# Show usage and quit unless exactly two positional arguments were given.
# (The len(sys.argv)==1 check runs first, so sys.argv[1] is never read
# when no arguments exist.)
if len(sys.argv)==1 or sys.argv[1]== '--help' or sys.argv[1]=='-h' or len(sys.argv)<3:
    help()
if len(sys.argv) == 3:
    page_upto = int(sys.argv[1])       # number of listing pages to scrape
    output_file_name = sys.argv[-1]    # path of the output text file
else:
    help()

# Run Firefox without a visible window.
# NOTE(review): `options.headless = True` is the Selenium 3.x API; Selenium
# 4.10+ removed the attribute in favour of
# options.add_argument("-headless") — confirm the installed version.
options = Options()
options.headless = True
# Create your driver
driver = webdriver.Firefox(options = options)
def get_product_links(url):
    """Load one Myntra listing page and collect its product URLs.

    Fetches *url* with the module-level Selenium ``driver``, extracts the
    HTML of the ``results-base`` container, and adds the ``href`` of every
    ``li.product-base`` card to the global ``product_links`` set.
    Terminates the whole script with a non-zero status if the page or the
    results container cannot be loaded.
    """
    print(url)
    try:
        print('getting page')
        driver.get(url)
        # find_element_by_class_name was removed in Selenium 4; the By
        # locator form works on both Selenium 3 and 4.
        elem = driver.find_element(By.CLASS_NAME, 'results-base')
        code = elem.get_attribute('innerHTML')
    except Exception as e:
        # Network failure or changed page structure: report and abort with
        # status 1 so shells/callers can detect the failure (the original
        # bare exit() reported success with status 0).
        print(e)
        sys.exit(1)
    print("making soup...")
    soup_res = bs4.BeautifulSoup(code, 'html.parser')
    #print(soup_res)
    data = soup_res.find_all('li', {'class': 'product-base'})
    for d in data:
        # Guard against cards lacking an anchor or href rather than
        # crashing mid-scrape on one malformed listing item.
        anchor = d.find('a')
        if anchor is not None and anchor.has_attr('href'):
            product_links.add(anchor['href'])
def get_page_links(page_links):
    """Scrape product links from every listing-page URL in *page_links*.

    Results accumulate in the module-level ``product_links`` set as a side
    effect of each ``get_product_links`` call.
    """
    for listing_url in page_links:
        get_product_links(listing_url)
def main():
    """Drive the scrape: build listing URLs, collect links, write them out.

    Reads the module globals ``page_upto`` and ``output_file_name`` set
    during argument parsing, and appends the collected links (one per
    line) to the output file.
    """
    listing_urls = [
        f'https://www.myntra.com/men-tshirts?p={page}&rows=100'
        for page in range(1, page_upto + 1)
    ]
    started = time.time()
    print("starting driver")
    get_page_links(listing_urls)
    finished = time.time()
    print(f"{finished - started} seconds to download {len(listing_urls)} page links.")
    print("Closing driver, please wait...")
    driver.quit()
    print("Links collected:", len(product_links))
    # Append mode: repeated runs accumulate links in the same file.
    with open(output_file_name, 'a', encoding="utf-8") as out:
        out.writelines(link + '\n' for link in product_links)
    print("File saved", output_file_name)
# Standard script entry point: only scrape when run directly, not imported.
if __name__=="__main__":
    main()