"""Compare prices for a search query across Amazon.in, Flipkart and Snapdeal."""

import re
import subprocess
import sys

import bs4
import requests

# Search-URL prefixes; the user's query is appended directly.
amazon = 'http://www.amazon.in/s/?url=search-alias%3Daps&field-keywords='
snapdeal = 'http://www.snapdeal.com/search?keyword='
flipkart = 'http://www.flipkart.com/search?q='
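# The raw query is appended as-is; requests percent-encodes spaces in the
# URL before sending, so no manual quoting is needed for simple queries.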


def openweb(url):
    # Open the URL in the system browser, discarding xdg-open's output.
    subprocess.call(['xdg-open', url],
                    stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT)


def get_src(site, args):
    # Fetch the search page and return parsed soup, or None on failure.
    req = requests.get(site + args)
    if not req:  # a requests Response is falsy for 4xx/5xx status codes
        print('Connection failed for ' + site)
        return None
    return bs4.BeautifulSoup(req.text, 'html.parser')


def show_amz(args):
    soup = get_src(amazon, args)
    if not soup:
        return None
    # Product names live in the 'data-attribute' of the result-title tags.
    title = []
    ctr = 5
    for name in soup.find_all(True, {'class': ['a-size-base', 'a-color-null',
                                               's-inline', 's-access-title',
                                               'a-text-normal']}):
        val = name.get('data-attribute')
        if val is not None:
            title.append(val)
            ctr -= 1
            if ctr == 0:
                break

    ctr = 0
    for pr in soup.select('a.a-link-normal.a-text-normal > '
                          'span.a-size-base.a-color-price.s-price.a-text-bold'):
        # Keep only the numeric part of the price text (digits, commas, dot).
        x = re.findall(r'[\d,.]+', pr.get_text())
        if x and ctr < len(title):
            print('Amazon -> ' + title[ctr] + ': ' + x[0])
        ctr += 1
        if ctr == 5:
            break
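
# NOTE: the Amazon selectors above are tied to the result-page markup at the
# time of writing; if the layout changes, only those class names should need
# updating.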


def show_flip(args):
    flip_title = '.fk-display-block'
    soup = get_src(flipkart, args)
    if not soup:
        return None
    title_link = soup.select(flip_title)
    price_link = soup.select('.pu-final')

    # Collect up to five product titles; skip tags without a 'title' attribute.
    product = []
    res = 5
    for title in title_link:
        if res == 0:
            break
        prd = str(title.get('title')).strip()
        if prd != 'None':
            res -= 1
            product.append(prd)

    # Collect up to five matching prices.
    price = []
    res = 5
    for val in price_link:
        if res == 0:
            break
        pr = val.get_text().strip()
        if pr:
            res -= 1
            price.append(pr)

    # Key by product name: duplicate prices would overwrite each other.
    flip_result = dict(zip(product, price))
    for prd, pr in flip_result.items():
        print('FlipKart -> ' + prd + ': ' + pr)


def show_snap(args):
    res = 5
    snap_title = ('.product-tuple-description > '
                  '.product-desc-rating a > .product-title')
    soup = get_src(snapdeal, args)
    if not soup:
        return None
    title_link = soup.select(snap_title)
    price_link = soup.select('.product-price')

    product = []
    price = []
    # Titles and prices arrive as parallel lists, so zip pairs them up.
    for title, val in zip(title_link, price_link):
        if res == 0:
            break
        res -= 1
        product.append(title.get_text().strip())
        price.append(val.get_text().strip())

    # Key by product name so identical prices don't collapse into one entry.
    snap_result = dict(zip(product, price))
    for prd, pr in snap_result.items():
        print('SnapDeal -> ' + prd + ': ' + pr)


def main(args):
    print('Pulling results from the web...\n')
    show_flip(args)
    print('-' * 124)
    show_snap(args)
    print('-' * 124)
    show_amz(args)
    print('-' * 124)


if __name__ == '__main__':
    args = sys.argv[1:]
    if not args:
        print('Usage: [search query] -> shows results for the query from '
              'Amazon, Flipkart and Snapdeal; with no args the main page '
              'of each site is searched')
    args = ' '.join(args)
    main(args)
    query = input('>> q(quit), open web page with: '
                  'az(amazon), fk(flipkart), sd(snapdeal): ')
    if query == 'az':
        openweb(amazon + args)
    elif query == 'fk':
        openweb(flipkart + args)
    elif query == 'sd':
        openweb(snapdeal + args)
    else:
        sys.exit(1)
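
# Example invocation (the script name here is hypothetical):
#   $ python3 pricecheck.py moto g
# Prints up to five title/price pairs per site, then prompts for a site to
# open in the browser ('q', or any other input, exits).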