Subversion Repositories SmartDukaan

Rev

Rev 13935 | Rev 14168 | Go to most recent revision | Details | Compare with Previous | Last modification | View Log | RSS feed

Rev Author Line No. Line
import urllib2
from BeautifulSoup import BeautifulSoup
import re
from sys import exit

class FlipkartScraper:
7
    def __init__(self):
8
        self.count_trials = 0
9
        self.redirectCount = 0
10
 
11
    def read(self, url):
12
        #url = url.replace('www.flipkart.com','163.53.77.21')
13
        url = url.replace('www.flipkart.com','163.53.76.55')
14
        print url
15
        request = urllib2.Request(url)
16
        request.add_header('Accept','text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8')
17
        #request.add_header('Accept-Charset','ISO-8859-1,utf-8;q=0.7,*;q=0.3')
18
        #request.add_header('Accept-Encoding','gzip,deflate,sdch')
19
        request.add_header('Accept-Language','en-US,en;q=0.8,hi;q=0.6')
20
        request.add_header('Connection','keep-alive')
21
        request.add_header('Cookie','T=TI141257426738726661427143281839817329423126740566618323641725716448; __sonar=7237334677420142002; __gads=ID=c8b82101a0e4f451:T=1412574724:S=ALNI_MbPMbEOZj2nAGjM54z8ZHFMqwTOTQ; FK-CMP-DATA=; SN=2.VI11FB3FB6ED9D4693A796AB8C965B3417.SI802C325AC43444858830E870C4FD3324.VS141257426735693951472.1412576209; VID=2.VI11FB3FB6ED9D4693A796AB8C965B3417.1412576209.VS141257426735693951472; NSID=2.SI802C325AC43444858830E870C4FD3324.1412576209.VI11FB3FB6ED9D4693A796AB8C965B3417; __utma=19769839.709301254.1412574234.1412574234.1412574234.1; __utmb=19769839.23.10.1412574234; __utmc=19769839; __utmz=19769839.1412574234.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none); s_cc=true; gpv_pn=SellerListing%3AMobile%3AMicromax%20Canvas%20Fire%20A093; gpv_pn_t=no%20value; s_sq=%5B%5BB%5D%5D; s_ppv=36')
22
        request.add_header('Cache-Control','max-age=0')
23
        request.add_header('Host','www.flipkart.com')
24
        request.add_header('User-Agent','Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.120 Safari/537.36')
25
        response_data = ""
26
        redirect_url = ""
27
        try:
28
            response = urllib2.urlopen(request)
29
            response_data = response.read()
14157 kshitij.so 30
            response.close()
13831 kshitij.so 31
            print "Fetched response from flipkart for %s" %(url)
32
            redirect_url = response.url
33
 
34
        except Exception as e:
35
            print 'ERROR: ', e
36
            print 'Retrying'
37
            self.count_trials += 1
38
 
39
            if self.count_trials < 3:
40
                return self.read(url)
41
 
42
        self.response_data=response_data
43
        return self.createData(url,redirect_url)
44
 
45
    def scrapeRedirectedPage(self,soup,redirect_url):
46
        print soup
47
        print redirect_url
48
        t = soup.find("div" , {"class" : "seller-table fk-user-select-none line"})
49
        print t
50
        table_rows = t.findAll("tr" , {"class" : re.compile('t-row.*')})
51
        print table_rows
52
        for x in table_rows:
53
            print x
54
 
55
    def createData(self,url, redirect_url):
56
        print "Creating soup from flipkart data for %s" %(url)
57
        #redirect_url = redirect_url.replace('www.flipkart.com','163.53.77.21')
58
        print "Redirect url is %s"%(redirect_url)
59
        page=self.response_data.decode("utf-8")
60
        self.soup = BeautifulSoup(page,convertEntities=BeautifulSoup.HTML_ENTITIES)
61
        page = None
62
        self.response_data = None
63
        print "Soup created from flipkart data for %s" %(url)
64
        if (url==redirect_url):
65
            return self.scrape(self.soup,url)
66
        else:
67
            print self.redirectCount
68
            self.redirectCount+=1
69
            if self.redirectCount >5:
70
                raise
71
            return self.read(url)
72
 
73
 
74
 
75
 
76
    def scrape(self,soup,url):
77
        print "Inside json creator for %s" %(url)
78
        info = []
79
        oddSeller = soup.findAll("div" , {"class" : "line seller-item odd "})
80
        for data in oddSeller:
81
            temp={}
82
            try:
83
                businessDays = data.find('span', attrs={'class' : re.compile('fk-deliverable.*')})
84
                shippingTime = businessDays.find('span', attrs={'class' : re.compile('fk-bold')}).string.replace('to','').replace('business days.','').strip().replace('  ','-')
85
                temp['shippingTime']=shippingTime
86
            except:
87
                pass
88
            price = data.find('span', attrs={'class' : re.compile('pxs-final-price.*')}).string.strip('Rs.').strip()
89
            temp['sellingPrice']=float(price)
90
            for sellerInfo in data.findAll("div",{"class":re.compile(".*seller-info*")}):
91
                sellerName = sellerInfo.find('a').string
92
                temp['sellerName'] = sellerName
93
            for metrics in data.find("div",{"class":"fk-text-right"}):
94
                try:
95
                    metric = metrics.findAll('input', {'type': 'submit'})
96
                except AttributeError:
97
                    continue
98
                try:
99
                    inputTags = metric[0]['data-lst-buytrend']
100
                except TypeError:
101
                    continue
102
                dataMetrics = metric[0]['data-listing-metrics']
103
                try:
104
                    buyTrend = inputTags[0:str(inputTags).index('NWSR')].replace('_','')
105
                except ValueError:
106
                    buyTrend = inputTags[0:str(inputTags).index('WSR')].replace('_','')
107
                temp['buyTrend']=buyTrend
108
                dataMetric = dataMetrics.split(';')
109
                sellerCode = dataMetric[0]
110
                temp['sellerCode']=sellerCode
111
                temp['sellingPriceMetric'] = float(dataMetric[1])
112
                if not temp.has_key('shippingTime'):
113
                    print "Populating shipping time from metrics"
114
                    temp['shippingTime'] = dataMetric[3]
115
                temp['sellerScore'] = int(dataMetric[4])
13935 kshitij.so 116
                try:
117
                    temp['shippingFee'] = float(dataMetric[2])
118
                except:
119
                    temp['shippingFee'] = 0.0
120
                temp['sellingPrice'] = temp['sellingPrice'] + temp['shippingFee']  
13831 kshitij.so 121
                info.append(temp)
122
        evenSeller = soup.findAll("div" , {"class" : "line seller-item even "})
123
        for data in evenSeller:
124
            temp={}
125
            price = data.find('span', attrs={'class' : re.compile('pxs-final-price.*')}).string.strip('Rs.')
126
            try:
127
                businessDays = data.find('span', attrs={'class' : re.compile('fk-deliverable.*')})
128
                shippingTime = businessDays.find('span', attrs={'class' : re.compile('fk-bold')}).string.replace('to','').replace('business days.','').strip().replace('  ','-')
129
                temp['shippingTime']=shippingTime
130
            except:
131
                pass
132
            temp['sellingPrice']=float(price)
133
            for sellerInfo in data.findAll("div",{"class":re.compile(".*seller-info*")}):
134
                sellerName = sellerInfo.find('a').string
135
                temp['sellerName'] = sellerName
136
            for metrics in data.find("div",{"class":"fk-text-right"}):
137
                try:
138
                    metric = metrics.findAll('input', {'type': 'submit'})
139
                except AttributeError:
140
                    continue
141
                try:
142
                    inputTags = metric[0]['data-lst-buytrend']
143
                except TypeError:
144
                    continue
145
                dataMetrics = metric[0]['data-listing-metrics']
146
                try:
147
                    buyTrend = inputTags[0:str(inputTags).index('NWSR')].replace('_','')
148
                except ValueError:
149
                    buyTrend = inputTags[0:str(inputTags).index('WSR')].replace('_','')
150
                temp['buyTrend']=buyTrend
151
                dataMetric = dataMetrics.split(';')
152
                temp['sellerCode'] = dataMetric[0] 
153
                temp['sellingPriceMetric'] = float(dataMetric[1])
154
                if not temp.has_key('shippingTime'):
155
                    print "Populating shipping time from metrics"
156
                    temp['shippingTime'] = dataMetric[3]
157
                temp['sellerScore'] = int(dataMetric[4])
13935 kshitij.so 158
                try:
159
                    temp['shippingFee'] = float(dataMetric[2])
160
                except:
161
                    temp['shippingFee'] = 0.0
162
                temp['sellingPrice'] = temp['sellingPrice'] + temp['shippingFee']  
13831 kshitij.so 163
                info.append(temp)
164
        print info
165
        print "Returning Json response from flipkart for %s" %(url)
166
        return info
167
 
if __name__ == '__main__':
    # Smoke test: fetch one known product listing and scrape its sellers.
    FlipkartScraper().read('http://www.flipkart.com/ps/MOBDZB3Q8WJNKVHG')