# Subversion Repositories SmartDukaan
#
# Rev
#
# Rev 12764 | Rev 12766 | Go to most recent revision | Blame | Compare with Previous | Last modification | View Log | RSS feed

import urllib2
from BeautifulSoup import BeautifulSoup
import re
from sys import exit

class FlipkartScraper:
    def __init__(self):
        self.count_trials = 0
        self.redirectCount = 0
    
    def read(self, url):
        print url.replace('http://www.flipkart.com','163.53.77.21')
        print url
        request = urllib2.Request(url)
        request.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux i686; rv:31.0) Gecko/20100101 Firefox/31.0')
        opener = urllib2.build_opener()
        response_data = ""
        try:
            response = urllib2.urlopen(request)
            response_data = response.read()
            print "Fetched response from flipkart for %s" %(url)
            redirect_url = response.url
            
        except urllib2.HTTPError as e:
            print 'ERROR: ', e
            print 'Retrying'
            self.count_trials += 1
            
            if self.count_trials < 3:
                return self.read(url)
        
        self.response_data=response_data
        return self.createData(url,redirect_url)
    
    def scrapeRedirectedPage(self,soup,redirect_url):
        print soup
        print redirect_url
        t = soup.find("div" , {"class" : "seller-table fk-user-select-none line"})
        print t
        table_rows = t.findAll("tr" , {"class" : re.compile('t-row.*')})
        print table_rows
        for x in table_rows:
            print x
    
    def createData(self,url, redirect_url):
        print "Creating soup from flipkart data for %s" %(url)
        print redirect_url
        page=self.response_data.decode("utf-8")
        self.soup = BeautifulSoup(page,convertEntities=BeautifulSoup.HTML_ENTITIES)
        page = None
        self.response_data = None
        print "Soup created from flipkart data for %s" %(url)
        if (url==redirect_url):
            return self.scrape(self.soup,url)
        else:
            print self.redirectCount
            self.redirectCount+=1
            if self.redirectCount >4:
                raise
            return self.read(url)
            
            
    
    
    def scrape(self,soup,url):
        print "Inside json creator for %s" %(url)
        info = []
        oddSeller = soup.findAll("div" , {"class" : "line seller-item odd "})
        for data in oddSeller:
            temp={}
            try:
                businessDays = data.find('span', attrs={'class' : re.compile('fk-deliverable.*')})
                shippingTime = businessDays.find('span', attrs={'class' : re.compile('fk-bold')}).string.replace('to','').replace('business days.','').strip().replace('  ','-')
                temp['shippingTime']=shippingTime
            except:
                pass
            price = data.find('span', attrs={'class' : re.compile('pxs-final-price.*')}).string.strip('Rs.').strip()
            temp['sellingPrice']=float(price)
            for sellerInfo in data.findAll("div",{"class":re.compile(".*seller-info*")}):
                sellerName = sellerInfo.find('a').string
                temp['sellerName'] = sellerName
            for metrics in data.find("div",{"class":"fk-text-right"}):
                try:
                    metric = metrics.findAll('input', {'type': 'submit'})
                except AttributeError:
                    continue
                try:
                    inputTags = metric[0]['data-lst-buytrend']
                except TypeError:
                    continue
                dataMetrics = metric[0]['data-listing-metrics']
                try:
                    buyTrend = inputTags[0:str(inputTags).index('NWSR')].replace('_','')
                except ValueError:
                    buyTrend = inputTags[0:str(inputTags).index('WSR')].replace('_','')
                temp['buyTrend']=buyTrend
                dataMetric = dataMetrics.split(';')
                sellerCode = dataMetric[0]
                temp['sellerCode']=sellerCode
                temp['sellingPriceMetric'] = float(dataMetric[1])
                if not temp.has_key('shippingTime'):
                    print "Populating shipping time from metrics"
                    temp['shippingTime'] = dataMetric[3]
                temp['sellerScore'] = int(dataMetric[4])
                info.append(temp)
        evenSeller = soup.findAll("div" , {"class" : "line seller-item even "})
        for data in evenSeller:
            temp={}
            price = data.find('span', attrs={'class' : re.compile('pxs-final-price.*')}).string.strip('Rs.')
            try:
                businessDays = data.find('span', attrs={'class' : re.compile('fk-deliverable.*')})
                shippingTime = businessDays.find('span', attrs={'class' : re.compile('fk-bold')}).string.replace('to','').replace('business days.','').strip().replace('  ','-')
                temp['shippingTime']=shippingTime
            except:
                pass
            temp['sellingPrice']=float(price)
            for sellerInfo in data.findAll("div",{"class":re.compile(".*seller-info*")}):
                sellerName = sellerInfo.find('a').string
                temp['sellerName'] = sellerName
            for metrics in data.find("div",{"class":"fk-text-right"}):
                try:
                    metric = metrics.findAll('input', {'type': 'submit'})
                except AttributeError:
                    continue
                try:
                    inputTags = metric[0]['data-lst-buytrend']
                except TypeError:
                    continue
                dataMetrics = metric[0]['data-listing-metrics']
                try:
                    buyTrend = inputTags[0:str(inputTags).index('NWSR')].replace('_','')
                except ValueError:
                    buyTrend = inputTags[0:str(inputTags).index('WSR')].replace('_','')
                temp['buyTrend']=buyTrend
                dataMetric = dataMetrics.split(';')
                temp['sellerCode'] = dataMetric[0] 
                temp['sellingPriceMetric'] = float(dataMetric[1])
                if not temp.has_key('shippingTime'):
                    print "Populating shipping time from metrics"
                    temp['shippingTime'] = dataMetric[3]
                temp['sellerScore'] = int(dataMetric[4])
                info.append(temp)
        print "Returning Json response from flipkart for %s" %(url)
        return info

if __name__ == '__main__':
    scraper = FlipkartScraper()
    print scraper.read('http://www.flipkart.com/ps/MOBDYFURT9PKAPSX')