from BeautifulSoup import BeautifulSoup
from dtr.utils.utils import fetchResponseUsingProxy
import json
import re
import traceback


headers = { 
            'User-agent':'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.120 Safari/537.36',
            'Accept' : 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',      
            'Accept-Language' : 'en-US,en;q=0.8',                     
            'Accept-Charset' : 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
            'Cookie':'T=TI141257426738726661427143281839817329423126740566618323641725716448; __sonar=7237334677420142002; __gads=ID=c8b82101a0e4f451:T=1412574724:S=ALNI_MbPMbEOZj2nAGjM54z8ZHFMqwTOTQ; FK-CMP-DATA=; SN=2.VI11FB3FB6ED9D4693A796AB8C965B3417.SI802C325AC43444858830E870C4FD3324.VS141257426735693951472.1412576209; VID=2.VI11FB3FB6ED9D4693A796AB8C965B3417.1412576209.VS141257426735693951472; NSID=2.SI802C325AC43444858830E870C4FD3324.1412576209.VI11FB3FB6ED9D4693A796AB8C965B3417; __utma=19769839.709301254.1412574234.1412574234.1412574234.1; __utmb=19769839.23.10.1412574234; __utmc=19769839; __utmz=19769839.1412574234.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none); s_cc=true; gpv_pn=SellerListing%3AMobile%3AMicromax%20Canvas%20Fire%20A093; gpv_pn_t=no%20value; s_sq=%5B%5BB%5D%5D; pincode=110011; s_ppv=36',
            'Connection':'keep-alive',
            'Accept-Encoding' : 'gzip,deflate,sdch'
        }

class FlipkartProductPageScraper:
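    """Scrapes a Flipkart product page for the lowest selling price, buy-box price and stock status."""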
    def __init__(self):
        self.count_trials = 0
        self.redirectCount = 0
    
    def read(self, url):
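        """Fetch the product page through the proxy helper, retrying up to 3 times, then parse it."""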
        response_data = ""
        redirect_url = ""
        try:
            
            """quick fix,need to add it conf""" 
            
            response_data = fetchResponseUsingProxy(url, headers)                
            print "Fetched response from flipkart for %s" %(url)
            #redirect_url = response.url

        except Exception as e:
            traceback.print_exc()
            print 'ERROR: ', e
            print 'Retrying'
            self.count_trials += 1

            if self.count_trials < 3:
                return self.read(url)
            # Retries exhausted: fail loudly rather than parsing an empty response.
            raise Exception("Failed to fetch %s after %d attempts" % (url, self.count_trials))

        # Reset the retry counter so later calls start with a fresh budget.
        self.count_trials = 0
        self.response_data = response_data
        return self.createData(url, redirect_url)
    
    def scrapeRedirectedPage(self,soup,redirect_url):
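        """Debug helper: dump the seller table rows of a redirected listing page (currently not wired into the read() flow)."""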
        print soup
        print redirect_url
        t = soup.find("div" , {"class" : "seller-table fk-user-select-none line"})
        print t
        table_rows = t.findAll("tr" , {"class" : re.compile('t-row.*')})
        print table_rows
        for x in table_rows:
            print x
    
    def createData(self,url, redirect_url):
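        """Decode the fetched HTML as UTF-8, build a BeautifulSoup tree (converting HTML entities) and scrape it."""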
        print "Creating soup from flipkart data for %s" %(url)
        #redirect_url = redirect_url.replace('www.flipkart.com','163.53.77.21')
        print "Redirect url is %s"%(redirect_url)
        page = self.response_data.decode("utf-8")
        self.soup = BeautifulSoup(page, convertEntities=BeautifulSoup.HTML_ENTITIES)
        page = None
        self.response_data = None
        print "Soup created from flipkart data for %s" %(url)
        print redirect_url
        return self.scrape(self.soup,url)
    
    def scrape(self,soup,url):
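        """Parse the multi-seller 'data-config' JSON for the lowest selling price; fall back to the
        single-seller selling-price span (and out-of-stock marker) when the seller table is missing."""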
        try:
            print "data-config"
            seller_wrap = soup.find('div', {'class': 'seller-table-wrap section'})
            if seller_wrap is None:
                raise Exception("seller-table-wrap section not found for %s" % url)
            x = json.loads(seller_wrap['data-config'])['dataModel']
            lines = sorted(x, key=lambda k: k['priceInfo'].get('sellingPrice', 0), reverse=False)
            sellingPrice =  float(lines[0]['priceInfo']['sellingPrice'])
            try:
                offerText = lines[0]['offerInfo']['listingOffers'][0]['description']
            except:
                offerText = ""
            buyBoxPrice = float(soup.find('span',{'class':'selling-price omniture-field'})['data-evar48'])
            return {'lowestSp':sellingPrice,'inStock':1,'buyBoxPrice':buyBoxPrice}
        except:
            # Not able to parse the seller table wrap section, probably because there is only a single seller.
            buyBoxPrice = float(soup.find('span',{'class':'selling-price omniture-field'})['data-evar48'])
            sellingPrice = buyBoxPrice
            if soup.find('div',{'class':'out-of-stock'}) is not None:
                inStock = 0
            else:
                inStock = 1
            return {'lowestSp':sellingPrice,'inStock':inStock,'buyBoxPrice':buyBoxPrice}

if __name__ == '__main__':
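    # Run the scraper against a sample product page and print the parsed prices.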
    scraper = FlipkartProductPageScraper()
    print scraper.read('http://www.flipkart.com/samsung-galaxy-star-advance/p/itmeyfc4vwzwhuva?pid=MOBEYFC44EUDZ9TX')