# Rev 12821 | Blame | Compare with Previous | Last modification | View Log | RSS feed
import urllib2from BeautifulSoup import BeautifulSoupimport refrom sys import exitclass FlipkartScraper:def __init__(self):self.count_trials = 0self.redirectCount = 0def read(self, url):url = url.replace('www.flipkart.com','163.53.77.21')print urlrequest = urllib2.Request(url)request.add_header('Accept','text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8')#request.add_header('Accept-Charset','ISO-8859-1,utf-8;q=0.7,*;q=0.3')#request.add_header('Accept-Encoding','gzip,deflate,sdch')request.add_header('Accept-Language','en-US,en;q=0.8,hi;q=0.6')request.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux i686; rv:31.0) Gecko/20100101 Firefox/31.0')request.add_header('Connection','keep-alive')request.add_header('Cookie','T=TI141106533261202044684051011971166779542511900764606324691282640130; __gads=ID=683ebf052dfc3143:T=1411293573:S=ALNI_MZ_Ii5vGWTfpp24h4M8eqj95_ctPA; __sonar=7756033766217071307; buyer=0; is_loggedin=1; km_lv=x; _ga=GA1.2.1763496909.1411627333; kvcd=1411645515976; km_ai=m2z93iskuj81qiid; km_ni=m2z93iskuj81qiid; TGSRC=semcmpid%3Asem_8024046704_brand_goog; GOOGSRC=semcmpid%3Asem_8024046704_brand_goog; currentSession=present; sessionCount=0; prd_day=6|1411762819830; visitCount=0; _we_wk_ss_lsf_=true; FK-CMP-DATA=; s_ppv=42; km_uq=; Tkt=67af0938; SN=2.VI45A1DC8A40884B39A24FBA0584587E3C.SI737D7515E5C94593A5DD0F9D1CFDCD20.VS141165407206939742793.1411654071; VID=2.VI45A1DC8A40884B39A24FBA0584587E3C.1411654071.VS141165407206939742793; NSID=2.SI737D7515E5C94593A5DD0F9D1CFDCD20.1411654071.VI45A1DC8A40884B39A24FBA0584587E3C; __utma=19769839.146415981.1411293538.1411647571.1411654082.5; __utmb=19769839.3.8.1411654082; __utmc=19769839; __utmz=19769839.1411647571.4.4.utmgclid=CMu2ifys_MACFQyTjgodWnMAwQ|utmccn=(not%20set)|utmcmd=(not%20set)|utmctr=(not%20provided); s_cc=true; gpv_pn=SellerListing%3AMobile%3AKarbonn%20K105s; gpv_pn_t=no%20value; s_sq=%5B%5BB%5D%5D')request.add_header('Host','www.flipkart.com')request.add_header('User-Agent','Mozilla/5.0 
(X11; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.120 Safari/537.36')response_data = ""redirect_url = ""try:response = urllib2.urlopen(request)response_data = response.read()print "Fetched response from flipkart for %s" %(url)redirect_url = response.urlexcept Exception as e:print 'ERROR: ', eprint 'Retrying'self.count_trials += 1if self.count_trials < 3:return self.read(url)self.response_data=response_datareturn self.createData(url,redirect_url)def scrapeRedirectedPage(self,soup,redirect_url):print soupprint redirect_urlt = soup.find("div" , {"class" : "seller-table fk-user-select-none line"})print ttable_rows = t.findAll("tr" , {"class" : re.compile('t-row.*')})print table_rowsfor x in table_rows:print xdef createData(self,url, redirect_url):print "Creating soup from flipkart data for %s" %(url)redirect_url = redirect_url.replace('www.flipkart.com','163.53.77.21')print "Redirect url is %s"%(redirect_url)page=self.response_data.decode("utf-8")self.soup = BeautifulSoup(page,convertEntities=BeautifulSoup.HTML_ENTITIES)page = Noneself.response_data = Noneprint "Soup created from flipkart data for %s" %(url)if (url==redirect_url):return self.scrape(self.soup,url)else:print self.redirectCountself.redirectCount+=1if self.redirectCount >5:raisereturn self.read(url)def scrape(self,soup,url):print "Inside json creator for %s" %(url)info = []oddSeller = soup.findAll("div" , {"class" : "line seller-item odd "})for data in oddSeller:temp={}try:businessDays = data.find('span', attrs={'class' : re.compile('fk-deliverable.*')})shippingTime = businessDays.find('span', attrs={'class' : re.compile('fk-bold')}).string.replace('to','').replace('business days.','').strip().replace(' ','-')temp['shippingTime']=shippingTimeexcept:passprice = data.find('span', attrs={'class' : re.compile('pxs-final-price.*')}).string.strip('Rs.').strip()temp['sellingPrice']=float(price)for sellerInfo in data.findAll("div",{"class":re.compile(".*seller-info*")}):sellerName = 
sellerInfo.find('a').stringtemp['sellerName'] = sellerNamefor metrics in data.find("div",{"class":"fk-text-right"}):try:metric = metrics.findAll('input', {'type': 'submit'})except AttributeError:continuetry:inputTags = metric[0]['data-lst-buytrend']except TypeError:continuedataMetrics = metric[0]['data-listing-metrics']try:buyTrend = inputTags[0:str(inputTags).index('NWSR')].replace('_','')except ValueError:buyTrend = inputTags[0:str(inputTags).index('WSR')].replace('_','')temp['buyTrend']=buyTrenddataMetric = dataMetrics.split(';')sellerCode = dataMetric[0]temp['sellerCode']=sellerCodetemp['sellingPriceMetric'] = float(dataMetric[1])if not temp.has_key('shippingTime'):print "Populating shipping time from metrics"temp['shippingTime'] = dataMetric[3]temp['sellerScore'] = int(dataMetric[4])info.append(temp)evenSeller = soup.findAll("div" , {"class" : "line seller-item even "})for data in evenSeller:temp={}price = data.find('span', attrs={'class' : re.compile('pxs-final-price.*')}).string.strip('Rs.')try:businessDays = data.find('span', attrs={'class' : re.compile('fk-deliverable.*')})shippingTime = businessDays.find('span', attrs={'class' : re.compile('fk-bold')}).string.replace('to','').replace('business days.','').strip().replace(' ','-')temp['shippingTime']=shippingTimeexcept:passtemp['sellingPrice']=float(price)for sellerInfo in data.findAll("div",{"class":re.compile(".*seller-info*")}):sellerName = sellerInfo.find('a').stringtemp['sellerName'] = sellerNamefor metrics in data.find("div",{"class":"fk-text-right"}):try:metric = metrics.findAll('input', {'type': 'submit'})except AttributeError:continuetry:inputTags = metric[0]['data-lst-buytrend']except TypeError:continuedataMetrics = metric[0]['data-listing-metrics']try:buyTrend = inputTags[0:str(inputTags).index('NWSR')].replace('_','')except ValueError:buyTrend = inputTags[0:str(inputTags).index('WSR')].replace('_','')temp['buyTrend']=buyTrenddataMetric = dataMetrics.split(';')temp['sellerCode'] = 
dataMetric[0]temp['sellingPriceMetric'] = float(dataMetric[1])if not temp.has_key('shippingTime'):print "Populating shipping time from metrics"temp['shippingTime'] = dataMetric[3]temp['sellerScore'] = int(dataMetric[4])info.append(temp)print infoprint "Returning Json response from flipkart for %s" %(url)return infoif __name__ == '__main__':scraper = FlipkartScraper()scraper.read('http://www.flipkart.com/ps/MOBDY45GPWHXH9UY')