from BeautifulSoup import BeautifulSoup
import re

from dtr.utils.utils import fetchResponseUsingProxy

# Static request headers sent with every Flipkart fetch.
headers = {
    'User-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11',
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Accept-Language': 'en-US,en;q=0.8',
    'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
    'Cookie': 'T=TI141257426738726661427143281839817329423126740566618323641725716448; __sonar=7237334677420142002; __gads=ID=c8b82101a0e4f451:T=1412574724:S=ALNI_MbPMbEOZj2nAGjM54z8ZHFMqwTOTQ; FK-CMP-DATA=; SN=2.VI11FB3FB6ED9D4693A796AB8C965B3417.SI802C325AC43444858830E870C4FD3324.VS141257426735693951472.1412576209; VID=2.VI11FB3FB6ED9D4693A796AB8C965B3417.1412576209.VS141257426735693951472; NSID=2.SI802C325AC43444858830E870C4FD3324.1412576209.VI11FB3FB6ED9D4693A796AB8C965B3417; __utma=19769839.709301254.1412574234.1412574234.1412574234.1; __utmb=19769839.23.10.1412574234; __utmc=19769839; __utmz=19769839.1412574234.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none); s_cc=true; gpv_pn=SellerListing%3AMobile%3AMicromax%20Canvas%20Fire%20A093; gpv_pn_t=no%20value; s_sq=%5B%5BB%5D%5D; s_ppv=36',
    'Host': 'www.flipkart.com',
    'Accept-Encoding': 'gzip,deflate,sdch',
}


class FlipkartScraper:

    def __init__(self):
        self.count_trials = 0
        self.redirectCount = 0

    def read(self, url):
        """Fetch the seller listing page and hand it off to the parser."""
        try:
            # Quick fix: these headers should eventually come from configuration.
            response_data = fetchResponseUsingProxy(url, headers)
        except Exception as e:
            print 'ERROR: ', e
            print 'Retrying'
            self.count_trials += 1
            if self.count_trials < 3:
                return self.read(url)
            raise  # give up after three attempts instead of using an undefined response
        self.response_data = response_data
        return self.createSoup(url)

    # def scrapeRedirectedPage(self, soup, redirect_url):
    #     print soup
    #     print redirect_url
    #     t = soup.find("div", {"class": "seller-table fk-user-select-none line"})
    #     print t
    #     table_rows = t.findAll("tr", {"class": re.compile('t-row.*')})
    #     print table_rows
    #     for x in table_rows:
    #         print x

    def createSoup(self, url):
        print "Creating soup from flipkart data for %s" % (url)
        page = self.response_data.decode("utf-8")
        self.soup = BeautifulSoup(page, convertEntities=BeautifulSoup.HTML_ENTITIES)
        page = None
        self.response_data = None  # free the raw response once the soup is built
        print "Soup created from flipkart data for %s" % (url)
        return self.scrape(self.soup, url)

    def scrape(self, soup, url):
        """Extract per-seller price data; odd and even rows share the same markup."""
        print "Inside json creator for %s" % (url)
        info = []
        buyBoxInfo = []
        # Seller rows alternate between the "odd " and "even " class variants,
        # but both are parsed identically.
        sellers = (soup.findAll("div", {"class": "line seller-item odd "}) +
                   soup.findAll("div", {"class": "line seller-item even "}))
        for data in sellers:
            temp = {}
            price = data.find('span', attrs={'class': re.compile('pxs-final-price.*')}).string.strip('Rs.').strip()
            temp['sellingPrice'] = float(price)
            for metrics in data.find("div", {"class": "fk-text-right"}):
                try:
                    metric = metrics.findAll('input', {'type': 'submit'})
                except AttributeError:
                    # Plain text nodes inside the div have no findAll().
                    continue
                try:
                    inputTags = metric[0]['data-lst-buytrend']
                except (TypeError, IndexError, KeyError):
                    continue
                dataMetrics = metric[0]['data-listing-metrics']
                dataMetric = dataMetrics.split(';')
                temp['sellingPriceMetric'] = float(dataMetric[1])
                try:
                    temp['shippingFee'] = float(dataMetric[2])
                except (IndexError, ValueError):
                    temp['shippingFee'] = 0.0
                # The buy-trend flag precedes the 'NWSR' (or 'WSR') marker; drop the underscores.
                try:
                    buyTrend = inputTags[0:str(inputTags).index('NWSR')].replace('_', '')
                except ValueError:
                    buyTrend = inputTags[0:str(inputTags).index('WSR')].replace('_', '')
                temp['buyTrend'] = buyTrend.strip()
                temp['sellingPrice'] = temp['sellingPrice'] + temp['shippingFee']
                if temp['buyTrend'] in ('PrefNCheap', 'PrefCheap'):
                    buyBoxInfo.append(temp)
                info.append(temp)
        print info
        print "==========="
        print buyBoxInfo
        print "Returning Json response from flipkart for %s" % (url)
        return info, buyBoxInfo


if __name__ == '__main__':
    scraper = FlipkartScraper()
    x, z = scraper.read('http://www.flipkart.com/ps/MOBDUZSYZCA7HDYW')
    for y in x:
        print y
    print "==========="
    for t in z:
        print t
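# ---------------------------------------------------------------------------
# Assumption (not part of this module): the scraper expects
# dtr.utils.utils.fetchResponseUsingProxy(url, headers) to fetch the URL
# through a proxy and return the raw response body as a byte string.
# The commented-out stub below is only a hypothetical sketch of that
# interface for standalone testing without the dtr package; it does a plain
# direct fetch and does not handle gzip-encoded responses, so the real helper
# may behave differently.
#
# import urllib2
#
# def fetchResponseUsingProxy(url, headers):
#     # Minimal stand-in: build a request with the given headers and return
#     # the body bytes, without any proxy or retry logic.
#     request = urllib2.Request(url, headers=headers)
#     return urllib2.urlopen(request).read()
# ---------------------------------------------------------------------------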