# Exported from the SmartDukaan Subversion repository, rev 4203.
# (SVN web-viewer header removed so the file parses as Python.)

'''
Created on 24-Aug-2011

@author: Varun Gupta
'''

from BeautifulSoup import BeautifulSoup
from BaseScraper import BaseScraper

class FlipcartScraper(BaseScraper):
    '''Scrapes mobile-phone/tablet listings from Flipkart category pages.

    Intended call sequence: setUrl() -> scrape() -> getPhones() -> getNextUrl().
    HTML selectors target the 2011-era Flipkart markup (BeautifulSoup 3 API).
    '''

    def __init__(self):
        BaseScraper.__init__(self)
        self.url = None
        self.id = None
        self.soup = None    # parsed page, populated by scrape()
        # Initialize to an empty list (not None) so getNextUrl() is safe
        # to call even before getPhones() has run.
        self.phones = []

    def setUrl(self, url):
        # Listing-page URL to be fetched by scrape().
        self.url = url

    def scrape(self):
        # Fetch the page through the base class and parse it; any phones
        # extracted from a previous page are discarded.
        html = BaseScraper.read(self, self.url)
        self.soup = BeautifulSoup(html)
        self.phones = []

    def getPhones(self):
        '''Extract product entries from the page parsed by scrape().

        Returns a list of dicts with keys 'name', 'price', 'product_url'
        and 'in_stock' (1 = available, 0 = out of stock).  Tiles without
        a final price are skipped.  The result is also cached on
        self.phones for use by getNextUrl().
        '''
        phones = []

        for div in self.soup.findAll('div', {'class': 'fk-product-thumb fkp-medium'}):
            anchors = div.findAll('a', {'class': 'title fk-anchor-link'})
            if not anchors:
                continue  # tile without a title link -- skip, don't crash
            try:
                name = anchors[0]['title'].strip()
                product_url = anchors[0]['href'].strip()
            except KeyError:
                continue  # malformed anchor (missing title/href attribute)

            # A <b> tag inside the tile marks the item as out of stock.
            in_stock = 0 if div.findAll('b') else 1

            price = None
            for span in div.findAll('span'):
                try:
                    if span['class'].find('price final-price') > -1:
                        price = span.string.strip()
                except KeyError:
                    pass  # span without a class attribute -- ignore

            if price is None:
                continue  # no final price shown -- not a sellable listing

            try:
                phones.append({'name': str(name), 'price': str(price),
                               'product_url': str(product_url),
                               'in_stock': in_stock})
            except UnicodeEncodeError as e:
                # Python 2 str() fails on non-ASCII product names: replace
                # the offending characters with spaces and keep the entry.
                print('Unicode Error: %s %s' % (e, name))
                name_ascii = "".join([char if ord(char) < 128 else " " for char in name])
                print(name_ascii)
                phones.append({'name': str(name_ascii), 'price': str(price),
                               'product_url': str(product_url),
                               'in_stock': in_stock})

        self.phones = phones
        return phones

    def getNextUrl(self):
        '''Return the URL of the next listing page, or None when on the
        last page (or when the current page yielded no phones).
        '''
        tab_info = self.soup.findAll('div', {'class': 'unit fk-lres-header-text'})[0]('b')
        # tab_info[0] reads like "1-20" (items shown), tab_info[1] is the
        # total item count for the category.
        current_max = int(tab_info[0].string.split('-')[1])
        total = int(tab_info[1].string)

        if not self.phones:
            return None
        if current_max >= total:
            return None

        # Phones and tablets live under different category paths; decide
        # from the first scraped product URL.
        if self.phones[0]['product_url'].find('/tablets/') == -1:
            base_url = 'http://www.flipkart.com/mobiles/all/'
        else:
            base_url = 'http://www.flipkart.com/mobiles/tablet-20278/'

        # 20 items per page -> integer division gives the next page index.
        return base_url + str(1 + (current_max / 20))


if __name__ == '__main__':
    # Ad-hoc smoke test: scrape a single listing page and dump results.
    scraper = FlipcartScraper()
    scraper.setUrl('http://www.flipkart.com/mobiles/all/27')
    scraper.scrape()
    phones = scraper.getPhones()
    for phone in phones:
        print(phone)
    print(scraper.getNextUrl())