'''
Created on 24-Aug-2011

@author: Varun Gupta
'''
from BeautifulSoup import BeautifulSoup
from BaseScraper import BaseScraper
from Utils import removePriceFormatting
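
# BaseScraper and Utils are project-local modules not shown here. For
# standalone reading, minimal sketches of what the two imports presumably
# provide (assumptions, not the project's actual implementations):
#
#   import re
#   import urllib2
#
#   class BaseScraper(object):
#       def read(self, url):
#           # Fetch the raw HTML of a page.
#           return urllib2.urlopen(url).read()
#
#   def removePriceFormatting(price):
#       # Keep digits only, e.g. 'Rs. 12,345' -> '12345'.
#       return re.sub(r'[^0-9]', '', str(price))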

class InfibeamScraper(BaseScraper):

    def __init__(self):
        self.url = None
        self.id = None

    def setUrl(self, url):
        self.url = url

    def scrape(self):
        # Fetch the listing page and keep the parsed tree for getPhones()
        # and getNextUrl().
        html = BaseScraper.read(self, self.url)
        self.soup = BeautifulSoup(html)

    def getPhones(self):
        phone_prices = []
        for li in self.soup.findAll('ul', {'class': 'srch_result portrait'})[0].findAll('li'):

            name = li.find('span', {'class': 'title'}).contents[1].strip()
            try:
                price = li.find('div', {'class': 'price'}).find('span', {'class': 'normal'}).string
            except (IndexError, AttributeError):
                # Some listings carry the price in a bare span instead of
                # the usual div > span.normal structure.
                price = removePriceFormatting(li.find('span', {'class': 'price'}).contents[-1].strip())

            url = li.findAll('a')[0]['href']

            try:
                phone_prices.append({
                    'name': str(name),
                    'price': removePriceFormatting(str(price)),
                    'source': 'infibeam',
                    'in_stock': 1,
                    'product_url': str(url)
                })

            except UnicodeEncodeError as e:
                # str() cannot encode non-ASCII names; blank out those
                # characters and store an ASCII-only name instead.
                print 'Unicode Error', e, name
                name_ascii = "".join([char if ord(char) < 128 else " " for char in name])
                print name_ascii
                phone_prices.append({
                    'name': str(name_ascii),
                    'price': removePriceFormatting(str(price)),
                    'source': 'infibeam',
                    'in_stock': 1,
                    'product_url': str(url)
                })

        return phone_prices
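
    # Each record in the list that getPhones() returns has this shape
    # (illustrative values, not live data):
    #   {'name': 'HTC EVO 3D', 'price': '12345', 'source': 'infibeam',
    #    'in_stock': 1, 'product_url': 'http://www.infibeam.com/Mobiles/...'}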

    def getNextUrl(self):
        # The results summary puts the current range and the total in two
        # <b> tags, e.g. "<b>1 - 20</b> of <b>543</b>".
        b = self.soup.findAll('div', {'class': 'resultsSummary'})[0].findAll('b')
        current_max = int(b[0].string.split('-')[1])
        total_products = int(b[1].string)

        return 'http://www.infibeam.com/Mobiles/search?page=%d' % (1 + current_max / 20) if current_max < total_products else None
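
    # Worked example for getNextUrl(): if the summary reads "1 - 40 of 543",
    # then current_max = 40 and the next page is 1 + 40/20 = 3 (Python 2
    # integer division; the listing shows 20 results per page).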

    def getDataFromProductPage(self, url):
        html = BaseScraper.read(self, url)
        soup = BeautifulSoup(html)
        name = soup.find('div', {'id': 'ib_details'}).find('h1', {'class': 'fn'}).find('span', {'class': 'item'}).string.strip()
        price = removePriceFormatting(soup.find('div', {'id': 'priceDiv'}).find('span', {'class': 'infiPrice amount price'}).string)
        # The status span reads 'In Stock.' (with the full stop) when the
        # item is available.
        in_stock = soup.find('div', {'id': 'colors'}).find('span', {'class': 'status'}).string.strip()

        data = {
            'product_url': str(url),
            'source': 'infibeam',
            'price': price,
            'in_stock': 1 if in_stock == 'In Stock.' else 0,
            'name': name
        }
        return data

if __name__ == '__main__':
    s = InfibeamScraper()
    # print s.getDataFromProductPage('http://www.infibeam.com/Mobiles/i-HTC-EVO-3D-Android-Smartphone/P-E-M-HTC-EVO-3D.html?id=Black')
    s.setUrl('http://www.infibeam.com/Mobiles/search?page=5')
    s.scrape()
    products = s.getPhones()

    print products
    print s.getNextUrl()
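
    # A full crawl would chain the methods above (a sketch, not part of the
    # original script):
    #
    #   while s.url:
    #       s.scrape()
    #       products.extend(s.getPhones())
    #       s.setUrl(s.getNextUrl())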