'''
Created on Jan 15, 2015

@author: amit
'''
from bs4 import BeautifulSoup
from bson.binary import Binary
from datetime import datetime, date, timedelta
from dtr import main
from dtr.api.Service import Orders
from dtr.dao import AffiliateInfo, Order, SubOrder
from dtr.main import getBrowserObject, ScrapeException, getStore, ParseException, \
    Store as MStore, ungzipResponse, tprint
from dtr.storage import Mongo
from dtr.storage.DataService import Order_Parse_Info, All_user_addresses, \
    OrdersRaw
from dtr.storage.Mongo import getImgSrc, getDealRank
from dtr.utils import utils
from dtr.utils.utils import fetchResponseUsingProxy, readSSh
from elixir import *
from pprint import pprint
from pymongo import MongoClient
from pyquery import PyQuery
from urlparse import urlparse, parse_qs
from xlrd import open_workbook
import csv
import json
import os.path
import pymongo
import re
import time
import traceback
import urllib
import urllib2

USERNAME = 'profittill2@gmail.com'
PASSWORD = 'spice@2020'
AFFILIATE_URL = 'http://affiliate.snapdeal.com/login/'
POST_URL = 'https://api-p03.hasoffers.com/v3/Affiliate_Report.json'
ORDER_TRACK_URL = 'https://m.snapdeal.com/orderSummary'
CONFIG_URL = 'http://affiliate.snapdeal.com/publisher/js/config.php'

#NEW_REPORT_URI_TEMPLATE = "http://affiliate.snapdeal.com/affiliate/reports/orders/report/?fromDate=%s&toDate=%s&dump_report=True&request_type=product&status=%s"
AFF_URL_TEMPLATE = "http://affiliate-feeds.snapdeal.com/feed/api/order?startDate=%s&endDate=%s&status=%s"
#"http://affiliate.snapdeal.com/affiliate/reports/orders/report/?fromDate=2015-04-01&toDate=2015-09-15&dump_report=True&request_type=product&status=cancelled
#"http://affiliate.snapdeal.com/affiliate/reports/orders/report/?fromDate=07-09-2015&toDate=13-09-2015&dump_report=True&request_type=product&status=cancelled

AFF_ID = "33550"
AFF_TOKEN = "66d526141b9d39c4b2b4ff76eadc34"

headers = {
    'User-agent':'Mozilla/5.0 (iPhone; CPU iPhone OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5376e Safari/8536.25',
    'Accept' : 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Accept-Language' : 'en-US,en;q=0.8',
    'Accept-Charset' : 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
    'Connection':'keep-alive',
    'Accept-Encoding' : 'gzip,deflate,sdch'
}

class Store(MStore):

    '''
    Maps order statuses of our system to the order statuses of Snapdeal,
    and our statuses will change accordingly.
    '''
    OrderStatusMap = {
        MStore.ORDER_PLACED : ['in progress', 'pending for verification', 'not available', 'in process',
                               'processing', 'processed', 'under verification', 'readying for dispatch',
                               'waiting for courier to pick up', 'processing initiated', 'prepared for dispatch', 'dispatching soon',
                               'item packed. dispatching soon.', 'cancellation requested'],
        MStore.ORDER_DELIVERED : ['delivered', 'delivered successfully!'],
        MStore.ORDER_SHIPPED : ['in transit', 'dispatched', 'handed over to courier', 'undelivered. update delivery details!', 'out for delivery',
                                'undelivered. edit delivery details!', 'undelivered', 'all delivery attempts failed', 'delivery attempt failed',
                                'delivery details updated'],
        MStore.ORDER_CANCELLED : ['closed for vendor reallocation', 'cancelled', 'product returned by courier', 'returned', 'n/a', 'courier returned', 'verification failed',
                                  'a new order placed with a different seller', 'closed', 'cancellation in progress', 'verification failed. order cancelled',
                                  'cancelled. payment refunded', 'closed. new order placed', 'cancelling', 'cancelling order', 'order cancelled',
                                  'payment failed. order cancelled', 'returned to snapdeal', 'pickup sent', 'refund approved', 'refund successful!',
                                  'undelivered. order cancelled', 'order cancelled. refunded successfully', 'return request registered', 'pickup failed. please reschedule',
                                  'replacement order successfully placed', 'replacement request received', 'verification failed due to nbp. order cancelled',
                                  'verification failed due to where customer don\'t want order. order cancelled']
    }
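
    # Example (illustrative note, not part of the original source): _getStatusFromDetailedStatus()
    # lower-cases the status text scraped from Snapdeal and looks it up in the lists above,
    # returning the matching MStore constant. For instance, "Out For Delivery" is expected to map
    # to MStore.ORDER_SHIPPED and "Delivered Successfully!" to MStore.ORDER_DELIVERED; strings
    # that match nothing fall through to None and are logged for mapping.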
    CONF_CB_AMOUNT = MStore.CONF_CB_DISCOUNTED_PRICE

    def __init__(self, store_id):
        super(Store, self).__init__(store_id)

    def getName(self):
        return "snapdeal"

    def scrapeAffiliate(self, startDate=None, endDate=None):

        endDate = date.today() - timedelta(days=1)
        if startDate is None:
            startDate = endDate - timedelta(days=45)

        endDate = endDate.strftime('%Y-%m-%d')
        startDate = startDate.strftime('%Y-%m-%d')
        statuses = ['cancelled', 'approved']

        for status in statuses:
            nextUrl = AFF_URL_TEMPLATE % (startDate, endDate, status)
            while nextUrl:
                req = urllib2.Request(nextUrl)
                nextUrl = ''
                req.add_header('Snapdeal-Affiliate-Id', AFF_ID)
                req.add_header('Snapdeal-Token-Id', AFF_TOKEN)
                req.add_header('Accept', 'application/json')
                resp = urllib2.urlopen(req)
                ordersDict = json.loads(resp.read())

                self._saveToAffiliate(ordersDict['productDetails'], status)
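
    # Example (illustrative sketch; the dates are made up): scrapeAffiliate() above issues
    # requests of the form
    #
    #   url = AFF_URL_TEMPLATE % ('2015-08-01', '2015-09-15', 'approved')
    #   req = urllib2.Request(url)
    #   req.add_header('Snapdeal-Affiliate-Id', AFF_ID)
    #   req.add_header('Snapdeal-Token-Id', AFF_TOKEN)
    #   req.add_header('Accept', 'application/json')
    #
    # and expects a JSON response whose 'productDetails' list is handed to _saveToAffiliate().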

    def _setLastSaleDate(self, saleDate):
        self.db.lastSaleDtate.update({'storeId':self.store_id}, {'$set':{'saleDate':saleDate}})

    def _getLastSaleDate(self,):
        lastDaySaleObj = self.db.lastDaySale.find_one({"storeId":self.store_id})
        if lastDaySaleObj is None:
            return datetime.min

    def _parseB(self, orderId, subTagId, userId, page, orderSuccessUrl):
        soup = BeautifulSoup(page)

        orderDetailContainerDivs = soup.body.find("div", {'class':'cardLayoutWrap'}).findAll('div', recursive=False)
        orderDetailDiv = orderDetailContainerDivs.pop(0)
        paymentDetailDiv = orderDetailContainerDivs.pop(0)
        subOrders = orderDetailContainerDivs

        placedOn = orderDetailDiv.span.text.split(':')[1].strip()
        merchantOrder = Order(orderId, userId, subTagId, self.store_id, orderSuccessUrl)
        merchantOrder.placedOn = placedOn
        merchantOrder.merchantOrderId = parse_qs(urlparse(orderSuccessUrl).query)['order'][0]

        paymentDivs = paymentDetailDiv.findAll('div', recursive=False)
        paymentDivs.pop(0)
        for orderTr in paymentDivs:
            orderTrString = str(orderTr)
            if "Total Amount Paid" in orderTrString:
                amountPaid = orderTr.div.find('div', {'class':'detailBlock'}).text.strip()
                merchantOrder.paidAmount = int(re.findall(r'\d+', amountPaid)[0])
            elif "Total Amount" in orderTrString:
                merchantOrder.totalAmount = re.findall(r'\d+', orderTrString)[0]
            elif "Delivery Charges" in orderTrString:
                merchantOrder.deliveryCharges = re.findall(r'\d+', orderTrString)[0]
            elif "Discount Applied" in orderTrString:
                merchantOrder.discountApplied = re.findall(r'\d+', orderTrString)[0]
            elif "Offer Discount" in orderTrString:
                merchantOrder.discountApplied = re.findall(r'\d+', orderTrString)[0]

        merchantSubOrders = []
        for subOrderElement in subOrders:
            subOrder = self.parseSubOrderB(subOrderElement, placedOn)
            if subOrder is not None:
                dealRank = getDealRank(subOrder.productCode, self.store_id, merchantOrder.userId)
                subOrder.dealRank = dealRank.get('rank')
                subOrder.rankDesc = dealRank.get('description')
                subOrder.maxNlc = dealRank.get('maxNlc')
                subOrder.minNlc = dealRank.get('minNlc')
                subOrder.db = dealRank.get('dp')
                subOrder.itemStatus = dealRank.get('status')
                merchantSubOrders.append(subOrder)
        merchantOrder.subOrders = merchantSubOrders
        return merchantOrder

    def _parse(self, orderId, subTagId, userId, page, orderSuccessUrl):

        #page=page.decode("utf-8")
        soup = BeautifulSoup(page)
        #orderHead = soup.find(name, attrs, recursive, text)
        sections = soup.findAll("section")

        #print sections

        order = sections[1]
        orderTrs = order.findAll("tr")

        placedOn = str(orderTrs[0].findAll("td")[1].text)

        #Pop two section elements
        sections.pop(0)
        sections.pop(0)
        subOrders = sections

        merchantSubOrders = []

        merchantOrder = Order(orderId, userId, subTagId, self.store_id, orderSuccessUrl)
        merchantOrder.placedOn = placedOn
        merchantOrder.merchantOrderId = re.findall(r'\d+', str(soup.find("div", {"class":"deals_heading"})))[1]
        for orderTr in orderTrs:
            orderTrString = str(orderTr)
            if "Total Amount" in orderTrString:
                merchantOrder.totalAmount = re.findall(r'\d+', orderTrString)[0]
            elif "Delivery Charges" in orderTrString:
                merchantOrder.deliveryCharges = re.findall(r'\d+', orderTrString)[0]
            elif "Discount Applied" in orderTrString:
                merchantOrder.discountApplied = re.findall(r'\d+', orderTrString)[0]
            elif "Paid Amount" in orderTrString:
                merchantOrder.paidAmount = re.findall(r'\d+', orderTrString)[0]

        for subOrderElement in subOrders:
            subOrders = self.parseSubOrder(subOrderElement, placedOn)
            merchantSubOrders.extend(subOrders)

        merchantOrder.subOrders = merchantSubOrders
        return merchantOrder

    def parseSubOrder(self, subOrderElement, placedOn):
        subOrders = []
        productUrl = str(subOrderElement.find("a")['href'])
        subTable = subOrderElement.find("table", {"class":"lrPad"})
        subTrs = subTable.findAll("tr")
        unitPrice = None
        offerDiscount = 0
        deliveryCharges = None
        amountPaid = None
        amount = 0
        sdCash = 0
        unitPrice = 0
        for subTr in subTrs:
            subTrString = str(subTr)
            if "Unit Price" in subTrString:
                unitPrice = int(re.findall(r'\d+', subTrString)[0])
            if "Quantity" in subTrString:
                qty = int(re.findall(r'\d+', subTrString)[0])
            elif "Offer Discount" in subTrString:
                offerDiscount += int(re.findall(r'\d+', subTrString)[0])
            elif "SD Cash" in subTrString:
                sdCash = int(re.findall(r'\d+', subTrString)[0])
            elif "Delivery Charges" in subTrString:
                deliveryCharges = int(re.findall(r'\d+', subTrString)[0])
            elif "Subtotal" in subTrString:
                if int(qty) > 0:
                    amountPaid = int(re.findall(r'\d+', subTrString)[0])/qty
                else:
                    amountPaid = 0
        if qty > 0:
            amount = unitPrice - offerDiscount - sdCash
            amount = 0 if amount < 0 else amount

        div1 = subOrderElement.find("div", {"class": "blk lrPad subordrs"})
        if div1 is None:
            raise ParseException("subOrder", "Could not parse suborders for Snapdeal")

        for strDiv in str(div1).split("<div class=\"seperator\"></div>"):
            div = BeautifulSoup(strDiv)
            productTitle = str(subOrderElement.find("a").text)
            productUrl = "http://m.snapdeal.com/" + productUrl
            subOrder = SubOrder(productTitle, productUrl, placedOn, amountPaid)

            subOrder.amountPaid = amountPaid
            subOrder.deliveryCharges = deliveryCharges
            subOrder.offerDiscount = offerDiscount
            subOrder.unitPrice = int(unitPrice)
            subOrder.productCode = re.findall(r'\d+$', productUrl)[0]
            subOrder.imgUrl = Mongo.getImgSrc(subOrder.productCode, self.store_id).get('thumbnail')
            cashbackStatus = Store.CB_NA
            cashbackAmount = 0
            percentage = 0
            if amount > 0:
                (cashbackAmount, percentage) = self.getCashbackAmount(subOrder.productCode, amount)
                if cashbackAmount > 0:
                    cashbackStatus = Store.CB_PENDING
            subOrder.cashBackStatus = cashbackStatus
            subOrder.cashBackAmount = cashbackAmount
            subOrder.cashBackPercentage = percentage

            trackAnchor = div.find("a")
            if trackAnchor is not None:
                subOrder.trackingUrl = str(trackAnchor['href'])

            divStr = str(div)
            divStr = divStr.replace("\n", "").replace("\t", "")

            for line in divStr.split("<br />"):
                if "Suborder ID" in line:
                    subOrder.merchantSubOrderId = re.findall(r'\d+', line)[0]
                elif "Status" in line:
                    print line
                    subOrder.detailedStatus = re.findall('>(.*?)</span>', line, re.IGNORECASE)[0]
                elif "Est. Shipping Date" in line:
                    subOrder.estimatedShippingDate = line.split(":")[1].strip()
                elif "Est. Delivery Date" in line:
                    subOrder.estimatedDeliveryDate = line.split(":")[1].strip()
                elif "Courier Name" in line:
                    subOrder.courierName = line.split(":")[1].strip()
                elif "Tracking No" in line:
                    subOrder.trackingNumber = line.split(":")[1].strip()
            subOrders.append(subOrder)
        return subOrders

    def parseSubOrderB(self, subOrderElement, placedOn):
        subOrders = []
        prodDivs = subOrderElement.findAll('div', recursive=False)
        prodDetailDiv = prodDivs[1].findAll('div', recursive=False)

        offerDiscount = 0
        deliveryCharges = None
        amountPaid = 0
        sdCash = 0
        unitPrice = 0

        paymentDivs = prodDivs[2].findAll('div', recursive=False)
        for paymentDiv in paymentDivs:
            strPaymentDiv = str(paymentDiv)
            if "Unit Price" in strPaymentDiv:
                try:
                    unitPrice = int(re.findall(r'\d+', strPaymentDiv)[0])
                except:
                    return None
            elif "Offer Discount" in strPaymentDiv:
                offerDiscount += int(re.findall(r'\d+', strPaymentDiv)[0])
            elif "Discount" in strPaymentDiv:
                offerDiscount += int(re.findall(r'\d+', strPaymentDiv)[0])
            elif "SD Cash" in strPaymentDiv:
                sdCash = int(re.findall(r'\d+', strPaymentDiv)[0])
            elif "Delivery Charges" in strPaymentDiv:
                deliveryCharges = int(re.findall(r'\d+', strPaymentDiv)[0])
            elif "Subtotal" in strPaymentDiv:
                amountPaid = int(re.findall(r'\d+', paymentDiv.find('div', {'class':'itemPriceDetail'}).text)[0])

        amount = unitPrice - offerDiscount - sdCash

        imgDiv = prodDetailDiv[0]
        otherDiv = prodDetailDiv[1]
        productTitle = otherDiv.find('div', {'class':'orderName'}).text.strip()

        productUrl = imgDiv.a['href']
        subOrder = SubOrder(productTitle, productUrl, placedOn, amountPaid)
        subOrder.merchantSubOrderId = prodDivs[0].text.split(':')[1].strip()
        subOrder.detailedStatus = otherDiv.find('div', {'class':'orderStatus'}).span.text.strip()
        if subOrder.detailedStatus.lower() == "processing":
            processingDetailedStatus = subOrderElement.find('div', {'class':'trackingMessage'}).text.strip()
            if processingDetailedStatus.lower() == 'a new order placed with a different seller':
                subOrder.detailedStatus = processingDetailedStatus
        deliveryStatus = otherDiv.find('div', {'class':'orderDelivery'})
        if deliveryStatus is not None:
            delString = deliveryStatus.text.strip()
            arr = delString.split(':')
            if "On" in arr[0]:
                subOrder.deliveredOn = arr[1].strip()
            elif "Exp. Delivery by" in arr[0]:
                subOrder.estimatedDeliveryDate = arr[1].strip()
            elif "Est. delivery between" in arr[0]:
                subOrder.estimatedDeliveryDate = arr[0].split("between")[1].strip()
            elif "Est. shipping between" in arr[0]:
                subOrder.estimatedShippingDate = arr[0].split("between")[1].strip()
            else:
                subOrder.estimatedShippingDate = arr[1].strip()

        subOrder.imgUrl = imgDiv.a.img['src']
        subOrder.productCode = re.findall(r'\d+$', productUrl)[0]
        subOrder.deliveryCharges = deliveryCharges
        subOrder.offerDiscount = offerDiscount
        subOrder.unitPrice = int(unitPrice)
        cashbackStatus = Store.CB_NA
        cashbackAmount = 0
        percentage = 0
        if amountPaid > 0:
            (cashbackAmount, percentage) = self.getCashbackAmount(subOrder.productCode, amount)
            if cashbackAmount > 0:
                cashbackStatus = Store.CB_PENDING
        subOrder.cashBackStatus = cashbackStatus
        subOrder.cashBackAmount = cashbackAmount
        subOrder.cashBackPercentage = percentage

        courierDet = subOrderElement.find('div', {'class':'courierDetail'})
        if courierDet is not None:
            subOrder.courierName = courierDet.span.text.strip()
        trackingDet = subOrderElement.find('div', {'class':'trackingNo'})
        if trackingDet is not None:
            subOrder.trackingUrl = trackingDet.span.a['href']
            subOrder.trackingNumber = trackingDet.span.a.text.strip()

        subOrders.append(subOrder)
        return subOrder

    def getOrderJSON(self, pq, supcMap):
        #print rawHtml
        # replace_with = {
        #     '&lt;': '<',
        #     '&gt;': '>',
        #     '&amp;': '&',
        #     '&quot;': '"',  # should be escaped in attributes
        #     "&#39;": "'"  # should be escaped in attributes
        # }
        secondryIdentiferSupcMap = {}
        for scriptTag in pq.items("script"):
            if "var reqData1 =" in scriptTag.text():
                match = re.search("(\[.*?\])", scriptTag.text(), re.DOTALL)
                a = match.group(1)
                if a:
                    for mapElement in json.loads(a):
                        secondryIdentiferSupcMap[mapElement["pog_id"]] = mapElement["supc"]
                break
        jsonValue = pq("#orderJSON").attr("value")
        jsonValue = jsonValue.replace("&quot;", '"')
        jsonValue = jsonValue.replace("&amp;", '&')
        jsonValue = jsonValue.replace("&gt;", '>')
        jsonValue = jsonValue.replace("&lt;", '<')
        jsonValue = jsonValue.replace("&#39;", "'")
        allSupcElements = pq('div.mdt-layout')('div.mdt-card')('div.order-item')
        for supcElement in allSupcElements:
            try:
                supcElement = pq(supcElement)
                title = supcElement('div.order-heading').text().strip()
                productUrl = supcElement.attr("data-href")
                imgUrl = supcElement.find('img').attr('src')
                secondaryIdentifier = productUrl.split("/")[-1]
                if secondryIdentiferSupcMap.has_key(secondaryIdentifier):
                    supc = secondryIdentiferSupcMap[secondaryIdentifier]
                else:
                    supc = self.catalogdb.MasterData.find_one({"secondaryIdentifier": secondaryIdentifier, "source_id":self.store_id})
                    if supc:
                        supc = supc['identifier']
                supcMap[supc] = {'title':title, 'imgUrl':imgUrl, 'productUrl':productUrl}
            except:
                pass
        return json.loads(jsonValue)

    def parseOrderRawHtml(self, orderId, subTagId, userId, rawHtml, orderSuccessUrl):
        #print merchantOrder
        resp = {}
        orderPart = re.findall('.*(\?.*?)$', orderSuccessUrl, re.IGNORECASE)[0]
        url = ORDER_TRACK_URL + orderPart
        moId = orderPart.split("order=")[-1].split("&")[0]
        if self.db.merchantOrder.find_one({"merchantOrderId":moId}):
            resp['result'] = 'ORDER_ALREADY_CREATED_IGNORED'
            return resp
        supcMap = {}
        try:
            pq = PyQuery(rawHtml)
            try:
                if pq("title").text() == "Webpage not available":
                    raise Exception("Webpage not available")
                orderJSON = self.getOrderJSON(pq, supcMap)
            except:
                traceback.print_exc()
                resp['result'] = 'ORDER_NOT_CREATED_KNOWN'
                return resp
            '''page =fetchResponseUsingProxy(url)
            try:
                merchantOrder = self._parseB(orderId, subTagId, userId, page, orderSuccessUrl)
            except:
                traceback.print_exc()
                merchantOrder = self._parse(orderId, subTagId, userId, page, orderSuccessUrl)'''

            merchantOrder = self._parseC(orderId, subTagId, userId, supcMap, orderJSON, orderSuccessUrl)
            merchantOrder.orderTrackingUrl = url

            if self._saveToOrder(todict(merchantOrder)):
                resp['result'] = 'ORDER_CREATED'
            else:
                resp['result'] = 'ORDER_ALREADY_CREATED_IGNORED'
            print "=================", resp, orderId, "=============="
            return resp
        except:
            print "Error occurred"
            traceback.print_exc()
            resp['result'] = 'ORDER_NOT_CREATED'
            print "=================", resp, orderId, "=============="
            return resp

    #soup = BeautifulSoup(rawHtml,convertEntities=BeautifulSoup.HTML_ENTITIES)
    #soup.find(name, attrs, recursive, text)
    def _parseC(self, orderId, subTagId, userId, supcMap, orderJSON, orderSuccessUrl):
        print json.dumps(orderJSON)
        merchantOrder = Order(orderId, userId, subTagId, self.store_id, orderSuccessUrl)
        placedOn = datetime.strftime(utils.fromTimeStamp(orderJSON['created']/1000), "%a, %d %b, %Y")
        merchantOrder.placedOn = placedOn
        merchantOrder.merchantOrderId = orderJSON['code']
        merchantOrder.paidAmount = orderJSON['paidAmount']
        merchantOrder.deliveryCharges = orderJSON['shippingCharges']
        merchantOrder.closed = False
        merchantSubOrders = []
        for s in orderJSON['suborders']:
            map1 = supcMap[s['supcCode']]

            amountPaid = s['paidAmount']
            productTitle = map1['title']
            productUrl = map1['productUrl']
            subOrder = SubOrder(productTitle, productUrl, placedOn, amountPaid)
            if(s.get('deliveryDate') is not None):
                print "Delivered On",
                subOrder.deliveredOn = datetime.strftime(utils.fromTimeStamp(s.get('deliveryDate')/1000), '%d %b, %Y')
                subOrder.status = MStore.ORDER_DELIVERED
                subOrder.detailedStatus = MStore.ORDER_DELIVERED
            elif s['suborderStatus'].get('macroDescription') == 'Closed':
                if s['suborderStatus'].get('value') == 'Close for vendor reallocation':
                    subOrder.detailedStatus = 'Close for vendor reallocation'
                    subOrder.status = MStore.ORDER_CANCELLED

            try:
                subOrder.detailedStatus = s['suborderStatus']['macroDescription']
                subOrder.status = self._getStatusFromDetailedStatus(subOrder.detailedStatus)
            except:
                print "----------------", s['suborderStatus']

            subOrder.merchantSubOrderId = s['code']
            subOrder.deliveryCharges = s['shippingCharges']
            subOrder.productCode = re.findall(r'\d+$', productUrl)[0]
            dealRank = getDealRank(subOrder.productCode, self.store_id, merchantOrder.userId)
            subOrder.dealRank = dealRank.get('rank')
            subOrder.rankDesc = dealRank.get('description')
            subOrder.maxNlc = dealRank.get('maxNlc')
            subOrder.minNlc = dealRank.get('minNlc')
            subOrder.db = dealRank.get('dp')
            subOrder.itemStatus = dealRank.get('status')
            subOrder.imgUrl = map1['imgUrl']
            subOrder.unitPrice = s['offerPrice'] - s['internalCashbackValue'] - s['externalCashbackValue']
            subOrder.amount = subOrder.unitPrice - s['offerDiscount'] - s['sdCash']
            try:
                try:
                    if s['shipDateRange']['start'] == s['shipDateRange']['end']:
                        subOrder.estimatedShippingDate = datetime.strftime(utils.fromTimeStamp(s['shipDateRange']['start']/1000), '%d %b, %Y')
                    else:
                        subOrder.estimatedShippingDate = datetime.strftime(utils.fromTimeStamp(s['shipDateRange']['start']/1000), '%d %b, %Y') + " - " + datetime.strftime(utils.fromTimeStamp(s['shipDateRange']['end']/1000), '%d %b, %Y')
                except:
                    if s['deliveryDateRange']['start'] == s['deliveryDateRange']['end']:
                        subOrder.estimatedDeliveryDate = datetime.strftime(utils.fromTimeStamp(s['deliveryDateRange']['start']/1000), '%d %b, %Y')
                    else:
                        subOrder.estimatedDeliveryDate = datetime.strftime(utils.fromTimeStamp(s['deliveryDateRange']['start']/1000), '%d %b, %Y') + " - " + datetime.strftime(utils.fromTimeStamp(s['deliveryDateRange']['end']/1000), '%d %b, %Y')
            except:
                pass
            subOrder.offerDiscount = s['offerDiscount']
            subOrder.unitPrice = s['offerPrice']
            merchantSubOrders.append(subOrder)

        merchantOrder.subOrders = merchantSubOrders
        self.populateDerivedFields(merchantOrder)
        return merchantOrder
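
    # Rough shape of the #orderJSON payload consumed by _parseC() above, inferred from the keys
    # it reads; the values below are invented for illustration only:
    #
    #   {
    #     "code": "17772748329", "created": 1441742400000, "paidAmount": 999, "shippingCharges": 0,
    #     "suborders": [{
    #         "code": "5970688907", "supcCode": "SDL638317853", "paidAmount": 999,
    #         "offerPrice": 1099, "offerDiscount": 100, "sdCash": 0,
    #         "internalCashbackValue": 0, "externalCashbackValue": 0, "shippingCharges": 0,
    #         "suborderStatus": {"macroDescription": "Processing", "value": "Processing"},
    #         "shipDateRange": {"start": 1441828800000, "end": 1442001600000}
    #     }]
    #   }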

    def _getStatusFromDetailedStatus(self, detailedStatus):
        for key, value in Store.OrderStatusMap.iteritems():
            if detailedStatus.lower() in value:
                return key
        if 'order cancelled' in detailedStatus.lower():
            return MStore.ORDER_CANCELLED
        print "Detailed status needs to be mapped", detailedStatus, self.store_id
        return None

    def scrapeStoreOrders(self,):
        #collectionMap = {'palcedOn':1}
        orders = self._getActiveOrders()
        for order in orders:
            order = obj(order)
            print "Order", self.store_name, order.orderId
            try:
                url = order.orderTrackingUrl
                page = fetchResponseUsingProxy(url, headers=headers)
                #page=page.decode("utf-8")
                try:
                    pq = PyQuery(page)
                    subOrderStatusMap = {}
                    for el in pq('div.cardLayout.pad-10.mb-10'):
                        elpq = PyQuery(el)
                        try:
                            subOrderId = elpq("div.subOrderId").text().split(":")[1].strip()
                            subOrderStatusMap[subOrderId] = elpq
                        except:
                            pass
                    closedForReco = {}
                    for suborder in order.subOrders:
                        if suborder.closed:
                            if suborder.merchantSubOrderId in subOrderStatusMap:
                                del subOrderStatusMap[suborder.merchantSubOrderId]
                            continue
                        if subOrderStatusMap.has_key(suborder.merchantSubOrderId):
                            elpq = subOrderStatusMap.get(suborder.merchantSubOrderId)
                            del subOrderStatusMap[suborder.merchantSubOrderId]
                            if elpq("#trackLink").attr("href"):
                                suborder.trackingUrl = elpq("#trackLink").attr("href")
                            if elpq('span.subOrdStatusText').text():
                                suborder.estimatedDeliveryDate = elpq('span.subOrdStatusText').text().strip()
                            suborder.detailedStatus = elpq("div.orderStatus span").text().strip()
                            if suborder.detailedStatus in ['Closed. New Order Placed', 'Closed. Placing New Order']:
                                closedForReco[suborder.merchantSubOrderId] = suborder
                            suborder.status = self._getStatusFromDetailedStatus(suborder.detailedStatus)

                    if len(closedForReco) == len(subOrderStatusMap) and len(closedForReco) > 0:
                        productCode = ''
                        allProductsSame = True
                        for subOrderId, subo in closedForReco.iteritems():
                            if productCode == '':
                                productCode = subo.productCode
                                continue
                            if subo.productCode != productCode:
                                allProductsSame = False
                                break
                        if allProductsSame:
                            print "singlereco", order.orderId
                            for key, elpq in subOrderStatusMap.iteritems():
                                suborderNew = obj(todict(suborder))
                                suborderNew.merchantSubOrderId = key
                                if elpq("#trackLink").attr("href"):
                                    suborderNew.trackingUrl = elpq("#trackLink").attr("href")
                                if elpq('span.subOrdStatusText').text():
                                    suborderNew.estimatedDeliveryDate = elpq('span.subOrdStatusText').text().strip()
                                suborderNew.detailedStatus = elpq("div.orderStatus span").text().strip()
                                suborderNew.status = self._getStatusFromDetailedStatus(suborderNew.detailedStatus)
                                order.subOrders.append(suborderNew)
                        else:
                            print "All products not same, referring transaction url", order.orderId
                    else:
                        pass
                        #Lookout for ordersummary page for exact mapping
                    self.populateDerivedFields(order, update=True)
                    self._updateToOrder(todict(order))
                except:
                    traceback.print_exc()
                    soup = BeautifulSoup(page)
                    try:
                        self.tryBParsing(order, soup)
                    except:
                        traceback.print_exc()
                        sections = soup.findAll("section")
                        orderEl = sections[1]
                        orderTrs = orderEl.findAll("tr")

                        placedOn = str(orderTrs[0].findAll("td")[1].text)
                        sections.pop(0)
                        sections.pop(0)

                        subOrders = sections
                        bulk = self.db.merchantOrder.initialize_ordered_bulk_op()
                        closed = True
                        for subOrderElement in subOrders:
                            div1 = subOrderElement.findAll("div", {"class": "blk lrPad subordrs"})
                            if len(div1) <= 0:
                                raise ParseException("subOrder", "Could not parse suborders for Snapdeal")
                            subOrder = None
                            breakFlag = False
                            for strDiv in str(div1).split("<div class=\"seperator\"></div>"):
                                div = BeautifulSoup(strDiv)
                                divStr = str(div)
                                divStr = divStr.replace("\n", "").replace("\t", "")
                                updateMap = {}
                                for line in divStr.split("<br />"):
                                    if "Suborder ID" in line:
                                        merchantSubOrderId = re.findall(r'\d+', line)[0]
                                        #break if suborder is inactive
                                        subOrder = self._isSubOrderActive(order, merchantSubOrderId)
                                        if subOrder is None:
                                            subOrders = self.parseSubOrder(subOrderElement, placedOn)
                                            self.db.merchantOrder.update({"orderId":order['orderId']}, {'$push':{"subOrders":{"$each":todict(subOrders)}}})
                                            print "Added new suborders to Order id - ", order['orderId']
                                            closed = False
                                            breakFlag = True
                                            break
                                        elif subOrder['closed']:
                                            breakFlag = True
                                            break
                                        else:
                                            findMap = {"orderId": order['orderId'], "subOrders.merchantSubOrderId": merchantSubOrderId}
                                    elif "Status :" in line:
                                        detailedStatus = re.findall('>(.*?)</span>', line, re.IGNORECASE)[0]
                                        updateMap["subOrders.$.detailedStatus"] = detailedStatus
                                        status = self._getStatusFromDetailedStatus(detailedStatus)
                                        closedStatus = status in [Store.ORDER_DELIVERED, Store.ORDER_CANCELLED]
                                        if status is not None:
                                            updateMap["subOrders.$.status"] = status
                                            if detailedStatus == 'Closed For Vendor Reallocation':
                                                #if it is more than 6 hours, mark closed.
                                                closeAt = subOrder.get("closeAt")
                                                if closeAt is None:
                                                    closeAt = datetime.now() + timedelta(hours=6)
                                                    updateMap["subOrders.$.closeAt"] = datetime.strftime(closeAt, "%Y-%m-%d %H:%M:%S")
                                                else:
                                                    closeAt = datetime.strptime(closeAt, "%Y-%m-%d %H:%M:%S")
                                                    if datetime.now() > closeAt:
                                                        closedStatus = True
                                            #Close if not-applicable suborders are not closed
                                            if utils.fromTimeStamp(order['createdOnInt'] + 35*86400*1000) < datetime.now() and subOrder['cashBackStatus'] == utils.CB_NA:
                                                closedStatus = True

                                            if closedStatus:
                                                #if status is closed then change the paybackStatus accordingly
                                                updateMap["subOrders.$.closed"] = True
                                                if status == Store.ORDER_DELIVERED:
                                                    if subOrder.get("cashBackStatus") == Store.CB_PENDING:
                                                        updateMap["subOrders.$.cashBackStatus"] = Store.CB_APPROVED
                                                elif status == Store.ORDER_CANCELLED:
                                                    if subOrder.get("cashBackStatus") == Store.CB_PENDING:
                                                        updateMap["subOrders.$.cashBackStatus"] = Store.CB_CANCELLED
                                            else:
                                                closed = False
                                    elif "Est. Shipping Date" in line:
                                        estimatedShippingDate = line.split(":")[1].strip()
                                        updateMap["subOrders.$.estimatedShippingDate"] = estimatedShippingDate
                                    elif "Est. Delivery Date" in line:
                                        estimatedDeliveryDate = line.split(":")[1].strip()
                                        updateMap["subOrders.$.estimatedDeliveryDate"] = estimatedDeliveryDate
                                    elif "Courier Name" in line:
                                        courierName = line.split(":")[1].strip()
                                        updateMap["subOrders.$.courierName"] = courierName
                                    elif "Tracking No" in line:
                                        trackingNumber = line.split(":")[1].strip()
                                        updateMap["subOrders.$.trackingNumber"] = trackingNumber

                                if breakFlag:
                                    continue

                                bulk.find(findMap).update({'$set' : updateMap})
                        bulk.find({'orderId': order['orderId']}).update({'$set':{'closed': closed, "parseError":False}})
                        result = bulk.execute()
                        tprint(result)
            except:
                traceback.print_exc()
                tprint("Could not update " + str(order.orderId) + " for store " + self.getName())
                self.db.merchantOrder.update({"orderId":order.orderId}, {"$set":{"parseError":True}})

    def tryBParsing(self, order, soup):
        orderDetailContainerDivs = soup.body.find("div", {'class':'cardLayoutWrap'}).findAll('div', recursive=False)
        orderDetailDiv = orderDetailContainerDivs.pop(0)
        placedOn = orderDetailDiv.span.text.split(':')[1].strip()

        orderDetailContainerDivs.pop(0)

        subOrders = orderDetailContainerDivs
        bulk = self.db.merchantOrder.initialize_ordered_bulk_op()
        closed = True
        for subOrderElement in subOrders:
            prodDivs = subOrderElement.findAll('div', recursive=False)
            merchantSubOrderId = prodDivs[0].text.split(':')[1].strip()
            subOrder = None
            subOrder = self._isSubOrderActive(order, merchantSubOrderId)
            if subOrder is None:
                try:
                    subOrder = self.parseSubOrderB(subOrderElement, placedOn)
                    if subOrder is None:
                        continue
                    self.db.merchantOrder.update({"orderId":order['orderId']}, {'$push':{"subOrders":{"$each":todict([subOrder])}}})
                    print "Added new suborders to Order id - ", order['orderId']
                    closed = False
                except:
                    pass
                continue
            elif subOrder['closed']:
                continue
            else:
                prodDetailDiv = prodDivs[1].findAll('div', recursive=False)
                otherDiv = prodDetailDiv[1]
                trackBlock = subOrderElement.find('div', {'class':'trackingDetailsBlock'})
                findMap = {"orderId": order['orderId'], "subOrders.merchantSubOrderId": merchantSubOrderId}
                updateMap = {}
                detailedStatus = otherDiv.find('div', {'class':'orderStatus'}).span.text.strip()

                if 'A new order placed with a different seller' in str(trackBlock):
                    #if it is more than 6hours mark closed.
                    closeAt = subOrder.get("closeAt")
                    if closeAt is None:
                        closeAt = datetime.now() + timedelta(hours=6)
                        updateMap["subOrders.$.closeAt"] = datetime.strftime(closeAt, "%Y-%m-%d %H:%M:%S")
                        bulk.find(findMap).update({'$set' : updateMap})
                        closed = False
                        continue
                    else:
                        closeAt = datetime.strptime(closeAt, "%Y-%m-%d %H:%M:%S")
                        if datetime.now() > closeAt:
                            detailedStatus = 'A new order placed with a different seller'

                status = self._getStatusFromDetailedStatus(detailedStatus)
                closedStatus = status in [Store.ORDER_DELIVERED, Store.ORDER_CANCELLED]
                updateMap["subOrders.$.detailedStatus"] = detailedStatus
                if status is not None:
                    updateMap["subOrders.$.status"] = status

                if closedStatus:
                    #if status is closed then change the paybackStatus accordingly
                    updateMap["subOrders.$.closed"] = True
                    if status == Store.ORDER_DELIVERED:
                        if subOrder.get("cashBackStatus") == Store.CB_PENDING:
                            updateMap["subOrders.$.cashBackStatus"] = Store.CB_APPROVED
                    elif status == Store.ORDER_CANCELLED:
                        if subOrder.get("cashBackStatus") == Store.CB_PENDING:
                            updateMap["subOrders.$.cashBackStatus"] = Store.CB_CANCELLED
                else:
                    closed = False

                deliveryStatus = otherDiv.find('div', {'class':'orderDelivery'})
                if deliveryStatus is not None:
                    delString = deliveryStatus.text.strip()
                    arr = delString.split(':')
                    if "On" in arr[0]:
                        updateMap['subOrders.$.deliveredOn'] = arr[1].strip()
                    elif "Exp. Delivery by" in arr[0]:
                        updateMap['subOrders.$.estimatedDeliveryDate'] = arr[1].strip()
                    elif "Est. delivery between" in arr[0]:
                        updateMap['subOrders.$.estimatedDeliveryDate'] = delString.split("between")[1].strip()
                    elif "Est. shipping between" in arr[0]:
                        updateMap['subOrders.$.estimatedShippingDate'] = delString.split("between")[1].strip()
                    else:
                        updateMap['subOrders.$.estimatedShippingDate'] = arr[1].strip()
                courierDet = subOrderElement.find('div', {'class':'courierDetail'})
                if courierDet is not None:
                    updateMap['subOrders.$.courierName'] = courierDet.span.text.strip()
                trackingDet = subOrderElement.find('div', {'class':'trackingNo'})
                if trackingDet is not None:
                    updateMap['subOrders.$.trackingUrl'] = trackingDet.span.a['href']
                    updateMap['subOrders.$.trackingNumber'] = trackingDet.span.a.text.strip()
                bulk.find(findMap).update({'$set' : updateMap})
        bulk.find({'orderId': order['orderId']}).update({'$set':{'closed': closed, "parseError":False}})
        result = bulk.execute()
        tprint(result)

    def _saveToAffiliate(self, offers, status):
        collection = self.db.snapdealOrderAffiliateInfo1
        #mcollection = self.db.merchantOrder
        for offer in offers:
            offer = self.covertToObj(offer)
            if offer.orderId:
                dict1 = todict(offer)
                dict1["_id"] = dict1["orderId"] + "-" + dict1["productCode"]
                dict1['status'] = status
                collection.save(dict1)
    # def _saveToAffiliate(self, offers):
    #     collection = self.db.snapdealOrderAffiliateInfo
    #     mcollection = self.db.merchantOrder
    #     for offer in offers:
    #         offer = self.covertToObj(offer)
    #         collection.update({"adId":offer.adId, "saleAmount":offer.saleAmount, "payOut":offer.payOut},{"$set":todict(offer)}, upsert=True)
    #         mcollection.update({"subTagId":offer.subTagId, "storeId":self.store_id, "subOrders.missingAff":True}, {"$set":{"subOrders.$.missingAff":False}})

    def _getAllOffers(self, br, token):
        allOffers = []
        nextPage = 1
        while True:
            data = getPostData(token, nextPage)
            response = br.open(POST_URL, data)
            rmap = json.loads(ungzipResponse(response))
            if rmap is not None:
                rmap = rmap['response']
            print rmap
            if rmap is not None and len(rmap['errors']) == 0:
                allOffers += rmap['data']['data']
            nextPage += 1
            if rmap['data']['pageCount'] < nextPage:
                break

        return allOffers

    # def covertToObj(self,offer):
    #     offerData = offer['Stat']
    #     offer1 = AffiliateInfo(offerData['affiliate_info1'], self.store_id, offerData['conversion_status'], offerData['ad_id'],
    #         offerData['datetime'], int(float(offerData['payout'])), offer['Offer']['name'], offerData['ip'], int(float(offerData['conversion_sale_amount'])))
    #     offer1.saleTime = int(time.mktime(datetime.strptime(offer1.saleDate, "%Y-%m-%d %H:%M:%S").timetuple()))
    #     return offer1
    def parseInfo(self,):
        from pyquery import PyQuery as pq
        orders = list(session.query(Orders).filter_by(store_id=self.store_id).filter_by(status='ORDER_CREATED').group_by(Orders.user_id).all())
        try:
            for order in orders:
                try:
                    doc = pq(order.rawhtml)
                    a1 = " ".join(["" if not div.text else div.text.replace("\t", "").replace("\n", "").strip() for div in pq(doc('article')[-1])('div')]).strip()
                    a2 = ",".join(["" if not div.text else div.text.replace("\t", "").replace("\n", "").replace(" ", "") for div in pq(doc('article')[-2])('div')]).strip()
                    user_address = All_user_addresses()
                    user_address.address = a1
                    all = a2.split(",")
                    user_address.source = 'order'
                    user_address.user_id = order.user_id
                    #user_address. = all[3].split(":")[1]
                    #user_address. = all[2].split(":")[1]
                    #orderInfo.mobile = all[-1].split(":")[1]
                    adSplit = a1.split(",")
                    user_address.city = adSplit[-2].strip()
                    user_address.pincode = adSplit[-1].strip().split(" ")[0]
                    user_address.state = adSplit[-1].strip().split(" ")[1]
                    session.commit()
                except:
                    session.rollback()
                    continue
        finally:
            session.close()

    def covertToObj(self, offer):

        offer1 = AffiliateInfo(offer["affiliateSubId1"], 3, None, None, utils.toTimeStamp(datetime.strptime(offer["dateTime"], "%m/%d/%Y %H:%M:%S")),
                               offer["commissionEarned"], None, None, offer["sale"])
        offer1.orderId = offer.get('orderCode') if offer.get('orderCode') else None
        offer1.productCode = offer["product"]
        offer1.unitPrice = offer["price"]
        offer1.quantity = offer["quantity"]
        offer1.saleTime = offer["dateTime"]
        return offer1


def getPostData(token, page=1, limit=20, startDate=None, endDate=None):
    endDate = date.today() + timedelta(days=1)
    startDate = endDate - timedelta(days=31)

    parameters = (
        ("page", str(page)),
        ("limit", str(limit)),
        ("fields[]", "Stat.offer_id"),
        ("fields[]", "Stat.datetime"),
        ("fields[]", "Offer.name"),
        ("fields[]", "Stat.conversion_status"),
        ("fields[]", "Stat.conversion_sale_amount"),
        ("fields[]", "Stat.payout"),
        ("fields[]", "Stat.ip"),
        ("fields[]", "Stat.ad_id"),
        ("fields[]", "Stat.affiliate_info1"),
        ("sort[Stat.datetime]", "desc"),
        ("filters[Stat.date][conditional]", "BETWEEN"),
        ("filters[Stat.date][values][]", startDate.strftime('%Y-%m-%d')),
        ("filters[Stat.date][values][]", endDate.strftime('%Y-%m-%d')),
        ("data_start", startDate.strftime('%Y-%m-%d')),
        ("data_end", endDate.strftime('%Y-%m-%d')),
        ("Method", "getConversions"),
        ("NetworkId", "jasper"),
        ("SessionToken", token),
    )
    #Encode the parameters
    return urllib.urlencode(parameters)
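
# Example (illustrative): _getAllOffers() posts the form body built by getPostData() to POST_URL
# and pages through the HasOffers conversion report, roughly as in
#
#   br = getBrowserObject()                  # browser helper imported from dtr.main (assumed usage)
#   offers = store._getAllOffers(br, token)  # 'token' is a HasOffers SessionToken
#
# Each page's JSON is expected to carry the rows under response['data']['data'] together with a
# 'pageCount' value used to stop the pagination loop.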

def main():
    #print todict([1,2,"3"])
    store = getStore(3)
    #store.scrapeStoreOrders()
    store.parseOrderRawHtml(332222, "3232311", 2, readSSh("/home/amit/sample.html"), "https://m.snapdeal.com/purchaseMobileComplete?code=1a011639e72588db39169df568654620&order=17772748329&sdInstant=false")
    #store.scrapeAffiliate()
    #https://m.snapdeal.com/purchaseMobileComplete?code=3fbc8a02a1c4d3c4e906f46886de0464&order=5808451506
    #https://m.snapdeal.com/purchaseMobileComplete?code=9f4dfa49ff08a16d04c5e4bf519506fc&order=9611672826

    # orders = list(session.query(OrdersRaw).filter_by(store_id=3).filter_by(status='ORDER_NOT_CREATED').all())
    # for o in orders:
    #     result = store.parseOrderRawHtml(o.id, o.sub_tag, o.user_id, o.rawhtml, o.order_url)['result']
    #     o.status = result
    #     session.commit()
    # session.close()
    # store.scrapeStoreOrders()
    #store._isSubOrderActive(8, "5970688907")
    #store.scrapeAffiliate(datetime(2015,4,1))
    #store.scrapeStoreOrders()
    #store.parseInfo()


class obj(object):
    def __init__(self, d):
        for a, b in d.items():
            if isinstance(b, (list, tuple)):
                setattr(self, a, [obj(x) if isinstance(x, dict) else x for x in b])
            else:
                setattr(self, a, obj(b) if isinstance(b, dict) else b)

def todict(obj, classkey=None):
    if isinstance(obj, dict):
        data = {}
        for (k, v) in obj.items():
            data[k] = todict(v, classkey)
        return data
    elif hasattr(obj, "_ast"):
        return todict(obj._ast())
    elif hasattr(obj, "__iter__"):
        return [todict(v, classkey) for v in obj]
    elif hasattr(obj, "__dict__"):
        data = dict([(key, todict(value, classkey))
                     for key, value in obj.__dict__.iteritems()
                     if not callable(value) and not key.startswith('_')])
        if classkey is not None and hasattr(obj, "__class__"):
            data[classkey] = obj.__class__.__name__
        return data
    else:
        return obj
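
# Example (illustrative): obj() and todict() are rough inverses used to move between Mongo-style
# dicts and attribute access, e.g.
#
#   o = obj({'orderId': 1, 'subOrders': [{'closed': False}]})
#   o.subOrders[0].closed   # -> False
#   todict(o)               # -> {'orderId': 1, 'subOrders': [{'closed': False}]}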

if __name__ == '__main__':
    main()