-
Notifications
You must be signed in to change notification settings - Fork 0
/
segmentationApi.py
124 lines (107 loc) · 4.03 KB
/
segmentationApi.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
import json
from datetime import datetime
from django.views.decorators.csrf import csrf_exempt
from bson.json_util import ObjectId
from data import jsonResponse, db, basic_error, basic_failure, basic_success
from external.sheet import append_to_sheet
def get_segment_jobs(request):
    """List all segment jobs, grouped by (ref_job, timestamp), newest first.

    Each group carries its segments plus metadata pulled from the parent
    job document (name, description, and optionally t_id / customer count).
    Groups whose parent job no longer exists are silently dropped.
    """
    pipeline = [
        # Collapse individual segment documents into one entry per
        # (parent job, submission timestamp) pair.
        {"$group": {
            "_id": {
                "ref_job": "$ref_job",
                "timestamp": "$timestamp"
            },
            "segments": {
                "$push": {
                    "status": "$job.status",
                    "english": "$text.english",
                    "arabic": "$text.arabic",
                    "date": "$date",
                    "num": "$segment_number"
                }
            }
        }},
        # Flatten the compound _id back into top-level fields.
        {"$project": {
            "_id": 0, "segments": 1,
            "ref_job": "$_id.ref_job",
            "timestamp": "$_id.timestamp"
        }},
        {"$sort": {"timestamp": -1}}
    ]

    results = []
    parent_meta_cache = {}  # ref_job (as str) -> metadata dict from the parent job
    for group in db.segment_jobs.aggregate(pipeline):
        cache_key = str(group["ref_job"])
        if cache_key in parent_meta_cache:
            group.update(parent_meta_cache[cache_key])
        else:
            parent = db.jobs.find_one(
                {"_id": group["ref_job"]},
                {"_id": False, "job": True, "name": True, "description": True})
            if not parent:
                # Parent job was deleted (or is empty); skip this group.
                continue
            meta = {
                'name': parent.get('name', 'Untitled'),
                'description': parent.get('description', ''),
            }
            parent_job = parent.get('job', {})
            if 't_id' in parent_job:
                meta['t_id'] = parent_job['t_id']
            if 'customer_count' in parent_job.get('report', {}):
                meta['count'] = parent_job['report']['customer_count']
            parent_meta_cache[cache_key] = meta
            group.update(meta)
        results.append(group)
    return jsonResponse({"success": True, "data": results})
@csrf_exempt
def post_segment_form(request):
    """Split a parent job into N segment jobs and queue them on the sheet.

    Expects a JSON body with:
        total    -- total item count to divide across segments (int or numeric string)
        segments -- list of {english, arabic, date(epoch ms)} dicts
        ref_job  -- hex ObjectId of the parent job (must exist in db.jobs)
        t_id     -- numeric template/task id embedded in the sheet column key
        debug    -- optional; if truthy, return the rows instead of writing them

    Returns basic_failure when the parent job is missing or segments is
    empty, basic_success on completion, or basic_error(e) on any exception.
    """
    try:
        data = json.loads(request.body)
        # Normalize once: JSON clients may send total as a string. Keeping it
        # an int here also keeps the last segment's "upper" limit an int
        # (the original stored the raw value, persisting a string bound).
        total = int(data['total'])
        segments = data['segments']
        ref_job = data['ref_job']
        t_id = data['t_id']
        if not segments:
            # Guard: an empty list would cause ZeroDivisionError below.
            return basic_failure
        # NOTE(review): Collection.count is deprecated in modern PyMongo
        # (use count_documents) — kept for compatibility with the pinned driver.
        if db.jobs.count({"_id": ObjectId(ref_job)}) == 0:
            return basic_failure
        # Step 1: split [0, total) into contiguous per-segment ranges;
        # the last range absorbs the remainder of the integer division.
        slen = len(segments)
        sub_size = total // slen
        limits = [
            [sub_size * i, sub_size * (i + 1)] for i in range(0, slen)
        ]
        limits[-1][1] = total
        # Step 2: create a db job and a sheet row for each segment.
        result = []
        sheet_rows = []
        timestamp = datetime.now()  # shared so all segments group together
        for i, segment in enumerate(segments):
            epoch_ms = segment['date']
            res = db.segment_jobs.insert_one({
                "ref_job": ObjectId(ref_job),
                "timestamp": timestamp,
                "segment_number": i + 1,
                "limits": {
                    "lower": limits[i][0],
                    "upper": limits[i][1]
                },
                "text": {
                    "english": segment['english'],
                    "arabic": segment['arabic']
                },
                "date": epoch_ms,
                "job": {
                    "status": "pending"
                }
            })
            # Column key consumed downstream: "<oid>_segment,<t_id>,<lower>,<upper>"
            oid_col = str(res.inserted_id) + ("_segment,%i,%i,%i" % (t_id, limits[i][0], limits[i][1]))
            result.append(oid_col)
            # Build the scheduling row; segment dates arrive as epoch milliseconds.
            run_at = datetime.fromtimestamp(epoch_ms / 1000)
            start_date = run_at.strftime("%m/%d/%Y")
            row = ['Once', 'segment', start_date, '', run_at.hour, run_at.minute,
                   segment['english'], segment['arabic'], oid_col]
            sheet_rows.append(row)
        if data.get('debug', False):
            return jsonResponse({"success": True, "result": sheet_rows})
        else:
            for row in sheet_rows:
                append_to_sheet(row)
            return basic_success
    except Exception as e:  # "except Exception, e" was Python-2-only syntax
        return basic_error(e)