def import_report(request):
    """Handle an uploaded batch file of MSISDNs.

    Extracts rows from the uploaded file, de-duplicates and cleans them,
    writes the cleaned list to a server-side file named by a fresh UUID,
    and reports the generated name and count as JSON.

    :param request: Django request; expects the upload under ``file[0]``.
    :return: JSON ``{"Message": <name-or-NoApplied>, "Count": <int>}``.

    NOTE(review): the original built ``res`` but had no visible return
    statement; a JSON response is returned here for consistency with
    ``createBroadcast`` below — confirm against the deployed version.
    """
    # Default covers both non-POST requests and failures.
    res = {"Message": "NoApplied", "Count": 0}
    if request.method == 'POST':
        try:
            upload = request.FILES['file[0]']
            logger.info("Importing uploaded file: %s" % upload.name)
            # Extract rows and drop duplicates before cleaning.
            data = set(txt.extract(upload.read()))
            new_msisdns = clean_data(data)
            # Persist the cleaned batch under a fresh, unique file name.
            new_file_name = uuid.uuid1()
            new_file_path = os.path.join(
                settings.FILTERED_BACTHES_FILE_LOCATION,
                "%s" % new_file_name)
            # ``with`` guarantees the handle is closed; a plain loop
            # replaces the original side-effecting list comprehension.
            with open(new_file_path, 'wb+') as _sfile:
                for chunk in new_msisdns:
                    _sfile.write('%s\n' % chunk)
            res = {"Message": "%s" % new_file_name,
                   "Count": len(new_msisdns)}
        except Exception as ex:
            # Best-effort endpoint: log the failure and report NoApplied.
            logger.info("Error %s" % str(ex))
            res = {"Message": "NoApplied", "Count": 0}
    return HttpResponse(json.dumps(res), content_type='application/json')
def background_filter(new_file_name, new_file_path):
    """Load MSISDNs from a filtered batch file into a Redis set.

    Reads *new_file_path*, de-duplicates its extracted rows, and SADDs
    each row into the Redis set keyed by *new_file_name*.

    :param new_file_name: unique batch name; used as the Redis set key.
    :param new_file_path: path of the batch file to read.
    :return: None
    """
    try:
        logger.info('starting...')
        start = datetime.now()
        # NOTE(review): host/password are hard-coded here while
        # process_file() uses settings.REDIS_SERVER / settings.REDIS_PASS;
        # consider unifying. Password appears redacted in source.
        store = redis.Redis('172.24.6.103', password='******')
        # Initialize so a failed extraction below does not leave ``data``
        # unbound (the original raised NameError in that case).
        data = set()
        try:
            # ``with`` closes the handle even when extraction raises.
            with open(new_file_path, 'r') as batch_file:
                data = set(txt.extract(batch_file.read()))
            if len(data) == 0:
                logger.info('The file is empty')
        except Exception as exc:
            logger.error('error extracting file: %s' % str(exc))
        # Iterate the set directly; the original built a throwaway list.
        for row in data:
            print('%s|%s' % (row, new_file_name))
            store.sadd(new_file_name, row)
        logger.info('newpath:%s , file_name:%s' % (new_file_path, new_file_name))
        logger.info('done in %s seconds' % (datetime.now() - start))
    except Exception as exc:
        # The outer ``try`` in the original had no handler at all (a
        # syntax error as written); log and swallow so a background
        # worker task does not crash the worker.
        logger.error('background_filter failed: %s' % str(exc))
def process_file(filename, filepath):
    """
    Read *filepath* and build the de-duplicated set of its extracted rows.

    :param filename: batch name; only logged in this visible span.
    :param filepath: path of the file to read.
    :return: None in the visible span — the body appears to continue
        elsewhere (a later fragment references ``msisdns``/``patt``
        defined outside this view), so no return is assumed here.
    """
    logger.debug("%s | %s" % (filename, filepath))
    # Redis connection is opened but not used in the visible span —
    # presumably the (unseen) remainder of the function writes to it.
    store = redis.Redis(settings.REDIS_SERVER, password=settings.REDIS_PASS)
    try:
        # NOTE(review): the handle is never closed and the name shadows
        # the Py2 builtin ``file`` — consider ``with open(...) as fh``.
        file = open(filepath, 'r')
        _data = txt.extract(file.read())
        # ``set`` drops duplicate rows from the extraction.
        data = set(_data)
        if len(data) == 0:
            logger.info('The file is empty')
    except Exception, exc:
        # Best-effort: extraction failures are logged, not raised.
        logger.error('error extracting file: %s' % str(exc))
def createBroadcast(request):
    """Create a Broadcast from a JSON POST body and queue it for sending.

    Expects a JSON object with keys: ``user`` (User pk),
    ``broadcast_name``, ``broadcast_description``, ``sender``,
    ``message``, ``content_type``, ``schedule_start``, ``end_time`` and
    ``base_file`` (Uploads pk).

    :param request: Django request.
    :return: JSON ``{'action': bool, 'message': str}``.
    """
    username = request.user.username
    # Guard: a non-POST request previously reached the return below with
    # ``res`` unbound, raising NameError.
    res = {'action': False, "message": "Invalid request method"}
    if request.method == 'POST':
        result = json.loads(request.body.decode('utf-8'))
        try:
            user = User.objects.get(pk=result['user'])
            broadcast_name = result['broadcast_name']
            broadcast_description = result['broadcast_description']
            sender = result['sender']
            message = result['message']
            content_type = result['content_type']
            # Schedule fields are parsed and normalized even though the
            # create() call below does not currently persist them.
            schedule_start = parse(
                result['schedule_start']).strftime('%Y-%m-%d %H:%M:%S')
            end_time = parse(result['end_time']).strftime('%Y-%m-%d %H:%M:%S')
            base_file = Uploads.objects.get(pk=result['base_file'])
            logger.debug("schedule %s -> %s, base_file %s"
                         % (schedule_start, end_time, base_file.name_id))
            Broadcast.objects.create(
                user=user,
                broadcast_name=broadcast_name,
                broadcast_description=broadcast_description,
                sender=sender,
                base_file=base_file,
                message=message,
                content_type=content_type,
                status="Pending")
            # Kick off the asynchronous campaign send for this batch.
            taskid = send_campaign.delay(sender, message, base_file.name_id,
                                         CONT[content_type])
            logger.info("TaskId %s" % taskid)
            res = {'action': True, "message": "Campaign created!"}
        except Exception as ex:
            logger.error("Error processing request %s-%s" % (str(ex), username))
            # Original returned "Travel request created" here — a
            # copy-paste error; report the actual failure instead.
            res = {'action': False, "message": "Error creating campaign"}
    return HttpResponse(json.dumps(res), content_type='application/json')
def print_all():
    """Emit a fixed debug line to the log and a marker string to stdout.

    Appears to be a leftover smoke-test/debug helper; it takes no
    arguments and returns None.
    """
    logger.info("I am man hjvslbsv;vdsjbkflblvjdlsj,k")
    print("ayanfe")
off_net = [] incomplete = [] for row in data: match = patt.match(str(row.strip())) # logger.info(match) if match: msisdn = normalize(match.group()) if len(msisdn) not in [10, 13]: incomplete.append(msisdn) continue msisdns.add(msisdn) store.sadd(filename, msisdn) else: # invalid msisdn num_errors.append(row) logger.info("Error: %s, Success: %s" % (len(num_errors), len(msisdns))) def background_filter(new_file_name, new_file_path): """ :param new_file_name: :param new_file_path: :return: """ try: logger.info('starting...') start = datetime.now() store = redis.Redis('172.24.6.103', password='******') #msisdns = process_file(new_file_name, _filename) try: