Example #1
File: odo.py Project: nschank/ution
def do_edit(data, settings):
	if settings.index < 0 or settings.index >= len(data):
		print ("No item of that index")
		return
	if settings.category is not None:
		if len(settings.category) == 0:
			data[settings.index].category = None
		else:
			data[settings.index].category = settings.category[0]
	if settings.important:
		data[settings.index].important = True
	if settings.unimportant:
		data[settings.index].important = False
	if settings.start is not None:
		try:
			if len(settings.start) == 0:
				data[settings.index].startdate = None
			else:
				data[settings.index].startdate = datetime.today() if "today" in settings.start else dparse.parse(' '.join(settings.start))
		except Exception:
			data[settings.index].startdate = None
	if settings.time is not None:
		try:
			if len(settings.time) == 0:
				data[settings.index].duedate = None
			else:
				data[settings.index].duedate = datetime.today() if "today" in settings.time else dparse.parse(' '.join(settings.time))
		except Exception:
			data[settings.index].duedate = None
	if settings.item is not None:
		data[settings.index].value = ' '.join(settings.item)
	
	data.sort()
	do_print(data)
	rewrite(settings.filename, data)
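The snippet above relies on names it does not define (do_print and rewrite are presumably helpers elsewhere in odo.py); a minimal set of imports it appears to assume — reading dparse as an alias for dateutil's parser is a guess, not something the example states:

from datetime import datetime
from dateutil import parser as dparse  # assumed: dparse.parse() behaving like dateutil.parser.parse()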
Example #2
 def compileWithAsh(self, tests):
     start_time = datetime.today()
     #print("starting compile of %d tests at %s" % (len(tests),start_time))
     total=len(tests)
     if not pexpect:
           for test in tests:
               self.js_print('%d\tcompiling %s' % (total,test))
               self.compile_test(test)
               (testdir, ext) = splitext(test)
               if not exists(testdir+".abc"):
                   print("ERROR abc files %s.abc not created" % (testdir))
                   self.ashErrors.append("abc files %s.abc not created" % (testdir))
               total -= 1
     else:  #pexpect available
         child = pexpect.spawn("java -classpath %s macromedia.asc.embedding.Shell" % self.asc)
         child.logfile = None
         child.expect("\(ash\)")
         child.expect("\(ash\)")
 
         for test in tests:
             if self.debug:
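                 # note: cmd is assigned further down in this loop body, so this prints the command from the previous iteration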
                 print cmd
             else:
                 print "Compiling ", test
                 
             if test.endswith(self.abcasmExt):
                 self.compile_test(test)
             else:
                 arglist = self.parseArgStringToList(self.ascargs)
             
                 (dir, file) = split(test)
                 # look for .asc_args files to specify dir / file level compile args
                 arglist = self.loadAscArgs(arglist, dir, test)
                 
                 cmd = "asc -import %s " % (self.builtinabc)
                 for arg in arglist:
                     cmd += ' %s' % arg
                 
                 for p in self.parents(dir):
                     shell = join(p,"shell.as")
                     if isfile(shell):
                         cmd += " -in " + shell
                         break
                 (testdir, ext) = splitext(test)
                 deps = glob(join(testdir,"*.as"))
                 deps.sort()
                 for util in deps + glob(join(dir,"*Util.as")):
                     cmd += " -in %s" % util #no need to prepend \ to $ when using ash
                 cmd += " %s" % test
             
                 if exists(testdir+".abc"):
                     os.unlink(testdir+".abc")
                 child.sendline(cmd)
                 child.expect("\(ash\)")
                 if not exists(testdir+".abc"):
                     print("ERROR: abc file %s.abc not created, cmd used to compile: %s" % (testdir,cmd))
                     self.ashErrors.append("abc file %s.abc not created, cmd used to compile: %s" % (testdir,cmd))
                 total -= 1;
                 #print("%d remaining, %s" % (total,cmd))
     end_time = datetime.today()
Example #3
def index():
    events = db.session.query(Day) \
        .filter(Day.date > datetime.today()) \
        .filter(Day.priority > 50) \
        .order_by(Day.date).limit(10)
    years = range(datetime.today().year, datetime.today().year + 10)
    return render_template('landing.html', days=events, years=years)
Example #4
 def _number_of_timesheet(self):
     for s in self:
         domain = [('employee_id', '=', s.id),
                   ('month', '=', datetime.today().strftime('%m')),
                   ('year', '=', datetime.today().strftime('%Y')), ]
         count = len(s.env['account.analytic.line'].search(domain).ids)
         self.worked_time += s.env['account.analytic.line'].search(domain).x_task_worked
         s.timesheet_count = count
Example #5
def index(request):
    packages = CustomerPackage.objects.filter(postage_cost=None, shipping_due_date__isnull=True).order_by('-created')
    
    today = datetime.today()
    next_week = (datetime.today() + timedelta(weeks=5))
    
    monthly_packages = CustomerPackage.objects.filter(shipping_due_date__range=(today, next_week), posted__isnull=True).order_by('-created')
    
    return _render(request, "my_admin/home.html", locals())
Example #6
def month(month_name):
    months = months_dict()
    mnum = months[month_name.lower()]
    mord = ordinal(mnum)
    day_count = calendar.monthrange(2000, mnum)[1]
    days = db.session.query(Day) \
        .filter(Day.date > datetime.today()) \
        .filter(Day.date < datetime.today() + timedelta(days=365))
    return render_template('month.html', m=month_name, days=days, count=day_count, month=mnum, mord=mord, months=months)
Example #7
 def _number_of_monthly_in_count(self):
     for s in self:
         domain = [('product_quot_id', '=', s.id),
                   ('work_type', '=', '1'),
                   ('state', '=', '1'),
                   ('month', '=', datetime.today().strftime('%m')),
                   ('year', '=', datetime.today().strftime('%Y')),
                   ]
         s.month_in_count = len(s.env['res.department.stock.line'].search(domain).ids)
Example #8
 def onchange_backdate(self, cr, uid, ids, date):
     if date:
         today = datetime.strptime(datetime.today().strftime("%Y-%m-%d"),"%Y-%m-%d")
         tomorrow = datetime.today() + timedelta(days=1)
         if datetime.strptime(date,"%Y-%m-%d") < today or datetime.strptime(date,"%Y-%m-%d") > tomorrow:
             value = {'date': str(datetime.today())}
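             # the warning strings below are Indonesian: "Attention!" / "The date must be at least today or tomorrow"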
             warning = {'title': ('Perhatian !'), 'message' : ('Tanggal minimal hari ini atau besok')}
             return {'value': value, 'warning': warning}
     return True
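The strftime/strptime round trip above is one way to truncate "now" to midnight; a shorter standard-library sketch that yields the same naive datetime:

from datetime import datetime, date, time

today_midnight = datetime.combine(date.today(), time.min)
# equivalently: datetime.today().replace(hour=0, minute=0, second=0, microsecond=0)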
Example #9
 def _need_grade(self):
     for emp in self:
         if emp.user_id:
             domain = [('employee_id', '=', emp.id),
                       ('month', '=', datetime.today().strftime('%m')),
                       ('year', '=', datetime.today().strftime('%Y')), ]
             count = len(emp.env['hr.employee.personality'].search(domain).ids)
             if count == 0:
                 emp.need_grade = True
             else:
                 emp.need_grade = False
Example #10
 def formalizeTime(actualTime):
     relativeTime = int(actualTime[0])
     clock = ""
     if(actualTime[1] == 'minute' or actualTime[1] == 'minutes'):
         lastMinuteDateTime = datetime.today() - timedelta(minutes = relativeTime)
         return(lastMinuteDateTime.strftime('%Y-%m-%d %H:%M:%S'))
     if(actualTime[1] == 'hour' or actualTime[1] == 'hours'):
         lastHourDateTime = datetime.today() - timedelta(hours = relativeTime)
         return(lastHourDateTime.strftime('%Y-%m-%d %H:%M:%S'))
     if(actualTime[1] == 'day' or actualTime[1] == 'days'):
         lastHourDateTime = datetime.today() - timedelta(days = relativeTime)
         return(lastHourDateTime.strftime('%Y-%m-%d %H:%M:%S'))
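formalizeTime expects a (count, unit) pair such as the pieces of "15 minutes ago" after splitting; a quick usage sketch, assuming the datetime/timedelta imports used above are in scope (units other than minute/hour/day fall through and return None):

print(formalizeTime(['15', 'minutes']))  # timestamp from 15 minutes ago, formatted 'YYYY-MM-DD HH:MM:SS'
print(formalizeTime(['2', 'days']))      # timestamp from two days ago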
Example #11
File: odo.py Project: nschank/ution
def color(todo_item):
	if todo_item.duedate is None:
		return ''
	if todo_item.duedate.date() < datetime.today().date():
		return '\033[31m'
	if todo_item.duedate.date() == datetime.today().date():
		return '\033[31;1m'
	if todo_item.duedate.date() <= datetime.today().date() + timedelta(days=2):
		return '\033[33m'
	if todo_item.startdate.date() == datetime.today().date():
		return '\033[32m'
	return ''
Example #12
 def reActivateUserGenCode(self, userGenCode, user):
     if userGenCode:
         self.userGenCode = userGenCode

         self.userGenCode.code = str(self.userGenCode.user_id) + self.utility.my_random_string(15)
         self.userGenCode.expire_date = self.utility.plusDate(datetime.today(), 30)
         self.userGenCode.create_date = datetime.today()
         self.userGenCode.count = self.userGenCode.count + 1
         self.userGenCode.success = 0
         return self.userGenCode

     return userGenCode
Example #13
 def _score_monthly(self):
     for s in self:
         domain = [('employee_id', '=', s.id),
                   ('month', '=', datetime.today().strftime('%m')),
                   ('year', '=', datetime.today().strftime('%Y')), ]
         number = len(self.search(domain).ids)
         person = self.search(domain)
         if number:
             if person.employee_id.parent_id.user_id.id == person.user_id.id:
                 self.score = ((person.rvalue + person.dvalue + person.cvalue + person.wvalue) * 4) / (7 * number)
             else:
                 self.score = (person.rvalue + person.dvalue + person.cvalue + person.wvalue) / (7 * number)
Example #14
def result_nosig():
    """
    Return the Easy Not Found Generator result without a signature.
    """
    session['date'] = datetime.today().strftime('%m/%d/%y')
    return render_template('result_nosig.html',
                           date=session.get('date'),
                           type=session.get('type'),
                           name=session.get('name'),
                           bride_name=session.get('bride_name'),
                           year=session.get('year'),
                           borough=session.get('borough'),
                           now=datetime.today())
Example #15
def init_env(binariestorun):

    global doc
    global checkns 
    global rootElement 
    global datetimeElement 
    global durationElement
    global startTime

    doc = minidom.Document()
    checkns = "http://check.sourceforge.net/ns"
    rootElement = doc.createElementNS(checkns, "testsuites")
    datetimeElement = doc.createElementNS(checkns,"datetime")

    doc.appendChild(rootElement)
    rootElement.appendChild(datetimeElement)
    node = doc.createTextNode(datetime.today().strftime("%m.%d.%Y %H:%M:%S"))
    datetimeElement.appendChild(node)
    durationElement = doc.createElementNS(checkns,"duration")
    
    # If the binaries require different environments, which are not 
    # mutually compatible, init_env and deinit_env need to be called 
    # between running the binaries. Another option is to run the whole 
    # script several times with different arguments. 

    processes = []

    # Return a list of system processes that need to be closed after the
    # tests are ran.

    startTime = time.clock()
    return processes
Example #16
        def delete_old_data(cls, **kw):
            too_old = datetime.today() - timedelta(days=1)

            query = model.Session.query(cls).autoflush(False).filter(cls.date <= too_old).all()
            for i in query:
                model.Session.delete(i)
            return
Example #17
 def setUp(self):
     self.client = Client()
     self.factory = RequestFactory()
     self.user = User.objects.create_user(username="******", email="*****@*****.**", password="******")
     self.today = datetime.today()
     # strftime returns a new string rather than modifying in place; keep the result
     self.today = self.today.strftime('%Y-%m-%d')
Example #18
	def get(self):

		updatedb_name = self.request.get('updatedb_name')
		update = Update(parent=updatedb_key(updatedb_name))
		UpdatePurge = Update.all()

		logging.info('Entered the Purge')

		"""
		for x,y in enumerate(UpdatePurge):
			delta = datetime.today() - UpdatePurge[x].Date
			
			if((UpdatePurge[x].Status == 'Up') and (delta.days <= 1)):
				logging.info(UpdatePurge[x].Status)
				#db.delete(UpdatePurge[x])
				Update.delete(UpdatePurge[x])

		
		"""
		delta = datetime.today() - timedelta(minutes=+20)
		logging.info(delta)
		remove = update.gql("WHERE Date <= :Date AND Status = :Status", Date=delta, Status='Up')
		db.delete(remove)
		
		logging.info('The purge has ended')
Example #19
    def gerrit_search(self, instance, owner=None, reviewer=None):
        max_age = datetime.today() - self.modified_after
        max_age = max_age.days * 24 * 3600 + max_age.seconds

        # See https://review.openstack.org/Documentation/cmd-query.html
        # Gerrit doesn't allow filtering by created time, only modified time.
        user_filter = "owner:%s" % owner if owner else "reviewer:%s" % reviewer
        gquery_cmd = [
            "ssh",
            "-p",
            str(instance["port"]),
            instance["url"],
            "gerrit",
            "query",
            "--format",
            "JSON",
            "--comments",
            "--",
            "-age:%ss" % str(max_age),
            user_filter,
        ]
        [stdout, _] = subprocess.Popen(gquery_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
        issues = str(stdout).split("\n")[:-2]
        issues = map(json.loads, issues)

        # TODO(cjhopman): should we filter abandoned changes?
        issues = map(self.process_gerrit_issue, issues)
        issues = filter(self.filter_issue, issues)
        issues = sorted(issues, key=lambda i: i["modified"], reverse=True)

        return issues
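The max_age arithmetic above is an expanded form of timedelta.total_seconds() (it simply drops the microseconds); a minimal sketch of the shorter spelling, with modified_after made up for illustration:

from datetime import datetime, timedelta

modified_after = datetime.today() - timedelta(days=7)  # hypothetical cut-off
max_age = int((datetime.today() - modified_after).total_seconds())
age_filter = "-age:%ss" % max_age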
Example #20
    def sweep(self):
        mc = ManagementContainer.getInstance()
        pm = mc.getPartitionManager()

        sweepStart = datetime.today()
        purgeRan = False

        p = self.getNextPartition()

        while p is not None and p > 0:
            log("Sweep: checking partition", p.getId())

            if self.isOKToPurge(p):
                # purge is optimistic and always runs setup before checking if it can publish
                purgeId = self.setupPurge(p)
                # check if ok to publish and publish when ok to do so
                if self.isOKToStartNewPurge(pm.listPartitions()):
                    spoolDir = os.path.join(
                        PurgeController(p, None).getSpoolDirectory().getCanonicalPath(), "dispatcher"
                    )
                    if self.purgePartition(purgeId, p, spoolDir):
                        purgeRan = True
            else:
                log("Sweep: skipping partition", p.getId())
                mc.getActiveMailboxStoreManager().markDispatched(p.getId(), 0)

            p = self.getNextPartition()

        log("Sweep: completed")

        return purgeRan
Example #21
 def clean_data(self,cr,uid,ids,context=None):
     _logger.info("Clean_data")
     try:
         aujourdhui = datetime.today()
         semaine=timedelta(weeks=1)
         date=aujourdhui - semaine
         idProd=self.pool.get('product.product').search(cr,uid,['|',('active','=',True),('active','=',False),('product_tmpl_id.ingram','=',True),('product_tmpl_id.last_synchro_ingram','<',date)],order='id')
         delete=0
         undelete=0
         use=0
         for i in idProd:
             ids1=self.pool.get('sale.order.line').search(cr,uid,[('product_id','=',i)])
             ids2=self.pool.get('purchase.order.line').search(cr,uid,[('product_id','=',i)])
             ids3=self.pool.get('procurement.order').search(cr,uid,[('product_id','=',i)])
             ids4=self.pool.get('stock.move').search(cr,uid,[('product_id','=',i)])
             ids5=self.pool.get('account.invoice.line').search(cr,uid,[('product_id','=',i)])
             if not ids1 and not ids2 and not ids3 and not ids4 and not ids5:
                 try:
                     self.pool.get('product.product').unlink(cr,uid,[i])
                     delete+=1
                 except:
                     _logger.info('Delete impossible')
                     undelete+=1
             else:
                 self.pool.get('product.product').write(cr,uid,i,{'active':False})
                 use+=1
         _logger.info('Products deleted : %s'%(delete))
         _logger.info('Products not deleted : %s'%(use))
         _logger.info('product cleaned')
         return True
     except:
         _logger.error("Erreur Clean_data")
         self.sendTextMail(cr,uid,ids,"Error cleaning products","An error occurred during the cleaning.\n\nDetails: \n\t %s" %(sys.exc_info()[0]))
         return False
Example #22
    def encabezado(self, obj):

        nacimiento = datetime.strptime(obj.employee_id.birthday, '%Y-%m-%d')
        hoy = datetime.today()

        anios = hoy.year - nacimiento.year
        if hoy.month < nacimiento.month:
            anios -= 1
        elif hoy.month == nacimiento.month and hoy.day < nacimiento.day:
            anios -= 1

        if obj.employee_id.gender == 'male':
            genero = 'Masculino'
        else:
            genero = 'Femenino'

        contrato_ids = self.pool.get('hr.contract').search(self.cr, self.uid, [('employee_id', '=', obj.employee_id.id)])
        for contrato_id in contrato_ids:
            contrato = self.pool.get('hr.contract').browse(self.cr, self.uid, contrato_id)
            fecha_inicio = contrato.date_start
            fecha_fin = contrato.date_end


        if obj.numero_fila == 1:
            return {'nombre':obj.employee_id.name, 'anios': anios, 'genero': genero, 'nacionalidad': obj.employee_id.country_id.name, 'ocupacion': obj.employee_id.job_id.name, 'afiliacion_igss': obj.employee_id.ssnid, 'documento_identificacion': obj.employee_id.identification_id, 'fecha_inicio': fecha_inicio, 'fecha_fin': fecha_fin}
        return {}
Example #23
 def fetch_humans(self):
     #use current date for demo purposes only
     today_date = datetime.today().date().isoformat()
     tot_humans = self.humansdb.fetch_humans(today_date)
     if tot_humans:
         return tot_humans[0]
     return 0
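datetime.today().date() gives the same calendar date as date.today(); a one-line sketch of the more direct spelling:

from datetime import date

today_date = date.today().isoformat()  # e.g. '2024-01-31'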
Example #24
def rapport(request):

    if 'date' in request.GET:
        date = request.GET['date']
        date = datetime.strptime(date,"%Y-%m-%d")
    else:
        date = datetime.today()

    lendemain = date + timedelta(days=1)

    # set both dates to midnight
    date = date.replace(hour=0, minute=0, second=0)
    lendemain = lendemain.replace(hour=0, minute=0, second=0)

    ajoutes = Exemplaire.objects.all().filter(date_creation__gt=date,
                                              date_creation__lt=lendemain)
    factures = Facture.objects.all().filter(date_creation__gt=date,
                             date_creation__lt=lendemain)

    nb_ajoutes = ajoutes.count()
    nb_factures = factures.count()
    nb_vendus = sum([f.nb_livres() for f in factures])
    prix_total_vendu = sum([f.prix_total() for f in factures])

    context = {
        'nb_ajoutes':nb_ajoutes,
        'nb_factures':nb_factures,
        'date':date.date(),
        'nb_vendus':nb_vendus,
        'prix_total_vendu':prix_total_vendu,
    }

    return render_to_response('encefal/rapport.html', context)
Example #25
  def gerrit_search(self, instance, owner=None, reviewer=None):
    max_age = datetime.today() - self.modified_after
    max_age = max_age.days * 24 * 3600 + max_age.seconds

    # See https://review.openstack.org/Documentation/cmd-query.html
    # Gerrit doesn't allow filtering by created time, only modified time.
    user_filter = 'owner:%s' % owner if owner else 'reviewer:%s' % reviewer
    gquery_cmd = ['ssh', '-p', str(instance['port']), instance['url'],
                  'gerrit', 'query',
                  '--format', 'JSON',
                  '--comments',
                  '--',
                  '-age:%ss' % str(max_age),
                  user_filter]
    [stdout, _] = subprocess.Popen(gquery_cmd, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE).communicate()
    issues = str(stdout).split('\n')[:-2]
    issues = map(json.loads, issues)

    # TODO(cjhopman): should we filter abandoned changes?
    issues = map(self.process_gerrit_issue, issues)
    issues = filter(self.filter_issue, issues)
    issues = sorted(issues, key=lambda i: i['modified'], reverse=True)

    return issues
Example #26
    def get(self):
        getDate = datetime.now() + timedelta(hours=8) + timedelta(days=1)
        fetchContents = SPContents.query(ndb.AND(
                SPContents.postStatus == "Pending",
                SPContents.postDate > getDate
            )
            ).order(SPContents.postDate).fetch()
        fetchToday = SPContents.query(
            ndb.AND(
                SPContents.postStatus == "Pending",
                ndb.AND(
                    SPContents.postDate >= datetime.today(),
                    SPContents.postDate <= getDate
                    )
                )
            ).order(SPContents.postDate).fetch()
        getUser = self.request.cookies.get('socC')
        getID = self.request.cookies.get('socA')

        if getUser == "Guest" or not getUser:
            page = JINJA_ENV.get_template('pages/indexLoggedIn.html')
            self.response.write(page.render())
        else:
            values = {
                'fetchedPending': fetchContents,
                'fetchedToday': fetchToday,
                'userID' : getID
            }
            page = JINJA_ENV.get_template('pages/index.html')
            self.response.write(page.render(values))
Example #27
def send_email(hopper, feedtime):
    feedtime = feedtime.strftime("%a %b %d at %I:%M%p")
    d2 = datetime.today() + timedelta(days=1)
    tomorrowfeedtime = location.dusk(local=True, date=d2)
    tomorrowfeedtime = tomorrowfeedtime.strftime("%a %b %d at %I:%M%p")  # %d (day of month), not %m (month)
    to = email_to
    gmail_user = email_gmail_user
    gmail_pwd = email_gmail_pwd
    smtpserver = smtplib.SMTP("smtp.gmail.com", 587)
    smtpserver.ehlo()
    smtpserver.starttls()
    smtpserver.ehlo()
    smtpserver.login(gmail_user, gmail_pwd)
    header = (
        "To:" + to + "\n" + "From: " + gmail_user + "\n" + "Subject: CatFeeder: I fed the cats on " + feedtime + "\n"
    )
    # print header
    msg = (
        header
        + "\nI fed the cats using "
        + hopper
        + " hopper(s)\n\nThey were fed on "
        + feedtime
        + ".\n\nI will automaticly fed the cats again on "
        + tomorrowfeedtime
        + ".\n\n"
    )
    print msg
    smtpserver.sendmail(gmail_user, to, msg)
    print "email sent!"
    smtpserver.close()
Example #28
def XmmFromLogs(t):
    ydate = (datetime.today() - datetime.utcfromtimestamp(0)).days - 1
    filenames = next(os.walk(logsPath))[2]
    for x in filenames:
        ldate = x
        if ldate == str(ydate):
            fpath = x
        #
    #
    yET = json.load(open(ETPath + '/' + fpath))
    tET = [0] * len(yET)
    logs = json.load(open(logsPath + '/' + fpath))
    if debug != 0: print >>sys.stderr, "E: json load %s/%s (%s)" % (logsPath, fpath, logs)
    l = len(t['mmTime'])
    ydur = [-1] * l
    ymm = [-1] * l
    for x in logs:
        if int(x[0]) == pid:
            ydur[safe_int(x[1])] += safe_int(x[2])
        #
    #
    for x in range(l):
        if t['mmTime'][x]:
            ymm[x] = round(safe_float(yET[safe_int(t['crop'][x])]) - ydur[x] / safe_float(t['mmTime'][x]), 4) * -1
            tET[int(t['crop'][x])] = ymm[x]
        else:
            ymm[x] = 0
        #
    #
    return (ymm, tET)
Example #29
def parse_end_datetime(string_date):
    if string_date != "":
        date_s = parse_date(string_date)
        end_datetime = datetime.datetime(date_s.year, date_s.month, date_s.day, 23, 59, 59)
    else:
        end_datetime = datetime.datetime.today()  # match the module-style datetime.datetime usage above
    return end_datetime
Example #30
def calendar_dev(request):
    if 'uid' in request.session:
        uid = request.session['uid']
        logged_in = 'none'
        user = get_object_or_404(User, id=uid)
    
    from datetime import datetime
    from datetime import timedelta
    if 'c_created' in request.session:
        get_c_data = (datetime.strptime(request.session['c_created'], '%Y-%m-%dT%H') + timedelta(hours=1)) < datetime.now() 
    else:
        get_c_data = True 
    if get_c_data:
        raw_data = {}
        from .models import Calendar
        date_list = []
        for entry in Calendar.objects.all():
            cur_date = datetime.strptime(entry.date,'%Y-%m-%d')
            print cur_date.strftime('%a, %b %d %Y')
            if cur_date >= datetime.today() - timedelta(2) and cur_date not in date_list:
                date_list.append(cur_date)
            # if len(entry.show_desc) > 500:
            #     entry.show_desc = entry.show_desc[:470]+'...'
            
            if entry.date not in raw_data:
                raw_data[entry.date] = []

            raw_data[entry.date].append({'date':entry.date,
                             'band':entry.show,
                             'support':'support',
                             'venue':entry.venue,
                             'tickets':entry.tickets,
                             'show_link':entry.show_link,
                             'db_id':str(entry.id),
                             'pic_url':entry.pic_url,
                             'show_desc':entry.show_desc})

        cal_data = []
        date_list.sort()
        for date in date_list:
            cal_data.append({'date':date.strftime('%a, %b %d %Y'),'shows':raw_data[date.strftime('%Y-%m-%d')]})
            
  
        request.session['data'] = cal_data
        request.session['c_created'] = datetime.now().strftime('%Y-%m-%dT%H')

    if request.method == "POST":
        if request.POST['action'] == 'plus':
            db_id = request.POST['db_id']
            
            profile = Profile.objects.get(user_auth_id=uid)
            cal_saves = ast.literal_eval(profile.cal_saves)
            if db_id not in cal_saves:
                cal_saves.append(db_id)
            profile.cal_saves = str(cal_saves)
            print profile.user_auth_id
            print profile.cal_saves
            profile.save()
    
    return render(request, 'calendar_dev.html', 
                 {'json_data':json.dumps(request.session['data']),})
Example #31
def main():
    # Silence upload.py.
    rietveld.upload.verbosity = 0

    parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
    parser.add_option('-u',
                      '--user',
                      metavar='<email>',
                      default=os.environ.get('USER'),
                      help='Filter on user, default=%default')
    parser.add_option('-b',
                      '--begin',
                      metavar='<date>',
                      help='Filter issues created after the date (mm/dd/yy)')
    parser.add_option('-e',
                      '--end',
                      metavar='<date>',
                      help='Filter issues created before the date (mm/dd/yy)')
    quarter_begin, quarter_end = get_quarter_of(datetime.today() -
                                                relativedelta(months=2))
    parser.add_option(
        '-Q',
        '--last_quarter',
        action='store_true',
        help='Use last quarter\'s dates, i.e. %s to %s' %
        (quarter_begin.strftime('%Y-%m-%d'), quarter_end.strftime('%Y-%m-%d')))
    parser.add_option('-Y',
                      '--this_year',
                      action='store_true',
                      help='Use this year\'s dates')
    parser.add_option('-w',
                      '--week_of',
                      metavar='<date>',
                      help='Show issues for week of the date (mm/dd/yy)')
    parser.add_option(
        '-W',
        '--last_week',
        action='count',
        help='Show last week\'s issues. Use more times for more weeks.')
    parser.add_option(
        '-a',
        '--auth',
        action='store_true',
        help='Ask to authenticate for instances with no auth cookie')

    activity_types_group = optparse.OptionGroup(
        parser, 'Activity Types',
        'By default, all activity will be looked up and '
        'printed. If any of these are specified, only '
        'those specified will be searched.')
    activity_types_group.add_option('-c',
                                    '--changes',
                                    action='store_true',
                                    help='Show changes.')
    activity_types_group.add_option('-i',
                                    '--issues',
                                    action='store_true',
                                    help='Show issues.')
    activity_types_group.add_option('-r',
                                    '--reviews',
                                    action='store_true',
                                    help='Show reviews.')
    parser.add_option_group(activity_types_group)

    output_format_group = optparse.OptionGroup(
        parser, 'Output Format',
        'By default, all activity will be printed in the '
        'following format: {url} {title}. This can be '
        'changed for either all activity types or '
        'individually for each activity type. The format '
        'is defined as documented for '
        'string.format(...). The variables available for '
        'all activity types are url, title and author. '
        'Format options for specific activity types will '
        'override the generic format.')
    output_format_group.add_option(
        '-f',
        '--output-format',
        metavar='<format>',
        default=u'{url} {title}',
        help='Specifies the format to use when printing all your activity.')
    output_format_group.add_option(
        '--output-format-changes',
        metavar='<format>',
        default=None,
        help='Specifies the format to use when printing changes. Supports the '
        'additional variable {reviewers}')
    output_format_group.add_option(
        '--output-format-issues',
        metavar='<format>',
        default=None,
        help='Specifies the format to use when printing issues. Supports the '
        'additional variable {owner}.')
    output_format_group.add_option(
        '--output-format-reviews',
        metavar='<format>',
        default=None,
        help='Specifies the format to use when printing reviews.')
    output_format_group.add_option(
        '--output-format-heading',
        metavar='<format>',
        default=u'{heading}:',
        help='Specifies the format to use when printing headings.')
    output_format_group.add_option(
        '-m',
        '--markdown',
        action='store_true',
        help='Use markdown-friendly output (overrides --output-format '
        'and --output-format-heading)')
    parser.add_option_group(output_format_group)
    auth.add_auth_options(parser)

    # Remove description formatting
    parser.format_description = (lambda _: parser.description)  # pylint: disable=E1101

    options, args = parser.parse_args()
    options.local_user = os.environ.get('USER')
    if args:
        parser.error('Args unsupported')
    if not options.user:
        parser.error('USER is not set, please use -u')

    options.user = username(options.user)

    if not options.begin:
        if options.last_quarter:
            begin, end = quarter_begin, quarter_end
        elif options.this_year:
            begin, end = get_year_of(datetime.today())
        elif options.week_of:
            begin, end = (get_week_of(
                datetime.strptime(options.week_of, '%m/%d/%y')))
        elif options.last_week:
            begin, end = (
                get_week_of(datetime.today() -
                            timedelta(days=1 + 7 * options.last_week)))
        else:
            begin, end = (get_week_of(datetime.today() - timedelta(days=1)))
    else:
        begin = datetime.strptime(options.begin, '%m/%d/%y')
        if options.end:
            end = datetime.strptime(options.end, '%m/%d/%y')
        else:
            end = datetime.today()
    options.begin, options.end = begin, end

    if options.markdown:
        options.output_format = ' * [{title}]({url})'
        options.output_format_heading = '### {heading} ###'

    print 'Searching for activity by %s' % options.user
    print 'Using range %s to %s' % (options.begin, options.end)

    my_activity = MyActivity(options)

    if not (options.changes or options.reviews or options.issues):
        options.changes = True
        options.issues = True
        options.reviews = True

    # First do any required authentication so none of the user interaction has to
    # wait for actual work.
    if options.changes:
        my_activity.auth_for_changes()
    if options.reviews:
        my_activity.auth_for_reviews()

    print 'Looking up activity.....'

    try:
        if options.changes:
            my_activity.get_changes()
        if options.reviews:
            my_activity.get_reviews()
        if options.issues:
            my_activity.get_issues()
    except auth.AuthenticationError as e:
        print "auth.AuthenticationError: %s" % e

    print '\n\n\n'

    my_activity.print_changes()
    my_activity.print_reviews()
    my_activity.print_issues()
    return 0
Example #32
 def dateFormat():
     # "HH-mm-ss" are not strftime directives and would appear literally in the output;
     # use %H-%M-%S and return the formatted string
     return datetime.today().strftime("%Y-%m-%d %H-%M-%S")
Example #33
	  "From: [email protected]",
	  "To: "+toaddrs,
	  "Subject: Brthday reminder from Minja",
	  "Cc: "+",".join(cc),
	  msg
	  ])

	server.sendmail(fromaddr, toaddrs, msg)
	server.quit()


curr_birsdays=[]
all_mails=[]
with open() as f:
	for line in f:
		line=line.strip().split("\t")
		if len(line) != 7:
			print "Warning, wrong line",line
			continue
		try:
			month = int(line[4])
			day = int(line[5])
			today = datetime.today()
			# a birthday is current when its month and day match today's date
			if month == today.month and day == today.day:
				curr_birsdays.append(line[1:])
			all_mails.append(line[-1])
		except ValueError:
			print "Warning, bad date in line", line
			continue

for b in curr_birsdays:
#	send_mail([m for m in all_mails if m != b[-1]]," ".join(b[:-1]))
	send_mail(["*****@*****.**","*****@*****.**"]," ".join(b[:-1]))
Example #34
        camera.resolution = (1024, 768)
        # Permit the Pi camera to warm up for a few seconds.
        sleep(2)
        # Capture a photo with the current timestamp, flip it and save it in the photo folder.
        camera.capture(DEFAULT_PHOTO_PATH + timestamp + '.jpg')
        photo = Image.open(DEFAULT_PHOTO_PATH + timestamp + '.jpg')
        photo_corrected = photo.rotate(180)
        photo_corrected.save(DEFAULT_PHOTO_PATH + timestamp + '.jpg')
        print("[PICAM] - Captured image!" + DEFAULT_PHOTO_PATH + timestamp + ".jpg")
    pass

    return DEFAULT_PHOTO_PATH + timestamp + '.jpg'


# Time method created by Joe Holloway - https://stackoverflow.com/a/10048290
# Ensures the current time is between a certain period.
def is_time_between(begin_time, end_time, check_time=None):
    check_time = check_time or datetime.utcnow().time()
    if begin_time < end_time:
        return begin_time <= check_time <= end_time
    else:
        return check_time >= begin_time or check_time <= end_time
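The else branch above handles windows that wrap past midnight; a quick check of both branches, assuming the function is importable:

from datetime import time

print(is_time_between(time(9, 30), time(18, 30), time(12, 0)))  # True: inside a same-day window
print(is_time_between(time(22, 0), time(6, 0), time(23, 30)))   # True: window wraps midnight
print(is_time_between(time(22, 0), time(6, 0), time(12, 0)))    # False: outside the wrapped window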


if __name__ == '__main__':
    # Constant resource, with the measurement entry point working during the working week only.
    while True:
        while is_time_between(time(9, 30), time(18, 30)) and datetime.today().weekday() < 5:
            main()
            sleep(1200)
Example #35
    for i in range(len(tablaSec[c])):
        tablaSec[c][i] = tablaSec[c][i][0]
    print(c, end=' | ')

tablaPrin = pd.merge(tablaPrin,
                     tablaSec,
                     how='left',
                     left_on='CODIGO_CLIENTE',
                     right_on='CODIGO_CLIENTE')
print('\nCalculado Promedio de Ventas Mensual')

# DÍAS FACTURA VENCIDA

# In[96]:

hoy = datetime.today().date()

campos = ['CODIGO_CLIENTE', 'min(FECHA_VENCIMIENTO)']
nombreTabla = 'CLNT_Factura_Principal'
#whereStatement="WHERE EMPRESA= 'CECO2' and ANULADA=False and VALOR_TOTAL_SALDO_A_COBRAR>0 and FECHA_VENCIMIENTO<='{blanco}'".format(blanco=hoy)
whereStatement = "WHERE EMPRESA= 'CECO2' and ANULADA=False and VALOR_TOTAL_SALDO_A_COBRAR>0 "
groupStatement = "GROUP BY CODIGO_CLIENTE"
orderStatement = ""

tablaSec = pd.DataFrame()
for c in campos:
    tablaSec[c] = leerTablas(nombreTabla, c, whereStatement, groupStatement,
                             orderStatement)
    for i in range(len(tablaSec[c])):
        tablaSec[c][i] = tablaSec[c][i][0]
    print(c, end=' | ')
Example #36
    def estadistica(self, request):
        inicio = self.request.query_params.get('inicio', None)
        fin = self.request.query_params.get('fin', None)
        perfil = self.request.query_params.get('perfil', None)
        region = self.request.query_params.get('region', None)

        # eventos = models.EventoDeRobotica.objects.filter(fecha__range=(inicio, fin))
        eventos = models.EventoDeRobotica.objects.all()

        if region:
            eventos = eventos.filter(
                escuela__localidad__distrito__region__numero=region)

        if perfil:
            usuario = models.Perfil.objects.get(
                id=perfil)  # the logged-in user
            eventos = eventos.filter(
                Q(tallerista=usuario) | Q(acompaniantes=usuario)).distinct()

        total = eventos.count()
        conActaNueva = eventos.filter(acta__gt='').count()
        conActa = conActaNueva
        sinActa = total - conActa

        from datetime import datetime, timedelta

        day = str(datetime.today().strftime("%d/%b/%Y"))
        dt = datetime.strptime(day, '%d/%b/%Y')
        start = dt - timedelta(days=dt.weekday())
        end = start + timedelta(days=6)

        talleresCreadosEstaSemana = eventos.filter(
            fecha_de_creacion__range=(start, end))
        talleresProgramadosParaEstaSemana = eventos.filter(fecha__range=(start,
                                                                         end))

        totalDeTalleres = models.EventoDeRobotica.objects.all().exclude(
            escuela__localidad__distrito__region__numero=None)
        region1 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=1)
        region2 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=2)
        region3 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=3)
        region4 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=4)
        region5 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=5)
        region6 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=6)
        region7 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=7)
        region8 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=8)
        region9 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=9)
        region10 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=10)
        region11 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=11)
        region12 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=12)
        region13 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=13)
        region14 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=14)
        region15 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=15)
        region16 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=16)
        region17 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=17)
        region18 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=18)
        region19 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=19)
        region20 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=20)
        region21 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=21)
        region22 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=22)
        region23 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=23)
        region24 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=24)
        region25 = models.EventoDeRobotica.objects.filter(
            escuela__localidad__distrito__region__numero=25)

        estadisticas = {
            "totales": [
                {
                    "name": "Total",
                    "count": total,
                    "porcentaje": 100
                },
            ],
            "estado": [{
                "name": "Finalizados",
                "count": conActa,
                "porcentaje": round(((conActa * 100.00) / total), 2)
            }, {
                "name": "Abiertos",
                "count": sinActa,
                "porcentaje": round(((sinActa * 100.00) / total), 2)
            }],
            "talleresCreadosEstaSemana":
            talleresCreadosEstaSemana.count(),
            "talleresProgramadosParaEstaSemana":
            talleresProgramadosParaEstaSemana.count(),
            # "region1": [
            #     {
            #         "name": "Finalizados",
            #         "count": region1.filter(cerrar_evento=False).count(),
            #         "porcentaje": round((((region1.filter(cerrar_evento=False).count()) * 100.00) / region1.count()),2)
            #     },
            #     {
            #         "name": "Abiertos",
            #         "count": region1.filter(cerrar_evento=True).count(),
            #         "porcentaje": round((((region1.filter(cerrar_evento=True).count()) * 100.00) / region1.count()),2)
            #     }
            # ],
            # "region2": [
            #     {
            #         "name": "Finalizados",
            #         "count": region2.filter(cerrar_evento=False).count(),
            #         "porcentaje": round((((region2.filter(cerrar_evento=False).count()) * 100.00) / region2.count()),2)
            #     },
            #     {
            #         "name": "Abiertos",
            #         "count": region2.filter(cerrar_evento=True).count(),
            #         "porcentaje": round((((region2.filter(cerrar_evento=True).count()) * 100.00) / region2.count()),2)
            #     }
            # ],
            # "region3": [
            #     {
            #         "name": "Finalizados",
            #         "count": region3.filter(cerrar_evento=False).count(),
            #         "porcentaje": round((((region3.filter(cerrar_evento=False).count()) * 100.00) / region3.count()),2)
            #     },
            #     {
            #         "name": "Abiertos",
            #         "count": region3.filter(cerrar_evento=True).count(),
            #         "porcentaje": round((((region3.filter(cerrar_evento=True).count()) * 100.00) / region3.count()),2)
            #     }
            # ],
            # "region4": [
            #     {
            #         "name": "Finalizados",
            #         "count": region4.filter(cerrar_evento=False).count(),
            #         "porcentaje": round((((region4.filter(cerrar_evento=False).count()) * 100.00) / region4.count()),2)
            #     },
            #     {
            #         "name": "Abiertos",
            #         "count": region4.filter(cerrar_evento=True).count(),
            #         "porcentaje": round((((region4.filter(cerrar_evento=True).count()) * 100.00) / region4.count()),2)
            #     }
            # ],
            "porRegion": [{
                "region":
                "1",
                "total":
                region1.count(),
                "abiertos":
                region1.filter(cerrar_evento=False).count(),
                "finalizados":
                region1.filter(cerrar_evento=True).count()
            }, {
                "region":
                "2",
                "total":
                region2.count(),
                "abiertos":
                region2.filter(cerrar_evento=False).count(),
                "finalizados":
                region2.filter(cerrar_evento=True).count()
            }, {
                "region":
                "3",
                "total":
                region3.count(),
                "abiertos":
                region3.filter(cerrar_evento=False).count(),
                "finalizados":
                region3.filter(cerrar_evento=True).count()
            }, {
                "region":
                "4",
                "total":
                region4.count(),
                "abiertos":
                region4.filter(cerrar_evento=False).count(),
                "finalizados":
                region4.filter(cerrar_evento=True).count()
            }, {
                "region":
                "5",
                "total":
                region5.count(),
                "abiertos":
                region5.filter(cerrar_evento=False).count(),
                "finalizados":
                region5.filter(cerrar_evento=True).count()
            }, {
                "region":
                "6",
                "total":
                region6.count(),
                "abiertos":
                region6.filter(cerrar_evento=False).count(),
                "finalizados":
                region6.filter(cerrar_evento=True).count()
            }, {
                "region":
                "7",
                "total":
                region7.count(),
                "abiertos":
                region7.filter(cerrar_evento=False).count(),
                "finalizados":
                region7.filter(cerrar_evento=True).count()
            }]
        }
        return Response(estadisticas)
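The 25 hand-written regionN querysets (only the first seven of which reach the response) could be generated in a loop; a sketch of the same porRegion payload built that way, reusing only names already present in the view:

por_region = []
for numero in range(1, 8):
    qs = models.EventoDeRobotica.objects.filter(
        escuela__localidad__distrito__region__numero=numero)
    por_region.append({
        "region": str(numero),
        "total": qs.count(),
        "abiertos": qs.filter(cerrar_evento=False).count(),
        "finalizados": qs.filter(cerrar_evento=True).count(),
    })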
Example #37
         html.H4('Enter Stock (make sure it is the ticker symbol):',
                 style={'paddingRight': '30px'}),
         html.H4('Enter single/multiple month(s) for analysis:')
     ],
     style={
         'display': 'inline-block',
         'verticalAlign': 'top',
         'width': '30%'
     }),
 html.Div([
     html.
      H4('Enter start/end date: (please include an entire year for proper analysis)'
        ),
     dcc.DatePickerRange(id='my_date_picker',
                         min_date_allowed=datetime(1990, 1, 1),
                         max_date_allowed=datetime.today(),
                         start_date=datetime(2010, 1, 1),
                         end_date=datetime.today()),
     dcc.Dropdown(id='my_ticker_symbol',
                  options=options,
                  value='AMZN',
                  multi=False,
                  style={'margin-top': '30px'}),
     dcc.Dropdown(id='my_month',
                  options=options1,
                  value=['January'],
                  multi=True,
                  style={'margin-top': '30px'})
 ],
          style={
              'display': 'inline-block',
Example #38
def settings_import_delicious():
    from BeautifulSoup import BeautifulSoup
    import datetime

    if (is_logged() and request.method == 'POST'):
        user = get_user()
        f = request.files['f']

        if (f and allowed_file_delicious(f.filename)):
            soup = BeautifulSoup(f.read())

            for item in soup.findAll(['dt']):
                i_title = None
                i_link = None
                private = False
                tags = None
                created_at = None
                content = ''

                if (item.nextSibling.name == 'dd'):
                    content = item.nextSibling.contents[0]

                for link in item.findAll(['a']):
                    if (link.has_key('href')):
                        i_title = unicode(link.contents[0])
                        i_link = link['href']

                        if (link.has_key('private')):
                            private = link['private']

                            if (private == '0'):
                                private = False
                            else:
                                private = True
                        else:
                            private = True

                        if (link.has_key('tags')):
                            tags = link['tags']
                        else:
                            tags = ''

                        if (link.has_key('add_date')):
                            created_at = datetime.datetime.fromtimestamp(
                                int(link['add_date']))
                        else:
                            created_at = datetime.datetime.today()

                if (i_title and i_link and created_at):
                    new_note = Note(user=user,
                                    title=i_title,
                                    content=content,
                                    kind='bookmark',
                                    private=private,
                                    done=False,
                                    url=i_link,
                                    tags=tags,
                                    created_at=created_at,
                                    updated_at=created_at,
                                    synced=False)
                    new_note.save()
                    new_note.save_tags()

            flash('Your bookmarks have been imported!', 'notice')
            return redirect('/notes')
        else:
            flash('That type of file is not allowed.', 'error')
            return redirect('/settings')
    else:
        flash('Something has gone wrong, please try again.', 'error')
        return redirect('/settings')
Example #39
        print("Laufe seit {} Tagen".format(lauf_tage))

    print("\nEnde testtime")


#

# *************************************************
# Program starts here
# *************************************************

if __name__ == '__main__':
    #
    options = argu()
    start_day = date.today()

    start_time_str = "%s %s" % (datetime.today().strftime("%A"),
                                datetime.now().strftime("%d.%m.%Y %H.%M"))
    start_time = datetime.now()

    print("Start time_test: start um: {}".format(start_time_str))
    print("Start time_test: start um: {}".format(start_time))  # do the work

    test_time()

    sys.exit(0)
#**************************************************************
#  That is the end
#***************************************************************
#
Example #40
def search():
    """ renders map and runs based on search criteria """
    try:
        if session["user"]:
            if request.method == "POST":
                querylocation = request.form.get("location")
                formmindate = request.form.get("mindate")
                querymindate = datetime.strptime(formmindate, "%Y-%m-%d")
                formmaxdate = request.form.get("maxdate")
                querymaxdate = datetime.strptime(
                    formmaxdate, "%Y-%m-%d") + timedelta(days=1)
                querymindistance = int(request.form.get("mindistance"))
                querymaxdistance = int(request.form.get("maxdistance"))

                runs = list(
                    mongo.db.runs.find({
                        "$and": [{
                            "city": querylocation
                        }, {
                            "$and": [{
                                "timestamp": {
                                    "$gte": querymindate
                                }
                            }, {
                                "timestamp": {
                                    "$lte": querymaxdate
                                }
                            }]
                        }, {
                            "$and": [{
                                "intdistance": {
                                    "$gte": querymindistance
                                }
                            }, {
                                "intdistance": {
                                    "$lte": querymaxdistance
                                }
                            }]
                        }]
                    }).sort("timestamp", 1))

                if len(runs) == 0:
                    flash("No runs found with such criteria",
                          "mainwindowmessage")
                    return redirect(url_for("show_map", user=session["user"]))
                else:
                    user = mongo.db.users.find_one({"email": session['user']})
                    flash("Filter Active", "mainwindowmessage")
                    return render_template("searchresults.html",
                                           runs=runs,
                                           user=user,
                                           isActiveFilter=True)

            user = mongo.db.users.find_one({"email": session['user']})
            runs = list(
                mongo.db.runs.find({
                    "$and": [{
                        "city": user['location']
                    }, {
                        "timestamp": {
                            "$gte": datetime.today()
                        }
                    }]
                }).sort("timestamp", 1))

            return render_template("search.html",
                                   runs=runs,
                                   user=user,
                                   isActiveFilter=False)
    except:
        return render_template("login.html", isLogin=True)

    return render_template("login.html", isLogin=True)
Example #41
 def record_trade(self, symbol, quantity, indicator, trade_price, timestamp=None):
     # a default of datetime.today() would be evaluated once, at function definition
     # time; use None as a sentinel and resolve the timestamp per call instead
     if timestamp is None:
         timestamp = datetime.today()
     new_vals = pd.DataFrame({'symbol': symbol, 'quantity': quantity, 'indicator': indicator, 'price': trade_price},
                             index=pd.to_datetime([timestamp]))
     self.trade_data = self.trade_data.append(new_vals)
Example #42
def edit_run(run_id):
    """ edits a run based on the input of the edit run form, 
    parses text time into actual timestamp & translates the locations
    to coordinates for use on the map """
    try:
        if session["user"]:
            if request.method == "POST":
                levelrestriction = "on" if request.form.get(
                    "levelrestriction") else "off"

                formrundate = request.form.get("date")
                rundate = datetime.strptime(formrundate, "%Y-%m-%d")
                rundatestring = rundate.strftime("%d-%m-%Y")
                formruntime = "{}:{}".format(request.form.get("hour"),
                                             request.form.get("minute"))

                runtime = datetime.strptime(formruntime, "%H:%M")
                runtimestring = runtime.strftime("%H:%M")
                runtimestamp = datetime.strptime(
                    rundatestring + ", " + runtimestring, "%d-%m-%Y, %H:%M")

                runcity = request.form.get("city")
                runcity_geocode_result = gmaps.geocode(runcity)
                runcitylat = runcity_geocode_result[0]["geometry"]["location"][
                    "lat"]
                runcitylng = runcity_geocode_result[0]["geometry"]["location"][
                    "lng"]

                meetingpoint = request.form.get(
                    "location") + ", " + request.form.get("city")
                geocode_result = gmaps.geocode(meetingpoint)
                meetingpointlat = geocode_result[0]["geometry"]["location"][
                    "lat"]
                meetingpointlng = geocode_result[0]["geometry"]["location"][
                    "lng"]

                participant = mongo.db.users.find_one(
                    {"email": session['user']}, {
                        "_id": 1,
                        "firstname": 1,
                        "lastname": 1,
                        "initials": 1,
                        "email": 1
                    })
                creatorrunninglevel = mongo.db.users.find_one({
                    "email":
                    session['user']
                }).get('userlevel')

                distance = int(request.form.get("distance")[:2])

                editrun = {
                    "level": creatorrunninglevel,
                    "formrundate": formrundate,
                    "date": rundatestring,
                    "time": runtimestring,
                    "hour": request.form.get("hour"),
                    "minute": request.form.get("minute"),
                    "timestamp": runtimestamp,
                    "location": request.form.get("location"),
                    "city": request.form.get("city"),
                    "runcitylat": runcitylat,
                    "runcitylng": runcitylng,
                    "meetingpointlat": meetingpointlat,
                    "meetingpointlng": meetingpointlng,
                    "distance": request.form.get("distance"),
                    "intdistance": distance,
                    "levelrestriction": levelrestriction,
                    "createdby": session["user"],
                    "createdon": createdon,
                    "participants": [participant]
                }
                if runtimestamp.date() < date.today():
                    flash(
                        "Please make sure your run is scheduled in the future",
                        "mainwindowmessage")
                    return redirect(url_for("edit_run", run_id=run_id))

                mongo.db.runs.update({"_id": ObjectId(run_id)}, editrun)
                flash("Run was succesfully updated", "mainwindowmessage")
                return redirect(url_for("show_map"))

            user = mongo.db.users.find_one({"email": session['user']})
            run = mongo.db.runs.find_one({"_id": ObjectId(run_id)})
            runs = list(
                mongo.db.runs.find({
                    "$and": [{
                        "city": user['location']
                    }, {
                        "timestamp": {
                            "$gte": datetime.today()
                        }
                    }]
                }).sort("timestamp", 1))
            return render_template("editrun.html",
                                   run=run,
                                   runs=runs,
                                   user=user)
    except:
        return render_template("login.html", isLogin=True)

    return render_template("login.html", isLogin=True)
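The date handling in edit_run above formats and re-parses strings before it arrives at the final timestamp. A minimal sketch of the same idea, assuming the form supplies "date", "hour" and "minute" fields in the usual HTML formats, is:

from datetime import datetime

def combine_form_datetime(date_str, hour, minute):
    # date_str is "YYYY-MM-DD" (the HTML date input format); hour and minute
    # arrive as strings from the form
    run_date = datetime.strptime(date_str, "%Y-%m-%d")
    # replace() builds the full timestamp without re-parsing concatenated strings
    return run_date.replace(hour=int(hour), minute=int(minute))

# combine_form_datetime("2024-05-01", "18", "30") -> datetime(2024, 5, 1, 18, 30)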
Example #43
0
"""
are done by January; more info: https://www.nlm.nih.gov/bsd/policy/yep_background.html.
"""

#%%
# ============================================
# Get started
# ============================================

import pandas as pd
from Bio import Entrez
import time
import os
import datetime
from datetime import datetime
from datetime import timedelta
today = datetime.today().strftime('%Y-%m-%d')

# If you want to reset the working directory
# os.chdir('/Users/username/Projects/pubmed')

# Always tell NCBI who you are. Create MyNCBI acct, use address here.
# https://www.ncbi.nlm.nih.gov/myncbi/
Entrez.email = "*****@*****.**"
'''
# If you want the list of database names
handle = Entrez.einfo()
result = handle.read()
handle.close()
print(result)
'''
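The commented block above only lists the available databases. A short follow-up sketch, using the Biopython Entrez search API with a placeholder query term, would fetch matching PubMed IDs:

handle = Entrez.esearch(db="pubmed", term="machine learning[Title]", retmax=20)
record = Entrez.read(handle)
handle.close()
pmids = record["IdList"]  # list of PubMed ID strings for the query
print(len(pmids), "records retrieved of", record["Count"], "total")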
Example #44
0
def loga_metadados(etapa, mtdados, usuario, erro, l_testando):

    if etapa == 'início':
        # Log the collection metadata to an external csv
        erro = '-'
        metadados_inicio = pd.DataFrame([[
            'coleta iniciada',
            datetime.today(),
            datetime.now().time(), data_inicio, data_fim, usuario, ' ',
            variaveis, erro
        ]],
                                        columns=meta_variaveis)
        mtdados = pd.concat([mtdados, metadados_inicio])

        if l_testando:
            mtdados.to_csv('resultados/teste/Log_Metadados_teste.csv',
                           index=False)
        else:
            mtdados.to_csv('resultados/real/Log_Metadados.csv', index=False)

    if etapa == 'fim':
        erro = '-'
        # log the end of the process to the metadata csv
        metadados_fim = pd.DataFrame([[
            'coleta encerrada com sucesso',
            datetime.today(),
            datetime.now().time(), data_inicio, data_fim, usuario,
            tweets_vistos_dia, variaveis, erro
        ]],
                                     columns=meta_variaveis)
        mtdados = pd.concat([mtdados, metadados_fim])

        if l_testando:
            mtdados.to_csv('resultados/teste/Log_Metadados_teste.csv',
                           index=False)
        else:
            mtdados.to_csv('resultados/real/Log_Metadados.csv', index=False)

    if etapa == 'erro':
        metadados_erro = pd.DataFrame([[
            'coleta encerrada com erro',
            datetime.today(),
            datetime.now().time(), data_inicio, data_fim, usuario,
            tweets_vistos, variaveis, erro
        ]],
                                      columns=meta_variaveis)
        mtdados = pd.concat([mtdados, metadados_erro])

        if l_testando:
            mtdados.to_csv('resultados/teste/Log_Metadados_teste.csv',
                           index=False)
        else:
            mtdados.to_csv('resultados/real/Log_Metadados.csv', index=False)

    if etapa == 'limpeza':
        metadados_limpeza = pd.DataFrame([[
            'limpeza de duplicatas',
            datetime.today(),
            datetime.now().time(), data_inicio, data_fim, usuario, duplicatas,
            variaveis, erro
        ]],
                                         columns=meta_variaveis)
        mtdados = pd.concat([mtdados, metadados_limpeza])

        if l_testando:
            mtdados.to_csv('resultados/teste/Log_Metadados_teste.csv',
                           index=False)
        else:
            mtdados.to_csv('resultados/real/Log_Metadados.csv', index=False)

    return mtdados
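A hedged usage sketch for loga_metadados: the function reads several module-level globals (data_inicio, data_fim, variaveis, meta_variaveis, tweets_vistos_dia, tweets_vistos), so the calls below only illustrate the intended start/end flow and assume those globals are already defined.

mtdados = pd.DataFrame(columns=meta_variaveis)
mtdados = loga_metadados('início', mtdados, usuario='coletor', erro='-', l_testando=True)
# ... run the collection ...
mtdados = loga_metadados('fim', mtdados, usuario='coletor', erro='-', l_testando=True)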
Example #45
0
def get_hadoop_listtable(database):
    result = []
    djobtime = datetime.today()
    try:
        message = ""
        messagerow = ""
        connection = im.connect('DSN=Impala', autocommit=True)
        print(connection)
        t0 = time.time()
        now = datetime.now()
        nowts = now.strftime('%Y-%m-%dT%H:%M:%S') + ('-%02d' %
                                                     (now.microsecond / 10000))
        print(nowts + '============= end process source query ============= ')
        print(f"elapsed time {(time.time() - t0):0.1f} seconds")

        print(nowts +
              '============= start process target  query ============= ')
        sqltable = "show tables in " + database
        print('============= sql command ============= ' + '\n' + sqltable)

        cursorresult = connection.cursor()
        cursorresult.execute(sqltable)
        # keep the raw table rows in their own variable so the per-table stats
        # appended to `result` below are not re-consumed by the loop
        tables = cursorresult.fetchall()

        now = datetime.now()

        nowts = now.strftime('%Y-%m-%dT%H:%M:%S') + ('-%02d' %
                                                     (now.microsecond / 10000))
        print(nowts +
              '============= end process  target  query ============= ')
        print(f"elapsed time {(time.time() - t0):0.1f} seconds")
        rerunprovince = ""
        isize = 0
        dsize = 0
        ilen = 0
        for s in tables:
            table = s[0]
            try:
                tablefullname = database + "." + table
                sql = "show table stats " + tablefullname
                print(sql)
                cursor = connection.cursor()
                cursor.execute(sql)
                stattable = cursor.fetchone()
                if stattable:
                    strow = str(stattable[0])
                    stfile = str(stattable[1])
                    stsize = stattable[2]
                    stbcache = stattable[3]
                    stscache = stattable[4]
                    stformat = stattable[5]
                    stincre = stattable[6]
                    stlocation = stattable[7]
                    messagerow += tablefullname + "," + stsize + "\n"
                    if (str(stsize).find("GB") > 0):
                        ilen = str(stsize).find("GB")
                        isize = 1073741824
                        dsize = float(str(stsize)[0:ilen]) * isize
                    elif (str(stsize).find("MB") > 0):
                        ilen = str(stsize).find("MB")
                        isize = 1048576
                        dsize = float(str(stsize)[0:ilen]) * isize
                    elif (str(stsize).find("KB") > 0):
                        ilen = str(stsize).find("KB")
                        isize = 1024
                        dsize = float(str(stsize)[0:ilen]) * isize
                    elif (str(stsize).find("B") > 0):
                        ilen = str(stsize).find("B")
                        isize = 1
                        dsize = float(str(stsize)[0:ilen]) * isize

                    messagerow = tablefullname + "," + str(
                        dsize) + "," + stsize
                    data = {}
                    data['table'] = tablefullname
                    data['size'] = str(dsize)
                    data['size(unit)'] = stsize
                    result.append(data)
                    print(messagerow)
                    message += tablefullname + "," + strow + "," + stfile + "," + stsize + "," + stbcache + "," + stscache + "," + stformat + "," + stincre + "," + stlocation
                    message += "\n"

            except Exception as ee:
                messagerow = tablefullname + ",0,not available"
                data = {}
                data['table'] = tablefullname
                data['size'] = "0"
                data['size(unit)'] = "not available"
                result.append(data)
                print('Error! Code: {c}, Message, {m}'.format(
                    c=type(ee).__name__, m=str(ee)))
    except Exception as e:
        print('Error! Code: {c}, Message, {m}'.format(c=type(e).__name__,
                                                      m=str(e)))
    finally:
        connection.close()
    return json.dumps(result)
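The size handling above repeats the same find/convert pattern for GB, MB, KB and B. A small helper, written as a sketch of the same conversion, keeps the suffix table in one place:

def size_string_to_bytes(stsize):
    # converts strings such as "1.5GB" or "320MB" (as reported by
    # `show table stats`) into a byte count; returns 0.0 when no unit matches
    units = [("GB", 1073741824), ("MB", 1048576), ("KB", 1024), ("B", 1)]
    text = str(stsize)
    for suffix, factor in units:
        pos = text.find(suffix)
        if pos > 0:
            return float(text[:pos]) * factor
    return 0.0

# size_string_to_bytes("1.5GB") -> 1610612736.0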
Example #46
0
import sys
import calendar
import datetime
from datetime import date
today = date.today()
print today
print today.year
from datetime import datetime

my_date = raw_input("Enter B'date in mm/dd/yyyy format:")

b_date = datetime.strptime(my_date, '%m/%d/%Y')
print "Age : %d" % ((datetime.today() - b_date).days / 365)
# %d is the printf-style conversion for an integer: the computed number of
# whole years is substituted into the string at that position

#dob = (raw_input("enter the year u are born in format of mm/dd/yyyt"))
#print dob.year
#ur_age =t2y - dob.year
#print ur_age
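Dividing the day count by 365, as above, drifts slightly across leap years. A sketch of an exact whole-year age calculation compares the month and day directly:

from datetime import date

def age_in_years(birthdate):
    today = date.today()
    # subtract one if this year's birthday has not happened yet
    return today.year - birthdate.year - ((today.month, today.day) < (birthdate.month, birthdate.day))

# age_in_years(date(1990, 12, 31)) -> age in whole years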
Example #47
0
def vpost(userid):
    if request.method == 'GET':

        cur = mysql.connection.cursor()

        cur.execute(
            "SELECT post_id FROM Submits WHERE user_id='{}'".format(userid))
        result = cur.fetchall()
        post_ids = result

        cur.execute(
            "SELECT post_id,description,post_date,post_time FROM Post WHERE post_id in (SELECT post_id FROM Submits WHERE user_id='{}')"
            .format(userid))
        posts = cur.fetchall()

        info = []
        for post in posts:
            doc = []
            pid, des, date, time = post
            cur.execute(
                "SELECT directory FROM Image WHERE image_id in (Select image_id FROM post_image where  post_id = {})"
                .format(pid))
            images = cur.fetchall()
            pics = []
            for image in images:
                dr = image[0]
                pics.append(dr)

            cur.execute(
                "SELECT usr_text, com_date, com_time FROM Comment WHERE com_id in (Select com_id FROM Commented where  post_id = {})"
                .format(pid))
            comments = cur.fetchall()
            words = []
            for comment in comments:
                cr = comment
                words.append(cr)

            doc = [pid, des, date, time, pics, words]
            info.append(doc)
        mysql.connection.commit()
        form = Addcom_PostForm()
        return render_template('vpost.html',
                               form=form,
                               pos_t=info,
                               userid=userid)

    form = Addcom_PostForm()
    if request.method == 'POST' and form.validate_on_submit():
        cur = mysql.connection.cursor()
        us_rtext = request.form['usr_text']
        p_id = request.form['pi_d']
        comdate = datetime.today().strftime('%Y-%m-%d')
        comtime = datetime.now().strftime("%H:%M:%S")

        #cur.execute("INSERT INTO Comment(post_id,usr_text,com_date,com_time) VALUES (%s,%s,%s,%s)", (p_id,us_rtext,comdate,comtime))
        cur.execute(
            "INSERT INTO Comment(post_id,usr_text,com_date,com_time) VALUES ({},'{}','{}','{}')"
            .format(p_id, us_rtext, comdate, comtime))
        #print("INSERT INTO Comment(post_id,usr_text,com_date,com_time) VALUES ({},'{}','{}','{}')".format(p_id,us_rtext,comdate,comtime))
        mysql.connection.commit()
        cur = mysql.connection.cursor()
        cur.execute(
            "SELECT com_id FROM Comment WHERE com_date='{}' AND com_time='{}' AND usr_text='{}'"
            .format(comdate, comtime, us_rtext))
        co_result = cur.fetchall()
        comid = co_result[0][0]
        cur.execute(
            "INSERT INTO Commented(user_id,com_id,post_id) VALUES (%s,%s,%s)",
            (userid, comid, p_id))
        mysql.connection.commit()

        return redirect(url_for('vpost', userid=userid))
    return redirect(url_for('profileuserid', userid=userid))
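The Comment insert above builds SQL with str.format, while the Commented insert right after it already uses placeholders. A hedged sketch of the same Comment insert in parameterized form, which also avoids the follow-up SELECT by reading cursor.lastrowid:

cur.execute(
    "INSERT INTO Comment(post_id, usr_text, com_date, com_time) VALUES (%s, %s, %s, %s)",
    (p_id, us_rtext, comdate, comtime))
mysql.connection.commit()
comid = cur.lastrowid  # id of the row just inserted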
Example #48
0
def planner(baseday=None):
    #for debugging locally

    user = UserProfile.query.filter(UserProfile.id == 1).one_or_none()
    db.session.add(user)
    db.session.commit()
    login_user(user, force=True)
    message = "in"

    #end local debugging block
    try:
        if current_user.is_admin:
            ASANA_BASE_URL = 'https://app.asana.com/api/1.0/'

            h = {"Authorization": "Bearer " + ASANA_CODE}
            r = requests.get(ASANA_BASE_URL + "projects/" + ASANA_PROJECT_ID +
                             "/tasks",
                             headers=h)
            all_tasks = json.loads(r.text)['data']
            project_tasks = []

            for t in all_tasks:
                r2 = requests.get(ASANA_BASE_URL + "tasks/" + str(t['id']),
                                  headers=h)
                full_task = json.loads(r2.text)['data']
                if not full_task['completed']:
                    project_tasks.append(full_task)

            def sort_key(d):
                if d['due_on']:
                    return d['due_on']
                else:
                    return '9999-99-99'

            project_tasks = sorted(project_tasks, key=sort_key, reverse=False)

            if baseday:
                t = datetime.strptime(baseday, "%Y-%m-%d").date()
            else:
                t = datetime.today().date()

            weeks = Week.query.order_by(Week.week_number).all()
            last_due = []
            next_due = []
            days_before = []
            days_after = []

            for week in weeks:
                for day in week.days:
                    try:
                        dayname = datetime.strptime(day.name,
                                                    "%A, %B %d, %Y").date()
                    except:
                        dayname = t
                    if dayname >= t:
                        days_after.append(day)
                    if dayname < t:
                        days_before.append(day)

            def find_assignment(day_list, mode='before'):
                assign = 0
                due_days = []
                for day in day_list:
                    if mode == 'before' and assign == 0:
                        if len(day.assignments) > 0:
                            due_days.append(day)
                            #if found, append and change assign var to a 1
                            assign += 1
                    elif mode != 'before':
                        if len(day.assignments) > 0:
                            due_days.append(day)
                if mode == 'before':
                    due_days = due_days[-1:]
                return due_days

            last_due = find_assignment(days_before)
            next_due = find_assignment(days_after, mode='after')

            try:
                _next_three = days_after[0:3]
            except:
                _next_three = []
                fake = Day()
                fake.name = "No data to display"
                _next_three.append(fake)
            try:
                _last = days_before[-1]
            except:
                _last = Day()
                _last.name = "No data to display"
            try:
                last_due_date = last_due[0]
                days_passed = t - datetime.strptime(last_due_date.name,
                                                    "%A, %B %d, %Y").date()
                days_ago = days_passed.days
            except:
                last_due_date = Day()
                last_due_date.name = "No data to display"
                fake_assignment = Assignment()
                fake_assignment.link_title = "all"
                fake_assignment.title = "No data to display"
                last_due_date.assignments.append(fake_assignment)
                days_ago = 0
            try:
                next_due_date = next_due[0]
                days_to = datetime.strptime(next_due_date.name,
                                            "%A, %B %d, %Y").date() - t
                days_to_next = days_to.days
            except:
                next_due_date = Day()
                next_due_date.name = "No data to display"
                fake_assignment = Assignment()
                fake_assignment.link_title = "all"
                fake_assignment.title = "No data to display"
                next_due_date.assignments.append(fake_assignment)
                days_to_next = 0

            today = t.strftime("%A, %B %d, %Y").replace(" 0", " ")

            return render_template("planner.html",
                                   project_tasks=project_tasks,
                                   last=_last,
                                   next_three=_next_three,
                                   next_due_date=next_due_date,
                                   last_due_date=last_due_date,
                                   days_ago=days_ago,
                                   days_to_next=days_to_next,
                                   today=today)
        else:
            return redirect(url_for('status'))
    except:
        return redirect(url_for('status'))
Example #49
0
    def get_data(self, ip_address=None, start_date=None, range_hours=None):
        print(
            f'IP: {ip_address}, Date: {start_date}, RangeHours: {range_hours}')
        sql = "SELECT u.fullname, u.joDivision, j.* FROM `jobs` j left join users u on j.requestor = u.username where j.jobstatus in ('BROADCASTED', 'SUCCESSFUL', 'UNSUCCESSFUL', 'ONGOING', 'PAUSED', 'POSTPONED', 'CANCELLED')"
        params = []

        if range_hours:
            d = datetime.today() - timedelta(hours=range_hours)
            new_start_date = d.strftime('%Y-%m-%d %H:%M:%S')
        else:
            new_start_date = start_date

        if new_start_date:
            sql = f"{sql} and ((j.jobstatus = 'PAUSED') or (j.jobstatus = 'BROADCASTED' and broadcastDate >= %s) or (j.jobstatus in ('SUCCESSFUL', 'UNSUCCESSFUL', 'ONGOING', 'POSTPONED', 'CANCELLED') and executionComment >= %s)) "
            params.extend([f'20{new_start_date}'] * 2)

        rs = models.Jobs.objects.raw(sql, params)
        data = []
        for row in rs:
            approval = []
            disapproved = False
            disapproved_date = None
            for appr in models.Joauthoriser.objects.filter(
                    jo_id=row.jo_id).order_by('joapproveddate'):
                approval_date = self.str_date_to_fmt(appr.joapproveddate)
                approval.append({
                    'ApprovalStatus': appr.jostatus,
                    'Approver': appr.joauthoriseremail,
                    'ApprovalDate': approval_date,
                })
                if appr.jostatus == 'DISAPPROVED':
                    disapproved = True
                    disapproved_date = approval_date

            if row.jobstatus == 'PAUSED':
                if not disapproved:
                    continue
                if new_start_date and new_start_date > approval_date:
                    continue

            sys_changed = []
            sys_sql = f'SELECT f.af_ne_id, e.* from jo_affected_ne f left join jo_network_elements e on f.af_ne_id = e.ne_id where f.af_jo_id = %s'
            ip_found = False
            for ne in models.JoAffectedNe.objects.raw(sys_sql, [row.jo_id]):
                sys_changed.append({
                    'SystemName': ne.ne_commonname,
                    'IpAddress': ne.ne_ip,
                })
                if ne.ne_ip == ip_address:
                    ip_found = True
            if ip_address and not ip_found:
                continue

            data.append({
                'Identifier': row.jo_id,
                'Requestor': row.fullname,
                'RequestorEmail': row.requestor,
                'RequestorDepartment': row.joDivision,
                'RequestDate': self.str_date_to_fmt(row.thedate),
                'CABRefNumber': row.support_ref_number,
                'JONumber': row.jo_number,
                'JOType': row.job_status,
                'JOStatus': row.jobstatus,
                'JOBTitle': row.subject,
                'Approval': approval,
                'JOCategory': row.change_type,
                'Purpose': row.purpose,
                'SystemsImpacted': row.affected_network_element,
                'SystemsToBeChanged': sys_changed,
                'MitigationPlan': row.mitigation_in_place,
                'StartTime': row.start_time.strftime('%Y-%m-%d %H:%M'),
                'EndTime': row.end_time.strftime('%Y-%m-%d %H:%M'),
            })
        return data
Example #50
0
    def cleanup(self):
        # Turn off quiet to display summary
        if self.quiet:
            self.quiet = False
        if self.timeoutmsgs:
            self.js_print('\nTIMEOUTS:', '', '<br/>')
            for m in self.timeoutmsgs:
                self.js_print('  %s' % m, '', '<br/>')
        
        if self.failmsgs:
            self.js_print('\nFAILURES:', '', '<br/>')
            for m in self.failmsgs:
                self.js_print('  %s' % m, '', '<br/>')
        
        if self.expfailmsgs:
            self.js_print('\nEXPECTED FAILURES:', '', '<br/>')
            for m in self.expfailmsgs:
                self.js_print('  %s' % m, '', '<br/>')
        
        if self.unpassmsgs:
            self.js_print('\nUNEXPECTED PASSES:', '', '<br/>')
            for m in self.unpassmsgs:
                self.js_print('  %s' % m, '', '<br/>')
    
        if self.assertmsgs:
            self.js_print('\nASSERTIONS:', '', '<br/>')
            for m in self.assertmsgs:
                self.js_print('  %s' % m, '', '<br/>')
        
        if self.rebuildtests:
            if self.ashErrors:
                self.js_print('\ntest run FAILED!')
                self.js_print('')
                self.js_print('Compile Errors:')
                for msg in self.ashErrors:
                    self.js_print('\t'+msg)
                self.js_print('')
            else:
                self.js_print('\ntest run PASSED!')
        else:
            if not self.allfails and not self.allunpass:
                self.js_print('\ntest run PASSED!')
            else:
                self.js_print('\ntest run FAILED!')
        
        if self.timestamps:
            end_time = datetime.today()
            self.js_print('Tests complete at %s' % end_time, '<hr><tt>', '</tt>')
            self.js_print('Start Date: %s' % self.start_time, '<tt><br>', '')
            self.js_print('End Date  : %s' % end_time, '<br>', '')
            self.js_print('Test Time : %s' % (end_time-self.start_time), '<br>', '')
            
        if not self.rebuildtests:
            self.js_print('passes               : %d' % self.allpasses, '<br>', '')
            self.js_print('failures             : %d' % self.allfails, '<br>', '')
            if self.allunpass>0:
                self.js_print('unexpected passes    : %d' % self.allunpass, '<br>', '')
            if self.allexpfails>0:
                self.js_print('expected failures    : %d' % self.allexpfails, '<br>', '')
            if self.allskips>0:
                self.js_print('tests skipped        : %d' % self.allskips, '<br>', '')
            if self.allexceptions>0:
                self.js_print('test exceptions      : %d' % self.allexceptions, '<br>', '')
            if self.alltimeouts>0:
                self.js_print('test timeouts        : %d' % self.alltimeouts, '<br>', '')
            if self.allasserts>0:
                self.js_print('assertions           : %d' % self.allasserts, '<br>', '')
                
            if self.js_output:
                print 'Results were written to %s' % self.js_output

        if self.ashErrors:
            exit(1)
Example #51
0
def caculator_dailybonus(ids):
    if ids == 'RsaW3Kb1gDkdRUGDo':
        now = datetime.today()
        investment = db.investments.find(
            {'$and': [{
                'status': 1
            }, {
                "date_profit": {
                    "$lte": now
                }
            }]})
        for x in investment:
            print x['username']
            #profit bracket (percent by package)
            if x['package'] == 500:
                percent = 5
            if x['package'] == 2000:
                percent = 5.5
            if x['package'] == 5000:
                percent = 6
            if x['package'] == 10000:
                percent = 8
            if x['package'] == 30000:
                percent = 10
            if x['package'] == 50000:
                percent = 12

            #calculate commission
            commission = float(percent) * float(x['package']) / 3000

            #update balance
            customers = db.users.find_one({'customer_id': x['uid']})

            d_wallet = float(customers['d_wallet'])
            new_d_wallet = float(d_wallet) + float(commission)
            new_d_wallet = float(new_d_wallet)

            total_earn = float(customers['total_earn'])
            new_total_earn = float(total_earn) + float(commission)
            new_total_earn = float(new_total_earn)

            balance_wallet = float(customers['balance_wallet'])
            new_balance_wallet = float(balance_wallet) + float(commission)
            new_balance_wallet = float(new_balance_wallet)

            db.users.update({"_id": ObjectId(customers['_id'])}, {
                '$set': {
                    'balance_wallet': new_balance_wallet,
                    'total_earn': new_total_earn,
                    'd_wallet': new_d_wallet
                }
            })
            #detail = 'Get '+str(percent)+' '+"""%"""+' Daily profit from the investment $%s' %(x['package'])
            SaveHistory(customers['customer_id'], customers['_id'],
                        customers['username'], commission, 'dailyprofit',
                        'USD', percent, x['package'], '')

            #new_date_profit = datetime.utcnow() + timedelta(days=1)
            new_profit = float(x['amount_frofit']) + commission
            new_number_frofit = int(x['number_frofit']) + 1
            status_investment = 1
            if new_number_frofit >= 450:
                status_investment = 0
            db.investments.update({'_id': ObjectId(x['_id'])}, {
                '$set': {
                    'amount_frofit': float(new_profit),
                    'number_frofit': new_number_frofit,
                    'status': status_investment
                }
            })

            #getf1_earnings(customers['customer_id'],commission)

            #save history

        return json.dumps({'status': 'success'})
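The chained if statements that choose percent leave it unbound for a package outside the known tiers. A sketch using a lookup table with the same tiers:

PACKAGE_PERCENT = {500: 5, 2000: 5.5, 5000: 6, 10000: 8, 30000: 10, 50000: 12}

def commission_for(package):
    # returns None for unknown packages instead of leaving `percent` unbound
    percent = PACKAGE_PERCENT.get(package)
    if percent is None:
        return None
    return float(percent) * float(package) / 3000

# commission_for(2000) -> 3.666..., commission_for(123) -> None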
Example #52
0
 def setTimestamp(self):
     if self.timestamps:
         # get the start time
         self.start_time = datetime.today()
         self.js_print('Tamarin tests started: %s' % self.start_time, overrideQuiet=True)
Example #53
0
def save_transfer_mongo(dic, player=False, loan_info=False):
    """
    Used to make modifications directly to the transfers and players databases. It takes all the
    parameters entered from the app and directly creates/overwrites the corresponding entries in the database.
    Used by the Forms:
    - Admin_Transfers
    """

    now = datetime.today()
    Id = now.strftime("%Y%m%d%H%M%S")

    res = dic
    res['_id'] = Id

    if loan_info:
        date_now = date.today()
        m, y = date_now.month, date_now.year
        if m > 7:
            y = y + 1

        loan_info['expire_date'] = str(y) + '/07/31'
        res['loan_info'] = loan_info

    collection_tr.insert_one(res)

    if player:
        if dic['operation'] in [
                'Asta', 'Draft', 'Acquisto', 'Scambio', 'Algoritmo', 'Svincolo'
        ]:
            cost_exch = 0
            if dic['operation'] == 'Scambio':
                dic_exch = collection.find_one(
                    {'name': dic['exchange_player']})
                cost_exch = int(dic_exch['info']['contract']['cost'])

            collection.update_one(
                {'name': dic['name']},
                {'$set': {
                    'info.contract.start_date': dic['date']
                }})
            collection.update_one(
                {'name': dic['name']},
                {'$set': {
                    'info.contract.cost': int(dic['cost']) + cost_exch
                }})
            collection.update_one(
                {'name': dic['name']},
                {'$set': {
                    'info.contract.acquisition_mode': dic['operation']
                }})
            collection.update_one({'name': dic['name']}, {
                '$set': {
                    'info.contract.previous_owner': dic['previous_owner']
                }
            })
            collection.update_one({'name': dic['name']}, {
                '$set': {
                    'info.contract.quotation_initial':
                    int(dic['quotation_to_date'])
                }
            })

        if loan_info:
            collection.update_one(
                {'name': dic['name']},
                {'$set': {
                    'info.current_team.on_loan': True
                }})
            collection.update_one(
                {'name': dic['name']},
                {'$set': {
                    'info.current_team.loan_info': loan_info
                }})

        collection.update_one(
            {'name': dic['name']},
            {'$set': {
                'info.current_team.start_date': dic['date']
            }})
        collection.update_one(
            {'name': dic['name']},
            {'$set': {
                'info.current_team.owner': dic['new_owner']
            }})
        collection.update_one(
            {'name': dic['name']},
            {'$set': {
                'info.current_team.squad': dic['squad']
            }})
        if dic['previous_owner'] is None:
            previous_team = None
        else:
            previous_team = dic['previous_owner'] + ', ' + dic['previous_squad']
        collection.update_one(
            {'name': dic['name']},
            {'$set': {
                'info.current_team.previous_team': previous_team
            }})
        collection.update_one({'name': dic['name']}, {
            '$set': {
                'info.current_team.quotation_initial':
                int(dic['quotation_to_date'])
            }
        })

    dic_tr = collection_tr.find_one({'_id': Id})
    dic_pl = 'Non Aggiornato'
    if player:
        dic_pl = collection.find_one({'name': dic['name']})

    return str(dic_tr), str(dic_pl)
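A hedged usage sketch for save_transfer_mongo: the keys mirror the fields the function reads (name, operation, date, cost, quotation_to_date, new_owner, previous_owner, previous_squad, squad, exchange_player) and every value here is a placeholder.

transfer = {
    'name': 'Player Name',
    'operation': 'Acquisto',
    'date': '2021/08/31',
    'cost': 12,
    'quotation_to_date': 10,
    'new_owner': 'manager_a',
    'previous_owner': 'manager_b',
    'previous_squad': 'main',
    'squad': 'main',
    'exchange_player': None,
}
tr_doc, pl_doc = save_transfer_mongo(transfer, player=True)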
Example #54
0
'''
2002                # ISO year
11                  # ISO week number
1                   # ISO day number ( 1 = Monday )
'''
# A date object is immutable; all operations produce a new object
print(d)
print(d.replace(year=2005))  # new object : datetime.date(2005, 3, 11))
print(d)   # unchanged

######### class datetime is a subclass of date, adding time-of-day and optional tzinfo (timezone)
# class datetime.datetime(year, month, day, hour=0, minute=0, second=0, microsecond=0, tzinfo=None, *, fold=0)
from time import time  # import function time()
from datetime import datetime # import the class; otherwise you must write datetime.datetime()

print(datetime.today())   # localtime
print(datetime.utcnow())  # utc time

print(datetime.fromtimestamp(time()))  # time() -> timestamp as float

print(datetime.utcfromtimestamp(time()))  # utc

print(datetime.fromordinal(100))  # arg int

# classmethod datetime.combine(date, time, tzinfo=self.tzinfo)
print(d,datetime.combine(d, datetime.time(datetime(2014,12,1,8,15,20))))
# YYYY-MM-DD[*HH[:MM[:SS[.fff[fff]]]][+HH:MM[:SS[.ffffff]]]]
print(datetime.fromisoformat('2014-12-21*12:05:45'))

print(datetime.fromisoformat('2014-12-21+11:05:08'))
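The calls above all produce naive datetimes. A short aware-datetime sketch, using only the standard library, shows the timezone-carrying equivalents:

from datetime import datetime, timezone

print(datetime.now(timezone.utc))                   # aware UTC "now" (preferred over utcnow())
print(datetime.now().astimezone())                  # local time with the local UTC offset attached
print(datetime.fromtimestamp(0, tz=timezone.utc))   # the epoch as an aware datetime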
Example #55
0
def rose_funct(owner, squad):
    """
    This is one of the core functions for a manager's lineup: it returns all available information
    about the lineup, covering both players under contract (including those out on loan) and players
    taken on loan, such as age, value and cost.

    It is not called directly; it is used by another function to merge info about the main and primavera squads.
    """

    flip_squad = {'main': 'primavera', 'primavera': 'main'}

    owner = owner.lower()
    squad = squad.lower()
    players = []

    value_init = 0
    value_now = 0

    mean_age = 0
    tot_cost = 0
    p_num_dict = {
        'a contratto': 0,
        'dentro in prestito': 0,
        'fuori in prestito': 0
    }

    posts = collection.find({
        'info.contract.owner': owner,
        'info.current_team.squad': squad
    })
    for player in posts:
        #check if loanee player comes from owner's squad
        temp = ''
        if player['info']['current_team']['on_loan']:
            if squad not in player['info']['current_team']['previous_team']:
                continue
            else:
                p_num_dict['fuori in prestito'] += 1
                temp = '**'

        p_num_dict['a contratto'] += 1
        name_url = player['name']
        name_url = name_url.replace(' ', '-')
        name_url = name_url.replace('.', '')
        dag = ''
        cost_eff = player['info']['contract']['cost']
        if player['info']['personal_info']['team_real'] is None:
            stats_link = ''
            dag = '\u2020'
        else:
            stats_link = 'https://www.fantacalcio.it/squadre/' + player[
                'info']['personal_info'][
                    'team_real'] + '/' + name_url + '/' + str(player['_id'])

        age = int(
            np.floor((datetime.today() - datetime.strptime(
                player['info']['personal_info']['birthdate'], "%d/%m/%Y")).days
                     / 365.4))
        players.append({
            'role':
            player['info']['personal_info']['FC_role'],
            'name':
            player['name'],
            'age':
            age,
            'quotation':
            player['info']['stats']['Qt_A'],
            'quotation_initial':
            player['info']['contract']['quotation_initial'],
            'difference':
            int(player['info']['stats']['Qt_A']) -
            int(player['info']['current_team']['quotation_initial']),
            'loan':
            temp + dag,
            'link':
            stats_link,
            'owner':
            owner,
            'cost':
            cost_eff,
            'complete_db':
            player
        })
        value_init += int(player['info']['contract']['quotation_initial'])
        value_now += int(player['info']['stats']['Qt_A'])
        mean_age += int(age)
        tot_cost += int(player['info']['contract']['cost'])

    posts = collection.find({
        'info.contract.owner': owner,
        'info.current_team.on_loan': True,
        'info.current_team.squad': flip_squad[squad]
    })
    for player in posts:
        #check if loanee player comes from owner's squad
        temp = ''
        if player['info']['current_team']['on_loan']:
            if squad not in player['info']['current_team']['previous_team']:
                continue
            else:
                p_num_dict['fuori in prestito'] += 1
                temp = '**'

        p_num_dict['a contratto'] += 1
        name_url = player['name']
        name_url = name_url.replace(' ', '-')
        name_url = name_url.replace('.', '')
        dag = ''
        cost_eff = player['info']['contract']['cost']
        if player['info']['personal_info']['team_real'] is None:
            stats_link = ''
            dag = '\u2020'
        else:
            stats_link = 'https://www.fantacalcio.it/squadre/' + player[
                'info']['personal_info'][
                    'team_real'] + '/' + name_url + '/' + str(player['_id'])

        age = int(
            np.floor((datetime.today() - datetime.strptime(
                player['info']['personal_info']['birthdate'], "%d/%m/%Y")).days
                     / 365.4))
        players.append({
            'role':
            player['info']['personal_info']['FC_role'],
            'name':
            player['name'],
            'age':
            age,
            'quotation':
            player['info']['stats']['Qt_A'],
            'quotation_initial':
            player['info']['contract']['quotation_initial'],
            'difference':
            int(player['info']['stats']['Qt_A']) -
            int(player['info']['current_team']['quotation_initial']),
            'loan':
            temp + dag,
            'link':
            stats_link,
            'owner':
            owner,
            'cost':
            cost_eff,
            'complete_db':
            player
        })
        value_init += int(player['info']['contract']['quotation_initial'])
        value_now += int(player['info']['stats']['Qt_A'])
        mean_age += int(age)
        tot_cost += int(player['info']['contract']['cost'])

    posts = collection.find({
        'info.current_team.owner': owner,
        'info.current_team.on_loan': True,
        'info.current_team.squad': squad
    })
    for player in posts:
        p_num_dict['dentro in prestito'] += 1
        temp = '*'
        name_url = player['name']
        name_url = name_url.replace(' ', '-')
        name_url = name_url.replace('.', '')
        dag = ''
        cost_eff = player['info']['current_team']['loan_info']['cost']
        if player['info']['personal_info']['team_real'] is None:
            stats_link = ''
            dag = '\u2020'
        else:
            stats_link = 'https://www.fantacalcio.it/squadre/' + player[
                'info']['personal_info'][
                    'team_real'] + '/' + name_url + '/' + str(player['_id'])

        age = int(
            np.floor((datetime.today() - datetime.strptime(
                player['info']['personal_info']['birthdate'], "%d/%m/%Y")).days
                     / 365.4))
        players.append({
            'role':
            player['info']['personal_info']['FC_role'],
            'name':
            player['name'],
            'age':
            age,
            'quotation':
            player['info']['stats']['Qt_A'],
            'quotation_initial':
            player['info']['current_team']['quotation_initial'],
            'difference':
            int(player['info']['stats']['Qt_A']) -
            int(player['info']['current_team']['quotation_initial']),
            'loan':
            temp + dag,
            'link':
            stats_link,
            'owner':
            owner,
            'cost':
            cost_eff,
            'complete_db':
            player
        })
        value_init += int(player['info']['current_team']['quotation_initial'])
        value_now += int(player['info']['stats']['Qt_A'])
        mean_age += int(age)
        tot_cost += int(player['info']['contract']['cost'])

    mean_age = mean_age / len(players)
    return players, value_init, value_now, round(
        mean_age, 1), man_team_name(owner), tot_cost, p_num_dict
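rose_funct builds the same player dictionary in three separate loops; only the source of quotation_initial and cost differs. A sketch of a shared helper, assuming the same document layout, could look like:

def build_player_entry(player, owner, loan_mark, quotation_initial, cost_eff, stats_link, age):
    # single place for the row layout used by all three loops above
    return {
        'role': player['info']['personal_info']['FC_role'],
        'name': player['name'],
        'age': age,
        'quotation': player['info']['stats']['Qt_A'],
        'quotation_initial': quotation_initial,
        'difference': int(player['info']['stats']['Qt_A']) -
                      int(player['info']['current_team']['quotation_initial']),
        'loan': loan_mark,
        'link': stats_link,
        'owner': owner,
        'cost': cost_eff,
        'complete_db': player,
    }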
Example #56
0
parser.add_argument('-p',
                    '--prefixes',
                    nargs="+",
                    help='encryption prefix and the number of encryptions.',
                    required=True)
args = parser.parse_args(sys.argv[1:])

wb = load_workbook(filename=PATH, read_only=True)
ws = wb['Данные']

STR_TITLE = 'Название'
STR_SCHOOL_SUBJECT = 'Название предмета'
STR_DATA = 'Дата'
STR_LOCATION = 'Место проведения'

print(datetime.today())

rows = tuple(ws.rows)
head = rows[0]
table = rows[1:]
data = []
for row in table:
    data_row = {}
    for i in range(len(head)):
        data_row[head[i].value] = row[i].value
    data.append(data_row)

print('List of possible subjects:')
for i in range(len(data)):
    subject = data[i]
    print(f'\t{i}:{subject[STR_TITLE]}')
Example #57
0
# use shutil to copy the files
for filename in os.listdir(excel_folder_path):
    files = (os.path.join(excel_folder_path, filename))
    shutil.copy(files, destinatin_path)

# rename all files by appending the date to the end of the file name
for filename in os.listdir(destinatin_path):
    file_name_wo_ext  = os.path.splitext(filename)[0]
    os.rename(os.path.join(destinatin_path,filename), os.path.join(destinatin_path,f'{file_name_wo_ext}'+f'_{suffix_file_name}'+'.xlsx'))


#================================================================
#Send emails
#================================================================

current_year_month = datetime.today().strftime("%b %Y")

outlook = win32com.client.Dispatch('outlook.application')
mail = outlook.CreateItem(0)
mail.To = 'email_id'
mail.Subject = 'KPCT Process'
#mail.Body = 'Hello Automation'
mail.HTMLBody = f'''Congratulations.!! The test is successful for {current_year_month} <br>
<br>
<br>
''' #this field is optional

# To attach a file to the email (optional):
#attachment  = r'Path to the attachment'
#mail.Attachments.Add(attachment)
Example #58
0
    def collect_users_and_properties_data_for_statistics(self):
        agencies = User.objects.filter(agencies__isnull=False).values_list(
            'id', flat=True)
        persons = User.objects.filter(agencies__isnull=True).values_list(
            'id', flat=True)
        end = datetime.today()

        print 'Stats for properties by months -',
        stats = {
            'legend':
            None,
            'rows': (
                ['Агентства', agencies],
                ['Частные лица', persons],
                ['Фиды', ['feeds']],
            )
        }
        # statistics for the two full previous months; wrap into the previous
        # year for January and February (end.month - 2 + 12 == end.month + 10)
        if end.month - 2 > 0:
            start = datetime(end.year, end.month - 2, 1)
        else:
            start = datetime(end.year - 1, end.month + 10, 1)
        for row in stats['rows']:
            if len(row[1]) > 0 and row[1][0] == 'feeds':
                qs = Ad.objects.filter(user__isnull=1).only('created')
            else:
                qs = Ad.objects.filter(user__in=row[1]).only('created')
            if qs:
                data = QuerySetStats(qs,
                                     'created').time_series(start,
                                                            end,
                                                            interval='months')
                row.append(data)
                if not stats['legend']: stats['legend'] = data

        cache.set('global_properties_statistics', stats, 60 * 60 * 24 * 7)
        print 'OK'

        print 'Stats for users by months -',
        stats = {
            'legend': None,
            'rows': (
                ['Агентства', agencies],
                ['Частные лица', persons],
            )
        }
        # statistics for the two full previous months; wrap into the previous
        # year for January and February (end.month - 2 + 12 == end.month + 10)
        if end.month - 2 > 0:
            start = datetime(end.year, end.month - 2, 1)
        else:
            start = datetime(end.year - 1, end.month + 10, 1)
        for row in stats['rows']:
            qs = User.objects.filter(is_active=True,
                                     id__in=row[1]).only('date_joined')
            if qs:
                data = QuerySetStats(qs, 'date_joined').time_series(
                    start, end, interval='months')
                row.append(data)
                if not stats['legend']: stats['legend'] = data

        cache.set('global_users_statistics', stats, 60 * 60 * 24 * 7)
        print 'OK'
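The month arithmetic above has to special-case January and February by hand. A sketch with dateutil, assuming it is available in the project, removes the manual wrap:

from datetime import datetime
from dateutil.relativedelta import relativedelta

end = datetime.today()
# first day of the month two months before `end`, across year boundaries
start = (end - relativedelta(months=2)).replace(day=1, hour=0, minute=0,
                                                second=0, microsecond=0)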
Example #59
0
# checks a directory for a specific list of files
import os, time
import json
import requests
import datetime
import base64
import sys
from datetime import datetime, timedelta

print('Running RunIFP.py...')

#Read in configuration file
cwd = os.getcwd()
extractFileLocation = r'\\STAGING-ROKO-APPSERVER\InformentFiles\DATA'
achdate = datetime.today() - timedelta(days=1)

with open(os.path.join(cwd,
                       'ifp\\STAGING\\mssql\\FileCheck_config.json')) as f:
    config = json.load(f)
    #call APIs
    API_ENDPOINT = config['ServicingApi']
    importDate = achdate.strftime("%Y-%m-%d")
    payload = {
        'IsManual': 'false',
        'ImportDate': importDate,  # assumed: use the ACH date computed above rather than a hard-coded day
        'CanImportTransactionTables': 'true',
        'CanImportHistoryTables': 'true',
        'CanTruncateTransactionTables': 'true',
        'CanTruncateHistoryTables': 'true',
        'CanImportStagingDatabase': 'true',
        'IsBackupRequired': 'true'
Example #60
0
class NonMovingProductWizard(models.TransientModel):
    _name = 'non.moving.product.wizard.ept'

    datas = fields.Binary('File')
    from_date = fields.Datetime(string="From Date",
                                default=datetime.today() - timedelta(days=30),
                                required=True)
    to_date = fields.Datetime(string="To Date",
                              default=datetime.today(),
                              required=True)
    warehouse_ids = fields.Many2many('stock.warehouse',
                                     string='Warehouses',
                                     required=True)

    # filter fields
    vendor_ids = fields.Many2many(comodel_name="res.partner", string="Vendors")
    category_ids = fields.Many2many(comodel_name="product.category",
                                    string="Categories")
    sale_price = fields.Float(string="Sale Price")
    cost_price = fields.Float(string="Cost Price")
    qty_available = fields.Float(string="Quantity On Hand")
    create_date_from = fields.Date(string="Create Date From")
    create_date_to = fields.Date(string="Create Date To")
    sku_condition = fields.Selection([("ilike", "Contains"),
                                      ("not ilike", "Not Contain"),
                                      ("=", "is equal to"),
                                      ("!=", "is not equal to"),
                                      ("is_set", "is set"),
                                      ("not_set", "is not set")],
                                     string="SKU Condition")
    default_code = fields.Char(string="Internal Reference")
    name = fields.Char(string="Name")
    name_condition = fields.Selection([("ilike", "Contains"),
                                       ("not ilike", "Not Contain"),
                                       ("=", "is equal to"),
                                       ("!=", "is not equal to"),
                                       ("is_set", "is set"),
                                       ("not_set", "is not set")],
                                      string="Name Condition")
    sale_price_condition = fields.Selection([("=", "="), ("<=", "<="),
                                             (">=", ">=")],
                                            string="Sale Condition")
    cost_price_condition = fields.Selection([("=", "="), ("<=", "<="),
                                             (">=", ">=")],
                                            string="Cost Condition")
    qty_available_condition = fields.Selection([("=", "="), ("<=", "<="),
                                                (">=", ">=")],
                                               string="QTY Condition")

    @api.constrains('from_date', 'to_date')
    def _check_value(self):
        if any(
                self.filtered(
                    lambda value: value.from_date > str(datetime.today(
                    )) or value.to_date > str(datetime.today()))):
            raise ValidationError(
                _("Please select Dates which are not in Future"))
        if any(self.filtered(lambda value: value.from_date > value.to_date)):
            raise ValidationError(_("Enter the To Date Less than From Date"))

    @api.multi
    def send_non_moving_report_by_email(self):
        from_date = datetime.now() - timedelta(days=7)
        to_date = datetime.now()
        warehouse_ids = self.env["stock.warehouse"].search([
            ("default_non_moving", "=", True)
        ])
        if warehouse_ids:
            new_wizard = self.create({
                "from_date":
                from_date,
                "to_date":
                to_date,
                "warehouse_ids": [(6, 0, warehouse_ids.ids)]
            }).print_non_moving_product(from_cron="Weekly")

    @api.multi
    def print_non_moving_product(self, from_cron=None):
        active_id = self.ids[0]
        from_date = self.from_date
        to_date = self.to_date

        today = datetime.now().strftime("%Y-%m-%d")
        f_name = 'Non Moving Product' + ' ' + today

        warehouse_ids = self.warehouse_ids and self.warehouse_ids.ids or []

        self.get_non_moving_report(today, warehouse_ids, from_date, to_date)
        if self.datas:
            if from_cron == "Weekly":
                # email code starts
                cron_emails_id = self.env["cron.emails"].search(
                    [("report_type", "=", "non_moving_product")], limit=1)
                if cron_emails_id:
                    duration = "{} to {}".format(from_date, to_date)
                    attachment_name = "Non Moving Product Report {}".format(
                        datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f"))
                    attachment_id = self.env['ir.attachment'].create({
                        'name':
                        attachment_name,
                        'type':
                        'binary',
                        'datas':
                        self.datas,
                        'datas_fname':
                        attachment_name + '.xls',
                        'store_fname':
                        attachment_name,
                        'res_model':
                        self._name,
                        'res_id':
                        self.id,
                        'mimetype':
                        'application/x-xls'
                    })
                    # icpSudo = self.env['ir.config_parameter'].sudo()  # it is given all access
                    # email = icpSudo.get_param('eg_product_supplier_report.email_id', default="")

                    subject = "{} Non Moving Product Report".format(from_cron)

                    body_html = "<p>Hello</p></b> Please check {} Non Moving Product Report for duration {}.</b><p>Thanks</p>".format(
                        from_cron, duration)
                    values = {
                        'model': None,
                        'res_id': None,
                        'subject': subject,
                        'body': '',
                        'body_html': body_html,
                        'parent_id': None,
                        'attachment_ids': [(6, 0, [attachment_id.id])] or None,
                        'email_from': "*****@*****.**",
                        'email_to': cron_emails_id.emails,
                    }
                    mail_id = self.env['mail.mail']
                    mail_id.create(values).send()
            else:
                return {
                    'type':
                    'ir.actions.act_url',
                    'url':
                    'web/content/?model=non.moving.product.wizard.ept&download=true&field=datas&id=%s&filename=%s.xls'
                    % (active_id, f_name),
                    'target':
                    'self',
                }

    @api.multi
    def get_non_moving_report(self, today, warehouse_ids, from_date, to_date):
        warehouse_obj = self.env['stock.warehouse']
        warehouse_id_ls = warehouse_obj.search([('id', 'in', warehouse_ids)])
        workbook, header_bold, body_style, qty_cell_style, value_style, days_style = self.create_sheet(
        )
        workbook, sheet_data, row_data = self.add_headings(
            warehouse_id_ls, workbook, header_bold, body_style, qty_cell_style,
            value_style, days_style, from_date, to_date)
        data_dict = self.prepare_data(today, warehouse_ids, from_date, to_date)
        print_data = self.print_data(data_dict, workbook, sheet_data, row_data,
                                     header_bold, body_style, qty_cell_style,
                                     value_style, days_style)

        fp = BytesIO()
        workbook.save(fp)
        fp.seek(0)
        sale_file = base64.encodebytes(fp.read())
        fp.close()
        self.write({'datas': sale_file})

    @api.multi
    def non_moving_on_screen_report(self):
        active_id = self.ids[0]
        from_date = self.from_date
        to_date = self.to_date

        today = datetime.now().strftime("%Y-%m-%d")
        f_name = 'Non Moving Product' + ' ' + today
        warehouse_ids = self.warehouse_ids.ids
        if len(warehouse_ids) > 1:
            raise ValidationError(
                _("Please select only one Warehouse for On Screen Report."))

        non_moving_product_line = self.env['non.moving.product.line']
        non_moving_product_line.search([]).unlink()

        data_dict = self.prepare_data(today, warehouse_ids, from_date, to_date)
        for product_data in data_dict[self.warehouse_ids.id]:
            non_moving_product_line.create({
                'product_id':
                product_data.get('product_id'),
                'image_small':
                product_data.get('image_small'),
                'default_code':
                product_data.get('default_code'),
                'name':
                product_data.get('name'),
                'qty_available':
                product_data.get('qty_available'),
                'rack_location':
                product_data.get('rack_location') or '',
                # 'last_sale_date': product_data.get('last_sale_date') or "",
                'last_day_oldest':
                product_data.get('last_day_oldest') or "",
                'days_lpd':
                product_data.get('days_lpd') or "",
                # 'cost_of_product': product_data.get('cost_of_product'),       # TODO: New Change
                'total_cost':
                product_data.get('total_cost'),
                # 'last_purchase_date': product_data.get('last_purchase_date') or "",
                # 'sales_price': product_data.get('sales_price'),
                'total_sales_price':
                product_data.get('total_sales_price'),
                'sales_of_duration':
                product_data.get('sales_of_duration'),
                'total_sales':
                product_data.get('total_sales'),
                'warehouse_id':
                self.warehouse_ids.id,
                'start_date':
                from_date,
                'end_date':
                to_date,
            })

        action = self.env.ref(
            'non_moving_product_ept.action_non_moving_product_line').read()[0]

        return action

    @api.multi
    def create_sheet(self):
        workbook = xlwt.Workbook()
        borders = Borders()
        header_border = Borders()
        header_border.left, header_border.right, header_border.top, header_border.bottom = Borders.THIN, Borders.THIN, Borders.THIN, Borders.THICK
        borders.left, borders.right, borders.top, borders.bottom = Borders.THIN, Borders.THIN, Borders.THIN, Borders.THIN
        header_bold = xlwt.easyxf(
            "font: bold on, height 200; pattern: pattern solid, fore_colour gray25;alignment: horizontal center ,vertical center"
        )
        header_bold.borders = header_border
        body_style = xlwt.easyxf(
            "font: height 200; alignment: horizontal left")
        body_style.borders = borders

        # # style for different colors in columns
        xlwt.add_palette_colour("light_blue_21", 0x21)
        workbook.set_colour_RGB(0x21, 176, 216, 230)
        qty_cell_style = xlwt.easyxf(
            "font: height 200,bold on, name Arial; align: horiz right, vert center;  pattern: pattern solid, fore_colour light_blue_21;  borders: top thin,right thin,bottom thin,left thin"
        )

        xlwt.add_palette_colour("custom_orange", 0x22)
        workbook.set_colour_RGB(0x22, 255, 204, 153)
        value_style = xlwt.easyxf(
            "font: height 200,bold on, name Arial; align: horiz right, vert center;  pattern: pattern solid, fore_colour custom_orange;  borders: top thin,right thin,bottom thin,left thin"
        )

        xlwt.add_palette_colour("custom_pink", 0x23)
        workbook.set_colour_RGB(0x23, 255, 204, 204)
        days_style = xlwt.easyxf(
            "font: height 200,bold on, name Arial; align: horiz right, vert center;  pattern: pattern solid, fore_colour custom_pink;  borders: top thin,right thin,bottom thin,left thin"
        )

        return workbook, header_bold, body_style, qty_cell_style, value_style, days_style
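
    # Illustrative sketch, not part of the original module: a minimal example of how the
    # styles returned by create_sheet() are applied with xlwt and how the workbook is
    # saved. The sheet name, cell values and file path below are assumptions made purely
    # for demonstration.
    def _demo_styles_usage(self):
        (workbook, header_bold, body_style, qty_cell_style,
         value_style, days_style) = self.create_sheet()
        sheet = workbook.add_sheet('Demo')
        sheet.write(0, 0, 'Product Name', header_bold)  # header cell
        sheet.write(1, 0, 'Sample product', body_style)  # body cell
        sheet.write(1, 1, 42, qty_cell_style)  # quantity cell (light blue)
        sheet.write(1, 2, 1234.5, value_style)  # value cell (orange)
        sheet.write(1, 3, 30, days_style)  # days cell (pink)
        workbook.save('/tmp/non_moving_demo.xls')  # assumed output path
        return True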

    @api.multi
    def add_headings(self, warehouse_id_ls, workbook, header_bold, body_style,
                     qty_cell_style, value_style, days_style, from_date,
                     to_date):
        sheet_data = {}
        row_data = {}
        for warehouse in warehouse_id_ls:
            warehouse.name_worksheet = workbook.add_sheet(
                warehouse.display_name, cell_overwrite_ok=True)
            warehouse.name_worksheet.row(7).height = 600
            warehouse.name_worksheet.col(0).width = 4000
            warehouse.name_worksheet.col(1).width = 5000
            warehouse.name_worksheet.col(2).width = 6000
            warehouse.name_worksheet.col(3).width = 4000
            warehouse.name_worksheet.col(4).width = 5000
            warehouse.name_worksheet.col(5).width = 6500

            warehouse.name_worksheet.write(7, 0, 'Product ID', header_bold)
            warehouse.name_worksheet.write(7, 1, 'Product Code', header_bold)
            warehouse.name_worksheet.write(7, 2, 'Product Name', header_bold)
            warehouse.name_worksheet.write(7, 3, 'Available Qty', header_bold)
            warehouse.name_worksheet.write(7, 4, 'Rack Location', header_bold)
            # warehouse.name_worksheet.write(7, 5, 'Last Sale Date', header_bold)  # TODO: New Change
            warehouse.name_worksheet.write(
                7, 5, 'Duration from Last sale\n(In days)', header_bold)
            warehouse.name_worksheet.write(7, 6, 'Days (Last Purchase Date)',
                                           header_bold)
            # warehouse.name_worksheet.write(7, 7, 'Last Purchase Date', header_bold)
            # warehouse.name_worksheet.write(7, 8, 'Unit Cost', header_bold)   # TODO: New Change
            warehouse.name_worksheet.write(7, 7, 'Total Cost', header_bold)
            # warehouse.name_worksheet.write(7, 10, 'Sales Price', header_bold)
            warehouse.name_worksheet.write(7, 8, 'Total Sales Price',
                                           header_bold)
            warehouse.name_worksheet.write(7, 9, 'Sales Of Duration',
                                           header_bold)
            warehouse.name_worksheet.write(7, 10, 'Total Sales', header_bold)

            # Title
            title = "Non Moving Products Report"
            warehouse.name_worksheet.write_merge(0, 0, 0, 10, title,
                                                 header_bold)

            # Date
            string_datefrom = "Date From:"
            string_dateto = "Date To:"

            warehouse.name_worksheet.row(2).height = 300
            warehouse.name_worksheet.write(2, 0, string_datefrom, header_bold)
            warehouse.name_worksheet.write(2, 7, string_dateto, header_bold)
            warehouse.name_worksheet.write(2, 1, from_date)
            warehouse.name_worksheet.write(2, 8, to_date)

            # Freeze the header rows (rows 0-7) so they stay visible while scrolling.
            warehouse.name_worksheet.set_panes_frozen(True)
            warehouse.name_worksheet.set_horz_split_pos(8)

            # One worksheet per warehouse.
            sheet_data.update({warehouse.id: warehouse.name_worksheet})

            # Initialise the first data row for this worksheet.
            row_data.update({warehouse.name_worksheet: 9})

        return workbook, sheet_data, row_data

    @api.multi
    def get_child_locations(self, location):
        # Recursively collect the given location and all of its internal child locations.
        child_list = [location.id]
        child_locations = self.env['stock.location'].search([
            ('usage', '=', 'internal'), ('location_id', '=', location.id)
        ])
        for child_location in child_locations:
            child_list.extend(self.get_child_locations(child_location))
        return child_list
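
    # Illustrative sketch, not part of the original module: the same id list can normally
    # be obtained in a single search with Odoo's 'child_of' domain operator, avoiding the
    # explicit recursion above (assuming a standard stock.location parent/child hierarchy).
    def _get_child_locations_via_child_of(self, location):
        return self.env['stock.location'].search([
            ('id', 'child_of', location.id),
            ('usage', '=', 'internal'),
        ]).ids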

    @api.multi
    def prepare_data(self, today, warehouse_ids, from_date, to_date):
        data_dict = {}
        location_obj = self.env['stock.location']
        warehouse_obj = self.env['stock.warehouse']
        product_obj = self.env['product.product']
        stock_move_obj = self.env['stock.move']
        warehouse_ids = warehouse_obj.search([('id', 'in', warehouse_ids)])
        for warehouse in warehouse_ids:
            product_list = []
            child_locations = self.get_child_locations(warehouse.lot_stock_id)
            customer_location_ids = self.env['stock.location'].search([
                ('usage', '=', 'customer')
            ]).ids
            vendor_location_ids = self.env['stock.location'].search([
                ('usage', '=', 'supplier')
            ]).ids

            if not child_locations or not customer_location_ids:
                return data_dict
            # Pass the location ids and dates as query parameters so psycopg2 handles
            # quoting and single-element tuples safely, instead of formatting them
            # into the SQL string.
            product_list_qry = """SELECT product_id FROM stock_move
                WHERE location_id IN %s AND location_dest_id IN %s
                    AND state = 'done' AND date >= %s AND date <= %s"""
            self._cr.execute(product_list_qry,
                             (tuple(child_locations),
                              tuple(customer_location_ids), from_date, to_date))
            move_ids = self._cr.dictfetchall()
            for move in move_ids:
                product = move.get('product_id')
                product_list.append(product)
            _logger.info(["======Products====", len(product_list)])
            domain = self.product_with_filter()
            all_internal_product_ids = product_obj.with_context(
                active_test=True).search(domain)  # TODO : New
            non_moving_product_ids = []

            for product in all_internal_product_ids:
                move_ids = stock_move_obj.search([
                    ('location_dest_id', 'in', child_locations),
                    ('product_id', '=', product.id)
                ])
                if move_ids:
                    oldest_date_str = min(move_ids.mapped('date'))
                    oldest_date = str(parser.parse(oldest_date_str).date())
                    if to_date < oldest_date:
                        continue
                if product.id not in product_list:
                    # _logger.info(["======Non-Moving Product====", product.id])
                    non_moving_product_ids.append(product.id)
                else:
                    _logger.info(["======Moving Product====", product.id])
            non_moving_product_ids = product_obj.search([
                ('id', 'in', non_moving_product_ids), ('type', '=', 'product')
            ])
            output_location_ids = location_obj.search([('usage', '=',
                                                        'customer')])

            for product in non_moving_product_ids:
                total_sale = 0  # TODO: New Change
                days_lpd = 0
                sale_of_duration = 0
                if product.rack and product.rack == "BUN":  # TODO: New Change
                    continue
                new_sale_ids = self.env["sale.order"].search([
                    ("confirmation_date", ">=", product.create_date),
                    ("confirmation_date", "<=", to_date),
                    ("state", "!=", "cancel")
                ])
                if new_sale_ids:  # TODO: New Change
                    new_sale_line = self.env["sale.order.line"].search([
                        ("product_id", "=",
                         product.with_context(warehouse=warehouse.id).id),
                        ("order_id", "in", new_sale_ids.ids)
                    ])
                    total_sale = sum(new_sale_line.mapped("product_uom_qty"))
                sale_ids = self.env["sale.order"].search([
                    ("confirmation_date", ">=", from_date),
                    ("confirmation_date", "<=", to_date),
                    ("state", "!=", "cancel")
                ])
                if sale_ids:  # TODO: New Change
                    sale_line = self.env["sale.order.line"].search([
                        ("product_id", "=", product.id),
                        ("order_id", "in", sale_ids.ids)
                    ])
                    sale_of_duration = sum(sale_line.mapped("product_uom_qty"))
                last_sale_date = stock_move_obj.search(
                    [('location_id', 'in', child_locations),
                     ('location_dest_id', 'in', output_location_ids.ids),
                     ('product_id', '=', product.id)],
                    limit=1,
                    order="date desc")
                last_purchase_date = stock_move_obj.search(
                    [('location_id', 'in', vendor_location_ids),
                     ('location_dest_id', 'in', child_locations),
                     ('product_id', '=', product.id)],
                    limit=1,
                    order="date desc")
                if last_purchase_date:
                    last_purchase_date = datetime.strptime(
                        last_purchase_date.date,
                        "%Y-%m-%d %H:%M:%S")  # TODO: New Change
                    new_to_date = datetime.strptime(to_date,
                                                    "%Y-%m-%d %H:%M:%S")
                    days_lpd = (new_to_date - last_purchase_date).days
                qty = product.with_context(
                    warehouse=warehouse.id).qty_available
                quantity = False
                if self.qty_available_condition:
                    if self.qty_available_condition == "=":
                        if qty == self.qty_available:
                            quantity = True
                    elif self.qty_available_condition == "<=":
                        if qty <= self.qty_available:
                            quantity = True
                    elif self.qty_available_condition == ">=":
                        if qty >= self.qty_available:
                            quantity = True
                else:
                    quantity = True
                if quantity:
                    last_day_oldest = self.days_oldest(last_sale_date.date)
                    qty_available = product.with_context(
                        warehouse=warehouse.id).qty_available
                    # Build the report line once and append it to this warehouse's
                    # list; setdefault creates the list on first use.
                    data_dict.setdefault(warehouse.id, []).append({
                        'product_id': product.id,
                        'default_code': product.default_code,
                        'name': product.name,
                        'qty_available': qty_available,
                        'rack_location': product.rack or '',
                        # 'last_sale_date': last_sale_date.date or "",
                        'last_day_oldest': last_day_oldest or "",
                        'days_lpd': days_lpd,
                        # 'cost_of_product': product.standard_price,
                        'total_cost': qty_available * product.standard_price,
                        # 'last_purchase_date': last_purchase_date.date or "",
                        # 'sales_price': product.list_price,
                        'total_sales_price': qty_available * product.list_price,
                        'total_sales': total_sale,
                        'sales_of_duration': sale_of_duration,
                        'image_small': product.image_small,
                    })

        return data_dict
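
    # Illustrative note, not part of the original code: prepare_data() returns a mapping
    # of warehouse id to a list of product dictionaries, roughly
    #     {warehouse_id: [{'product_id': 123, 'default_code': 'SKU-1',
    #                      'qty_available': 5.0, 'total_cost': 250.0, ...}, ...]}
    # Both non_moving_on_screen_report() and print_data() consume this structure.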

    @api.multi
    def days_oldest(self, last_sale_date):
        # Number of days between today and the last sale date (0 when there is none).
        if not last_sale_date:
            return 0
        final_date = datetime.strptime(str(last_sale_date)[:10],
                                       '%Y-%m-%d').date()
        return (datetime.now().date() - final_date).days

    @api.multi
    def print_data(self, data_dict, workbook, sheet_data, row_data,
                   header_bold, body_style, qty_cell_style, value_style,
                   days_style):
        column = 0
        if data_dict:
            for warehouse_id, data_details in data_dict.items():
                for product_data in data_details:
                    row = row_data[sheet_data[warehouse_id]]
                    sheet_data[warehouse_id].row(row).height = 350
                    sheet_data[warehouse_id].write(
                        row, column, product_data.get('product_id'),
                        body_style)
                    sheet_data[warehouse_id].write(
                        row, column + 1,
                        product_data.get('default_code') or '-', body_style)
                    sheet_data[warehouse_id].write(row, column + 2,
                                                   product_data.get('name'),
                                                   body_style)
                    sheet_data[warehouse_id].write(
                        row, column + 3, product_data.get('qty_available'),
                        qty_cell_style)
                    sheet_data[warehouse_id].write(
                        row, column + 4, product_data.get('rack_location'),
                        qty_cell_style)
                    # sheet_data[warehouse_id].write(row, column + 5, product_data.get('last_sale_date'), value_style)
                    sheet_data[warehouse_id].write(
                        row, column + 5, product_data.get('last_day_oldest'),
                        days_style)
                    sheet_data[warehouse_id].write(
                        row, column + 6, product_data.get('days_lpd'),
                        days_style)
                    # sheet_data[warehouse_id].write(row, column + 7, product_data.get('last_purchase_date'),
                    #                                qty_cell_style)
                    # sheet_data[warehouse_id].write(row, column + 8, product_data.get('cost_of_product'), qty_cell_style)
                    sheet_data[warehouse_id].write(
                        row, column + 7, product_data.get('total_cost'),
                        qty_cell_style)
                    # sheet_data[warehouse_id].write(row, column + 10, product_data.get('sales_price'), qty_cell_style)
                    sheet_data[warehouse_id].write(
                        row, column + 8, product_data.get('total_sales_price'),
                        qty_cell_style)
                    sheet_data[warehouse_id].write(
                        row, column + 9, product_data.get('sales_of_duration'),
                        days_style)
                    sheet_data[warehouse_id].write(
                        row, column + 10, product_data.get('total_sales'),
                        days_style)
                    row += 1
                    # Increase the row counter for this worksheet.
                    row_data.update({sheet_data[warehouse_id]: row})

        else:
            return False
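
    # Illustrative sketch, not part of the original module: once print_data() has filled
    # the worksheets, the workbook is typically serialised to a base64 string so it can
    # be offered as a download (for example via a Binary field on the wizard). xlwt's
    # Workbook.save() accepts a file-like object as well as a path; the imports would
    # normally sit at the top of the module.
    def _workbook_to_base64(self, workbook):
        import base64
        import io
        buf = io.BytesIO()
        workbook.save(buf)
        return base64.b64encode(buf.getvalue())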

    @api.multi
    def product_with_filter(self):
        domain = []
        if self.category_ids:
            domain.append(('categ_ids', 'in', self.category_ids.ids))
        if self.vendor_ids:
            supplier_info_ids = self.env['product.supplierinfo'].search([
                ('name', 'in', self.vendor_ids.ids)
            ])
            domain.append(('seller_ids', 'in', supplier_info_ids.ids))
        if self.cost_price_condition:
            domain.append(
                ('standard_price', self.cost_price_condition, self.cost_price))
        if self.sale_price_condition:
            domain.append(
                ('lst_price', self.sale_price_condition, self.sale_price))
        if self.create_date_from:
            domain.append(('create_date', '>=', self.create_date_from))
        if self.create_date_to:
            domain.append(('create_date', '<=', self.create_date_to))
        if self.sku_condition:
            if self.sku_condition == "is_set":
                domain.append(('default_code', 'not in', [None, False, ""]))
            elif self.sku_condition == "not_set":
                domain.append(('default_code', 'in', [None, False, ""]))
            elif self.default_code:
                domain.append(
                    ('default_code', self.sku_condition, self.default_code))
        if self.name_condition:
            if self.name_condition == "is_set":
                domain.append(('name', 'not in', [None, False, ""]))
            elif self.name_condition == "not_set":
                domain.append(('name', 'in', [None, False, ""]))
            elif self.name:
                domain.append(('name', self.name_condition, self.name))
        return domain
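
    # Illustrative usage sketch, not part of the original module: the domain built by
    # product_with_filter() is meant to be passed to product.product.search(), exactly as
    # prepare_data() does. For example, with cost_price_condition='>=' and
    # cost_price=100.0 the domain would contain ('standard_price', '>=', 100.0).
    def _example_filtered_products(self):
        domain = self.product_with_filter()
        return self.env['product.product'].with_context(
            active_test=True).search(domain)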