Example #1
0
    def run(self):

        while True:

            try:
                r = self.intrade.prices(self.contracts, timestamp = self.last_new,
                                        depth = 10)
                self.last_new = r['@lastUpdateTime']
                new_info = False
                for contract in format_prices(r):
                    if contract:
                        new_info = True
                        self.data.price.save(contract)
                if new_info:
                    print ' '.join(['New price data recorded for',
                                    str(self.contracts), 'at',
                                    str(datetime.today())])

            except intrade_api.MarketHoursError:
                print 'Prices call disabled outside of market hours'
            except intrade_api.IntradeError:
                print ' '.join(['Intrade Error in PriceScraper at',
                                str(datetime.today()), '... see Mongo logs'])
                self.logger.error('Intrade Error in PriceScraper', exc_info = True)
            except:
                print ' '.join(['Unexpected Error detected in PriceScraper at',
                                str(datetime.today()), '... see Mongo logs'])
                self.logger.error('Unexpected Error in PriceScraper', exc_info = True)
            finally:
                time.sleep(1)
Example #2
0
def format_data(data):
    """Normalise scraped coursework dates and group rows per module.

    ``data`` is a dict of parallel lists keyed by column name ('Due Date',
    'Issue Date', 'Module', 'Coursework Title', 'Weight', 'Mark‡',
    'Final Mark‡').  Dates are rewritten in-place to DD/MM/YYYY, then rows
    are grouped by module (modules ordered by first issue date) and the
    '‡' footnote marker is stripped from the mark column names.

    Returns a dict {index: {column: [values...]}}.
    """
    def _to_ddmmyyyy(item):
        # Site format, e.g. 'Mon 01 Jan 24' -> '01/01/2024'.
        return datetime.strptime(item, '%a %d %b %y').strftime('%d/%m/%Y')

    today = datetime.today().strftime('%d/%m/%Y')
    for i, due_date in enumerate(data["Due Date"]):
        if due_date == "TODAY":  # the website shows "TODAY" instead of a date
            data["Due Date"][i] = today
            data["Issue Date"][i] = today
        else:
            data["Due Date"][i] = _to_ddmmyyyy(due_date)
            data["Issue Date"][i] = _to_ddmmyyyy(data["Issue Date"][i])

    # Order modules by their first issue date.
    df_semesters = pd.DataFrame(data, columns=['Issue Date', 'Module'])
    # BUG fixes: dayfirst and inplace expect plain bools, not one-element
    # lists ([True] merely happened to be truthy).
    df_semesters['Issue Date'] = pd.to_datetime(df_semesters['Issue Date'],
                                                dayfirst=True, format="%d/%m/%Y")
    df_semesters.sort_values(['Issue Date'], ascending=True, inplace=True)
    # Series.unique() replaces the deprecated Series.ravel() round-trip.
    unique_modules = df_semesters['Module'].unique()

    df = pd.DataFrame(data, columns=['Due Date', 'Module', 'Coursework Title',
                                     'Weight', 'Mark‡', 'Final Mark‡'])
    # No pd.to_datetime here: this frame is never sorted.

    formatted_data = {}
    for i, module in enumerate(unique_modules):
        formatted_data[i] = df[df['Module'] == module].to_dict('list')

    # Strip the '‡' footnote marker from the mark columns.
    for module in formatted_data:
        formatted_data[module]['Mark'] = formatted_data[module].pop('Mark‡')
        formatted_data[module]['Final Mark'] = formatted_data[module].pop('Final Mark‡')

    return formatted_data
Example #3
0
 def GetClasses(self, classDescIds=None, 
                      classIds=None, 
                      staffIds=None, 
                      startDateTime=datetime.today(), 
                      endDateTime=datetime.today(), 
                      clientId=None, 
                      programIds=None, 
                      sessionTypeIds=None, 
                      locationIds=None, 
                      semesterIds=None, 
                      hideCanceledClasses=False, 
                      schedulingWindow=False):
     result = ClassServiceMethods().GetClasses(classDescIds, 
                                               classIds, 
                                               staffIds, 
                                               startDateTime, 
                                               endDateTime, 
                                               clientId, 
                                               programIds, 
                                               sessionTypeIds, 
                                               locationIds, 
                                               semesterIds, 
                                               hideCanceledClasses, 
                                               schedulingWindow)
     print str(result)
Example #4
0
    def test_replace_dtypes(self):
        """DataFrame.replace preserves the block dtype for int, bool,
        complex and datetime64 columns."""
        def check(frame, to_replace, value, expected):
            assert_frame_equal(frame.replace(to_replace, value), expected)

        # integer blocks at several widths (default, int32, int16)
        for int_dtype in (None, np.int32, np.int16):
            check(DataFrame({'ints': [1, 2, 3]}, dtype=int_dtype), 1, 0,
                  DataFrame({'ints': [0, 2, 3]}, dtype=int_dtype))

        # boolean blocks: every False flipped to True
        flipped = DataFrame({'bools': [True, False, True]}).replace(False, True)
        self.assertTrue(flipped.values.all())

        # complex blocks
        check(DataFrame({'complex': [1j, 2j, 3j]}), 1j, 0j,
              DataFrame({'complex': [0j, 2j, 3j]}))

        # datetime blocks
        prev = datetime.today()
        now = datetime.today()
        check(DataFrame({'datetime64': Index([prev, now, prev])}), prev, now,
              DataFrame({'datetime64': Index([now] * 3)}))
Example #5
0
def _parse_date(item):
    """Best-effort extraction of a datetime from a feed item.

    Tries, in order: the full ``item['data']['date']`` string, its first
    whitespace-separated token, then the same two forms of
    ``item['meta']['parsedDate']``, finally falling back to
    ``datetime.today()``.

    Hardened over the original nested try ladder: a missing key now falls
    through to the next source instead of raising, and the bare ``except:``
    clauses are narrowed to the parse errors dateutil actually raises.
    """
    def _attempt(text):
        # None signals "try the next candidate" instead of raising.
        try:
            return parser.parse(text, fuzzy=True, default=datetime.today())
        except (ValueError, OverflowError):
            return None

    for outer_key, inner_key in (('data', 'date'), ('meta', 'parsedDate')):
        try:
            text = item[outer_key][inner_key]
        except (KeyError, TypeError):
            continue
        # Full string first, then just its first token (the date usually
        # leads and the tail can confuse the fuzzy parser).
        candidates = [text]
        chunks = text.split()
        if chunks:
            candidates.append(chunks[0])
        for candidate in candidates:
            parsed = _attempt(candidate)
            if parsed is not None:
                return parsed

    return datetime.today()
Example #6
0
 def start_requests(self):
     """Seed one membership-search POST per chamber.

     'S' targets the Senate form, 'D' the Deputies form; both share the
     same form fields apart from 'Cuerpo'.

     NOTE(review): the callbacks look swapped -- the 'S' request is parsed
     by parse_dip and the 'D' request by parse_sen.  Kept as-is to preserve
     behaviour; confirm against the parse methods.
     """
     def _formdata(cuerpo):
         # BUG fix: the original dict literals listed "Integracion": "S"
         # twice; duplicate keys silently collapse to the last one, so a
         # single entry is equivalent and no longer misleading.
         return {
             "Fecha": self.start.strftime(self.date_fmt),
             "Cuerpo": cuerpo,
             "Integracion": "S",
             "Desde": "15021985",
             "Hasta": datetime.today().strftime(self.date_fmt),
             "Dummy": datetime.today().strftime(self.date_fmt),
             "TipoLeg": "Act",
             "Orden": "Legislador",
         }

     return [
         FormRequest(
             "http://www0.parlamento.gub.uy/forms/IntCpo.asp?Cuerpo=S",
             formdata=_formdata("S"),
             callback=self.parse_dip,
         ),
         FormRequest(
             "http://www0.parlamento.gub.uy/forms/IntCpo.asp?Cuerpo=D",
             formdata=_formdata("D"),
             callback=self.parse_sen,
         ),
     ]
Example #7
0
 def GetSemesters(self, semesterIds=None, 
                        startDate=datetime.today(), 
                        endDate=datetime.today()):
     result = ClassServiceMethods().GetSemesters(semesterIds, 
                                                 startDate, 
                                                 endDate)
     print str(result)
Example #8
0
 def pretty_created_date(self):
     """Humanise ``self.created_date``: 'Today', 'Yesterday', or 'Mon DD'.
     """
     # Capture today's date once: the original called datetime.today()
     # twice, so the two comparisons could straddle midnight and disagree.
     today = datetime.today().date()
     created = self.created_date.date()
     if created == today:
         return 'Today'
     if created == today - timedelta(days=1):
         return 'Yesterday'
     return self.created_date.strftime('%b %d')
    def run(self):
        """Background service loop: refresh the recent-items list until Kodi
        signals shutdown via ``xbmc.abortRequested``."""
        self.logMsg("Started")

        self.updateRecent()
        lastRun = datetime.today()
        lastProfilePath = xbmc.translatePath('special://profile')

        while (xbmc.abortRequested == False):
            # NOTE(review): td.seconds ignores the .days component, so after
            # >24h without a refresh the elapsed figure would wrap --
            # presumably unreachable since the loop wakes every 3s; confirm.
            td = datetime.today() - lastRun
            secTotal = td.seconds

            profilePath = xbmc.translatePath('special://profile')

            # Refresh every 60s normally, but only every 300s while media
            # is playing to reduce churn.
            updateInterval = 60
            if (xbmc.Player().isPlaying()):
                updateInterval = 300

            # Refresh when the interval elapses OR the active profile
            # changed since the last pass.
            if(secTotal > updateInterval or lastProfilePath != profilePath):
                self.updateRecent()
                lastRun = datetime.today()

            lastProfilePath = profilePath

            # Sleep in short slices so an abort is noticed promptly.
            xbmc.sleep(3000)

        self.logMsg("Exited")
Example #10
0
def index(req, scope):
    ''' Display Dashboard

    Renders a reporting-period summary (total / up-to-date / missing health
    units) plus the ten most recently completed epidemiological reports for
    this scope.
    '''
    today = datetime.today()
    summary = {}
    # On Sundays (weekday 6) fall back to the latest closed period.
    if today.weekday() == 6:
        period = ReportPeriod.objects.latest()
    else:
        period = ReportPeriod.from_day(today)
    summary['period'] = period

    # Fetch the unit list once instead of three separate calls.
    health_units = scope.health_units()
    summary['total_units'] = len(health_units)
    # sum() over a genexp works on both Python 2 and 3; the original
    # len(filter(lambda...)) breaks on Python 3 where filter is lazy.
    summary['up2date'] = sum(1 for hc in health_units if hc.up2date())
    summary['missing'] = summary['total_units'] - summary['up2date']

    recent = []
    reports = EpidemiologicalReport.objects.filter(
        _status=EpidemiologicalReport.STATUS_COMPLETED,
        clinic__in=health_units).order_by('-completed_on')[:10]
    for report in reports:
        completed_by = report.completed_by()  # one call instead of three
        recent.append({'id': report.id,
                       'date': report.completed_on.strftime("%a %H:%M"),
                       'by': ('%s %s' % (completed_by.first_name,
                                         completed_by.last_name)).title(),
                       'by_id': completed_by.id,
                       'clinic': report.clinic,
                       'clinic_id': report.clinic.id})

    return render_to_response(req, 'findug/index.html',
                              {'summary': summary, 'scope': scope,
                               'recent': recent})
Example #11
0
def map(req, scope):
    ''' Map view

    Builds one entry per health unit with flags for each ACT pack colour
    that still has stock in the most recent reporting period.
    '''
    today = datetime.today()
    # On Sundays (weekday 6) fall back to the latest closed period.
    if today.weekday() == 6:
        previous_period = ReportPeriod.objects.latest()
    else:
        previous_period = ReportPeriod.from_day(today)

    # Renamed from `all`, which shadowed the builtin of the same name.
    locations = []
    for location in scope.health_units():
        loc = {
            'obj': location,
            'name': unicode(location),
            'type': location.type.name.lower().replace(' ', ''),
        }
        act_reports = ACTConsumptionReport.objects.filter(
            reporter__location=location).filter(period=previous_period)
        if not act_reports:
            loc['act_unknown'] = True
        else:
            rpt = act_reports[0]
            # Flag each pack colour whose balance is non-zero.
            for colour in ('yellow', 'blue', 'brown', 'green'):
                if getattr(rpt, '%s_balance' % colour):
                    loc[colour] = True

        locations.append(loc)

    return render_to_response(req, 'findug/map.html',
                              {'scope': scope, 'locations': locations})
Example #12
0
    def handle(self, *args, **options):
        """Generate one day's log file of the requested type and push it.

        options:
          log_type -- must be one of self.types, else CommandError.
          dir      -- output directory; when absent a temp dir is used and
                      the generated file is removed after upload.
          today / date -- which day to generate; defaults to yesterday.
        """
        log_type = options['log_type']
        if log_type not in self.types:
            msg = 'Type not valid, must be one of: %s' % self.types
            log.debug(msg)
            raise CommandError(msg)

        # BUG fix: this read `dir_ = not options['dir']`, which collapsed a
        # user-supplied directory to False (crashing os.path.join below) and
        # only worked by accident when no directory was given.
        dir_ = options['dir']
        if not dir_:
            log.debug('No directory specified, making temp.')
            dir_ = tempfile.mkdtemp()

        # Default to yesterday for backwards compat.
        day = (datetime.today() if options['today']
               else datetime.today() - timedelta(days=1))

        date = (datetime.strptime(options['date'], '%Y-%m-%d')
                if options['date'] else day).date()
        filename = os.path.join(dir_, '{0}.{1}.log'.format(
            date.strftime('%Y-%m-%d'), log_type))

        generate_log(date, filename, log_type)
        log.debug('Log generated to: %s', filename)
        push(filename)
        if not options['dir']:
            log.debug('No directory specified, cleaning log after upload.')
            os.remove(filename)
def publish_Fake_Sensor_Values_to_MQTT():
	"""Publish a fake humidity or temperature reading every 3 seconds.

	Alternates between the two dummy sensors via the module-level
	``toggle`` flag, re-arming a one-shot ``threading.Timer`` on every
	invocation so the cycle continues indefinitely.
	"""
	threading.Timer(3.0, publish_Fake_Sensor_Values_to_MQTT).start()
	global toggle
	if toggle == 0:
		# Humidity sample in [50, 100), rounded to 2 decimal places.
		Humidity_Fake_Value = float("{0:.2f}".format(random.uniform(50, 100)))

		Humidity_Data = {}
		Humidity_Data['Sensor_ID'] = "Dummy-1"
		Humidity_Data['Date'] = (datetime.today()).strftime("%d-%b-%Y %H:%M:%S:%f")
		Humidity_Data['Humidity'] = Humidity_Fake_Value
		humidity_json_data = json.dumps(Humidity_Data)

		print "Publishing fake Humidity Value: " + str(Humidity_Fake_Value) + "..."
		publish_To_Topic (MQTT_Topic_Humidity, humidity_json_data)
		toggle = 1

	else:
		# Temperature sample in [1, 30), rounded to 2 decimal places.
		Temperature_Fake_Value = float("{0:.2f}".format(random.uniform(1, 30)))

		Temperature_Data = {}
		Temperature_Data['Sensor_ID'] = "Dummy-2"
		Temperature_Data['Date'] = (datetime.today()).strftime("%d-%b-%Y %H:%M:%S:%f")
		Temperature_Data['Temperature'] = Temperature_Fake_Value
		temperature_json_data = json.dumps(Temperature_Data)

		print "Publishing fake Temperature Value: " + str(Temperature_Fake_Value) + "..."
		publish_To_Topic (MQTT_Topic_Temperature, temperature_json_data)
		toggle = 0
Example #14
0
def action_activate():
    """Enable every host in the selected legacy aggregate and disable the
    hosts of all other legacy aggregates, tagging each aggregate's metadata
    with the date of the change.

    NOTE(review): only the metadata update honours --dry-run below; hosts
    are still enabled/disabled even when options.dry_run is set, despite
    the 'DRY-RUN:' prefix being printed -- confirm whether intentional.
    """
    aggregates = novaclient.get_aggregates()
    dry_run_txt = 'DRY-RUN: ' if options.dry_run else ''
    for aggregate in aggregates:
        # do not use on central1 or placeholder1
        if aggregate not in legacy_aggregate:
            continue
        print '=============== %s ================' % aggregate
        metadata = novaclient.get_aggregate(aggregate)
        # Enable this aggregate
        if aggregate == options.aggregate:
            for h in metadata.hosts:
                print '%sEnable %s' % (dry_run_txt, h)
                novaclient.enable_host(h)
            # Record when the aggregate was (re)enabled; clear stale tags.
            tags = {'enabled': datetime.today(), 'disabled': None, 'mail': None}
            if not options.dry_run:
                novaclient.update_aggregate(aggregate, tags)
        else: # Disable everything else
            for h in metadata.hosts:
                services = novaclient.get_service(h)
                # Skip hosts that are already disabled.
                if services[0].status == 'enabled':
                    print '%sDisable %s' % (dry_run_txt, h)
                    novaclient.disable_host(h)
            tags = {'disabled': datetime.today(), 'enabled': None}
            if not options.dry_run:
                novaclient.update_aggregate(aggregate, tags)
Example #15
0
def createUser(username, password, isPrivate=False):
	"""Create a new user account with default settings and profile.

	Returns a (success, message) tuple rather than raising: (True,
	'success') on creation, (False, <reason>) when validation fails.
	"""
	if username == '':
		return (False, 'must supply a username')
	if not username.isalnum():
		return (False, 'username must be alphanumeric')
	if userExists(username):
		return (False, 'username already exists')

	hash_salt = createPasswordSalt()
	hashed_password = hashPassword(password, hash_salt)

	newUserSettings = UserSettings()
	newUserSettings.private = isPrivate
	newUserSettings.save()

	newUserProfile = UserProfile()
	newUserProfile.save()

	# Single timestamp so last_online and creation_date match exactly;
	# two separate datetime.today() calls could differ by a tick.
	now = datetime.today()
	newUser = User(
		name=username,
		password_hash=hashed_password,
		last_online=now,
		creation_date=now,
		password_salt=hash_salt,
		settings=newUserSettings,
		profile=newUserProfile)
	newUser.save()

	return (True, 'success')
Example #16
0
    def _query_owned_and_shared(self, user):
        """Build a Q filter for experiments *user* can read.

        Combines USER-level and external GROUP-level ACLs, each restricted
        to ACLs that are currently effective (or open-ended) and not yet
        expired.  Unauthenticated users match nothing.
        """
        # if the user is not authenticated, nothing should be returned
        if not user.is_authenticated():
            return Q(id=None)

        # One timestamp for every date comparison; the original called
        # datetime.today() per clause, which could straddle midnight and
        # make the effective/expiry windows inconsistent.
        now = datetime.today()
        # ACL must already be effective (or have no start) and must not
        # have expired (or have no expiry).
        date_window = (
            (Q(objectacls__effectiveDate__lte=now)
             | Q(objectacls__effectiveDate__isnull=True))
            & (Q(objectacls__expiryDate__gte=now)
               | Q(objectacls__expiryDate__isnull=True)))

        def _read_acl(plugin_id, entity_id):
            # Read-access clause for a single ACL principal.
            return Q(objectacls__pluginId=plugin_id,
                     objectacls__entityId=str(entity_id),
                     objectacls__canRead=True) & date_window

        # USER-level permissions.
        query = _read_acl(django_user, user.id)

        # GROUP-level permissions from every external group provider.
        for name, group in user.get_profile().ext_groups:
            query |= _read_acl(name, group)
        return query
    def update_delegate(self):
        """Called by CRON to update delegated children.
        :param Recordset self: empty Recordset

        Children whose delegation start date has arrived (and who are
        available) are moved to state 'D'; children whose delegation end
        date has passed are handed to the undelegate wizard.
        """
        # Only children not in states 'F' or 'P' (terminal states --
        # TODO confirm their meaning against the model definition).
        children = self.search([('state', 'not in', ['F', 'P'])])
        # NOTE(review): copy() on the (empty) recordset appears to serve as
        # an empty accumulator for the |= below -- confirm this is the
        # intended idiom rather than self.browse().
        children_to_delegate = self.copy()
        children_to_undelegate = list()

        for child in children:
            if child.date_delegation:
                # Start date reached and the child is available: delegate.
                if datetime.strptime(child.date_delegation, DF) \
                   <= datetime.today() and child.is_available:
                    children_to_delegate |= child

                # End date passed: queue for undelegation.
                if child.date_end_delegation and \
                   datetime.strptime(child.date_end_delegation, DF) <= \
                   datetime.today():
                    children_to_undelegate.append(child.id)

        children_to_delegate.write({'state': 'D'})

        self.env['undelegate.child.wizard'].with_context(
            active_ids=children_to_undelegate).undelegate()

        return True
Example #18
0
	def __init__(self, log=None):
		"""Precompute reporting-period boundaries (day / week / month /
		year) with their unix timestamps, and initialise the empty result
		accumulators filled in later by the report builders.
		"""
		self.log = log
		self.cur = self.postgres_conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

		# One reference timestamp for every derived boundary: the original
		# called datetime.today() separately for today/month/year, which
		# could straddle midnight (or New Year) mid-__init__.  Microseconds
		# are also zeroed -- replace(hour=0, minute=0, second=0) alone left
		# them behind on self.today.
		now = datetime.today()
		self.today = now.replace(hour=0, minute=0, second=0, microsecond=0)
		self.today_ts = int(time.mktime(self.today.timetuple()))
		self.yesterday = self.today - timedelta(days=1)
		self.yesterday_ts = int(time.mktime(self.yesterday.timetuple()))
		self.week_start = self.today - timedelta(days=self.today.weekday())
		self.week_start_ts = int(time.mktime(self.week_start.timetuple()))
		self.month_start = datetime(now.year, now.month, 1)
		self.month_start_ts = int(time.mktime(self.month_start.timetuple()))
		self.year_start = datetime(now.year, 1, 1)
		self.year_start_ts = int(time.mktime(self.year_start.timetuple()))

		self.splitted_keys = None

		# Aggregation accumulators, populated by later report passes.
		self.brands_groups = []
		self.cards_by_purchases = []

		self.cards_data = []
		self.genders_data = []
		self.brands_cards_added = []
		self.brands_customers_data = []
		self.brands_filled_profiles = []
		self.brands_genders_data = []
		self.brands_participants_counts = []

		self.shops_cards_added = []
		self.shops_customers_data = []
		self.shops_filled_profiles = []
		self.shops_genders_data = []
		self.shops_participants_counts = []
Example #19
0
File: msh.py Project: PRIHLOP/msh
    def _send_response(self, email_from, msg):
        """Email *msg* back to *email_from* (plus the configured copy
        address) via SMTP, serialised under the send lock so concurrent
        senders cannot interleave.  A None *msg* is a no-op.
        """
        self.__send_lock.acquire()
        try:
            if msg is not None:
                # print(...) with a single argument is valid on both
                # Python 2 and 3.
                print("[%s] Sending response to '%s'" % (datetime.today().strftime('%d/%m/%y %H:%M'), email_from))
                recipients = [email_from, self.get_param_str('Mail', 'SEND_COPY_TO')]
                message = "%s%s%s\n%s" % ('From: %s \n' % (self.get_param_str('Main', 'BOT_NAME')),
                                          'To: %s \n' % (email_from),
                                          'Subject: Report %s \n' % (datetime.today().strftime('%d/%m/%y %H:%M')),
                                           msg)

                if is_enabled(self.get_param_str("Mail", "USE_SSL")):
                    session = smtplib.SMTP_SSL(self.get_param_str("Mail", "SMTP_SERVER"),
                                               self.get_param_int("Mail", "SMTP_SSL_PORT"))
                else:
                    session = smtplib.SMTP(self.get_param_str("Mail", "SMTP_SERVER"),
                                           self.get_param_int("Mail", "SMTP_PORT"))
                if is_enabled(self.get_param_str("Debug", "NETWORK_COMM_LOGGING")):
                    session.set_debuglevel(100)
                session.login(self.get_param_str("Mail", "EMAIL_USER"),
                              self.get_param_str("Mail", "EMAIL_PASS"))
                session.sendmail(self.get_param_str("Mail", "EMAIL_USER"),
                                 recipients,
                                 message)
                session.quit()
        finally:
            # BUG fix: the lock was released outside any try/finally, so an
            # SMTP/network exception left it held forever and deadlocked
            # every subsequent sender.
            self.__send_lock.release()
	def test_getDateToPrint(self):
		"""getDateToPrint falls back to per-name defaults and echoes values
		already present in request.GET."""
		from datetime import datetime, timedelta
		inst = self._makeOne()
		inst.request = DummyRequest()

		# Unknown input names yield 0.
		self.assertEqual(inst.getDateToPrint('coucou'), 0)

		# Date defaults: today for 'toDate'...
		self.assertEqual(inst.getDateToPrint('toDate'),
		                 datetime.today().strftime('%d/%m/%y'))

		# ...and 24 hours ago for 'fromDate'.
		self.assertEqual(inst.getDateToPrint('fromDate'),
		                 (datetime.today() - timedelta(hours=24)).strftime('%d/%m/%y'))

		# Hour defaults: the current time for both hour inputs.
		for input_name in ('toHour', 'fromHour'):
			self.assertEqual(inst.getDateToPrint(input_name),
			                 datetime.today().strftime('%H:%M'))

		# A value already present in GET wins over the default.
		inst.request.GET['fromDate'] = '23/11/11'
		self.assertEqual(inst.getDateToPrint('fromDate'), '23/11/11')
	def test_getDate(self):
		"""getDate returns a (fromTime, toTime) window driven by the
		request's deltaTime / custom GET parameters."""
		from datetime import datetime, timedelta
		inst = self._makeOne()
		inst.request = DummyRequest()

		# Default (no GET params): a 24h window ending now.  Microseconds
		# are stripped from both sides before comparing because `today` is
		# captured slightly before getDate() runs.
		today=datetime.today()
		(fromTime,toTime)=inst.getDate()
		self.assertEqual(fromTime-timedelta(microseconds=fromTime.microsecond), today-timedelta(seconds=86400, microseconds=today.microsecond))
		self.assertEqual(toTime-timedelta(microseconds=toTime.microsecond), today-timedelta(microseconds=today.microsecond))

		# Explicit deltaTime: window length follows the GET parameter.
		inst.request.GET['deltaTime']=64600
		today=datetime.today()
		(fromTime,toTime)=inst.getDate()
		self.assertEqual(fromTime-timedelta(microseconds=fromTime.microsecond), today-timedelta(seconds=64600, microseconds=today.microsecond))
		self.assertEqual(toTime-timedelta(microseconds=toTime.microsecond), today-timedelta(microseconds=today.microsecond))

		# custom=1: the explicit fromDate/fromHour/toDate/toHour fields win
		# over any deltaTime.
		inst.request.GET['custom']=1
		inst.request.GET['fromDate']='23/11/11'
		inst.request.GET['fromHour']='10:00'
		inst.request.GET['toDate']='26/12/12'
		inst.request.GET['toHour']='11:10'
		(fromTime,toTime)=inst.getDate()
		fromTimeTest=datetime(2011, 11, 23, 10, 00)
		toTimeTest=datetime(2012, 12, 26, 11, 10)
		self.assertEqual(fromTime, fromTimeTest)
		self.assertEqual(toTime, toTimeTest)
Example #22
0
    def get_articles_in_range_by_author(self, author,
                                        range_start, range_end='',
                                        terms=''):
        """
        Return the unique PubMed article IDs (order preserved) that
        'author' published between 'range_start' and 'range_end'
        (year/month strings), optionally narrowed by additional 'terms'.
        'range_end' defaults to the current year/month.
        """
        # Derive the default range end from a single datetime.today() call;
        # two separate calls could straddle New Year and mix year/month.
        if range_end == '':
            today = datetime.today()
            range_end = "%s/%s" % (today.year, today.month)

        # NOTE(review): assigning E.email mutates module-level Entrez
        # state -- presumably required by the API; confirm.
        E.email = self.email
        term = "%s [AU] %s:%s[DP] %s" % (author, range_start, range_end, terms)

        handle = E.esearch(db="pubmed", term=term)
        record = E.read(handle)
        handle.close()

        # Deduplicate while preserving order; the original membership test
        # against the list was O(n^2).
        articles = []
        seen = set()
        for article_id in record['IdList']:
            if article_id not in seen:
                seen.add(article_id)
                articles.append(article_id)
        print("got %d articles" % len(articles))
        return articles
Example #23
0
def train(args):
    """Train an XGBoost model and dump it alongside feature scores.

    args supplies: num_round, output (model path) and an optional fmap
    (feature-map file used when dumping and scoring features).
    """
    # Renamed from `format`, which shadowed the builtin.
    time_format = "%Y-%m-%d %H:%M:%S"
    starttime = datetime.today().strftime(time_format)
    xg_train, xg_test, param = preprocess(args)

    watchlist = [(xg_train, 'train'), (xg_test, 'test')]
    bst = xgb.train(param, xg_train, int(args.num_round), watchlist)
    bst.save_model(args.output)
    # print(...) with one argument is valid on both Python 2 and 3.
    if args.fmap is not None:
        bst.dump_model(args.output + '.dump', args.fmap, with_stats=True)
        print(bst.get_fscore(args.fmap))
    else:
        bst.dump_model(args.output + '.dump', with_stats=True)
        print(bst.get_fscore())

    # get prediction and report the simple holdout error rate
    pred = bst.predict(xg_test)
    test_y = xg_test.get_label()
    print ('predicting, classification error=%f' % (sum(int(pred[i]) != test_y[i]
                                                        for i in range(len(test_y))) / float(len(test_y))))

    print("start time    : %s !" % starttime)
    print("finished time : %s !" % datetime.today().strftime(time_format))
	def get_busses(self):
		"""Return bus data, refreshing from the source at most once a minute
		and otherwise serving the cached result."""
		# BUG fix: elapsed time was computed as (last_request - now), which
		# is always negative, so the cache never expired after the first
		# fetch.  Compare now - last_request instead.
		if self.last_request is None or (datetime.today() - self.last_request) > timedelta(minutes=1):
			self.data = self.get_busses_raw()
			self.last_request = datetime.today()
		else:
			# Single-argument print(...) is valid on both Python 2 and 3.
			print("Using cached data; too soon to make new request")
		return self.data
    def __init__(self, request, response):
        """Per-request setup: record the start time, reset template state,
        log the current GAE user, and set the 'isFreightAdmin' cookie
        ('true'/'false', 1-day expiry) based on admin status."""
        super(BaseHandler, self).__init__(request, response)
        self.start_time = datetime.today()  # for request timing
        self.resp = None
        self.template_file = None
        self.template_values = {}

        one_day = datetime.today() + timedelta(days=1)  # cookie expiry
        value = 'false'

        user = users.get_current_user()
        if user:
            logging.info('email: {}'.format(user.email()))
            logging.info('nickname: {}'.format(user.nickname()))
            logging.info('userid: {}'.format(user.user_id()))
        else:
            logging.info('user is not a current gmail account')

        if users.is_current_user_admin():
            value = 'true'

        logging.info('admin value: {}'.format(value))

        # Expose admin status to the frontend; overwrite any stale cookie.
        self.response.set_cookie(
            'isFreightAdmin',
            value=value,
            expires=one_day,
            overwrite=True
        )
Example #26
0
 def is_subscriber(user):
     """Return True when *user* has a Subscription whose
     (date_from, date_to) window strictly contains the current moment."""
     # One timestamp for both bounds; the original called datetime.today()
     # twice.  .exists() lets the DB stop at the first match instead of
     # counting every row, and the filter's boolean result is returned
     # directly rather than via an if/else returning literals.
     now = datetime.today()
     return Subscription.objects.filter(user__id=user.id,
                                        date_from__lt=now,
                                        date_to__gt=now).exists()
Example #27
0
    def highlighted_status(self):
        """Traffic-light status for this invoice.

        Returns 0 when paid or nothing is pending, 2 when an estimated
        through/perception date is already past, 1 when one falls within
        the next settings.NEXT_INVOICE_DAYS days.
        """
        if self.perception_state == PERCEPTION_STATE_PAYMENT_RECEIVED:
            return 0

        # Compute the reference dates once; the original re-evaluated
        # datetime.today().date() in up to four separate branches.
        today = datetime.today().date()
        soon = today + timedelta(days=settings.NEXT_INVOICE_DAYS)

        def _pending(actual, estimated):
            # An estimated date only matters while the actual one is unset.
            return not actual and estimated

        # Overdue (2) is checked before "due soon" (1), matching the
        # original elif ordering.
        if _pending(self.through_date, self.estimated_through_date) \
                and self.estimated_through_date < today:
            return 2
        if _pending(self.perception_date, self.estimated_perception_date) \
                and self.estimated_perception_date < today:
            return 2
        if _pending(self.through_date, self.estimated_through_date) \
                and self.estimated_through_date < soon:
            return 1
        if _pending(self.perception_date, self.estimated_perception_date) \
                and self.estimated_perception_date < soon:
            return 1

        return 0
Example #28
0
def predict(args):
    """Score args.input in chunks with a saved XGBoost model, appending
    `aid,prediction` rows to args.output.

    args supplies: model, nthread, input, output, batch_size and xindex
    (column index where features start; earlier columns are the row ids).
    """
    # Renamed from `format`, which shadowed the builtin.
    time_format = "%Y-%m-%d %H:%M:%S"
    starttime = datetime.today().strftime(time_format)
    bst = xgb.Booster({'nthread': args.nthread}, model_file=args.model)
    # Stream the CSV in chunks so arbitrarily large inputs fit in memory.
    reader = pd.read_csv(args.input, sep=',', header=None, chunksize=int(args.batch_size))
    num = 0
    # `with` guarantees the output handle is closed even on error; the
    # original leaked it if anything between open() and close() raised.
    with open(args.output, 'ab') as f:
        for chunk in reader:
            start = datetime.now()
            num += 1
            data = chunk.values
            test_X = data[:, args.xindex:]
            test_aid = data[:, :args.xindex]

            xg_test = xgb.DMatrix(test_X)
            pred = bst.predict(xg_test)  # softmax objective -> 1D array
            if len(pred) == len(test_aid):
                np.savetxt(f, list(zip(test_aid, pred)), fmt="%d", delimiter=",")
            else:
                print("number does not match at %s chunk, so skipped!" % num)

            # Flush periodically so progress survives a crash.
            if num % 10 == 0:
                f.flush()
            print("finishing %s batch in %s seconds!" % (num, (datetime.now() - start).total_seconds()))
        f.flush()
    print("start time    : %s !" % starttime)
    print("finished time : %s !" % datetime.today().strftime(time_format))
 def Ordering__using_memory_separating_File(self):
     """For each chosen feature, read its raw score file, sort all rows by
     score (descending) in memory, and write the top 300 back out to the
     corresponding ordered file."""
     print "Starting Ordering the Calculating  in Separating File", datetime.today()
     
     for indice in range(len(self.preparedParameter.featuresChoice)):
         
         fw = open(self.filePathOrdered +  '.' +str(self.preparedParameter.featuresChoice[indice]) + '.txt', 'w')
         
         fr = open(self.filepathResult +  '.' +str(self.preparedParameter.featuresChoice[indice]) + '.txt', 'r')
         print "reading file", str(self.preparedParameter.featuresChoice[indice])
         lines = fr.readlines()
         data = []
         element = 0
         qty = len(lines)
         for line in lines:
             element = element + 1
             # Progress feedback every 2000 buffered rows.
             if element % 2000 == 0:
                 self.printProgressofEvents(element, qty, "Buffering Calculations to ordering: ")
             
             # Row layout: score:col1:col2 (colon separated).
             cols = line.split(':')
             data.append([float(cols[0]), cols[1], cols[2]])
         del lines
         print "ordering file", str(self.preparedParameter.featuresChoice[indice])
         orderData = sorted(data, key=lambda value: value[0], reverse=True)
         element = 0
         for item in orderData:
             element = element + 1
             # NOTE(review): progress is reported against
             # self.qtyDataCalculated even though at most 300 rows are
             # written -- confirm that is the intended denominator.
             self.printProgressofEvents(element, self.qtyDataCalculated, "Saving Data Ordered: ")
             # Keep only the 300 best-scoring rows.
             if element == 301:
                 break
             fw.write(str(item[0]) +'\t' + item[1] + '\t' + item[2] )
         fw.close()
         fr.close()
         # Free the buffered rows before processing the next feature.
         del data
         del orderData
         print "Ordering the Calculating  in Separating File FINISHED", datetime.today()
Example #30
0
    def read_from_db_core(self,date_pre):
        """Build a pull-activity histogram for clients of this app created
        on self.date: per day between date_pre and today, count clients in
        the frequency buckets '1', '2-4', '5-10' and '>10' pulls.

        :param date_pre: inclusive lower bound (YYYY-MM-DD) for cur_day.
        :returns: {'last_day': <yesterday YYYY-MM-DD>,
                   'hist': {day: {bucket: count}}}
        """
        active_list={"last_day":(datetime.today()-timedelta(days=1)).strftime("%Y-%m-%d")}
        today=datetime.today().strftime("%Y-%m-%d")
        active_list['hist']={}
        # Exclusive upper bound on create_time: the day after self.date.
        date_post=(datetime.strptime(self.date, "%Y-%m-%d")+timedelta(days=1)).strftime("%Y-%m-%d")

        # NOTE(review): values are interpolated straight into the SQL; this
        # is only safe while self.date / self.app_id / date_pre are trusted
        # internal values -- switch to parameterised queries if any of them
        # can become user-supplied.
        sql=("select T2.cur_day day,T2.push_times times,count(T2.client_id) num"
             " from sdk2.sdk_clients T1 left join ad_pull_log T2 on T1.client_id=T2.client_id"
             " where T1.create_time>='%s' and T1.create_time<'%s' and T1.app_id='%s' and T2.cur_day>='%s' and T2.cur_day<'%s'"
             " group by date(T2.cur_day),T2.push_times" % (self.date,date_post,self.app_id,date_pre,today))


        sqlres=DB_G.query(sql)

        for appres in sqlres:
            day=appres['day'].strftime("%Y-%m-%d")
            if day not in active_list['hist']:
                active_list['hist'][day]={'1':0,'2-4':0,'5-10':0,'>10':0}

            # Bucket the per-day client count by pull frequency.
            if appres['times']==1:
                active_list['hist'][day]['1']+=appres['num']
            elif 4>=appres['times']>=2:
                active_list['hist'][day]['2-4']+=appres['num']
            elif 10>=appres['times']>=5:
                active_list['hist'][day]['5-10']+=appres['num']
            elif appres['times']>10:
                active_list['hist'][day]['>10']+=appres['num']

        return active_list
# Database connection settings from the config section selected above.
USER = config[config_param]['USER']
PASSWORD = config[config_param]['PASSWORD']
HOST = config[config_param]['HOST']
PORT = config[config_param]['PORT']

# SMTP / mail-notification settings.
config_mail = 'CONFIG_MAIL'

SENDER_NAME_SMTP = config[config_mail]['SENDER_NAME']
SENDER_SMTP = config[config_mail]['SENDER']
RECEIVERS_SMTP = config[config_mail]['RECEIVERS']
HOST_SMTP = config[config_mail]['HOST']
PORT_SMTP = config[config_mail]['PORT']
USERNAME_SMTP = config[config_mail]['USERNAME']
# NOTE(review): key 'PASSWSORD' looks like a typo for 'PASSWORD', but it
# must match the actual key in the config file -- confirm before renaming.
PASSWORD_SMTP = config[config_mail]['PASSWSORD']

# One log file per day (log_pagina_YYYY_MM_DD.log), appended across runs.
now = datetime.today()
PATH_LOG = '/home/ec2-pentaho/pentaho/unp/Facebook/pagina/log/'
name_log = PATH_LOG + 'log_pagina_' + now.strftime("%Y_%m_%d") + '.log'
logging.basicConfig(
    filename=name_log,
    filemode='a',
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    level=logging.INFO)


def sendMail(subject, message):
    print("Enviando mensaje")
    print(subject)
    print(message)
    msg = MIMEMultipart('alternative')
    msg['Subject'] = subject
Example #32
0
class BookingsList(ListAPIView):
    """List upcoming bookings (date >= today) belonging to the requesting user."""

    # Fallback queryset; DRF actually calls get_queryset() below.
    # NOTE: this class-level filter is evaluated once, at import time.
    queryset = Booking.objects.filter(date__gte=datetime.today())
    serializer_class = BookingSerializer

    def get_queryset(self):
        """Return the current user's upcoming bookings.

        Bug fix: the filtered queryset was assigned to a local variable but
        never returned, so the view implicitly returned None.
        """
        return Booking.objects.filter(date__gte=datetime.today(),
                                      user=self.request.user)
#starting menu
while True:
	print("\n 1. Anime Airing today \n 2. Top Anime \n 3. Search for Anime \n 4. Get Anime List") 
	print("100. Exit\n")
	choice = int(input("Enter your choice: "))
	if choice == 1:
		#getting dict
		scheduled = jikan.schedule()

		#getting anime for day
		print("\nAnime airing today: ")

		#print(scheduled)
		#printing anime airing today
		day = datetime.today().strftime('%A').lower()
		for items in scheduled[day]:
			print("\nTitle: ", items['title'])
			print("Score: ", items['score'])
			print("Synopsis: ", items['synopsis'])

	elif choice == 2:
		print("\nTop Animes: ")

		#getting dict
		top_anime = jikan.top(type='anime')

		#printing all the stuff
		for items in top_anime['top']:
			print("\nRank: ", items['rank'])
			print("Title: ", items['title'])
Example #34
0
def is_it_daytime(lat, lon, delta_minutes):
    """Return True iff the current UTC time lies between sunrise and sunset
    at (lat, lon), with the window shrunk by delta_minutes on each side."""
    sun = Astral()
    margin = timedelta(minutes=delta_minutes)
    dawn = sun.sunrise_utc(datetime.today(), lat, lon) + margin
    dusk = sun.sunset_utc(datetime.today(), lat, lon) - margin
    return dawn < datetime.now(UTC()) < dusk
Example #35
0
    name="Darold So",
    email="*****@*****.**",
    picture='https://avatars0.githubusercontent.com/u/9425789?v=3&s=460')
# Seed script: persist the sample user, then a genre and two artists tied to it.
session.add(User1)
session.commit()

# Single seed genre; both artists below reference it.
genre1 = Genre(name="Rock", user=User1)
session.add(genre1)
session.commit()

artist1 = Artist(
    name="Guns N' Roses",
    biography=
    """At a time when pop was dominated by dance music and pop-metal, Guns N' Roses brought raw, ugly rock & roll crashing back into the charts. They were not nice boys; nice boys don't play rock & roll. They were ugly, misogynistic, and violent; they were also funny, vulnerable, and occasionally sensitive, as their breakthrough hit, \"Sweet Child O' Mine,\" showed. While Slash and Izzy Stradlin ferociously spit out dueling guitar riffs worthy of Aerosmith or the Stones, Axl Rose screeched out his tales of sex, drugs, and apathy in the big city. Meanwhile, bassist Duff McKagan and drummer Steven Adler were a limber rhythm section who kept the music loose and powerful.
    Guns N' Roses' music was basic and gritty, with a solid hard, bluesy base; they were dark, sleazy, dirty, and honest -- everything that good hard rock and heavy metal should be. There was something refreshing about a band that could provoke everything from devotion to hatred, especially since both sides were equally right. There hadn't been a hard rock band this raw or talented in years, and they were given added weight by Rose's primal rage, the sound of confused, frustrated white trash vying for a piece of the pie. As the '80s became the '90s, there simply wasn't a more interesting band around, but owing to intra-band friction and the emergence of alternative rock, Rose's supporting cast eventually left, and he spent over 15 years recording before the long-delayed Chinese Democracy appeared in 2008.""",
    created_at=datetime.today(),
    genre=genre1,
    user=User1)
session.add(artist1)
session.commit()

artist2 = Artist(
    name="Linkin Park",
    biography=
    """Although rooted in alternative metal, Linkin Park became one of the most successful acts of the 2000s by welcoming elements of hip-hop, modern rock, and atmospheric electronica into their music. The band's rise was indebted to the aggressive rap-rock movement made popular by the likes of Korn and Limp Bizkit, a movement that paired grunge's alienation with a bold, buzzing soundtrack.
    Linkin Park added a unique spin to that formula, however, focusing as much on the vocal interplay between singer Chester Bennington and rapper Mike Shinoda as the band's muscled instrumentation, which layered DJ effects atop heavy, processed guitars. While the group's sales never eclipsed those of its tremendously successful debut, Hybrid Theory, few alt-metal bands rivaled Linkin Park during the band's heyday.""",
    created_at=datetime.today(),
    genre=genre1,
    user=User1)
session.add(artist2)
session.commit()
	def parse(self, response):
		"""Scrape Idealista Gran Canaria sale listings via Selenium Chrome and
		yield one loaded IslandScraperItem per advert.

		First loads the filtered search page to compute the number of result
		pages (30 results per page), then visits each page, parses every
		advert card and yields items through scrapy's ItemLoader.
		"""
		# Randomise the user agent and window size to look less like a bot.
		agent = user_agent_rotator.get_random_user_agent()
		options.add_argument(f"user-agent={agent}")
		self.driver = webdriver.Chrome(str(Path(Path.cwd(), "chromedriver.exe")), chrome_options=options)
		self.driver.set_window_size(randrange(1100, 1200), randrange(800, 900))
		self.driver.get("https://www.idealista.com/en/venta-viviendas/las-palmas/gran-canaria/con-precio-hasta_200000,metros-cuadrados-mas-de_40,pisos,de-dos-dormitorios,de-tres-dormitorios,de-cuatro-cinco-habitaciones-o-mas,aireacondicionado,ultimas-plantas,plantas-intermedias,obra-nueva,buen-estado/")
		sleep(2)
		# Scroll around to trigger lazy loading and mimic a human visitor.
		body = self.driver.find_element_by_css_selector('body')
		sleep(1)
		body.send_keys(Keys.PAGE_DOWN)
		sleep(1)
		body.send_keys(Keys.PAGE_UP)
		sleep(1)
		body.send_keys(Keys.PAGE_DOWN)
		body.send_keys(Keys.HOME)

		sel = Selector(text=self.driver.page_source)

		# Total result count sits in the breadcrumb (e.g. "1,234 homes ...").
		pages = sel.xpath('.//span[@class="breadcrumb-info"]/text()').extract()[1]
		pages = pages.replace(",", "").split(" ")[0]
		pages = int(pages) / 30
		pages_count = int(pages) + 1

		self.driver.quit()

		for page in range (pages_count):
			# NOTE(review): page numbering starts at 0 (pagina-0.htm) —
			# confirm the site serves page 0, otherwise start at 1.
			self.driver = webdriver.Chrome(str(Path(Path.cwd(), "chromedriver.exe")), chrome_options=options)
			# self.driver = webdriver.Firefox(executable_path=str(Path(Path.cwd(), "geckodriver.exe")))
			self.driver.set_window_size(randrange(1100, 1200), randrange(800, 900))
			self.driver.get(f"https://www.idealista.com/en/venta-viviendas/las-palmas/gran-canaria/con-precio-hasta_200000,metros-cuadrados-mas-de_40,pisos,de-dos-dormitorios,de-tres-dormitorios,de-cuatro-cinco-habitaciones-o-mas,aireacondicionado,ultimas-plantas,plantas-intermedias,obra-nueva,buen-estado/pagina-{page}.htm")
			sleep(1)
			body = self.driver.find_element_by_css_selector('body')
			sleep(1)
			body.send_keys(Keys.END)
			sleep(1)
			body.send_keys(Keys.HOME)

			try:
				# Hover a random listing photo to mimic user behaviour.
				picture = self.driver.find_elements_by_css_selector('picture')[randrange(1, 5)]
				# Bug fix: `driver` was an undefined name (the NameError was
				# silently swallowed by the bare except below, so the hover
				# never actually ran); use the instance's driver.
				hov = ActionChains(self.driver).move_to_element(picture)
				hov.perform()
			except:
				pass

			sel = Selector(text=self.driver.page_source)
			adverts = sel.xpath('//article[contains(@class, "item")]')

			for advert in adverts:
				try:
					l = ItemLoader(item=IslandScraperItem(), selector=advert)
					title = advert.xpath('.//a[contains(@class, "item-link")]/@title').extract_first()
					link_string = advert.xpath('.//a[contains(@class, "item-link")]/@href').extract_first()
					link = "https://www.idealista.com" + link_string
					# Title looks like "Flat in <area>, <locality>".
					address = title.split(" in ")[1]
					address_list = address.split(", ")
					locality = address_list[-1]
					area = ""
					if len(address_list) > 1:
						area =  address.split(", ")[-2]
					price_string = advert.xpath('.//span[contains(@class, "item-price")]/text()').extract_first()
					price = price_string.replace(",", "")
					beds_string = advert.xpath('.//span[contains(@class, "item-detail")]/text()').extract_first()
					beds = beds_string.strip()
					size_string = advert.xpath('.//span[contains(@class, "item-detail")]/text()')[1].extract()
					size = size_string.strip()
					try:
						floor_string = advert.xpath('.//span[contains(@class, "item-detail")]/text()')[2].extract()
						floor = floor_string.replace("Floor", "").strip()
					except:
						# Adverts with no floor detail default to "1".
						floor = "1"
					date = datetime.today().strftime('%Y-%m-%d')

				except:
					# NOTE(review): a parse failure here leaves the item
					# variables holding the PREVIOUS advert's values, which
					# are then loaded below — consider `continue` instead.
					pass

				l.add_value('title', title)
				l.add_value('island', "Gran Canaria")
				l.add_value('locality', locality)
				l.add_value('price', price)
				l.add_value('beds', beds)
				l.add_value('size', size)
				l.add_value('link', link)
				l.add_value('date', date)
				l.add_value('ad_type', "sale")
				yield l.load_item()

			sleep(1)
			self.driver.quit()
Example #37
0
def update_AWS():
    """Append any missing daily Facebook spend rows to the facebook_spend table.

    Compares the newest date already stored against yesterday and, when the
    table is behind, pulls the gap from the Facebook Insights API, cleans it
    to the table's schema, and appends it.  Relies on module-level names:
    `cnx` (SQL engine/connection), `my_account` (Facebook ad account), and
    the helpers `purchase_value`, `purchases_clean`, `tuesday_week`.
    """
    # Download data currently in Database
    df_fb = pd.read_sql_query('''SELECT * FROM facebook_spend;''', cnx)

    # Get latest dates updated
    # First day NOT yet in the DB, as an ISO 'YYYY-MM-DD' string ([:10]).
    max_db_date_plus_one = str(
        pd.to_datetime(max(df_fb['Date'])) + timedelta(1))[:10]
    print(max_db_date_plus_one)
    yesterday = str(datetime.today() - timedelta(1))[:10]

    # Update if necessary
    # Lexicographic compare is safe because both strings are ISO dates.
    if max_db_date_plus_one < yesterday:
        new_data = my_account.get_insights(fields=[
            'spend', 'clicks', 'impressions', 'campaign_name', 'actions',
            'action_values'
        ],
                                           params={
                                               'time_range': {
                                                   'since':
                                                   max_db_date_plus_one,
                                                   'until': yesterday
                                               },
                                               'level': 'campaign',
                                               'time_increment': '1'
                                           })
        df_fb_new = pd.DataFrame(new_data)
        # Derive revenue / purchase-count columns from the raw action lists.
        df_fb_new['purchase_value'] = df_fb_new['action_values'].apply(
            purchase_value).astype(float)
        df_fb_new['spend'] = df_fb_new['spend'].astype(float)
        df_fb_new['purchases'] = df_fb_new['actions'].apply(purchases_clean)
        df_fb_new['Channel'] = 'Facebook'
        df_fb_clean = df_fb_new[[
            'Channel', 'date_start', 'campaign_name', 'spend', 'impressions',
            'clicks', 'purchases', 'purchase_value'
        ]].copy()
        # Rename API columns to match the table's schema.
        df_fb_clean = df_fb_clean.rename(
            columns={
                'date_start': 'Date',
                'campaign_name': 'Campaign',
                'spend': 'Spend',
                'impressions': 'Impressions',
                'clicks': 'Clicks',
                'purchases': 'Conversions',
                'purchase_value': 'Revenue'
            })
        df_fb_clean['Category'] = 'Paid Social - Facebook'
        df_fb_clean['Tuesday_Week'] = df_fb_clean['Date'].apply(tuesday_week)
        df_fb_clean['Impressions'] = df_fb_clean['Impressions'].astype(int)
        df_fb_clean['Clicks'] = df_fb_clean['Clicks'].astype(int)
        df_fb_clean['Conversions'] = df_fb_clean['Conversions'].fillna(
            0).astype(int)

        # Append to existing table
        # sqlEngine = create_engine('mysql+pymysql://root:@127.0.0.1/test', pool_recycle=3600)
        dbConnection = cnx.connect()

        tableName = 'facebook_spend'

        try:
            # NOTE(review): DataFrame.to_sql returns None, so `frame` is
            # always None here — kept as-is for byte-compatibility.
            frame = df_fb_clean.to_sql(tableName,
                                       dbConnection,
                                       if_exists='append')
        except ValueError as vx:
            print(vx)
        except Exception as ex:
            print(ex)
        else:
            print("Table %s updated successfully." % tableName)
        finally:
            # Always release the connection, even when the insert fails.
            dbConnection.close()
    else:
        print("Data already updated")
Example #38
0
File: logic.py Project: m0dts/GB3KM
def comms_timer():
    """Poll RS-485 devices and switch their TX output per the configured timers.

    For every timer in config["Timers"], finds the matching device by name
    and, on enabled weekdays, compares the current time against the beacon
    on/off window (and, while RepeaterInUse, the operating window) — then
    enables or disables output 4 on that device accordingly.

    Python 2 code (print statements).  Relies on globals: comms485, config,
    RepeaterInUse.
    """
    global comms485
    comms485.read_devices()

    for tmr in config["Timers"]:
        for n in range(comms485.devicecount):
            BeaconTx = False
            ActivityTx = False
            info = comms485.list_devices(n)
            if info != {}:  #if not empty
                # First comma-separated field of INFO is the device name.
                name = info["INFO"].split(',')[0]
                #print name
                if tmr["Name"] == name:
                    if tmr["Enabled"] == "True":
                        # %w: weekday as 0 (Sunday) .. 6.
                        weekday = int(time.strftime("%w"))
                        for day in tmr["Days"].split(','):
                            if int(day) == weekday:
                                # Build today's on/off datetimes from the
                                # configured "HH:MM" beacon window.
                                on = datetime.strptime(
                                    datetime.today().strftime("%Y-%m-%d " +
                                                              tmr["BeaconOn"]),
                                    '%Y-%m-%d %H:%M')
                                off = datetime.strptime(
                                    datetime.today().strftime(
                                        "%Y-%m-%d " + tmr["BeaconOff"]),
                                    '%Y-%m-%d %H:%M')
                                if datetime.today() > on:
                                    if datetime.today() < off:
                                        #print "Tx On:"+name
                                        BeaconTx = True
                                    else:
                                        #print "Tx Off:"+name
                                        BeaconTx = False
                                else:
                                    BeaconTx = False
                                    #print "Tx Off:"+name

                                #Enable Activity Start
                                # Same window check against the operating
                                # hours, only while the repeater is in use.
                                if RepeaterInUse:
                                    on = datetime.strptime(
                                        datetime.today().strftime(
                                            "%Y-%m-%d " + tmr["OperatingOn"]),
                                        '%Y-%m-%d %H:%M')
                                    off = datetime.strptime(
                                        datetime.today().strftime(
                                            "%Y-%m-%d " + tmr["OperatingOff"]),
                                        '%Y-%m-%d %H:%M')
                                    if datetime.today() > on:
                                        if datetime.today() < off:
                                            ##print "Tx Off:"+name
                                            ActivityTx = True
                                        else:
                                            #print "Tx Off:"+name
                                            ActivityTx = False
                                    else:
                                        ActivityTx = False
                                        #print "Tx Off:"+name
                                else:
                                    ActivityTx = False
                    else:
                        BeaconTx = False
                    #update device
                    # Output 4 carries the transmitter enable.
                    if BeaconTx or ActivityTx:
                        comms485.enable_output(n, 4)
                        print "Tx On:" + name
                    else:
                        comms485.disable_output(n, 4)
                        print "Tx Off:" + name
Example #39
0
    def get_spawnpoints(swLat, swLng, neLat, neLng, oSwLat=None, oSwLng=None,
                        oNeLat=None, oNeLng=None, timestamp=0, geofences=None,
                        exclude_geofences=None):
        """Return TrsSpawn spawn points as a list of dicts.

        Filters by the (swLat, swLng)-(neLat, neLng) bounding box, optionally
        excludes the previous viewport (oSwLat..oNeLng), restricts to points
        scanned after `timestamp` (milliseconds) when given, and applies
        geofence include/exclude SQL fragments.  Timestamps are shifted from
        UTC to local time, and despawn/spawn times are derived from the
        'MM:SS' calc_endminsec value.
        """
        query = db.session.query(
            TrsSpawn.latitude, TrsSpawn.longitude,
            TrsSpawn.spawnpoint.label('spawnpoint_id'), TrsSpawn.spawndef,
            TrsSpawn.first_detection, TrsSpawn.last_non_scanned,
            TrsSpawn.last_scanned, TrsSpawn.calc_endminsec.label('end_time')
        )

        if timestamp > 0:
            # If timestamp is known only send last scanned spawn points.
            t = datetime.fromtimestamp(timestamp / 1000)
            query = query.filter(
                (TrsSpawn.last_scanned > t) | (TrsSpawn.last_non_scanned > t)
            )

        if swLat and swLng and neLat and neLng:
            query = query.filter(
                TrsSpawn.latitude >= swLat,
                TrsSpawn.longitude >= swLng,
                TrsSpawn.latitude <= neLat,
                TrsSpawn.longitude <= neLng
            )

        if oSwLat and oSwLng and oNeLat and oNeLng:
            # Exclude spawn points within old boundaries.
            query = query.filter(
                ~and_(
                    TrsSpawn.latitude >= oSwLat,
                    TrsSpawn.longitude >= oSwLng,
                    TrsSpawn.latitude <= oNeLat,
                    TrsSpawn.longitude <= oNeLng
                )
            )

        if geofences:
            sql = geofences_to_query(geofences, 'trs_spawn')
            query = query.filter(text(sql))

        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'trs_spawn')
            query = query.filter(~text(sql))

        result = query.all()

        spawnpoints = []
        ts = time.time()
        # Difference between local time and UTC, used to localize DB values.
        utc_offset = datetime.fromtimestamp(ts) - datetime.utcfromtimestamp(ts)
        for sp in result:
            sp = sp._asdict()
            if sp['last_non_scanned'] is not None:
                sp['last_non_scanned'] = sp['last_non_scanned'] - utc_offset
            if sp['end_time'] is not None:
                if sp['last_scanned'] is not None:
                    sp['last_scanned'] = sp['last_scanned'] - utc_offset
                # end_time is 'MM:SS' within the hour; project it onto today.
                end_time_split = sp['end_time'].split(':')
                end_time_seconds = int(end_time_split[1])
                end_time_minutes = int(end_time_split[0])
                despawn_time = datetime.today().replace(
                    minute=end_time_minutes, second=end_time_seconds,
                    microsecond=0
                )
                if despawn_time <= datetime.today():
                    # Mark already passed this hour — despawn is next hour.
                    despawn_time += timedelta(hours=1)
                sp['despawn_time'] = despawn_time - utc_offset
                if sp['spawndef'] == 15:
                    # spawndef 15 is a 60-minute spawn; others last 30 minutes.
                    sp['spawn_time'] = sp['despawn_time'] - timedelta(hours=1)
                else:
                    sp['spawn_time'] = (sp['despawn_time']
                                        - timedelta(minutes=30))
                del sp['end_time']
            spawnpoints.append(sp)

        return spawnpoints
Example #40
0
def model(params):
    """Train one hyper-parameter configuration and return a hyperopt result.

    Builds a bidirectional-LSTM character tagger from `params`, trains it in
    a CHILD process (to avoid Keras/TF memory growth across repeated trials),
    and returns a hyperopt-style dict whose `loss` is the negated validation
    accuracy.  Relies on module-level training data and settings
    (x_train/y_train, x_test/y_test, num_step, epochs, shuffle, constant).
    """
    # Queue
    # Carries the child process's accuracy back to this process.
    queue = Queue()

    # Initialize checkpoint directory
    # Timestamped so each trial gets its own directory.
    directory_name = datetime.today().strftime("%d-%m-%Y-%H-%M-%S")
    checkpoint_directory = os.path.join("checkpoint", directory_name)

    # Process Target
    def train(params, checkpoint_directory, queue):
        """Build, train and evaluate the model; put its accuracy on `queue`."""
        # Hyper-parameters
        embedding_neuron = params['embedding_neuron']
        lstm_params = params['lstm']
        lstm_num_layer = lstm_params['layer']
        optimizer = params['optimizer']
        batch_size = params['batch_size']

        # Debug
        print("[Params]", params)

        # Initialize checkpoint directory
        tensorboard_directory = os.path.join(checkpoint_directory, "tensorboard")
        os.makedirs(checkpoint_directory)
        os.makedirs(tensorboard_directory)

        # Sequential model
        model = Sequential()

        # Embedding layer
        model.add(Embedding(constant.NUM_CHARS, embedding_neuron,
                            input_length=num_step))

        for i in range(lstm_num_layer):
            neuron = lstm_params['neuron'][i]
            dropout_rate = lstm_params['dropout'][i]

            # LSTM layer
            lstm = LSTM(neuron, return_sequences=True, unroll=True,
                        dropout=dropout_rate, recurrent_dropout=dropout_rate)

            # Bidirectional LSTM
            bi_lstm = Bidirectional(lstm)
            model.add(bi_lstm)

            # LSTM dropout
            model.add(Dropout(dropout_rate))

        # RNN
        # Per-timestep softmax over the tag set.
        model.add(TimeDistributed(Dense(constant.NUM_TAGS, activation="softmax"),
                                  input_shape=(num_step, lstm_params['neuron'][-1])))

        # Compile
        model.compile(loss="categorical_crossentropy", optimizer=optimizer,
                      metrics=["categorical_accuracy"])

        # Save model architecture to file
        with open(os.path.join(checkpoint_directory, "model.json"), "w") as file:
            file.write(model.to_json())

        # Save model config to file
        with open(os.path.join(checkpoint_directory, "model_config.txt"), "w") as file:
            pprint(model.get_config(), stream=file)

        # Display model summary before train
        model.summary()

        # Callback
        params = DottableDict({
            "es_enable": False,
            "es_min_delta": 0,
            "es_patience": 0
        })
        path = DottableDict({
            "checkpoint": checkpoint_directory,
            "tensorboard": tensorboard_directory,
            "loss_log": os.path.join(checkpoint_directory, "loss.csv"),
            "score_log": os.path.join(checkpoint_directory, "score.csv")
        })
        callbacks = CustomCallback(params, path).callbacks

        # Train
        model.fit(x_train, y_train, validation_data=(x_test, y_test),
                  epochs=epochs, batch_size=batch_size, verbose=2,
                  callbacks=callbacks, shuffle=shuffle)

        # Evaluate
        _, accuracy = model.evaluate(x_test, y_test, verbose=0)

        # Debug
        print("[Validation] categorical_accuracy:", accuracy)
        print("")

        # Put accuracy to queue
        queue.put(accuracy)

    # Spawn process for training model to prevent memory leak
    process = Process(target=train, args=(params, checkpoint_directory, queue))
    process.start()

    # Get accuracy from queue
    # NOTE(review): queue.get() blocks; process.join() is never called, but
    # the child exits after putting its result.
    accuracy = queue.get()

    # Hyperopt minimizes loss, so return negated accuracy.
    return {"loss": -accuracy, "status": STATUS_OK, "params": params,
            "checkpoint_directory": checkpoint_directory}
Example #41
0
def main():
    """Crawl every configured page: read its URL CSV, scrape each URL in
    threaded batches of EVERY_TIME, and write results/errors to dated CSVs."""
    for data_page in cf_data_crawler:

        # Gather the input/output file paths for the site being crawled.
        tail_file = "_" + datetime.today().strftime('%d%m%Y') + ".csv"
        file_url_csv = cf.path_folder_url + data_page[0] + '.csv'
        file_name = data_page[0].replace("urls_", "")
        file_data_csv = cf.path_folder_data_raw + 'data_' + file_name + tail_file
        file_err_csv = cf.path_folder_data_raw + 'err_' + file_name + tail_file
        arr_selectors = data_page[1]
        print("[**] Loading url data from file : " + file_url_csv, end=" => ")
        try:
            csv_data = pd.read_csv(file_url_csv)
            utl.push_header_to_file(file_data_csv, cf.field_header_file_scrap)
            utl.push_header_to_file(file_err_csv, cf.field_header_file_err)
            print("OK !!!!!!!")
        except Exception as ex:
            print("[Error] : Can't open file !!!")
            continue
        print("[*] Starting crawler from url....")
        # Column 0 holds the row index (stt), column 1 holds the URL.
        data_urls = csv_data[csv_data.columns[1]].values
        data_stt = csv_data[csv_data.columns[0]].values
        data_rooms = []

        # Process the URLs in batches of EVERY_TIME.
        for i_time in range(0, len(data_urls), EVERY_TIME):
            urls = []
            end_url = i_time + EVERY_TIME
            if end_url >= len(data_urls):
                end_url = len(data_urls)
            print("[-] Scraping data from url : [" + str(i_time) + " -> " +
                  str(end_url) + "]",
                  end=" =>")
            for i in range(i_time, end_url, 1):
                urls.append([data_stt[i], data_urls[i]
                             ])  # keep index and url together for easy lookup

            url_errs = []
            # Run the scraping batch across multiple threads.
            threads = [
                threading.Thread(target=data_crawler,
                                 args=(url, arr_selectors, data_rooms,
                                       url_errs)) for url in urls
            ]
            utl.run_thread(threads)

            if url_errs:  # some URLs failed
                print("[ Done ] : " + str(EVERY_TIME - len(url_errs)) + "/" +
                      str(EVERY_TIME) + ". " + str(len(url_errs)) +
                      " failed ! -> push to file :" + file_err_csv)
                utl.push_data_to_exist_file(url_errs, file_err_csv)

            else:  # batch completed without errors
                print(" [OK] !!!!!!")
                # Flush results once the buffer grows past the limit.
                if len(data_rooms) >= LIMIT_PUSH_DATA:
                    utl.push_data_to_exist_file(data_rooms, file_data_csv)
                    data_rooms.clear()
                    print("[Done] Over : Reset data ;)")
        # Flush whatever is left for this page.
        if data_rooms:
            utl.push_data_to_exist_file(data_rooms, file_data_csv)
        print(
            "====================================================================="
        )
Example #42
0
    #	model.load_weights(model_filename)
    model.load_weights(model_basename)
# model_reverse.load_weights(model_basename + '_reverse')

num_iters = 100  # Number of iterations for training
epochs_per_iter = 10  # Number of iterations before we save our model
batch_size = 16  # Number of training examples pushed to the GPU per batch.
# Larger batch sizes require more memory, but training will be faster
print('Starting training!')
no_files = 603
cur_iter = 0
while cur_iter < num_iters:
    f = open("result.txt", 'a')
    f.write('====================================================')
    f.write('Forward iter : ' + str(cur_iter) + '  |  ')
    f.write(str(datetime.today().strftime("%H-%M-%S")))
    f.write('\n\n')
    f.close()

    for no in range(1, 603):
        if no == 221 or no == 425 or no == 445:
            continue

        if no / 10 < 1:
            filename = '00' + str(no)
        else:
            if no / 100 < 1:
                filename = '0' + str(no)
            else:
                filename = str(no)
        # get data
                psutil.ZombieProcess):
            return False


# Create log file
LOG_FORMAT = "%(levelname)s %(asctime)s: %(message)s"
logging.basicConfig(filename="AutoBoot.log",
                    level=logging.DEBUG,
                    format=LOG_FORMAT)
logger = logging.getLogger()

# Clear log file if no ERRORS and log file is over 30 days old
with open('AutoBoot.log') as input_log:
    # First line matches LOG_FORMAT, e.g. "DEBUG 2021-01-01 12:00:00,000: ...";
    # token [1] is the date part of asctime.
    # NOTE(review): raises IndexError/ValueError on an empty or malformed
    # first line — confirm the log always starts with a formatted record.
    check_date = input_log.readline().split()
    past = datetime.strptime(check_date[1], '%Y-%m-%d')
    present = datetime.today()
    check_log = input_log.read()
    if (present - past).days > 30 and not 'ERROR' in check_log:
        # Truncate the file by reopening it in write mode.
        open('AutoBoot.log', 'w').close()
    else:
        pass

logger.debug("Running AutoBoot"
             )  # write to debug so script starting date can be established
# List of Programs that need to be checked
with open('check_list.txt') as input_txt:
    check_list = input_txt.read().splitlines()

# Check if Programs in list are running
for process in check_list:
    if check_running(process):
Example #44
0
    def get_pokestops(swLat, swLng, neLat, neLng, oSwLat=None, oSwLng=None,
                      oNeLat=None, oNeLng=None, timestamp=0,
                      eventless_stops=True, quests=True, invasions=True,
                      lures=True, geofences=None, exclude_geofences=None):
        """Return PokéStops inside the bounding box as a list of dicts.

        When `quests` is set, outer-joins today's quests (everything since
        args.quest_reset_time).  When `eventless_stops` is False, keeps only
        stops with an active quest, invasion or lure.  Supports excluding the
        previous viewport (oSwLat..oNeLng), an update cutoff `timestamp`
        (milliseconds), and geofence include/exclude filters.  Expired or
        unrequested invasion/lure fields are blanked before returning.
        """
        columns = [
            'pokestop_id', 'name', 'image', 'latitude', 'longitude',
            'last_updated', 'incident_grunt_type', 'incident_expiration',
            'active_fort_modifier', 'lure_expiration'
        ]

        if quests:
            quest_columns = [
                'GUID', 'quest_timestamp', 'quest_task', 'quest_type',
                'quest_stardust', 'quest_pokemon_id', 'quest_pokemon_form_id',
                'quest_pokemon_costume_id', 'quest_reward_type',
                'quest_item_id', 'quest_item_amount'
            ]
            # Quests reset daily at args.quest_reset_time ('HH:MM'); only
            # quests scanned after today's reset are current.
            hours = int(args.quest_reset_time.split(':')[0])
            minutes = int(args.quest_reset_time.split(':')[1])
            reset_time = datetime.today().replace(
                hour=hours, minute=minutes, second=0, microsecond=0
            )
            reset_timestamp = datetime.timestamp(reset_time)
            query = (
                db.session.query(Pokestop, TrsQuest)
                .outerjoin(
                    TrsQuest,
                    and_(
                        Pokestop.pokestop_id == TrsQuest.GUID,
                        TrsQuest.quest_timestamp >= reset_timestamp
                    )
                )
                .options(
                    Load(Pokestop).load_only(*columns),
                    Load(TrsQuest).load_only(*quest_columns)
                )
            )
        else:
            query = Pokestop.query.options(load_only(*columns))

        if not eventless_stops:
            # Keep only stops that currently have at least one active event.
            conds = []
            if quests:
                conds.append(TrsQuest.GUID.isnot(None))
            if invasions:
                conds.append(Pokestop.incident_expiration > datetime.utcnow())
            if lures:
                conds.append(Pokestop.lure_expiration > datetime.utcnow())
            query = query.filter(or_(*conds))

        if timestamp > 0:
            # If timestamp is known only send last scanned PokéStops.
            t = datetime.utcfromtimestamp(timestamp / 1000)
            query = query.filter(Pokestop.last_updated > t)

        if swLat and swLng and neLat and neLng:
            query = query.filter(
                Pokestop.latitude >= swLat,
                Pokestop.longitude >= swLng,
                Pokestop.latitude <= neLat,
                Pokestop.longitude <= neLng
            )

        if oSwLat and oSwLng and oNeLat and oNeLng:
            # Exclude PokéStops within old boundaries.
            query = query.filter(
                ~and_(
                    Pokestop.latitude >= oSwLat,
                    Pokestop.longitude >= oSwLng,
                    Pokestop.latitude <= oNeLat,
                    Pokestop.longitude <= oNeLng
                )
            )

        if geofences:
            sql = geofences_to_query(geofences, 'pokestop')
            query = query.filter(text(sql))

        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'pokestop')
            query = query.filter(~text(sql))

        result = query.all()

        now = datetime.utcnow()
        pokestops = []
        for r in result:
            # With quests the row is a (Pokestop, TrsQuest) tuple; without,
            # it is the Pokestop ORM object itself.
            pokestop_orm = r[0] if quests else r
            quest_orm = r[1] if quests else None
            pokestop = orm_to_dict(pokestop_orm)
            if quest_orm is not None:
                pokestop['quest'] = {
                    'scanned_at': quest_orm.quest_timestamp * 1000,
                    'task': quest_orm.quest_task,
                    'reward_type': quest_orm.quest_reward_type,
                    'item_id': quest_orm.quest_item_id,
                    'item_amount': quest_orm.quest_item_amount,
                    'pokemon_id': quest_orm.quest_pokemon_id,
                    'form_id': quest_orm.quest_pokemon_form_id,
                    'costume_id': quest_orm.quest_pokemon_costume_id,
                    'stardust': quest_orm.quest_stardust
                }
            else:
                pokestop['quest'] = None
            # Blank out expired (or not requested) invasion/lure data.
            if (pokestop['incident_expiration'] is not None
                    and (pokestop['incident_expiration'] < now
                         or not invasions)):
                pokestop['incident_grunt_type'] = None
                pokestop['incident_expiration'] = None
            if (pokestop['lure_expiration'] is not None
                    and (pokestop['lure_expiration'] < now or not lures)):
                pokestop['active_fort_modifier'] = None
                pokestop['lure_expiration'] = None
            pokestops.append(pokestop)

        return pokestops
Example #45
0
# Resolve the id of the "local_type" travel type from the lookup list.
local_type_id = None

for t in travel_types:
    if t["value"] == "local_type":
        local_type_id = t["id"]

# Generate `--number` random patients with jittered Kazakh city addresses.
for i in range(int(args['number'])):
    patient = Patient()
    patient.first_name = names.get_first_name()
    patient.second_name = names.get_last_name()
    # Random zero-padded 9-digit IIN (national id number).
    patient.iin = f"{randint(0, 999999999):09}"
    # Random date of birth between 1950 and 2001, serialized at midnight +06:00.
    bornDate = randomDate(datetime.strptime('01.01.1950', '%d.%m.%Y'),
                          datetime.strptime('31.12.2001', '%d.%m.%Y'))
    bornDate = datetime.strftime(bornDate, "%Y-%m-%dT00:00:00+06:00")
    patient.dob = bornDate
    patient.created_date = datetime.strftime(datetime.today(),
                                             "%Y-%m-%dT00:00:00+06:00")
    patient.travel_type_id = local_type_id
    patient.region_id = 10

    # Pick a random city as the home location.
    toPoint = kzcities[randint(0, len(kzcities) - 1)]
    # insert address
    # Jitter the coordinates so generated patients don't share one point.
    additionalLat = randint(0, 200)
    additionalLon = randint(0, 200)
    address = Address()
    address.country_id = 88
    address.city = toPoint['name'] + f"{additionalLat} {additionalLon}"
    address.lat = toPoint['latitude'] + float(additionalLat) / 1000
    address.lng = toPoint['longitude'] + float(additionalLon) / 1000
    address.insert()
    patient.home_address_id = address.id
Example #46
0
# Load library metadata (version, lib path) by executing libinfo.py in an
# isolated namespace rather than importing it.
libinfo = {'__file__': libinfo_py}
# Use a context manager so the file handle is closed deterministically
# (the original relied on the garbage collector to close it).
with open(libinfo_py, "rb") as _libinfo_file:
    exec(compile(_libinfo_file.read(), libinfo_py, 'exec'), libinfo, libinfo)

LIB_PATH = libinfo['find_lib_path']()
__version__ = libinfo['__version__']

# set by the CD pipeline
is_release = os.environ.get("IS_RELEASE", "").strip()

# set by the travis build pipeline
travis_tag = os.environ.get("TRAVIS_TAG", "").strip()

# nightly build tag: append bYYYYMMDD when this is neither a release nor a
# tagged CI build
if not travis_tag and not is_release:
    __version__ += 'b{0}'.format(datetime.today().strftime('%Y%m%d'))

# patch build tag: "patch-<version>" tags override the version outright
elif travis_tag.startswith('patch-'):
    __version__ = os.environ['TRAVIS_TAG'].split('-')[1]

DEPENDENCIES = [
    'numpy<2.0.0,>1.16.0', 'requests>=2.20.0,<3', 'graphviz<0.9.0,>=0.8.1',
    'contextvars;python_version<"3.7"'
]

# Replace any stale copies of the python package sources with a fresh copy
# from the build tree.
shutil.rmtree(os.path.join(CURRENT_DIR, 'mxnet'), ignore_errors=True)
shutil.rmtree(os.path.join(CURRENT_DIR, 'dmlc_tracker'), ignore_errors=True)
shutil.copytree(os.path.join(CURRENT_DIR, 'mxnet-build/python/mxnet'),
                os.path.join(CURRENT_DIR, 'mxnet'))
shutil.copytree(
Example #47
0
 def __init__(self):
     """Record the start time (epoch seconds and formatted string), then
     parse the command-line arguments."""
     started_at = time.time()
     today = dt.today()
     self.start = started_at
     self.d_ = today
     self.timestarted = today.strftime("%d-%m-%Y %H:%M:%S")
     self.parseArgs()
Example #48
0
    def update(self):
        """Refresh this sensor from the latest waste-collection data.

        Looks up this sensor's waste ``type`` in the fetched data, derives
        the collection date plus all dependent state attributes, and resets
        everything to an "unknown" state when data is missing or invalid
        (signalled internally by raising/catching ValueError).
        """
        self.data.update()
        waste_data = self.data.data

        try:
            if waste_data:
                if self.type in waste_data:
                    collection_date = datetime.strptime(
                        waste_data[self.type], "%Y-%m-%d"
                    ).date()

                    # Date in date format "%Y-%m-%d"
                    self._year_month_day_date = str(collection_date)

                    if collection_date:
                        # Set the values of the sensor
                        self._last_update = datetime.today().strftime("%d-%m-%Y %H:%M")

                        # Is the collection date today?
                        self._is_collection_date_today = date.today() == collection_date

                        # Days until collection date
                        delta = collection_date - date.today()
                        self._days_until_collection_date = delta.days

                        # Only show the value if the date is lesser than or equal to (today + timespan_in_days)
                        if collection_date <= date.today() + relativedelta(days=int(self.timespan_in_days)):
                            #if the date does not contain a named day or month, return the date as normal
                            if (self.date_format.find('a') == -1 and self.date_format.find('A') == -1
                            and self.date_format.find('b') == -1 and self.date_format.find('B') == -1):
                                self._state = collection_date.strftime(self.date_format)
                            #else convert the named values to the locale names
                            else:
                                edited_date_format = self.date_format.replace('%a', 'EEE')
                                edited_date_format = edited_date_format.replace('%A', 'EEEE')
                                edited_date_format = edited_date_format.replace('%b', 'MMM')
                                edited_date_format = edited_date_format.replace('%B', 'MMMM')

                                #half babel, half date string... something like EEEE 04-MMMM-2020
                                half_babel_half_date = collection_date.strftime(edited_date_format)

                                #replace the digits with qquoted digits 01 --> '01'
                                half_babel_half_date = re.sub(r"(\d+)", r"'\1'", half_babel_half_date)
                                #transform the EEE, EEEE etc... to a real locale date, with babel
                                locale_date = format_date(collection_date, half_babel_half_date, locale=self.locale)

                                self._state = locale_date
                        else:
                            self._hidden = True
                    else:
                        raise ValueError()
                else:
                    raise ValueError()
            else:
                raise ValueError()
        except ValueError:
            # Missing/invalid data: reset to an "unknown" state but still
            # record when the last update attempt happened.
            self._state = None
            self._hidden = True
            self._days_until_collection_date = None
            self._year_month_day_date = None
            self._is_collection_date_today = False
            self._last_update = datetime.today().strftime("%d-%m-%Y %H:%M")
    def __init__(self,instrument,root):
        """Build the monitoring GUI: option/history menus, the live
        matplotlib canvas, and the readout labels.

        `root` is the Tk root window. `instrument` is not read in this
        body -- presumably consumed by other methods; verify against the
        rest of the class.
        """
        Frame.__init__(self,root)

        self.menu=Menu(self)
        root.config(menu=self.menu)

        self.today=datetime.today().date()
        self.lastday=self.today


        self.optionMenu = Menu(self.menu)
        self.menu.add_cascade(label="Options", menu=self.optionMenu)


        self.optionMenu.add_command(label="Site Details",command=self.showSiteDetails)
        self.historyMenu=Menu(self.optionMenu)
        self.optionMenu.add_cascade(label="History",menu=self.historyMenu)
        self.historyMenu.add_command(label="Daily",command=self.dailyDataPlot)
        self.historyMenu.add_command(label="Monthly",command=self.monthlyDataResult)
        self.historyMenu.add_command(label="Yearly",command=self.yearlyDataResult)




        # X axis is minutes-since-midnight with a tick every 30 minutes;
        # labels are "H:00"/"H:30" strings.
        self.Xtick=np.arange(0,23*60,30)
        #self.Xtick=range(0,40)
        self.Xticklabels=[]

        for r in range(0,23):
            self.Xticklabels.append(str(r)+":"+"00")
            self.Xticklabels.append(str(r)+":"+"30")
            
                ######Edit axis limits for actual time, time.sleep(),yticks,xticks,xticklabels
        ######
        ######

        self.fig = Figure(figsize=(4,4), dpi=100)
        #f.add_axes([0,100,0,100])
        #self.fig.autofmt_xdate(bottom=0.2, rotation=180, ha='right')
        self.fig.set_tight_layout(True)

        self.a = self.fig.add_subplot(111,xlabel="Time -->",ylabel="Pnow(KW)-->",xticks=self.Xtick,xticklabels=self.Xticklabels,yticks=range(0,1000,100))
        ####Graph axis parameters
        self.a.axis([4*60,21*60,0,5000])

        for label in self.a.get_xmajorticklabels():
            label.set_rotation(70)
            label.set_horizontalalignment("right")
        #self.a.axhline(y=0)
        #self.a.axvline(x=0)
        self.a.spines['left'].set_smart_bounds(True)
        self.a.spines['bottom'].set_smart_bounds(True)


        # Embed the matplotlib figure into the Tk grid.
        self.canvas = FigureCanvasTkAgg(self.fig,master=self)
        self.canvas.draw()
        self.canvas.get_tk_widget().grid(row=3,column=0,columnspan=3,sticky="N")#.pack(side="right", fill="both", expand=True)

        ###########
        ###########
        ###########
        
        # Static identity/heading labels plus live readout labels; the
        # latter are refreshed by guiUpdate().
        self.labelId=Label(self,text="Device Id : "+ str(self.deviceId),fg="black",bg="white")
        self.labelId.grid(padx=10,pady=5,ipadx=40,row=1,column=1,sticky="N")

        self.labelCap=Label(self,text=" INSTALLED CAPACITY 05 KW ",fg="black",bg="white")
        self.labelCap.grid(padx=10,pady=5,ipadx=10,row=2,column=1,sticky="W")

        self.labelDT=Label(self,text=" Date and Time : " + str(self.Time),fg="blue",bg="white")
        self.labelDT.grid(padx=10,pady=5,row=2,column=0,sticky="W")

        self.labelVac=Label(self,text=" Vac : "+ str(self.Vac1)+"V / "+str(self.Vac2)+"V / "+str(self.Vac3)+"V",fg="blue",bg="white")
        self.labelVac.grid(padx=10,pady=5,ipadx=20,row=5,column=0,sticky="W")

        self.labelVpv=Label(self,text=" Vpv : "+ str(self.Vpv1)+"V / "+str(self.Vpv2)+"V",fg="red",bg="white")
        self.labelVpv.grid(padx=10,pady=5,ipadx=20,row=6,column=0,sticky="W")

        self.labelIac=Label(self,text=" Iac : "+ str(self.Iac1)+"A / "+str(self.Iac2)+"A / "+str(self.Iac3)+"A",fg="green",bg="white")
        self.labelIac.grid(padx=10,pady=5,ipadx=20,row=7,column=0,sticky="W")

        self.labelIpv=Label(self,text=" Ipv : "+ str(self.Ipv1)+"A / "+str(self.Ipv2)+"A",fg="magenta",bg="white")
        self.labelIpv.grid(padx=10,pady=5,ipadx=20,row=8,column=0,sticky="W")

        self.labelPnow=Label(self,text=" Pnow : "+str(self.Pnow)+"kW",fg="yellow",bg="white")
        self.labelPnow.grid(padx=10,pady=5,ipadx=20,row=5,column=2,sticky="W")

        self.labelEtoday=Label(self,text=" Etoday : "+str(self.Etoday)+"kW",fg="brown",bg="white")
        self.labelEtoday.grid(padx=10,pady=5,ipadx=20,row=6,column=2,sticky="W")

        self.labelEall=Label(self,text=" Eall : "+str(self.Eall)+"kW",fg="purple",bg="white")
        self.labelEall.grid(padx=10,pady=5,ipadx=20,row=7,column=2,sticky="W")
    def parse_lazarus_output(coreGene):
        """Return a SeqRecord built from line 14 (index 13) of the lazarus
        ancestral-reconstruction output for `coreGene`, or None when the
        file has fewer than 14 lines (the original raised NameError)."""
        record = None
        # Context manager closes the file (the original leaked the handle).
        with open("%s/ancestral/ancestor.out.txt" % coreGene, "r") as ancRecFile:
            for i, line in enumerate(ancRecFile):
                if i == 13:
                    record = SeqRecord(Seq(line.strip(), IUPAC.ambiguous_dna),
                        id=coreGene, description="")
        return record
    # Reconstruct ancestral sequences in parallel -- one task per core
    # gene -- then write them all to a single FASTA file.
    pool = ThreadPool(args.threads)
    recs = pool.map(parse_lazarus_output, coreGenes)
    SeqIO.write(recs, "ancestralGenes.fa", "fasta")

                

# --- core-genome alignment driver: top-level setup ---
check_paths()
args = get_args()
# Timestamp used to tag this run's outputs.
current_datetime = datetime.today().strftime("%d-%m-%Y-%H%M")
wd = os.getcwd() + "/"
kvmap = {'projectname':'coreAlignment'}
genomes = {}
orderedGenomes = []
# Map the last 4 characters of each genome name to the full name, while
# also preserving the original file order.
with open(args.genomes, "r") as inFile:
    for line in inFile:
        genomes[line.strip()[-4:]] = line.strip()
        orderedGenomes.append(line.strip())
og = orderedGenomes[-1]
groupsDict = read_groups_file(args.groups)
coreGenes = get_core_genes(groupsDict, genomes)
for n in coreGenes:
    try:
        os.mkdir(n)
        os.mkdir(n + "/alignment")
Example #51
0
def userdata_generator(user_list,
                       user_main_list,
                       user_bill,
                       date=None):
    """Build a column-oriented dict of per-user payroll/accounting rows.

    Parameters:
        user_list: DataFrame with an 'hr_id' column, one row per user.
        user_main_list: DataFrame with 'user_hr_id', 'prev_month_net' and
            'adj_net_month' columns.
        user_bill: DataFrame with 'user_hr_id', 'bill_id' and 'amount';
            the row whose bill_id == 'Account' supplies the balance.
        date: timestamp stored in every date_* column. Defaults to "now".
            The original signature used ``date=datetime.today()``, which
            froze the default at import time; it is resolved per call here.

    Returns:
        dict mapping column name -> list, one entry per user in user_list.
    """
    if date is None:
        date = datetime.today()

    df = {
        'user_hr_id': [],
        'services_total': [],
        'compensations_total': [],
        'prev_month_net': [],
        'total_net_month': [],
        'podushka': [],
        'deadline': [],
        'payout_rate': [],
        'zp_cash': [],
        'office_fees': [],
        'account': [],
        'account_plus_minus': [],
        'cash': [],
        'social': [],
        'date_reports': [],
        'date_services': [],
        'date_income_data': [],
        'date_account': [],
        'date_reconciliation': [],
        'qty_of_reconciliations': [],
    }

    # Loop-invariant CSVs hoisted out of the per-user loop (the original
    # re-read both files on every iteration and read services.csv twice).
    services = pandas.read_csv('base/data_set/services.csv')
    fees = pandas.read_csv('base/data_set/fees.csv')

    for index, user in user_list.iterrows():
        hr_id = user['hr_id']
        for ind, service in services.iterrows():
            if service['UID'] == hr_id:
                # SECURITY: eval() on CSV content -- only safe for trusted,
                # internally generated data sets.
                serv_and_comp = eval(service['SERV and COMP'])
                services_total = serv_and_comp[0]
                compensations_total = serv_and_comp[1]

        for ind, fee in fees.iterrows():
            if fee['hr_id'] == hr_id:
                office_fees = fee['SUM']

        payout_rate = 0.37
        deadline = 1

        for ind, user_main in user_main_list.iterrows():
            if user_main['user_hr_id'] == hr_id:
                prev_month_net = user_main['prev_month_net']
                total_net_month = user_main['adj_net_month']

        for ind, bill in user_bill.iterrows():
            if bill['user_hr_id'] == hr_id and bill['bill_id'] == 'Account':
                account = bill['amount']

        # Split the month's net into a cushion ("podushka") and cash salary.
        if total_net_month <= 0:
            podushka = total_net_month
            zp_cash = 0
        else:
            podushka = random.randint(0, total_net_month)
            zp_cash = int((total_net_month - podushka) * payout_rate)
        # Pick an account adjustment that keeps the account positive and
        # never exceeds the cash salary; a negative account with no cash
        # forces a zero adjustment.
        while True:
            if zp_cash == 0 and account < 0:
                account_plus_minus = 0
                break
            else:
                account_plus_minus = random.randint(-200, 200)
                if account + account_plus_minus > 0 and account_plus_minus <= zp_cash:
                    break
        withdrawal = random.randint(0, (zp_cash - account_plus_minus))
        social = zp_cash - account_plus_minus - withdrawal

        df['user_hr_id'].append(hr_id)
        df['services_total'].append(services_total)
        df['compensations_total'].append(compensations_total)
        df['office_fees'].append(office_fees)
        df['prev_month_net'].append(prev_month_net)
        df['total_net_month'].append(total_net_month)
        df['podushka'].append(podushka)
        df['zp_cash'].append(zp_cash)
        df['account'].append(account)
        df['account_plus_minus'].append(account_plus_minus)
        df['social'].append(social)
        df['cash'].append(zp_cash)
        df['payout_rate'].append(payout_rate)
        df['deadline'].append(deadline)
        df['date_reports'].append(date)
        df['date_account'].append(date)
        df['date_services'].append(date)
        df['date_income_data'].append(date)
        df['date_reconciliation'].append(date)
        df['qty_of_reconciliations'].append(1)

    return df
class mainWindow(Frame):

    
    # Class-level defaults; __init__ and makeCSVfile overwrite several of
    # these per instance. NOTE(review): this block performs filesystem I/O
    # at class-definition time against a hard-coded path -- consider moving
    # it into __init__ and making the path configurable.
    today=str(datetime.today().date())
    lastday=today
    filePath="/home/prashant/01050007/01050007_Data/"
    # Ensure today's CSV log exists; write the header only for a new file.
    if os.path.exists(filePath+str(today)+".csv"):

        f=open(filePath+str(today)+".csv","a")
    else:
        f=open(filePath+str(today)+".csv","w")
        f.write("Time,Vac1,Vac2,Vac3,Vpv1,Vpv2,Iac1,Iac2,Iac3,Ipv1,Ipv2,Pnow,Etoday,Eall,Fault Code\n")
    f.close() 
    deviceId=1
    Time=[]
    # Latest raw electrical readings (volts/amps/power/energy); populated
    # by fetchBasicData.
    Vac1=0
    Vac2=0
    Vac3=0
    Vpv1=0
    Vpv2=0
    Iac1=0
    Iac2=0
    Iac3=0
    Ipv1=0
    Ipv2=0
    Pnow=0
    Etoday=0
    Eall=0
    faultData=0

    def __init__(self,instrument,root):
        """Build the monitoring GUI: option/history menus, the live
        matplotlib canvas, and the readout labels.

        `root` is the Tk root window. `instrument` is not read in this
        body -- presumably consumed elsewhere in the class; verify.
        """
        Frame.__init__(self,root)

        self.menu=Menu(self)
        root.config(menu=self.menu)

        self.today=datetime.today().date()
        self.lastday=self.today


        self.optionMenu = Menu(self.menu)
        self.menu.add_cascade(label="Options", menu=self.optionMenu)


        self.optionMenu.add_command(label="Site Details",command=self.showSiteDetails)
        self.historyMenu=Menu(self.optionMenu)
        self.optionMenu.add_cascade(label="History",menu=self.historyMenu)
        self.historyMenu.add_command(label="Daily",command=self.dailyDataPlot)
        self.historyMenu.add_command(label="Monthly",command=self.monthlyDataResult)
        self.historyMenu.add_command(label="Yearly",command=self.yearlyDataResult)




        # X axis is minutes-since-midnight with a tick every 30 minutes;
        # labels are "H:00"/"H:30" strings.
        self.Xtick=np.arange(0,23*60,30)
        #self.Xtick=range(0,40)
        self.Xticklabels=[]

        for r in range(0,23):
            self.Xticklabels.append(str(r)+":"+"00")
            self.Xticklabels.append(str(r)+":"+"30")
            
                ######Edit axis limits for actual time, time.sleep(),yticks,xticks,xticklabels
        ######
        ######

        self.fig = Figure(figsize=(4,4), dpi=100)
        #f.add_axes([0,100,0,100])
        #self.fig.autofmt_xdate(bottom=0.2, rotation=180, ha='right')
        self.fig.set_tight_layout(True)

        self.a = self.fig.add_subplot(111,xlabel="Time -->",ylabel="Pnow(KW)-->",xticks=self.Xtick,xticklabels=self.Xticklabels,yticks=range(0,1000,100))
        ####Graph axis parameters
        self.a.axis([4*60,21*60,0,5000])

        for label in self.a.get_xmajorticklabels():
            label.set_rotation(70)
            label.set_horizontalalignment("right")
        #self.a.axhline(y=0)
        #self.a.axvline(x=0)
        self.a.spines['left'].set_smart_bounds(True)
        self.a.spines['bottom'].set_smart_bounds(True)


        # Embed the matplotlib figure into the Tk grid.
        self.canvas = FigureCanvasTkAgg(self.fig,master=self)
        self.canvas.draw()
        self.canvas.get_tk_widget().grid(row=3,column=0,columnspan=3,sticky="N")#.pack(side="right", fill="both", expand=True)

        ###########
        ###########
        ###########
        
        # Static identity/heading labels plus live readout labels; the
        # latter are refreshed by guiUpdate().
        self.labelId=Label(self,text="Device Id : "+ str(self.deviceId),fg="black",bg="white")
        self.labelId.grid(padx=10,pady=5,ipadx=40,row=1,column=1,sticky="N")

        self.labelCap=Label(self,text=" INSTALLED CAPACITY 05 KW ",fg="black",bg="white")
        self.labelCap.grid(padx=10,pady=5,ipadx=10,row=2,column=1,sticky="W")

        self.labelDT=Label(self,text=" Date and Time : " + str(self.Time),fg="blue",bg="white")
        self.labelDT.grid(padx=10,pady=5,row=2,column=0,sticky="W")

        self.labelVac=Label(self,text=" Vac : "+ str(self.Vac1)+"V / "+str(self.Vac2)+"V / "+str(self.Vac3)+"V",fg="blue",bg="white")
        self.labelVac.grid(padx=10,pady=5,ipadx=20,row=5,column=0,sticky="W")

        self.labelVpv=Label(self,text=" Vpv : "+ str(self.Vpv1)+"V / "+str(self.Vpv2)+"V",fg="red",bg="white")
        self.labelVpv.grid(padx=10,pady=5,ipadx=20,row=6,column=0,sticky="W")

        self.labelIac=Label(self,text=" Iac : "+ str(self.Iac1)+"A / "+str(self.Iac2)+"A / "+str(self.Iac3)+"A",fg="green",bg="white")
        self.labelIac.grid(padx=10,pady=5,ipadx=20,row=7,column=0,sticky="W")

        self.labelIpv=Label(self,text=" Ipv : "+ str(self.Ipv1)+"A / "+str(self.Ipv2)+"A",fg="magenta",bg="white")
        self.labelIpv.grid(padx=10,pady=5,ipadx=20,row=8,column=0,sticky="W")

        self.labelPnow=Label(self,text=" Pnow : "+str(self.Pnow)+"kW",fg="yellow",bg="white")
        self.labelPnow.grid(padx=10,pady=5,ipadx=20,row=5,column=2,sticky="W")

        self.labelEtoday=Label(self,text=" Etoday : "+str(self.Etoday)+"kW",fg="brown",bg="white")
        self.labelEtoday.grid(padx=10,pady=5,ipadx=20,row=6,column=2,sticky="W")

        self.labelEall=Label(self,text=" Eall : "+str(self.Eall)+"kW",fg="purple",bg="white")
        self.labelEall.grid(padx=10,pady=5,ipadx=20,row=7,column=2,sticky="W")

        #########
        #########
        #########
        

        
    def showSiteDetails(self):
        """Show the contents of siteDetails.txt in a new toplevel window.

        (Python 2 code: `print` statements.)
        """
        print "Site Details:"
        
        # NOTE(review): the file handle is never closed.
        siteFile=open(self.filePath+"siteDetails.txt","r")
        info=siteFile.read()
        tp=Toplevel(self)
        Label(tp,text=info,fg="white",bg="black").pack(fill=X)
        
        


    def blank(self):
        """Placeholder menu callback (debug)."""
        print "oops!!!!!!!!!!!!!!!!!! blank"

        
    def dailyDataPlot(self):

        


        dateMax=31

        def getAndPlot():
            #path="/home/prashant/pythonGeneratedFiles/"
            
            m=int(mm.get())
            d=int(dd.get())
            fileName=yyyy.get()
            if m<10:
                fileName=fileName+"-0"+str(m)+"-"
            else:
                fileName=fileName+"-"+str(m)+"-"
            if d<10:
                fileName=fileName+"0"+str(d)
            else:
                fileName=fileName+str(d)
                
            print fileName
            fp=open(self.filePath+fileName+".csv","r")
            
            index=10

            #First row specifies particular parameter name            
            print fp.readline()
                        
            Xdata=[]
            Ydata=[]
            xtick=[]


            splitedData=fp.readline().split(",")
            Ydata.append(int(splitedData[index]))
            time=datetime.strptime(splitedData[0],"%H:%M")
            startTime=time
            Xdata.append(time.hour*60+time.minute)
            xtick.append(str(startTime.hour)+":"+str(startTime.minute))


            for row in fp:
                splitedData=row.split(",") 
                Ydata.append(int(splitedData[index]))
                time=datetime.strptime(splitedData[0],"%H:%M")
                Xdata.append(time.hour*60+time.minute)
                if ((time.hour-startTime.hour)*60+(time.minute-startTime.minute))>29:
                    startTime=time
                    xtick.append(str(startTime.hour)+":"+str(startTime.minute))
                



            Etoday=splitedData[11]
            runTime=time-datetime.strptime(xtick[0],"%H:%M")
            print runTime
            fig=plt.figure("Device Id:"+str(self.deviceId)+"     Date:"+fileName)
            plt.scatter(Xdata,Ydata)
            temp= np.arange(min(Xdata), max(Xdata), 30)



            plt.xlabel("Time\nRun Time :"+str(runTime)+"\nEtoday = "+str(Etoday)+" KWh")
            plt.ylabel("Power")
            plt.tight_layout()
            plt.grid(True)
            plt.xticks(temp,xtick,rotate=70)
            plt.plot(Xdata,Ydata)
            plt.show()

        
        def updateMaxDate():
            global dateMax
            y=int(yyyy.get())
            m=int(mm.get())
            if m == 1 or m==3 or m==5 or m==7 or m==8 or m==10 or m==12:
                dateMax=31
            elif m==2:
                if y%4==0:
                    if y%400==0 and y%100!=0:
                        dateMax=29
                    else:
                        dateMax=28
                else:
                    dateMax=28
            else:
                dateMax=30
            dd.config(to=dateMax)
           

         
        win= Toplevel(self)

        Label(win,text="DD:").grid(row=2,column=1)#.pack(side="left")#.grid(row=2,column=2)

        dd = Spinbox(win, from_=1, to=dateMax, state="readonly",width=2)
        dd.grid(row=2,column=1)#.pack(side="left")#grid(row=2,column=1)

        Label(win,text="MM:").grid(row=2,column=2)#.pack(side="left")#.grid(row=2,column=2)

        mm = Spinbox(win, from_=1, to=12, state="readonly",command=updateMaxDate,width=2)
        mm.grid(row=2,column=2)#.pack(side="left")#.grid(row=2,column=2)

        Label(win,text="YYYY:").grid(row=2,column=3)#.pack(side="left")#.grid(row=2,column=3)

        yyyy = Spinbox(win, from_=2005, to=2050, state="readonly",command=updateMaxDate,width=5)
        yyyy.grid(row=2,column=3)#.pack(side="left")#.grid(row=2,column=3)
        
        button=Button(win,text=" PLOT ",command=getAndPlot)
        button.grid(row=3,column=2)#.pack(side="left")#.grid(row=3,column=2)

        
        
        #self.updateGUItasks()

    def createMonthlyDataFile(self):

        path="/home/prashant/pythonGeneratedFiles/"
        year="2015"

        powerIndex=10        
        for m in range(1,13):
            totalEnergy=0
            temp=datetime(1900,1,1,0,0,0)
            duration=datetime(1900,1,1,0,0,0)
            peak=0
        
            mFile=year
            if m<10:
                mFile=mFile+"-0"+str(m)
            else:
                mFile=mFile+"-"+str(m)
            
            fm=open(mFile+".csv","w")
            fm.write("Total energy,Peak,Duration\n")
            print "Creating Month :",m," File..............."        
            if m == 1 or m==3 or m==5 or m==7 or m==8 or m==10 or m==12:
                dateMax=31
            elif m==2:
                dateMax=28
            else:
                dateMax=30
            
            for d in range(1,dateMax+1):
                fileName=year        
                
                if m<10:
                    fileName=fileName+"-0"+str(m)+"-"
                else:
                    fileName=fileName+"-"+str(m)+"-"
                if d<10:
                    fileName=fileName+"0"+str(d)
                else:
                    fileName=fileName+str(d)
                if os.path.exists(path+fileName+".csv"):
                    
                    
                    fp=open(path+fileName+".csv","r")
                    print fp.readline().split(",")
                    
                    startTime=datetime.strptime(fp.readline().split(",")[0],"%H:%M")
                    for fileData in fp:
                        singleData=fileData.split(",")
                        if int(singleData[powerIndex]) > peak:
                            peak=singleData[powerIndex]
                    
                    endTime=datetime.strptime(singleData[0],"%H:%M")
                    temp=endTime-startTime
                    duration=duration + temp
                    fp.close()
                    totalEnergy=totalEnergy+int(singleData[11])
            print "Month :"+ str(m)+"-"+year
            print "Total Energy=",totalEnergy
            print "Duration :",duration
            print "Peak :",peak
            fm.write(str(totalEnergy))
            fm.write(",")
            fm.write(str(peak))
            fm.write(",")
            fm.write(str(duration))
            fm.close()


            
    def createYearlyDataFile(self):
        """Aggregate the twelve 2015 monthly summary CSVs into a single
        "2015.csv" yearly summary (total energy, peak power, duration).

        Durations are stored/accumulated as datetimes relative to the
        1900-01-01 epoch that strptime uses for time-only fields.
        """
        energy=0
        peak=0
        duration=datetime(1900,1,1,0,0,0)
        temp=datetime(1900,1,1,0,0,0)

        fname="2015"#str(datetime.today().year)
        fy=open(fname+".csv","w")
        fy.write("Total energy,Peak,Duration\n")
            
        for m in range(1,13):#int(datetime.today().month)+1):
            fname="2015"
            if m<10:
                fname=fname+"-0"+str(m)
            else:
                fname=fname+"-"+str(m)
            # Context manager closes each monthly file (the original never
            # closed these handles).
            with open(fname+".csv","r") as fm:
                fm.readline()
                data=fm.readline().split(",")
            if int(data[1])>peak:
                peak=int(data[1])
            energy=energy+int(data[0])
            
            temp=datetime.strptime(data[2],"%Y-%m-%d %H:%M:%S")-datetime(1900,1,1,0,0,0)
            duration=duration + temp

            
        fy.write(str(energy))
        fy.write(",")
        fy.write(str(peak))
        fy.write(",")
        fy.write(str(duration))
        fy.close()



####    Add duration
####
####

    """                
    def updateYearlyData(self):
        #duration=datetime(1900,1,1,0,0,0)
      
        fname=str(datetime.today().year)
        if os.path.exists(self.filePath+fname+".csv"):
            fy=open(self.filePath+fname+".csv","r")
            fy.readline()
            data=fy.readline().split(",")
            fy.close()
            
            totalEnergy=int(data[0])+self.Etoday
            
            peak=int(data[1])
        else:
            totalEnergy=0
            peak=0
        if     
        m=int(datetime.today().month)    

        if m<10:
            fname=fname+"-0"+str(m)
        else:
            fname=fname+"-"+str(m)
                    
        fm=open(fname+".csv","r")
        fm.readline()
        data=fm.readline().split(",")
        if int(data[1])>peak:
            peak=int(data[1])

        #duration=datetime.strptime(data[2],"%Y-%m-%d %H:%M:%S")#+duration
        fy=open(self.filePath+str(datetime.today().year)+".csv","w")
        fy.write("Total energy,Peak,Duration\n")
        fy.write(str(totalEnergy))
        fy.write(",")
        fy.write(str(peak))
        #fy.write(",")
        #fy.write(str(duration))
        fy.close()
        """
    


        
    def updateMonthlyData(self):
        
        y,m,d=self.lastday.split("-")
        if os.path.exists(self.filePath+y+"-"+m+".csv"):
            
            fm=open(self.filePath+y+"-"+m+".csv","r")
            print fm.readline()
            data=fm.readline().split(",")
            fm.close()
            totalEnergy=int(data[0])+self.Etoday
            peak=int(data[1])
            mDur=datetime.strptime(data[2],"%Y-%m-%D %H:%M:%S")
        else:
            totalEnergy=self.Etoday
            peak=0
            mDur=datetime(1900,1,1,0,0,0)
            
        print self.f.readline()
        startTime=datetime.strptime(self.f.readline().split(",")[0],"%H:%M")
        for p in self.f:
            splittedData=p.split(",")
            temp=int(splittedData[10])
            if peak<temp:
                peak=temp
                
        duration=datetime.strptime(splittedData[0],"%H:%M")-startTime
        
        totDur=mDur+duration
        
        fm=open(self.filePath+y+"-"+m+".csv","w")
        fm.write("Total energy,Peak,Duration\n")
        fm.write(str(totalEnergy))
        fm.write(",")
        fm.write(str(peak))
        fm.write(",")
        fm.write(str(totDur))
        fm.close()
        
       
    def yearlyDataResult(self):
        """Show the 2015 yearly summary (energy, peak, run duration) read
        from "2015.csv" in a popup window."""

        # Cumulative day-of-year at each month end, used to turn the stored
        # duration datetime into hours.
        monthList=[31,59,90,120,151,181,212,243,273,304,334,365]
        fileName="2015"
        filePath=self.filePath+fileName+".csv"
        readFile=open(filePath,"r")
        readFile.readline()
        data=readFile.readline().split(",")
        Eyear=data[0]
        peak=data[1]
        duration=datetime.strptime(data[2],"%Y-%m-%d %H:%M:%S")
        tp=Toplevel(self)
        tp.title("Year : 2015")
        # NOTE(review): the hours formula adds both monthList[...]*24 and
        # duration.day*24, which looks like double counting -- verify.
        Label(tp,text="Total Energy used = "+Eyear+" W\nPeak Power used in Year = "+peak+" W\n Total duration energy generated = "+str(monthList[duration.month-1]*24+duration.hour+duration.day*24)+" hours "+str(duration.minute)+" minutes",bg="white",fg="blue").pack(side="top",fill=X)
           
        
        

       
    def monthlyDataResult(self):
        """Let the user pick a month (spinbox) and display that month's
        2015 summary (energy, peak, duration) in the same window."""

        def findEnergyUsed():
           # Read the "2015-MM.csv" summary for the selected month and
           # render it as a label in the picker window.
           m=month.get()
           if int(m)<10:
               strM="0"+str(m)
           else:
               strM=str(m)
           fileName="2015-"+strM
           filePath=self.filePath+fileName+".csv"
           readFile=open(filePath,"r")
           readFile.readline()
           data=readFile.readline().split(",")
           Emonth=data[0]
           peak=data[1]
           duration=datetime.strptime(data[2],"%Y-%m-%d %H:%M:%S")
           #tp=Toplevel(self)
           #tp.title("Year : 2015")
           Label(win,text="Total Energy used = "+Emonth+" W\nPeak Power in Month = "+peak+" W\n Duration energy generated = "+str(int(duration.hour)+int(duration.day)*24)+" hours "+str(duration.minute)+" minutes",bg="white",fg="blue").grid(column=0,row=2,columnspan=2,sticky="N")
                
        win= Toplevel(self)
        win.title("Year : 2015")
        Label(win,text="Select Month:",bg="white").grid(row=0,column=0,sticky="W")
        month=Spinbox(win, from_=1, to=12, state="readonly",width=2,bg="white")
        month.grid(row=0,column=1,sticky="E")
        button=Button(win,text=" DONE ",command=findEnergyUsed)
        button.grid(row=1,column=0,sticky="E")

     
    def fetchBasicData(self,data):    
        """Decode raw register values into scaled instance readings.

        `data` is a list indexed by the module-level *Addr constants; all
        raw register values are tenths of a unit (hence the /10).
        """
        self.faultData=data[faultCodeAddr]
        self.Vac1=data[Vac1Addr]/10
        self.Vac2=data[Vac2Addr]/10
        self.Vac3=data[Vac3Addr]/10
        self.Vpv1=data[Vpv1Addr]/10
        self.Vpv2=data[Vpv2Addr]/10
        self.Iac1=data[Iac1Addr]/10
        self.Iac2=data[Iac2Addr]/10
        self.Iac3=data[Iac3Addr]/10
        # 32-bit values arrive as low/high register pairs.
        # NOTE(review): the high word is scaled by 65535; 65536 (2**16)
        # would be the usual factor -- confirm against the device protocol.
        self.Ipv1=(data[Ipv1AddrL]+data[Ipv1AddrH]*65535)/10
        self.Ipv2=(data[Ipv2AddrL]+data[Ipv2AddrH]*65535)/10
        self.Pnow=(data[PnowAddrL]+data[PnowAddrH]*65535)/10
        self.Etoday=(data[EtodayAddrL]+data[EtodayAddrH]*65535)/10
        self.Eall=(data[EallAddrL]+data[EallAddrH]*65535)/10
        # Wall-clock "H:M" stamp for this reading (no zero padding).
        self.Time=str(datetime.today().hour)+":"+str(datetime.today().minute)
        
        
    def guiUpdate(self):
        """Push the latest readings into the labels and pump Tk events."""
        self.labelDT.config(text=" Date and Time : " + str(self.Time))
        self.labelVac.config(text=" Vac : "+ str(self.Vac1)+" V / "+str(self.Vac2)+" V / "+str(self.Vac3)+" V")
        self.labelVpv.config(text=" Vpv : "+ str(self.Vpv1)+" V / "+str(self.Vpv2)+" V")
        self.labelIac.config(text=" Iac : "+ str(self.Iac1)+" A / "+str(self.Iac2)+" A / "+str(self.Iac3)+" A")
        self.labelIpv.config(text=" Ipv : "+ str(self.Ipv1)+" A / "+str(self.Ipv2)+" A")
        self.labelPnow.config(text=" Pnow : "+str(self.Pnow)+" W")    
        self.labelEtoday.config(text=" Etoday : "+str(self.Etoday)+" KWh")
        self.labelEall.config(text=" Eall : "+str(self.Eall)+" KWh")
        self.update_idletasks()
        self.update()


    def makeCSVfile(self):
        """Append the current readings to today's CSV log, rolling over at
        midnight (yesterday's file is folded into the monthly summary and a
        new day file is started with the CSV header)."""
        self.today = str(datetime.today().date())
        if(str(self.today)==str(self.lastday)):
            # Same day: append one sample row and close the file so the
            # data survives crashes/power loss.
            self.f=open(self.filePath+str(self.today)+".csv","a")            
            self.f.write(str(self.Time)+","+str(self.Vac1)+","+str(self.Vac2)+","+str(self.Vac3)+","+str(self.Vpv1)+","+str(self.Vpv2)+","+str(self.Iac1)+","+str(self.Iac2)+","+str(self.Iac3)+","+str(self.Ipv1)+","+str(self.Ipv2)+",")
            self.f.write(str(self.Pnow)+","+str(self.Etoday)+","+str(self.Eall)+",")
            self.f.write(str(self.faultData))
            self.f.write("\n")
            self.f.close()
            
        else:
            # Day changed: re-open yesterday's file for reading so
            # updateMonthlyData can fold it into the month summary, then
            # start today's file with the header.
            self.f.close()
            self.f=open(self.filePath+str(self.lastday)+".csv","r")
            self.updateMonthlyData()
            self.f.close()
            #self.updateYearlyData()
            self.f=open(self.filePath+str(self.today)+".csv","a")
            self.f.write("Time,Vac1,Vac2,Vac3,Vpv1,Vpv2,Iac1,Iac2,Iac3,Ipv1,Ipv2,Pnow,Etoday,Eall,Fault Code\n")
        self.lastday=self.today

    def updateGUItasks(self):
        """Pump the Tk event loop once so the UI stays responsive during
        long polling loops."""
        self.update_idletasks()
        self.update()
        
    def plotDataLive1(self,xdata,ydata):
        """Experimental live-plot loop.

        NOTE(review): appears unfinished/dead code -- `y.append()` below is
        called with no argument (TypeError at runtime) and the xdata/ydata
        parameters are never used. Do not call until fixed.
        """
        i=0
        xtick=[]
        # Tick labels every 30 minutes from 7:00 to 19:30.
        for j in range(7,20):
            xtick.append(str(j)+":"+"00")
            xtick.append(str(j)+":"+"30")
            
        xtickBand=np.arange(420,20*60+30,30)

        print xtick
        plt.axis([420,20*60+30,0,65535])
        plt.xticks(xtickBand,xtick)
        plt.grid(True)
        plt.tight_layout()
            
        plt.ion()
        y=[]
        x=[]
        while i<600:
            y.append()

            plt.pause(0.05)
            
            plt.plot(x,y)
            
            #plt.show()
            #plt.draw()
            i=i+1
            #plt.show()
        plt.ioff()
        plt.show()
        
    
    
    def mainFunction(self,startingAddr,totalRegisters,registerType):
        """Main polling loop: repeatedly read `totalRegisters` registers
        starting at `startingAddr` from the modbus `instrument`, keeping
        the GUI responsive between reads.

        (Python 2 code: `print` statements and `except A,B:` syntax.)
        NOTE(review): this block appears truncated in the source -- `count`
        is never incremented and the nested `animate` is never invoked
        here; confirm against the original file.
        """

        def animate():
            #xList.append(count)
            #yList.append(self.Pnow)

            ####    Live graph...............
            ####
            xList.append(int(datetime.today().second))

            
            #xList.append(int(self.Time.split(":")[0])*60+int(self.Time.split(":")[1])-420)
            yList.append(int(self.Pnow)/10000000)
            self.a.scatter(xList,yList)
            self.a.plot(xList,yList)
            self.fig.canvas.draw()


        #self.createMonthlyDataFile()
        #self.createYearlyDataFile()   

        ###############################################################         Main loop
        count=0
        xList=[]
        yList=[]
        while True:#count<40:
    
                dataArray=[]
                print " count = " + str(count) +"\n"
                reg=startingAddr
                # Read each register in turn; -1 marks a decode failure.
                while reg<totalRegisters+startingAddr:
                    try:
                        dataReceived = instrument.read_register(reg,0,registerType)
                        dataArray.append(dataReceived)
                        reg=reg+1
                        self.updateGUItasks()

                    except ValueError,TypeError:
                        reg=reg+1
                        dataArray.append(-1)

                    except IOError:
                        # Device not responding: poll a known register
                        # until the link comes back, then retry.
                        #print "no response"
                        while True:
                            try:
                                print "no response"
                                instrument.read_registers(0,1,4)
                            except IOError:
                                continue
                            except ValueError,TypeError:
                                print "",
                            break
Example #53
0
def main():
    """Command-line entry point for the activity report tool.

    Parses options, resolves the date range to search, authenticates as
    needed, fetches the requested activity (changes, reviews, issues) via
    MyActivity, and prints it in the selected format — optionally to a file.

    Returns:
        0 on completion. Option errors exit early via parser.error().
    """
    # Silence upload.py.
    rietveld.upload.verbosity = 0

    parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
    parser.add_option('-u',
                      '--user',
                      metavar='<email>',
                      default=os.environ.get('USER'),
                      help='Filter on user, default=%default')
    parser.add_option('-b',
                      '--begin',
                      metavar='<date>',
                      help='Filter issues created after the date (mm/dd/yy)')
    parser.add_option('-e',
                      '--end',
                      metavar='<date>',
                      help='Filter issues created before the date (mm/dd/yy)')
    # -Q uses the quarter containing "two months ago", so that early in a new
    # quarter this still resolves to the previous (last complete) quarter.
    quarter_begin, quarter_end = get_quarter_of(datetime.today() -
                                                relativedelta(months=2))
    parser.add_option(
        '-Q',
        '--last_quarter',
        action='store_true',
        help='Use last quarter\'s dates, i.e. %s to %s' %
        (quarter_begin.strftime('%Y-%m-%d'), quarter_end.strftime('%Y-%m-%d')))
    parser.add_option('-Y',
                      '--this_year',
                      action='store_true',
                      help='Use this year\'s dates')
    parser.add_option('-w',
                      '--week_of',
                      metavar='<date>',
                      help='Show issues for week of the date (mm/dd/yy)')
    parser.add_option(
        '-W',
        '--last_week',
        action='count',
        help='Show last week\'s issues. Use more times for more weeks.')
    parser.add_option(
        '-a',
        '--auth',
        action='store_true',
        help='Ask to authenticate for instances with no auth cookie')
    parser.add_option('-d',
                      '--deltas',
                      action='store_true',
                      help='Fetch deltas for changes.')
    parser.add_option(
        '--no-referenced-issues',
        action='store_true',
        help='Do not fetch issues referenced by owned changes. Useful in '
        'combination with --changes-by-issue when you only want to list '
        'issues that have also been modified in the same time period.')
    parser.add_option(
        '--skip-own-issues-without-changes',
        action='store_true',
        help='Skips listing own issues without changes when showing changes '
        'grouped by referenced issue(s). See --changes-by-issue for more '
        'details.')

    # Which kinds of activity to look up; default (none given) is all three.
    activity_types_group = optparse.OptionGroup(
        parser, 'Activity Types',
        'By default, all activity will be looked up and '
        'printed. If any of these are specified, only '
        'those specified will be searched.')
    activity_types_group.add_option('-c',
                                    '--changes',
                                    action='store_true',
                                    help='Show changes.')
    activity_types_group.add_option('-i',
                                    '--issues',
                                    action='store_true',
                                    help='Show issues.')
    activity_types_group.add_option('-r',
                                    '--reviews',
                                    action='store_true',
                                    help='Show reviews.')
    activity_types_group.add_option(
        '--changes-by-issue',
        action='store_true',
        help='Show changes grouped by referenced issue(s).')
    parser.add_option_group(activity_types_group)

    # Output formatting; per-type formats override the generic -f format.
    output_format_group = optparse.OptionGroup(
        parser, 'Output Format',
        'By default, all activity will be printed in the '
        'following format: {url} {title}. This can be '
        'changed for either all activity types or '
        'individually for each activity type. The format '
        'is defined as documented for '
        'string.format(...). The variables available for '
        'all activity types are url, title and author. '
        'Format options for specific activity types will '
        'override the generic format.')
    output_format_group.add_option(
        '-f',
        '--output-format',
        metavar='<format>',
        default=u'{url} {title}',
        help='Specifies the format to use when printing all your activity.')
    output_format_group.add_option(
        '--output-format-changes',
        metavar='<format>',
        default=None,
        help='Specifies the format to use when printing changes. Supports the '
        'additional variable {reviewers}')
    output_format_group.add_option(
        '--output-format-issues',
        metavar='<format>',
        default=None,
        help='Specifies the format to use when printing issues. Supports the '
        'additional variable {owner}.')
    output_format_group.add_option(
        '--output-format-reviews',
        metavar='<format>',
        default=None,
        help='Specifies the format to use when printing reviews.')
    output_format_group.add_option(
        '--output-format-heading',
        metavar='<format>',
        default=u'{heading}:',
        help='Specifies the format to use when printing headings.')
    output_format_group.add_option(
        '--output-format-no-url',
        default='{title}',
        help='Specifies the format to use when printing activity without url.')
    output_format_group.add_option(
        '-m',
        '--markdown',
        action='store_true',
        help='Use markdown-friendly output (overrides --output-format '
        'and --output-format-heading)')
    output_format_group.add_option(
        '-j',
        '--json',
        action='store_true',
        help='Output json data (overrides other format options)')
    parser.add_option_group(output_format_group)
    auth.add_auth_options(parser)

    parser.add_option('-v',
                      '--verbose',
                      action='store_const',
                      dest='verbosity',
                      default=logging.WARN,
                      const=logging.INFO,
                      help='Output extra informational messages.')
    parser.add_option('-q',
                      '--quiet',
                      action='store_const',
                      dest='verbosity',
                      const=logging.ERROR,
                      help='Suppress non-error messages.')
    parser.add_option(
        '-o',
        '--output',
        metavar='<file>',
        help='Where to output the results. By default prints to stdout.')

    # Remove description formatting
    parser.format_description = (lambda _: parser.description)  # pylint: disable=no-member

    # Parse and validate: positional args are rejected and a user is required.
    options, args = parser.parse_args()
    options.local_user = os.environ.get('USER')
    if args:
        parser.error('Args unsupported')
    if not options.user:
        parser.error('USER is not set, please use -u')
    options.user = username(options.user)

    logging.basicConfig(level=options.verbosity)

    # python-keyring provides easy access to the system keyring.
    try:
        import keyring  # pylint: disable=unused-import,unused-variable,F0401
    except ImportError:
        logging.warning('Consider installing python-keyring')

    # Resolve the search date range. Explicit --begin/--end wins; otherwise
    # derive it from the convenience flags; default is the week of yesterday.
    if not options.begin:
        if options.last_quarter:
            begin, end = quarter_begin, quarter_end
        elif options.this_year:
            begin, end = get_year_of(datetime.today())
        elif options.week_of:
            begin, end = (get_week_of(
                datetime.strptime(options.week_of, '%m/%d/%y')))
        elif options.last_week:
            # -W is repeatable: each extra -W goes one more week back.
            begin, end = (
                get_week_of(datetime.today() -
                            timedelta(days=1 + 7 * options.last_week)))
        else:
            begin, end = (get_week_of(datetime.today() - timedelta(days=1)))
    else:
        begin = dateutil.parser.parse(options.begin)
        if options.end:
            end = dateutil.parser.parse(options.end)
        else:
            end = datetime.today()
    options.begin, options.end = begin, end

    # --markdown overrides the generic output formats wholesale.
    if options.markdown:
        options.output_format_heading = '### {heading}\n'
        options.output_format = '  * [{title}]({url})'
        options.output_format_no_url = '  * {title}'
    logging.info('Searching for activity by %s', options.user)
    logging.info('Using range %s to %s', options.begin, options.end)

    my_activity = MyActivity(options)
    my_activity.show_progress('Loading data')

    # No activity-type flag given: fetch everything.
    if not (options.changes or options.reviews or options.issues
            or options.changes_by_issue):
        options.changes = True
        options.issues = True
        options.reviews = True

    # First do any required authentication so none of the user interaction has to
    # wait for actual work.
    if options.changes or options.changes_by_issue:
        my_activity.auth_for_changes()
    if options.reviews:
        my_activity.auth_for_reviews()

    logging.info('Looking up activity.....')

    # Fetch failures are logged, not fatal: whatever was fetched still prints.
    try:
        if options.changes or options.changes_by_issue:
            my_activity.get_changes()
        if options.reviews:
            my_activity.get_reviews()
        if options.issues or options.changes_by_issue:
            my_activity.get_issues()
        if not options.no_referenced_issues:
            my_activity.get_referenced_issues()
    except auth.AuthenticationError as e:
        logging.error('auth.AuthenticationError: %s', e)

    my_activity.show_progress('\n')

    my_activity.print_access_errors()

    # Optionally redirect stdout to --output for the printing phase; the
    # finally clause always restores the real stdout and closes the file.
    output_file = None
    try:
        if options.output:
            output_file = open(options.output, 'w')
            logging.info('Printing output to "%s"', options.output)
            sys.stdout = output_file
    except (IOError, OSError) as e:
        logging.error('Unable to write output: %s', e)
    else:
        if options.json:
            my_activity.dump_json()
        else:
            if options.changes:
                my_activity.print_changes()
            if options.reviews:
                my_activity.print_reviews()
            if options.issues:
                my_activity.print_issues()
            if options.changes_by_issue:
                my_activity.print_changes_by_issue(
                    options.skip_own_issues_without_changes)
    finally:
        if output_file:
            logging.info('Done printing to file.')
            sys.stdout = sys.__stdout__
            output_file.close()

    return 0
Example #54
0
def train():
    """Train the CenterNet model (TF1 graph mode).

    Builds the tf.data input pipelines from cfg.train_data_file /
    cfg.test_data_file, constructs the model and its losses, sets up the
    learning-rate schedule selected by cfg.lr_type, then runs the epoch
    loop: train over all batches, evaluate on the test set, write summaries
    and save a checkpoint named after the epoch's test loss.
    """
    # define dataset
    # One text line per image; the line counts size the shuffle buffer and
    # the per-epoch batch counts (test batches are fixed at size 2).
    num_train_imgs = len(open(cfg.train_data_file, 'r').readlines())
    num_train_batch = int(math.ceil(float(num_train_imgs) / cfg.batch_size))
    num_test_imgs = len(open(cfg.test_data_file, 'r').readlines())
    num_test_batch = int(math.ceil(float(num_test_imgs) / 2))

    train_dataset = tf.data.TextLineDataset(cfg.train_data_file)
    train_dataset = train_dataset.shuffle(num_train_imgs)
    train_dataset = train_dataset.batch(cfg.batch_size)
    # get_data runs as a py_func and emits 7 float32 tensors per batch:
    # image, heatmap, wh, reg, reg_mask, ind, pose (order per get_next below).
    train_dataset = train_dataset.map(lambda x: tf.py_func(get_data,inp=[x, True], 
                                    Tout=[tf.float32, tf.float32, tf.float32, tf.float32, tf.float32, tf.float32, tf.float32]),
                                    num_parallel_calls=7)
    train_dataset = train_dataset.prefetch(3)
    
    test_dataset = tf.data.TextLineDataset(cfg.test_data_file)
    test_dataset = test_dataset.batch(2)
    test_dataset = test_dataset.map(lambda x: tf.py_func(get_data,inp=[x, False], 
                                    Tout=[tf.float32, tf.float32, tf.float32, tf.float32, tf.float32, tf.float32, tf.float32]),
                                    num_parallel_calls=7)
    test_dataset = test_dataset.prefetch(3)

    # A reinitializable iterator lets the same graph switch between the
    # train and test datasets via the two init ops below.
    iterator = tf.data.Iterator.from_structure(train_dataset.output_types, train_dataset.output_shapes)
    trainset_init_op = iterator.make_initializer(train_dataset)
    testset_init_op = iterator.make_initializer(test_dataset)

    input_data, batch_hm, batch_wh, batch_reg, batch_reg_mask, batch_ind, batch_pose = iterator.get_next()
    # tf.py_func loses static shape information, so re-declare shapes for
    # graph construction.
    input_data.set_shape([None, cfg.input_image_h, cfg.input_image_w, 3])
    batch_hm.set_shape([None,cfg.input_image_h//cfg.down_ratio, cfg.input_image_w//cfg.down_ratio, cfg.num_classes])
    batch_wh.set_shape([None, cfg.max_objs, 2])
    batch_reg.set_shape([None, cfg.max_objs, 2])
    batch_reg_mask.set_shape([None, cfg.max_objs])
    batch_ind.set_shape([None, cfg.max_objs])
    batch_pose.set_shape([None, cfg.max_objs, cfg.depth])


    # training flag 
    is_training = tf.placeholder(dtype=tf.bool, name='is_training')
    
    # difine model and loss
    model = CenterNet(input_data, is_training)
    with tf.variable_scope('loss'):
        hm_loss, wh_loss, reg_loss, pose_loss = model.compute_loss(batch_hm, batch_wh, batch_reg, batch_reg_mask, batch_ind, batch_pose)
        total_loss = hm_loss + wh_loss + reg_loss + pose_loss
    

    # define train op
    if cfg.lr_type=="CosineAnnealing":
        # Linear warmup for cfg.warm_up_epochs, then cosine decay from
        # cfg.init_lr down to cfg.end_lr over the remaining steps.
        global_step = tf.Variable(1.0, dtype=tf.float64, trainable=False, name='global_step')
        warmup_steps = tf.constant(cfg.warm_up_epochs * num_train_batch, dtype=tf.float64, name='warmup_steps')
        train_steps = tf.constant(cfg.epochs * num_train_batch, dtype=tf.float64, name='train_steps')
        learning_rate = tf.cond(
            pred=global_step < warmup_steps,
            true_fn=lambda: global_step / warmup_steps * cfg.init_lr,
            false_fn=lambda: cfg.end_lr + 0.5 * (cfg.init_lr - cfg.end_lr) *
                                (1 + tf.cos(
                                    (global_step - warmup_steps) / (train_steps - warmup_steps) * np.pi))
        )
        global_step_update = tf.assign_add(global_step, 1.0)

        optimizer = tf.train.AdamOptimizer(learning_rate).minimize(total_loss)
        # Group UPDATE_OPS (e.g. batch-norm moving stats), the optimizer step
        # and the manual global-step increment into one train_op.
        with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)):
            with tf.control_dependencies([optimizer, global_step_update]):
                train_op = tf.no_op()

    else:
        global_step = tf.Variable(0, trainable=False)
        if cfg.lr_type=="exponential":
            learning_rate = tf.train.exponential_decay(cfg.lr,
                                                    global_step,
                                                    cfg.lr_decay_steps,
                                                    cfg.lr_decay_rate,
                                                    staircase=True)
        elif cfg.lr_type=="piecewise":
            learning_rate = tf.train.piecewise_constant(global_step, cfg.lr_boundaries, cfg.lr_piecewise)
        optimizer = tf.train.AdamOptimizer(learning_rate)
        update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
        with tf.control_dependencies(update_ops):
            train_op = optimizer.minimize(total_loss, global_step=global_step)

    saver  = tf.train.Saver(tf.global_variables(), max_to_keep=1)
    
    
    with tf.Session() as sess:
        test_epoch_loss = 0
        with tf.name_scope('summary'):
            tf.summary.scalar("learning_rate", learning_rate)
            tf.summary.scalar("hm_loss", hm_loss)
            tf.summary.scalar("wh_loss", wh_loss)
            tf.summary.scalar("reg_loss", reg_loss)
            tf.summary.scalar("pose_loss", pose_loss)
            tf.summary.scalar("total_loss", total_loss)

            test_scalar = tf.placeholder(tf.float32)
            # NOTE: any previous TensorBoard logs under ./log/ are deleted here.
            logdir = "./log/"
            if os.path.exists(logdir): shutil.rmtree(logdir)
            os.mkdir(logdir)
            write_op = tf.summary.merge_all()
            summary_writer  = tf.summary.FileWriter(logdir, graph=sess.graph)
            test_summary = tf.summary.scalar("test_loss", test_scalar)
        
        # train 
        sess.run(tf.global_variables_initializer())
        if cfg.pre_train:
            load_weights(sess,'./pretrained_weights/resnet18.npy')

        writer = tf.compat.v1.summary.FileWriter("./checkpoint", sess.graph)
        #ot_nodes = ['detector/hm/Sigmoid', "detector/wh/BiasAdd", "detector/reg/BiasAdd"]
        ot_nodes = cfg.ot_nodes
        #ot_nodes = ['detector/Conv2D_1', 'detector/Conv2D_3', 'detector/Conv2D_5']
        # Date string used to prefix checkpoint filenames.
        today = datetime.today().strftime('%Y_%m_%d')
        #saver.restore(sess, "./checkpoint/2021_05_09-centernet_test_person_loss=0.7642.ckpt-80")
        #saver.restore(sess,tf.train.latest_checkpoint('checkpoint'))
        
        for epoch in range(1, 1+cfg.epochs):
            
            pbar = tqdm(range(num_train_batch))
            train_epoch_loss, test_epoch_loss = [], []
            sess.run(trainset_init_op)
            for i in pbar:
                _, summary, train_step_loss, global_step_val = sess.run(
                    [train_op, write_op, total_loss, global_step],feed_dict={is_training:True})

                train_epoch_loss.append(train_step_loss)
                summary_writer.add_summary(summary, global_step_val)
                pbar.set_description("train loss: %.2f" %train_step_loss)

            # Evaluate on the full test set with is_training False.
            sess.run(testset_init_op)
            for j in range(num_test_batch):
                test_step_loss = sess.run( total_loss, feed_dict={is_training:False})
                test_epoch_loss.append(test_step_loss)

            train_epoch_loss, test_epoch_loss = np.mean(train_epoch_loss), np.mean(test_epoch_loss)
            test_summ = test_summary.eval(feed_dict={test_scalar: test_epoch_loss})
            summary_writer.add_summary(test_summ, epoch)
            # Checkpoint name embeds the date and the epoch's mean test loss.
            ckpt_file = "./checkpoint/" + today + "-centernet_test_person_loss=%.4f.ckpt" % test_epoch_loss
            log_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
            print("=> Epoch: %2d Time: %s Train loss: %.2f Test loss: %.2f Saving %s ..."
                            %(epoch, log_time, train_epoch_loss, test_epoch_loss, ckpt_file))
            
            saver.save(sess, ckpt_file, global_step=epoch)
Example #55
0
    def test_03_po_return_and_modify(self):
        """Change the picking code of the delivery to internal. Make a PO for 10 units, go to the
        picking and return 5, edit the PO line to 15 units.
        The purpose of the test is to check the consistencies across the received quantities and the
        procurement quantities.
        """
        # Change the code of the picking type delivery
        self.env['stock.picking.type'].search([('code', '=', 'outgoing')]).write({'code': 'internal'})

        # Sell and deliver 10 units
        item1 = self.product_id_1
        uom_unit = self.env.ref('uom.product_uom_unit')
        po1 = self.env['purchase.order'].create({
            'partner_id': self.partner_a.id,
            'order_line': [
                (0, 0, {
                    'name': item1.name,
                    'product_id': item1.id,
                    'product_qty': 10,
                    'product_uom': uom_unit.id,
                    'price_unit': 123.0,
                    'date_planned': datetime.today().strftime(DEFAULT_SERVER_DATETIME_FORMAT),
                }),
            ],
        })
        po1.button_confirm()

        # Validate the receipt through the immediate-transfer wizard, which
        # processes the full ordered quantity (10 units received).
        picking = po1.picking_ids
        wiz_act = picking.button_validate()
        wiz = Form(self.env[wiz_act['res_model']].with_context(wiz_act['context'])).save()
        wiz.process()

        # Return 5 units
        stock_return_picking_form = Form(self.env['stock.return.picking'].with_context(
            active_ids=picking.ids,
            active_id=picking.ids[0],
            active_model='stock.picking'
        ))
        return_wiz = stock_return_picking_form.save()
        for return_move in return_wiz.product_return_moves:
            return_move.write({
                'quantity': 5,
                'to_refund': True
            })
        res = return_wiz.create_returns()
        return_pick = self.env['stock.picking'].browse(res['res_id'])
        wiz_act = return_pick.button_validate()
        wiz = Form(self.env[wiz_act['res_model']].with_context(wiz_act['context'])).save()
        wiz.process()

        # 10 received minus 5 returned with to_refund=True => 5 received.
        self.assertEqual(po1.order_line.qty_received, 5)

        # Deliver 15 instead of 10.
        po1.write({
            'order_line': [
                (1, po1.order_line[0].id, {'product_qty': 15}),
            ]
        })

        # A new move of 10 unit (15 - 5 units)
        self.assertEqual(po1.order_line.qty_received, 5)
        self.assertEqual(po1.picking_ids[-1].move_ids.product_qty, 10)
Example #56
0
from datetime import datetime
import csv
# For every post in the CSV export, print the weekday index
# (0 = Monday .. 6 = Sunday) of the post's creation timestamp.
with open('user_posts_1641812829207516.csv') as File:
    tfidfReader = csv.reader(File)
    for row in tfidfReader:
        # Column 3 holds timestamps like "2022-01-10T09:30:00+0000";
        # dropping the last four characters removes the numeric UTC offset
        # and leaves the trailing "+" that the format string expects.
        datetime_object = datetime.strptime(row[3][:-4], "%Y-%m-%dT%H:%M:%S+")
        # The weekday depends only on the date, so call .weekday() directly
        # instead of rebuilding a datetime from (year, month, day). The
        # original also computed hour/minute/second, today's date and a row
        # counter that were never used — all removed.
        print(datetime_object.weekday())
 def fetch_revenu_sharing_results(entity):
     """Return the RevenuSharingResult queryset for *entity* created "today".

     NOTE(review): ``created=datetime.today()`` filters on an exact
     timestamp (down to the microsecond), which will almost never match a
     stored row — presumably a date-only comparison (``created__date``)
     was intended; confirm against the model definition.
     """
     try:
         return RevenuSharingResult.objects.filter(entity=entity,
                                                   created=datetime.today())
     # NOTE(review): QuerySet.filter() never raises DoesNotExist (only
     # .get() does), so this handler looks unreachable; it also catches
     # Transaction.DoesNotExist even though the query is on
     # RevenuSharingResult — confirm which model/method was intended.
     except Transaction.DoesNotExist:
         return None
def get_day():
    """Return the full name of the current local weekday, e.g. "Monday"."""
    return datetime.today().strftime("%A")
    'sasl.password': secret
})

def acked(err, msg):
    """Delivery report callback called (from flush()) on successful or failed delivery of the message."""
    if err is None:
        # Success: report where the message landed.
        print("produced to: {} [{}] @ {}".format(msg.topic(), msg.partition(), msg.offset()))
    else:
        # Failure: report the broker/client error text.
        print("failed to deliver message: {}".format(err.str()))

print("~ Price Tracker for the Webshop 1% under "+SHOP+" ~")
print("~~ Get price from "+SHOP+": ")
iters = 0  # counts the number of price checks done
while True:
    iters += 1
    print("\nCheck #", iters, "on:", datetime.today())
    # call website
    page    = requests.get(URL, headers=headers)
    soup    = BeautifulSoup(page.content, 'html.parser')
    # Get title
    #title   = soup.find(id="productTitle")
    titles   = soup.select('#model-selection > bundle-selection > store-provider > div.as-l-container.as-bundleselection-container > div > div.as-bundleselection-modelvariationsbox.row > div > div.as-macbundle.column.large-4.small-12.as-macbundle-offset2 > div > bundle-selector > div.as-slide-swapper.as-macbtr-details > div.as-macbtr-options.as-bundleselection-modelshown.acc_MWTJ2D\/A.rs-noAnimation > div > h3')
    title_text = productName
    for title in titles:
        title_text = title.get_text(strip=True)
    # Get price
    #price   = soup.find(id="priceblock_ourprice")
    prices = soup.select('#model-selection > bundle-selection > store-provider > div.as-l-container.as-bundleselection-container > div > div.as-bundleselection-modelvariationsbox.row > div > div.as-macbundle.column.large-4.small-12.as-macbundle-offset2 > div > bundle-selector > div.as-slide-swapper.as-macbtr-details > div.as-macbtr-options.as-bundleselection-modelshown.acc_MWTJ2D\/A.rs-noAnimation > div > div.as-price > span.as-price-currentprice > span')
    price_text = 'No price'
    price1 = 1199.00
    for price in prices:
Example #60
0
    async def apply(self, ctx: commands.Context):
        """Apply to be a staff member."""
        role_add = get(ctx.guild.roles, name="Staff Applicant")
        app_data = await self.config.guild(ctx.guild).app_questions.all()
        user_data = self.config.member(ctx.author)

        channel = get(ctx.guild.text_channels, name="staff-applications")
        if ctx.guild not in self.antispam:
            self.antispam[ctx.guild] = {}
        if ctx.author not in self.antispam[ctx.guild]:
            self.antispam[ctx.guild][ctx.author] = AntiSpam([(timedelta(days=2), 1)])
        if self.antispam[ctx.guild][ctx.author].spammy:
            return await ctx.send(
                f"{ctx.author.mention} uh you're doing this way too frequently, and we don't need more than one application from you. Don't call us, we will maybe call you...LOL",
                delete_after=10,
            )
        if role_add is None:
            return await ctx.send("Uh oh. Looks like your Admins haven't added the required role.")
        if role_add.position > ctx.guild.me.top_role.position:
            return await ctx.send(
                "The staff applicant role is above me, and I need it below me if I am to assign it on completion. Tell your admins"
            )

        if channel is None:
            return await ctx.send(
                "Uh oh. Looks like your Admins haven't added the required channel."
            )
        available_positions = await self.config.guild(ctx.guild).positions_available()
        if available_positions is None:
            fill_this = "Reply with the position you are applying for to continue."
        else:
            list_positions = "\n".join(available_positions)
            fill_this = "Reply with the desired position from this list to continue\n`{}`".format(
                list_positions
            )
        try:
            await ctx.author.send(
                f"Let's do this! You have maximum of __5 minutes__ for each question.\n{fill_this}\n\n*To cancel at anytime respond with `cancel`*\n\n*DISCLAIMER: Your responses are stored for proper function of this feature*"
            )
        except discord.Forbidden:
            return await ctx.send(
                f"{ctx.author.mention} I can't DM you. Do you have them closed?", delete_after=10
            )
        await ctx.send(f"Okay, {ctx.author.mention}, I've sent you a DM.", delete_after=7)

        def check(m):
            return m.author == ctx.author and m.channel == ctx.author.dm_channel

        try:
            position = await self.bot.wait_for("message", timeout=300, check=check)
            if position.content.lower() == "cancel":
                return await ctx.author.send("Application has been canceled.")
            await user_data.position.set(position.content)
        except asyncio.TimeoutError:
            try:
                await ctx.author.send("You took too long. Try again, please.")
            except discord.HTTPException:
                return await ctx.send(f"Thanks for nothing, {ctx.author.mention}")
            return
        await ctx.author.send(app_data["name"])
        try:
            name = await self.bot.wait_for("message", timeout=300, check=check)
            if name.content.lower() == "cancel":
                return await ctx.author.send("Application has been canceled.")
            await user_data.name.set(name.content)
        except asyncio.TimeoutError:
            try:
                await ctx.author.send("You took too long. Try again, please.")
            except discord.HTTPException:
                return await ctx.send(f"Thanks for nothing, {ctx.author.mention}")
            return
        await ctx.author.send(app_data["timezone"])
        try:
            timezone = await self.bot.wait_for("message", timeout=300, check=check)
            if timezone.content.lower() == "cancel":
                return await ctx.author.send("Application has been canceled.")
            await user_data.timezone.set(timezone.content)
        except asyncio.TimeoutError:
            try:
                await ctx.author.send("You took too long. Try again, please.")
            except discord.HTTPException:
                return await ctx.send(f"Thanks for nothing, {ctx.author.mention}")
            return
        await ctx.author.send(app_data["age"])
        try:
            age = await self.bot.wait_for("message", timeout=300, check=check)
            if age.content.lower() == "cancel":
                return await ctx.author.send("Application has been canceled.")
            a = age.content
            b = str(datetime.today())
            c = b[:4]
            d = int(c)
            try:
                e = int(a)
                yearmath = d - e
                total_age = f"YOB: {a}\n{yearmath} years old"
            except Exception:
                total_age = f"Recorded response of `{a}`. Could not calculate age."

            await user_data.age.set(total_age)

        except asyncio.TimeoutError:
            try:
                await ctx.author.send("You took too long. Try again, please.")
            except discord.HTTPException:
                return await ctx.send(f"Thanks for nothing, {ctx.author.mention}")
            return

        await ctx.author.send(app_data["days"])
        try:
            days = await self.bot.wait_for("message", timeout=300, check=check)
            if days.content.lower() == "cancel":
                return await ctx.author.send("Application has been canceled.")
            await user_data.days.set(days.content)
        except asyncio.TimeoutError:
            try:
                await ctx.author.send("You took too long. Try again, please.")
            except discord.HTTPException:
                return await ctx.send(f"Thanks for nothing, {ctx.author.mention}")
            return
        await ctx.author.send(app_data["hours"])
        try:
            hours = await self.bot.wait_for("message", timeout=300, check=check)
            if hours.content.lower() == "cancel":
                return await ctx.author.send("Application has been canceled.")
            await user_data.hours.set(hours.content)
        except asyncio.TimeoutError:
            try:
                await ctx.author.send("You took too long. Try again, please.")
            except discord.HTTPException:
                return await ctx.send(f"Thanks for nothing, {ctx.author.mention}")
            return
        await ctx.author.send(app_data["experience"])
        try:
            experience = await self.bot.wait_for("message", timeout=300, check=check)
            if experience.content.lower() == "cancel":
                return await ctx.author.send("Application has been canceled.")
            # (Continuation of the DM application questionnaire; the prompts
            # for name/age/timezone/position/days/hours/experience and the
            # `check` predicate are defined earlier in this method, above
            # this chunk.)
            await user_data.experience.set(experience.content)
        except asyncio.TimeoutError:
            # Applicant idled past the 300s window; tell them in DM, falling
            # back to the invoking channel if their DMs are closed.
            try:
                await ctx.author.send("You took too long. Try again, please.")
            except discord.HTTPException:
                return await ctx.send(f"Thanks for nothing, {ctx.author.mention}")
            return
        # Required question: reason for interest. Same prompt/cancel/timeout
        # pattern as every other question in this flow.
        await ctx.author.send(app_data["reasonforinterest"])
        try:
            reasonforinterest = await self.bot.wait_for("message", timeout=300, check=check)
            if reasonforinterest.content.lower() == "cancel":
                return await ctx.author.send("Application has been canceled.")
            await user_data.reasonforinterest.set(reasonforinterest.content)
        except asyncio.TimeoutError:
            try:
                await ctx.author.send("You took too long. Try again, please.")
            except discord.HTTPException:
                return await ctx.send(f"Thanks for nothing, {ctx.author.mention}")
            return
        # Optional questions 8-12: each is asked only if the guild admins
        # configured it (non-None in app_data). The same check_N flag is
        # reused below to decide whether to add the matching embed field.
        check_8 = app_data["question8"]
        if check_8 is not None:
            await ctx.author.send(app_data["question8"])
            try:
                answer8 = await self.bot.wait_for("message", timeout=300, check=check)
                if answer8.content.lower() == "cancel":
                    return await ctx.author.send("Application has been canceled.")
                await user_data.answer8.set(answer8.content)
            except asyncio.TimeoutError:
                try:
                    await ctx.author.send("You took too long. Try again, please.")
                except discord.HTTPException:
                    return await ctx.send(f"Thanks for nothing, {ctx.author.mention}")
                return
        check_9 = app_data["question9"]
        if check_9 is not None:
            await ctx.author.send(app_data["question9"])
            try:
                answer9 = await self.bot.wait_for("message", timeout=300, check=check)
                if answer9.content.lower() == "cancel":
                    return await ctx.author.send("Application has been canceled.")
                await user_data.answer9.set(answer9.content)
            except asyncio.TimeoutError:
                try:
                    await ctx.author.send("You took too long. Try again, please.")
                except discord.HTTPException:
                    return await ctx.send(f"Thanks for nothing, {ctx.author.mention}")
                return
        check_10 = app_data["question10"]
        if check_10 is not None:
            await ctx.author.send(app_data["question10"])
            try:
                answer10 = await self.bot.wait_for("message", timeout=300, check=check)
                if answer10.content.lower() == "cancel":
                    return await ctx.author.send("Application has been canceled.")
                await user_data.answer10.set(answer10.content)
            except asyncio.TimeoutError:
                try:
                    await ctx.author.send("You took too long. Try again, please.")
                except discord.HTTPException:
                    return await ctx.send(f"Thanks for nothing, {ctx.author.mention}")
                return
        check_11 = app_data["question11"]
        if check_11 is not None:
            await ctx.author.send(app_data["question11"])
            try:
                answer11 = await self.bot.wait_for("message", timeout=300, check=check)
                if answer11.content.lower() == "cancel":
                    return await ctx.author.send("Application has been canceled.")
                await user_data.answer11.set(answer11.content)
            except asyncio.TimeoutError:
                try:
                    await ctx.author.send("You took too long. Try again, please.")
                except discord.HTTPException:
                    return await ctx.send(f"Thanks for nothing, {ctx.author.mention}")
                return
        check_12 = app_data["question12"]
        if check_12 is not None:
            await ctx.author.send(app_data["question12"])
            try:
                answer12 = await self.bot.wait_for("message", timeout=300, check=check)
                if answer12.content.lower() == "cancel":
                    return await ctx.author.send("Application has been canceled.")
                await user_data.answer12.set(answer12.content)
            except asyncio.TimeoutError:
                try:
                    await ctx.author.send("You took too long. Try again, please.")
                except discord.HTTPException:
                    return await ctx.send(f"Thanks for nothing, {ctx.author.mention}")
                return

        # Final free-form comments question (always asked).
        # NOTE(review): unlike every prompt above, this timeout handler does
        # NOT guard ctx.author.send with discord.HTTPException — if the
        # applicant closed DMs mid-flow this raises instead of falling back
        # to ctx.send. Looks like an oversight; confirm before changing.
        await ctx.author.send(app_data["finalcomments"])
        try:
            finalcomments = await self.bot.wait_for("message", timeout=300, check=check)
            if finalcomments.content.lower() == "cancel":
                return await ctx.author.send("Application has been canceled.")
            await user_data.finalcomments.set(finalcomments.content)
        except asyncio.TimeoutError:
            return await ctx.author.send("You took too long. Try again, please.")

        # Assemble the completed application as a single embed for staff.
        embed = discord.Embed(color=await ctx.embed_colour(), timestamp=datetime.utcnow())
        embed.set_author(
            name=f"Applicant: {ctx.author.name} | ID: {ctx.author.id}",
            icon_url=ctx.author.avatar_url,
        )
        embed.set_footer(
            text=f"{ctx.author.name}#{ctx.author.discriminator} UserID: {ctx.author.id}"
        )
        embed.title = f"Application for {position.content}"
        embed.add_field(
            name="Applicant Name:",
            value=f"Mention: {ctx.author.mention}\nPreferred: " + name.content,
            inline=True,
        )
        embed.add_field(
            name="Age",
            # total_age is computed earlier in this method (outside this
            # chunk) — presumably from a year-of-birth answer; see the
            # commented-out CommandInvokeError handler below.
            value=total_age,
            inline=True,
        )
        embed.add_field(name="Timezone:", value=timezone.content, inline=True)
        embed.add_field(name="Desired position:", value=position.content, inline=True)
        embed.add_field(name="Active days/week:", value=days.content, inline=True)
        embed.add_field(name="Active hours/day:", value=hours.content, inline=True)
        embed.add_field(
            # Question text truncated to fit the 256-char field-name limit;
            # "$" escaped, presumably for a downstream renderer — confirm.
            # NOTE(review): the "..." suffix is appended even when the
            # question is shorter than 197 chars (same for fields 8-12).
            name="{}...".format(app_data["reasonforinterest"][:197]).replace("$", "\\$"),
            value=reasonforinterest.content,
            inline=False,
        )
        embed.add_field(name="Previous experience:", value=experience.content, inline=False)

        # Optional answers: only include fields for questions that were asked.
        if check_8 is not None:
            embed.add_field(
                name="{}...".format(app_data["question8"][:197]).replace("$", "\\$"),
                value=answer8.content,
                inline=False,
            )
        if check_9 is not None:
            embed.add_field(
                name="{}...".format(app_data["question9"][:197]).replace("$", "\\$"),
                value=answer9.content,
                inline=False,
            )
        if check_10 is not None:
            embed.add_field(
                name="{}...".format(app_data["question10"][:197]).replace("$", "\\$"),
                value=answer10.content,
                inline=False,
            )
        if check_11 is not None:
            embed.add_field(
                name="{}...".format(app_data["question11"][:197]).replace("$", "\\$"),
                value=answer11.content,
                inline=False,
            )
        if check_12 is not None:
            embed.add_field(
                name="{}...".format(app_data["question12"][:197]).replace("$", "\\$"),
                value=answer12.content,
                inline=False,
            )
        embed.add_field(name="Final Comments", value=finalcomments.content, inline=False)
        # Deliver via a bot-named webhook on the staff channel, reusing an
        # existing one when present to avoid hitting the webhook cap.
        try:
            webhook = None
            for hook in await channel.webhooks():
                if hook.name == ctx.guild.me.name:
                    webhook = hook
            if webhook is None:
                webhook = await channel.create_webhook(name=ctx.guild.me.name)

            await webhook.send(
                embed=embed, username=ctx.guild.me.display_name, avatar_url=ctx.guild.me.avatar_url
            )
        except Exception as e:
            # NOTE(review): broad catch — any webhook failure (missing
            # Manage Webhooks permission, network error, etc.) is reported
            # to the applicant as "responses too verbose", which is only
            # accurate for the embed-size HTTPException case. The narrower
            # handlers below were commented out in favor of this.
            log.info(f"{e} occurred in {ctx.author.name} | {ctx.author.id} application")
            try:
                return await ctx.author.send(
                    "Seems your responses were too verbose. Let's try again, but without the life stories."
                )
            except Exception:
                return
        # except discord.HTTPException:
        #     return await ctx.author.send(
        #         "Your final application was too long to resolve as an embed. Give this another shot, keeping answers a bit shorter."
        #     )
        # except commands.CommandInvokeError:
        #     return await ctx.author.send(
        #         "You need to start over but this time when it asks for year of birth, respond only with a 4 digit year i.e `1999`"
        #     )
        # Grant the "applied" role configured earlier in this command
        # (role_add is bound above this chunk).
        await ctx.author.add_roles(role_add)

        try:
            await ctx.author.send(
                f"Your application has been sent to {ctx.guild.name} Admins! Thanks for your interest!"
            )
        except commands.CommandInvokeError:
            # NOTE(review): Messageable.send raises discord.Forbidden /
            # discord.HTTPException, not CommandInvokeError — this handler
            # presumably never fires; a closed-DM error would propagate.
            # Confirm before tightening.
            return await ctx.send(
                f"{ctx.author.mention} I sent your app to the admins. Thanks for closing dms early tho rude ass"
            )
        # Record this submission for the per-member antispam cooldown.
        self.antispam[ctx.guild][ctx.author].stamp()
        # lets save the embed instead of calling on it again
        await self.save_application(embed=embed, applicant=ctx.author)

        # Flag the member as having a completed application on file.
        await self.config.member(ctx.author).app_check.set(True)