def ban_user(self, user):
    ban = Ban(sender=user, created=datetime.datetime.now(), host=self.fqdn)
    self.dbinsert(ban)
    # subject is Spanish for "Outgoing mail blocked for user %s"
    sendMail('Bloqueado el envío de correo del usuario %s' % user,
             mail_tpl % (user, self.max_time, self.max_email, self.fqdn),
             self.smtp_server, self.tls_required, self.recipients, user)
def inform(subject, msg):
    """Inform the user by email."""
    from utils import sendMail
    print("sending email...")
    sendMail(subject, msg)
    print(subject, "\n", msg)
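# Every snippet in this section calls an externally defined sendMail helper whose
# signature varies by project. As a point of reference only, here is a minimal
# sketch of the two-argument variant used above, built on the standard-library
# smtplib and email modules; the host, port, sender, and recipient defaults are
# illustrative assumptions, not values from any of these projects.
import smtplib
from email.message import EmailMessage

def sendMail(subject, msg, host='localhost', port=25,
             sender='noreply@example.com', recipients=('admin@example.com',)):
    """Send a plain-text email over SMTP (hypothetical defaults)."""
    email = EmailMessage()
    email['Subject'] = subject
    email['From'] = sender
    email['To'] = ', '.join(recipients)
    email.set_content(msg)
    with smtplib.SMTP(host, port) as smtp:
        smtp.send_message(email)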
def contact_post(request):  # the view needs the request it reads POST data from
    email = request.POST.get('email')
    comment = request.POST.get('comment')
    if email == '' or comment == '':
        return render(request, 'contact',
                      {'error': 'Must fill in both fields',
                       'email': email, 'comment': comment})
    elif '@' not in email:
        return render(request, 'contact',
                      {'error': 'Invalid email address',
                       'email': email, 'comment': comment,
                       'title': 'Contact Us'})
    sendMail(email, comment)
    return render(request, 'contact', {'success': True})
def main(argv=argv, cfg_file=cfg_file, update=1):
    """The client entry point."""
    start_dir = os.getcwd()
    os.chdir(join(start_dir, dirname(argv[0]), dirname(cfg_file)))
    cfg_file = join(os.getcwd(), os.path.basename(cfg_file))
    loadConfiguration(cfg_file)
    os.chdir(start_dir)
    path.append(config['servers']['path'])
    path.append(config['configobj']['path'])

    # this import must stay here, after configobj's path is appended
    import storage
    storage.init(config['storage']['path'])
    # this import must stay here, after configobj's path is appended
    from gui import Gui

    try:
        app = QApplication([])
        app.setStyle(QStyleFactory.create("Cleanlooks"))
        gui = Gui(cfg_file)
        if not update:
            gui.displayWarning(PROJECT_NAME, gui._text['UpdateFail'])
        gui.show()
        exit(app.exec_())
    except Exception, e:
        print 'Fatal Exception:', e
        info = getExceptionInfo()
        fd = open(join(config['exceptions']['save_path'], 'exception.txt'), 'a+')
        fd.write(info)
        fd.close()
        if config['exceptions']['send_email']:
            try:
                sendMail("DevClient fatal exception: %s" % e, info)
            except Exception, e:
                print 'Error while sending email:', e
coinsFull = coins.copy()
# only look at relevant coins every hour
if runType == 'hourly':
    # movement over 5 hours
    previousCoins = previousCoinsFull[
        (previousCoinsFull.market_cap.astype(float) > 100e6) &
        (previousCoinsFull.time.astype(int) == 5)].reset_index(drop=True)
    coins = coinsFull[
        coinsFull.market_cap.astype(float) > 100e6].reset_index(drop=True)
    coinDiff = pd.merge(coins[['ticker', 'price']],
                        previousCoins[['ticker', 'price']], on=['ticker'])
    coinDiff[['price_x', 'price_y']] = coinDiff[['price_x', 'price_y']].astype(float)
    coinDiff['diffPercent'] = ((coinDiff['price_x'] - coinDiff['price_y'])
                               / coinDiff['price_y']) * 100
    # flag coins that have moved more than 15 percent in 5 hours,
    # to try to catch massive riders
    movedX = coinDiff[abs(coinDiff.diffPercent.values) > 15].reset_index(drop=True)
    if movedX.shape[0] > 0:
        logTime = str(np.datetime64(datetime.now()).astype('datetime64[m]')
                      ).replace('-', '').replace(':', '').replace('T', '')
        sendMail(msgText=logTime, msgTable=movedX,
                 subjectText='crypto 15 percent movers in 5 hr ' + logTime)

    # 1 hour window next
    previousCoins = previousCoinsFull[
        (previousCoinsFull.market_cap.astype(float) > 100e6) &
        (previousCoinsFull.time.astype(int) == 1)].reset_index(drop=True)
    coins = coinsFull[
        coinsFull.market_cap.astype(float) > 100e6].reset_index(drop=True)
    coinDiff = pd.merge(coins[['ticker', 'price']],
                        previousCoins[['ticker', 'price']], on=['ticker'])
    coinDiff[['price_x', 'price_y']] = coinDiff[['price_x', 'price_y']].astype(float)
    coinDiff['diffPercent'] = ((coinDiff['price_x'] - coinDiff['price_y'])
                               / coinDiff['price_y']) * 100
    # flag coins that have moved more than 7 percent in 1 hour
    movedX = coinDiff[abs(coinDiff.diffPercent.values) > 7].reset_index(drop=True)

    previousCoinsFull = previousCoinsFull[
        previousCoinsFull.time.astype(int) < 5].reset_index(drop=True)
    previousCoinsFull.time = (previousCoinsFull.time.astype(int) + 1).astype(str)
    coinsFull.time = 1
    coinsFull = pd.concat([coinsFull, previousCoinsFull]).reset_index(drop=True)
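# The block above ages hourly snapshots by bumping a string 'time' column and
# dropping rows once they are 5 hours old, so time == 1 is the previous hour and
# time == 5 is five hours back. A toy illustration of that bookkeeping, with
# made-up tickers and prices that are assumptions, not data from this project:
import pandas as pd

snapshots = pd.DataFrame({'ticker': ['BTC', 'BTC'],
                          'price': ['100', '95'],
                          'time': ['1', '5']})  # a 1h-old and a 5h-old row
current = pd.DataFrame({'ticker': ['BTC'], 'price': ['110'], 'time': ['0']})

snapshots = snapshots[snapshots.time.astype(int) < 5].reset_index(drop=True)  # drop the 5h-old row
snapshots.time = (snapshots.time.astype(int) + 1).astype(str)  # everything ages one hour
current.time = '1'  # the new snapshot becomes next run's 1h-old row
snapshots = pd.concat([current, snapshots]).reset_index(drop=True)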
def user_add(request):
    ad_suffix = GVSIGOL_LDAP['AD']
    if not ad_suffix:
        show_pass_form = True
    else:
        show_pass_form = False

    if request.method == 'POST':
        form = UserCreateForm(request.POST)
        if form.is_valid():
            assigned_groups = []

            is_staff = False
            if 'is_staff' in form.data:
                is_staff = True

            is_superuser = False
            if 'is_superuser' in form.data:
                is_superuser = True
                is_staff = True

            assigned_groups = []
            for key in form.data:
                if 'group-' in key:
                    assigned_groups.append(int(key.split('-')[1]))

            try:
                if form.data['password1'] == form.data['password2']:
                    user = User(
                        username=form.data['username'].lower(),
                        first_name=u''.join(form.data['first_name']).encode('utf-8'),
                        last_name=u''.join(form.data['last_name']).encode('utf-8'),
                        email=form.data['email'].lower(),
                        is_superuser=is_superuser,
                        is_staff=is_staff)
                    user.set_password(form.data['password1'])
                    user.save()

                    #admin_group = UserGroup.objects.get(name__exact='admin')
                    aux = UserGroup.objects.filter(name="admin")
                    if aux.count() > 1:
                        print "WARNING: table gvsigol_auth_usergroup inconsistent !!!!!!!!!!!"
                    admin_group = aux[0]

                    if user.is_superuser:
                        core_services.ldap_add_user(user, form.data['password1'], True)
                        core_services.ldap_add_group_member(user, admin_group)
                        usergroup_user = UserGroupUser(user=user, user_group=admin_group)
                        usergroup_user.save()
                    else:
                        core_services.ldap_add_user(user, form.data['password1'], False)
                        #core_services.ldap_add_group_member(user, admin_group)

                    for ag in assigned_groups:
                        user_group = UserGroup.objects.get(id=ag)
                        usergroup_user = UserGroupUser(user=user, user_group=user_group)
                        usergroup_user.save()
                        core_services.ldap_add_group_member(user, user_group)

                    # User backend
                    if is_superuser or is_staff:
                        ugroup = UserGroup(
                            name='ug_' + form.data['username'].lower(),
                            description=_(u'User group for') + ': ' + form.data['username'].lower())
                        ugroup.save()
                        ugroup_user = UserGroupUser(user=user, user_group=ugroup)
                        ugroup_user.save()
                        core_services.ldap_add_group(ugroup)
                        core_services.add_data_directory(ugroup)
                        core_services.ldap_add_group_member(user, ugroup)

                        url = mapservice_backend.getBaseUrl() + '/'
                        ws_name = 'ws_' + form.data['username'].lower()
                        if mapservice_backend.createWorkspace(
                                ws_name, url + ws_name, '',
                                url + ws_name + '/wms',
                                url + ws_name + '/wfs',
                                url + ws_name + '/wcs',
                                url + 'gwc/service/wms'):
                            # save it on DB if successfully created
                            newWs = Workspace(
                                name=ws_name,
                                description='',
                                uri=url + ws_name,
                                wms_endpoint=url + ws_name + '/wms',
                                wfs_endpoint=url + ws_name + '/wfs',
                                wcs_endpoint=url + ws_name + '/wcs',
                                cache_endpoint=url + 'gwc/service/wms',
                                created_by=user.username,
                                is_public=False)
                            newWs.save()

                            ds_name = 'ds_' + form.data['username'].lower()
                            services_utils.create_datastore(request, user.username, ds_name, newWs)
                            mapservice_backend.reload_nodes()

                    auth_utils.sendMail(user, form.data['password1'])
                    return redirect('user_list')

            except Exception as e:
                print "ERROR: Problem creating user " + str(e)
                errors = []
                #errors.append({'message': _("The username already exists")})
                groups = auth_utils.get_all_groups()
                return render_to_response(
                    'user_add.html',
                    {'form': form, 'groups': groups, 'errors': errors,
                     'show_pass_form': show_pass_form},
                    context_instance=RequestContext(request))
        else:
            groups = auth_utils.get_all_groups()
            return render_to_response(
                'user_add.html',
                {'form': form, 'groups': groups, 'show_pass_form': show_pass_form},
                context_instance=RequestContext(request))
    else:
        form = UserCreateForm()
        groups = auth_utils.get_all_groups()
        return render_to_response(
            'user_add.html',
            {'form': form, 'groups': groups, 'show_pass_form': show_pass_form},
            context_instance=RequestContext(request))
df = pd.concat(dataFrames)
now = datetime.datetime.now().strftime('%Y:%m:%d:%H.%M.%S')
df['timeStr'] = now
# df_file = '/Users/kamalfaik/pyCharmProjects/coinMarketCap/df_file_test.csv'
df_file = "/Users/kamalfaik/Google Drive/Business/bitcoin/coinmarketcap/df_file.csv"
if osp.exists(df_file):
    df_history = pd.read_csv(df_file)
    newTickers = [ticker for ticker in spottedTickers
                  if ticker not in df_history.ticker.values]
    df_all = pd.concat([df_history, df])
else:
    # no history yet, so every spotted ticker is new (the original left
    # newTickers undefined on this branch)
    newTickers = spottedTickers
    df_all = df
df_all.to_csv(df_file, index=False)

header = ['NEW SPOTTED CRYPTOS']
message = []
newResults = [result for result in results if result['ticker'] in newTickers]
if len(newResults) > 0:
    for result in newResults:
        header += [result['ticker']]
        message += [result['ticker']]
        message += [result['message']]
    messageToSend = " ".join(header) + '\n' + '\n'.join(message)
    utils.sendMail(messageToSend, df_file)
else:
    print('nothing to send {}'.format(now))
metastore_dbName = config_list['meta_db_dbName']
dbmeta_Url = config_list['meta_db_dbUrl']
dbmeta_User = config_list['meta_db_dbUser']
dbmeta_Pwd = base64.b64decode(config_list['meta_db_dbPwd'])
dbmeta_Port = config_list['meta_db_dbPort']
emailSender = config_list['email_sender']
emailReceiver = config_list['email_receivers']
# dbtgt_classpath = config_list['tgt_db_beeline_classPath']

if len(sys.argv) < 4:
    error = 1
    err_msg = "datasync_driver: main[{0}]: ERROR: Mandatory input arguments not passed".format(error)
    print "ERROR: " + err_msg
    error_table_list = ""
    sendMail(emailSender, emailReceiver, err_msg, error_table_list, 0, config_list['env'])
    sys.exit(1)

input_schema = sys.argv[1]
load_type = sys.argv[2]
data_path = sys.argv[3]
load_group_id = None
input_tablename_list = None
input_multiprocessor = None

# Special logic to mirror a table from one schema in GP to a different schema in HIVE
is_special_logic = False

# input_source_schema = "eservice"
# load_type = "INCREMENTAL"
# data_path = "GP2HDFS"
# load_group__id = 1
metastore_dbName = config_list['meta_db_dbName']
dbmeta_Url = config_list['meta_db_dbUrl']
dbmeta_User = config_list['meta_db_dbUser']
dbmeta_Pwd = base64.b64decode(config_list['meta_db_dbPwd'])
dbmeta_Port = config_list['meta_db_dbPort']
emailSender = config_list['email_sender']
emailReceiver = config_list['email_receivers']
# dbtgt_classpath = config_list['tgt_db_beeline_classPath']

# preliminary header: the original referenced print_hdr before defining it,
# so give it a value the argument check below can use
print_hdr = "[datasync_driver: main] - "

if len(sys.argv) < 4:
    error = 1
    err_msg = "ERROR: Mandatory input arguments not passed"
    print print_hdr + err_msg
    error_table_list = ""
    sendMail(emailSender, emailReceiver, err_msg, error_table_list, 0,
             config_list['env'], "ERROR")
    sys.exit(1)

input_schema = sys.argv[1]
load_type = sys.argv[2]
data_path = sys.argv[3]
load_group_id = None
input_tablename_list = None
input_multiprocessor = None

# Special logic to mirror a table from one schema in GP to a different schema in HIVE
is_special_logic = False

load_id = None
system_name = None
print_hdr = "[datasync_driver: main: " + data_path + ": " + input_schema + "] - "
def scrapy_job():
    #scrapy_funi()
    #analysis()
    info = news()
    utils.sendMail('我的资讯', info)  # subject is Chinese for "My news"
def searchProduct():
    product_name = st.text_input('enter the product name to show')
    amz_urls = st.text_input('Enter url for amazon')
    flip_urls = st.text_input('Enter url for flipkart')
    myn_urls = st.text_input('Enter url for Myntra')
    btn = st.button('Check tracker')
    if (amz_urls or flip_urls or myn_urls) and btn:
        df = []
        if amz_urls:
            details = sc.extract_amazon_data(amz_urls)
            df.append(details)
        if flip_urls:
            details = sc.extract_flipkart_data(flip_urls)
            df.append(details)
        if myn_urls:
            details = sc.extract_myntra_data(myn_urls)
            df.append(details)
        st.write(df)
        st.markdown(f"""
            <h2>Name : </h2> <h3>{df[0]['name']}</h3>
            <h2>Name : </h2> <h3>{df[0]}</h3>
            """, unsafe_allow_html=True)

    st.subheader('Run Tracker')
    time_gap = st.select_slider(
        "How much time difference between each tracking call",
        ['No delay', '10 sec', '10 mins', '1 hour', '12 hours', '1 day', '3 days'])
    mail_addr = st.text_input("Enter Your Mail")
    btn2 = st.button('Run Tracker continuously')
    if (amz_urls or flip_urls or myn_urls) and btn2 and mail_addr:
        if time_gap == '10 sec':
            wait = 10
        elif time_gap == '10 mins':
            wait = 60 * 10
        elif time_gap == '1 hour':
            wait = 60 * 60
        elif time_gap == '12 hours':
            wait = 60 * 60 * 12
        elif time_gap == '1 day':
            wait = 60 * 60 * 24
        elif time_gap == '3 days':  # the original checked '3 day', which never matched the slider option
            wait = 60 * 60 * 24 * 3
        elif time_gap == 'No delay':
            wait = 0
        else:
            wait = 5
        dfs = []
        while True:
            if amz_urls:
                details = sc.extract_amazon_data(amz_urls)
                details['date'] = datetime.utcnow()
                product = Products(name=details['name'], price=details['price'],
                                   deal=details['deal'], url=details['url'],
                                   date=details['date'], website=details['website'])
                dfs.append(details)
                sess.add(product)
                sess.commit()
            if flip_urls:
                details = sc.extract_flipkart_data(flip_urls)
                details['date'] = datetime.utcnow()
                product = Products(name=details['name'], price=details['price'],
                                   deal=details['deal'], url=details['url'],
                                   date=details['date'], website=details['website'])
                dfs.append(details)
                sess.add(product)
                sess.commit()
            if myn_urls:
                details = sc.extract_myntra_data(myn_urls)
                details['date'] = datetime.utcnow()
                product = Products(name=details['name'], price=details['price'],
                                   deal=details['deal'], url=details['url'],
                                   date=details['date'], website=details['website'])
                dfs.append(details)
                sess.add(product)
                sess.commit()
            data = pd.DataFrame(dfs)
            data['date'] = pd.to_datetime(data['date'])
            fig = px.line(data_frame=data, x=data.index, y=data['price'],
                          line_group='name', color='website')
            plot_area.subheader('graphical output')
            plot_area.plotly_chart(fig)
            data_area.write(data)
            if mail_addr and data.shape[0] >= 12:
                amzdata = data[data.website == 'amazon']
                flipkartdata = data[data.website == 'flipkart']
                myntradata = data[data.website == 'myntra']
                if amz_urls:
                    if amzdata.iloc[-1]['price'] < amzdata.iloc[-2]['price']:
                        if sendMail(mail_addr, amz_urls, product_name,
                                    amzdata.iloc[-1]['price'],
                                    amzdata.iloc[-1]['website']):
                            st.success(f'Price fell at amazon, mail notification sent to {mail_addr}')
                if flip_urls:
                    if flipkartdata.iloc[-1]['price'] < flipkartdata.iloc[-2]['price']:
                        if sendMail(mail_addr, flip_urls, product_name,
                                    flipkartdata.iloc[-1]['price'],
                                    flipkartdata.iloc[-1]['website']):
                            st.success(f'Price fell at flipkart, mail notification sent to {mail_addr}')
                if myn_urls:
                    if myntradata.iloc[-1]['price'] < myntradata.iloc[-2]['price']:
                        if sendMail(mail_addr, myn_urls, product_name,
                                    myntradata.iloc[-1]['price'],
                                    myntradata.iloc[-1]['website']):
                            st.success(f'Price fell at myntra, mail notification sent to {mail_addr}')
            time.sleep(wait)

op = st.sidebar.checkbox("show tracked product data manually")
if op:
    try:
        df = pd.read_sql('products', engine)
        st.write(df)
    except Exception as e:
        st.error('No tracking data available')
def emailReport(report=None, price=None, buyCurve=None, baseCurve=None,
                sellCurve=None, sellCurveB=None, buyLocations=None,
                sellLocations=None, volData=None, periodList=[730, 180, 60],
                sendAsMail=True, saveLocation=None, changePeriod='',
                chartTitleKey='chart', periodCharts=True, patternCharts=False,
                **kwargs):
    '''Produce charts and email reports.'''
    dte = str(datetime.datetime.now().strftime('%Y-%m-%d %HH %MM %SS'))
    msgImageData = []
    if periodCharts:
        assert price is not None and buyCurve is not None and baseCurve is not None and \
            sellCurve is not None and buyLocations is not None and sellLocations is not None and \
            report is not None, \
            'must provide "report, price, buyCurve, sellCurve, baseCurve, buyLocations, ' \
            'sellLocations" for period charts'
        splitLen = int(np.ceil(price.columns.unique().shape[0] / 3))
        n = 0
        for i in range(splitLen):
            if n < price.columns.shape[0]:
                fig, ax = plt.subplots(nrows=3, ncols=len(periodList), figsize=(30, 15))
                for row in ax:
                    if n < price.columns.unique().shape[0]:
                        m = 0
                        for col in row:
                            # volCurve = volData[:, n][-periodList[m]:]
                            dataPlot = pd.DataFrame(price[price.columns[n]][-periodList[m]:],
                                                    columns=[price.columns[n]])
                            longCurve = baseCurve[:, n][-periodList[m]:]
                            shortCurve = buyCurve[:, n][-periodList[m]:]
                            sellerCurve = sellCurve[:, n][-periodList[m]:]
                            sellerCurveB = sellCurveB[:, n][-periodList[m]:]
                            buys = buyLocations[:, n][-periodList[m]:]
                            sells = sellLocations[:, n][-periodList[m]:]
                            dataPlot['Fast'] = shortCurve
                            dataPlot['Slow'] = longCurve
                            dataPlot['Sell'] = sellerCurve
                            dataPlot['SellB'] = sellerCurveB
                            col.plot(dataPlot)
                            col.scatter(dataPlot.index, buys, c='r', s=50)
                            col.scatter(dataPlot.index, sells, c='b', s=50)
                            col.set_title(price.columns[n] + '_' + str(periodList[m]))
                            plt.setp(col.xaxis.get_majorticklabels(), rotation=45)
                            m = m + 1
                        n = n + 1
                plt.tight_layout()
                if sendAsMail:
                    assert saveLocation is not None, \
                        'must specify a save location for charts when sending email'
                    loc = '%s\\%s %s %s %s.jpg' % (saveLocation, chartTitleKey,
                                                   changePeriod, str(i), dte)
                    plt.savefig(loc)
                    msgImageData.append(loc)
                plt.close()
    elif patternCharts:
        # plot 3 rows per fig
        splitLen = int(np.ceil((report.shape[0] + 1) / (3 * len(periodList))))
        for i in range(splitLen):
            n = 0
            fig, ax = plt.subplots(nrows=3, ncols=len(periodList), figsize=(30, 15))
            for row in ax:
                for col in row:
                    if n < report.shape[0]:
                        # volCurve = volData[:, n][-periodList[m]:]
                        dataPlot = list(str(report.pattern[n]))
                        col.plot(dataPlot)
                        col.set_title(report.ticker[n] + '_' + str(report.n[n]) + '_' +
                                      str(report.period[n]))
                    n = n + 1
            plt.tight_layout()
            if sendAsMail:
                loc = saveLocation + '\\' + chartTitleKey + ' ' + str(i) + ' ' + dte + '.jpg'
                plt.savefig(loc)
                msgImageData.append(loc)
    else:
        msgImageData = None
    if sendAsMail:
        sendMail(msgText=dte, msgTable=report,
                 subjectText=chartTitleKey + ' - ' + dte,
                 msgImageData=msgImageData, **kwargs)
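# emailReport and the crypto snippet above call a sendMail that takes msgText,
# msgTable (a DataFrame), and msgImageData (a list of saved chart paths). A
# minimal sketch of what such a helper could look like, assuming it renders the
# table as an HTML fragment and attaches the chart images; the names and SMTP
# details below are assumptions, not taken from the source.
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.image import MIMEImage

def sendMail(msgText, msgTable, subjectText, msgImageData=None,
             host='localhost', sender='noreply@example.com',
             recipient='me@example.com'):
    msg = MIMEMultipart()
    msg['Subject'] = subjectText
    msg['From'] = sender
    msg['To'] = recipient
    # body: plain text plus the report table rendered as HTML
    msg.attach(MIMEText(msgText, 'plain'))
    if msgTable is not None:
        msg.attach(MIMEText(msgTable.to_html(), 'html'))
    # attach each saved chart image
    for path in (msgImageData or []):
        with open(path, 'rb') as fh:
            msg.attach(MIMEImage(fh.read()))
    with smtplib.SMTP(host) as smtp:
        smtp.send_message(msg)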
# if warning enabled calc target and compare
days = monthrange(year, month)[1]
targetData = int(capValue) / days * now
log.info('Current usage = {0}, target = {1}, threshold={2}'.format(
    usedData, targetData, targetData * warnThreshold))
if usedData > targetData * warnThreshold:
    smtpUser = utils.getConfigValue(args, XFINITY_SMTP_USER)
    warnEmailTo = utils.getConfigValue(args, XFINITY_EMAIL_TO) or smtpUser
    warnEmailFrom = utils.getConfigValue(args, XFINITY_EMAIL_FROM) or warnEmailTo
    if not warnEmailTo:
        log.warn('Mail disabled: no recipient email setup')
    else:
        utils.sendMail(
            warnEmailFrom, warnEmailTo,
            'Xfinity usage = {0:.0f} GB (target is {1:.0f} GB)\n\n'.format(
                usedData, targetData),
            gSheetUrl if gSheetId else '')

# if we have no spreadsheet, simply output the json
if not gSheetId:
    log.info('No Google spreadsheet specified.')
    finish(args, usageData, False)

# use creds to create a client to interact with the Google Drive API
log.info('Connecting to Google spreadsheet')
book = utils.openGoogleSheet(gSheetId)

# update current usage
log.info('Updating data sheet')
dataSheet = book.get_worksheet(DATA_SHEET_INDEX)
dataSheet.update_acell(DATE_CELL, execDate)