def _getConn(self, ForceReconnect=False):
    """Return a cached HTTPS connection, reopening it when needed.

    args
        ForceReconnect : when True, always open a fresh connection.

    A new connection is created when forced, when none exists yet, or when
    the cached one has outlived the configured timeout window.
    """
    # `is None` instead of `== None`; both branches returned the same
    # attribute, so a single trailing return suffices.
    if ForceReconnect or self.__conn is None or stime() - self.__connectedAt >= self.__timeOut:
        self.__conn = httpclient.HTTPSConnection(ServiceURL)
        self.__connectedAt = stime()
    return self.__conn
def _getconn(self):
    """Return the cached HTTPS connection, reopening it when absent or stale."""
    # Check for a missing connection first (with `is None`, not `== None`)
    # so the staleness arithmetic only runs on an initialised timestamp.
    if self.__conn is None or stime() - self.__connectedAt >= self.__timeOut:
        self.__conn = httpclient.HTTPSConnection(LINKHUB_ServiceURL)
        self.__connectedAt = stime()
    return self.__conn
def _getConn(self):
    """Return the cached HTTPS connection, reopening it when absent or stale."""
    # `is None` instead of `== None`; the duplicated return in the original
    # if/else collapses to a single trailing return.
    if self.__conn is None or stime() - self.__connectedAt >= self.__timeOut:
        self.__conn = httpclient.HTTPSConnection(ServiceURL)
        self.__connectedAt = stime()
    return self.__conn
def _getConn(self):
    """Return the cached HTTPS connection, reopening it when absent or stale.

    The target host depends on self.IsTest: the sandbox service URL when
    True, the production URL otherwise.
    """
    # `is None` instead of `== None`; single trailing return.
    if self.__conn is None or stime() - self.__connectedAt >= self.__timeOut:
        self.__conn = httpclient.HTTPSConnection(
            ServiceURL_TEST if self.IsTest else ServiceURL_REAL)
        self.__connectedAt = stime()
    return self.__conn
def __init__(self, name=None):
    """Set up the per-run results directory and log-file path.

    args
        name : optional run name; when omitted, a timestamp of the form
               "31-12-2020_23h59" is used instead.
    """
    if name is None:
        # Default run name: day-month-year_hour'h'minute.
        # NOTE(review): stime here appears to be a strftime-style alias
        # (it takes a format string) — confirm against the import site.
        name = (stime("%d") + "-" + stime("%m") + "-" + stime("%Y") +
                "_" + stime("%H") + 'h' + stime("%M"))
    # The original assigned self.name twice; build the final path once.
    self.name = "results/" + name
    self.full = self.name + "/record.log"
    # Only create the directory when some output feature will write to it.
    if g.graphing or g.logging or g.search_tree in ["complete", "end"] or g.graph_current_solution:
        os.makedirs(self.name)
def main():
    """Simulate fertilisation.

    Selects one male and one female gamete, decides the baby's sex from a
    fair coin pick (the male gamete carries Y or X), builds the resulting
    karyotype DataFrame and optionally saves it under OUTPUT/ as csv.
    """
    carioM = Gametogenese('MASCULINO')
    gametaMasculino = get_GAMETAdf(carioM)
    # The original set `girl` only in the else branch (never read) and left
    # it undefined otherwise; a single boolean captures the coin flip.
    boy = choice([0, 1]) == 0
    print(magenta + '[+] O GÁMETA MASCULINO ESCOLHIDO FOI: {0}{1}'.format(
        gametaMasculino[1], cyan))
    print(gametaMasculino[0])
    carioF = Gametogenese('FEMININO')
    gametaFeminino = get_GAMETAdf(carioF)
    print(magenta + '[+] O GÁMETA FEMININO ESCOLHIDO FOI: {0}{1}'.format(
        gametaFeminino[1], cyan))
    print(gametaFeminino[0])
    print(
        '\n' + red +
        '[***] A inicializar o processo de CARIOGAMIA dos gâmetas selecionados...\n'
    )
    if boy:
        print(magenta + "[+] O GÂMETA MASCULINO TRAZ UM CROMOSSOMA SEXUAL Y.")
        print(green + "[!] É UM RAPAZ!")
        cromoSex = ['Y', 'X']
    else:
        print(magenta + "[+] O GÂMETA MASCULINO TRAZ UM CROMOSSOMA SEXUAL X.")
        print(green + "[!] É UM RAPARIGA!")
        cromoSex = ['X', 'X']
    chaves = [i for i in ncromo]
    valores = []
    cariotipodobebe = {}
    # Pair each paternal gene with the corresponding maternal one, pair by pair.
    for genePAI, geneMAE in zip(gametaMasculino[1].values(),
                                gametaFeminino[1].values()):
        valores.append([genePAI, geneMAE])
    cariotipodobebe.update(dict(zip(chaves, valores)))
    dataframeBEBE = pd.DataFrame(
        cariotipodobebe.items(),
        columns=['Número do par de cromossomas', 'Atributos'])
    dataframeBEBE.insert(1, "Características", caracteristicas)
    # DataFrame.append was deprecated in pandas 1.4 and removed in 2.0;
    # pd.concat is the supported way to add the sex-chromosome row.
    dataframeBEBE = pd.concat(
        [
            dataframeBEBE,
            pd.DataFrame([{
                'Número do par de cromossomas': 23,
                'Características': 'Cromossomas Sexuais',
                'Atributos': cromoSex
            }])
        ],
        ignore_index=True)
    print(cyan, dataframeBEBE)
    if input(
            blue +
            '[?] Deseja gravar o DataFrame do cariótipo do bebé como ficheiro csv? (será gravado em {0}/OUTPUT) '
            .format(getcwd())).lower() in yes:
        now = stime("%Y,%m,%d,%H,%M,%S")
        t = now.split(',')
        dataframeBEBE.to_csv('OUTPUT/cariotipoBEBE' + '_'.join(t) + '.csv')
    else:
        print(red + "[!!!] O programa será encerrado...")
        exit(0)
def plot_prediction_grid(xx, yy, predicted_grid, predictors, outcomes):
    """Plot KNN predictions for every point on the grid.

    args
        xx, yy         : meshgrid coordinate arrays for the decision surface.
        predicted_grid : predicted class for each grid point.
        predictors     : 2-column array of observation coordinates.
        outcomes       : class label (0-based int) per observation.

    Saves the figure as a timestamped PDF under Plots/ and shows it.
    """
    types = len(set(outcomes))
    c_bg = np.zeros((types, 3))
    c_ob = np.zeros((types, 3))
    for i in range(types):
        # Random light background colour per class; the observation colour
        # is the same hue darkened by 50/255.
        c_bg_i = np.array([
            random.randint(100, 255) / 255,
            random.randint(100, 255) / 255,
            random.randint(100, 255) / 255
        ])
        c_ob_i = (c_bg_i * 255 - 50) / 255
        c_bg[i] = c_bg_i
        c_ob[i] = c_ob_i
    background_colormap = lcm(c_bg)
    observation_colormap = c_ob
    plt.figure(figsize=(10, 10))
    plt.pcolormesh(xx, yy, predicted_grid, cmap=background_colormap, alpha=0.5)
    xs = np.array(predictors[:, 0])
    ys = np.array(predictors[:, 1])
    outcomes = np.array(outcomes)
    for i in range(types):
        to_plot = outcomes == i
        plt.scatter(xs[to_plot],
                    ys[to_plot],
                    s=50,
                    color=observation_colormap[i],
                    label="Class " + str(i + 1))
    plt.xlabel('Variable 1')
    plt.ylabel('Variable 2')
    x_labels = np.linspace(np.min(xx), np.max(xx), 5)
    y_labels = np.linspace(np.min(yy), np.max(yy), 5)
    plt.xticks(x_labels, rotation="vertical")
    plt.yticks(y_labels)
    plt.xlim(np.min(xx), np.max(xx))
    plt.ylim(np.min(yy), np.max(yy))
    plt.legend(loc="lower right")
    if not os.path.exists("Plots"):
        os.makedirs("Plots")
    # os.path.join keeps the path portable: the original "Plots\plot_..."
    # only worked on Windows (and only because "\p" is not an escape).
    filename = os.path.join("Plots",
                            "plot_" + stime("%d-%m-%Y_%H-%M-%S") + ".pdf")
    plt.savefig(filename)
    plt.show()
def __init__(self, LinkID, SecretKey, timeOut=15):
    """Constructor.

    args
        LinkID    : LinkID issued by Linkhub
        SecretKey : SecretKey issued by Linkhub
        timeOut   : connection-reuse window in seconds (default 15)
    """
    # Credentials and request scope.
    self.__linkID = LinkID
    self.__secretKey = SecretKey
    self.__scopes = ["member"]
    # Token cache starts empty; the connection is opened lazily.
    self.__tokenCache = {}
    self.__timeOut = timeOut
    self.__conn = None
    self.__connectedAt = stime()
def __init__(self, LinkID, SecretKey, TimeOut=60):
    """Constructor.

    args
        LinkID    : LinkID issued by Linkhub
        SecretKey : SecretKey issued by Linkhub
        TimeOut   : connection-reuse window in seconds (default 60)
    """
    # Credentials and request scope.
    self.__linkID = LinkID
    self.__secretKey = SecretKey
    self.__scopes = ["170"]
    # No cached token yet; the connection is opened lazily.
    self.__tokenCache = None
    self.__timeOut = TimeOut
    self.__conn = None
    self.__connectedAt = stime()
def PingScan(h):
    """Run an nmap ping scan (-sn) over the hosts in *h* and tabulate results.

    args
        h : host specification string passed straight to nmap's `hosts=`.

    Builds a DataFrame with one row per discovered host (Host, Hostname,
    Estado) and optionally saves it under OUTPUT/ as a timestamped csv.
    """
    portScan = nmap.PortScanner()
    portScan.scan(hosts=h, arguments='-sn')
    EstadoHOST = pd.DataFrame(columns=['Host', 'Hostname', 'Estado'])
    # enumerate replaces the original zip(hosts, range(len(hosts))) idiom.
    for index, host in enumerate(portScan.all_hosts()):
        EstadoHOST.loc[index] = get_HostScanData(portScan, host)
        print(yellow + '[*] A inserir {0} no DataFrame...'.format(host))
    print(green + '[+] DataFrame pronto.')
    print(magenta, EstadoHOST)
    print('\n')
    if input(
            cyan +
            'Deseja gravar o DataFrame como ficheiro csv? (será gravado em {0}/OUTPUT) '
            .format(getcwd())) in yes:
        now = stime("%Y,%m,%d,%H,%M,%S")
        t = now.split(',')
        EstadoHOST.to_csv('OUTPUT/pingDF' + '_'.join(t) + '.csv')
def now_timestamp():
    """Return the current Unix time truncated to whole seconds."""
    seconds_since_epoch = stime()
    return int(seconds_since_epoch)
def __init__(self, timeOut=15):
    """Open a connection to the Linkhub service and record its age.

    args
        timeOut : connection-reuse window in seconds (default 15)
    """
    self.__conn = httpclient.HTTPSConnection(LINKHUB_ServiceURL)
    # Timestamp used elsewhere to decide when the connection is stale.
    self.__connectedAt = stime()
    self.__timeOut = timeOut
from snapshot_selenium import snapshot
from retrying import retry
import random
from pyecharts.datasets.coordinates import get_coordinate, search_coordinates_by_keyword
from pyecharts.components import Image
from pyecharts.options import ComponentTitleOpts
from MyQR import myqr
import os
from pyecharts.datasets import register_url
from pyecharts.faker import Collector, Faker
import json
from collections import Counter
import time

# time.time(), not time.stime(): the time module has no "stime" attribute,
# so the original line raised AttributeError before any data was loaded.
t1 = time.time()
# Load the offline-store listing exported from the scraper.
with open('E:/splash/tommy/getOfflineStoreListOrParms.json', 'r',
          encoding='utf-8') as data_file:
    json_data = data_file.read()
data = json.loads(json_data)
# Accumulators filled in later by the rest of the script.
provinces_list = []
cities_list = []
store_coord = dict()
shanghai_store_coord = dict()
beijing_store_coord = dict()
t2 = time.time()
print(f'文件加载 耗时{t2-t1}秒')