def buildHeader(self, ID, flags, QDCOUNT, ANCOUNT, NSCOUNT, ARCOUNT):
    """ given parameters, build the header
        @return: None=error, byte string=OK
    """
    if (len(flags) != 8 or ID is None or QDCOUNT is None
            or ANCOUNT is None or NSCOUNT is None or ARCOUNT is None):
        log_err('buildHeader: ERROR in input '
                "ID:{}, flags:{}, QDCOUNT:{}, ANCOUNT:{}, NSCOUNT:{}, ARCOUNT:{}"
                .format(ID, flags, QDCOUNT, ANCOUNT, NSCOUNT, ARCOUNT))
        return None
    header = b''
    header += short_to_byte2(ID)
    # number of bits for each flag field: QR, Opcode, AA, TC, RD, RA, Z, RCODE
    flags_bit = [1, 4, 1, 1, 1, 1, 3, 4]
    holder = ''
    for i in range(len(flags_bit)):
        holder += int_to_bit(flags[i], flags_bit[i])
    log_print('flags:{}'.format(holder))
    # pack the 16 flag bits into two bytes
    header += short_to_byte1(int(holder[:8], 2))
    header += short_to_byte1(int(holder[8:], 2))
    header += short_to_byte2(QDCOUNT)
    header += short_to_byte2(ANCOUNT)
    header += short_to_byte2(NSCOUNT)
    header += short_to_byte2(ARCOUNT)
    if len(header) != 12:   # a DNS header is always 12 bytes
        log_err("buildHeader:: ERROR size != 12 bytes")
        return None
    log_print("buildHeader produces:{}".format(header))
    return header
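# NOTE: the byte-packing helpers used above (short_to_byte2, short_to_byte1,
# int_to_byte4, int_to_bit) are defined elsewhere in the project and are not
# shown in this section. A minimal sketch, assuming network byte order
# (big-endian) packing via struct, might look like:
import struct

def short_to_byte2(value):
    """pack an int into 2 bytes, big-endian (hypothetical sketch)"""
    return struct.pack('!H', value)

def short_to_byte1(value):
    """pack an int into a single byte (hypothetical sketch)"""
    return struct.pack('!B', value)

def int_to_byte4(value):
    """pack an int into 4 bytes, big-endian (hypothetical sketch)"""
    return struct.pack('!I', value)

def int_to_bit(value, width):
    """render an int as a zero-padded bit string of the given width (hypothetical sketch)"""
    return format(value, '0{}b'.format(width))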
def getRecord(self, bytes, idx=0):
    """ get record info from bytes
        @return -1=ERROR idx=OK
    """
    name_format, idx = byte2_to_short(bytes, idx)
    if (name_format >> 14) == 3:
        # compression pointer format: the lower 14 bits are an offset into the message
        ptr = (name_format & ((1 << 14) - 1))
        self.NAME, _ = get_qname(bytes, ptr)
    else:
        # plain label format: re-parse the name from its original offset
        self.NAME, idx = get_qname(bytes, idx - 2)
    log_print('getRecord:: NAME:{}'.format(self.NAME))
    self.TYPE, idx = byte2_to_short(bytes, idx)
    if self.TYPE != 1:
        log_err("getRecord:: TYPE {} != 1".format(self.TYPE))
    self.CLASS, idx = byte2_to_short(bytes, idx)
    if self.CLASS != 1:
        log_err("getRecord:: CLASS {} != 1".format(self.CLASS))
    self.TTL, idx = byte4_to_int(bytes, idx)
    if self.TTL != 1:
        log_err("getRecord:: TTL {} != 1".format(self.TTL))
    self.RDLENGTH, idx = byte2_to_short(bytes, idx)
    if self.RDLENGTH != 4:
        log_err('getRecord:: RDLENGTH {} != 4'.format(self.RDLENGTH))
        return -1
    # RDATA is a 4-byte IPv4 address; render it in dotted-decimal form
    self.RDATA = ".".join(str(bytes[idx + i]) for i in range(4))
    idx += 4
    log_print("getRecord: answer ip:{}".format(self.RDATA))
    return idx
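# NOTE: get_qname, byte2_to_short and byte4_to_int are assumed to exist elsewhere
# in the module. A minimal sketch of get_qname, assuming the standard DNS label
# encoding (length-prefixed labels terminated by a zero byte, no compression):
def get_qname(data, idx):
    """return (dotted name, index just past the terminating zero byte) -- sketch"""
    labels = []
    while data[idx] != 0:
        length = data[idx]
        labels.append(data[idx + 1:idx + 1 + length].decode('ascii'))
        idx += 1 + length
    return '.'.join(labels), idx + 1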
def get_data(index_list):
    """ Return the entire database in the form of a cleaned pandas dataframe """
    with sqlite3.connect(DB_FILE, detect_types=sqlite3.PARSE_DECLTYPES |
                         sqlite3.PARSE_COLNAMES) as connection:
        # build a parameterized IN (...) clause with one placeholder per index
        data_frame = pd.read_sql_query(
            "SELECT * FROM QUOTES WHERE Gas_index IN "
            f"({','.join('?' * len(index_list))})",
            connection,
            params=[str(e) for e in index_list.keys()],
            parse_dates=['Trading_day'])
    log_print("Data extracted from database")
    return data_frame
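# Example usage (hypothetical index dict; the real INDEX_LIST maps index names
# to their source URLs elsewhere in the project):
#   quotes = get_data({'PEG Nord': '…', 'TTF': '…'})
#   print(quotes[['Trading_day', 'Gas_index', 'Calendar+1']].head())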
def buildRecord(self, domain_name, data):
    """ build the record
        @return None=ERROR byte string=OK
    """
    record = b""
    qname = build_qname(domain_name)
    if qname is None:
        return None
    record += qname
    record += short_to_byte2(1)   # TYPE: A
    record += short_to_byte2(1)   # CLASS: IN
    record += int_to_byte4(0)     # TTL
    record += short_to_byte2(4)   # RDLENGTH: always 4 for an IPv4 addr
    for part in data.split('.'):
        record += short_to_byte1(int(part))
    log_print("buildRecord: record:\n{}".format(record))
    return record
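# NOTE: build_qname is defined elsewhere in the project. A minimal sketch,
# assuming standard length-prefixed DNS labels (the inverse of get_qname above):
def build_qname(domain_name):
    """encode a dotted name as length-prefixed labels (sketch); None on bad input"""
    if not domain_name:
        return None
    qname = b''
    for label in domain_name.split('.'):
        if len(label) == 0 or len(label) > 63:
            return None
        qname += short_to_byte1(len(label)) + label.encode('ascii')
    return qname + b'\x00'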
def fetch_cotations(index_list):
    """ Get cotations """
    for index in index_list.keys():
        req = requests.post(index_list[index], headers=HEADERS)
        content = BeautifulSoup(req.json()['html'], 'html.parser')
        cotation_table = content.div.table
        cotation_list = []
        # Product name extraction; skip the first header since it is the Trading Day
        product_list = []
        for elements in cotation_table.thead.find_all("th"):
            product_list.append(elements.string)
        product_list = product_list[1:]
        # Data extraction
        for row in cotation_table.tbody.find_all('tr'):
            cotation_set = {}
            data_list = []
            for cell in row.find_all('td'):
                data_list.append(cell.string)
            date_component = [int(i) for i in data_list[0].split('-')]
            cot_date = date(date_component[0], date_component[1], date_component[2])
            for k, data in enumerate(data_list[1:]):
                price = float(data) if data is not None else 'NULL'
                # Cleaning price from erroneous data, e.g. data < 1 €/MWh
                if price != 'NULL' and price < 1:
                    price = 'NULL'
                cotation_set.update({product_list[k]: price})
            cotation_list.append(Cotation(index, cot_date, cotation_set))
        for cotation in cotation_list:
            data = {'trading_day': cotation.date.strftime("%Y-%m-%d"),
                    'Gas_index': cotation.index}
            data = {**data, **cotation.cotation_set}
            da.insert_data(data)
    log_print("Data inserted")
def getHeader(self, bytes, idx=0):
    """ get header info from bytes, used when receiving request
        @return -1=error idx=ok
    """
    self.ID, idx = byte2_to_short(bytes, idx)
    log_print('ID:{}'.format(self.ID))
    # unpack the 16 flag bits
    flags, idx = byte2_to_short(bytes, idx)
    self.QR = (flags >> 15)                                    # bit 15
    self.Opcode = ((flags & int('0' + '1' * 15, 2)) >> 11)     # bits 11-14
    self.AA = ((flags & int('0' * 5 + '1' * 11, 2)) >> 10)     # bit 10
    self.TC = ((flags & int('0' * 6 + '1' * 10, 2)) >> 9)      # bit 9
    self.RD = ((flags & int('0' * 7 + '1' * 9, 2)) >> 8)       # bit 8
    self.RA = ((flags & int('0' * 8 + '1' * 8, 2)) >> 7)       # bit 7
    self.Z = ((flags & int('0' * 9 + '1' * 7, 2)) >> 4)        # bits 4-6
    self.RCODE = (flags & int('0' * 12 + '1' * 4, 2))          # bits 0-3
    # get rest of the fields
    self.QDCOUNT, idx = byte2_to_short(bytes, idx)
    self.ANCOUNT, idx = byte2_to_short(bytes, idx)
    self.NSCOUNT, idx = byte2_to_short(bytes, idx)
    self.ARCOUNT, idx = byte2_to_short(bytes, idx)
    log_print("QDCOUNT:{}, ANCOUNT:{}, NSCOUNT:{}, ARCOUNT:{}".format(
        self.QDCOUNT, self.ANCOUNT, self.NSCOUNT, self.ARCOUNT))
    # error handling: this server only accepts single-question requests
    if (self.QDCOUNT != 1 or self.NSCOUNT != 0 or self.ARCOUNT != 0):
        log_print('getHeader:: ERROR: count is wrong')
        log_err("QDCOUNT:{}, ANCOUNT:{}, NSCOUNT:{}, ARCOUNT:{}".format(
            self.QDCOUNT, self.ANCOUNT, self.NSCOUNT, self.ARCOUNT))
        return -1
    return idx
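# For reference, decoding a typical response flags value, 0x8180
# (QR=1, RD=1, RA=1, everything else 0), with the masks above:
#   QR     = 0x8180 >> 15            -> 1
#   Opcode = (0x8180 & 0x7FFF) >> 11 -> 0
#   RD     = (0x8180 & 0x01FF) >> 8  -> 1
#   RA     = (0x8180 & 0x00FF) >> 7  -> 1
#   RCODE  = 0x8180 & 0x000F         -> 0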
def init_database():
    """ Init database """
    log_print("DB initialization phase started")
    connection = sqlite3.connect(DB_FILE)
    # Table creations
    connection.executescript(f"DROP TABLE IF EXISTS {QT}")
    connection.commit()
    connection.executescript(f"CREATE TABLE {QT} ("
                             "'Trading_day' TEXT NOT NULL,"
                             "'Gas_index' TEXT NOT NULL,"
                             "'Season+1' FLOAT,"
                             "'Season+2' FLOAT,"
                             "'Season+3' FLOAT,"
                             "'Season+4' FLOAT,"
                             "'Calendar+1' FLOAT,"
                             "'Calendar+2' FLOAT,"
                             "'Calendar+3' FLOAT,"
                             "'Calendar+4' FLOAT,"
                             "PRIMARY KEY ('Trading_day', 'Gas_index'))")
    connection.commit()
    connection.close()
    log_print("DB initialized !")
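# NOTE: fetch_cotations calls da.insert_data(data); that helper lives in the da
# module and is not shown in this section. A minimal sketch, assuming one row per
# (Trading_day, Gas_index) pair, the column names created above, and scraped
# product names that match those columns:
def insert_data(data):
    """insert one cotation row into the QUOTES table (hypothetical sketch)"""
    columns = ['Trading_day', 'Gas_index', 'Season+1', 'Season+2', 'Season+3',
               'Season+4', 'Calendar+1', 'Calendar+2', 'Calendar+3', 'Calendar+4']
    row = [data.get('trading_day'), data.get('Gas_index')]
    # map the scraper's 'NULL' marker to a real SQL NULL
    row += [None if data.get(col) == 'NULL' else data.get(col) for col in columns[2:]]
    with sqlite3.connect(DB_FILE) as connection:
        connection.execute(
            "INSERT OR REPLACE INTO {} ({}) VALUES ({})".format(
                QT,
                ','.join("'{}'".format(col) for col in columns),
                ','.join('?' * len(columns))),
            row)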
def buildQuestion(self, domain_name, qtype, qclass):
    """ build a DNS question
        @return: None=error, byte string=OK
    """
    questions = b''
    qname = build_qname(domain_name)
    if qname is None:
        return None
    questions += qname
    log_print("buildQuestion:: domain name:{}".format(questions))
    questions += short_to_byte2(qtype)
    log_print("buildQuestion:: qtype:{}".format(qtype))
    questions += short_to_byte2(qclass)
    log_print("buildQuestion:: qclass:{}".format(qclass))
    log_print("buildQuestion:: DNSquestion:\n{}".format(questions))
    return questions
def start(self):
    """ run the server; blocking, one request at a time """
    # read args, and setups
    self.read_args()
    sock_dns = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock_dns.bind(config.server_addr)
    log_print("server listening on {}:{}".format(*config.server_addr))
    sock_dns.listen(10)
    while True:
        sock_client, client_addr = sock_dns.accept()
        log_print('connection from {} at socket {}'.format(
            client_addr, sock_client))
        req_header = pr.DNSHeader()
        req_question = pr.DNSQuestion()
        try:
            request = sock_client.recv(4096)
            log_print("request:{}".format(request))
            # parsing
            idx = 0
            idx = req_header.getHeader(request, idx)
            idx = req_question.getQuestion(request, idx)
        except Exception as ex:
            log_err("socket exception:{}".format(ex))
            sock_client.close()
            continue
        # processing request
        res_ip = None
        if self.geo_based:
            res_ip = self.servers[client_addr[0]]
        else:
            res_ip = self.getip_round_robin()
        res_header = pr.DNSHeader()
        res_question = pr.DNSQuestion()
        res_record = pr.DNSRecord()
        # pre-set response flags: QR=1, Opcode=0, AA=1, TC=0, RD=0, RA=0, Z=0, RCODE=0
        res_flags = [1, 0, 1, 0, 0, 0, 0, 0]
        res_header = res_header.buildHeader(req_header.ID, res_flags, 1, 1, 0, 0)
        res_question = res_question.buildQuestion(req_question.QNAME,
                                                  req_question.QTYPE,
                                                  req_question.QCLASS)
        res_record = res_record.buildRecord(req_question.QNAME, res_ip)
        response = res_header + res_question + res_record
        sock_client.sendall(response)
        sock_client.close()
def getQuestion(self, bytes, idx=0):
    """ get question info from bytes
        @return -1=Error idx=OK
    """
    self.QNAME, idx = get_qname(bytes, idx)
    log_print('getQuestion:: domain name :{}'.format(self.QNAME))
    # getting QTYPE
    self.QTYPE, idx = byte2_to_short(bytes, idx)
    if self.QTYPE != 1:
        log_err("getQuestion:: wrong QTYPE:{}".format(self.QTYPE))
        return -1
    log_print("getQuestion:: QTYPE :{}".format(self.QTYPE))
    # getting QCLASS
    self.QCLASS, idx = byte2_to_short(bytes, idx)
    if self.QCLASS != 1:
        log_err("getQuestion:: wrong QCLASS:{}".format(self.QCLASS))
        return -1
    log_print("getQuestion:: QCLASS :{}".format(self.QCLASS))
    return idx
def extract_cotations(index_list, previous_date):
    """ Return the synthetic indexes and associated email """
    # If there is no more recent Trading day, abort
    if previous_date >= da.get_last_date():
        log_print("Pas de nouvelle cotation, arrêt de la procédure")
        return
    # Else compute
    df = da.get_data(index_list)
    # Integration of a synthetic value to compute trends for all indexes
    syn_df = df.groupby('Trading_day', as_index=False,
                        sort=False)['Calendar+1'].mean()
    syn_df['Calendar+2'] = df.groupby('Trading_day', as_index=False,
                                      sort=False)['Calendar+2'].mean()['Calendar+2']
    syn_df['Calendar+3'] = df.groupby('Trading_day', as_index=False,
                                      sort=False)['Calendar+3'].mean()['Calendar+3']
    syn_df['Synthetic'] = (syn_df['Calendar+1'] + syn_df['Calendar+2'] +
                           syn_df['Calendar+3']) / 3
    # Sort by descending date; the following operations rely on this ordering
    syn_df = syn_df.sort_values('Trading_day', ascending=False)
    log_print("Synthetic index ready")
    # Price at the time of the call for tenders
    origin_synthetic = round(syn_df.loc[syn_df['Trading_day'] == '2018-06-06']
                             ['Synthetic'].item(), 2)
    # Last available price, normally the previous day's closing
    last_synthetic = round(syn_df.iloc[0]['Synthetic'].item(), 2)  # valid since syn_df is sorted
    # Evaluation of the price evolution trend
    tendance = 'BAISSE'
    previous_synthetic = round(syn_df.iloc[1]['Synthetic'].item(), 2)
    if last_synthetic > previous_synthetic:
        tendance = 'HAUSSE'
    tendance_pct = round((last_synthetic - previous_synthetic) / previous_synthetic * 100, 2)
    texte = "\nRappel: L'index de prix synthétique est une moyenne sur les hubs " +\
            "PEG Nord et TTF à partir des produits Cal+1, Cal+2 et Cal+3.\n " +\
            "Les prix sont obtenus via Powernext avec un jour de décalage et l'index est un " +\
            "proxy pour les variations du prix de la molécule des offres fournisseurs.\n" +\
            f"\nIndex du 6 juin : <b>{origin_synthetic} €/MWh</b>\n" +\
            f"Index à date : <b>{last_synthetic} €/MWh</b>\n" +\
            "Ecart de l'index à date vs l'index de l'appel d'offre du 6 juin : " +\
            f"<b>{round((last_synthetic - origin_synthetic)/origin_synthetic*100, 2)}%</b>\n" +\
            f"\nTendance de l'index par rapport à la cotation précédente : {tendance} de " +\
            f"<b>{abs(tendance_pct)}%</b>"
    # Computation of graphics
    fig = plt.figure()
    plt.plot(syn_df['Trading_day'], syn_df['Synthetic'])
    plt.suptitle("Evolution du prix de l'index gazier", fontsize=15)
    plt.xlabel('Date', fontsize=13)
    plt.ylabel('€/MWh', fontsize=13)
    # Size
    fig.set_size_inches(9, 6)
    # Annotate
    plt.annotate("Appel d'offre du 6 juin 2018",
                 xy=(date(2018, 6, 6), origin_synthetic), xycoords='data',
                 xytext=(-90, -50), textcoords='offset points', fontsize=10,
                 arrowprops=dict(arrowstyle="->", connectionstyle="arc3,rad=.2"))
    picture_path = os.path.join(FILE_PATH, 'Data', 'img.png')
    fig.savefig(picture_path)
    #plt.show()
    # Emails sending
    for receiver in RECEIVERS_EMAIL_LIST:
        send_html_email(receiver,
                        f"Evolution prix du gaz: {last_synthetic}€/MWH en {tendance}",
                        texte, picture_path)
    log_print("Emails sent")
if __name__ == "__main__":
    try:
        log_print("Démarrage de la procédure")
        # Get the last Trading day previously inserted
        PREVIOUS_DATE = da.get_last_date()
        # Fetch the data and insert it into the database
        fetch_cotations(INDEX_LIST)
        extract_cotations(INDEX_LIST, PREVIOUS_DATE)
        log_print("Fin de la procédure")
    except Exception as exception:
        log_print("Erreur dans la procédure : " + str(exception))
        send_email(SUPPORT_EMAIL,
                   "Le processus de récupération des données gaz a planté",
                   "L'erreur est :" + str(exception))
def parse_geo_based(self, file_name):
    """ parse the geo based input """
    lines = []
    try:
        with open(file_name, 'r') as f:
            lines = f.readlines()
    except Exception as ex:
        log_err("parse geo cannot open file {}, exception:{}".format(
            file_name, ex))
        return None
    idx = num_nodes = num_links = 0
    client_ip = {}
    server_ip = {}
    costs = []
    # read num nodes
    num_nodes = int(lines[idx].split(' ')[1])
    idx += 1
    # read nodes info
    for i in range(num_nodes):
        costs.append([1000] * num_nodes)  # build costs
        (id, node_type, ip) = lines[idx].strip().split(' ')
        id = int(id)
        if node_type == "CLIENT":
            client_ip[id] = ip
        elif node_type == "SERVER":
            server_ip[id] = ip
        idx += 1
    log_print("client ip:\n{}\nserver ip:\n{}".format(client_ip, server_ip))
    # read num links
    num_links = int(lines[idx].split(' ')[1])
    idx += 1
    for i in range(num_links):
        (node1, node2, cost) = [int(n) for n in lines[idx].split(' ')]
        costs[node1][node2] = cost
        costs[node2][node1] = cost
        idx += 1
    log_matrix(costs)
    # run floyd-warshall shortest path
    for i in range(num_nodes):
        for x in range(num_nodes):
            for y in range(num_nodes):
                if (x != y and costs[x][y] > (costs[x][i] + costs[i][y])):
                    costs[x][y] = costs[x][i] + costs[i][y]
                    costs[y][x] = costs[x][i] + costs[i][y]
    log_matrix(costs)
    result_ip = {}  # [client ip] -> closest server ip
    for cid in client_ip:  # for each client ip
        cip = client_ip[cid]
        mincost = 1000
        for i in range(num_nodes):  # get closest server ip
            if costs[cid][i] < mincost and i in server_ip:
                mincost = costs[cid][i]        # update mincost
                result_ip[cip] = server_ip[i]  # store server ip
    # print result_ip
    for cip in result_ip:
        log_print("client {} closest to server {}".format(
            cip, result_ip[cip]))
    return result_ip
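# The expected input format is inferred from the parser above: a node-count line,
# one "id TYPE ip" line per node, a link-count line, then one "node1 node2 cost"
# line per link (the first token of each count line is ignored). A hypothetical
# example file:
#
#   NUM_NODES 3
#   0 CLIENT 10.0.0.1
#   1 SERVER 10.0.1.1
#   2 SERVER 10.0.2.1
#   NUM_LINKS 2
#   0 1 5
#   0 2 2
#
# With these costs, client 10.0.0.1 would be mapped to server 10.0.2.1.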
def getip_round_robin(self):
    """ return the round-robin ip """
    ip_rr = self.servers[self.rr_counter]
    self.rr_counter = (self.rr_counter + 1) % len(self.servers)
    log_print("round-robin ip: {}".format(ip_rr))
    return ip_rr