def loadTrainingData(self, batch_no, max_step):
    cnx = connect()
    try:
        nc_cursor = cnx.cursor(buffered=True)
        nc_cursor.execute(nclsQry)
        row = nc_cursor.fetchone()
        nclass = int(row[0])
        nc_cursor.close()
        cursor = cnx.cursor(buffered=True)
        query = ('SELECT '
                 '   uuid, code, klid, score '
                 'FROM '
                 '   kpts30 '
                 'WHERE '
                 "   flag = 'TRN_{}'")
        cursor.execute(query.format(batch_no))
        kpts = cursor.fetchall()
        cursor.close()
        # fetch each k-point's feature series in parallel
        r = Parallel(n_jobs=num_cores)(delayed(getSeries)(
            uuid, code, klid, score, nclass, max_step, self.time_shift)
            for uuid, code, klid, score in kpts)
        uuids, data, labels, seqlen = zip(*r)
        # data = [batch, max_step, feature*time_shift]
        # labels = [batch, label] one-hot labels
        # seqlen = [batch]
        return np.array(uuids), np.array(data), np.array(labels), np.array(seqlen)
    except Exception:
        print(sys.exc_info()[0])
        raise
    finally:
        cnx.close()
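# Standalone sketch of the joblib fan-out/regroup pattern used above: each
# worker returns a tuple, and zip(*results) turns the list of tuples into
# per-field sequences. Toy worker; the real code calls getSeries with
# database-backed arguments.
from joblib import Parallel, delayed

def worker(i):
    return 'uuid-%d' % i, [i, i + 1], i % 2, 2  # (uuid, data, label, seqlen)

results = Parallel(n_jobs=2)(delayed(worker)(i) for i in range(4))
uuids, data, labels, seqlen = zip(*results)
print(uuids)  # ('uuid-0', 'uuid-1', 'uuid-2', 'uuid-3')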
def change_signal_config(host, port, login, password, nodeid, portid,
                         signalid, wavelength, mode):
    connection = d.connect(host, port, login, password)  # connection to NETCONF server
    namespace = '''<sdm_node xmlns="urn:cttc:params:xml:ns:yang:sdm_node">'''
    signal_config = '''
    <?xml version="1.0" encoding="UTF-8"?>
    <config xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
        <sdm_node xmlns="urn:cttc:params:xml:ns:yang:sdm_node">
            <node-id>''' + nodeid + '''</node-id>
            <port>
                <port-id>''' + portid + '''</port-id>
                <signal>
                    <signal-id>''' + signalid + '''</signal-id>
                    <wavelength>''' + wavelength + '''</wavelength>
                    <mode>''' + mode + '''</mode>
                </signal>
            </port>
        </sdm_node>
    </config>
    '''
    try:
        d.edit_config(connection, signal_config, session_running, operation_replace)  # edit configuration
        print("node configuration edited\nnew configuration:")
        d.get_config(connection, namespace, session_running)  # get node configuration
    except Exception as e:
        print(e)
    finally:
        connection.close_session()
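# Alternative sketch (not from the source): building the same <config>
# payload with xml.etree.ElementTree instead of string concatenation, so
# parameter values are escaped automatically. Element names mirror the
# YANG model used above.
import xml.etree.ElementTree as ET

def build_signal_config(nodeid, portid, signalid, wavelength, mode):
    config = ET.Element('config', xmlns='urn:ietf:params:xml:ns:netconf:base:1.0')
    node = ET.SubElement(config, 'sdm_node', xmlns='urn:cttc:params:xml:ns:yang:sdm_node')
    ET.SubElement(node, 'node-id').text = nodeid
    port = ET.SubElement(node, 'port')
    ET.SubElement(port, 'port-id').text = portid
    signal = ET.SubElement(port, 'signal')
    for tag, value in (('signal-id', signalid), ('wavelength', wavelength), ('mode', mode)):
        ET.SubElement(signal, tag).text = value
    return ET.tostring(config, encoding='unicode')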
def getBatch(code, s, e, max_step, time_shift):
    '''Return a [max_step, feature*time_shift] array and its valid length.'''
    cnx = connect()
    fcursor = cnx.cursor(buffered=True)
    global ftQuery
    try:
        fcursor.execute(ftQuery, (code, code, s, e, max_step + time_shift))
        col_names = fcursor.column_names
        featSize = len(col_names)
        total = fcursor.rowcount
        rows = fcursor.fetchall()
        batch = []
        for t in range(time_shift + 1):
            steps = np.zeros((max_step, featSize), dtype='f')
            offset = max_step + time_shift - total
            # note: s and e are reused here as slice bounds for this shift
            s = max(0, t - offset)
            e = total - time_shift + t
            for i, row in enumerate(rows[s:e]):
                steps[i + offset] = [col for col in row]
            batch.append(steps)
        # stack the time-shifted copies along the feature axis
        return np.concatenate(batch, 1), total - time_shift
    except Exception:
        print(sys.exc_info()[0])
        raise
    finally:
        fcursor.close()
        cnx.close()
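# Standalone illustration of the left-padding in getBatch: with fewer than
# max_step rows available, offset right-aligns the series in a zero matrix
# so the most recent step is always last. Toy numbers, no database.
import numpy as np

max_step, time_shift, feat = 5, 1, 3
rows = np.arange(4 * feat, dtype='f').reshape(4, feat)  # only 4 rows fetched
total = len(rows)
offset = max_step + time_shift - total  # = 2

steps = np.zeros((max_step, feat), dtype='f')
for i, row in enumerate(rows[0:total - time_shift]):  # the t = 0 slice
    steps[i + offset] = row
print(steps)  # rows 0-1 remain zero padding; fetched rows fill rows 2-4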
def change_signal_config(host, port, login, password, nodeid, location, componentid,
                         param1, param2, param3, param4, wdmid, portid, signalid,
                         wavelength, mode, core):
    connection = d.connect(host, port, login, password)  # connection to NETCONF server
    namespace = '''<bluespace-node xmlns="urn:cttc:params:xml:ns:yang:bluespace_node">'''
    component_config = '''
    <?xml version="1.0" encoding="UTF-8"?>
    <config xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
        <bluespace-node xmlns="urn:cttc:params:xml:ns:yang:bluespace_node">
            <bluespace-node-id>''' + nodeid + '''</bluespace-node-id>
            <location>''' + location + '''</location>
            <components>
                <component-id>''' + componentid + '''</component-id>
                <analog-rof>
                    <param1>''' + param1 + '''</param1>
                </analog-rof>
                <digital-rof>
                    <param2>''' + param2 + '''</param2>
                </digital-rof>
                <optical-beam-forming>
                    <param3>''' + param3 + '''</param3>
                </optical-beam-forming>
                <ethernet>
                    <param4>''' + param4 + '''</param4>
                </ethernet>
                <sdm-wdm>
                    <wdm-id>''' + wdmid + '''</wdm-id>
                    <port>
                        <port-id>''' + portid + '''</port-id>
                        <signal>
                            <signal-id>''' + signalid + '''</signal-id>
                            <wavelength>''' + wavelength + '''</wavelength>
                            <mode>''' + mode + '''</mode>
                            <core>''' + core + '''</core>
                        </signal>
                    </port>
                </sdm-wdm>
            </components>
        </bluespace-node>
    </config>
    '''
    try:
        d.edit_config(connection, component_config, session_running, operation_replace)  # edit configuration
        print("node configuration edited\nnew configuration:")
        d.get_config(connection, namespace, session_running)  # get node configuration
    except Exception as e:
        print(e)
    finally:
        connection.close_session()
def loadTestSet(max_step):
    cnx = connect()
    try:
        nc_cursor = cnx.cursor(buffered=True)
        nc_cursor.execute(nclsQry)
        row = nc_cursor.fetchone()
        nclass = int(row[0])
        print('{} num class: {}'.format(strftime("%H:%M:%S"), nclass))
        nc_cursor.close()
        cursor = cnx.cursor(buffered=True)
        # pick one TEST_* flag at random
        pick = ("SELECT "
                "   distinct flag "
                "FROM "
                "   kpts "
                "WHERE "
                "   flag LIKE 'TEST\\_%' "
                "ORDER BY RAND() "
                "LIMIT 1")
        cursor.execute(pick)
        row = cursor.fetchone()
        print('{} selected test set: {}'.format(strftime("%H:%M:%S"), row[0]))
        query = ("SELECT "
                 "   uuid, code, klid, score "
                 "FROM "
                 "   kpts "
                 "WHERE "
                 "   flag = '{}' ")
        cursor.execute(query.format(row[0]))
        kpts = cursor.fetchall()
        cursor.close()
        data = []    # [batch, max_step, feature*time_shift]
        labels = []  # [batch, label] one-hot labels
        seqlen = []  # [batch]
        uuids = []
        for (uuid, code, klid, score) in kpts:
            uuids.append(uuid)
            label = np.zeros(nclass, dtype=np.int8)
            label[int(score) + nclass // 2] = 1
            labels.append(label)
            s = max(0, klid - max_step + 1 - TIME_SHIFT)
            batch, total = getBatch(cnx, code, s, klid, max_step)
            data.append(batch)
            seqlen.append(total)
        return uuids, np.array(data), np.array(labels), np.array(seqlen)
    except Exception:
        print(sys.exc_info()[0])
        raise
    finally:
        cnx.close()
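# Standalone sketch of the one-hot encoding used above: scores are centred
# around zero, so int(score) + nclass // 2 shifts them into the index range
# [0, nclass). Pure numpy, no database required.
import numpy as np

def one_hot_score(score, nclass):
    label = np.zeros(nclass, dtype=np.int8)
    label[int(score) + nclass // 2] = 1
    return label

assert one_hot_score(0, 21)[10] == 1   # score 0 lands in the middle bucket
assert one_hot_score(-10, 21)[0] == 1  # most negative score is index 0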
def main():
    parser = ArgumentParser()
    parser.add_argument('type', choices=list(recorders))
    parser.add_argument('area')
    parser.add_argument('value')
    parser.add_argument('--at', default=Timestamp.now(), type=Timestamp)
    args = parser.parse_args()
    recorder = recorders[args.type]
    session = Session(connect(), future=True)
    # allow symbolic values such as tank levels from config
    value = float(config.tanks.get(args.value, args.value))
    recorder(session, args.area, value, args.at)
    session.commit()
    print(f'recorded {args.area} {args.type} of {value} at {args.at}')
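# Hypothetical command-line usage (the script name and the keys of the
# recorders mapping are defined elsewhere in this project):
#
#   $ record water tanks 1500 --at 2021-06-01T09:00
#   recorded tanks water of 1500.0 at 2021-06-01 09:00:00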
def loadTestSet(self, max_step):
    global num_cores
    cnx = connect()
    try:
        nc_cursor = cnx.cursor(buffered=True)
        nc_cursor.execute(nclsQry)
        row = nc_cursor.fetchone()
        nclass = int(row[0])
        print('{} num class: {}'.format(strftime("%H:%M:%S"), nclass))
        nc_cursor.close()
        cursor = cnx.cursor(buffered=True)
        # pick one TEST_* flag at random
        pick = ("SELECT "
                "   distinct flag "
                "FROM "
                "   kpts30 "
                "WHERE "
                "   flag LIKE 'TEST\\_%' "
                "ORDER BY RAND() "
                "LIMIT 1")
        cursor.execute(pick)
        row = cursor.fetchone()
        print('{} selected test set: {}'.format(strftime("%H:%M:%S"), row[0]))
        query = ("SELECT "
                 "   uuid, code, klid, score "
                 "FROM "
                 "   kpts30 "
                 "WHERE "
                 "   flag = '{}' ")
        cursor.execute(query.format(row[0]))
        kpts = cursor.fetchall()
        cursor.close()
        r = Parallel(n_jobs=num_cores)(delayed(getSeries)(
            uuid, code, klid, score, nclass, max_step, self.time_shift)
            for uuid, code, klid, score in kpts)
        uuids, data, labels, seqlen = zip(*r)
        # data = [batch, max_step, feature*time_shift]
        # labels = [batch, label] one-hot labels
        # seqlen = [batch]
        return np.array(uuids), np.array(data), np.array(labels), np.array(seqlen)
    except Exception:
        print(sys.exc_info()[0])
        raise
    finally:
        cnx.close()
def create_node_config(host, port, login, password, config_file, session, operation, namespace):
    connection = d.connect(host, port, login, password)  # connection to NETCONF server
    try:
        # with-statement ensures the file is closed even if edit_config fails
        with open(config_file) as f:
            d.edit_config(connection, f.read(), session, operation)  # create node configuration
        print("new node configuration created\nnew configuration:")
        d.get_config(connection, namespace, session)  # get node configuration
    except Exception as e:
        print(e)
    finally:
        connection.close_session()
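# Hedged usage sketch for create_node_config, reusing the datastore/operation
# constants and the sdm_node namespace filter defined in the test module at
# the end of this file; 'node_config.xml' is a hypothetical file name.
#
#   create_node_config('10.1.7.67', 830, 'root', 'netlabN.',
#                      'node_config.xml', session_running,
#                      operation_replace, filter3)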
def loadTrainingData(batch_no, max_step):
    cnx = connect()
    try:
        nc_cursor = cnx.cursor(buffered=True)
        nc_cursor.execute(nclsQry)
        row = nc_cursor.fetchone()
        nclass = int(row[0])
        nc_cursor.close()
        cursor = cnx.cursor(buffered=True)
        query = ('SELECT '
                 '   uuid, code, klid, score '
                 'FROM '
                 '   kpts '
                 'WHERE '
                 "   flag = 'TRN_{}'")
        cursor.execute(query.format(batch_no))
        kpts = cursor.fetchall()
        cursor.close()
        data = []    # [batch, max_step, feature*time_shift]
        labels = []  # [batch, label] one-hot labels
        seqlen = []  # [batch]
        uuids = []
        for (uuid, code, klid, score) in kpts:
            uuids.append(uuid)
            label = np.zeros(nclass, dtype=np.int8)
            label[int(score) + nclass // 2] = 1
            labels.append(label)
            s = max(0, klid - max_step + 1 - TIME_SHIFT)
            batch, total = getBatch(cnx, code, s, klid, max_step)
            data.append(batch)
            seqlen.append(total)
        return uuids, np.array(data), np.array(labels), np.array(seqlen)
    except Exception:
        print(sys.exc_info()[0])
        raise
    finally:
        cnx.close()
def main():
    parser = ArgumentParser()
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--height', type=int,
                       help='height to drop in tank, defaults to current down to min')
    group.add_argument('--no-refill', action='store_false', dest='refill')
    args = parser.parse_args()

    session = Session(connect(), future=True)
    refilling_mm = 0
    if args.height:
        mm = args.height
        print(f'Using {mm}mm of tank water with no refilling:')
    else:
        tank_levels = session.query(Water.mm).filter_by(area_name='tanks')
        current = tank_levels.order_by(Water.timestamp.desc()).limit(1).scalar()
        if args.refill:
            # above the tap level water can be drawn freely; between tap and
            # min it is drawn while the tap refills the tank
            mm = current - config.tanks.tap if current > config.tanks.tap else 0
            refilling_mm = min(current, config.tanks.tap) - config.tanks.min
            print(f'Starting from {current} mm, '
                  f'using {mm}mm without refilling, {refilling_mm}mm while refilling:')
        else:
            mm = current - config.tanks.min
            print(f'Starting from {current} mm, using {mm}mm:')

    tank_area = session.query(Area.size).filter_by(name='tanks').scalar()
    volume_without_refill = mm_to_m3(mm, tank_area)
    volume_with_refill = mm_to_m3(refilling_mm, tank_area)
    refill_rate = mm_to_m3(config.tanks.refill, tank_area)

    for area in session.query(Area).where(Area.irrigation_rate > 0):
        area_rate = mm_to_m3(area.irrigation_rate, area.size)
        runtime = 0
        runtime += volume_without_refill / area_rate
        runtime += volume_with_refill / (area_rate - refill_rate)
        print(f'Can water {area.name} for {runtime:.0f} mins.')
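# Minimal sketch of the mm_to_m3 helper assumed above (not shown in this
# file): a depth in millimetres spread over an area in square metres gives
# a volume in cubic metres.
def mm_to_m3(mm, area_m2):
    return mm / 1000 * area_m2

assert abs(mm_to_m3(10, 50) - 0.5) < 1e-12  # 10 mm over 50 m2 is 0.5 m3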
def main(args=None):
    now = datetime.now()
    parser = ArgumentParser()
    parser.add_argument('dataset')
    parser.add_argument('variables', nargs='*')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--start', type=pd.Timestamp)
    group.add_argument('--days', type=int)
    parser.add_argument('--end', type=pd.Timestamp, default=now)
    parser.add_argument('--debug', action='store_true')
    parser.add_argument('--force', action='store_true')
    args = parser.parse_args(args)

    start = ((pd.Timestamp.now() - timedelta(days=args.days)).floor('D')
             if args.days else args.start)
    session = Session(connect(), future=True)

    parameter_sets = []
    if args.dataset == 'config':
        for dataset, variables in config.observations.items():
            if start:
                dataset_start = start
            else:
                # resume from the second after the latest stored observation
                rows = session.execute(
                    select(Observation.variable, func.max(Observation.timestamp)).
                    where(Observation.variable.in_(variables)).
                    group_by(Observation.variable)
                )
                try:
                    dataset_start = min(row[1] for row in rows) + timedelta(seconds=1)
                except ValueError:
                    parser.error(f'Need to explicitly download {dataset} at least once!')
                    raise
            parameter_sets.append((dataset, variables.data, dataset_start, args.end))
    else:
        if not args.variables:
            parser.error('variables must be specified')
        parameter_sets.append(
            (args.dataset, args.variables, start or now - timedelta(days=7), args.end))

    for parameter_set in parameter_sets:
        sync(session, *parameter_set, debug=args.debug, force=args.force)
def main():
    now = datetime.now()
    parser = ArgumentParser()
    parser.add_argument('--start', default=now - timedelta(days=7), type=parse_date)
    parser.add_argument('--end', default=now, type=parse_date)
    args = parser.parse_args()

    download(['config'])
    session = Session(connect(), future=True)
    print()
    rain = reading_uni_rainfall(session, args.start, args.end)

    tank_current = config.tanks.tap
    tank_pct = tank_current / config.tanks.max
    print(f'Tanks currently at {tank_current:.0f}mm ({tank_pct:.0%})')

    tank_size = session.query(Area.size).filter_by(name='tanks').scalar()
    tank_volume = mm_to_m3(tank_current, tank_size)
    tank_tap = mm_to_m3(config.tanks.tap, tank_size)
    tank_min = mm_to_m3(config.tanks.min, tank_size)
    refill_rate = mm_to_m3(config.tanks.refill, tank_size)

    print()
    for area in session.query(Area).where(Area.irrigation_rate > 0):
        print_mm_still_needed(area.name, area.irrigation_rate, rain)
        area_rate = mm_to_m3(area.irrigation_rate, area.size)
        runtime = 0
        if tank_volume > tank_tap:
            runtime += (tank_volume - tank_tap) / area_rate
        runtime += (max(tank_volume, tank_tap) - tank_min) / (area_rate - refill_rate)
        print(f'Tank can support {runtime:.0f} mins watering.')
    if total_content != '':
        if post_count > 1:
            subject = "{} New Posts from {}".format(post_count, subject_subreddits)
        else:
            subject = "New Post from " + subject_subreddits
        log("subject: " + subject)
        log("to: " + username)
        log("---")
        log(debug_content)
        reddit.send_pm(subject, total_content, username, account)


if __name__ == '__main__':
    account = reddit.connect()
    db = data.connect()
    most_recent_time = data.get_most_recent_time()
    interval = 30
    while True:
        reddit.update_users(db, account)
        global_sub_list = data.subreddits(db)
        subs = data.subreddits(db)
        user_list = data.users(db)
        if global_sub_list:
            try:
                posts_by_subreddit = reddit.fetch_posts(subs, account)
            except Exception:
                print("Error fetching posts.")
                posts_by_subreddit = {}  # avoid a NameError downstream; treat as no posts this round
            posts_by_subreddit_by_users = {}
            most_recent_time, posts_by_subreddit = get_new_posts(
                posts_by_subreddit, most_recent_time)
def main(args, locationApp):
    config = json.load(args.config)

    def normalizeUrl(url):
        if not url:
            return None
        if '://' not in url:
            url = 'http://' + url
        scheme, netloc, path, params, query, fragment = urlparse(url)
        return '%s://%s/%s%s' % (scheme, netloc.rstrip('/'), path.lstrip('/'),
                                 '?' + query if query else '')

    def queryResults():
        reader = csv.reader(args.organizations)
        next(reader)  # skip header row
        return [(name, int(forks)) for name, forks in reader]

    def obeyRateLimit(headers):
        remaining = int(headers['X-RateLimit-Remaining'])
        reset = int(headers['X-RateLimit-Reset'])
        if remaining < 3:
            tts = reset - time.time() + 1
            print('sleep until {time:%H:%M:%S} due to GitHub rate limit'.format(
                time=datetime.today() + timedelta(seconds=tts)))
            time.sleep(tts)

    def createOrganization(name, forks):
        request = urllib.request.urlopen(
            'https://api.github.com/orgs/%s?access_token=%s' % (name, config['access_token']))
        obeyRateLimit(request.headers)
        response = json.load(request)
        return Organization(
            id=response['id'],
            name=response['login'],
            location=response.get('location', None),
            public_repos=response.get('public_repos', 0),
            forks_2014=forks,
            url_github=response.get('html_url', None),
            url_site=normalizeUrl(response.get('blog', None)),
            created_at=parse(response['created_at']),
            suggested_country=locationApp.countryCode(response.get('location', None)),
        )

    db = data.connect(config['db'])
    with db.session(autocommit=True) as session:
        disappeared = set(v for v, in session.query(NotFound.name).all())
        rows = queryResults()
        for pos, (name, forks) in enumerate(rows, start=1):
            startTime = time.time()
            org = session.query(Organization).filter(
                func.lower(Organization.name) == name).first()
            if org:
                org.forks_2014 = forks
                org.suggested_country = locationApp.countryCode(org.location)
            elif name not in disappeared:
                try:
                    org = createOrganization(name, forks)
                    session.add(org)
                except urllib.error.HTTPError as e:
                    if e.code != 404:
                        raise
                    print(e)
                    session.add(NotFound(name=name))
            elapsed = (time.time() - startTime) * 1000
            print('%d. %s, elapsed %dms, left %d' % (pos, name, elapsed, len(rows) - pos))
'''
project: Lockbox
file: lockbox.py
authors: david rademacher & welton king v
desc: a credentials storage app
'''
import cmd, data
from classes import *

# variables
state = states.START_UP
profile = user()
conn = data.connect()


# handles login and account creation
def start():
    global state, profile, conn
    logged = False
    while not logged:
        inp = cmd.start()
        if inp == 'c':
            profile = data.create_account(conn, cmd.get_profile())  # create
        if inp == 'l':
            profile = data.verify_account(conn, cmd.get_profile())  # login
        if inp == 'q':
            cmd.quit()       # quit
            data.quit(conn)
        logged = cmd.login_response(profile)
    state = states.MAIN_MENU
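# Hedged sketch of the minimal surface lockbox.py expects from classes.py:
# a states enum and a user record. The member names below appear in the
# source; the user fields are illustrative guesses, not taken from it.
from enum import Enum, auto
from dataclasses import dataclass

class states(Enum):
    START_UP = auto()
    MAIN_MENU = auto()

@dataclass
class user:
    username: str = ''
    password_hash: str = ''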
""" This module implements the main test of configuration methods to manipulate a NETCONF server Copyright (c) 2017-2018 Laura Rodriguez Navas <*****@*****.**> """ import data as t if __name__ == '__main__': host = '10.1.7.67' port = 830 connection = t.connect(host, port, 'root', 'netlabN.') # connection to NETCONF server filter1 = '''<turing-machine xmlns="http://example.net/turing-machine">''' # model turing-machine filter2 = '''<interfaces xmlns="urn:ietf:params:xml:ns:yang:ietf-interfaces">''' # model ietf-interfaces filter3 = '''<sdm_node xmlns="urn:cttc:params:xml:ns:yang:sdm_node">''' # model sdm_node filter4 = '''<transceiver xmlns="urn:sliceable-transceiver-sdm">''' # model sliceable-transceiver-sdm # datastore sessions session_startup = 'startup' session_running = 'running' session_candidate = 'candidate' # operations operation_merge = 'merge' operation_replace = 'replace' edit_data = ''' <config xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:nc="urn:ietf:params:xml:ns:netconf:base:1.0"> <turing-machine xmlns="http://example.net/turing-machine"> <transition-function>