def main2():
    """Print, per committee, each member's membership duration in years."""
    rels = list(Es.query_relations(
        -1, Es.by_name('comms').get_bearers(), None, now(),
        deref_who=True, deref_with=True))
    lut = {}
    for rel in rels:
        # Relations without a start date are treated as starting at DT_MIN.
        if rel['from'] is None:
            rel['from'] = Es.DT_MIN
        if rel['with'] not in lut:
            lut[rel['with']] = {}
        v = (now() - rel['from']).days / 365.0
        # Keep the smallest duration seen per (committee, member) pair.
        if rel['who'] not in lut[rel['with']] or lut[rel['with']][rel['who']] > v:
            lut[rel['with']][rel['who']] = v
    for comm, members in lut.items():
        print(six.text_type(comm.humanName))
        # Bug fix: dict.items() is a view on Python 3 and has no .sort();
        # use sorted() instead.
        for member, time in sorted(members.items(), key=lambda x: x[1]):
            print(' %-20s%.2f' % (member.name, time))
def main2():
    """Print, per committee, each member's membership duration in years."""
    rels = list(Es.query_relations(
        -1, Es.by_name('comms').get_bearers(), None, now(),
        deref_who=True, deref_with=True))
    lut = {}
    for rel in rels:
        # Relations without a start date are treated as starting at DT_MIN.
        if rel['from'] is None:
            rel['from'] = Es.DT_MIN
        if rel['with'] not in lut:
            lut[rel['with']] = {}
        v = (now() - rel['from']).days / 365.0
        # Keep the smallest duration seen per (committee, member) pair.
        if rel['who'] not in lut[rel['with']] or lut[rel['with']][rel['who']] > v:
            lut[rel['with']][rel['who']] = v
    for comm, members in lut.items():
        print(six.text_type(comm.humanName))
        # Bug fix: dict.items() is a view on Python 3 and has no .sort();
        # use sorted() instead.
        for member, time in sorted(members.items(), key=lambda x: x[1]):
            print(' %-20s%.2f' % (member.name, time))
def insert_video_info(video_info_list, file_name):
    """Build and execute an INSERT for one video's metadata row.

    video_info_list layout (produced by get_video_info): [channelTitle,
    channelId, publishedAt, videoId, title, description, duration].

    WARNING(review): the SQL is assembled with str.format, so quotes in
    title/description break the statement and allow SQL injection.
    Switch to a parameterized query once insert_data's interface allows it.
    """
    # publishedAt ISO timestamp, e.g. "2020-01-02T03:04:05Z".
    _ = video_info_list[2]
    insert_sql = '''INSERT INTO `video_info`( site, channel_id, published_at, video_id, title, description, custom_description, duration, user, created_at, modified_at, extension ) VALUES ("{}", "{}", "{}", "{}", "{}", "{}", "{}", "{}", "{}", "{}", "{}", "{}")'''.format(
        "YT",  # site
        video_info_list[1],  # channel_id
        # Replace the 'T' separator with a space and drop the trailing 'Z'.
        _.translate(_.maketrans("T", " ", "Z")),  # published_at
        video_info_list[3],  # video_id
        video_info_list[4],  # title
        video_info_list[5],  # description
        "",  # custom_description
        common.d2s(video_info_list[6]),  # duration
        config["general"]["user"],  # user
        common.now(3),  # created_at
        common.now(3),  # modified_at
        # File extension without the leading dot.
        re.sub("[.]", "", os.path.splitext(file_name)[1])  # extension
    )
    insert_data(conn, insert_sql)
def dirs_size():
    """Collect sizes (MB) of configured directories via `du -s`.

    Returns a list of stat dicts; empty when the config section is absent.
    """
    sizes = []
    if not common.check_config_sections(['disks', 'dirs_size']):
        return sizes
    for directory in common.config['disks']['dirs_size']:
        if not os.path.exists(directory):
            common.process_exception("%s is not exists. skip..." % directory)
            # Bug fix: without this `continue` the code fell through and ran
            # `du` on the missing directory despite the "skip..." message.
            continue
        # NOTE(review): shell=True with an interpolated path is
        # injection-prone if directory names are untrusted.
        cmd = "du -s %s" % directory
        size = subprocess.Popen([cmd], stdout=subprocess.PIPE,
                                shell=True).communicate()[0].split()[0]
        size = common.kb_to_mb(size)
        date = common.now()
        print("DSIZE date: %s directory: %s size: %s" % (date, directory, size))
        sizes.append({"date": date, "t": "DSIZE", "d1": common.HOSTNAME,
                      "d2": directory, "V": size})
    return sizes
def apache_stats():
    """Collect request count and average duration per configured website log."""
    apache_logs_stats = []
    if not common.check_config_sections(['apache_logs']):
        return apache_logs_stats
    for website in common.config['apache_logs']:
        # Each entry is a one-key mapping: {website_name: config_dict}.
        # list(...) works on both Python 2 and 3 (keys()[0] was py2-only).
        website_name, website_config = list(website.items())[0]
        log_file_pattern = website_config.get('file', None)
        if not log_file_pattern:
            common.process_exception("no logfile pattern for website %s" % website)
            # Bug fix: previously fell through and called
            # process_logs(None, ...) after reporting the missing pattern.
            continue
        url_filter_string = website_config.get('url_filter', None)
        url_regex = re.compile(url_filter_string) if url_filter_string else None
        count, avg_time = process_logs(log_file_pattern, url_regex)
        date = common.now()
        print("LOGS date: %s website: %s count: %s duration: %s"
              % (date, website_name, count, avg_time))
        apache_logs_stats.extend([
            {'date': date, 't': 'LOG_REQUESTS-COUNT', 'd1': common.HOSTNAME,
             'd2': website_name, 'V': count},
            {'date': date, 't': 'LOG_REQUESTS-DURATION', 'd1': common.HOSTNAME,
             'd2': website_name, 'V': avg_time},
        ])
    return apache_logs_stats
def _create_modelinfo(from_path, dbname, db):
    """Load a model-info config file and upsert it into the MODELINFO table.

    Returns the generated uuid, or None when the file yields no data.
    """
    data = common.loadcfg(from_path)
    # Bug fix: uuid was unbound (NameError at `return uuid`) when the
    # loaded config was empty.
    uuid = None
    if data and len(data) > 0:
        data = {k.lower(): v for k, v in data.items()}
        ## TODO: empty data and other sanity checks
        created_on = common.now()
        timestamp = common.timestamp_from_datestring(created_on)
        uuid = common.createUUID('uuid')
        data['uuid'] = uuid
        data['created_on'] = created_on
        data['timestamp'] = timestamp
        data['filename'] = from_path.split(os.path.sep)[-1]
        data['filepath'] = from_path
        data['dbname'] = dbname
        data['rel_num'] = str(data['rel_num'])
        try:
            tblname = annonutils.get_tblname('MODELINFO')
            # annonutils.create_unique_index(db, tblname, 'created_on')
            annonutils.create_unique_index(db, tblname, 'weights_path')
            collection = db.get_collection(tblname)
            # $setOnInsert + upsert: insert once, never overwrite an
            # existing document with the same created_on.
            collection.update_one(
                {'created_on': data['created_on']},
                {'$setOnInsert': data},
                upsert=True)
        except pymongo.errors.PyMongoError as e:
            # Bug fix: only OperationFailure carries .details; a generic
            # PyMongoError would have raised AttributeError here.
            print(getattr(e, 'details', e))
    return uuid
def cpu_stats():
    """Collect cumulative CPU user+system and iowait times (milliseconds)."""
    cpu_times = psutil.cpu_times()
    user_system = common.s_to_milliseconds(cpu_times.user + cpu_times.system)
    iowait = common.s_to_milliseconds(cpu_times.iowait)
    date = common.now()
    # print() call form: identical output, valid on both Python 2 and 3
    # (was a py2-only print statement).
    print("CPU date: %s total: %s iowait: %s" % (date, user_system, iowait))
    return [
        {"date": date, "t": "CPU-IOWAIT", "d1": common.HOSTNAME, "V": iowait},
        {"date": date, "t": "CPU-TOTAL", "d1": common.HOSTNAME, "V": user_system},
    ]
def process_request(self, env, request, user_uuid=None, task_id=None,
                    item_id=None, params=None, ext=None):
    """Dispatch one client request and return a {'status', 'data'} dict.

    task_id 0 selects the admin task. `ext` requests bypass the session
    checks and operate on the task object itself.
    """
    #~ print ''
    #~ print 'process_request: ', request, user_uuid, task_id, item_id, params
    user_info = {}
    is_admin = task_id == 0
    if is_admin:
        task = self.admin
    else:
        task = self.get_task()
    # Reject everything while the server is in maintenance mode.
    if self.under_maintenance:
        return {'status': common.UNDER_MAINTAINANCE, 'data': None}
    # Login is handled before any session lookup; params = (login, psw_hash).
    if request == 'login':
        return {'status': common.RESPONSE,
                'data': self.login(params[0], params[1], is_admin, env)}
    if ext:
        obj = task
    else:
        # In safe mode, refresh the cached user table at most once per hour.
        if self.admin.safe_mode:
            now = common.now()
            if common.hour_diff(now - self.last_users_update) > 1:
                self.update_users()
        user_info = self.get_user_info(user_uuid, is_admin, env)
        if not user_info:
            return {'status': common.NOT_LOGGED, 'data': common.NOT_LOGGED}
        obj = task
        if task:
            obj = task.item_by_ID(item_id)
    return {'status': common.RESPONSE,
            'data': self.get_response(is_admin, env, request, user_info,
                                      task_id, obj, params, ext)}
def mem_stats():
    """Collect real (used minus buffers) and swap memory usage in MB."""
    mem = psutil.virtual_memory()
    real_used = common.b_to_mb(mem.used - mem.buffers)
    swap_used = common.b_to_mb(psutil.swap_memory().used)
    date = common.now()
    # print() call form: identical output on Python 2 and 3
    # (was a py2-only print statement).
    print("MEM date: %s used: %s swap_used: %s" % (date, real_used, swap_used))
    return [
        {"date": date, "t": "MEM-USED", "d1": common.HOSTNAME, "V": real_used},
        {"date": date, "t": "MEM-SWAP", "d1": common.HOSTNAME, "V": swap_used},
    ]
def network_stats():
    """Collect received/sent byte counters (MB) for configured interfaces."""
    stats = []
    if not common.check_config_sections(['networking', 'interfaces']):
        return stats
    # Older psutil releases exposed network_io_counters(); newer ones
    # renamed it to net_io_counters().
    if hasattr(psutil, 'network_io_counters'):
        counters = psutil.network_io_counters(pernic=True)
    else:
        counters = psutil.net_io_counters(True)
    for interface in common.CONFIG['networking']['interfaces']:
        counter = counters.get(interface, None)
        if not counter:
            common.process_exception(
                'cannot find counters for interface %s. skip..' % interface)
            continue
        date = common.now()
        mb_rcv = common.b_to_mb(counter.bytes_recv)
        mb_sent = common.b_to_mb(counter.bytes_sent)
        logging.info("NET date: %s interface: %s recv: %s sent: %s",
                     date, interface, mb_rcv, mb_sent)
        stats.append({"date": date, "t": "NET-RCV", "d1": common.HOSTNAME,
                      "d2": interface, "V": mb_rcv})
        stats.append({"date": date, "t": "NET-SENT", "d1": common.HOSTNAME,
                      "d2": interface, "V": mb_sent})
    return stats
def disks_stats():
    """Collect used space (MB) for each configured mount point."""
    usages = []
    if not common.check_config_sections(['disks', 'mount_points']):
        return usages
    for mount_point in common.config['disks']['mount_points']:
        try:
            fs_stats = psutil.disk_usage(mount_point)
        except OSError as e:
            # Unreadable/unmounted path: report and move on.
            common.process_exception(e)
            continue
        used = common.b_to_mb(fs_stats.used)
        date = common.now()
        # print() call form: identical output on Python 2 and 3.
        print("DISK date: %s mount_point: %s used: %s" % (date, mount_point, used))
        usages.append({"date": date, "t": "DISK-USAGE", "d1": common.HOSTNAME,
                       "d2": mount_point, "V": used})
    return usages
def create_modelinfo(args, cfg, db):
    """Create a MODELINFO record from the config file at args.from_path."""
    log.info("----------------------------->")
    from_path = args.from_path
    if not from_path:
        raise Exception('from_path not defined')
    if not os.path.exists(from_path) or not os.path.isfile(from_path):
        raise Exception('File does not exists: {}'.format(from_path))
    ##TODO: for the entire directory
    data = common.loadcfg(from_path)
    if data and len(data) > 0:
        # Normalize all keys to lower case.
        data = {key.lower(): val for key, val in data.items()}
        ## TODO: empty data and other sanity checks
        created_on = common.now()
        data['uuid'] = common.createUUID('uuid')
        data['created_on'] = created_on
        data['timestamp'] = common.timestamp_from_datestring(created_on)
        tblname = annonutils.get_tblname('MODELINFO')
        # annonutils.create_unique_index(db, tblname, 'created_on')
        annonutils.create_unique_index(db, tblname, 'weights_path')
        collection = db.get_collection(tblname)
        # Upsert keyed on created_on; never overwrite an existing document.
        collection.update_one(
            {'created_on': created_on},
            {'$setOnInsert': data},
            upsert=True)
def interactive_edit(self):
    """Interactively edit the value, defaulting to the current datetime.

    auto_add_now fills an empty value; auto_now always refreshes the value;
    otherwise an editable property delegates to the base interactive editor.
    Returns the (possibly updated) value.
    """
    if not self.value and self.auto_add_now:
        self.value = common.now()
    elif self.auto_now:
        self.value = common.now()
    elif self.editable:
        # Bug fix: `default` was an undefined name (NameError). Per the
        # documented intent, seed the base editor with the current datetime.
        super(CreatedOnProperty, self).interactive_edit(default=common.now())
    return self.value
def generate_dev(
    dirpath: str,
    author: str,
    name: str,
    description: str,
    packages: Iterable[Tuple[str, str, str]],
    cmp: str,
    cat: str,
    signals: Iterable[str],
    keywords: str,
    version: str,
    create_date: Optional[str],
) -> None:
    """Generate one LibrePCB device per package variant and write it to disk.

    Each entry of `packages` is (size_metric, size_imperial, pkg_name); the
    metric/imperial sizes are substituted into `name` and `description`.
    """
    category = 'dev'
    for (size_metric, size_imperial, pkg_name) in packages:
        lines = []
        fmt_params = {
            'size_metric': size_metric,
            'size_imperial': size_imperial,
        }  # type: Dict[str, Any]
        full_name = name.format(**fmt_params)
        full_desc = description.format(**fmt_params)

        def _uuid(identifier: str) -> str:
            # UUIDs are namespaced by (category, full device name, identifier).
            return uuid(category, full_name, identifier)

        # UUIDs — pkg/pads are looked up (create=False), not created here.
        uuid_dev = _uuid('dev')
        pkg = uuid('pkg', pkg_name, 'pkg', create=False)
        pads = [uuid('pkg', pkg_name, 'pad-{}'.format(i), create=False)
                for i in range(1, 3)]
        print('Generating dev "{}": {}'.format(full_name, uuid_dev))
        # General info — emitted in LibrePCB s-expression format.
        lines.append('(librepcb_device {}'.format(uuid_dev))
        lines.append(' (name "{}")'.format(full_name))
        lines.append(' (description "{}\\n\\nGenerated with {}")'.format(full_desc, generator))
        lines.append(' (keywords "{},{},{}")'.format(size_metric, size_imperial, keywords))
        lines.append(' (author "{}")'.format(author))
        lines.append(' (version "{}")'.format(version))
        lines.append(' (created {})'.format(create_date or now()))
        lines.append(' (deprecated false)')
        lines.append(' (category {})'.format(cat))
        lines.append(' (component {})'.format(cmp))
        lines.append(' (package {})'.format(pkg))
        # Map each pad to its component signal, in sorted pad order.
        for (pad, signal) in sorted(zip(pads, signals)):
            lines.append(' (pad {} (signal {}))'.format(pad, signal))
        lines.append(')')
        # Write the device directory: version marker + device.lp.
        dev_dir_path = path.join(dirpath, uuid_dev)
        if not (path.exists(dev_dir_path) and path.isdir(dev_dir_path)):
            makedirs(dev_dir_path)
        with open(path.join(dev_dir_path, '.librepcb-dev'), 'w') as f:
            f.write('0.1\n')
        with open(path.join(dev_dir_path, 'device.lp'), 'w') as f:
            f.write('\n'.join(lines))
            f.write('\n')
def generate_dev(
    dirpath: str,
    diameter: float,
    height: float,
    pitch: float,
    lead_width: float,
    author: str,
    version: str,
    create_date: Optional[str],
) -> None:
    """Generate a LibrePCB device for one radial electrolytic capacitor size.

    Dimensions are in millimetres; the package/pad UUIDs are resolved from
    the size variant and the device is written to <dirpath>/<uuid>/.
    """
    name = 'Capacitor Radial ⌀{}x{}/{}mm'.format(diameter, height, pitch)
    variant = get_variant(diameter, height, pitch, lead_width)

    def _uuid(identifier: str) -> str:
        # UUIDs are namespaced by ('dev', size variant, identifier).
        return uuid('dev', variant, identifier)

    device = Device(
        uuid=_uuid('dev'),
        name=Name(name),
        description=Description(
            'Generic polarized radial electrolytic capacitor.\\n\\n' +
            'Diameter: {} mm\\n'.format(diameter) +
            'Height: {} mm\\n'.format(height) +
            'Lead Spacing: {} mm\\n'.format(pitch) +
            'Max. Lead Diameter: {} mm\\n\\n'.format(lead_width) +
            'Generated with {}'.format(generator)),
        keywords=Keywords(
            'electrolytic,capacitor,polarized,radial,c,cap,cpol'),
        author=Author(author),
        version=Version(version),
        created=Created(create_date or now()),
        deprecated=Deprecated(False),
        # Fixed LibrePCB category/component UUIDs for polarized capacitors.
        category=Category('c011cc6b-b762-498e-8494-d1994f3043cf'),
        component_uuid=ComponentUUID('c54375c5-7149-4ded-95c5-7462f7301ee7'),
        package_uuid=PackageUUID(uuid('pkg', variant, 'pkg')),
    )
    # Map package pads to the component's plus/minus signals.
    device.add_pad(
        ComponentPad(
            uuid=uuid('pkg', variant, 'pad-plus'),
            signal=SignalUUID('e010ecbb-6210-4da3-9270-ebd58656dbf0'),
        ))
    device.add_pad(
        ComponentPad(
            uuid=uuid('pkg', variant, 'pad-minus'),
            signal=SignalUUID('af3ffca8-0085-4edb-a775-fcb759f63411'),
        ))
    # write files: version marker + device.lp in the device directory.
    pkg_dir_path = path.join(dirpath, device.uuid)
    if not (path.exists(pkg_dir_path) and path.isdir(pkg_dir_path)):
        makedirs(pkg_dir_path)
    with open(path.join(pkg_dir_path, '.librepcb-dev'), 'w') as f:
        f.write('0.1\n')
    with open(path.join(pkg_dir_path, 'device.lp'), 'w') as f:
        f.write(str(device))
        f.write('\n')
    print('Wrote device {}'.format(name))
def _write_new_lines(self, fname):
    """Forward lines of `fname` newer than our last access to the client FIFO.

    Each line is expected to start with an integer timestamp followed by
    INLINE_SEP. Afterwards last_accessed[fname] is bumped to the current time.
    """
    last_accessed = self.last_accessed[fname]
    with open(os.path.join(self.path, fname), 'r') as f:
        # Iterate the file object directly instead of readlines(): streams
        # line by line instead of materializing the whole file in memory.
        for line in f:
            time_made = int(line.strip().split(INLINE_SEP)[0])
            if time_made > last_accessed:
                print("found new lines in %s" % fname)
                write_kbfs_data_to_client(self.fifo_filename, line)
    self.last_accessed[fname] = now()
def __init__(self):
    """Wire the admin task to this server and initialize session state."""
    self.admin = adm_server.task
    # Back-reference so the admin task can reach this server instance.
    self.admin.server = self
    self.task = None              # application task, created lazily
    self.task_lock = Lock()       # guards task creation/access
    self.users = {}               # session uuid -> (user_id, user_info, login time)
    self.roles = None             # role cache, filled on demand
    self.last_users_update = common.now()  # throttles safe-mode user refresh
    self.under_maintenance = False  # when True, all requests are rejected
def send_message(self, m, names, channel):
    """Append message `m` to the FIFO for (names, channel).

    Line format: <timestamp><SEP><sender><SEP><base64(m)>\n.
    Raises Exception when this client is not subscribed to the channel.
    """
    if (names, channel) not in self._subs:
        # Bug fix: the two adjacent string literals previously joined
        # without a space ("...you're notsubscribed to").
        raise Exception("Can't send message on a channel you're not "
                        "subscribed to")
    fname = self._get_fifo_in_name(names, channel)
    with open(fname, 'w') as f:
        f.write(INLINE_SEP.join([str(now()), self.sender,
                                 b64encode(str.encode(m)).decode()]) + "\n")
def cpu_stats():
    """Collect cumulative CPU user+system and iowait times (milliseconds)."""
    cpu_times = psutil.cpu_times()
    user_system = common.s_to_milliseconds(cpu_times.user + cpu_times.system)
    iowait = common.s_to_milliseconds(cpu_times.iowait)
    date = common.now()
    # print() call form: identical output, valid on Python 2 and 3.
    print("CPU date: %s total: %s iowait: %s" % (date, user_system, iowait))
    return [
        {"date": date, "t": "CPU-IOWAIT", "d1": common.HOSTNAME, "V": iowait},
        {"date": date, "t": "CPU-TOTAL", "d1": common.HOSTNAME, "V": user_system},
    ]
def mem_stats():
    """Collect real (used minus buffers) and swap memory usage in MB."""
    mem = psutil.virtual_memory()
    real_used = common.b_to_mb(mem.used - mem.buffers)
    swap_used = common.b_to_mb(psutil.swap_memory().used)
    date = common.now()
    # print() call form: identical output, valid on Python 2 and 3.
    print("MEM date: %s used: %s swap_used: %s" % (date, real_used, swap_used))
    return [
        {"date": date, "t": "MEM-USED", "d1": common.HOSTNAME, "V": real_used},
        {"date": date, "t": "MEM-SWAP", "d1": common.HOSTNAME, "V": swap_used},
    ]
def create_archive(object):
    """Create a gzipped tar archive named <common.now()>.tar.gz.

    :param object: either a list of file dicts (their 'local_file_path' is
        archived) or a single path. Name kept for interface compatibility
        even though it shadows the builtin.
    :return: name of the archive file.
    """
    name = "{0}.tar.gz".format(common.now())
    # Bug fix: use a context manager so the archive handle is closed even
    # when tar.add raises (it previously leaked on error).
    with tarfile.open(name, "w:gz") as tar:
        if isinstance(object, list):
            for fl in object:
                tar.add(fl["local_file_path"])
        else:
            tar.add(object)
    return name
def display(video_id, channel_id):
    """Log and print the outcome of the rename/create/move for a video file.

    Outcome is selected from the module-level error_renamer / error_creator /
    error_mover flags; the message is both written to the log and printed.
    """
    if error_renamer == 1:
        message = '{:s} [FILE] "{:s}" Could not rename.'.format(
            common.now(2), video_id)
    elif error_creator == 1:
        message = '{:s} [FILE] "{:s}" Could not create folder "{:s}."'.format(
            common.now(2), video_id, channel_id)
    elif error_mover == 1:
        message = '{:s} [FILE] "{:s}" Could not move file to "{:s}."'.format(
            common.now(2), video_id, channel_id)
    else:
        message = '{:s} [FILE] "{:s}" Successfully. Destination folder: "{:s}."'.format(
            common.now(2), video_id, channel_id)
    write_log(message)
    print(message)
def login(self, log, psw_hash, admin, env):
    """Authenticate a user and return (user_uuid, '').

    Non-admin tasks with an on_login hook delegate authentication to it;
    otherwise the admin task verifies the credentials and a fresh session
    uuid is registered in self.users.
    """
    privileges = None  # NOTE(review): assigned but never used in this method
    if not admin and self.task.on_login:
        user_uuid, user_info = self.task.on_login(self.task, env, admin, log, psw_hash)
    else:
        # NOTE(review): self.admin is passed explicitly — login here appears
        # to be a plain (unbound) function on the admin object; confirm.
        user_id, user_info = self.admin.login(self.admin, log, psw_hash, admin)
        user_uuid = None
        if user_id:
            # Enforce a single session per user id: drop any existing one.
            # (iterkeys is Python 2 only.)
            for key in self.users.iterkeys():
                if self.users[key][0] == user_id:
                    del self.users[key]
                    break
            user_uuid = str(uuid.uuid4())
            self.users[user_uuid] = (user_id, user_info, common.now())
    return user_uuid, ''
def login(self, log, psw_hash, admin, env):
    """Authenticate a user and return the session user_uuid (or None).

    Non-admin tasks with an on_login hook delegate authentication to it;
    otherwise the admin task verifies the credentials and a fresh session
    uuid is registered in self.users.
    """
    privileges = None  # NOTE(review): assigned but never used in this method
    if not admin and self.task.on_login:
        user_uuid, user_info = self.task.on_login(self.task, env, admin, log, psw_hash)
    else:
        user_id, user_info = self.admin.login(log, psw_hash, admin)
        user_uuid = None
        if user_id:
            # Enforce a single session per user id: drop any existing one.
            # (iterkeys is Python 2 only.)
            for key in self.users.iterkeys():
                if self.users[key][0] == user_id:
                    del self.users[key]
                    break
            user_uuid = str(uuid.uuid4())
            self.users[user_uuid] = (user_id, user_info, common.now())
    return user_uuid
def _gen_id(self):
    """Generate a sha256-based id not contained in any existing repo key.

    NOTE: Python 2 code — hashlib.sha256 would require bytes on Python 3.
    """
    value = ''
    while True:
        # Re-hash the previous candidate plus the current time until the
        # candidate is not a substring of any existing repo key.
        value = hashlib.sha256(value + str(common.now())).hexdigest()
        # any() short-circuits and is False for an empty key set, so it
        # replaces both the reduce(or) chain and the separate len()==0 branch.
        if not any(value in key for key in common.git_repo.keys()):
            break
    return value
def get_video_info(video_id):
    """Fetch snippet/contentDetails for a video and archive the raw response.

    Returns: [channelTitle, channelId, publishedAt, videoId, title,
    description, duration]  (consumed by the file-processing step).
    """
    response = youtube.videos().list(part="snippet, contentDetails",
                                     id=video_id).execute()
    items = response["items"][0]
    # Archive the raw API response under a UTC+0 timestamped filename.
    resp_save_dest = "data/resp/yt/v/"
    timestamp = common.now(1)  # UTC+0
    common.make_dir(resp_save_dest)
    common.write_json(resp_save_dest + timestamp + " " + video_id + ".json", items)
    snippet = items["snippet"]
    return [
        snippet["channelTitle"],               # 0
        snippet["channelId"],                  # 1
        snippet["publishedAt"],                # 2
        items["id"],                           # 3
        snippet["title"],                      # 4
        snippet["description"],                # 5
        items["contentDetails"]["duration"],   # 6
    ]
def main3():
    """Print, per committee, members sorted by membership age in years."""
    member_age = {}
    for rel in Es.query_relations(-1, Es.by_name('leden'), None, None,
                                  deref_who=True):
        if rel['who'] not in member_age:
            member_age[rel['who']] = 0
        member_age[rel['who']] = max(member_age[rel['who']],
                                     (now() - rel['from']).days / 365.0)
    # for comm in Es.by_name('comms').get_bearers():
    for comm in [Es.by_name('draai')]:
        print(six.text_type(comm.humanName))
        members = [(m, member_age.get(m)) for m in comm.get_members()]
        # Bug fix: members without a recorded age have None, and on Python 3
        # sorting None against float raises TypeError. Use the same -1
        # sentinel that the display fallback below uses.
        members.sort(key=lambda x: x[1] if x[1] is not None else -1)
        for member in members:
            print(" %-20s%.2f" % (six.text_type(member[0].name),
                                  member[1] if member[1] else -1))
def dirs_size():
    """Collect sizes (MB) of configured directories via `du -s`.

    Returns a list of stat dicts; empty when the config section is absent.
    """
    sizes = []
    if not common.check_config_sections(['disks', 'dirs_size']):
        return sizes
    for directory in common.config['disks']['dirs_size']:
        if not os.path.exists(directory):
            common.process_exception("%s is not exists. skip..." % directory)
            # Bug fix: without this `continue` the code fell through and ran
            # `du` on the missing directory despite the "skip..." message.
            continue
        # NOTE(review): shell=True with an interpolated path is
        # injection-prone if directory names are untrusted.
        cmd = "du -s %s" % directory
        size = subprocess.Popen([cmd], stdout=subprocess.PIPE,
                                shell=True).communicate()[0].split()[0]
        size = common.kb_to_mb(size)
        date = common.now()
        print("DSIZE date: %s directory: %s size: %s" % (date, directory, size))
        sizes.append({"date": date, "t": "DSIZE", "d1": common.HOSTNAME,
                      "d2": directory, "V": size})
    return sizes
def disks_stats():
    """Collect used space (MB) for each configured mount point."""
    usages = []
    if not common.check_config_sections(['disks', 'mount_points']):
        return usages
    for mount_point in common.config['disks']['mount_points']:
        try:
            fs_stats = psutil.disk_usage(mount_point)
        except OSError as e:
            # Unreadable/unmounted path: report and move on.
            common.process_exception(e)
            continue
        used = common.b_to_mb(fs_stats.used)
        date = common.now()
        # print() call form: identical output on Python 2 and 3.
        print("DISK date: %s mount_point: %s used: %s" % (date, mount_point, used))
        usages.append({"date": date, "t": "DISK-USAGE", "d1": common.HOSTNAME,
                       "d2": mount_point, "V": used})
    return usages
async def att_set(self, ctx, *args):
    """Record voice-channel attendance into a named sheet column.

    Only ATTENDANCE_ADMINS may use it, and the author must be in a voice
    channel. The column name defaults to "<author> <timestamp>".
    """
    if ctx.message.author.id not in ATTENDANCE_ADMINS:
        raise Exception(_('You\'re not allowed to use this command.'))
    voice = ctx.message.author.voice
    if not voice:
        raise Exception(_('You\'re not in a voice channel.'))
    channel = voice.channel
    members = [clear_member_name(member.name) for member in channel.members]
    column_name = ' '.join(args)
    if not column_name:
        column_name = "{author_name} {time:%Y.%m.%d %H:%M:%S}".format(
            author_name=ctx.author.name, time=now(tz=TIME_ZONE))
    attendance_sheet = AttendanceSheet()
    attendance_sheet.process(members, column_name)
    # Bug fix: the template references {mention} but .format() never supplied
    # it, so the confirmation message raised KeyError at runtime.
    await ctx.send(
        _('{mention} Attendance stored in column \'{column_name}\'').format(
            mention=ctx.author.mention, column_name=column_name))
def test_user_subscribe(self):
    """A 'subscribe' callback registers the user, stores the event, and
    places the openid into the session."""
    # Build a WeChat subscribe event message from OPENID.
    msg = weixin.Message()
    msg.ToUserName = '******'
    msg.FromUserName = OPENID
    msg.CreateTime = now()
    msg.MsgType = 'event'
    msg.Event = 'subscribe'
    # Session must be empty before the callback.
    self.assertEqual({}, self.getsession())
    rsp = self.app.post('/api/callback', data=msg.xml())
    self.assertEqual(SUCCESS, rsp.data.decode('utf-8'))
    # The user record was created and the session now holds the openid.
    user = self.service.find_user(OPENID)
    self.assertEqual(OPENID, user['openid'])
    self.assertEqual({'openid': OPENID}, self.getsession())
    # A 'subscribe' event was persisted for this openid.
    event = self.service.find_events(OPENID, 'subscribe')[0]
    self.assertEqual(OPENID, event['openid'])
    self.assertEqual('subscribe', event['type'])
def _gen_id(self):
    """Generate a sha256-based id not contained in any existing repo key.

    NOTE: Python 2 code — hashlib.sha256 would require bytes on Python 3.
    """
    value = ''
    while True:
        # Re-hash the previous candidate plus the current time until the
        # candidate is not a substring of any existing repo key.
        value = hashlib.sha256(value + str(common.now())).hexdigest()
        # any() short-circuits and is False for an empty key set, so it
        # replaces both the reduce(or) chain and the separate len()==0 branch.
        if not any(value in key for key in common.git_repo.keys()):
            break
    return value
def main3():
    """Print, per committee, members sorted by membership age in years."""
    member_age = {}
    for rel in Es.query_relations(-1, Es.by_name('leden'), None, None,
                                  deref_who=True):
        if rel['who'] not in member_age:
            member_age[rel['who']] = 0
        member_age[rel['who']] = max(member_age[rel['who']],
                                     (now() - rel['from']).days / 365.0)
    # for comm in Es.by_name('comms').get_bearers():
    for comm in [Es.by_name('draai')]:
        print(six.text_type(comm.humanName))
        members = [(m, member_age.get(m)) for m in comm.get_members()]
        # Bug fix: members without a recorded age have None, and on Python 3
        # sorting None against float raises TypeError. Use the same -1
        # sentinel that the display fallback below uses.
        members.sort(key=lambda x: x[1] if x[1] is not None else -1)
        for member in members:
            print(" %-20s%.2f" % (six.text_type(member[0].name),
                                  member[1] if member[1] else -1))
def io_stats():
    """Collect read/write operation counters for configured block devices."""
    io_perdev = []
    if not common.check_config_sections(['disks', 'block_devs']):
        return io_perdev
    counters = psutil.disk_io_counters(perdisk=True)
    for dev in common.config['disks']['block_devs']:
        counter = counters.get(dev, None)
        if not counter:
            common.process_exception('cannot find counters for block device %s. skip..' % dev)
            continue
        date = common.now()
        # print() call form: identical output on Python 2 and 3.
        print("DISK date: %s block_dev: %s reads: %s writes: %s"
              % (date, dev, counter.read_count, counter.write_count))
        io_perdev.extend([
            {"date": date, "t": "DISK-READS", "d1": common.HOSTNAME,
             "d2": dev, "V": counter.read_count},
            {"date": date, "t": "DISK-WRITES", "d1": common.HOSTNAME,
             "d2": dev, "V": counter.write_count},
        ])
    return io_perdev
def network_stats():
    """Collect received/sent byte counters (MB) for configured interfaces."""
    network_bytes = []
    if not common.check_config_sections(['networking', 'interfaces']):
        return network_bytes
    # Keyword argument instead of a bare positional True for clarity.
    counters = psutil.net_io_counters(pernic=True)
    for interface in common.config['networking']['interfaces']:
        counter = counters.get(interface, None)
        if not counter:
            common.process_exception(
                'cannot find counters for interface %s. skip..' % interface)
            continue
        date = common.now()
        mb_rcv = common.b_to_mb(counter.bytes_recv)
        mb_sent = common.b_to_mb(counter.bytes_sent)
        # print() call form: identical output on Python 2 and 3.
        print("NET date: %s interface: %s recv: %s sent: %s"
              % (date, interface, mb_rcv, mb_sent))
        network_bytes.extend([
            {"date": date, "t": "NET-RCV", "d1": common.HOSTNAME,
             "d2": interface, "V": mb_rcv},
            {"date": date, "t": "NET-SENT", "d1": common.HOSTNAME,
             "d2": interface, "V": mb_sent},
        ])
    return network_bytes
def mem_stats():
    """Collect used (minus buffers) and swap memory in MB, logging the values.

    Falls back to the legacy phymem/virtmem API on old psutil versions that
    predate virtual_memory().
    """
    if hasattr(psutil, 'virtual_memory'):
        vmem = psutil.virtual_memory()
        real_used = common.b_to_mb(vmem.used - vmem.buffers)
        swap_used = common.b_to_mb(psutil.swap_memory().used)
    else:
        # Legacy psutil API.
        real_used = common.b_to_mb(psutil.phymem_usage().used)
        swap_used = common.b_to_mb(psutil.virtmem_usage().used)
    date = common.now()
    logging.info("MEM date: %s used: %s swap_used: %s",
                 date, real_used, swap_used)
    return [
        {"date": date, "t": "MEM-USED", "d1": common.HOSTNAME, "V": real_used},
        {"date": date, "t": "MEM-SWAP", "d1": common.HOSTNAME, "V": swap_used},
    ]
def io_stats():
    """Collect read/write operation counters for configured block devices."""
    io_perdev = []
    if not common.check_config_sections(['disks', 'block_devs']):
        return io_perdev
    counters = psutil.disk_io_counters(perdisk=True)
    for dev in common.config['disks']['block_devs']:
        counter = counters.get(dev, None)
        if not counter:
            common.process_exception(
                'cannot find counters for block device %s. skip..' % dev)
            continue
        date = common.now()
        # print() call form: identical output on Python 2 and 3.
        print("DISK date: %s block_dev: %s reads: %s writes: %s"
              % (date, dev, counter.read_count, counter.write_count))
        io_perdev.extend([
            {"date": date, "t": "DISK-READS", "d1": common.HOSTNAME,
             "d2": dev, "V": counter.read_count},
            {"date": date, "t": "DISK-WRITES", "d1": common.HOSTNAME,
             "d2": dev, "V": counter.write_count},
        ])
    return io_perdev
async def pt_add(self, ctx, item_name: str, trigger_type: str, value: int):
    """Register a price trigger for an item and confirm with current price/volume."""
    resolved = self.resolve_item_name(item_name)
    mention = author_mention(ctx)
    trigger = PriceTrigger(user_mention=mention,
                           item_name=resolved,
                           trigger_type=trigger_type,
                           value=value,
                           notified_datetime=now())
    self.data.add_price_trigger(trigger)
    last_point = self.data.get_item_last_price_point(resolved)
    trigger_description = self.data.price_trigger_types[trigger_type].description
    await ctx.send(
        _('{mention} You will be notified when {trigger_description} {value:,} for {item_name}. '
          'Current price: {price:,}, volume: {volume:,}.').format(
            mention=mention,
            trigger_description=trigger_description,
            value=value,
            item_name=resolved,
            price=last_point.price,
            volume=last_point.volume))
    logging.info(
        'discord_server.Prices.price_trigger_add Price trigger {item_name} '
        'was added for {mention}.'.format(item_name=resolved,
                                          mention=mention))
def process_request(self, env, request, user_uuid, task_id, item_id, params, web):
    """Dispatch one client request and return a {'status', 'data'} dict.

    task_id 0 selects the admin task.
    """
    #~ print ''
    #~ print 'process_request: ', request, user_uuid, task_id, item_id, params, web
    is_admin = task_id == 0
    user_info = {}
    # Reject everything while the server is in maintenance mode.
    if self.under_maintenance:
        return {'status': common.UNDER_MAINTAINANCE, 'data': None}
    # Login is handled before any session lookup; params = (login, psw_hash).
    if request == 'login':
        return {
            'status': common.RESPONSE,
            'data': self.login(params[0], params[1], is_admin, env)
        }
    if is_admin:
        task = self.admin
    else:
        task = self.get_task()
    # In safe mode, refresh the cached user table at most once per hour.
    if self.admin.safe_mode:
        now = common.now()
        if common.hour_diff(now - self.last_users_update) > 1:
            self.update_users()
    user_info = self.get_user_info(user_uuid, is_admin, env)
    if not user_info:
        return {'status': common.NOT_LOGGED, 'data': common.NOT_LOGGED}
    obj = task
    if task:
        obj = task.item_by_ID(item_id)
    return {
        'status': common.RESPONSE,
        'data': self.get_response(is_admin, env, request, user_info, task_id,
                                  obj, params, web)
    }
def prepare_file_list(filepath, mask, period, delay_period):
    """Walk `filepath` and list files eligible for synchronization.

    A file qualifies when it was modified since the last sync, is at most
    `period` days old, is at least `delay_period` minutes old (skips files
    still being written), and its path matches `mask`.

    :param filepath: root folder.
    :param mask: fnmatch pattern for the file path.
    :param period: number of days a file shouldn't be older than.
    :param delay_period: minimum age in minutes before a file is picked up.
    :return: list of dicts with local_file_path/remote_dir_path/file_path.
    :raises Exception: when `filepath` does not exist.
    """
    norm_path = path.normpath(filepath)
    # Guard clause instead of else-nesting the whole body.
    if not path.exists(norm_path):
        logging.error(u'Bad path: {}'.format(norm_path))
        raise Exception("Bad path.")
    result = []
    # Remote directory is named after the root folder itself.
    rem_dir = path.normpath(os.path.basename(norm_path))
    for root, dirs, files in os.walk(norm_path):
        for name in files:
            local_file = path.join(root, name)
            file_path = path.join(root[len(norm_path):], name)
            if not path.exists(local_file):
                continue
            mod_time = path.getmtime(local_file)
            # `now` is re-read per file so long walks use current time.
            now = common.now()
            # TODO optimization: if the whole directory must be backed up,
            # the per-file checks could be skipped.
            if (mod_time >= LAST_SYNC_TIME
                    and now - mod_time <= period * common.DAY
                    and fnmatch(local_file, mask)
                    and now - mod_time >= delay_period * common.MIN):
                result.append({"local_file_path": local_file,
                               "remote_dir_path": rem_dir,
                               "file_path": file_path})
    return result
def generate_dev(
    dirpath: str,
    author: str,
    name: str,
    name_lower: str,
    kind: str,
    cmpcat: str,
    keywords: str,
    min_pads: int,
    max_pads: int,
    pad_drills: Iterable[float],
    create_date: Optional[str],
):
    """Generate LibrePCB devices for 1xN pin-header variants.

    One device is written per (pad count, drill diameter) combination.
    NOTE(review): `spacing` is read from module scope — confirm it is the
    pin pitch in mm.
    """
    category = 'dev'
    for i in range(min_pads, max_pads + 1):
        for drill in pad_drills:
            lines = []
            # e.g. '1x8-D1.0' (drill-specific) vs '1x8' (shared cmp/sym).
            variant = '1x{}-D{:.1f}'.format(i, drill)
            broad_variant = '1x{}'.format(i)

            def _uuid(identifier):
                return uuid(category, kind, variant, identifier)

            # UUIDs: component/signals are shared per pad count; package/pads
            # are drill-specific.
            uuid_dev = _uuid('dev')
            uuid_cmp = uuid('cmp', kind, broad_variant, 'cmp')
            uuid_signals = [
                uuid('cmp', kind, broad_variant, 'signal-{}'.format(p))
                for p in range(i)
            ]
            uuid_pkg = uuid('pkg', kind, variant, 'pkg')
            uuid_pads = [
                uuid('pkg', kind, variant, 'pad-{}'.format(p)) for p in range(i)
            ]
            # General info — emitted in LibrePCB s-expression format.
            lines.append('(librepcb_device {}'.format(uuid_dev))
            lines.append(' (name "{} 1x{} ⌀{:.1f}mm")'.format(name, i, drill))
            lines.append(' (description "A 1x{} {} with {}mm pin spacing '
                         'and {:.1f}mm drill holes.\\n\\n'
                         'Generated with {}")'.format(i, name_lower, spacing,
                                                      drill, generator))
            lines.append(' (keywords "connector, 1x{}, d{:.1f}, {}")'.format(
                i, drill, keywords))
            lines.append(' (author "{}")'.format(author))
            lines.append(' (version "0.1")')
            lines.append(' (created {})'.format(create_date or now()))
            lines.append(' (deprecated false)')
            lines.append(' (category {})'.format(cmpcat))
            lines.append(' (component {})'.format(uuid_cmp))
            lines.append(' (package {})'.format(uuid_pkg))
            # Map pad j to signal j, 1-based in the file, 0-based in the lists.
            for j in range(1, i + 1):
                lines.append(' (pad {} (signal {}))'.format(
                    uuid_pads[j - 1], uuid_signals[j - 1]))
            lines.append(')')
            # Write the device directory: version marker + device.lp.
            dev_dir_path = path.join(dirpath, uuid_dev)
            if not (path.exists(dev_dir_path) and path.isdir(dev_dir_path)):
                makedirs(dev_dir_path)
            with open(path.join(dev_dir_path, '.librepcb-dev'), 'w') as f:
                f.write('0.1\n')
            with open(path.join(dev_dir_path, 'device.lp'), 'w') as f:
                f.write('\n'.join(lines))
                f.write('\n')
            print('1x{} {} ⌀{:.1f}mm: Wrote device {}'.format(
                i, kind, drill, uuid_dev))
def generate_cmp(
    dirpath: str,
    author: str,
    name: str,
    name_lower: str,
    kind: str,
    cmpcat: str,
    keywords: str,
    default_value: str,
    min_pads: int,
    max_pads: int,
    create_date: Optional[str],
):
    """Generate LibrePCB components for 1xN connector variants.

    One component is written per pad count, each with passive signals and a
    single default gate referencing the matching symbol.
    """
    category = 'cmp'
    for i in range(min_pads, max_pads + 1):
        lines = []
        variant = '1x{}'.format(i)

        def _uuid(identifier):
            return uuid(category, kind, variant, identifier)

        # UUIDs: pins belong to the symbol namespace; signals/gate/variant
        # to the component namespace.
        uuid_cmp = _uuid('cmp')
        uuid_pins = [
            uuid('sym', kind, variant, 'pin-{}'.format(p)) for p in range(i)
        ]
        uuid_signals = [_uuid('signal-{}'.format(p)) for p in range(i)]
        uuid_variant = _uuid('variant-default')
        uuid_gate = _uuid('gate-default')
        uuid_symbol = uuid('sym', kind, variant, 'sym')
        # General info — emitted in LibrePCB s-expression format.
        lines.append('(librepcb_component {}'.format(uuid_cmp))
        lines.append(' (name "{} 1x{}")'.format(name, i))
        lines.append(' (description "A 1x{} {}.\\n\\n'
                     'Generated with {}")'.format(i, name_lower, generator))
        lines.append(' (keywords "connector, 1x{}, {}")'.format(i, keywords))
        lines.append(' (author "{}")'.format(author))
        lines.append(' (version "0.1")')
        lines.append(' (created {})'.format(create_date or now()))
        lines.append(' (deprecated false)')
        lines.append(' (category {})'.format(cmpcat))
        lines.append(' (schematic_only false)')
        lines.append(' (default_value "{}")'.format(default_value))
        lines.append(' (prefix "J")')
        # Passive signals, 1-based names in the file.
        for j in range(1, i + 1):
            lines.append(' (signal {} (name "{}") (role passive)'.format(
                uuid_signals[j - 1], j))
            lines.append(
                '  (required false) (negated false) (clock false) (forced_net "")'
            )
            lines.append(' )')
        # Single default variant with one gate mapping pins to signals.
        lines.append(' (variant {} (norm "")'.format(uuid_variant))
        lines.append('  (name "default")')
        lines.append('  (description "")')
        lines.append('  (gate {}'.format(uuid_gate))
        lines.append('   (symbol {})'.format(uuid_symbol))
        lines.append(
            '   (position 0.0 0.0) (rotation 0.0) (required true) (suffix "")')
        for j in range(1, i + 1):
            lines.append('   (pin {} (signal {}) (text pin))'.format(
                uuid_pins[j - 1],
                uuid_signals[j - 1],
            ))
        lines.append('  )')
        lines.append(' )')
        lines.append(')')
        # Write the component directory: version marker + component.lp.
        cmp_dir_path = path.join(dirpath, uuid_cmp)
        if not (path.exists(cmp_dir_path) and path.isdir(cmp_dir_path)):
            makedirs(cmp_dir_path)
        with open(path.join(cmp_dir_path, '.librepcb-cmp'), 'w') as f:
            f.write('0.1\n')
        with open(path.join(cmp_dir_path, 'component.lp'), 'w') as f:
            f.write('\n'.join(lines))
            f.write('\n')
        print('1x{} {}: Wrote component {}'.format(i, kind, uuid_cmp))
def generate_sym(
    dirpath: str,
    author: str,
    name: str,
    name_lower: str,
    kind: str,
    cmpcat: str,
    keywords: str,
    min_pads: int,
    max_pads: int,
    create_date: Optional[str],
) -> None:
    """Generate LibrePCB symbol (``symbol.lp``) files for 1xN connectors.

    One symbol directory is written below *dirpath* for every pad count in
    ``[min_pads, max_pads]``.  The drawing consists of the pins, a rectangular
    contour, a kind-specific decoration per pin, and NAME/VALUE text anchors.

    NOTE(review): depends on module-level helpers/globals defined elsewhere in
    this file (``uuid``, ``generator``, ``now``, ``get_y``,
    ``get_rectangle_bounds``, ``spacing``, ``line_width``,
    ``sym_text_height``, ``ff``, ``KIND_HEADER``, ``KIND_SOCKET``).
    """
    category = 'sym'
    for i in range(min_pads, max_pads + 1):
        lines = []

        variant = '1x{}'.format(i)

        def _uuid(identifier):
            # Shorthand: deterministic UUID inside this symbol's namespace.
            return uuid(category, kind, variant, identifier)

        uuid_sym = _uuid('sym')
        uuid_pins = [_uuid('pin-{}'.format(p)) for p in range(i)]
        uuid_polygon = _uuid('polygon-contour')
        uuid_decoration = _uuid('polygon-decoration')
        uuid_text_name = _uuid('text-name')
        uuid_text_value = _uuid('text-value')

        # General info
        lines.append('(librepcb_symbol {}'.format(uuid_sym))
        lines.append(' (name "{} 1x{}")'.format(name, i))
        lines.append(' (description "A 1x{} {}.\\n\\n'
                     'Generated with {}")'.format(i, name_lower, generator))
        lines.append(' (keywords "connector, 1x{}, {}")'.format(i, keywords))
        lines.append(' (author "{}")'.format(author))
        lines.append(' (version "0.1")')
        lines.append(' (created {})'.format(create_date or now()))
        lines.append(' (deprecated false)')
        lines.append(' (category {})'.format(cmpcat))

        # Pins: vertically stacked on the right side, pointing left.
        for j in range(1, i + 1):
            lines.append(' (pin {} (name "{}")'.format(uuid_pins[j - 1], j))
            lines.append(
                '  (position 5.08 {}) (rotation 180.0) (length 3.81)'.format(
                    get_y(j, i, spacing, True)))
            lines.append(' )')

        # Polygons: rectangular body outline (closed, 5 vertices).
        y_max, y_min = get_rectangle_bounds(i, spacing, spacing, True)
        lines.append(' (polygon {} (layer sym_outlines)'.format(uuid_polygon))
        lines.append(
            '  (width {}) (fill false) (grab_area true)'.format(line_width))
        lines.append('  (vertex (position -{} {}) (angle 0.0))'.format(
            spacing, ff(y_max)))
        lines.append('  (vertex (position {} {}) (angle 0.0))'.format(
            spacing, ff(y_max)))
        lines.append('  (vertex (position {} {}) (angle 0.0))'.format(
            spacing, ff(y_min)))
        lines.append('  (vertex (position -{} {}) (angle 0.0))'.format(
            spacing, ff(y_min)))
        lines.append('  (vertex (position -{} {}) (angle 0.0))'.format(
            spacing, ff(y_max)))
        lines.append(' )')

        # Decorations
        # NOTE(review): the same uuid_decoration is reused for every pad's
        # decoration polygon — that looks like it should be unique per pad;
        # confirm against the symbol file-format requirements.
        if kind == KIND_HEADER:
            # Headers: Small rectangle
            for j in range(1, i + 1):
                y = get_y(j, i, spacing, True)
                dx = spacing / 8 * 1.5
                dy = spacing / 8 / 1.5
                lines.append(' (polygon {} (layer sym_outlines)'.format(
                    uuid_decoration))
                lines.append(
                    '  (width {}) (fill true) (grab_area true)'.format(
                        line_width))
                vertex = '  (vertex (position {} {}) (angle 0.0))'
                lines.append(vertex.format(ff(spacing / 2 - dx), ff(y + dy)))
                lines.append(vertex.format(ff(spacing / 2 + dx), ff(y + dy)))
                lines.append(vertex.format(ff(spacing / 2 + dx), ff(y - dy)))
                lines.append(vertex.format(ff(spacing / 2 - dx), ff(y - dy)))
                lines.append(vertex.format(ff(spacing / 2 - dx), ff(y + dy)))
                lines.append(' )')
        elif kind == KIND_SOCKET:
            # Sockets: Small semicircle
            for j in range(1, i + 1):
                y = get_y(j, i, spacing, True)
                d = spacing / 4 * 0.75
                w = line_width * 0.75
                lines.append(' (polygon {} (layer sym_outlines)'.format(
                    uuid_decoration))
                lines.append(
                    '  (width {}) (fill false) (grab_area false)'.format(w))
                lines.append(
                    '  (vertex (position {} {}) (angle 135.0))'.format(
                        ff(spacing / 2 + d * 0.5 - d - w), ff(y - d)),
                )
                lines.append('  (vertex (position {} {}) (angle 0.0))'.format(
                    ff(spacing / 2 + d * 0.5 - d - w), ff(y + d)))
                lines.append(' )')

        # Text anchors: NAME above the body, VALUE below it.
        y_max, y_min = get_rectangle_bounds(i, spacing, spacing, True)
        lines.append(
            ' (text {} (layer sym_names) (value "{{{{NAME}}}}")'.format(
                uuid_text_name))
        lines.append(
            '  (align center bottom) (height {}) (position 0.0 {}) (rotation 0.0)'
            .format(
                ff(sym_text_height),
                ff(y_max),
            ))
        lines.append(' )')
        lines.append(
            ' (text {} (layer sym_names) (value "{{{{VALUE}}}}")'.format(
                uuid_text_value))
        lines.append(
            '  (align center top) (height {}) (position 0.0 {}) (rotation 0.0)'
            .format(
                ff(sym_text_height),
                ff(y_min),
            ))
        lines.append(' )')
        lines.append(')')

        # Write the LibrePCB library element (directory named after the UUID).
        sym_dir_path = path.join(dirpath, uuid_sym)
        if not (path.exists(sym_dir_path) and path.isdir(sym_dir_path)):
            makedirs(sym_dir_path)
        with open(path.join(sym_dir_path, '.librepcb-sym'), 'w') as f:
            f.write('0.1\n')
        with open(path.join(sym_dir_path, 'symbol.lp'), 'w') as f:
            f.write('\n'.join(lines))
            f.write('\n')

        print('1x{} {}: Wrote symbol {}'.format(i, kind, uuid_sym))
FILE = '../data/points-all.json' URL = 'http://tmc.mooc.fi/mooc/courses/15/points.csv' import json import csv import urllib2 import common as c ## load previous data if needed try: data = json.load( open( FILE, 'r') ) except IOError: data = {} web = csv.DictReader( urllib2.urlopen( URL ) ) for line in web: username = line['Username']; if username not in data: data[ username ] = { 'data' : [] } data[username]['group'] = c.group( line['Username'] ) d = { 'time' : c.now(), 'points' : int( line['Total'] ) } data[username]['data'].append( d ) json.dump( data, open( FILE, 'w') )
def update_users(self): now = common.now() for key in self.users.keys(): if common.hour_diff(now - self.users[key][2]) > 12: self.logout(key) self.last_users_update = common.now()
def generate_pkg(
    dirpath: str,
    author: str,
    name: str,
    name_lower: str,
    kind: str,
    pkgcat: str,
    keywords: str,
    min_pads: int,
    max_pads: int,
    top_offset: float,
    pad_drills: Iterable[float],
    generate_silkscreen: Callable[[List[str], str, str, str, int, float], None],
    create_date: Optional[str],
) -> None:
    """Generate LibrePCB package (``package.lp``) files for 1xN THT connectors.

    One package directory is written below *dirpath* for every combination of
    pad count in ``[min_pads, max_pads]`` and drill diameter in *pad_drills*.
    The silkscreen drawing is delegated to the *generate_silkscreen* callback,
    which appends its lines to the shared ``lines`` list.

    NOTE(review): depends on module-level helpers/globals defined elsewhere in
    this file (``uuid``, ``generator``, ``now``, ``get_y``,
    ``get_rectangle_bounds``, ``spacing``, ``pad_size``,
    ``pkg_text_height``, ``ff``).
    """
    category = 'pkg'
    for i in range(min_pads, max_pads + 1):
        for drill in pad_drills:
            lines = []

            variant = '1x{}-D{:.1f}'.format(i, drill)

            def _uuid(identifier):
                # Shorthand: deterministic UUID inside this package's namespace.
                return uuid(category, kind, variant, identifier)

            uuid_pkg = _uuid('pkg')
            uuid_pads = [_uuid('pad-{}'.format(p)) for p in range(i)]
            uuid_footprint = _uuid('footprint-default')
            uuid_text_name = _uuid('text-name')
            uuid_text_value = _uuid('text-value')

            # General info
            lines.append('(librepcb_package {}'.format(uuid_pkg))
            lines.append(' (name "{} 1x{} ⌀{:.1f}mm")'.format(name, i, drill))
            lines.append(' (description "A 1x{} {} with {}mm pin spacing '
                         'and {:.1f}mm drill holes.\\n\\n'
                         'Generated with {}")'.format(i, name_lower, spacing,
                                                      drill, generator))
            lines.append(' (keywords "connector, 1x{}, d{:.1f}, {}")'.format(
                i, drill, keywords))
            lines.append(' (author "{}")'.format(author))
            lines.append(' (version "0.1")')
            lines.append(' (created {})'.format(create_date or now()))
            lines.append(' (deprecated false)')
            lines.append(' (category {})'.format(pkgcat))
            for j in range(1, i + 1):
                lines.append(' (pad {} (name "{}"))'.format(
                    uuid_pads[j - 1], j))
            lines.append(' (footprint {}'.format(uuid_footprint))
            lines.append('  (name "default")')
            lines.append('  (description "")')

            # Pads: pad 1 is rectangular so it is visually identifiable.
            for j in range(1, i + 1):
                y = get_y(j, i, spacing, False)
                shape = 'rect' if j == 1 else 'round'
                lines.append('  (pad {} (side tht) (shape {})'.format(
                    uuid_pads[j - 1], shape))
                lines.append(
                    '   (position 0.0 {}) (rotation 0.0) (size {} {}) (drill {})'
                    .format(
                        y,
                        pad_size[0],
                        pad_size[1],
                        drill,
                    ))
                lines.append('  )')

            # Silkscreen (kind-specific, drawn by the caller-provided callback)
            generate_silkscreen(lines, category, kind, variant, i, top_offset)

            # Labels: NAME above the package, VALUE below.
            y_max, y_min = get_rectangle_bounds(i, spacing,
                                                top_offset + 1.27, False)
            text_attrs = '(height {}) (stroke_width 0.2) ' \
                         '(letter_spacing auto) (line_spacing auto)'.format(pkg_text_height)
            lines.append(
                '  (stroke_text {} (layer top_names)'.format(uuid_text_name))
            lines.append('   {}'.format(text_attrs))
            lines.append(
                '   (align center bottom) (position 0.0 {}) (rotation 0.0)'.
                format(ff(y_max), ))
            lines.append(
                '   (auto_rotate true) (mirror false) (value "{{NAME}}")')
            lines.append('  )')
            lines.append(
                '  (stroke_text {} (layer top_values)'.format(uuid_text_value))
            lines.append('   {}'.format(text_attrs))
            lines.append(
                '   (align center top) (position 0.0 {}) (rotation 0.0)'.
                format(ff(y_min), ))
            lines.append(
                '   (auto_rotate true) (mirror false) (value "{{VALUE}}")')
            lines.append('  )')
            lines.append(' )')
            lines.append(')')

            # Write the LibrePCB library element (directory named after the UUID).
            pkg_dir_path = path.join(dirpath, uuid_pkg)
            if not (path.exists(pkg_dir_path) and path.isdir(pkg_dir_path)):
                makedirs(pkg_dir_path)
            with open(path.join(pkg_dir_path, '.librepcb-pkg'), 'w') as f:
                f.write('0.1\n')
            with open(path.join(pkg_dir_path, 'package.lp'), 'w') as f:
                f.write('\n'.join(lines))
                f.write('\n')

            print('1x{} {} ⌀{:.1f}mm: Wrote package {}'.format(
                i, kind, drill, uuid_pkg))
for dev in common.domain_xml(dom).findall("devices/disk/target"): devname = dev.get("dev") block_info["devices"].append(devname) info = dom.blockInfo(devname) block_info[devname] = { # logical size in bytes of the image # (how much storage the guest will see) "capacity": info[0], # host storage in bytes occupied by # the image (such as highest allocated # extent if there are no holes, similar to 'du') "allocation": info[1], # host physical size in bytes of the # image container (last offset, similar to 'ls') "physical": info[2] } print json.dumps(OrderedDict([ ("timestamp", common.now()), ("nova", common.nova_metadata(dom)), ("uuid", dom.UUIDString()), ("name", dom.name()), ("id", dom.ID()), ("block_info", block_info) ])) except Exception, e: print json.dumps({"timestamp": common.now(), "error": "%s" % e})
def save_sync_time(): with open(common.SYNC_FILE_PATH, 'wb') as fl: fl.write(str(common.now()))
def generate_pkg(
    dirpath: str,
    author: str,
    name: str,
    description: str,
    pkgcat: str,
    keywords: str,
    config: DfnConfig,
    make_exposed: bool,
    create_date: Optional[str] = None,
) -> str:
    """Generate a LibrePCB DFN package (``package.lp``) from *config*.

    Builds the IPC-style full name, then writes one package containing two
    footprints ("reflow" and "hand soldering", the latter with 0.3 mm longer
    pads). Returns the generated full package name.

    NOTE(review): depends on module-level helpers/globals defined elsewhere
    in this file (``uuid``, ``fd``, ``ff``, ``get_y``, ``now``, ``np``,
    ``GENERATOR_NAME``, ``MIN_CLEARANCE``, ``MIN_TRACE``,
    ``SILKSCREEN_OFFSET``, ``SILKSCREEN_LINE_WIDTH``, ``TEXT_ATTRS``,
    ``LABEL_OFFSET``, ``path``, ``makedirs``).
    """
    category = 'pkg'
    lines = []

    # IPC-style name, e.g. "DFN..." with L/W/H, pin count and pitch encoded.
    full_name = name.format(length=fd(config.length),
                            width=fd(config.width),
                            height=fd(config.height_nominal),
                            pin_count=config.pin_count,
                            pitch=fd(config.pitch))

    # Add pad length for otherwise identical names/packages
    if config.print_pad:
        full_name += "P{:s}".format(fd(config.lead_length))

    if make_exposed:
        # According to: http://www.ocipcdc.org/archive/What_is_New_in_IPC-7351C_03_11_2015.pdf
        exp_width = fd(config.exposed_width)
        exp_length = fd(config.exposed_length)
        if exp_width == exp_length:
            full_name += "T{}".format(exp_width)
        else:
            full_name += "T{}X{}".format(exp_width, exp_length)

    # Override name if specified
    if config.name:
        full_name = config.name

    full_description = description.format(height=config.height_nominal,
                                          pin_count=config.pin_count,
                                          pitch=config.pitch,
                                          width=config.width,
                                          length=config.length)
    if make_exposed:
        full_description += "\\nExposed Pad: {:.2f} x {:.2f} mm".format(
            config.exposed_width, config.exposed_length)

    if config.print_pad:
        full_description += "\\nPad length: {:.2f} mm".format(config.lead_length)

    def _uuid(identifier: str) -> str:
        # Shorthand: deterministic UUID inside this package's namespace.
        return uuid(category, full_name, identifier)

    uuid_pkg = _uuid('pkg')
    uuid_pads = [_uuid('pad-{}'.format(p)) for p in range(1, config.pin_count + 1)]

    if make_exposed:
        uuid_exp = _uuid('exposed')

    print('Generating {}: {}'.format(full_name, uuid_pkg))

    # General info
    lines.append('(librepcb_package {}'.format(uuid_pkg))
    lines.append(' (name "{}")'.format(full_name))
    lines.append(' (description "{}\\n\\nGenerated with {}")'.format(full_description, GENERATOR_NAME))
    if config.keywords:
        lines.append(' (keywords "dfn{},{},{}")'.format(config.pin_count, keywords, config.keywords.lower()))
    else:
        lines.append(' (keywords "dfn{},{}")'.format(config.pin_count, keywords))
    lines.append(' (author "{}")'.format(author))
    lines.append(' (version "0.1.1")')
    lines.append(' (created {})'.format(create_date or now()))
    lines.append(' (deprecated false)')
    lines.append(' (category {})'.format(pkgcat))

    # Create Pad UUIDs
    for p in range(1, config.pin_count + 1):
        lines.append(' (pad {} (name "{}"))'.format(uuid_pads[p - 1], p))

    # Create Footprint function
    def _generate_footprint(key: str, name: str, pad_extension: float) -> None:
        # Appends one complete footprint to `lines`. `pad_extension`
        # lengthens the copper pads (used by the hand-soldering variant).

        # Create Meta-data
        uuid_footprint = _uuid('footprint-{}'.format(key))
        lines.append(' (footprint {}'.format(uuid_footprint))
        lines.append('  (name "{}")'.format(name))
        lines.append('  (description "")')

        pad_length = config.lead_length + config.toe_heel + pad_extension
        exposed_length = config.exposed_length
        abs_pad_pos_x = (config.width / 2) - (config.lead_length / 2) + (config.toe_heel / 2) + (pad_extension / 2)

        # Check clearance and make pads smaller if required
        if make_exposed:
            clearance = (config.width / 2) - config.lead_length - (exposed_length / 2)
            if clearance < MIN_CLEARANCE:
                print("Increasing clearance from {:.2f} to {:.2f}".format(clearance, MIN_CLEARANCE))
                d_clearance = (MIN_CLEARANCE - clearance) / 2
                pad_length = pad_length - d_clearance
                exposed_length = exposed_length - 2 * d_clearance
                abs_pad_pos_x = abs_pad_pos_x + (d_clearance / 2)

            if exposed_length < MIN_TRACE:
                print("Increasing exposed path width from {:.2f} to {:.2f}".format(exposed_length, MIN_TRACE))
                d_exp = MIN_TRACE - exposed_length
                exposed_length = exposed_length + d_exp
                pad_length = pad_length - (d_exp / 2)
                abs_pad_pos_x = abs_pad_pos_x + (d_exp / 4)

        # Place pads
        # Pads 1..n/2 on the left side (top to bottom), the rest mirrored on
        # the right side.
        for pad_idx, pad_nr in enumerate(range(1, config.pin_count + 1)):
            half_n_pads = config.pin_count // 2
            pad_pos_y = get_y(pad_idx % half_n_pads + 1, half_n_pads, config.pitch, False)

            if pad_idx < (config.pin_count / 2):
                pad_pos_x = - abs_pad_pos_x
            else:
                pad_pos_x = abs_pad_pos_x
                pad_pos_y = - pad_pos_y

            lines.append('  (pad {} (side top) (shape rect)'.format(uuid_pads[pad_idx]))
            lines.append('   (position {} {}) (rotation 0.0) (size {} {}) (drill 0.0)'.format(
                ff(pad_pos_x), ff(pad_pos_y), ff(pad_length), ff(config.lead_width)))
            lines.append('  )')

        # NOTE(review): `half_n_pads` and `pad_pos_x` below rely on the loop
        # variables leaking out of the pad-placement loop above — fragile but
        # intentional-looking; confirm before refactoring.

        # Make exposed pad, if required
        # TODO: Handle pin1_corner_dx_dy in config once custom pad shapes are possible
        if make_exposed:
            lines.append('  (pad {} (side top) (shape rect)'.format(uuid_exp))
            lines.append('   (position 0.0 0.0) (rotation 0.0) (size {} {}) (drill 0.0)'.format(
                ff(exposed_length), ff(config.exposed_width)))
            lines.append('  )')

            # Measure clearance pad-exposed pad
            clearance = abs(pad_pos_x) - (pad_length / 2) - (exposed_length / 2)
            if np.around(clearance, decimals=2) < MIN_CLEARANCE:
                print("Warning: minimal clearance violated in {}: {:.4f} < {:.2f}".format(full_name, clearance, MIN_CLEARANCE))

        # Create Silk Screen (lines and dot only)
        silk_down = (config.length / 2 - SILKSCREEN_OFFSET -
                     get_y(1, half_n_pads, config.pitch, False) -
                     config.lead_width / 2 -
                     SILKSCREEN_LINE_WIDTH / 2)  # required for round ending of line

        # Measure clearance silkscreen to exposed pad
        silk_top_line_height = config.length / 2
        if make_exposed:
            silk_clearance = silk_top_line_height - (SILKSCREEN_LINE_WIDTH / 2) - (config.exposed_width / 2)
            if np.around(silk_clearance, decimals=2) < SILKSCREEN_OFFSET:
                silk_top_line_height = silk_top_line_height + (SILKSCREEN_OFFSET - silk_clearance)
                silk_down = silk_down + (SILKSCREEN_OFFSET - silk_clearance)
                print("Increasing exp-silk clearance from {:.4f} to {:.2f}".format(silk_clearance, SILKSCREEN_OFFSET))

        # One silkscreen polyline at the top (+1) and one at the bottom (-1).
        for idx, silkscreen_pos in enumerate([-1, 1]):
            uuid_silkscreen_poly = _uuid('polygon-silkscreen-{}-{}'.format(key, idx))
            lines.append('  (polygon {} (layer top_placement)'.format(uuid_silkscreen_poly))
            lines.append('   (width {}) (fill false) (grab_area false)'.format(
                SILKSCREEN_LINE_WIDTH))
            lines.append('   (vertex (position {} {}) (angle 0.0))'.format(
                ff(-config.width / 2),
                ff(silkscreen_pos * (silk_top_line_height - silk_down))))
            # If this is negative, the silkscreen line has to be moved away from
            # the real position, in order to keep the required distance to the
            # pad. We then only draw a single line, so we can omit the parts below.
            if silk_down > 0:
                lines.append('   (vertex (position {} {}) (angle 0.0))'.format(
                    ff(-config.width / 2),
                    ff(silkscreen_pos * silk_top_line_height)))
                lines.append('   (vertex (position {} {}) (angle 0.0))'.format(
                    ff(config.width / 2),
                    ff(silkscreen_pos * silk_top_line_height)))
            lines.append('   (vertex (position {} {}) (angle 0.0))'.format(
                ff(config.width / 2),
                ff(silkscreen_pos * (silk_top_line_height - silk_down))))
            lines.append('  )')

        # Create leads on docu
        uuid_leads = [_uuid('lead-{}'.format(p)) for p in range(1, config.pin_count + 1)]
        for pad_idx, pad_nr in enumerate(range(1, config.pin_count + 1)):
            lead_uuid = uuid_leads[pad_idx]

            # Make silkscreen lead exact pad width and length
            half_n_pads = config.pin_count // 2
            pad_pos_y = get_y(pad_idx % half_n_pads + 1, half_n_pads, config.pitch, False)
            if pad_idx >= (config.pin_count / 2):
                pad_pos_y = - pad_pos_y
            y_min = pad_pos_y - config.lead_width / 2
            y_max = pad_pos_y + config.lead_width / 2
            x_max = config.width / 2
            x_min = x_max - config.lead_length
            if pad_idx < (config.pin_count / 2):
                x_min, x_max = - x_min, - x_max

            # Convert numbers to librepcb format
            x_min_str, x_max_str = ff(x_min), ff(x_max)
            y_min_str, y_max_str = ff(y_min), ff(y_max)

            lines.append('  (polygon {} (layer top_documentation)'.format(lead_uuid))
            lines.append('   (width 0.0) (fill true) (grab_area false)')
            lines.append('   (vertex (position {} {}) (angle 0.0))'.format(x_min_str, y_max_str))
            lines.append('   (vertex (position {} {}) (angle 0.0))'.format(x_max_str, y_max_str))
            lines.append('   (vertex (position {} {}) (angle 0.0))'.format(x_max_str, y_min_str))
            lines.append('   (vertex (position {} {}) (angle 0.0))'.format(x_min_str, y_min_str))
            lines.append('   (vertex (position {} {}) (angle 0.0))'.format(x_min_str, y_max_str))
            lines.append('  )')

        # Create exposed pad on docu
        if make_exposed:
            uuid_docu_exposed = _uuid('lead-exposed')

            x_min, x_max = - config.exposed_length / 2, config.exposed_length / 2
            y_min, y_max = - config.exposed_width / 2, config.exposed_width / 2

            lines.append('  (polygon {} (layer top_documentation)'.format(uuid_docu_exposed))
            lines.append('   (width 0.0) (fill true) (grab_area false)')
            lines.append('   (vertex (position {} {}) (angle 0.0))'.format(x_min, y_max))
            lines.append('   (vertex (position {} {}) (angle 0.0))'.format(x_max, y_max))
            lines.append('   (vertex (position {} {}) (angle 0.0))'.format(x_max, y_min))
            lines.append('   (vertex (position {} {}) (angle 0.0))'.format(x_min, y_min))
            lines.append('   (vertex (position {} {}) (angle 0.0))'.format(x_min, y_max))
            lines.append('  )')

        # Create body outline on docu
        uuid_body_outline = _uuid('body-outline')
        outline_line_width = 0.2
        dx = config.width / 2 - outline_line_width / 2
        dy = config.length / 2 - outline_line_width / 2
        lines.append('  (polygon {} (layer top_documentation)'.format(uuid_body_outline))
        lines.append('   (width {}) (fill false) (grab_area false)'.format(outline_line_width))
        lines.append('   (vertex (position {} {}) (angle 0.0))'.format(-dx, dy))
        lines.append('   (vertex (position {} {}) (angle 0.0))'.format(dx, dy))
        lines.append('   (vertex (position {} {}) (angle 0.0))'.format(dx, -dy))
        lines.append('   (vertex (position {} {}) (angle 0.0))'.format(-dx, -dy))
        lines.append('   (vertex (position {} {}) (angle 0.0))'.format(-dx, dy))
        lines.append('  )')

        # Hook for config-specific extra documentation drawing.
        if config.extended_doc_fn:
            config.extended_doc_fn(config, _uuid, lines)

        # As discussed in https://github.com/LibrePCB-Libraries/LibrePCB_Base.lplib/pull/16
        # the silkscreen circle should have size SILKSCREEN_LINE_WIDTH for small packages,
        # and twice the size for larger packages. We define small to be either W or L <3.0mm
        # and large if both W and L >= 3.0mm
        if config.width >= 3.0 and config.length >= 3.0:
            silkscreen_circ_dia = 2.0 * SILKSCREEN_LINE_WIDTH
        else:
            silkscreen_circ_dia = SILKSCREEN_LINE_WIDTH

        if silkscreen_circ_dia == SILKSCREEN_LINE_WIDTH:
            silk_circ_y = config.length / 2 + silkscreen_circ_dia
            silk_circ_x = -config.width / 2 - SILKSCREEN_LINE_WIDTH
        else:
            silk_circ_y = config.length / 2 + SILKSCREEN_LINE_WIDTH / 2
            silk_circ_x = -config.width / 2 - silkscreen_circ_dia

        # Move silkscreen circle upwards if the line is moved too
        if silk_down < 0:
            silk_circ_y = silk_circ_y - silk_down

        uuid_silkscreen_circ = _uuid('circle-silkscreen-{}'.format(key))
        lines.append('  (circle {} (layer top_placement)'.format(uuid_silkscreen_circ))
        lines.append('   (width 0.0) (fill true) (grab_area false) '
                     '(diameter {}) (position {} {})'.format(
                         ff(silkscreen_circ_dia), ff(silk_circ_x), ff(silk_circ_y)))
        lines.append('  )')

        # Add name and value labels
        uuid_text_name = _uuid('text-name-{}'.format(key))
        uuid_text_value = _uuid('text-value-{}'.format(key))

        lines.append('  (stroke_text {} (layer top_names)'.format(uuid_text_name))
        lines.append('   {}'.format(TEXT_ATTRS))
        lines.append('   (align center bottom) (position 0.0 {}) (rotation 0.0)'.format(
            config.length / 2 + LABEL_OFFSET))
        lines.append('   (auto_rotate true) (mirror false) (value "{{NAME}}")')
        lines.append('  )')
        lines.append('  (stroke_text {} (layer top_values)'.format(uuid_text_value))
        lines.append('   {}'.format(TEXT_ATTRS))
        lines.append('   (align center top) (position 0.0 {}) (rotation 0.0)'.format(
            -config.length / 2 - LABEL_OFFSET))
        lines.append('   (auto_rotate true) (mirror false) (value "{{VALUE}}")')
        lines.append('  )')

        # Closing parenthese for footprint
        lines.append(' )')

    # Apply function to available footprints
    _generate_footprint('reflow', 'reflow', 0.0)
    _generate_footprint('hand-soldering', 'hand soldering', 0.3)

    # Final closing parenthese
    lines.append(')')

    # Save package
    pkg_dir_path = path.join(dirpath, uuid_pkg)
    if not (path.exists(pkg_dir_path) and path.isdir(pkg_dir_path)):
        makedirs(pkg_dir_path)
    with open(path.join(pkg_dir_path, '.librepcb-pkg'), 'w') as f:
        f.write('0.1\n')
    with open(path.join(pkg_dir_path, 'package.lp'), 'w') as f:
        f.write('\n'.join(lines))
        f.write('\n')

    return full_name
if os.path.isfile("confidentials.json"): if check_confidentials() == 0: confidentials = common.read_json("confidentials.json") work_dir = "." if os.path.isdir(work_dir): conn = connect() link_list = [] for line in read_txt(txt_file_name): line = line.replace("\r", "").replace("\n", "") if line.strip() != "": video_id = line.split("youtu.be/")[1] if video_exists(video_id) == 1: message = str( '{:s} [LINK] "{:s}" Video already exists.'.format( common.now(2), video_id)) write_log(message) print(message) else: link = "https://youtu.be/" + video_id + "\n" if link not in link_list: link_list.append(link) else: message = str( '{:s} [LINK] "{:s}" Link already exists.'. format(common.now(2), video_id)) write_log(message) print(message) write_txt(txt_file_name, link_list) input() else:
# And let's load the config file into common-space common.load_configuration(args.configfile) # For simplicity, shift to the repo root path = common.get_path() chdir(path) # Load checks and actions into the common-space # Sources are loaded by the checks themselves # Likewise, filters are loaded by actions common.get_thing('checks', common.checks) common.get_thing('actions', common.actions) while True: messages = [] now = common.now(update=True) for check in common.checks: if check.last_check + check.interval < now: for name, source in check.sources.items(): if source.last_check < now: if source.run(): source.last_check = now check_messages = check.run() if check_messages: check.degraded = False check.last_check = now check.update(check_messages) else: if not check.degraded: if check.degrade(): check.degraded = True