def get_control_image_path(self, test_name):
    """Return the path of the control image matching *test_name*.

    If *test_name* is already an existing file path it is returned
    unchanged.  Otherwise the ``control_images`` test-data tree next to
    the running script is searched for a folder whose path contains
    *test_name* and which holds exactly one non-mask PNG.

    :param test_name: test name or direct path to a control image
    :return: path to the control image, or None (after a warning dialog)
             when zero or multiple candidate folders match
    """
    if os.path.isfile(test_name):
        # BUG FIX: the original did `return path`, but no `path` name
        # exists in this scope (NameError); the existing file itself is
        # the control image the caller asked for.
        return test_name

    # else try and find matching test image
    script_folder = os.path.dirname(os.path.realpath(sys.argv[0]))
    control_images_folder = os.path.join(
        script_folder, '../tests/testdata/control_images')

    matching_control_images = [x[0]
                               for x in os.walk(control_images_folder)
                               if test_name in x[0]]
    if len(matching_control_images) > 1:
        QMessageBox.warning(
            self, 'Result',
            'Found multiple matching control images for {}'.format(test_name))
        return None
    elif len(matching_control_images) == 0:
        QMessageBox.warning(
            self, 'Result',
            'No matching control images found for {}'.format(test_name))
        return None

    found_control_image_path = matching_control_images[0]

    # check for a single matching expected image
    images = glob.glob(os.path.join(found_control_image_path, '*.png'))
    filtered_images = [i for i in images if not i[-9:] == '_mask.png']
    if len(filtered_images) > 1:
        error(
            'Found multiple matching control images for {}'.format(test_name))
    elif len(filtered_images) == 0:
        error('No matching control images found for {}'.format(test_name))

    found_image = filtered_images[0]
    print('Found matching control image: {}'.format(found_image))
    return found_image
def upgrade(version):
    """
    Upgrading program

    :param version: version name for VK Stats

    Downloads the release ZIP for *version* from GitHub into a fresh
    temporary directory and extracts it over SCRIPTDIR.  Always exits the
    process (directly, or via error() on a permission failure).
    """
    log_write(_("Creating a temporary directory..."))
    # SECURITY FIX: tempfile.mktemp() only generates a name, so another
    # process could claim it before os.mkdir() ran (TOCTOU race).
    # mkdtemp() creates the directory atomically with 0700 permissions.
    tmpdir = tempfile.mkdtemp(prefix="sysrq-")
    log_write(_("Downloading the new version..."))
    archive_file = "{}/VK_Stats.zip".format(tmpdir)
    request.urlretrieve(
        "https://github.com/CyberTailor/vk-stats/releases/download/{0}/Stats-{0}.zip".format(version),
        filename=archive_file)
    log_write(_("Unpacking an archive..."))
    archive = zipfile.ZipFile(archive_file)
    try:
        archive.extractall(path=SCRIPTDIR)  # extract ZIP to script directory
    except PermissionError:
        # Installed system-wide: we must not overwrite managed files.
        if console:
            print(_("Please, upgrade the program using package manager or installer"))
            exit()
        else:
            error(primary=_("Can't upgrade"),
                  secondary=_("Please, upgrade the program using package manager or installer"))
    log_write(_("Exiting..."))
    exit()
def initializeIceConnection(self):
    """
    Establishes the two-way Ice connection and adds the authenticator to the
    configured servers.

    Side effects: stores proxies on self.meta / self.metacb / self.auth and
    returns the result of self.attachCallbacks().
    """
    ice = self.communicator()

    if cfg.ice.secret:
        debug('Using shared ice secret')
        # The secret travels in the implicit context of every Ice call.
        ice.getImplicitContext().put("secret", cfg.ice.secret)
    elif not cfg.glacier.enabled:
        warning('Consider using an ice secret to improve security')

    if cfg.glacier.enabled:
        #info('Connecting to Glacier2 server (%s:%d)', glacier_host, glacier_port)
        error('Glacier support not implemented yet')
        #TODO: Implement this

    info('Connecting to Ice server (%s:%d)', cfg.ice.host, cfg.ice.port)
    base = ice.stringToProxy('Meta:tcp -h %s -p %d' % (cfg.ice.host, cfg.ice.port))
    self.meta = Murmur.MetaPrx.uncheckedCast(base)

    # Local adapter so the server can call back into us.
    adapter = ice.createObjectAdapterWithEndpoints('Callback.Client',
                                                   'tcp -h %s' % cfg.ice.host)
    adapter.activate()

    metacbprx = adapter.addWithUUID(metaCallback(self))
    self.metacb = Murmur.MetaCallbackPrx.uncheckedCast(metacbprx)

    authprx = adapter.addWithUUID(DjangoAuthenticator())
    self.auth = Murmur.ServerUpdatingAuthenticatorPrx.uncheckedCast(authprx)

    return self.attachCallbacks()
def attachCallbacks(self, quiet = False): """ Attaches all callbacks for meta and authenticators """ # Ice.ConnectionRefusedException #debug('Attaching callbacks') try: if not quiet: info('Attaching meta callback') self.meta.addCallback(self.metacb) for server in self.meta.getBootedServers(): if not cfg.murmur.servers or server.id() in cfg.murmur.servers: if not quiet: info('Setting authenticator for virtual server %d', server.id()) server.setAuthenticator(self.auth) except (Murmur.InvalidSecretException, Ice.UnknownUserException, Ice.ConnectionRefusedException) as e: if isinstance(e, Ice.ConnectionRefusedException): error('Server refused connection') elif isinstance(e, Murmur.InvalidSecretException) or \ isinstance(e, Ice.UnknownUserException) and (e.unknown == 'Murmur::InvalidSecretException'): error('Invalid ice secret') else: # We do not actually want to handle this one, re-raise it raise e self.connected = False return False self.connected = True return True
def get_configfiles(log):
    ''' Download lircd.conf and perhaps lircmd.conf,
    appending progress lines to *log* and returning it.

    Reads the file names from the module-level model.config; local files
    are copied into the current directory, otherwise they are fetched
    from _REMOTES_BASE_URI into result_dir.
    '''

    def error(ex, uri):
        ''' Handle download error. '''
        text = "Cannot download %s : %s" % (uri, str(ex))
        view.show_error("Download error", text)

    if not model.config.lircd_conf \
            or model.config.lircd_conf == _MANUAL_REMOTE_INSTALL:
        # pylint: disable=bad-indentation
        text = "No lircd.conf defined, skipping"
        view.show_warning("Download error", text)
        return log
    for item in ['lircd_conf', 'lircmd_conf']:
        if not getattr(model.config, item):
            # lircmd_conf is optional; skip when unset.
            continue
        src = getattr(model.config, item)
        if os.path.exists(src):
            # Local path: copy instead of downloading.
            shutil.copy2(src, '.')
            log += 'Info: Copied %s to %s\n' % (str(src), os.getcwd())
            continue
        uri = os.path.join(_REMOTES_BASE_URI, src)
        path = os.path.join(result_dir, os.path.basename(src))
        try:
            urllib.request.urlretrieve(uri + '?format=raw', path)
            log += 'Info: Downloaded %s to %s\n' % (str(uri), str(path))
        except urllib.error.HTTPError as ex:
            error(ex, uri)
    return log
def get_configfiles(log):
    ''' Download lircd.conf and perhaps lircmd.conf,
    appending progress lines to *log* and returning it.

    Dict-based variant: reads file names from the module-level `config`
    mapping and downloads them from _REMOTES_BASE_URI into result_dir.
    '''

    def error(ex, uri):
        ''' Handle download error. '''
        text = "Cannot download %s : %s" % (uri, str(ex))
        view.show_error("Download error", text)

    if 'lircd_conf' not in config or not config['lircd_conf'] \
            or config['lircd_conf'] == _MANUAL_REMOTE_INSTALL:
        # pylint: disable=bad-indentation
        text = "No lircd.conf defined, skipping"
        view.show_warning("Download error", text)
        return log
    for item in ['lircd_conf', 'lircmd_conf']:
        if item not in config:
            # lircmd_conf is optional; skip when absent.
            continue
        uri = os.path.join(_REMOTES_BASE_URI, config[item])
        path = os.path.join(result_dir, os.path.basename(config[item]))
        try:
            urllib.request.urlretrieve(uri + '?format=raw', path)
            log += 'Info: Downloaded %s to %s\n' % (str(uri), str(path))
        except urllib.error.HTTPError as ex:
            error(ex, uri)
    return log
def execute(cls, *args, **kwargs):
    """Run a query on a fresh cursor, retrying exactly once after a
    db.OperationalError (e.g. a dropped connection).

    :return: the cursor the statement was executed on
    :raises threadDbException: when the retry fails as well
    """
    if "threadDB__retry_execution__" in kwargs:
        # Have a magic keyword so we can call ourselves while preventing
        # an infinite loop
        del kwargs["threadDB__retry_execution__"]
        retry = False
    else:
        retry = True

    c = cls.cursor()
    try:
        c.execute(*args, **kwargs)
    except db.OperationalError as e:
        error('Database operational error %d: %s', e.args[0], e.args[1])
        # Drop the broken connection so the retry reconnects.
        c.close()
        cls.invalidate_connection()
        if retry:
            # Make sure we only retry once
            info('Retrying database operation')
            kwargs["threadDB__retry_execution__"] = True
            c = cls.execute(*args, **kwargs)
        else:
            error('Database operation failed ultimately')
            raise threadDbException()
    return c
def call_api(method, *, token, params):
    """
    Calling VK API

    :param method: method name from https://vk.com/dev/methods
    :param params: parameters for method (dict)
    :param token: access_token
    :return: result of calling API method

    NOTE(review): mutates the caller's *params* dict (adds access_token
    and v) and retries network errors forever — confirm both are intended.
    """
    params.update({"access_token": token, "v": api_ver})
    data = urlencode(params)
    headers = {"Content-length": str(len(data))}
    url = "https://api.vk.com/method/" + method
    req = request.Request(url, data=bytes(data, encoding="utf-8"), headers=headers)
    result = None
    while result is None:
        # Keep retrying until the request succeeds.
        try:
            result = json.loads(request.urlopen(req, timeout=5).read().decode("utf-8"))
        except (urllib.error.URLError, socket.error) as err:
            log_write(_("Error: {}. Waiting for 10 seconds...").format(err))
            time.sleep(10)
    if "error" in result:
        # API-level error payload: report and (in console mode) abort.
        if console:
            log_write("VK API {error_code}: {error_msg}".format(**result["error"]), to=sys.stderr)
            exit()
        else:
            error(primary="VK API {error_code}".format(**result["error"]),
                  secondary=result["error"]["error_msg"])
    # Throttle: VK allows ~3 requests per second.
    time.sleep(0.33)
    return result["response"]
def get_feed(url):
    """Fetch *url*, parse it as a feed, and yield every entry (oldest
    first) wrapped in an Object.  Yields nothing when the fetch fails or
    the payload has no entries."""
    from meds.object import Object
    import feedparser as fp
    parsed = {}
    try:
        payload = get_url(url)
    except Exception:
        error("%s %s" % (url, get_exception()))
        return
    parsed = fp.parse(payload)
    if "entries" not in parsed:
        return
    for item in parsed["entries"][::-1]:
        yield Object(item)
def updateMask(control_image_path, rendered_image_path, mask_image_path):
    """Grow the per-pixel tolerance mask so the rendered image would pass
    comparison against the control image.

    Mismatching pixels get their color difference written into the mask;
    mask pixels already at 255 are treated as 'always ignore'.  The mask
    file is only rewritten when at least one pixel changed.
    """
    control_image = imageFromPath(control_image_path)
    if not control_image:
        error("Could not read control image {}".format(control_image_path))
    rendered_image = imageFromPath(rendered_image_path)
    if not rendered_image:
        error("Could not read rendered image {}".format(rendered_image_path))
    if not rendered_image.width() == control_image.width() or not rendered_image.height() == control_image.height():
        print(
            (
                "Size mismatch - control image is {}x{}, rendered image is {}x{}".format(
                    control_image.width(),
                    control_image.height(),
                    rendered_image.width(),
                    rendered_image.height()
                )
            )
        )

    # Compare only the overlapping region when sizes differ.
    max_width = min(rendered_image.width(), control_image.width())
    max_height = min(rendered_image.height(), control_image.height())

    # read current mask, if it exist
    mask_image = imageFromPath(mask_image_path)
    if mask_image.isNull():
        print("Mask image does not exist, creating {}".format(mask_image_path))
        mask_image = QImage(control_image.width(), control_image.height(), QImage.Format_ARGB32)
        mask_image.fill(QColor(0, 0, 0))

    # loop through pixels in rendered image and compare
    mismatch_count = 0
    linebytes = max_width * 4  # 4 bytes per ARGB32 pixel
    for y in range(max_height):
        control_scanline = control_image.constScanLine(y).asstring(linebytes)
        rendered_scanline = rendered_image.constScanLine(y).asstring(linebytes)
        mask_scanline = mask_image.scanLine(y).asstring(linebytes)
        for x in range(max_width):
            # NOTE(review): unpacking "I" then qRed() assumes the host
            # byte order matches Qt's ARGB32 layout — confirm on big-endian.
            currentTolerance = qRed(struct.unpack("I", mask_scanline[x * 4 : x * 4 + 4])[0])
            if currentTolerance == 255:
                # ignore pixel
                continue
            expected_rgb = struct.unpack("I", control_scanline[x * 4 : x * 4 + 4])[0]
            rendered_rgb = struct.unpack("I", rendered_scanline[x * 4 : x * 4 + 4])[0]
            difference = colorDiff(expected_rgb, rendered_rgb)
            if difference > currentTolerance:
                # update mask image
                mask_image.setPixel(x, y, qRgb(difference, difference, difference))
                mismatch_count += 1
    if mismatch_count:
        # update mask
        mask_image.save(mask_image_path, "png")
        print("Updated {} pixels in {}".format(mismatch_count, mask_image_path))
    else:
        print("No mismatches in {}".format(mask_image_path))
def newfunc(*args, **kws):
    # Wrapper (closure over `func` from the enclosing decorator) that
    # verifies the Ice request context carries the configured shared
    # secret before delegating to the wrapped servant method.
    if 'current' in kws:
        current = kws["current"]
    else:
        # Ice passes the Current object as the last positional argument
        # when it is not given by keyword.
        current = args[-1]
    if not current or 'secret' not in current.ctx or current.ctx['secret'] != cfg.ice.secret:
        error('Server transmitted invalid secret. Possible injection attempt.')
        raise Murmur.InvalidSecretException()
    return func(*args, **kws)
def getControlImagePath(path):
    """Resolve *path* to a control image file.

    An existing file path is returned as-is.  Otherwise the
    control_images test-data tree next to the running script is searched
    for a folder whose path contains *path* and which holds exactly one
    non-mask PNG; that PNG's path is returned.
    """
    if os.path.isfile(path):
        return path

    # Not a direct file reference - search the control image tree.
    here = os.path.dirname(os.path.realpath(sys.argv[0]))
    search_root = os.path.join(here, "../tests/testdata/control_images")
    candidates = [folder for folder, _dirs, _files in os.walk(search_root)
                  if path in folder]
    if len(candidates) > 1:
        error("Found multiple matching control images for {}".format(path))
    elif not candidates:
        error("No matching control images found for {}".format(path))
    control_dir = candidates[0]

    # Exactly one expected (non-mask) PNG must live in the matched folder.
    pngs = glob.glob(os.path.join(control_dir, "*.png"))
    expected = [p for p in pngs if not p.endswith("_mask.png")]
    if len(expected) > 1:
        error("Found multiple matching control images for {}".format(path))
    elif not expected:
        error("No matching control images found for {}".format(path))
    found = expected[0]
    print("Found matching control image: {}".format(found))
    return found
def __handle(self, msg='', token=None, error=xml.dom.SyntaxErr,
             neverraise=False, args=None):
    """
    handles all calls logs or raises exception

    :param msg: message text; position/value info from *token* is appended
    :param token: either a (type, value, line, col) tuple or an object
                  with .value/.line/.col attributes
    :param error: exception class to raise (None falls back to SyntaxErr)
    :param neverraise: log instead of raising even in raise mode
    """
    if self.enabled:
        if error is None:
            error = xml.dom.SyntaxErr

        line, col = None, None
        if token:
            if isinstance(token, tuple):
                value, line, col = token[1], token[2], token[3]
            else:
                value, line, col = token.value, token.line, token.col
            msg = '%s [%s:%s: %s]' % (
                msg, line, col, value)

        if error and self.raiseExceptions and not neverraise:
            if isinstance(error, urllib.error.HTTPError) or isinstance(error, urllib.error.URLError):
                # An already-caught network error instance: re-raise it.
                raise
            elif issubclass(error, xml.dom.DOMException):
                # Attach position info to the DOM exception class before
                # instantiating it.
                error.line = line
                error.col = col
            raise error(msg)
        else:
            self._logcall(msg)
def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT,
                      source_address=None):
    """Connect to *address* and return the socket object.

    Convenience function.  Connect to *address* (a 2-tuple ``(host,
    port)``) and return the socket object.  Passing the optional
    *timeout* parameter will set the timeout on the socket instance
    before attempting to connect.  If no *timeout* is supplied, the
    global default timeout setting returned by :func:`getdefaulttimeout`
    is used.  If *source_address* is set it must be a tuple of (host, port)
    for the socket to bind as a source address before making the connection.
    An host of '' or port 0 tells the OS to use the default.
    """

    host, port = address
    err = None
    # Try every address family/socktype getaddrinfo resolves, in order,
    # returning the first socket that connects.
    for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)
            if timeout is not _GLOBAL_DEFAULT_TIMEOUT:
                sock.settimeout(timeout)
            if source_address:
                sock.bind(source_address)
            sock.connect(sa)
            return sock

        # NOTE(review): `error` here is presumably the module-level
        # socket error alias (OSError) as in the stdlib original — confirm.
        except error:
            err = True
            if sock is not None:
                sock.close()

    if err:
        # Re-raise the exception from the last attempt.
        raise
    else:
        raise error("getaddrinfo returns an empty list")
def load_images(self, control_image_path, rendered_image_path, mask_image_path):
    """Load control/rendered/mask images into self, build the diff image,
    and push all four into their preview labels.

    Skips to the next test (self.load_next) when no diff image could be
    produced.  A missing mask is created as an all-black ARGB32 image.
    """
    self.control_image = imageFromPath(control_image_path)
    if not self.control_image:
        error('Could not read control image {}'.format(control_image_path))

    self.rendered_image = imageFromPath(rendered_image_path)
    if not self.rendered_image:
        error(
            'Could not read rendered image {}'.format(rendered_image_path))

    if not self.rendered_image.width() == self.control_image.width() or not self.rendered_image.height() == self.control_image.height():
        print(
            'Size mismatch - control image is {}x{}, rendered image is {}x{}'.format(self.control_image.width(),
                                                                                     self.control_image.height(),
                                                                                     self.rendered_image.width(),
                                                                                     self.rendered_image.height()))

    # NOTE(review): max_width/max_height are computed but not used in
    # this method — possibly leftovers from the mask-update loop.
    max_width = min(
        self.rendered_image.width(), self.control_image.width())
    max_height = min(
        self.rendered_image.height(), self.control_image.height())

    # read current mask, if it exist
    self.mask_image = imageFromPath(mask_image_path)
    if self.mask_image.isNull():
        print(
            'Mask image does not exist, creating {}'.format(mask_image_path))
        self.mask_image = QImage(
            self.control_image.width(), self.control_image.height(), QImage.Format_ARGB32)
        self.mask_image.fill(QColor(0, 0, 0))

    self.diff_image = self.create_diff_image(
        self.control_image, self.rendered_image, self.mask_image)
    if not self.diff_image:
        self.load_next()
        return

    self.control_label.setPixmap(QPixmap.fromImage(self.control_image))
    self.rendered_label.setPixmap(QPixmap.fromImage(self.rendered_image))
    self.mask_label.setPixmap(QPixmap.fromImage(self.mask_image))
    self.diff_label.setPixmap(QPixmap.fromImage(self.diff_image))
    self.preview_mask()
def started(self, server, current = None):
    """
    This function is called when a virtual server is started
    and makes sure an authenticator gets attached if needed.

    :param server: proxy of the virtual server that just started
    :param current: Ice Current object (unused)
    """
    if not cfg.murmur.servers or server.id() in cfg.murmur.servers:
        info('Setting authenticator for virtual server %d', server.id())
        try:
            server.setAuthenticator(app.auth)
        # Apparently this server was restarted without us noticing
        except (Murmur.InvalidSecretException, Ice.UnknownUserException) as e:
            if hasattr(e, "unknown") and e.unknown != "Murmur::InvalidSecretException":
                # Special handling for Murmur 1.2.2 servers with invalid slice files
                raise e
            error('Invalid ice secret')
            return
    else:
        # Server is explicitly excluded from our handling.
        debug('Virtual server %d got started', server.id())
def __data_parser__(self, data):
    """Extract the auction list from a Taobao search response and map each
    item to a summary dict (intro/price/delivery/sales/belong/url).

    Returns an empty list (after reporting) when the payload has no
    auctions or is missing an expected key.
    """
    try:
        auctions = data['mods']['itemlist']['data']['auctions']
        if auctions:
            goods = []
            for item in auctions:
                fee = item["view_fee"]
                is_tmall = item.get('shopcard', {}).get('isTmall', False)
                goods.append({
                    'intro': item["raw_title"],
                    'price': float(item["view_price"]),
                    # Highlight non-free delivery in red.
                    'delivery': colorful_text(fee, Fore.RED) if float(fee) > 0 else fee,
                    'sales': int(item["view_sales"].split('人')[0]),
                    'belong': colorful_text("天猫", Fore.CYAN) if is_tmall else "淘宝",
                    'url': item["detail_url"]
                })
            return goods
        error('Ops, get no goods..')
        return []
    except KeyError:
        error('Ops, some key error happened..')
        return []
def checkConnection(self): """ Tries reapplies all callbacks to make sure the authenticator survives server restarts and disconnects. """ #debug('Watchdog run') try: if not self.attachCallbacks(quiet = not self.failedWatch): self.failedWatch = True else: self.failedWatch = False except Ice.Exception as e: error('Failed connection check, will retry in next watchdog run (%ds)', cfg.ice.watchdog) debug(str(e)) self.failedWatch = True # Renew the timer self.watchdog = Timer(cfg.ice.watchdog, self.checkConnection) self.watchdog.start()
def connection(cls):
    """Return the calling thread's database connection, creating and
    caching it (in cls.db_connections, keyed by thread id) on first use.

    :return: a live DB-API connection with autocommit enabled
    :raises threadDbException: when the database server is unreachable
    """
    tid = _thread.get_ident()
    try:
        con = cls.db_connections[tid]
    except KeyError:
        # BUG FIX: this was a bare `except:`, which also swallowed
        # unrelated errors (including KeyboardInterrupt/SystemExit) and
        # masked them as "no cached connection".  Only a cache miss
        # should trigger a fresh connect.
        info('Connecting to database server (%s %s:%d %s) for thread %d',
             cfg.database.lib, cfg.database.host, cfg.database.port,
             cfg.database.name, tid)

        try:
            con = db.connect(host = cfg.database.host,
                             port = cfg.database.port,
                             user = cfg.database.user,
                             password = cfg.database.password,
                             database = cfg.database.name)
            con.autocommit = True
        except db.Error as e:
            error('Could not connect to database: %s', str(e))
            raise threadDbException()

        cls.db_connections[tid] = con
    return con
def updateMask(control_image_path, rendered_image_path, mask_image_path):
    """Grow the per-pixel tolerance mask so the rendered image would pass
    comparison against the control image (single-quote variant).

    Mismatching pixels get their color difference written into the mask;
    mask pixels already at 255 mean 'always ignore'.  The mask file is
    only rewritten when at least one pixel changed.
    """
    control_image = imageFromPath(control_image_path)
    if not control_image:
        error('Could not read control image {}'.format(control_image_path))

    rendered_image = imageFromPath(rendered_image_path)
    if not rendered_image:
        error('Could not read rendered image {}'.format(rendered_image_path))

    if not rendered_image.width() == control_image.width(
    ) or not rendered_image.height() == control_image.height():
        print(
            ('Size mismatch - control image is {}x{}, rendered image is {}x{}'.
             format(control_image.width(),
                    control_image.height(),
                    rendered_image.width(),
                    rendered_image.height())))

    # Compare only the overlapping region when sizes differ.
    max_width = min(rendered_image.width(), control_image.width())
    max_height = min(rendered_image.height(), control_image.height())

    # read current mask, if it exist
    mask_image = imageFromPath(mask_image_path)
    if mask_image.isNull():
        print('Mask image does not exist, creating {}'.format(mask_image_path))
        mask_image = QImage(control_image.width(), control_image.height(),
                            QImage.Format_ARGB32)
        mask_image.fill(QColor(0, 0, 0))

    # loop through pixels in rendered image and compare
    mismatch_count = 0
    linebytes = max_width * 4  # 4 bytes per ARGB32 pixel
    for y in range(max_height):
        control_scanline = control_image.constScanLine(y).asstring(linebytes)
        rendered_scanline = rendered_image.constScanLine(y).asstring(linebytes)
        mask_scanline = mask_image.scanLine(y).asstring(linebytes)

        for x in range(max_width):
            # NOTE(review): unpacking 'I' then qRed() assumes the host
            # byte order matches Qt's ARGB32 layout — confirm on big-endian.
            currentTolerance = qRed(
                struct.unpack('I', mask_scanline[x * 4:x * 4 + 4])[0])

            if currentTolerance == 255:
                # ignore pixel
                continue

            expected_rgb = struct.unpack('I', control_scanline[x * 4:x * 4 + 4])[0]
            rendered_rgb = struct.unpack('I', rendered_scanline[x * 4:x * 4 + 4])[0]
            difference = colorDiff(expected_rgb, rendered_rgb)

            if difference > currentTolerance:
                # update mask image
                mask_image.setPixel(x, y, qRgb(difference, difference, difference))
                mismatch_count += 1

    if mismatch_count:
        # update mask
        mask_image.save(mask_image_path, "png")
        print('Updated {} pixels in {}'.format(mismatch_count, mask_image_path))
    else:
        print('No mismatches in {}'.format(mask_image_path))
def __init__(self, originalFields={}, fieldTypes={}):
    """Build a SQL-ready field/operator structure from a CGI submission.

    Pass 1 reads operators and typed values out of cgi.FieldStorage();
    pass 2 rewrites ops/values into SQL form ('begins' -> LIKE 'x%', NOT
    handling, NULL handling).  Results land in self.fields; the pre-SQL
    snapshot in self.displayFields; the raw submission in
    self.cgiFieldStorage.

    NOTE(review): mutable default arguments ({}) are shared across calls;
    also `string.find/split/atoi/atof`, StringType and ListType are
    Python-2 idioms that fail on Python 3 — confirm the intended runtime.
    """
    # Store a safe copy for posible later use
    self.originalFields = copy.deepcopy(originalFields)
    # Make sure we don't chnage the originals in case the caller uses them
    fields = copy.deepcopy(originalFields)
    delim = ':'
    #get the arguments from standard in
    form = cgi.FieldStorage()
    # preserve the original submission from the form or URL
    self.cgiFieldStorage = form
    nots = []
    keys = list(form.keys())
    # 1st pass - Get operators and values.
    for key in keys:
        # due differences in the way browsers use escape characters
        # (or don't) in the text they submit, we first retrieve the
        # value and then unquote the key (TR 2739)
        item = form[key]
        key = urllib.parse.unquote(key)
        # determine the type of the argument. If none specified,
        # use the default
        if string.find(key, delim) != -1:
            fieldType = string.split(key, delim)[0]
            fieldName = string.split(key, delim)[1]
        elif key in fieldTypes:
            fieldType = fieldTypes[key]
            fieldName = key
        else:
            raise KeyError(key)
        if fieldType == 'op':
            fields[fieldName]['op'] = item.value
        elif fieldType == 'not':
            nots.append(fieldName)
        elif fieldType == 'string':
            fields[fieldName]['val'] = item.value
        elif fieldType == 'int':
            fields[fieldName]['val'] = string.atoi(item.value)
        elif fieldType == 'float':
            try:
                fields[fieldName]['val'] = string.atof(item.value)
            except:
                # NOTE(review): `error` is used here as an exception
                # class — confirm it is one in this module.
                raise error('Unable to convert the '
                            + 'value "' + str(item.value)
                            + '" to a number for field "'
                            + fieldName + '".')
        elif fieldType == 'int_list':
            if type(item) is ListType:
                # Multiple form fields with the same name.
                fields[fieldName]['val'] = []
                for miniItem in item:
                    fields[fieldName]['val'].append(
                        string.atoi(miniItem.value))
            elif type(item.value) is StringType:
                # Single comma-separated string.
                fields[fieldName]['val'] = []
                for s in string.split(item.value, ','):
                    fields[fieldName]['val'].append(string.atoi(s))
            else:
                # It's an instance
                fields[fieldName]['val'] = item.value
        elif fieldType == 'string_list':
            if type(item) is ListType:
                fields[fieldName]['val'] = []
                for miniItem in item:
                    fields[fieldName]['val'].append(miniItem.value)
            elif type(item.value) is StringType:
                tmpItem = item.value
                tmpItem = tmpItem.strip()
                #Double-Quoted strings should not be split
                if (tmpItem[:1] == "\"" and tmpItem[-1:] == "\""):
                    tmpItem = tmpItem[1:-1]
                    fields[fieldName]['val'] = [tmpItem]
                elif (tmpItem[-1:] == ","):
                    #strip any trailing comma
                    tmpItem = tmpItem[:-1]
                    fields[fieldName]['val'] = [tmpItem]
                else:
                    item.value = item.value.replace(', ', ',')
                    fields[fieldName]['val'] = string.split(item.value, ',')
            else:
                # It's an instance
                fields[fieldName]['val'] = item.value
        elif fieldType == 'option_list':
            if type(item) is ListType:
                fields[fieldName]['val'] = []
                for miniItem in item:
                    fields[fieldName]['val'].append(miniItem.value)
            else:
                # It's an instance
                fields[fieldName]['val'] = [item.value]
    # Now that the initial construction is taken care of, save the
    # operators and values before they are made into SQL.
    displayFields = copy.deepcopy(fields)
    # Modify the operators for display
    self.displayFields = processDisplayFields(displayFields, nots)
    # 2nd pass - Modify values as necessary. Delete field if None.
    for key in list(fields.keys()):
        if fields[key]['val'] is None:
            if fields[key]['op'] == 'is null':
                fields[key]['val'] = 'null'
                fields[key]['op'] = 'is'
            elif fields[key]['op'] == 'is not null':
                fields[key]['val'] = 'null'
                fields[key]['op'] = 'is not'
            else:
                del fields[key]
        elif fields[key]['op'] == 'begins':
            # begins -> SQL LIKE with trailing wildcard.
            fields[key]['op'] = 'like'
            if type(fields[key]['val']) is StringType:
                fields[key]['val'] = fields[key]['val'] + '%'
            elif type(fields[key]['val']) is ListType:
                for i in range(len(fields[key]['val'])):
                    fields[key]['val'][i] = fields[key]['val'][i] + '%'
        elif fields[key]['op'] == 'ends':
            # ends -> SQL LIKE with leading wildcard.
            fields[key]['op'] = 'like'
            if type(fields[key]['val']) is StringType:
                fields[key]['val'] = '%' + fields[key]['val']
            elif type(fields[key]['val']) is ListType:
                for i in range(len(fields[key]['val'])):
                    fields[key]['val'][i] = '%' + fields[key]['val'][i]
        elif fields[key]['op'] == 'contains':
            # contains -> SQL LIKE with wildcards on both sides.
            fields[key]['op'] = 'like'
            if type(fields[key]['val']) is StringType:
                fields[key]['val'] = '%' + fields[key]['val'] + '%'
            elif type(fields[key]['val']) is ListType:
                for i in range(len(fields[key]['val'])):
                    fields[key]['val'][i] = '%' + fields[key]['val'][i] + '%'
    # Modify operators if NOT has been checked.
    for key in nots:
        if key in fields:
            if fields[key]['op'] == '=':
                fields[key]['op'] = '!='
            elif fields[key]['op'] == 'is':
                fields[key]['op'] = 'is not'
            else:
                # The operator is 'like', 'begins', etc.
                fields[key]['op'] = 'not ' + fields[key]['op']
    self.fields = fields
def getStats(url):
    """Scrape like/dislike/view counts from a YouTube watch page.

    :param url: full watch-page URL
    :return: [likes, dislikes, views] as ints; [-1, -1, -1] on a bad URL;
             0 is substituted for any count that cannot be parsed
    """
    try:
        try:
            soup = BeautifulSoup(
                urllib.request.urlopen(url).read().decode('utf-8', 'strict'),
                'html.parser')
        except urllib.error.HTTPError as e:
            error(e)
            return [-1, -1, -1]  # if bad url

        # Like/dislike live in fixed button positions; extra buttons
        # ("Statistics"/"Transcript") shift the indices by one or two.
        ratings = soup.find_all('button')
        likes = ratings[24]
        dislikes = ratings[26]
        likes = str(likes).split('>')
        likes = likes[likes.__len__() - 3]
        likes = likes[:likes.__len__() - 6]
        if likes == "Statistics":  # if there is an additional button present
            likes = ratings[25]
            dislikes = ratings[27]
            likes = str(likes).split('>')
            likes = likes[likes.__len__() - 3]
            likes = likes[:likes.__len__() - 6]
            dislikes = str(dislikes).split('>')
            dislikes = dislikes[dislikes.__len__() - 3]
            dislikes = dislikes[:dislikes.__len__() - 6]
        elif likes == "Transcript":  # if there is an additional button present
            likes = ratings[26]
            dislikes = ratings[28]
            likes = str(likes).split('>')
            likes = likes[likes.__len__() - 3]
            likes = likes[:likes.__len__() - 6]
            dislikes = str(dislikes).split('>')
            dislikes = dislikes[dislikes.__len__() - 3]
            dislikes = dislikes[:dislikes.__len__() - 6]
        else:  # if normal
            dislikes = str(dislikes).split('>')
            dislikes = dislikes[dislikes.__len__() - 3]
            dislikes = dislikes[:dislikes.__len__() - 6]
        Views = soup.find_all('div', class_="watch-view-count")
        views = Views[0]
        views = (str(views).split('>'))[1]
        views = views[:views.__len__() - 11]
    except IndexError:
        # Page layout did not match; fall through with whatever parsed.
        pass

    try:
        if str(views) == "No":  # if no views
            views = '0'
    except UnboundLocalError:
        views = '0'

    # BUG FIX: the original used `except ValueError and AttributeError
    # and ...:` — the `and` chain evaluates to just the LAST class, so
    # only that one exception type was actually caught.  A tuple catches
    # all of them as intended.
    try:
        l = int(removeCommas(likes))
    except (ValueError, AttributeError, UnboundLocalError):
        l = 0
    try:
        d = int(removeCommas(dislikes))
    except (ValueError, AttributeError, UnboundLocalError):
        d = 0
    try:
        v = int(removeCommas(views))
    except (ValueError, AttributeError):
        v = 0
    return [l, d, v]
def main():
    """Drive a SOM training run from config.json: read parameters,
    shuffle/concatenate the input files into .temp/, invoke the native
    som / som_pak binaries, and emit results (errors, visualization,
    web bundle) under ./result/.
    """
    PATH = os.path.dirname(os.path.realpath(__file__))
    json_data = open('config.json', 'r')
    # pdb.set_trace()
    data = json.load(json_data)
    data = renameKeys(data)
    #####NAME############
    exp_name = data['name']
    #####ALGORITHM#######
    algo = data['algorithm']['bmu']
    if algo == "normal":
        iAlg = 0
    elif algo == "prog":
        iAlg = 1
    thread_num = data['algorithm']['threads']
    #####DATA############
    data_paths = data['data']['path']
    file_type = data['data']['file_type']
    delimiter = data['data']['delimiter']
    cols = None
    #####PARAMETERS######
    topology = data['parameters']['topology']
    neigF = data['parameters']['neighborhood_fcion']
    neigR = data['parameters']['neighborhood_radius']
    mapSizeX = data['parameters']['map_size_x']
    mapSizeY = data['parameters']['map_size_y']
    dataRep = data['parameters']['iteration']
    lrF = data['parameters']['learning_rate_fcion']
    learningRate = data['parameters']['learning_rate']
    prob_iter = data['parameters']['probing_iter']
    if iAlg != 1:
        # Probing iterations only apply to the progressive algorithm.
        prob_iter = 0
    if lrF != "linear" and lrF != "inverse_t" and lrF != "exp_decay":
        error(
            "Learning rate function " + lrF + ' does not exist\nchoose: "linear", "inverse_t", or "exp_decay" function! ')
    #####OUTPUT##########
    visual = data['output']['visualization']
    visual = 1 if visual else 0  # aka in c: visual = visual ? 1 : 0
    errors = data['output']['error']
    errors = 1 if errors else 0
    web = data['output']['web']
    web = 1 if web else 0
    #####OPTIONAL_INFO###
    opt = data.get('optional_info')
    if opt:
        is_names = opt.get('names')
        is_names = 1 if is_names else 0
        names = opt['names_path']
        is_clas = opt.get('classes')
        is_clas = 1 if is_clas else 0
        clas = opt['classes_path']
    else:
        # NOTE(review): `names = None` is assigned twice and `clas` is
        # never set in this branch — an `is_clas` path with no
        # optional_info would hit a NameError on `clas`; confirm.
        is_names = 0
        names = None
        is_clas = 0
        names = None
    #####EXCEPTIONS######
    if algo == "probing" and lrF != "exp_decay":
        error("The Probing algorithm is supported only with exp_decey learning rate function!")
    if topology == "hexa" and lrF == "exp_decay":
        error("Combination hexa topology and exp_decey is not yet supported!")
    # clean old results
    os.system("rm -rf ./result/")
    os.system("mkdir result")
    # make temp file
    os.system("rm -rf ./.temp/")
    os.system("mkdir .temp")
    files = []
    for path in data_paths:
        if os.path.isdir(path):
            for infile in (glob.glob(path + "/*")):
                files.append(infile)
        else:
            files.append(path)
    print("Processing files: ")
    for f in files:
        print(" " + f)
        if delimiter != ' ':
            # convert to " " delimiter
            os.system("sed -i 's/" + delimiter + "/ /g' " + f)
    if is_clas:
        os.system('cp ' + clas + ' .temp/classes.shuffle')
    if is_names:
        os.system('cp ' + names + ' .temp/names.shuffle')
    rows = 0
    if lrF == "exp_decay":
        # cmd = getShufflerCmd(f,rows,cols,is_names,is_clas,names,clas)
        # startProgramm(cmd)
        for f in files:
            cols = getCols(f)
            rows += getRows(f)
            cmd = 'cat ' + f + ' >> .temp/data.shuffle'  # change to stream.
            startProgrammDummy(cmd)
    else:
        # NOTE(review): './temp/data.shuffle' here (vs '.temp/' everywhere
        # else) and echoing `cols` while it is still None both look like
        # bugs — confirm before fixing.
        cmd = 'echo "' + str(cols) + '" >> ./temp/data.shuffle'
        for f in files:
            cols = getCols(f)
            rows += getRows(f)
            cmd = 'cat ' + f + ' >> .temp/data.shuffle'
            startProgrammDummy(cmd)
    if visual:
        os.makedirs('result/neurons')
    if web:
        # One directory per neuron for the web bundle.
        for i in range(mapSizeX):
            for j in range(mapSizeY):
                os.makedirs('result/neurons/' + str(i) + '_' + str(j))
    PARAM_STR = f + "_" + str(rows) + "_" + str(cols) + "_" + \
        str(mapSizeX) + "_" + str(mapSizeY) + "_" + str(dataRep) + "_" + str(learningRate)
    if lrF == "exp_decay":
        # Native SOM implementation handles exp_decay itself.
        cmd = PATH + '/bin/som ' + str(rows) + " " + str(cols) + " " + \
            str(mapSizeX) + " " + str(mapSizeY) + " " + str(dataRep) + " " + str(learningRate) + " " + str(
                is_clas) + " " + \
            str(thread_num) + " " + str(iAlg) + " " + str(prob_iter)
        startProgramm(cmd)
    else:
        # Otherwise delegate training to the som_pak toolchain.
        cmd = PATH + "/som_pak-3.1/randinit -xdim " + str(mapSizeX) + " -ydim " + str(mapSizeY) + \
            " -din .temp/data.shuffle -cout .temp/code1.map -topol " + topology + " -neigh " + neigF
        startProgramm(cmd)
        cmd = PATH + "/som_pak-3.1/vsom -din .temp/data.shuffle -cin .temp/code1.map -cout .temp/code1.map -rlen " + \
            str(rows * dataRep) + " -alpha " + str(learningRate) + " -radius " + str(
                neigR) + " -alpha_type " + lrF + " -rand " + \
            str(random.randrange(1, 1000000000))
        startProgramm(cmd)
        cmd = PATH + '/bin/som ' + str(rows) + " " + str(cols) + " " + \
            str(mapSizeX) + " " + str(mapSizeY) + " " + str(is_clas)
        startProgramm(cmd)
    if errors:
        cmd = PATH + '/bin/errorCounter ' + str(rows) + ' ' + str(mapSizeX) + " " + \
            str(mapSizeY) + ' ' + os.getcwd() + "/" + PARAM_STR + ' ' + str(is_clas)
        startProgramm(cmd)
    if visual:
        cmd = PATH + '/bin/neuronsDrawer result/neurons/nnmatrix.png' + ' ' + \
            str(mapSizeX) + " " + str(mapSizeY) + ' ' + str(rows)
        startProgramm(cmd)
        # head, tail = os.path.split( 'result/neuroDraw/' + PARAM_STR + '.png' )
        # os.system("mkdir -p " + head)
        cmd = 'gnuplot ' + '.temp/neurons.gnuplot'
        # startProgramm(cmd)
        cmd = PATH + '/bin/neuronsDrawer2 result/neurons/umatrix.png' + ' ' + str(mapSizeX) + \
            " " + str(mapSizeY) + ' ' + str(rows)
        startProgramm(cmd)
        # head, tail = os.path.split( 'result/neuroDraw2/' + PARAM_STR + '.png' )
        # os.system("mkdir -p " + head)
        cmd = 'gnuplot ' + '.temp/neurons.gnuplot'
        # startProgramm(cmd)
        draw(mapSizeX, mapSizeY, 10)  # TODO change to real value of classes from 10.
    if web:
        os.system("mkdir result/vendor")
        os.system("cp -r " + PATH + "/src/web/vendor result/");
        os.system("mkdir result/lib")
        os.system("cp -r " + PATH + "/src/web/lib result/");
        # os.system("cp src/web/display.html result/display.html");
        # os.system("cp " +PATH + "/src/web/style.css result/style.css");
        cmd = PATH + '/bin/divider ' + str(rows) + " " + str(cols) + " " + \
            str(mapSizeX) + " " + str(mapSizeY) + " " + str(is_names) + " " + \
            exp_name
        startProgramm(cmd)
    os.system("rm -rf ./.temp")
    json_data.close()
# NOTE(review): the lines down to the "FILE UPDATED" print are the
# DISPLACED TAIL of updateCSV() — its (truncated) head appears later in
# this chunk.  They are not valid at module level; left verbatim so the
# two pieces can be rejoined when the file is reassembled.
                        error(e)
                        continue
                # determine tracks to be the same, add no entry
                if setting != -1:  # if new track under existing artist
                    print(">NEW ENTRY UNDER:\t" + info[0] + '-' + info[1] + ' ' + info[2])
                new = True
        if setting != -1:  # if not a test
            toCurrent(musicFile, 0)  # send track to /Current/
        if not new:  # if there hasn't already been a print
            print(">NEW ENTRY:\t\t" + info[0] + '-' + info[1] + ' ' + info[2])
        music.put(entry.Artist, entry)
        appendData(entry)
    if setting != -1:  # if not a test
        print(">FILE UPDATED:\t" + str(FileName) + " in /Music/ with " + str(musicFileList.__len__()) + ' tracks')
    # print(music.__str__())


def isEnoughData():
    # True when /Current/ and /New/ together hold at least IDEAL_SIZE
    # mp3 files (module-level paths/threshold).
    currCount = glob.glob(CurrentMusicPath + '*.mp3').__len__()
    newCount = glob.glob(NewMusicPath + '*.mp3').__len__()
    if currCount + newCount >= IDEAL_SIZE:
        return True
    else:
        return False


if __name__ == "__main__":
    # This module is a library; refuse direct execution.
    error("Please run from main.py")
def startProgrammDummy(cmd):
    """Run *cmd* through the shell, reporting a nonzero exit status via error()."""
    if os.system(cmd) != 0:
        error("bad return value of command: " + cmd)
def updateCSV(setting=0):
    """Scan the configured music folder for .mp3 files, pull YouTube stats for
    each track, de-duplicate against the in-memory ``music`` table and rewrite
    the CSV file.

    :param setting: -1 selects testing mode (test folder / Test.csv, no console
                    chatter and no file moves); any other value is a normal run.
    """
    if setting == -1:  # if testing mode
        path = TestMusicPath
        FileName = 'Test.csv'
    else:
        path = NewMusicPath
        FileName = 'MusicData.csv'
    ytPath = 'https://www.youtube.com/watch?v='
    # grabbing all the files in the set path
    musicFileList = glob.glob(path + '*.mp3')
    # creating a dictionary and shoving those in there
    saveData(music)
    # Setting up the basic CSV
    saveHeader(
        dataList=
        "'ARTIST', 'TITLE', 'URL', 'LIKES', 'DISLIKES', 'VIEWS', 'USED?', 'LIKES to TOTAL RATIO', "
        "'LIKES to DISLIKES RATIO', 'LIKES to VIEWS RATIO'")
    for musicFile in musicFileList:
        file = musicFile[len(path):]  # file name relative to the music folder
        new = False  # bool to represent a new track under an existing artist
        try:
            info = getTrackInfo(file)  # (artist, title, url)
            data = getStats(ytPath + info[2])  # (likes, dislikes, views)
            ratios = getRatios(data)  # (likesToTotalRatio, likeToDislikeRatio, likeToViewRatio)
        # BUG FIX: the original `except urllib.request.HTTPError and ValueError`
        # evaluates the `and` first and therefore catches ONLY ValueError; a
        # tuple is required to catch both exception types.
        except (urllib.request.HTTPError, ValueError) as e:
            error(str(e) + ' ' + file)
            continue
        if data[0] == -1 and data[1] == -1 and data[2] == -1:  # if bad url
            print('>BAD URL FOUND:\t ' + str(musicFile))
            os.remove(musicFile)
            continue
        entry = Data(force_to_unicode(info[1]), force_to_unicode(info[0]),
                     force_to_unicode(info[2]), force_to_unicode(data[0]),
                     force_to_unicode(data[1]), force_to_unicode(data[2]),
                     False, float(ratios[0]), float(ratios[1]), float(ratios[2]))
        if music.has(info[0]):  # if there is an existing entry under the same artist
            # get the song from the entry
            lst = music.get(info[0])
            for el in lst:
                if entry.Title == el.Title:  # if the song names match
                    if setting != -1:  # if not a test
                        print(">FILE DUPLICATE FOUND:\t" + str(musicFile)[len(str(NewMusicPath)):])
                    try:
                        # keep whichever of the two duplicates scores better
                        keeper = onlyKeepOne(el, entry)
                        if keeper == el:
                            os.remove(musicFile)
                        else:
                            music.remove(info[0], el)
                            music.put(info[0], entry)
                            os.remove(musicFile)
                    except FileNotFoundError as e:
                        error(e)
                    continue  # determine tracks to be the same, add no entry
                if setting != -1:  # if new track under existing artist
                    print(">NEW ENTRY UNDER:\t" + info[0] + '-' + info[1] + ' ' + info[2])
                new = True
        if setting != -1:  # if not a test
            toCurrent(musicFile, 0)  # send track to /Current/
            if not new:  # if there hasn't already been a print
                print(">NEW ENTRY:\t\t" + info[0] + '-' + info[1] + ' ' + info[2])
            music.put(entry.Artist, entry)
            appendData(entry)
    if setting != -1:  # if not a test
        print(">FILE UPDATED:\t" + str(FileName) + " in /Music/ with " + str(len(musicFileList)) + ' tracks')
def main():
    """Download puzzles newer than the last recorded download for each source.

    Writes each fetched puzzle into the output, appends a sources.tsv row per
    successful download, and rewrites the recent-downloads TSV with the newest
    successful date per publication.
    """
    p = args_parser('download recent puzzles')
    args = get_args(parser=p)
    outf = open_output()

    today = datetime.date.today()
    todaystr = today.strftime("%Y-%m-%d")

    sources_tsv = ''
    puzzle_sources = xd_puzzle_sources()
    new_recents_tsv = []

    # some downloads may fail, track the last successful ones
    most_recent = {}

    # download new puzzles since most recent download
    for row in metadb.xd_recent_downloads().values():
        pubid = row.pubid
        latest_date = datestr_to_datetime(row.date)

        # by default, keep the previous one
        most_recent[pubid] = row.date

        if pubid not in puzzle_sources:
            warn("unknown puzzle source for '%s', skipping" % pubid)
            continue

        puzsrc = puzzle_sources[pubid]
        if not puzsrc.urlfmt or puzsrc.urlfmt.startswith("#"):
            warn("no source url for '%s', skipping" % pubid)
            continue

        from_date = latest_date
        to_date = today
        dates_to_get = get_dates_between(from_date, to_date, int(puzsrc.freq))
        if not dates_to_get:
            warn("*** %s: nothing to get since %s" % (pubid, from_date))
            continue

        summary("*** %s: downloading %d puzzles from %s to %s" %
                (pubid, len(dates_to_get), from_date, to_date))

        for dt in sorted(dates_to_get):
            try:
                xdid = construct_xdid(pubid, dt)
                url = dt.strftime(puzsrc.urlfmt)
                fn = "%s.%s" % (xdid, puzsrc.ext)
                debug("downloading '%s' from '%s'" % (fn, url))
                response = urllib.request.urlopen(url)
                content = response.read()
                outf.write_file(fn, content)
                most_recent[pubid] = todaystr
                # BUG FIX: record the source row only on success; the original
                # appended it unconditionally after the except blocks, even for
                # failed downloads (potentially with fn/url left over from a
                # previous iteration).
                sources_tsv += xd_sources_row(fn, url, todaystr)
            except (urllib.error.HTTPError, urllib.error.URLError) as err:
                # BUG FIX: URLError has no .code attribute; getattr keeps the
                # handler itself from raising AttributeError.
                error('%s [%s] %s: %s' % (xdid, getattr(err, 'code', ''), err.reason, url))
            except Exception as e:
                error(str(e))

    for k, v in most_recent.items():
        new_recents_tsv.append(xd_recent_download(k, v))

    if sources_tsv:
        outf.write_file("sources.tsv", xd_sources_header + sources_tsv)

    if new_recents_tsv:
        # on filesystem; close the handle deterministically rather than
        # relying on garbage collection
        with open(metadb.RECENT_DOWNLOADS_TSV, "w") as recents_f:
            recents_f.write(xd_recents_header + "".join(sorted(new_recents_tsv)))
# WHITESPACE_ONLY # SIMPLE_OPTIMIZATIONS # ADVANCED_OPTIMIZATIONS ('compilation_level', 'SIMPLE_OPTIMIZATIONS'), ('output_format', 'text'), ('output_info', 'compiled_code'), ('js_code', code), ]) headers = {"Content-type": "application/x-www-form-urlencoded"} conn = http.client.HTTPSConnection('closure-compiler.appspot.com') conn.request('POST', '/compile', params, headers) response = conn.getresponse() data = response.read() code = data.decode("utf-8") if code == "" or code == "\n": error("Compiler Error") conn.close() exit() else: if code.count("com.google.javascript.jscomp") > 1: error("Compiler Error") conn.close() exit() else: info(code) file_object = open(js_path, "w") file_object.write('"use strict";\n' + code) file_object.close() conn.close() hr()
parser.error('Locale mapping name not given') if (not options.output): parser.error('Output directory name not given') if (not options.cacheOutput): parser.error('Output cache file name not given') response = download_content("https://twosky.adtidy.org/api/v1/download?format=xml&language=%s&filename=%s&project=%s" % (options.locale, options.input, options.projectId)) #print('SERVER RESPONSE:') #print(response) if response != "": with open(options.cacheOutput, "wb") as f: f.write(response) else: error("!!!Error downloading description file for locale: " + options.locale) # Indexes for string IDs short_desc_idx = -1 title_idx = -1 desc_idx = -1 # Open xlsx file and look for indexes wb = xlrd.open_workbook(options.cacheOutput) s = wb.sheet_by_index(0) ids = s.col(0) for i in range(len(ids)): val = ids[i].value if val == 'TITLE': title_idx = i elif val == 'DESCRIPTION':
extract_tar(package_name) package_update_count += 1 # exit out of loop after we download package break if not package_update_count: print("all packages are up to date") if __name__ == '__main__': error = lambda text: configuration.color_error + "error:" + \ configuration.color_normal + " " + text #get username if getuser() == "root" and not configuration.root_execute: print(error("this program is not allowed to be used as the root user")) exit(1) #get current directory directory = os.getcwd() #change all colors to empty strings if colored_output is set to False in configuration.py if not configuration.colored_output: configuration.color_normal = "" configuration.color_error = "" configuration.color_successful = "" configuration.color_progress = "" #make all the arguments parser = argparse.ArgumentParser() parser.add_argument("-S", "-d", "--download", metavar="",\