def __init__(self, cbk_download_info_func=None, cbk_download_end_func=None): self.commons = Commons() # Download information. self.download_info = { 'total_download_files': 0, 'downloaded_files': 0, 'total_download_bytes': 0, 'downloaded_bytes': 0, 'current_file_url': '', 'current_file_path': '', 'current_tmp_file': '' } # Get sussesfull downloads. self.downloaded = INIFile( os.path.join(self.commons['settings_path'], 'downloads.ini')) # Get failed downloads. self.failed = INIFile( os.path.join(self.commons['settings_path'], 'failed.ini')) self.exit = False self.clean_downloads = True self.downloads = {} self.no_delete = set() self.cbk_download_info_func = cbk_download_info_func self.cbk_download_end_func = cbk_download_end_func self.dwn_thr = threading.Thread(target=self.download_thread) self.dwn_thr.start()
def __init__(self, filename='', extras={}):
    """Parse a gzipped primary.xml repository index into package tables.

    filename = Path of the primary.xml.gz file to parse.
    extras   = Extra values ('section', 'url', 'path', ...) used to build
               the package rows.
    """
    self.commons = Commons()
    main_ini_file = INIFile(self.commons['ini_file'])
    # Architecture configured in the main INI file.
    main_arch = main_ini_file['DEFAULT']['Arch'][0]
    # Map RPM dependency flag names to comparison operators.
    self.flags = {'LE': '<=', 'GE': '>=', 'EQ': '==', 'LT': '<', 'GT': '>'}
    # Build the i386..i686 (+noarch) architecture family.
    self.archs_ix86 = {}
    for i in range(3, 7):
        self.archs_ix86[str(i).join(['i', '86'])] = ''
    self.archs_ix86['noarch'] = ''
    self.archs_Amd64 = {'x86_64': '', 'noarch': ''}
    self.archs_ia64 = {'ia64': '', 'noarch': ''}
    # Select the family that contains the configured architecture.
    self.allowed_archs = []
    if main_arch in self.archs_ix86:
        self.allowed_archs = self.archs_ix86
    elif main_arch in self.archs_Amd64:
        self.allowed_archs = self.archs_Amd64
    elif main_arch in self.archs_ia64:
        self.allowed_archs = self.archs_ia64
    # SAX parsing state.  The single-key dicts below act as cheap flags:
    # the handlers test them with a try/except key lookup instead of if.
    self.package = {}
    self.package_tag = {'package': ''}
    self.entry_tag = {'rpm:entry': ''}
    self.arch_tag = {'arch': ''}
    self.provides_requires = {'rpm:provides': [], 'rpm:requires': []}
    # Tags whose text content must be captured.
    self.catch_tags_contents = {
        'name': '',
        'arch': '',
        'summary': '',
        'description': '',
        'rpm:group': ''
    }
    # Tags whose listed attributes must be captured.
    self.catch_tags_attrs = {
        'version': ['ver', 'rel'],
        'size': ['package'],
        'location': ['href']
    }
    self.catch_content = {'False': ''}
    self.lock_arch = {'False': ''}
    self.cur_tag = ''
    self.cur_prov_req_tag = ''
    self.contents = ''
    self.extras = extras
    # Parsing results.
    self.packages = []
    self.requires = {}
    self.provides = {}
    try:
        xml.sax.parse(gzip.open(filename), self)
    except:
        # Best effort: an unreadable index simply yields empty tables.
        pass
def __init__(self):
    """Synchronise the flash-drive state with the local PC.

    Copies settings and downloads to the PC, records the system's
    installed provides and, when the local INI file is empty (first run),
    configures the package manager.
    """
    self.commons = Commons()
    self.main_ini_file = INIFile(self.commons['ini_file'])
    self.plugin = Pluginator().get_plugin(
        'repo', self.main_ini_file['plugin']['plugin_name'][0])
    # Local (PC-side) working folder.
    self.local_folder = os.path.join(os.path.expanduser("~"), '.sushi-huh')
    self.local_main_ini_file = INIFile(
        os.path.join(self.local_folder, 'src', 'settings', 'sushi-huh.ini'))
    self.send_files_to_pc()
    self.get_provides()
    # An empty local INI file means this PC was never configured.
    if list(self.local_main_ini_file.keys()) == []:
        self.set_packmanager()
class Sync:
    """Synchronise the flash-drive contents with the local PC."""

    def __init__(self):
        """Copy files to the PC, collect provides and, on first run,
        configure the package manager."""
        self.commons = Commons()
        self.main_ini_file = INIFile(self.commons['ini_file'])
        self.plugin = Pluginator().get_plugin(
            'repo', self.main_ini_file['plugin']['plugin_name'][0])
        # Local (PC-side) working folder.
        self.local_folder = os.path.join(os.path.expanduser("~"),
                                         '.sushi-huh')
        self.local_main_ini_file = INIFile(
            os.path.join(self.local_folder, 'src', 'settings',
                         'sushi-huh.ini'))
        self.send_files_to_pc()
        self.get_provides()
        # An empty local INI file means this PC was never configured.
        if list(self.local_main_ini_file.keys()) == []:
            self.set_packmanager()

    """
    send_files_to_pc() -> None

    Send the files in your flash drive to your PC.
    """

    def send_files_to_pc(self):
        # NOTE(review): the last copy_move() argument is False for the
        # settings and True for the downloads — presumably a copy-vs-move
        # switch; confirm against Commons.copy_move.
        self.commons.copy_move(self.commons['settings_path'],
                               os.path.join(self.local_folder, 'src',
                                            'settings'), False)
        self.commons.copy_move(self.commons['download_path'],
                               os.path.join(self.local_folder, 'src',
                                            'downloads'), True)

    """
    get_provides() -> None

    Read all provides in your system.
    """

    def get_provides(self):
        packages = []
        provides = {}
        # RPM systems are scanned through RPMProvides; everything else is
        # assumed to be dpkg-based and read from the 'available' file.
        if self.main_ini_file['plugin']['repository_type'][0].lower(
        ) == 'rpm':
            rpmp = RPMProvides()
            packages = rpmp.packages
            provides = rpmp.provides
        else:
            avst = AvailableStatus('/var/lib/dpkg/available')
            packages = avst.packages
            provides = avst.provides
        # Persist the result for later use by the packages database.
        pickle.dump((packages, provides),
                    open(
                        os.path.join(self.commons['settings_path'],
                                     'provides.db'), 'wb'))

    """
    set_packmanager() -> None

    Configure automatically your package manager.
    """

    def set_packmanager(self):
        # Delegated entirely to the repository plugin.
        self.plugin.set_packmanager()
def __init__(self, urlbase=''):
    """Load the Ubuntu release codenames, from cache or from *urlbase*.

    First tries the cached ubuntu_codenames.txt in the settings folder;
    on a cache miss the list is fetched from urlbase (an FTP listing or
    an HTML index) and written back to the cache.

    urlbase = URL of the Ubuntu archive index used on a cache miss.
    """
    html.parser.HTMLParser.__init__(self)
    self.codenames = []
    self.commons = Commons()
    cache_path = os.path.join(self.commons['settings_path'],
                              'ubuntu_codenames.txt')
    try:
        # Fast path: the codenames were already cached on disk.
        # 'with' guarantees the file is closed even if the read fails
        # (the original leaked the handle on that path).
        with open(cache_path, 'r') as codenames_file:
            # Drop the empty entry produced by the trailing newline.
            self.codenames = codenames_file.read().split('\n')[:-1]
    except OSError:
        try:
            url = urllib.request.urlopen(urlbase)
            out = url.read().decode()
            url.close()
        except Exception:
            # Network failure: leave the codename list empty.
            return
        if urlbase.startswith('ftp://'):
            # FTP directory listing: the codename is the last
            # space-separated column of each CRLF-terminated line.
            for line in out.split('\r\n'):
                self.codenames += [line[line.rfind(' ') + 1:]]
        else:
            # HTML index: let the HTMLParser callbacks collect the names.
            self.feed(out)
        # Cache the result for the next run.
        with open(cache_path, 'w') as codenames_file:
            for codename in self.codenames:
                codenames_file.write(codename + '\n')
def __init__(self):
    """Open (or create) the compressed packages database.

    On the first run an empty database is started; otherwise the zipped
    database is extracted to the temporal directory and loaded.
    """
    self.version_system = VersionSystem()
    self.commons = Commons()
    # First run: the compressed database does not exist yet.
    self.first_time_run = not os.path.exists(self.commons['packages_db'])
    # Working copy of the database, kept in the temporal directory.
    self.packages_db = self.commons.get_temp(''.join(
        [os.path.splitext(self.commons['packages_db'])[0], '.db']))
    if self.first_time_run:
        # Discard any stale working copy left by a previous run.
        if os.path.exists(self.packages_db):
            os.remove(self.packages_db)
    else:
        # Uncompress the database in the temporal directory.
        if not os.path.exists(self.packages_db):
            packages_zip = zipfile.ZipFile(self.commons['packages_db'])
            packages_zip.extractall(tempfile.gettempdir())
            packages_zip.close()
    # Packages/provides installed on the local system (from provides.db).
    self.local_packages = []
    self.local_provides = {}
    self.downloaded_packages = []
    try:
        self.local_packages, self.local_provides = pickle.load(
            open(
                os.path.join(self.commons['settings_path'], 'provides.db'),
                'rb'))
    except:
        # Best effort: without provides.db the local state stays empty.
        pass
    self.update_downloades()
    # Column layout of a package row; index 13 ('status') is the sync state.
    self.packages_cols = [
        'package_id', 'name', 'version', 'release', 'arch', 'groupname',
        'gui', 'repository', 'size', 'summary', 'description', 'url',
        'path', 'status'
    ]
    self.packages = Table(self.packages_cols)
    self.requires = {}
    self.provides = {}
    if not self.first_time_run:
        # Load the database.
        self.packages.table, self.packages.sorted_by, self.requires, self.provides = pickle.load(
            open(self.packages_db, 'rb'))
def __init__(self, port=0, make_threading=True):
    """Prepare the server thread (the listener is started elsewhere).

    port = TCP port to serve on (0 lets the OS pick one).
    make_threading = Whether to serve requests in separate threads.
    """
    threading.Thread.__init__(self)
    # Shutdown flag checked by the server loop.
    self.exit = False
    # Listening configuration.
    self.port = port
    self.make_threading = make_threading
    # Language support; the phrase book is loaded lazily.
    self.phrase_book = None
    self.deafult_lang = 'en'  # (sic) name kept — read elsewhere as-is
    # Shared helpers and configuration.
    self.commons = Commons()
class MediaCFG(dict):
    """
    __init__(repo='', fp_media_cfg=None)

    Load a media.cfg repository description into a dict of
    {'<repo>/<media name>': info} entries.

    repo = Repository name used to prefix each media name.
    fp_media_cfg = media.cfg file (passed straight to INIFile).
    """

    def __init__(self, repo='', fp_media_cfg=None):
        self.commons = Commons()
        # Without a file this behaves as an empty mapping.
        if fp_media_cfg == None:
            return
        self.ini_file = INIFile(fp_media_cfg)
        for tag in self.ini_file:
            # Only sections carrying a 'name' key describe a media.
            if 'name' in self.ini_file[tag]:
                # Entries are keyed as '<repo>/<media name>'; 'name' keeps
                # the original section tag.
                self[repo + '/' + self.ini_file[tag]['name'][0]] = {}
                self[repo + '/' + self.ini_file[tag]['name'][0]]['name'] = tag
                if 'updates_for' in self.ini_file[tag]:
                    # Resolve the referenced section tag to its media name.
                    self[repo + '/' + self.ini_file[tag]['name']
                         [0]]['updates_for'] = self.ini_file[
                             self.ini_file[tag]['updates_for'][0]]['name'][0]
                # Media flagged 'noauto' start out disabled.
                if 'noauto' in self.ini_file[tag]:
                    self[repo + '/' +
                         self.ini_file[tag]['name'][0]]['enabled'] = False
                else:
                    self[repo + '/' +
                         self.ini_file[tag]['name'][0]]['enabled'] = True

    """
    get_media_info_paths(mirror='', path='', relative='') -> (str, str)

    Return the full download URL and the local path for a media info file.

    mirror = Mirror URL.
    path = Local base path.
    relative = Relative path taken from the media.cfg file.
    """

    def get_media_info_paths(self, mirror='', path='', relative=''):
        url = self.commons.urljoin(mirror, 'media', relative)
        # '../' entries live beside the media folder instead of inside it.
        if '../' in relative:
            path = '/'.join([path, relative.replace('../', '')])
        else:
            path = '/'.join([path, 'media', relative])
        return url, path
def __init__(self, repo='', fp_media_cfg=None):
    """Load a media.cfg description into {'<repo>/<media name>': info}.

    repo = Repository name used to prefix each media name.
    fp_media_cfg = media.cfg file (passed straight to INIFile).
    """
    self.commons = Commons()
    # Without a file this behaves as an empty mapping.
    if fp_media_cfg == None:
        return
    self.ini_file = INIFile(fp_media_cfg)
    for tag in self.ini_file:
        # Only sections carrying a 'name' key describe a media.
        if 'name' in self.ini_file[tag]:
            # Entries are keyed as '<repo>/<media name>'; 'name' keeps the
            # original section tag.
            self[repo + '/' + self.ini_file[tag]['name'][0]] = {}
            self[repo + '/' + self.ini_file[tag]['name'][0]]['name'] = tag
            if 'updates_for' in self.ini_file[tag]:
                # Resolve the referenced section tag to its media name.
                self[repo + '/' + self.ini_file[tag]['name']
                     [0]]['updates_for'] = self.ini_file[
                         self.ini_file[tag]['updates_for'][0]]['name'][0]
            # Media flagged 'noauto' start out disabled.
            if 'noauto' in self.ini_file[tag]:
                self[repo + '/' +
                     self.ini_file[tag]['name'][0]]['enabled'] = False
            else:
                self[repo + '/' +
                     self.ini_file[tag]['name'][0]]['enabled'] = True
def __init__(self, os_lang='en'):
    """Load the translation phrase book from the language XML file.

    os_lang = Language code of the operating system.
    """
    self.commons = Commons()
    # Identity table of accepted characters.
    # NOTE(review): range(0, 127) stops at chr(126); presumably 7-bit
    # ASCII was intended — confirm whether 128 was meant.
    self.valid_chars = {}
    for i in range(0, 127):
        self.valid_chars[chr(i)] = chr(i)
    self.os_lang = os_lang
    self.default_lang = ''
    # SAX parsing state: current phrase pair being collected.
    self.catch_content = False
    self.contents = ''
    self.cur_src_phrase = ''
    self.cur_dst_phrase = ''
    try:
        xml.sax.parse(open(self.commons['lang_file']), self)
    except:
        # Best effort: a missing/broken language file leaves no phrases.
        pass
def __init__(self, urls=None, timeout=None):
    """Ping every URL in *urls* concurrently and store the results.

    urls    = URLs to check (duplicates are ignored).  Defaults to no
              URLs; the former 'urls=[]' default was a shared mutable
              default argument and has been replaced compatibly.
    timeout = Timeout forwarded to the individual pings.
    """
    urls = [] if urls is None else urls
    self.commons = Commons()
    self.timeout = timeout
    ping_threads = []
    # Filled in by do_ping; [None, None] marks a failed ping
    # (presumably [size, time] on success — confirm against do_ping).
    self.ping_results = {}
    # One thread per distinct URL.
    for url in set(urls):
        ping_thread = threading.Thread(target=self.do_ping,
                                       kwargs={'url': url})
        ping_threads += [ping_thread]
        ping_thread.start()
    for ping_thread in ping_threads:
        ping_thread.join()
    # Retry each failed URL once more, synchronously.
    for url in self.ping_results:
        if self.ping_results[url] == [None, None]:
            self.do_ping(url)
class PackagesDB:
    """Zip-compressed, pickle-backed database of repository packages."""

    def __init__(self):
        """Open (or create) the compressed packages database.

        On the first run an empty database is started; otherwise the
        zipped database is extracted to the temporal directory and loaded.
        """
        self.version_system = VersionSystem()
        self.commons = Commons()
        # First run: the compressed database does not exist yet.
        self.first_time_run = not os.path.exists(
            self.commons['packages_db'])
        # Working copy of the database, kept in the temporal directory.
        self.packages_db = self.commons.get_temp(''.join(
            [os.path.splitext(self.commons['packages_db'])[0], '.db']))
        if self.first_time_run:
            # Discard any stale working copy left by a previous run.
            if os.path.exists(self.packages_db):
                os.remove(self.packages_db)
        else:
            # Uncompress the database in the temporal directory.
            if not os.path.exists(self.packages_db):
                packages_zip = zipfile.ZipFile(self.commons['packages_db'])
                packages_zip.extractall(tempfile.gettempdir())
                packages_zip.close()
        # Packages/provides installed on the local system (provides.db).
        self.local_packages = []
        self.local_provides = {}
        self.downloaded_packages = []
        try:
            self.local_packages, self.local_provides = pickle.load(
                open(
                    os.path.join(self.commons['settings_path'],
                                 'provides.db'), 'rb'))
        except:
            # Best effort: without provides.db the local state stays empty.
            pass
        self.update_downloades()
        # Column layout of a package row; index 13 ('status') is the sync
        # state: 0 = not downloaded, 1 = downloaded, 2 = installed locally
        # (see make_sync).
        self.packages_cols = [
            'package_id', 'name', 'version', 'release', 'arch', 'groupname',
            'gui', 'repository', 'size', 'summary', 'description', 'url',
            'path', 'status'
        ]
        self.packages = Table(self.packages_cols)
        self.requires = {}
        self.provides = {}
        if not self.first_time_run:
            # Load the database.
            self.packages.table, self.packages.sorted_by, self.requires, self.provides = pickle.load(
                open(self.packages_db, 'rb'))

    """
    update_downloades()

    Collect information about downloaded packages.
    """

    def update_downloades(self):
        try:
            downloads_ini_file = INIFile(
                os.path.join(self.commons['settings_path'],
                             'downloads.ini'))
            # Keep only the bare file names of the downloaded packages.
            self.downloaded_packages = [
                filename[filename.rfind('/') + 1:]
                for filename in downloads_ini_file['files'].keys()
            ]
        except:
            self.downloaded_packages = []

    """
    make_sync()

    Determine the status of the packages.
    """

    def make_sync(self):
        self.update_downloades()
        sync_table = []
        for package in self.packages.table:
            filename = self.get_filename(package)
            # Column 13 is 'status': 2 = installed, 1 = downloaded, 0 = none.
            if filename in self.local_packages:
                package[13] = 2
            elif filename in self.downloaded_packages:
                package[13] = 1
            else:
                package[13] = 0
            sync_table += [package]
        self.packages.table = sync_table
        self.commit()

    """
    get_filename(full_filename='')

    Return a filename without the path.

    full_filename = Full path.
    """

    def get_filename(self, full_filename=''):
        # NOTE(review): despite its name, this indexes full_filename[0]
        # first — callers in make_sync pass a whole package row (so [0] is
        # the package_id), but get_dependencies passes a plain package_id
        # string, for which [0] is only its first character.  Confirm
        # intended input type.
        return full_filename[0][full_filename[0].rfind('/') + 1:]

    """
    close()

    Close the packages database.
    """

    def close(self):
        # Persist first-run data before removing the working copy.
        if self.first_time_run:
            self.commit()
        os.remove(self.packages_db)

    """
    commit()

    Save the changes to the data base.
    """

    def commit(self):
        # Dump all information to a file using the Python 3 binary file
        # format.
        pickle.dump((self.packages.table, self.packages.sorted_by,
                     self.requires, self.provides),
                    open(self.packages_db, 'wb'))
        # Compress the database in a zip file.
        packages_zip = zipfile.ZipFile(self.commons['packages_db'], 'w',
                                       zipfile.ZIP_DEFLATED)
        packages_zip.write(
            self.packages_db, ''.join([
                os.path.splitext(
                    os.path.basename(self.commons['packages_db']))[0], '.db'
            ]))
        packages_zip.close()

    """
    get_groups() -> []

    Returns all available packages groups.
    """

    def get_groups(self):
        groupname_index = self.packages.cols.index('groupname')
        return sorted(
            set(row[groupname_index] for row in self.packages.table))

    """
    get_description(package_id='') -> []

    Returns the full information about the requested package.

    package_id = Package id.
    """

    def get_description(self, package_id=''):
        return self.packages.find_exact(package_id, 'package_id').table[0]

    """
    get_packages(groupname='') -> []

    Returns a list of packages sorted by name where the "groupname" column
    is equal to groupname.

    groupname = Group name.
    """

    def get_packages(self, groupname=''):
        return self.packages.find_exact(groupname, 'groupname',
                                        'name').table

    """
    search(keywords=[]) -> []

    Returns a list of packages sorted by name containing the keywords in
    the "name", "summary" or "description" columns.

    keywords = Keywords to search.
    """

    def search(self, keywords=[]):
        return self.packages.find_contains(keywords,
                                           ['name', 'summary',
                                            'description'], 'name').table

    """
    carry_out_dependency(require=[], provide=[]) -> bool

    Returns True if the provide is compatible with the require.

    require = [name, flag, version].
    provide = [name, flag, version]
    """

    def carry_out_dependency(self, require=[], provide=[]):
        # An unversioned require ('' flag) matches any provide; otherwise
        # delegate the version comparison to the version system.
        return require[1] == '' or (
            require[1] != '' and self.version_system.compare_versions(
                provide[1], require[1], require[2]))

    """
    get_dependencies(packages=[], suggests_are_requires=False) -> []

    Return the full list of package ids that must be downloaded to
    install the needed packages.

    packages = List of packages id.
    suggests_are_requires = Also download the suggested packages.
    """

    def get_dependencies(self, packages=[], suggests_are_requires=False):
        # Worklist closure: resolve each package's requires into provider
        # packages until no new package appears.
        old_requires = []
        new_requires = []
        old_packages = set()
        new_packages = set(packages)
        total_size = 0
        # For each package in new_packages.
        while new_packages != set():
            package_id = new_packages.pop()
            old_packages.add(package_id)
            try:
                package_info = self.requires[package_id]
                total_size += package_info[0]
                # Find all their requires.
                for require in package_info[1]:
                    if not require in old_requires:
                        new_requires += [require]
                # If necessary, also treat suggests as requires.
                if suggests_are_requires:
                    for suggest in package_info[2]:
                        if not suggest in old_requires:
                            new_requires += [suggest]
            except:
                # Unknown package: skip it (best effort).
                pass
            # Now find all packages that provide each require.
            while new_requires != []:
                require = new_requires.pop()
                old_requires += [require]
                # Requires satisfied by the local system need no download.
                if not self.in_local_provides(require):
                    try:
                        provides = self.provides[require[0]]
                    except:
                        provides = []
                    # Append valid packages to new_packages.
                    for provide in provides:
                        if self.carry_out_dependency(require, provide):
                            if not provide[
                                    2] in old_packages and not self.get_filename(
                                        provide[2]
                                    ) in self.downloaded_packages:
                                new_packages.add(provide[2])
        return sorted(old_packages), total_size

    def in_local_provides(self, dependency):
        """Return True if the local system already satisfies *dependency*."""
        try:
            return self.carry_out_dependency(
                dependency,
                [dependency[0], '==', self.local_provides[dependency[0]]])
        except:
            return False

    """
    resolve_files(packages=[]) -> {}

    Returns {full_local_path: [full_download_url, package_size]}.

    packages = List of packages id.
    """

    def resolve_files(self, packages=[]):
        url_index = self.packages_cols.index('url')
        path_index = self.packages_cols.index('path')
        size_index = self.packages_cols.index('size')
        files = {}
        for package in set(packages):
            try:
                pkg_info = self.get_description(package)
                files[pkg_info[path_index]] = [
                    pkg_info[url_index], pkg_info[size_index]
                ]
            except:
                # Unknown packages are silently skipped.
                pass
        return files
def __init__(self, filename='', extras={}):
    """Load a synthesis.hdlist.cz index into package tables.

    filename = File to load.
    extras   = Extra values ('section', 'url', 'path', 'ixl', ...) used
               to build the package rows.
    """
    try:
        f = gzip.open(filename)
    except:
        # Unreadable file: leave the object without tables.
        return
    # Decode byte-per-byte (equivalent to latin-1) into a str.
    data = ''.join([chr(c) for c in f.read()])
    f.close()
    self.commons = Commons()
    # Keep only the non-empty lines.
    lines = []
    for l in data.split('\n'):
        if l != '':
            lines += [l]
    self.packages = []
    self.provides = {}
    self.requires = {}
    # Each line is '@'-separated; keys[1] names the record type.  The
    # 'info' record closes a package, combining the values accumulated
    # from the previous records into one package row.
    for line in lines:
        keys = line.split('@')
        if keys[1] == 'provides':
            # 'provides' starts a new package: reset the accumulators.
            suggests = []
            requires = []
            provs = keys[2:]
        elif keys[1] == 'obsoletes':
            obsoletes = keys[2:]  # collected but currently unused
        elif keys[1] == 'conflicts':
            conflicts = keys[2:]  # collected but currently unused
        elif keys[1] == 'requires':
            requires = keys[2:]
        elif keys[1] == 'suggests':
            suggests = keys[2:]
        elif keys[1] == 'summary':
            summary = keys[2]
        elif keys[1] == 'filesize':
            size = int(keys[2])
        elif keys[1] == 'info':
            filename = keys[2] + '.rpm'
            group = keys[5]
            name, version, release, arch = self.get_external_info(keys[2])
            # Long description comes from the 'ixl' side table, if any.
            try:
                description = extras['ixl'][filename]
            except:
                description = ''
            package_id = '/'.join([extras['section'], filename])
            package = [
                package_id, name, version, release, arch, group, 'No',
                extras['section'], size, summary, description,
                self.commons.urljoin(extras['url'], filename),
                '/'.join([extras['path'], filename]), 0
            ]
            self.packages += [package]
            self.requires[package_id] = [
                size,
                [self.split_dependency(require) for require in requires],
                [self.split_dependency(suggest) for suggest in suggests]
            ]
            # Index every provide by name:
            # {name: [[flag, version, package_id], ...]}.
            for provide in provs:
                prov = self.split_dependency(provide) + [package_id]
                if prov[0] in self.provides:
                    self.provides[prov[0]] += [prov[1:]]
                else:
                    self.provides[prov[0]] = [prov[1:]]
class SynthesisHdlistCz:
    """
    __init__(filename='', extras={})

    Load the synthesis.hdlist.cz type files in tables.

    filename = File to load.
    extras = Extra values used to construct the packages table.
    """

    def __init__(self, filename='', extras={}):
        try:
            f = gzip.open(filename)
        except:
            # Unreadable file: leave the object without tables.
            return
        # Decode byte-per-byte (equivalent to latin-1) into a str.
        data = ''.join([chr(c) for c in f.read()])
        f.close()
        self.commons = Commons()
        # Keep only the non-empty lines.
        lines = []
        for l in data.split('\n'):
            if l != '':
                lines += [l]
        self.packages = []
        self.provides = {}
        self.requires = {}
        # Each line is '@'-separated; keys[1] names the record type.  The
        # 'info' record closes a package, combining the values accumulated
        # from the previous records into one package row.
        for line in lines:
            keys = line.split('@')
            if keys[1] == 'provides':
                # 'provides' starts a new package: reset the accumulators.
                suggests = []
                requires = []
                provs = keys[2:]
            elif keys[1] == 'obsoletes':
                obsoletes = keys[2:]  # collected but currently unused
            elif keys[1] == 'conflicts':
                conflicts = keys[2:]  # collected but currently unused
            elif keys[1] == 'requires':
                requires = keys[2:]
            elif keys[1] == 'suggests':
                suggests = keys[2:]
            elif keys[1] == 'summary':
                summary = keys[2]
            elif keys[1] == 'filesize':
                size = int(keys[2])
            elif keys[1] == 'info':
                filename = keys[2] + '.rpm'
                group = keys[5]
                name, version, release, arch = self.get_external_info(
                    keys[2])
                # Long description comes from the 'ixl' side table, if any.
                try:
                    description = extras['ixl'][filename]
                except:
                    description = ''
                package_id = '/'.join([extras['section'], filename])
                package = [
                    package_id, name, version, release, arch, group, 'No',
                    extras['section'], size, summary, description,
                    self.commons.urljoin(extras['url'], filename),
                    '/'.join([extras['path'], filename]), 0
                ]
                self.packages += [package]
                self.requires[package_id] = [
                    size,
                    [self.split_dependency(require)
                     for require in requires],
                    [self.split_dependency(suggest)
                     for suggest in suggests]
                ]
                # Index every provide by name:
                # {name: [[flag, version, package_id], ...]}.
                for provide in provs:
                    prov = self.split_dependency(provide) + [package_id]
                    if prov[0] in self.provides:
                        self.provides[prov[0]] += [prov[1:]]
                    else:
                        self.provides[prov[0]] = [prov[1:]]

    """
    get_external_info(filename='')

    Split filename in a (name, version, release, arch) tuple.

    filename = RPM package name.
    """

    def get_external_info(self, filename=''):
        # Peel the fields off the right end: name-version-release.arch
        arch = filename[filename.rfind('.') + 1:]
        filename = filename[:filename.rfind('.')]
        release = filename[filename.rfind('-') + 1:]
        filename = filename[:filename.rfind('-')]
        version = filename[filename.rfind('-') + 1:]
        name = filename[:filename.rfind('-')]
        return name, version, release, arch

    """
    split_dependency(name='') -> [str, str, str]

    Return a provide or require split in a [name, flag, version] list.

    name = Name of the dependency.
    """

    def split_dependency(self, name=''):
        flag = ''
        version = ''
        # Versioned dependencies look like 'name[flag version]'.
        if '[' in name:
            _name = name.split('[')
            name = _name[0]
            if '*' in _name[1]:
                # A wildcard version matches everything.
                flag = ''
                version = ''
            else:
                version = _name[1].replace(']', '')
                version = version.split(' ')
                flag = version[0]
                version = version[1]
        else:
            flag = ''
            version = ''
        return [name, flag, version]
def __init__(self, user_defs={}, clone=False, get_files_func=None):
    """Run the staged first-time configuration wizard.

    Each stage fills a section of the main INI file.  When a stage needs
    user input that user_defs does not supply, the available choices are
    left in self.options and __init__ returns early; self.cur_status_key
    tells the caller which stage is pending.

    user_defs = Answers for the wizard stages ('plugin', 'defaults',
                'mirrors', 'repositories').
    clone = Clone the repository once configuration is complete.
    get_files_func = Download function handed to the plugin.
    """
    commons = Commons()
    self.main_ini_file = INIFile(commons['ini_file'])
    pluginator = Pluginator()
    # Ordered wizard stages; cur_status indexes into this list.
    self.status_keys = [
        'options', 'plugin', 'defaults', 'mirrors', 'repositories',
        'clone', 'ready'
    ]
    self.closed = False
    self.cur_status = 0
    self.cur_status_key = self.status_keys[self.cur_status]
    self.options = {}
    # Append information about the program to the main INI file.
    if not 'program' in self.main_ini_file:
        self.main_ini_file['program'] = {}
        self.main_ini_file['program']['name'] = [commons['program_name']]
        self.main_ini_file['program']['version'] = [
            commons['program_version']
        ]
        self.main_ini_file.save()
    # Default options.
    if not 'options' in self.main_ini_file:
        self.main_ini_file['options'] = {}
        self.main_ini_file['options']['program_skin'] = [
            commons['program_skin']
        ]
        self.main_ini_file['options']['wallpaper'] = [commons['wallpaper']]
        self.main_ini_file['options']['suggests_are_requires'] = [
            commons['suggests_are_requires']
        ]
        self.main_ini_file['options']['default_lang'] = [
            commons['default_lang']
        ]
        self.main_ini_file.save()
    self.cur_status += 1
    self.cur_status_key = self.status_keys[self.cur_status]
    # Stage 'plugin': select the repository plugin.
    if not 'plugin' in self.main_ini_file:
        if 'plugin' in user_defs:
            self.main_ini_file['plugin'] = pluginator.get_plugin_info(
                'repo', user_defs['plugin'])
            self.main_ini_file.save()
        else:
            # No answer yet: publish the available plugins and wait.
            self.options = {'plugin': pluginator.find_plugins('repo')}
            return
    self.cur_status += 1
    self.cur_status_key = self.status_keys[self.cur_status]
    # Get plugin.
    self.plugin = pluginator.get_plugin(
        'repo', self.main_ini_file['plugin']['plugin_name'][0])
    self.plugin.get_files = get_files_func
    plugin_ini_file = self.plugin.get_plugin_ini_file()
    # Stage 'defaults': fill the DEFAULT keys offered by the plugin.
    if not 'DEFAULT' in self.main_ini_file:
        if 'defaults' in user_defs:
            # Only accept keys that the plugin actually declares.
            for key in user_defs['defaults']:
                if key in plugin_ini_file['DEFAULT']:
                    self.main_ini_file.set_pair(
                        'DEFAULT', key, [user_defs['defaults'][key]])
            self.main_ini_file.save()
        else:
            # No answer yet: publish the plugin's defaults and wait.
            self.options = {
                'defaults': {
                    'DEFAULT': plugin_ini_file['DEFAULT']
                }
            }
            for key in plugin_ini_file['DEFAULT']:
                self.options['defaults'][key] = plugin_ini_file[key]
            return
    self.cur_status += 1
    self.cur_status_key = self.status_keys[self.cur_status]
    # Stage 'mirrors': record the selected mirrors.
    if not 'mirror' in self.main_ini_file:
        if 'mirrors' in user_defs:
            for mirror in user_defs['mirrors']:
                if mirror != '':
                    self.main_ini_file.set_pair(
                        'mirror', mirror, [user_defs['mirrors'][mirror]])
            self.main_ini_file.save()
        else:
            # No answer yet: publish the plugin's mirror list and wait.
            self.options = {'mirrors': self.plugin.get_mirror_list()}
            return
    self.cur_status += 1
    self.cur_status_key = self.status_keys[self.cur_status]
    # Stage 'repositories': record the sections and which are enabled.
    if not 'repo' in self.main_ini_file:
        if 'repositories' in user_defs:
            sections = self.plugin.get_sections()
            for repo in sections:
                self.main_ini_file.set_pair('repo', repo,
                                            sorted(sections[repo].keys()))
                for section in sections[repo]:
                    self.main_ini_file.set_pair(
                        section, 'name',
                        [sections[repo][section]['name']])
                    if 'updates_for' in sections[repo][section]:
                        self.main_ini_file.set_pair(
                            section, 'updates_for',
                            [sections[repo][section]['updates_for']])
                    # Enable only the sections the user selected.
                    if section in user_defs['repositories']:
                        self.main_ini_file.set_pair(
                            section, 'enabled', [True])
                    else:
                        self.main_ini_file.set_pair(
                            section, 'enabled', [False])
            self.main_ini_file.save()
        else:
            # No answer yet: publish the available sections and wait.
            self.options = {'repositories': self.plugin.get_sections()}
            return
    self.cur_status += 1
    self.cur_status_key = self.status_keys[self.cur_status]
    # All stages done; optionally clone the repository right away.
    if clone:
        self.clone_repo()
class Downloader():
    """
    __init__(cbk_download_info_func=None, cbk_download_end_func=None)

    Background file downloader with progress callbacks.

    cbk_download_info_func = Callback invoked with the progress dict.
    cbk_download_end_func = Callback invoked when a download batch ends.
    """

    def __init__(self, cbk_download_info_func=None,
                 cbk_download_end_func=None):
        self.commons = Commons()
        # Progress counters shared with the download thread/callbacks.
        self.download_info = {
            'total_download_files': 0,
            'downloaded_files': 0,
            'total_download_bytes': 0,
            'downloaded_bytes': 0,
            'current_file_url': '',
            'current_file_path': '',
            'current_tmp_file': ''
        }
        # Successful downloads, persisted in downloads.ini.
        self.downloaded = INIFile(
            os.path.join(self.commons['settings_path'], 'downloads.ini'))
        # Failed downloads, persisted in failed.ini.
        self.failed = INIFile(
            os.path.join(self.commons['settings_path'], 'failed.ini'))
        self.exit = False            # True stops the download thread.
        self.clean_downloads = True  # Lets the thread reset progress info.
        self.downloads = {}          # {filename: url, or None when done}.
        self.no_delete = set()       # Filenames a waiting caller needs.
        self.cbk_download_info_func = cbk_download_info_func
        self.cbk_download_end_func = cbk_download_end_func
        self.dwn_thr = threading.Thread(target=self.download_thread)
        self.dwn_thr.start()

    """
    download_thread() -> None

    Worker loop that downloads the queued packages.
    """

    def download_thread(self):
        # NOTE(review): this loop spins without sleeping when idle and
        # shares self.downloads with get_files() without a lock — the
        # copy()/delete dance below is the only protection.
        while not self.exit:
            # If no more downloads in the list, clear the download info.
            if self.downloads == {} and self.clean_downloads:
                self.download_info = {
                    'total_download_files': 0,
                    'downloaded_files': 0,
                    'total_download_bytes': 0,
                    'downloaded_bytes': 0,
                    'current_file_url': '',
                    'current_file_path': '',
                    'current_tmp_file': ''
                }
                if self.cbk_download_info_func != None:
                    self.cbk_download_info_func(self.download_info)
            # Iterate over a snapshot so entries can be deleted safely.
            downloads_copy = self.downloads.copy()
            for filename in downloads_copy:
                if self.downloads[filename] == None:
                    # Already downloaded: drop it unless a caller is still
                    # waiting on it (no_delete).
                    if not filename in self.no_delete:
                        del self.downloads[filename]
                else:
                    url = self.downloads[filename]
                    path = self.commons.rel2abs(filename)
                    tmp = self.commons.get_temp(path)
                    self.download_info['current_file_url'] = url
                    self.download_info['current_file_path'] = path
                    self.download_info['current_tmp_file'] = tmp
                    # Download to a temp file, then move it into place and
                    # record the outcome in downloads.ini / failed.ini.
                    try:
                        self.retrieve(url, tmp)
                        self.commons.copy_move(tmp, path, True)
                        self.downloaded.set_pair('files', filename,
                                                 [self.downloads[filename]])
                    except:
                        self.failed.set_pair('files', filename,
                                             [self.downloads[filename]])
                    # None marks the entry as finished (success or not).
                    self.downloads[filename] = None
                    self.download_info['downloaded_files'] += 1
                    self.downloaded.save()
                    self.failed.save()
            # Something was processed this pass: notify the end callback.
            if downloads_copy != self.downloads and self.cbk_download_end_func != None:
                self.cbk_download_end_func()

    """
    retrieve(url, path) -> None

    Download a file from url to path.

    url = Source URL.
    path = Dest path.
    """

    def retrieve(self, url='', path=''):
        # Number of bytes to read each time.
        buff_size = 1024
        # Open the source file.
        in_stream = urllib_23.urlopen(url)
        # Open the dest file.
        out_stream = open(path, 'wb')
        # Read with a buffer size of buff_size, reporting progress after
        # every chunk; self.exit aborts mid-download.
        data = in_stream.read(buff_size)
        while (data != b'') and (not self.exit):
            out_stream.write(data)
            self.download_info['downloaded_bytes'] += len(data)
            data = in_stream.read(buff_size)
            if self.cbk_download_info_func != None:
                self.cbk_download_info_func(self.download_info)
        out_stream.close()
        in_stream.close()
        # An aborted download leaves no partial file behind.
        if self.exit:
            os.remove(path)

    """
    get_files(files={}, wait=False, openfiles=False, cache=False,
              filemode='rb') -> {filename: fileptr}

    files = Files to retrieve {filename: url} or {filename: [url, size]}.
    wait = Wait until all downloads are finished.
    openfiles = Open files after download.
    cache = In cache mode the files are downloaded only once.
    filemode = File open mode, if openfiles=True.
    """

    def get_files(self, files={}, wait=False, openfiles=False, cache=False,
                  filemode='rb'):
        # Normalise keys/urls to plain str (legacy backport shim).
        _fs = {}
        for filename in files:
            if type(files[filename]) == type([]):
                _fs[str(filename)] = [
                    str(files[filename][0]), files[filename][1]
                ]
            else:
                _fs[str(filename)] = str(files[filename])
        files = _fs
        # Don't clean the download list until the new files are added.
        self.clean_downloads = False
        # First check that the plain-URL entries exist (and get sizes).
        ping_urls = []
        for filename in files:
            if type(files[filename]) == type(''):
                ping_urls += [files[filename]]
        pinger = Ping(ping_urls)
        total_download_files = 0
        total_download_bytes = 0
        download_files = {}
        for filename in files:
            if type(files[filename]) == type(''):
                # Size comes from the ping result for plain-URL entries.
                url = files[filename]
                filesize = pinger.ping_results[url][0]
            else:
                # [url, size] entries carry their size directly.
                url = files[filename][0]
                filesize = files[filename][1]
            # Queue the file unless it is unreachable, or cache mode is on
            # and the file already exists locally.
            if filesize != None and (not cache or
                                     (cache and not os.path.exists(
                                         self.commons.rel2abs(filename)))):
                download_files[filename] = url
                total_download_files += 1
                total_download_bytes += filesize
            else:
                download_files[filename] = None
                self.failed.set_pair('files', filename, [url])
            # Protect every entry a waiting caller will poll below.
            if wait:
                self.no_delete.add(filename)
        self.failed.save()
        self.download_info['total_download_files'] += total_download_files
        self.download_info['total_download_bytes'] += total_download_bytes
        self.downloads.update(download_files)
        if wait:
            # NOTE(review): busy-wait without sleep until every entry is
            # marked done (None) by the download thread.
            w = True
            while w:
                ready = True
                for filename in files:
                    if self.downloads[filename] != None:
                        ready &= False
                if ready:
                    w = False
            for filename in files:
                self.no_delete.discard(filename)
        fp_list = {}
        if openfiles:
            # Open each downloaded file; None marks the ones that failed.
            for filename in files:
                full_path = self.commons.rel2abs(filename)
                try:
                    fp_list[filename] = open(full_path, filemode)
                except:
                    fp_list[filename] = None
        # All files were added; the download list can be cleaned again.
        self.clean_downloads = True
        return fp_list
class PrimaryXmlGz(xml.sax.handler.ContentHandler):
    """
    __init__(filename='', extras={})

    SAX parser that loads a gzipped primary.xml index into package tables.

    filename = File name to parse.
    extras = Extra values to construct the packages table.
    """

    def __init__(self, filename='', extras={}):
        self.commons = Commons()
        main_ini_file = INIFile(self.commons['ini_file'])
        # Architecture configured in the main INI file.
        main_arch = main_ini_file['DEFAULT']['Arch'][0]
        # Map RPM dependency flag names to comparison operators.
        self.flags = {
            'LE': '<=',
            'GE': '>=',
            'EQ': '==',
            'LT': '<',
            'GT': '>'
        }
        # Build the i386..i686 (+noarch) architecture family.
        self.archs_ix86 = {}
        for i in range(3, 7):
            self.archs_ix86[str(i).join(['i', '86'])] = ''
        self.archs_ix86['noarch'] = ''
        self.archs_Amd64 = {'x86_64': '', 'noarch': ''}
        self.archs_ia64 = {'ia64': '', 'noarch': ''}
        # Select the family that contains the configured architecture.
        self.allowed_archs = []
        if main_arch in self.archs_ix86:
            self.allowed_archs = self.archs_ix86
        elif main_arch in self.archs_Amd64:
            self.allowed_archs = self.archs_Amd64
        elif main_arch in self.archs_ia64:
            self.allowed_archs = self.archs_ia64
        # SAX parsing state.  The single-key dicts act as cheap flags that
        # the handlers test via try/except key lookups.
        self.package = {}
        self.package_tag = {'package': ''}
        self.entry_tag = {'rpm:entry': ''}
        self.arch_tag = {'arch': ''}
        self.provides_requires = {'rpm:provides': [], 'rpm:requires': []}
        # Tags whose text content must be captured.
        self.catch_tags_contents = {
            'name': '',
            'arch': '',
            'summary': '',
            'description': '',
            'rpm:group': ''
        }
        # Tags whose listed attributes must be captured.
        self.catch_tags_attrs = {
            'version': ['ver', 'rel'],
            'size': ['package'],
            'location': ['href']
        }
        self.catch_content = {'False': ''}
        self.lock_arch = {'False': ''}
        self.cur_tag = ''
        self.cur_prov_req_tag = ''
        self.contents = ''
        self.extras = extras
        # Parsing results.
        self.packages = []
        self.requires = {}
        self.provides = {}
        try:
            xml.sax.parse(gzip.open(filename), self)
        except:
            # Best effort: an unreadable index yields empty tables.
            pass

    """
    startElement(name, attrs) -> None

    Callback function.
    """

    def startElement(self, name, attrs):
        # try/except key lookups are used instead of if/else chains (the
        # original author found this faster); a KeyError simply means
        # "this tag is not in that category".
        try:
            # Skip everything while the package's arch is locked out.
            ok = self.lock_arch['False']
            try:
                # <package> starts a new package record.
                ok = self.package_tag[name]
                self.package = {}
            except:
                pass
            try:
                # Text-content tags: start capturing characters.
                ok = self.catch_tags_contents[name]
                self.package[name] = ''
                self.catch_content = {'True': ''}
                self.cur_tag = name
            except:
                pass
            try:
                # Attribute tags: store each attribute as 'tag/attr'.
                for attr in self.catch_tags_attrs[name]:
                    self.package['/'.join([name,
                                           attr])] = attrs.getValue(attr)
            except:
                pass
            try:
                # <rpm:provides>/<rpm:requires>: start a new entry list.
                ok = self.provides_requires[name]
                self.provides_requires[name] = []
                self.cur_prov_req_tag = name
            except:
                pass
            try:
                # <rpm:entry>: one dependency, optionally versioned.
                ok = self.entry_tag[name]
                try:
                    flag = [
                        self.flags[attrs.getValue('flags')],
                        attrs.getValue('ver')
                    ]
                except:
                    flag = ['', '']
                self.provides_requires[self.cur_prov_req_tag] += [
                    [attrs.getValue('name')] + flag
                ]
            except:
                pass
        except:
            pass

    """
    characters(content) -> None

    Callback function.
    """

    def characters(self, content):
        try:
            # Accumulate text only while a capture is active.
            ok = self.catch_content['True']
            self.contents += content
        except:
            pass

    """
    endElement(name) -> None

    Callback function.
    """

    def endElement(self, name):
        try:
            # Close an active text capture into the package record.
            ok = self.catch_content['True']
            self.package[self.cur_tag] = self.contents
            self.catch_content = {'False': ''}
            self.cur_tag = ''
            self.contents = ''
        except:
            pass
        try:
            # </arch>: lock the package out if its arch is not allowed.
            ok = self.arch_tag[name]
            try:
                ok = self.allowed_archs[self.package[name]]
            except:
                self.lock_arch = {'True': ''}
        except:
            pass
        try:
            ok = self.provides_requires[name]
            self.cur_prov_req_tag = ''
        except:
            pass
        try:
            # </package>: emit the finished package row.
            ok = self.package_tag[name]
            try:
                ok = self.lock_arch['False']
                # The package id is '<section>/<basename of href>'.
                package_id = ''.join(['/', self.package['location/href']])
                package_id = '/'.join([
                    self.extras['section'],
                    package_id[package_id.rfind('/') + 1:]
                ])
                size = int(self.package['size/package'])
                pkg = [
                    package_id, self.package['name'],
                    self.package['version/ver'],
                    self.package['version/rel'], self.package['arch'],
                    self.package['rpm:group'], 'No',
                    self.extras['section'], size, self.package['summary'],
                    self.package['description'],
                    self.commons.urljoin(self.extras['url'],
                                         self.package['location/href']),
                    '/'.join([
                        self.extras['path'], self.package['location/href']
                    ]), 0
                ]
                self.packages += [pkg]
                self.requires[package_id] = [
                    size, self.provides_requires['rpm:requires'], []
                ]
                # Index every provide by name:
                # {name: [[flag, version, package_id], ...]}.
                for provide in self.provides_requires['rpm:provides']:
                    prov = provide + [package_id]
                    if prov[0] in self.provides:
                        self.provides[prov[0]] += [prov[1:]]
                    else:
                        self.provides[prov[0]] = [prov[1:]]
            except:
                pass
            # Reset the arch lock for the next package.
            self.lock_arch = {'False': ''}
        except:
            pass
# Email : [email protected] # Web-Site: http://sushi-huh.sourceforge.net/ # # Main module. import os import sys # The paths were are located the Python modules. folders = ['lib'] # Import the paths. for folder in folders: path = os.path.join('il_cuore', folder) if not path in sys.path: sys.path += [path] from sushi_huh_Commons import Commons from sushi_huh_MainServer import MainServer if __name__ == "__main__": commons = Commons() servers = [MainServer(commons['server_port'], True)] for server in servers: server.start() for server in servers: server.join()
class PackagesGz:
    """
    Parser for a Debian-style Packages.gz index.

    __init__(filename='', extras={})

    filename = File name to parse.
    extras = Extra values to construct the packages table
             (uses the keys 'section', 'url' and 'repo').

    Fills:
      self.packages -- one row per package (id, name, version, release,
                       arch, section, ..., size, summary, description,
                       url, path, 0).
      self.requires -- package_id -> [size, depends, recommends+suggests].
      self.provides -- provide name -> [[flag, version, package_id], ...].
    """

    def __init__(self, filename='', extras={}):
        self.commons = Commons()
        # Map Debian relation spellings onto the comparison operators used
        # by the rest of the code ('=' -> '==', '<<' -> '<', '>>' -> '>').
        self.replaces = {' (= ': ' (== ', ' (<< ': ' (< ', ' (>> ': ' (> '}
        try:
            f = gzip.open(filename)
            data = f.read()
            f.close()
        except (OSError, IOError):
            # Unreadable/corrupt index: leave the object without tables,
            # matching the original best-effort behavior.
            return
        # Each package stanza is separated by a blank line; drop the empty
        # trailing chunk produced by the final '\n\n'.
        # (Removed a leftover debug dump of the whole index to stdout.)
        packages_info = data.split(b'\n\n')[:-1]
        self.packages = []
        self.provides = {}
        self.requires = {}
        # Each package stanza is transformed into a {key: value} dict.
        for package_info in packages_info:
            package = {}
            cur_tag = ''
            for line in package_info.split(b'\n'):
                # A space/tab prefix continues the previous multi-line
                # value.  (BUG FIX: the old test ord(str(line[0])) raised
                # TypeError on Python 3, where indexing bytes yields int.)
                if line[:1] in (b' ', b'\t'):
                    try:
                        package[cur_tag] += '\n' + line.decode()
                    except UnicodeDecodeError:
                        package[cur_tag] += '\n' + line.decode('iso8859_15')
                # First line of a (possibly multi-line) "Key: value".
                else:
                    key, value = line.split(b': ', 1)
                    cur_tag = key.decode()
                    try:
                        package[cur_tag] = value.decode()
                    except UnicodeDecodeError:
                        package[cur_tag] = value.decode('iso8859_15')
            try:
                pkgname = package['Package']
            except KeyError:
                # Fall back to the name embedded in the file name.
                pkgname = self.get_package(package['Filename'])
            try:
                requires = self.parse_deps(package['Depends'])
            except KeyError:
                requires = []
            # BUG FIX: 'suggests' was never reset between iterations, so a
            # package could inherit and accumulate a previous package's
            # Recommends/Suggests (and the first package's Recommends was
            # lost to a NameError).  Start from a fresh list every stanza.
            suggests = []
            try:
                suggests += self.parse_deps(package['Recommends'])
            except KeyError:
                pass
            try:
                suggests += self.parse_deps(package['Suggests'])
            except KeyError:
                pass
            version, release = self.split_version_release(package['Version'])
            summary, description = self.split_summary_description(
                package['Description'])
            package_id = ''.join([
                extras['section'], '/', pkgname, '_', package['Version'],
                '_', package['Architecture'], '.deb'
            ])
            url = self.commons.urljoin(extras['url'], package['Filename'])
            path = '/'.join(['downloads', extras['repo'],
                             package['Filename']])
            # Construct the package row.
            pkg = [
                package_id, pkgname, version, release,
                package['Architecture'], package['Section'], 'No',
                extras['section'],
                int(package['Size']), summary, description, url, path, 0
            ]
            # And add this to the table.
            self.packages += [pkg]
            self.requires[package_id] = [
                int(package['Size']), requires, suggests
            ]
            # A package always provides itself at its exact version.
            provs = [[pkgname, '==', package['Version']]]
            try:
                provs += self.parse_deps(package['Provides'])
            except KeyError:
                pass
            for provide in provs:
                prov = provide + [package_id]
                if prov[0] in self.provides:
                    self.provides[prov[0]] += [prov[1:]]
                else:
                    self.provides[prov[0]] = [prov[1:]]

    def split_version_release(self, version_release=''):
        """
        split_version_release(version_release='') -> (str, str)

        Split 'version-release' at the LAST dash into (version, release);
        release is '' when there is no dash.
        """
        if '-' in version_release:
            version = version_release[:version_release.rfind('-')]
            release = version_release[version_release.rfind('-') + 1:]
        else:
            version = version_release
            release = ''
        return version, release

    def split_summary_description(self, summary_description=''):
        """
        split_summary_description(summary_description='') -> (str, str)

        Split a Description field into (first line, remaining lines).
        """
        summary = summary_description[:summary_description.find('\n')]
        description = summary_description[summary_description.find('\n') + 1:]
        return summary, description

    def get_package(self, filename=''):
        """
        get_package(filename='') -> str

        Return the package name: the part of the .deb base name before
        the first underscore.
        """
        filename = filename[filename.rfind('/') + 1:]
        return filename[:filename.find('_')]

    def parse_deps(self, deps=''):
        """
        parse_deps(deps='') -> []

        Convert a Debian dependency string into a list of
        [name, flag, version] triples suitable for comparison.
        Alternatives ('a | b') are flattened into separate entries.
        """
        for replace in self.replaces:
            deps = deps.replace(replace, self.replaces[replace])
        deps = deps.replace(' | ', ', ').split(', ')
        return [self.split_dependency(dep) for dep in deps]

    def split_dependency(self, name=''):
        """
        split_dependency(name='') -> ['', '', '']

        Split one 'name (op version)' term into [name, flag, version];
        flag and version are '' when no constraint (or a wildcard) is given.
        """
        flag = ''
        version = ''
        if ' (' in name:
            _name = name.split(' (')
            name = _name[0]
            # Wildcard constraints carry no comparable version.
            if '*' not in _name[1]:
                version = _name[1].replace(')', '').split(' ')
                flag = version[0]
                version = version[1]
        return [name, flag, version]
def __init__(self, filename='', extras={}): self.commons = Commons() self.replaces = {' (= ': ' (== ', ' (<< ': ' (< ', ' (>> ': ' (> '} try: f = gzip.open(filename) data = f.read() f.close() except: return # Each package is separated by a two new lines characters. packages_info = data.split(b'\n\n') packages_info = packages_info[:len(packages_info) - 1] sys.__stdout__.write(str(packages_info) + '\n') self.packages = [] self.provides = {} self.requires = {} # Each package is transformed in a {key: value}. for package_info in packages_info: lines = package_info.split(b'\n') package = {} cur_tag = '' for line in lines: # This line is part of a previous multiline value. if ord(str(line[0])) == 32 or ord(str(line[0])) == 9: try: package[cur_tag] += '\n' + line.decode() except: package[cur_tag] += '\n' + line.decode('iso8859_15') # This is a line of {key: value}, and can be the first line of a multiline value. else: key, value = line.split(b': ', 1) cur_tag = key.decode() try: package[cur_tag] = value.decode() except: package[cur_tag] = value.decode('iso8859_15') try: pkgname = package['Package'] except: pkgname = self.get_package(package['Filename']) try: requires = self.parse_deps(package['Depends']) except: requires = [] try: suggests += self.parse_deps(package['Recommends']) except: suggests = [] try: suggests += self.parse_deps(package['Suggests']) except: pass version, release = self.split_version_release(package['Version']) summary, description = self.split_summary_description( package['Description']) package_id = ''.join([ extras['section'], '/', pkgname, '_', package['Version'], '_', package['Architecture'], '.deb' ]) url = self.commons.urljoin(extras['url'], package['Filename']) path = '/'.join(['downloads', extras['repo'], package['Filename']]) # Construct the package row. 
pkg = [ package_id, pkgname, version, release, package['Architecture'], package['Section'], 'No', extras['section'], int(package['Size']), summary, description, url, path, 0 ] # And add this to the table. self.packages += [pkg] self.requires[package_id] = [ int(package['Size']), requires, suggests ] provs = [[pkgname, '==', package['Version']]] try: provs += self.parse_deps(package['Provides']) except: pass for provide in provs: prov = provide + [package_id] if prov[0] in self.provides: self.provides[prov[0]] += [prov[1:]] else: self.provides[prov[0]] = [prov[1:]]
def __init__(self): self.commons = Commons()
    def __init__(self, filename='', content='', **shared_var):
        """
        Render a PyHP template: extract every '<?pyhp ... ?>' code piece,
        execute them, and replace each piece in the page with what it
        printed.  The rendered page ends up in self.html; on any failure
        a formatted error page (self.error_msg_to_html) is stored instead.

        filename   = Template file to read; if empty, 'content' is used.
        content    = Raw template text (used when filename == '').
        shared_var = Extra keyword variables; NOTE(review): not referenced
                     directly in this body - presumably meant to be visible
                     to the executed code via locals(); confirm.
        """
        try:
            if filename == '':
                raw_html = content
            else:
                # NOTE(review): the file is not closed if read() raises.
                f = open(filename, 'r')
                raw_html = f.read()
                f.close()
        except:
            # Capture the traceback text by temporarily swapping stderr for
            # an in-memory buffer (StringIO.StringIO is the Python 2
            # spelling - TODO confirm which interpreter this targets).
            old_stderr = sys.stderr
            sys.stderr = StringIO.StringIO()
            type_, value, traceback_ = sys.exc_info()
            traceback.print_exception(type_, value, traceback_)
            sys.stderr.seek(0)
            error = sys.stderr.read()
            sys.stderr.close()
            sys.stderr = old_stderr
            self.html = self.error_msg_to_html(error)
            return
        # Made available so the executed template code can use it.
        commons = Commons()
        raw_html_copy = raw_html[:]
        # pycodes: full marker-wrapped piece -> its stripped source;
        # pycodes_list preserves document order.
        pycodes = {}
        pycodes_list = []
        # Reads all code pieces...
        python_head = '<?pyhp'
        python_tail = '?>'
        match_head = raw_html_copy.find(python_head)
        while match_head != -1:
            match_tail = raw_html_copy.find(python_tail)
            code_from = code_to = raw_html_copy[match_head +
                                                len(python_head):match_tail]
            code_from = ''.join([python_head, code_from, python_tail])
            pycodes[code_from] = code_to.strip()
            pycodes_list += [code_from]
            # Continue scanning after the closing marker.
            raw_html_copy = raw_html_copy[match_tail + len(python_tail):]
            match_head = raw_html_copy.find(python_head)
        python_script = ''
        # and join all code pieces with a print('<?pyhpout?>') instruction
        # in a single code piece, so each piece's output can be separated
        # back out afterwards.
        for pycode in pycodes_list:
            python_script += 'print(\'<?pyhpout?>\')\n' + pycodes[pycode] + '\n'
        # Swap stdout for a buffer so print() output can be collected.
        old_stdout = sys.stdout
        sys.stdout = StringIO.StringIO()
        error = ''
        # Executes the code.
        # SECURITY: exec() of template-embedded code with full access to
        # globals()/locals(); only safe for fully trusted templates.
        try:
            exec(python_script, globals(), locals())
        except:
            # Same stderr-swapping trick as above to capture the traceback.
            old_stderr = sys.stderr
            sys.stderr = StringIO.StringIO()
            type_, value, traceback_ = sys.exc_info()
            traceback.print_exception(type_, value, traceback_)
            sys.stderr.seek(0)
            error = sys.stderr.read()
            sys.stderr.close()
            sys.stderr = old_stderr
        sys.stdout.seek(0)
        output = sys.stdout.read()
        sys.stdout.close()
        sys.stdout = old_stdout
        if error != '':
            self.html = self.error_msg_to_html(error)
            return
        # Obtains the output for each piece of code; the chunk before the
        # first marker is empty and is discarded.
        outputs = output.split('<?pyhpout?>\n')
        del outputs[0]
        n_code = 0
        # Replace the pieces of code with their output (dropping the
        # trailing newline each piece's final print() added).
        for output in outputs:
            pycodes[pycodes_list[n_code]] = output[:len(output) - 1]
            n_code += 1
        self.html = raw_html
        for pycode in pycodes:
            self.html = self.html.replace(pycode, pycodes[pycode])