def remove(self):
    """Delete this list and detach all of its items.

    For static lists (no ``_query``) every linked item is removed first,
    and a 'removelist' changelog entry is recorded when the list belongs
    to the local user and has a non-empty name. The list row itself is
    then deleted and the session committed.
    """
    if not self._query:
        # Iterate over a snapshot: removing from self.items while
        # iterating it directly skips every other element.
        for i in list(self.items):
            self.items.remove(i)
        if self.user_id == settings.USER_ID and self.name != '':
            Changelog.record(self.user, 'removelist', self.name)
    state.db.session.delete(self)
    state.db.session.commit()
def edit(self, data):
    """Apply the key/value pairs in ``data`` to ``self.data``.

    The 'id' key is ignored. Only values that actually differ from the
    stored ones are written. When anything changed, the object is saved
    and an 'editmeta' changelog entry is recorded.

    Returns the dict of keys that were updated (possibly empty).
    """
    changed = {
        key: value
        for key, value in data.items()
        if key != 'id' and value != self.data.get(key)
    }
    for key, value in changed.items():
        self.data[key] = value
    if changed:
        self.save()
        Changelog.record(state.user(), 'editmeta', self.key, self.value, changed)
    return changed
def remove_items(self, items):
    """Detach the given item ids from this list.

    Unknown ids and items not currently on the list are skipped. Records
    a 'removelistitems' changelog entry for the local user's named lists
    and invalidates the owner's list caches.
    """
    from item.models import Item
    for item_id in items:
        entry = Item.get(item_id)
        if entry in self.items:
            self.items.remove(entry)
            entry.update()
    state.db.session.add(self)
    state.db.session.commit()
    if self.user_id == settings.USER_ID and self.name != '':
        Changelog.record(self.user, 'removelistitems', self.name, items)
    self.user.clear_smart_list_cache()
    self.user.clear_list_cache()
def add_items(self, items):
    """Attach the given item ids to this list.

    Unknown ids are skipped. Items added to a local user's list are
    queued for download. Records an 'addlistitems' changelog entry for
    the local user's named lists and invalidates the owner's list caches.
    """
    from item.models import Item
    for item_id in items:
        entry = Item.get(item_id)
        if not entry:
            continue
        self.items.append(entry)
        if self.user_id == settings.USER_ID:
            entry.queue_download()
        entry.update()
    state.db.session.add(self)
    state.db.session.commit()
    if self.user_id == settings.USER_ID and self.name != '':
        Changelog.record(self.user, 'addlistitems', self.name, items)
    self.user.clear_smart_list_cache()
    self.user.clear_list_cache()
def create(cls, user_id, name, query=None):
    """Create a new list for ``user_id``.

    If the name is already taken it is suffixed with ' [2]', ' [3]', …
    until unique. A query makes the list 'smart', otherwise 'static'.
    The new list is appended at the end of the user's list order, and an
    'addlist' changelog entry is recorded for local static named lists.
    """
    base_name = name
    suffix = 2
    while cls.get(user_id, name):
        name = '%s [%s]' % (base_name, suffix)
        suffix += 1
    lst = cls(user_id=user_id, name=name)
    lst._query = query
    lst.type = 'smart' if lst._query else 'static'
    lst.index_ = cls.query.filter_by(user_id=user_id).count()
    state.db.session.add(lst)
    state.db.session.commit()
    if user_id == settings.USER_ID and not lst._query and name != '':
        Changelog.record(state.user(), 'addlist', lst.name)
    return lst
def api_pushChanges(user_id, changes):
    """Deprecated API endpoint, kept for protocol compatibility.

    The original apply-changes logic was disabled by an early return and
    the unreachable code has been removed (it survives in version
    control). Always returns True.
    """
    logger.debug('pushChanges no longer used, ignored')
    return True
def remove_file(self):
    """Remove the local user's copy of this item.

    Deletes all stored files from disk (pruning folders left empty),
    detaches the current user from the item and from their lists, then
    either deletes the item entirely (no users left) or refreshes it.
    Finally clears any pending transfer and records a 'removeitem'
    changelog entry.

    NOTE(review): nesting reconstructed from flattened source — confirm
    remove_empty_folders/session.delete run for every file, not only
    when the path existed on disk.
    """
    for f in self.files.all():
        path = f.fullpath()
        if os.path.exists(path):
            os.unlink(path)
        # prune directories left empty by the unlink
        remove_empty_folders(os.path.dirname(path))
        state.db.session.delete(f)
    user = state.user()
    if user in self.users:
        self.users.remove(user)
    # drop the item from every list owned by this user
    for l in self.lists.filter_by(user_id=user.id):
        l.items.remove(self)
    state.db.session.commit()
    if not self.users:
        # nobody references the item any more
        self.delete()
    else:
        self.update()
    Transfer.query.filter_by(item_id=self.id).delete()
    Changelog.record(user, 'removeitem', self.id)
def update_meta(self, data):
    """Merge recognized keys from ``data`` into ``self.meta``.

    Only keys listed in ``self.meta_keys`` are accepted; any other key
    already present in the meta dict is purged. When something changed,
    the item is refreshed, timestamped and saved, and an 'edititem'
    changelog entry is recorded for the current user (if attached).
    """
    dirty = False
    record = {}
    for key in self.meta_keys:
        if key not in data:
            continue
        if self.meta.get(key) != data[key]:
            record[key] = data[key]
            self.meta[key] = data[key]
            dirty = True
    # drop stale keys that are no longer part of the schema
    for key in list(self.meta):
        if key not in self.meta_keys:
            del self.meta[key]
            dirty = True
    if dirty:
        self.update()
        self.modified = datetime.utcnow()
        self.save()
        user = state.user()
        if record and user in self.users:
            Changelog.record(user, 'edititem', self.id, record)
def sortLists(data):
    '''
    takes {
        ids
    }

    Reorders the user's lists to match the given id order; records an
    'orderlists' changelog entry naming the static lists that moved.
    '''
    logger.debug('sortLists %s', data)
    lists = []
    # enumerate replaces the manual counter; `list_id` avoids shadowing
    # the builtin `id`.
    for index, list_id in enumerate(data['ids']):
        l = models.List.get(list_id)
        l.index_ = index
        if l.type == 'static':
            lists.append(l.name)
        state.db.session.add(l)
    state.db.session.commit()
    if lists:
        Changelog.record(state.user(), 'orderlists', lists)
    return {}
def sortLists(data):
    """
    takes {
        ids
    }
    """
    logger.debug("sortLists %s", data)
    static_names = []
    position = 0
    for list_id in data["ids"]:
        entry = models.List.get(list_id)
        entry.index_ = position
        position += 1
        if entry.type == "static":
            static_names.append(entry.name)
        state.db.session.add(entry)
    state.db.session.commit()
    if static_names:
        Changelog.record(state.user(), "orderlists", static_names)
    return {}
def save_file(self, content):
    """Persist downloaded ``content`` as this item's file.

    Verifies the content hash matches the item id, writes the bytes to
    the item's path, attaches the current user, marks the transfer
    complete and records an 'additem' changelog entry. Returns True on
    success, False on invalid content or if a File row already exists.

    NOTE(review): nesting reconstructed from flattened source — confirm
    the write only happens when the path did not already exist.
    """
    u = state.user()
    f = File.get(self.id)
    # content-addressed check: the payload must hash to the item id
    content_id = media.get_id(data=content)
    if content_id != self.id:
        logger.debug('INVALID CONTENT %s vs %s', self.id, content_id)
        return False
    if not f:
        path = 'Downloads/%s.%s' % (self.id, self.info['extension'])
        info = self.info.copy()
        # strip volatile presentation keys before storing file info
        for key in ('mediastate', 'coverRatio', 'previewRatio'):
            if key in info:
                del info[key]
        f = File.get_or_create(self.id, info, path=path)
        path = self.get_path()
        if not os.path.exists(path):
            ox.makedirs(os.path.dirname(path))
            with open(path, 'wb') as fd:
                fd.write(content)
        if u not in self.users:
            self.add_user(u)
        # mark the transfer as finished
        t = Transfer.get_or_create(self.id)
        t.progress = 1
        t.save()
        self.added = datetime.utcnow()
        Changelog.record(u, 'additem', self.id, f.info)
        self.update()
        f.move()
        self.update_icons()
        self.save()
        trigger_event('transfer', {
            'id': self.id, 'progress': 1
        })
        return True
    else:
        logger.debug('TRIED TO SAVE EXISTING FILE!!!')
        # still mark the transfer complete so it stops being retried
        t = Transfer.get_or_create(self.id)
        t.progress = 1
        t.save()
        self.update()
        return False
def update_peering(self, peered, username=None):
    """Accept (``peered=True``) or revoke peering with this user.

    Accepting clears the pending state, optionally stores the peer's
    username, and records an 'addpeer' changelog entry the first time.
    Revoking tears everything down: the peer's lists, their item links
    (deleting items nobody holds any more), their changelog rows and UI
    folder entry, and records 'removepeer' if we were peered before.

    NOTE(review): uses ``logging.debug`` directly while sibling code
    uses a module ``logger`` — confirm which is intended.
    """
    was_peering = self.peered
    if peered:
        logging.debug('update_peering, pending: %s queued: %s', self.pending, self.queued)
        self.queued = self.pending != 'sent'
        self.pending = ''
        if username:
            self.info['username'] = username
        self.update_name()
        # FIXME: need to set peered to False to not trigger changelog event
        # before other side receives acceptPeering request
        self.peered = False
        self.save()
        if not was_peering:
            Changelog.record(state.user(), 'addpeer', self.id, self.nickname)
        self.peered = True
        self.save()
    else:
        self.pending = ''
        self.peered = False
        self.queued = False
        self.update_name()
        self.save()
        # purge everything that belonged to this peer
        List.query.filter_by(user_id=self.id).delete()
        for i in self.items:
            i.users.remove(self)
            if not i.users:
                # no remaining holders: drop the item entirely
                i.delete()
        Changelog.query.filter_by(user_id=self.id).delete()
        if self.id in settings.ui['showFolder']:
            del settings.ui['showFolder'][self.id]
        self.clear_list_cache()
        self.save()
        if was_peering:
            Changelog.record(state.user(), 'removepeer', self.id)
        self.save()
def editList(data):
    """
    takes {
        id
        name
        query
    }
    """
    logger.debug("editList %s", data)
    lst = models.List.get_or_create(data["id"])
    previous_name = lst.name
    if "name" in data:
        lst.name = data["name"]
    if "query" in data:
        if lst.type != "smart":
            raise Exception("query only for smart lists")
        validate_query(data["query"])
        lst._query = data["query"]
    if lst.type == "static" and previous_name != lst.name:
        Changelog.record(state.user(), "editlist", previous_name, {"name": lst.name})
    lst.save()
    lst.user.clear_smart_list_cache()
    return lst.json()
def editList(data):
    '''
    takes {
        id
        name
        query
    }
    '''
    logger.debug('editList %s', data)
    l = models.List.get_or_create(data['id'])
    old_name = l.name
    if 'name' in data:
        l.name = data['name']
    wants_query = 'query' in data
    if wants_query and l.type != 'smart':
        raise Exception('query only for smart lists')
    if wants_query:
        validate_query(data['query'])
        l._query = data['query']
    renamed = l.type == 'static' and old_name != l.name
    if renamed:
        Changelog.record(state.user(), 'editlist', old_name, {'name': l.name})
    l.save()
    l.user.clear_smart_list_cache()
    return l.json()
def update_primaryid(self, key=None, id=None, scrape=True):
    """Set or clear the item's primary external id.

    With no arguments, re-applies the stored ``meta['primaryid']`` key.
    With an id, stores it under ``key`` and as the primary id; without
    one, removes the key and the primary id. All other id keys are
    purged, files are moved to reflect the new id, and an 'edititem'
    changelog entry is recorded for the current user.
    """
    if key is None and id is None:
        if 'primaryid' not in self.meta:
            return
        else:
            # re-use the stored primary key; id stays None (clear path)
            key = self.meta['primaryid'][0]
    record = {}
    if id:
        # NOTE(review): `not key in self.meta[key]` looks like it should
        # be `not id in self.meta[key]` (checking whether the id is
        # already recorded) — benign since the set-union below is
        # idempotent, but confirm intent.
        if not key in self.meta or not key in self.meta[key]:
            self.meta[key] = list(set([id] + self.meta.get(key, [])))
        self.meta['primaryid'] = [key, id]
        record[key] = id
    else:
        if key in self.meta:
            del self.meta[key]
        if 'primaryid' in self.meta:
            del self.meta['primaryid']
        record[key] = ''
    # only one id key may remain: drop all the others
    for k in self.id_keys:
        if k != key:
            if k in self.meta:
                del self.meta[k]
    logger.debug('set primaryid %s %s', key, id)
    # get metadata from external resources
    if scrape:
        self.scrape()
    self.update_icons()
    self.modified = datetime.utcnow()
    self.save()
    #if not scrape:
    #    Scrape.get_or_create(self.id)
    # files are stored under the primary id, so relocate them
    for f in self.files.all():
        f.move()
    user = state.user()
    if user in self.users:
        Changelog.record(user, 'edititem', self.id, record)
def add_file(id, f, prefix, from_=None):
    """Register file ``f`` (found under ``prefix``) as item file ``id``.

    Extracts media metadata, creates/fetches the File row, migrates any
    'primaryid' from the file/item info into item meta, attaches the
    current user and records 'additem'/'edititem' changelog entries.
    Scrapes external metadata when online. Returns the File row.

    NOTE(review): reconstructed from flattened source — confirm whether
    the second Changelog.record belongs inside ``if state.online:``.
    """
    user = state.user()
    # path relative to the scanned prefix
    path = f[len(prefix):]
    data = media.metadata(f, from_)
    file = File.get_or_create(id, data, path)
    item = file.item
    if "primaryid" in file.info:
        del file.info["primaryid"]
        state.db.session.add(file)
    if "primaryid" in item.info:
        # promote the detected primary id into editable meta
        item.meta["primaryid"] = item.info.pop("primaryid")
        state.db.session.add(item)
    item.add_user(user)
    Changelog.record(user, "additem", item.id, file.info)
    item.added = datetime.utcnow()
    if state.online:
        item.scrape()
    # Changelog.record(user, 'edititem', item.id, dict([item.meta['primaryid']]))
    Changelog.record(user, "edititem", item.id, item.meta)
    item.update_icons()
    item.modified = datetime.utcnow()
    item.update()
    # Scrape.get_or_create(item.id)
    return file
def add_file(id, f, prefix, from_=None):
    """Register file ``f`` (found under ``prefix``) as item file ``id``
    and attach it to its item for the current user."""
    user = state.user()
    relative_path = f[len(prefix):]
    file_info = media.metadata(f, from_)
    db_file = File.get_or_create(id, file_info, relative_path)
    item = db_file.item
    if 'primaryid' in db_file.info:
        del db_file.info['primaryid']
        state.db.session.add(db_file)
    if 'primaryid' in item.info:
        item.meta['primaryid'] = item.info.pop('primaryid')
        state.db.session.add(item)
    item.add_user(user)
    Changelog.record(user, 'additem', item.id, db_file.info)
    item.added = datetime.utcnow()
    if state.online:
        item.scrape()
    Changelog.record(user, 'edititem', item.id, item.meta)
    item.update_icons()
    item.modified = datetime.utcnow()
    item.update()
    return db_file
def pullChanges(self):
    """Fetch and apply changelog entries from this peer.

    Returns True when nothing needs doing (offline/not peered), False
    when the peer is unreachable or sent nothing, otherwise the result
    of applying the received changes.
    """
    if not self.online or not self.user.peered:
        return True
    last = Changelog.query.filter_by(user_id=self.user_id).order_by('-revision').first()
    from_revision = last.revision + 1 if last else 0
    try:
        changes = self.request('pullChanges', from_revision)
    except Exception:
        # was a bare `except:` — that would also swallow SystemExit and
        # KeyboardInterrupt; network/protocol failures are what we mean
        self.online = False
        logger.debug('%s went offline', self.user.name)
        return False
    if not changes:
        return False
    with db.session():
        r = Changelog.apply_changes(self.user, changes)
    return r
def __init__(self, config_path, context, environment=None):
    """Initialize the service wrapper from a saas config file.

    config_path: path to the saas config.
    context: config context to switch to (if truthy).
    environment: optional environment name; the literal string "None"
        is treated as unset (CLI pass-through).
    """
    self.config = SaasConfig(config_path, context)
    self.changelog = Changelog(self)
    if context:
        self.config.switch_context(context)
    # lazy %-args: logging formats only if INFO is actually emitted
    logger.info("Current context: %s", self.config.current())
    self._default_hash_length = 6
    self._services = None
    self._environment = None
    if environment and environment != "None":
        self._environment = environment
    self.load_from_config()
def pullChanges(self):
    """Fetch and apply changelog entries from this peer.

    Returns True when nothing needs doing (offline/not peered), False
    when the peer is unreachable or sent nothing, otherwise the result
    of applying the received changes.
    """
    if not self.online or not self.user.peered:
        return True
    last = Changelog.query.filter_by(
        user_id=self.user_id).order_by('-revision').first()
    from_revision = last.revision + 1 if last else 0
    try:
        changes = self.request('pullChanges', from_revision)
    except Exception:
        # was a bare `except:` — that would also swallow SystemExit and
        # KeyboardInterrupt; treat only ordinary errors as "went offline"
        self.online = False
        logger.debug('%s went offline', self.user.name)
        return False
    if not changes:
        return False
    with db.session():
        r = Changelog.apply_changes(self.user, changes)
    return r
def changelog(self):
    """
    Return a Changelog object for the changelog.Debian.gz of the
    present .deb package. Return None if no changelog can be found.
    """
    if self.__pkgname is None:
        self.__updatePkgName()
    for fname in [CHANGELOG_DEBIAN % self.__pkgname,
                  CHANGELOG_NATIVE % self.__pkgname]:
        if self.data.has_file(fname):
            # context manager guarantees the GzipFile is closed even if
            # read() raises (the original leaked it on error)
            with gzip.GzipFile(fileobj=self.data.get_file(fname)) as gz:
                raw_changelog = gz.read()
            return Changelog(raw_changelog)
    return None
def reset(self):
    """Drop this metadata entry, record the reset, and refresh items."""
    Changelog.record(state.user(), 'resetmeta', self.key, self.value)
    session = state.db.session
    session.delete(self)
    session.commit()
    self.update_items()
import sys

from changelog import Changelog
from parametros import Parametros


def main():
    """Dispatch changelog generation based on the -i/-t/-p flag."""
    args = sys.argv[1:]
    # SECURITY: connection credentials are hard-coded (masked here);
    # move them to environment variables or a config file kept out of
    # version control.
    params = Parametros(args=args, address='vhs4h1809', port=30050,
                        user='******', password='******',
                        schema='SYN4TDF_EVOLUCAO')
    gerador = Changelog(params)
    if params.ok:
        tipo = params.getTipo()
        if tipo == '-i':
            gerador.changelogInsert(params)
        elif tipo == '-t':
            gerador.changelogTabela(params)
        elif tipo == '-p':
            gerador.changelogProcedure(params)


if __name__ == '__main__':
    main()
def generate_changelogs(pid, branch, pat):
    '''
    Generates changelogs from MR descriptions on the given branch
    :param pid: The project ID of the project to work with
    :param branch: The name of the target branch for which we look for MRs
    :param pat: The personal access token to use
    '''
    # Load the latest MR date on the branch
    working_branch = Branch(pid, branch)
    working_branch.load_latest_mr_date(str(pid)+'.txt')
    # Show when the merge date of the MR we processed last run
    last_mr_date = working_branch.get_latest_mr_date()
    if last_mr_date:
        print("Latest merge date:")
        last_processed_date = dateparser.parse(last_mr_date)
        print(" {} - {}".format(branch, last_processed_date.strftime(DATE_FORMAT)))
    # Fetch new MRs on the branch
    mrs, status_code = working_branch.fetch_new_mrs(pat)
    # Didn't fetch any MRs
    if not mrs:
        print("No new MRs to parse.")
        return
    print("Parsing merge requests. This may take a while...")
    print("--------------------")
    # Fun facts to track
    mrs_parsed = 0
    cls_generated = 0
    # This is stored to make sure we parse all MRs merged after
    # the MR most lately processed in the previous script run,
    # even if the MRs were merged "out of order" by issue number.
    # NOTE(review): get_latest_mr_date() returning None here (no prior
    # run and no merged MRs) would make dateparser.parse fail — confirm.
    initial_latest_mr_date = dateparser.parse(working_branch.get_latest_mr_date())
    # Keep fetching changelogs until there are none left to parse
    while mrs:
        if status_code == 429:
            print("Rate limit hit for the API. Try running the script again later.")
            break
        if status_code != 200:
            print("Failed to fetch additional MRs!\n(Status code: {})".format(status_code))
            break
        # Go through all the MRs and make changelogs
        for merge_request in mrs:
            iid = merge_request.get("iid")
            title = merge_request.get("title")
            # Check if this MR was merged later than the current most lately merged MR
            latest_mr_date = dateparser.parse(working_branch.get_latest_mr_date())
            if merge_request.get("merged_at") is None:
                continue
            mr_merged_date = dateparser.parse(merge_request.get("merged_at"))
            # This can happen if someone updates the MR between the script being run
            # So this prevents re-processing processed MRs
            if mr_merged_date < initial_latest_mr_date:
                continue
            # Update the latest MR date
            if mr_merged_date > latest_mr_date:
                working_branch.set_latest_mr_date(merge_request.get("merged_at"))
            # Find an author for the CL (this is for fallbacks)
            author = merge_request.get("author")
            user = "******"
            if author:
                user = author.get("username")
            # Make the changelog
            print("Parsing MR #{} {}".format(iid, title))
            changelog = Changelog()
            success = changelog.parse_changelog(merge_request.get("description"))
            mrs_parsed += 1
            # No changelog found :(
            if not success:
                continue
            if not changelog.author:
                changelog.set_author(user)  # Fallback to gitlab username as CL author
            # make a YAML file for the changelog
            file_name = "{}-merge_request-{}".format(user, merge_request.get("id"))
            changelog.dump_yaml(file_name)
            print("Generated changelog for MR #{} {}".format(iid, file_name))
            cls_generated += 1
        # Fetch new MRs to process
        mrs, status_code = working_branch.fetch_new_mrs(pat)
    print("--------------------")
    # High quality fluffprint
    print("Parsed {} merge request{}.".format(mrs_parsed, "" if mrs_parsed == 1 else "s"))
    print("{} changelog{} generated.".format(cls_generated, " was" if cls_generated == 1 else "s were"))
    # Save our progress on changelog generation
    working_branch.save_latest_mr_date(str(pid)+'.txt')
def run (self):
    """Build a Debian source package from this distribution.

    Python 2 distutils command (`unicode`, octal literal `0755`): runs
    `install` into a staging dir, writes the `debian/` control files
    (rules, compat, dirs, changelog, maintainer scripts, control,
    copyright), tars the build dir, then emits the .dsc and .changes
    files alongside it in dist_dir.

    NOTE(review): files opened via open(...).write(...) are never
    explicitly closed — relies on CPython refcounting; nesting was
    reconstructed from flattened source.
    """
    #Create folders and copy sources files
    DEBIAN_DIR = os.path.join(self.build_dir,'debian')
    DATA_DIR = os.path.join(self.build_dir,self.debian_package)
    mkpath(DEBIAN_DIR)
    mkpath(self.dist_dir)
    #mkpath(os.path.join(DATA_DIR,'usr','bin'))
    self.bdist_dir = DATA_DIR
    # stage a full install into DATA_DIR
    install = self.reinitialize_command('install', reinit_subcommands=1)
    install.root = self.bdist_dir
    if self.install_purelib is not None:
        install.install_purelib = self.install_purelib
    install.skip_build = 0
    install.warn_dir = 1
    self.run_command('install')
    #Create the debian rules
    rules = Rules(self.debian_package,DATA_DIR)
    dirs = rules.dirs
    open(os.path.join(DEBIAN_DIR,"rules"),"w").write(unicode(rules.getContent()).encode('utf-8'))
    os.chmod(os.path.join(DEBIAN_DIR,"rules"),0755)
    #Create the debian compat
    open(os.path.join(DEBIAN_DIR,"compat"),"w").write("5\n")
    #Create the debian dirs
    open(os.path.join(DEBIAN_DIR,"dirs"),"w").write("\n".join(dirs))
    #Create the debian changelog
    d=datetime.now()
    self.buildDate=d.strftime("%a, %d %b %Y %H:%M:%S +0000")
    clog = Changelog(self.debian_package,self.version,self.buildversion,self.changelog,self.distribution.get_maintainer(),self.distribution.get_maintainer_email(),self.buildDate)
    open(os.path.join(DEBIAN_DIR,"changelog"),"w").write(unicode(clog.getContent()).encode('utf-8'))
    #Create the pre/post inst/rm Script
    if self.preinst is not None:
        self.mkscript(self.preinst ,os.path.join(DEBIAN_DIR,"preinst"))
    if self.postinst is not None:
        self.mkscript(self.postinst,os.path.join(DEBIAN_DIR,"postinst"))
    if self.prere is not None:
        self.mkscript(self.prere ,os.path.join(DEBIAN_DIR,"prerm"))
    if self.postre is not None:
        self.mkscript(self.postre ,os.path.join(DEBIAN_DIR,"postrm"))
    #Create the control file
    control = Control(self.debian_package, self.section,
                      self.distribution.get_maintainer(),
                      self.distribution.get_maintainer_email(),
                      self.architecture, self.depends, self.suggests,
                      self.description, self.long_description,
                      self.conflicts, self.replaces,
                      optionnal = {
                          'XB-Maemo-Display-Name':self.Maemo_Display_Name,
                          'XB-Maemo-Upgrade-Description':self.Maemo_Upgrade_Description,
                          'XSBC-Bugtracker':self.Maemo_Bugtracker,
                          'XB-Maemo-Icon-26':self.getIconContent(self.Maemo_Icon_26),
                          'XB-Maemo-Flags':self.Maemo_Flags,
                          'XB-Meego-Desktop-Entry-Filename':self.MeeGo_Desktop_Entry_Filename
                      } )
    open(os.path.join(DEBIAN_DIR,"control"),"w").write(unicode(control.getContent()).encode('utf-8'))
    #Create the debian licence file
    licence = Licence(self.copyright, self.distribution.get_maintainer(), self.distribution.get_maintainer_email(), self.buildDate, str(datetime.now().year))
    open(os.path.join(DEBIAN_DIR,"copyright"),"w").write(unicode(licence.getContent()).encode('utf-8'))
    #Delete tar if already exist as it will made add to the same tar
    tarpath = os.path.join(self.dist_dir,self.debian_package+'_'+self.version+'-'+self.buildversion+'.tar.gz')
    if os.path.exists(tarpath):
        os.remove(tarpath)
    #Now create the tar.gz
    import tarfile
    # NOTE(review): `reset` is defined but never passed to tar.add —
    # presumably intended as a filter= to normalize ownership; confirm.
    def reset(tarinfo):
        tarinfo.uid = tarinfo.gid = 0
        tarinfo.uname = tarinfo.gname = "root"
        return tarinfo
    tar = tarfile.open(tarpath, 'w:gz')
    #tar.add(self.dist_dir,'.')
    tar.add(self.build_dir,'.')
    tar.close()
    #Clean the build dir
    remove_tree(DEBIAN_DIR)
    remove_tree(DATA_DIR)
    #Create the Dsc file
    import locale
    # force English month/day names for the RFC-2822 dates below
    # NOTE(review): bare except silently keeps the current locale
    try:
        old_locale,iso=locale.getlocale(locale.LC_TIME)
        locale.setlocale(locale.LC_TIME,'en_US')
    except:
        pass
    dsccontent = Dsc("%s-%s"%(self.version,self.buildversion),
                     self.depends,
                     (os.path.join(self.dist_dir,self.debian_package+'_'+self.version+'-'+self.buildversion+'.tar.gz'),),
                     Format='1.0',
                     Source=self.debian_package,
                     Version="%s-%s"%(self.version,self.buildversion),
                     Maintainer="%s <%s>"%(self.distribution.get_maintainer(),self.distribution.get_maintainer_email()),
                     Architecture="%s"%self.architecture,
                     )
    f = open(os.path.join(self.dist_dir,self.debian_package+'_'+self.version+'-'+self.buildversion+'.dsc'),"wb")
    f.write(unicode(dsccontent._getContent()).encode('utf-8'))
    f.close()
    #Changes file
    changescontent = Changes(
        "%s <%s>"%(self.distribution.get_maintainer(),self.distribution.get_maintainer_email()),
        "%s"%self.description,
        "%s"%self.changelog,
        (
            "%s.tar.gz"%os.path.join(self.dist_dir,self.debian_package+'_'+self.version+'-'+self.buildversion),
            "%s.dsc"%os.path.join(self.dist_dir,self.debian_package+'_'+self.version+'-'+self.buildversion),
        ),
        "%s"%self.section,
        "%s"%self.repository,
        Format='1.7',
        Date=time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()),
        Source="%s"%self.debian_package,
        Architecture="%s"%self.architecture,
        Version="%s-%s"%(self.version,self.buildversion),
        Distribution="%s"%self.repository,
        Urgency="%s"%self.urgency,
        Maintainer="%s <%s>"%(self.distribution.get_maintainer(),self.distribution.get_maintainer_email())
    )
    f = open(os.path.join(self.dist_dir,self.debian_package+'_'+self.version+'-'+self.buildversion+'.changes'),"wb")
    f.write(unicode(changescontent.getContent()).encode('utf-8'))
    f.close()
    # restore the caller's LC_TIME locale
    try:
        locale.setlocale(locale.LC_TIME,old_locale)
    except:
        pass
from changelog import Changelog # the 'changelog' module is in the src folder from index import Index from homework import Homework app = Flask(__name__) app.config.from_object('config') app.add_url_rule('/', view_func=Index.as_view('index'), methods=['GET', 'POST']) app.add_url_rule('/changelog', view_func=Changelog.as_view('changelog'), methods=['GET']) app.add_url_rule('/homework', view_func=Homework.as_view('homework'), methods=['GET', 'POST']) app.add_url_rule('/search/', view_func=results.MainResults.as_view('results'), methods=['GET', 'POST']) app.add_url_rule('/search/<searchQuery>', view_func=results.DirectURLInput.as_view('optionalresults'), methods=['GET', 'POST']) if __name__ == '__main__':