def new_order():
    newdict = {}
    if request.method == 'POST':
        # Keep only the non-empty form fields
        data = request.form.to_dict()
        for key in data:
            if data[key] != '':
                newdict[key] = data[key]
        if not newdict:
            flash('Your order is empty.', 'info')
            return redirect(url_for('main.new_order'))
        order = Order(author=current_user)
        db.session.add(order)
        db.session.commit()
        # Checked item checkboxes arrive as '<item_id>': 'on'; an optional
        # '<item_id>-quantity' field overrides the default quantity of 1
        for key in newdict:
            if newdict[key] == 'on':
                temp = key + "-quantity"
                quantity = 1 if temp not in newdict else int(newdict[temp])
                content = Content(cart=order, item_id=int(key), quantity=quantity)
                db.session.add(content)
                db.session.commit()
        return redirect(url_for('main.home'))
    items = Item.query.order_by(collate(Item.name, 'NOCASE')).all()
    return render_template('new_order.html', items=items)
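For reference, here is a sketch of the POST payload the route above appears to expect; the keys below are illustrative assumptions inferred from the loop, not taken from the original template.

# Hypothetical form payload for new_order() (illustrative only): checkbox
# fields post 'on' keyed by item id, with an optional '<id>-quantity' field.
example_form = {
    '3': 'on',          # item 3, quantity defaults to 1
    '7': 'on',          # item 7 ...
    '7-quantity': '4',  # ... with an explicit quantity of 4
    'notes': '',        # empty values are filtered out before processing
}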
def index(self):
    maintainers = models.Maintainer.query.order_by(
        collate(models.Maintainer.email, 'NOCASE')).all()
    form = FollowSetupForm()
    choices = []
    defaults = []
    form_mapping = {}
    follows = request.cookies.get('follows', '').split()
    for maintainer in maintainers:
        choices.append((maintainer.id, maintainer.email))
        form_mapping[maintainer.id] = maintainer
        if maintainer.email in follows:
            defaults.append(maintainer.id)
    form.maintainers.choices = choices
    form.maintainers.default = defaults
    if form.validate_on_submit():
        followed_maintainers = set()
        for choice in choices:
            if choice[0] in form.maintainers.data:
                followed_maintainers.add(choice[1])
        response = current_app.make_response(
            redirect(url_for('GrumpyView:index')))
        # FIXME: This will fail with too many follows (cookie values are
        # limited to roughly 4093 bytes); move this to the session
        # eventually. Until then, we could fit more by omitting @gentoo.org
        # here and suffixing it back after the cookie is read for the
        # defaults handling.
        response.set_cookie('follows', value=' '.join(followed_maintainers))
        return response
    form.process()
    return render_template("setup.html", mapping=form_mapping, form=form)
def read_feeds(title_pattern=None):
    t_feeds = get_table("Feeds")
    t_items = get_table("Items")
    q = sql.select([t_feeds])
    if title_pattern:
        q = q.where(t_feeds.c.Title.like(f"%{title_pattern}%"))
    q = q.order_by(sql.collate(t_feeds.c.Title, "NOCASE"))
    with get_connection() as conn:
        feeds = conn.execute(q).fetchall()
    for feed_it in feeds:
        rows = []
        for item_it in parse_feed(feed_it).entries:
            try:
                row_it = read_item(item_it)
            except AttributeError:
                continue
            row_it["FeedID"] = feed_it["FeedID"]
            rows.append(row_it)
        q = t_items.insert()
        q = q.prefix_with("OR IGNORE", dialect="sqlite")
        with get_connection() as conn:
            conn.execute(q, rows)
        q = (t_feeds.update().values(Updated=func.now()).where(
            t_feeds.c.FeedID == feed_it["FeedID"]))
        with get_connection() as conn:
            conn.execute(q)
def _page():
    categories = (Category.query
                  .order_by(collate(Category.name, 'NOCASE'))
                  .all())
    menu_items = [
        MenuItem(category.name, '/category/%r' % category.id)
        for category in categories
    ]
    return Page(menu_items)
def fix_case_duplication():
    # Find duplications based on case
    people = dbs\
        .query(Person.name, func.count(Person.name))\
        .group_by(collate(Person.name, 'NOCASE'))\
        .having(func.count(Person.name) > 1)\
        .all()
    for p in people:
        merge_person(p.name)
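Below is a self-contained sketch of the same case-duplication technique against an in-memory SQLite database; the Person model and sample rows are stand-ins, and SQLAlchemy 1.4+ is assumed.

from sqlalchemy import Column, Integer, String, collate, create_engine, func
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Person(Base):  # stand-in model, not the one from the snippet above
    __tablename__ = 'person'
    id = Column(Integer, primary_key=True)
    name = Column(String)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as dbs:
    dbs.add_all([Person(name='Alice'), Person(name='ALICE'), Person(name='Bob')])
    dbs.commit()
    # Grouping on collate(..., 'NOCASE') folds 'Alice' and 'ALICE' into one
    # group, so the HAVING clause surfaces names that differ only by case.
    dupes = (dbs.query(Person.name, func.count(Person.name))
                .group_by(collate(Person.name, 'NOCASE'))
                .having(func.count(Person.name) > 1)
                .all())
    print(dupes)  # e.g. [('Alice', 2)]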
def getAlarmObjectTypes(objectid=0):
    """
    Get all possible alarmobject types

    :param objectid: objectid as integer
    :return: list of :py:mod:`emonitor.modules.alarmobjects.alarmobjectfile.AlarmObjectFile`
    """
    if objectid != 0:
        return AlarmObjectFile.query.filter_by(id=objectid).all()
    else:
        return AlarmObjectFile.query.order_by(
            collate(AlarmObjectFile.name, 'NOCASE')).all()
def getAlarmObjectTypes(id=0):
    """
    Get list of AlarmObjectTypes

    :param id: id of :py:mod:`emonitor.modules.alarmobjects.alarmobjecttype.AlarmObjectType`
    :return: list or single :py:mod:`emonitor.modules.alarmobjects.alarmobjecttype.AlarmObjectType`
    """
    if id != 0:
        return AlarmObjectType.query.filter_by(id=id).first()
    else:
        try:
            return AlarmObjectType.query.order_by(
                collate(AlarmObjectType.name, 'NOCASE')).all()
        except Exception:
            # the NOCASE collation is SQLite-specific; fall back to the
            # backend's default ordering (a bare `except:` would also
            # swallow KeyboardInterrupt and SystemExit)
            return AlarmObjectType.query.order_by(AlarmObjectType.name).all()
def write_xml(f, user_id=None, tag=None):
    t_feeds = get_table("Feeds")
    t_tags2feeds = get_table("Tags2Feeds")
    t_tags = get_table("Tags")
    q = sql.select([t_feeds])
    if user_id and tag:
        q = q.select_from(t_feeds.join(t_tags2feeds).join(t_tags))
        q = q.where(
            sql.and_(
                t_tags.c.UserID == user_id,
                t_tags.c.Name == tag,
            ))
    q = q.order_by(sql.collate(t_feeds.c.Title, "NOCASE"))
    with get_connection() as conn:
        rows = conn.execute(q).fetchall()
    root = etree.Element("root")
    for row_it in rows:
        title_it = etree.Element("title")
        title_it.text = row_it["Title"]
        link_it = etree.Element("link")
        link_it.text = row_it["Link"]
        lang_it = etree.Element("lang")
        lang_it.text = Language[row_it["Language"]].value
        feed_it = etree.Element("feed")
        feed_it.append(title_it)
        feed_it.append(link_it)
        feed_it.append(lang_it)
        root.append(feed_it)
    s = etree.tostring(
        root,
        encoding="unicode",
        pretty_print=True,
    )
    f.write(s)
def get_albums():
    albums = Album.query.order_by(collate(Album.title, 'NOCASE')).all()
    if len(albums) == 0:
        flash('No albums found')
    return render('album_list.html', _page(), albums=albums)
def week():
    currentWeek = datetime.today().strftime("%V")
    lastWeek = str(int(currentWeek) - 1)
    WeeklyStats = db.session.query(
        PlayerStatsTable, func.max(PlayerStatsTable.TimeStamp)).filter(
            PlayerStatsTable.KW == currentWeek).group_by(
                PlayerStatsTable.playerName).order_by(
                    collate(PlayerStatsTable.playerName, 'NOCASE'),
                    PlayerStatsTable.id.desc()).all()
    lastWeekStats = db.session.query(
        PlayerStatsTable, func.max(PlayerStatsTable.TimeStamp)).filter(
            PlayerStatsTable.KW == lastWeek).group_by(
                PlayerStatsTable.playerName).order_by(
                    collate(PlayerStatsTable.playerName, 'NOCASE'),
                    PlayerStatsTable.id.desc()).all()
    WeeklyFireStats = db.session.query(
        FireStormTable, func.max(FireStormTable.TimeStamp)).filter(
            FireStormTable.KW == currentWeek).group_by(
                FireStormTable.playerName).order_by(
                    collate(FireStormTable.playerName, 'NOCASE'),
                    FireStormTable.id.desc()).all()
    lastWeekFireStats = db.session.query(
        FireStormTable, func.max(FireStormTable.TimeStamp)).filter(
            FireStormTable.KW == lastWeek).group_by(
                FireStormTable.playerName).order_by(
                    collate(FireStormTable.playerName, 'NOCASE'),
                    FireStormTable.id.desc()).all()

    def percent(a, b):
        return round((a / b * 100), 2) if b else 0

    def performancePerMin(playTime, stat):
        # guard on whole minutes, not raw seconds, to avoid a
        # ZeroDivisionError for play times under one minute
        minutes = playTime // 60
        return round(stat / minutes, 2) if minutes else 0

    # Pair every current-week row with the player's last-week row, falling
    # back to their earliest current-week row when none exists
    BaseStats = []
    for i in range(len(WeeklyStats)):
        for j in range(len(lastWeekStats)):
            if WeeklyStats[i][0].playerName == lastWeekStats[j][0].playerName:
                BaseStats.append(lastWeekStats[j])
        if len(BaseStats) != (i + 1):
            lastCurrentWeekStat = db.session.query(
                PlayerStatsTable, func.min(PlayerStatsTable.TimePlayed)).filter(
                    PlayerStatsTable.playerName == WeeklyStats[i][0].playerName,
                    PlayerStatsTable.KW == currentWeek).group_by(
                        PlayerStatsTable.playerName).first()
            BaseStats.append(lastCurrentWeekStat)
    FireStats = []
    for i in range(len(WeeklyFireStats)):
        for j in range(len(lastWeekFireStats)):
            if WeeklyFireStats[i][0].playerName == lastWeekFireStats[j][0].playerName:
                FireStats.append(lastWeekFireStats[j])
        if len(FireStats) != (i + 1):
            lastCurrentWeekStat = db.session.query(
                FireStormTable, func.min(FireStormTable.TimePlayed)).filter(
                    FireStormTable.playerName == WeeklyFireStats[i][0].playerName,
                    FireStormTable.KW == currentWeek).group_by(
                        FireStormTable.playerName).first()
            FireStats.append(lastCurrentWeekStat)
    firstDayOfWeek = datetime.today()
    start = firstDayOfWeek - timedelta(days=firstDayOfWeek.weekday())
    end = start + timedelta(days=6)
    WeekStartEnd = [start.strftime('%d-%b-%Y'), end.strftime('%d-%b-%Y')]
    cet = pytz.timezone('CET')
    timeDiffDict = {}
    for i in range(len(WeeklyStats)):
        timestr = WeeklyStats[i][0].lastUpdated
        timestr2 = lastWeekStats[i][0].lastUpdated
        currentTime = datetime.strptime(timestr, '%Y-%m-%dT%H:%M:%SZ')
        timeSinceUpdate = datetime.strptime(timestr2, '%Y-%m-%dT%H:%M:%SZ')
        timeDiff = (currentTime - timeSinceUpdate).total_seconds()
        hours, seconds = timeDiff // 3600, timeDiff % 3600
        minutes = seconds // 60
        timeDiffString = str(int(hours)) + "h:" + str(int(minutes)) + "m"
        timeDiffDict[WeeklyStats[i][0].playerName] = timeDiffString
        offset = cet.utcoffset(currentTime, is_dst=True)
        currentTime += offset
        timeSinceUpdate += offset
        WeeklyStats[i][0].lastUpdated = currentTime.strftime('%d-%m/%H:%M')
        BaseStats[i][0].lastUpdated = timeSinceUpdate.strftime('%d-%m/%H:%M')
    performanceDict = []
    percentDict = []
    for i in WeeklyStats:
        percentDict.append(dict())
        performanceDict.append(dict())
    for key, value in WeeklyStats[0][0].__dict__.items():
        # the original test `type(value) is (int or float)` only ever
        # matched ints, since `(int or float)` evaluates to `int`
        if isinstance(value, (int, float)):
            weeklyPercent = []
            for i in range(len(WeeklyStats)):
                diff = getattr(WeeklyStats[i][0], key) - getattr(BaseStats[i][0], key)
                weeklyPercent.append(diff)
            maxVal = max(weeklyPercent)
            for j in range(len(WeeklyStats)):
                diffTime = WeeklyStats[j][0].TimePlayed - BaseStats[j][0].TimePlayed
                percentDict[j][key] = percent(weeklyPercent[j], maxVal)
                performanceDict[j][key] = performancePerMin(diffTime, weeklyPercent[j])
    # accuracy, headshot and K/D ratios; append 0 when the denominator is
    # zero so the list stays index-aligned with WeeklyStats
    weeklyPercent = []
    for j in range(len(WeeklyStats)):
        if WeeklyStats[j][0].ShotsTaken - BaseStats[j][0].ShotsTaken:
            weeklyPercent.append(
                (WeeklyStats[j][0].ShotsHit - BaseStats[j][0].ShotsHit) /
                (WeeklyStats[j][0].ShotsTaken - BaseStats[j][0].ShotsTaken) * 100)
        else:
            weeklyPercent.append(0)
    maxVal = max(weeklyPercent)
    for j in range(len(WeeklyStats)):
        percentDict[j]['Acc'] = percent(weeklyPercent[j], maxVal)
    weeklyPercent = []
    for j in range(len(WeeklyStats)):
        if WeeklyStats[j][0].Kills - BaseStats[j][0].Kills:
            weeklyPercent.append(
                (WeeklyStats[j][0].Headshots - BaseStats[j][0].Headshots) /
                (WeeklyStats[j][0].Kills - BaseStats[j][0].Kills) * 100)
        else:
            weeklyPercent.append(0)
    maxVal = max(weeklyPercent)
    for j in range(len(WeeklyStats)):
        percentDict[j]['HeadPercent'] = percent(weeklyPercent[j], maxVal)
    weeklyPercent = []
    for j in range(len(WeeklyStats)):
        if WeeklyStats[j][0].Deaths - BaseStats[j][0].Deaths:
            weeklyPercent.append(
                (WeeklyStats[j][0].Kills - BaseStats[j][0].Kills) /
                (WeeklyStats[j][0].Deaths - BaseStats[j][0].Deaths))
        else:
            weeklyPercent.append(0)
    maxVal = max(weeklyPercent)
    for j in range(len(WeeklyStats)):
        percentDict[j]['KD'] = percent(weeklyPercent[j], maxVal)
    return render_template('week.html',
                           WeeklyStats=WeeklyStats,
                           BaseStats=BaseStats,
                           WeeklyFireStats=WeeklyFireStats,
                           FireStats=FireStats,
                           WeekStartEnd=WeekStartEnd,
                           timeDiffDict=timeDiffDict,
                           percentDict=percentDict,
                           performanceDict=performanceDict)
def get_game_by_name(self, game_name):
    sql = self.table.select().\
        where(self.table.columns.name == game_name).\
        order_by(asc(collate(self.table.columns.name, "NOCASE")))
    return self.database.select(sql)
def get_game_by_path(self, path):
    sql = self.table.select().\
        where(self.table.columns.path == path).\
        order_by(asc(collate(self.table.columns.name, "NOCASE")))
    return self.database.select(sql)
def get_games(self):
    sql = self.table.select().order_by(
        asc(collate(self.table.columns.name, "NOCASE")))
    return self.database.select(sql)
def main():  # pylint: disable=C0111
    args = _parse_args()
    logger.setup_stdout_logger(args.verbose)
    args.old_db = os.path.abspath(args.old_db)
    args.new_db = os.path.abspath(args.new_db)
    args.output = os.path.abspath(args.output)
    print("Using old:", args.old_db, "new:", args.new_db, "out:", args.output)
    print("Copying", args.new_db, "to", args.output)
    shutil.copyfile(args.new_db, args.output)
    db_old = MetricsDb.MetricsDb('sqlite:///' + args.old_db)
    db_out = MetricsDb.MetricsDb('sqlite:///' + args.output)
    with db_old.get_session() as old_session, db_out.get_session() as out_session:
        print("\nTrying to match files between old and new database...")
        # Manual file mapping
        filemap = {}
        failed_mappings = 0
        old_files = {file_: None for file_, in old_session.query(File.file)}
        new_files = {file_: None for file_, in out_session.query(File.file)}
        for oldfile in old_files:
            finalmatch = oldfile
            if oldfile in queried_filematching:
                finalmatch = queried_filematching[oldfile]
            elif oldfile not in new_files:
                searchstring = '%' + os.sep + os.path.basename(oldfile) + '%'
                matches = [match for match, in out_session.query(File.file)
                           .filter(File.file.like(searchstring))
                           .order_by(collate(File.file, 'NOCASE'))]
                # Filter out any files that already have a perfect match
                # between old_files and new_files
                matches = [file_ for file_ in matches if file_ not in old_files]
                if len(matches) > 0:
                    close_matches = difflib.get_close_matches(oldfile, matches)[:10]
                    if len(close_matches) > 0:
                        if len(close_matches) == 1:
                            finalmatch = close_matches[0]
                        else:
                            choice = _query_user_for_filemapping(oldfile, close_matches)
                            if choice == 0:
                                finalmatch = oldfile
                            else:
                                finalmatch = matches[choice - 1]
                        queried_filematching[oldfile] = finalmatch
                    else:
                        failed_mappings += 1
            filemap[oldfile] = finalmatch
        _generate_filemapping('autogenerated_filemapping.py', queried_filematching)
        print("Failed to map", failed_mappings, "out of", len(old_files))
        print("Selecting defect modifications from", args.old_db, "and inserting into", args.output)
        for defect in FancyBar().iter(old_session.query(DefectModification).all()):
            db_out.insert_defect_modification(out_session,
                                              filemap.get(defect.file.file, defect.file.file),
                                              defect.version.version,
                                              defect.function.function,
                                              defect.defect_id,
                                              defect.user.user,
                                              defect.date)
        out_session.commit()
        print("Selecting change_metrics from", args.old_db, "and inserting into", args.output)
        for cm in FancyBar().iter(old_session.query(ChangeMetric).all()):
            db_out.insert_change_metric(out_session,
                                        filemap.get(cm.file.file, cm.file.file),
                                        cm.version.version,
                                        cm.function.function,
                                        date_=cm.date,
                                        user=cm.user.user,
                                        added=cm.added,
                                        changed=cm.changed,
                                        deleted=cm.deleted,
                                        nloc=cm.nloc,
                                        token_count=cm.token_count,
                                        parameter_count=cm.parameter_count,
                                        cyclomatic_complexity=cm.cyclomatic_complexity)
        out_session.commit()
    print("done")
def cli(ctx, name, dname, license_key, ips_version, force, enable, ssl, spdy,
        gzip, cache, install, dev):
    """
    Downloads and installs a new instance of the latest Invision Power Suite release.
    """
    assert isinstance(ctx, Context)
    login_session = ctx.get_login()
    log = logging.getLogger('ipsv.new')
    ctx.cache = cache

    # Prompt for our desired license
    def get_license():
        """
        Prompt the user for a license selection
        @rtype: ips_vagrant.scraper.licenses.LicenseMeta
        """
        licenses = Licenses(login_session).get()
        user_license = license_key or ctx.config.get('User', 'LicenseKey')

        # If we already have a license key saved, skip the prompt and use it instead
        if user_license:
            licenses = {license.license_key: license for license in licenses}
            if user_license in licenses:
                return licenses[user_license]

        # Ask the user to select a license key
        opt = choice([
            (key, '{u} ({k})'.format(u=license.community_url, k=license.license_key))
            for key, license in enumerate(licenses)
        ], 1, 'Which license key would you like to use?')
        license = licenses[opt]

        # Should we save this license?
        if click.confirm('Would you like to save and use this license for future requests?', True):
            ctx.log.debug('Saving license key {k}'.format(k=license.license_key))
            ctx.config.set('User', 'LicenseKey', license.license_key)
            with open(ctx.config_path, 'wb') as configfile:
                ctx.config.write(configfile)
        return license

    # Get the latest IPS release
    lmeta = get_license()
    p = Echo('Fetching IPS version information...')
    ips = IpsManager(ctx, lmeta)
    p.done()
    if ips_version:
        if ips_version == 'latest_dev':
            v = ips.dev_version
            if not v:
                click.secho('There is no IPS development release available for download',
                            err=True, fg='red', bold=True)
                raise Exception('There is no IPS development release available for download')
            p = Echo('Downloading IPS development release {vs}...'.format(vs=v.version.vstring))
        else:
            ips_version = Version(ips_version)
            v = ips.versions[ips_version.vtuple]
            p = Echo('Fetching IPS version {iv}'.format(iv=ips_version.vstring))
    else:
        v = ips.latest
        p = Echo('Downloading IPS release {vs}...'.format(vs=v.version.vstring))
    filename = ips.get(v, cache)
    p.done()

    # Parse the specific domain and make sure it's valid
    log.debug('Parsing domain name: %s', dname)
    dname = domain_parse(dname)
    if ssl is None:
        ssl = dname.scheme == 'https'
    log.debug('Domain name parsed: %s', dname)
    domain = Domain.get_or_create(dname)

    # Make sure this site does not already exist
    p = Echo('Constructing site data...')
    site = ctx.db.query(Site).filter(Site.domain == domain).filter(
        collate(Site.name, 'NOCASE') == name).count()
    if site:
        p.done(p.FAIL)
        log.error('Site already exists')
        click.secho('An installation named "{s}" has already been created for the domain {d}'
                    .format(s=name, d=dname.hostname), err=True, fg='red', bold=True)
        raise click.Abort

    # Create the site database entry
    site = Site(name=name, domain=domain, license_key=lmeta.license_key,
                version=v.version.vstring, ssl=ssl, spdy=spdy, gzip=gzip,
                enabled=enable, in_dev=dev)
    status = p.OK
    if os.path.exists(site.root):
        if not force:
            p.done(p.FAIL)
            click.secho("Installation path already exists and --force was not passed:\n{p}"
                        .format(p=site.root), err=True, fg='red', bold=True)
            log.info('Aborting installation, path already exists: {p}'.format(p=site.root))
            raise click.Abort
        log.warn('Overwriting existing installation path: {p}'.format(p=site.root))
        status = p.WARN
    ctx.db.add(site)
    ctx.db.commit()
    p.done(status)

    # Construct the HTTP path
    p = Echo('Constructing paths and configuration files...')
    site.write_nginx_config()
    p.done()

    # Generate SSL certificates if enabled
    if ssl:
        p = Echo('Generating SSL certificate...')
        ssl_path = os.path.join(ctx.config.get('Paths', 'NginxSSL'), domain.name)
        if not os.path.exists(ssl_path):
            log.debug('Creating new SSL path: %s', ssl_path)
            os.makedirs(ssl_path, 0o755)
        sc = CertificateFactory(site).get()
        site.ssl_key = sc.key
        site.ssl_certificate = sc.certificate
        with open(os.path.join(ssl_path, '{s}.key'.format(s=site.slug)), 'w') as f:
            f.write(sc.key)
        with open(os.path.join(ssl_path, '{s}.pem'.format(s=site.slug)), 'w') as f:
            f.write(sc.certificate)
        p.done()

    # Create a symlink if this site is being enabled
    if site.enabled:
        site.enable(force)

    # Restart Nginx
    p = Echo('Restarting web server...')
    FNULL = open(os.devnull, 'w')
    subprocess.check_call(['service', 'nginx', 'restart'],
                          stdout=FNULL, stderr=subprocess.STDOUT)
    p.done()

    # Extract IPS setup files
    p = Echo('Extracting setup files to tmp...')
    tmpdir = tempfile.mkdtemp('ips')
    setup_zip = os.path.join(tmpdir, 'setup.zip')
    setup_dir = os.path.join(tmpdir, 'setup')
    os.mkdir(setup_dir)
    log.info('Extracting setup files')
    shutil.copyfile(filename, setup_zip)
    with zipfile.ZipFile(setup_zip) as z:
        namelist = z.namelist()
        if re.match(r'^ips_\w{5}\/?$', namelist[0]):
            log.debug('Setup directory matched: %s', namelist[0])
        else:
            log.error('No setup directory matched, unable to continue')
            raise Exception('Unrecognized setup file format, aborting')
        z.extractall(setup_dir)
        log.debug('Setup files extracted to: %s', setup_dir)
    p.done()
    p = MarkerProgressBar('Copying setup files...')
    setup_tmpdir = os.path.join(setup_dir, namelist[0])
    for dirname, dirnames, filenames in p(os.walk(setup_tmpdir)):
        for filepath in dirnames:
            site_path = os.path.join(site.root, dirname.replace(setup_tmpdir, ''), filepath)
            if not os.path.exists(site_path):
                log.debug('Creating directory: %s', site_path)
                os.mkdir(site_path, 0o755)
        for filepath in filenames:
            tmp_path = os.path.join(dirname, filepath)
            site_path = os.path.join(site.root, dirname.replace(setup_tmpdir, ''), filepath)
            shutil.copy(tmp_path, site_path)
    log.info('Setup files copied to: %s', site.root)
    shutil.rmtree(tmpdir)

    # Apply proper permissions
    # p = MarkerProgressBar('Setting file permissions...')
    writeable_dirs = ['uploads', 'plugins', 'applications', 'datastore']
    for wdir in writeable_dirs:
        log.debug('Setting file permissions in %s', wdir)
        os.chmod(os.path.join(site.root, wdir), 0o777)
        p = MarkerProgressBar('Setting file permissions...', nl=False)
        for dirname, dirnames, filenames in p(os.walk(os.path.join(site.root, wdir))):
            for filename in filenames:
                os.chmod(os.path.join(dirname, filename), 0o666)
            for filename in dirnames:
                os.chmod(os.path.join(dirname, filename), 0o777)
    Echo('Setting file permissions...').done()
    shutil.move(os.path.join(site.root, 'conf_global.dist.php'),
                os.path.join(site.root, 'conf_global.php'))
    os.chmod(os.path.join(site.root, 'conf_global.php'), 0o777)

    # Run the installation
    if install:
        p = Echo('Initializing installer...')
        i = installer(v.version, ctx, site, force)
        p.done()
        i.start()
    else:
        db_info = None
        if click.confirm('Would you like to create the database for this installation now?', default=True):
            db_info = create_database(site)
        click.echo('------')
        if db_info:
            db_name, db_user, db_pass = db_info
            log.debug('MySQL Database Name: %s', db_name)
            log.debug('MySQL Database User: %s', db_user)
            log.debug('MySQL Database Password: %s', db_pass)
            click.secho('MySQL Database Name: {dbn}'.format(dbn=db_name), bold=True)
            click.secho('MySQL Database User: {dbu}'.format(dbu=db_user), bold=True)
            click.secho('MySQL Database Password: {dbp}'.format(dbp=db_pass), bold=True)
        click.secho('IPS is now ready to be installed. To proceed with the installation, follow the link below',
                    fg='yellow', bold=True)
        click.echo('{schema}://{host}'.format(schema='https' if site.ssl else 'http',
                                              host=site.domain.name))
def test_collate(self):
    left = column('left')
    right = "some collation"
    # compare() returns a bool, so assert it; otherwise the test can
    # never fail on a mismatch
    assert left.comparator.operate(operators.collate, right).compare(
        collate(left, right)
    )
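As the test above suggests, collate(expr, name) is the standalone spelling of the ColumnOperators.collate() method; a quick sketch of the equivalence (the exact quoting of the rendered COLLATE clause varies by SQLAlchemy version):

from sqlalchemy import collate, column

name = column('name')
# Both spellings build the same COLLATE expression; on SQLite they render
# roughly as: name COLLATE "NOCASE" (identifier quoting varies by version)
print(name.collate('NOCASE'))
print(collate(name, 'NOCASE'))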
def __init__(self, request, query, data):
    """
    :param request: Flask 'request' instance
    :param query: base query defining the set of records we consider
    :param data: dictionary specifying the columns to query and sort
    """
    request_data = json.loads(request.values.get("args"))
    self._draw = None
    self._fail = False
    try:
        self._draw = int(request_data['draw'])
        self._start = int(request_data['start'])
        self._length = int(request_data['length'])
        self._filter_value = str(request_data['search']['value'])
        self._columns = request_data['columns']
        self._order = request_data['order']
    except KeyError:
        self._fail = True
        self._fail_msg = 'The server could not interpret the AJAX payload from the website front-end'
        return
    self._query = query
    self._total_records = get_count(self._query)
    self._data = data
    # filter if being used
    if self._filter_value:
        filter_columns = []
        for item in self._data:
            item_data = self._data[item]
            if 'search' in item_data:
                search_col = item_data['search']
                if 'search_collation' in item_data:
                    collation = item_data['search_collation']
                    filter_columns.append(
                        collate(search_col, collation).contains(self._filter_value))
                else:
                    filter_columns.append(search_col.contains(self._filter_value))
        i = len(filter_columns)
        if i == 1:
            self._query = self._query.filter(filter_columns[0])
        elif i > 1:
            # unpack the clauses; passing a bare generator to or_() is not supported
            self._query = self._query.filter(or_(*filter_columns))
    # count number of filtered records
    self._filtered_records = get_count(self._query)
    # impose specified ordering
    for item in self._order:
        # col_id is an index into the _columns array
        col_id = int(item['column'])
        dir = str(item['dir'])
        col_name = str(self._columns[col_id]['data'])
        if col_name in self._data:
            item_data = self._data[col_name]
            if 'order' in item_data:
                order_col = item_data['order']
                if dir == 'asc':
                    if isinstance(order_col, Iterable):
                        self._query = self._query.order_by(*(x.asc() for x in order_col))
                    else:
                        self._query = self._query.order_by(order_col.asc())
                else:
                    if isinstance(order_col, Iterable):
                        self._query = self._query.order_by(*(x.desc() for x in order_col))
                    else:
                        self._query = self._query.order_by(order_col.desc())
    # impose limit on number of records retrieved
    if self._length > 0:
        self._query = self._query.limit(self._length)
    self._query = self._query.offset(self._start)
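For reference, a minimal sketch of the kind of `data` mapping the constructor above consumes; the User model and column choices are illustrative assumptions, not from the original project.

from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class User(Base):  # stand-in model for illustration
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String)
    email = Column(String)

# One entry per front-end column: 'search' is the column to filter on,
# 'search_collation' optionally wraps the filter in collate(), and 'order'
# is a column (or iterable of columns) used for sorting.
data = {
    'name': {'search': User.name, 'search_collation': 'NOCASE', 'order': User.name},
    'email': {'search': User.email, 'order': (User.email, User.name)},
}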
def restaurants():
    return session.query(Restaurant)\
        .order_by(collate(Restaurant.name, 'NOCASE'))\
        .all()
def cli(ctx, name, dname, license_key, ips_version, force, enable, ssl, spdy,
        gzip, cache, install, dev):
    """
    Downloads and installs a new instance of the latest Invision Power Suite release.
    """
    assert isinstance(ctx, Context)
    login_session = ctx.get_login()
    log = logging.getLogger('ipsv.new')
    ctx.cache = cache

    # Prompt for our desired license
    def get_license():
        """
        Prompt the user for a license selection
        @rtype: ips_vagrant.scraper.licenses.LicenseMeta
        """
        licenses = Licenses(login_session).get()
        user_license = license_key or ctx.config.get('User', 'LicenseKey')

        # If we already have a license key saved, skip the prompt and use it instead
        if user_license:
            licenses = {license.license_key: license for license in licenses}
            if user_license in licenses:
                return licenses[user_license]

        # Ask the user to select a license key
        opt = choice([
            (key, '{u} ({k})'.format(u=license.community_url, k=license.license_key))
            for key, license in enumerate(licenses)
        ], 1, 'Which license key would you like to use?')
        license = licenses[opt]

        # Should we save this license?
        if click.confirm('Would you like to save and use this license for future requests?', True):
            ctx.log.debug('Saving license key {k}'.format(k=license.license_key))
            ctx.config.set('User', 'LicenseKey', license.license_key)
            with open(ctx.config_path, 'wb') as configfile:
                ctx.config.write(configfile)
        return license

    # Get the latest IPS release
    lmeta = get_license()
    p = Echo('Fetching IPS version information...')
    ips = IpsManager(ctx, lmeta)
    p.done()
    if ips_version:
        if ips_version == 'latest_dev':
            v = ips.dev_version
            if not v:
                click.secho('There is no IPS development release available for download',
                            err=True, fg='red', bold=True)
                raise Exception('There is no IPS development release available for download')
            p = Echo('Downloading IPS development release {vs}...'.format(vs=v.version.vstring))
        else:
            ips_version = Version(ips_version)
            v = ips.versions[ips_version.vtuple]
            p = Echo('Fetching IPS version {iv}'.format(iv=ips_version.vstring))
    else:
        v = ips.latest
        p = Echo('Downloading IPS release {vs}...'.format(vs=v.version.vstring))
    filename = ips.get(v, cache)
    p.done()

    # Parse the specific domain and make sure it's valid
    log.debug('Parsing domain name: %s', dname)
    dname = domain_parse(dname)
    if ssl is None:
        ssl = dname.scheme == 'https'
    log.debug('Domain name parsed: %s', dname)
    domain = Domain.get_or_create(dname)

    # Make sure this site does not already exist
    p = Echo('Constructing site data...')
    site = ctx.db.query(Site).filter(Site.domain == domain).filter(
        collate(Site.name, 'NOCASE') == name).count()
    if site:
        p.done(p.FAIL)
        log.error('Site already exists')
        click.secho('An installation named "{s}" has already been created for the domain {d}'
                    .format(s=name, d=dname.hostname), err=True, fg='red', bold=True)
        raise click.Abort

    # Create the site database entry
    site = Site(name=name, domain=domain, license_key=lmeta.license_key,
                version=v.version.vstring, ssl=ssl, spdy=spdy, gzip=gzip,
                enabled=enable, in_dev=dev)
    status = p.OK
    if os.path.exists(site.root):
        if not force:
            p.done(p.FAIL)
            click.secho("Installation path already exists and --force was not passed:\n{p}"
                        .format(p=site.root), err=True, fg='red', bold=True)
            log.info('Aborting installation, path already exists: {p}'.format(p=site.root))
            raise click.Abort
        log.warn('Overwriting existing installation path: {p}'.format(p=site.root))
        status = p.WARN
    ctx.db.add(site)
    ctx.db.commit()
    p.done(status)

    # Construct the HTTP path
    p = Echo('Constructing paths and configuration files...')
    site.write_nginx_config()
    p.done()

    # Generate SSL certificates if enabled
    if ssl:
        p = Echo('Generating SSL certificate...')
        ssl_path = os.path.join(ctx.config.get('Paths', 'NginxSSL'), domain.name)
        if not os.path.exists(ssl_path):
            log.debug('Creating new SSL path: %s', ssl_path)
            os.makedirs(ssl_path, 0o755)
        sc = CertificateFactory(site).get()
        site.ssl_key = sc.key
        site.ssl_certificate = sc.certificate
        with open(os.path.join(ssl_path, '{s}.key'.format(s=site.slug)), 'w') as f:
            f.write(sc.key)
        with open(os.path.join(ssl_path, '{s}.pem'.format(s=site.slug)), 'w') as f:
            f.write(sc.certificate)
        p.done()

    # Create a symlink if this site is being enabled
    if site.enabled:
        site.enable(force)

    # Restart Nginx
    p = Echo('Restarting web server...')
    FNULL = open(os.devnull, 'w')
    subprocess.check_call(['service', 'nginx', 'restart'],
                          stdout=FNULL, stderr=subprocess.STDOUT)
    p.done()

    # Extract IPS setup files
    p = Echo('Extracting setup files to tmp...')
    tmpdir = tempfile.mkdtemp('ips')
    setup_zip = os.path.join(tmpdir, 'setup.zip')
    setup_dir = os.path.join(tmpdir, 'setup')
    os.mkdir(setup_dir)
    log.info('Extracting setup files')
    shutil.copyfile(filename, setup_zip)
    with zipfile.ZipFile(setup_zip) as z:
        namelist = z.namelist()
        if re.match(r'^ips_\w{5}\/?$', namelist[0]):
            log.debug('Setup directory matched: %s', namelist[0])
        else:
            log.error('No setup directory matched, unable to continue')
            raise Exception('Unrecognized setup file format, aborting')
        z.extractall(setup_dir)
        log.debug('Setup files extracted to: %s', setup_dir)
    p.done()
    p = MarkerProgressBar('Copying setup files...')
    setup_tmpdir = os.path.join(setup_dir, namelist[0])
    for dirname, dirnames, filenames in p(os.walk(setup_tmpdir)):
        for filepath in dirnames:
            site_path = os.path.join(site.root, dirname.replace(setup_tmpdir, ''), filepath)
            if not os.path.exists(site_path):
                log.debug('Creating directory: %s', site_path)
                os.mkdir(site_path, 0o755)
        for filepath in filenames:
            tmp_path = os.path.join(dirname, filepath)
            site_path = os.path.join(site.root, dirname.replace(setup_tmpdir, ''), filepath)
            shutil.copy(tmp_path, site_path)
    log.info('Setup files copied to: %s', site.root)
    shutil.rmtree(tmpdir)

    # Apply proper permissions
    # p = MarkerProgressBar('Setting file permissions...')
    writeable_dirs = ['uploads', 'plugins', 'applications', 'datastore']
    for wdir in writeable_dirs:
        log.debug('Setting file permissions in %s', wdir)
        os.chmod(os.path.join(site.root, wdir), 0o777)
        p = MarkerProgressBar('Setting file permissions...', nl=False)
        for dirname, dirnames, filenames in p(os.walk(os.path.join(site.root, wdir))):
            for filename in filenames:
                os.chmod(os.path.join(dirname, filename), 0o666)
            for filename in dirnames:
                os.chmod(os.path.join(dirname, filename), 0o777)
    Echo('Setting file permissions...').done()
    shutil.move(os.path.join(site.root, 'conf_global.dist.php'),
                os.path.join(site.root, 'conf_global.php'))
    os.chmod(os.path.join(site.root, 'conf_global.php'), 0o777)

    # Run the installation
    if install:
        p = Echo('Initializing installer...')
        i = installer(v.version, ctx, site, force)
        p.done()
        i.start()
    else:
        click.echo('------')
        click.secho('IPS is now ready to be installed. To proceed with the installation, follow the link below',
                    fg='yellow', bold=True)
        click.echo('{schema}://{host}'.format(schema='https' if site.ssl else 'http',
                                              host=site.domain.name))
def get_stories():
    stories = Story.query.order_by(collate(Story.title, 'NOCASE')).all()
    if len(stories) == 0:
        flash('No stories found')
    return render('story_list.html', _page(), stories=stories)
def list_crankers(self):
    return Athlete.query.filter(
        Athlete.is_handcrank == True).order_by(
            collate(Athlete.name_first, 'NOCASE'),
            collate(Athlete.name_last, 'NOCASE')
        ).all()
def getMenu(restaurant_id):
    return session.query(MenuItem)\
        .filter_by(restaurant_id=restaurant_id)\
        .order_by(collate(MenuItem.name, 'NOCASE'))\
        .all()
def load(self, params=None):
    """Execute SQL ``SELECT`` and populate ``rows`` attribute.

    Loads a maximum of ``MAX_RECS`` records at a time.

    ``params`` dict example::

        {
            'desc': False,
            'order_by': 'title',
            'where': {
                'date': {'operator': 'range', 'param': (0, 1403845140)},
                'search': {'operator': '=', 'param': 'Google'}
            }
        }

    :param dict params: dict of various parameters from which to construct
        additional SQL clauses eg. ``WHERE``, ``ORDER BY``, etc.
    """
    rows = Node()
    # FIXME: Maybe we should use kwargs instead of params?
    params = params or {}
    # WHERE
    where = params.get('where', None)
    if where is not None:
        where = self._get_where_clause(where)
    # ORDER BY
    order_by = params.get('order_by', None)
    # Do a numeric ordering first, as suggested here
    # (http://stackoverflow.com/a/4204641), and then a case-insensitive one
    order_by = (order_by and
                [self.table.columns[order_by] + 0,
                 collate(self.table.columns[order_by], 'NOCASE')])
    if order_by is not None and params.get('desc', False):
        order_by = [desc(col) for col in order_by]
    # OFFSET
    page = params.get('page', 0)
    offset = page * self.MAX_RECS
    # A little optimization to avoid doing more queries when we
    # already loaded everything
    if page > 0 and offset >= self.total_recs:
        return rows
    # Flat
    flat = params.get('flat', False)
    if flat:
        flat_where = operator.ne(self.table.columns[self.FLAT_COLUMN], None)
        where = and_(where, flat_where) if where is not None else flat_where  # noqa
    with closing(sqlite3.connect(self.db_file)) as conn:
        conn.row_factory = lambda cursor, row: list(row)
        # ^^ make result lists mutable so we can change values in
        # the GTK TreeModel that uses this datasource.
        conn.create_function('rank', 1, rank)
        # TODO: ^^ only if search term in params
        with closing(conn.cursor()) as cursor:
            self._ensure_temp_view(cursor)
        if page == 0:
            # set the total record count only the first time the
            # record set is requested
            res = self.select(conn, self.table, [func.count(1)], where=where)
            self.total_recs = int(list(res)[0][0])
        if self.PARENT_ID_COLUMN and not flat:
            rows.extend(
                self._load_tree_rows(conn, where, order_by,
                                     params.get('parent_id', None)))
        else:
            query = self.select(conn, self.table, self.table.columns,
                                where=where, limit=self.MAX_RECS,
                                offset=offset, order_by=order_by)
            for row in query:
                rows.append(Node(data=row))
            rows.children_len = len(rows)
    return rows
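The two-key ordering used in load() above sorts numeric-looking values numerically before falling back to the case-insensitive collation; a minimal raw-SQLite sketch of the effect (table and values are made up):

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute("CREATE TABLE t (title TEXT)")
conn.executemany("INSERT INTO t VALUES (?)",
                 [('10',), ('9',), ('apple',), ('Banana',)])
# 'title + 0' coerces numeric strings to numbers (non-numeric become 0),
# so '9' sorts before '10'; the NOCASE collation then interleaves
# 'apple' and 'Banana' case-insensitively within the non-numeric group.
rows = conn.execute(
    "SELECT title FROM t ORDER BY title + 0, title COLLATE NOCASE").fetchall()
print([r[0] for r in rows])  # ['apple', 'Banana', '9', '10']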
def all(self, order=None):
    """ Returns all transactions from the database """
    if order is None:
        return TableWrapper.all(self, order=collate(Category.name, 'NOCASE'))
    else:
        return TableWrapper.all(self, order=order)
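As a closing design note: when case-insensitive ordering is wanted on every query, the collation can instead be declared once on the column type so a plain order_by inherits it. A sketch assuming SQLAlchemy 1.4+ (String's collation parameter) and a SQLite backend; the Category model here is a stand-in, not the one from the snippet above.

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Category(Base):  # stand-in model for illustration
    __tablename__ = 'category'
    id = Column(Integer, primary_key=True)
    # DDL becomes roughly: name VARCHAR COLLATE "NOCASE" -- ordering and
    # comparisons on this column are then case-insensitive without
    # per-query collate() calls.
    name = Column(String(collation='NOCASE'))

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as s:
    s.add_all([Category(name='beta'), Category(name='Alpha')])
    s.commit()
    print([c.name for c in s.query(Category).order_by(Category.name)])
    # ['Alpha', 'beta'] -- NOCASE applies via the column definition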