def index(page=1):
    """Homepage: handle new-post submission (with optional image) and list posts.

    page -- 1-based page number for pagination (21 posts per page).
    """
    posts = None
    form = PostForm()
    if form.validate_on_submit():
        # Read the uploaded image and rescale it to a 1024px width.
        file_u = request.files['image'].read()
        image = Image.open(io.BytesIO(file_u))
        scale = 1024 / image.size[0]
        # NOTE(review): this also *upscales* images narrower than 1024px — confirm intended.
        # NOTE(review): Image.ANTIALIAS was removed in Pillow 10 (alias of LANCZOS) — confirm Pillow version pin.
        image = image.resize((int(image.size[0] * scale), int(image.size[1] * scale)), Image.ANTIALIAS)
        img_byte_arr = io.BytesIO()
        image.save(img_byte_arr, format="PNG")
        file_u = img_byte_arr.getvalue()
        if request.files['image']:
            # Store the re-encoded PNG inline as a base64 data URI.
            file_a = 'data:{};base64,{}'.format(request.files['image'].content_type,
                                                encode(file_u, 'base64').decode('utf-8'))
            post_create = Post.create(user=g.user.id, data=form.content.data, image=file_a)
        else:
            post_create = Post.create(user=g.user.id, data=form.content.data)
        # Notify every registered user about the new post.
        for user in User.select():
            user.sendmail_to(name=g.user.username, subject="TDIC Post",
                             msg_text='{} posted: "{}".'.format(g.user.username, form.content.data),
                             link=url_for("view_post", id=post_create.id)
                             )
        flash('Posted!')
    try:
        # Only authenticated visitors get the post list.
        if current_user.is_authenticated:
            posts = Post.select().paginate(page, 21)
    except InternalError:
        # Reset the connection after a failed query so later requests work.
        DB.rollback()
    return render_template('index.html', posts=posts, page=page, options=True, form=form)
def admin_login():
    """Authenticate an admin account and redirect to the admin homepage."""
    # Already signed in as an admin: skip the form entirely.
    if g.admin is not None:
        return redirect(url_for('admin_home', admin_id=g.admin['project_id']))

    form = LoginForm(request.form)
    if form.validate_on_submit():
        db = DB()
        resp = db.auth(form.project_name.data, form.password.data)
        if resp['status'] and resp['admin']:
            # Persist the admin id in the session, then load its detail record.
            session['admin_project_id'] = resp['project_id']
            detail = db.get_project_detail(session['admin_project_id'])
            return redirect(url_for('admin_home', admin_id=detail['project_id']))
        if not resp['admin']:
            flash(u'Invalid admin account!')
        else:
            flash(resp['message'])
    return render_template('admin_login.html', form=form)
def create():
    """Page to create a new project account."""
    form = CreateForm(request.form)
    if form.validate_on_submit():
        # Hash the password before it is handed to the account store.
        hashed = generate_password_hash(form.password.data)
        resp = DB().create(form.project_name.data,
                           form.password.data,
                           hashed,
                           description=form.description.data,
                           email=form.email.data)
        if resp['status']:
            flash(u'Project successfully created!')
            return redirect(url_for('admin_home', admin_id=g.admin['project_id']))
        flash(resp['message'])
    return render_template('create.html', form=form)
def reset():
    """Drop and recreate all tables, then seed two example users."""
    DB.drop_all()
    DB.create_all()
    for handle in ("elonmusk", "AOC"):
        add_or_update_user(handle)
    return render_template("base.html", title="RESET", users=User.query.all())
def create_app():
    """Build and configure the Flask application instance."""
    app = Flask(__name__)
    # SQLite file next to the app; modification tracking disabled to avoid
    # the SQLAlchemy overhead warning.
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///db.sqlite3'
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    DB.init_app(app)

    @app.route('/')
    def root():
        # Placeholder home page.
        return "text"

    return app
async def add(self, pet_rescues):
    """Persist this rescue into the PetRescue table and register it in *pet_rescues*."""
    db = DB()
    query = 'INSERT INTO PetRescue (guild_name, guild_id, channel_name, channel_id, message_id, pet_id, ' \
            'alert_message_id, pet_message_id, start_time, lang, mention) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
    # DM channels have no .name; use the recipient's name instead.
    channel_type = self.message.channel.type
    if channel_type == discord.ChannelType.private:
        channel_name = self.message.channel.recipient.name
    else:
        channel_name = self.message.channel.name
    # 0 stands in for "no guild / no alert message" (DM context).
    params = [
        self.message.guild.name if self.message.guild else '<Private Message>',
        self.message.guild.id if self.message.guild else 0,
        channel_name,
        self.message.channel.id,
        self.message.id,
        self.pet['id'],
        self.alert_message.id if self.alert_message else 0,
        self.pet_message.id,
        self.start_time,
        self.lang,
        str(self.mention),
    ]
    # NOTE(review): a fresh asyncio.Lock() per call serializes nothing —
    # for real mutual exclusion the lock must be shared (class/module level).
    lock = asyncio.Lock()
    async with lock:
        db.cursor.execute(query, params)
        db.commit()
        pet_rescues.append(self)
def details(product_id=None, msg=None):
    """Render a product detail page together with its comment thread."""
    product_id = product_id or request.args.get('id')
    if not product_id:
        return redirect(url_for('catalogue'))

    product = DB.get_product(product_id)

    # Shape each DB row into the dict the template expects.
    comments = []
    for row in DB.get_comments(product_id):
        date_part, time_part = row[3].split()
        comments.append({
            'name': ' '.join(row[:2]),
            'text': row[2],
            'date': date_part,
            'time': time_part.split('.')[0],  # drop fractional seconds
        })

    response = make_response(
        render_template('details.html', message=msg, product=product, comments=comments)
    )
    # NOTE(review): disables the browser XSS filter — presumably deliberate
    # for this demo app; confirm before changing.
    response.headers.set('X-XSS-Protection', '0')
    return response
def main():
    """Scrape streaming subreddits, store posts/comments, and extract streams."""
    post_limit = 10
    subs = ['mlbstreams', 'nbastreams', 'soccerstreams']
    # , 'nflstreams', 'nhlstreams', 'cfbstreams', 'ncaabballstreams', 'boxingstreams', 'mmastreams'

    db = DB.initialize(rebuild=False)
    reddit_obj = Reddit.connect()

    # Pull submissions (with their comments) from each subreddit into the DB.
    for sub in subs:
        scraped = Reddit.scrape_posts(reddit_obj, sub, post_limit)
        Submission.create_with_comments(scraped, db)

    # Turn stored comments into stream records.
    for stream in Reddit.parse_comments_for_streams(Comment.get_all(db), db):
        Stream.create(stream, db)

    # Print the matches table from SQLite, then close the connection.
    Submission.print_all(db)
    DB.close(db)
def home(project_name, task_id=None):
    """Render a project account's homepage."""
    db = DB()
    total_size = db.get_project_data_size(project_name)

    # Admins view other projects, so load the detail into the session first.
    if g.admin is not None:
        _aload_project(project_name)

    # Per-collector term counts for the panel.
    detail = g.project
    collectors = detail['collectors']
    if collectors:
        detail['num_collectors'] = len(collectors)
        for collector in collectors:
            terms = collector['terms_list']
            collector['num_terms'] = len(terms) if terms is not None else 0
    else:
        detail['num_collectors'] = 0

    detail['totalsize'] = total_size
    return render_template('home.html', project_detail=detail)
def login():
    """Handle project-account authentication."""
    # Already authenticated: straight to the project homepage.
    if g.project is not None:
        return redirect(url_for('home', project_name=g.project['project_name']))

    form = LoginForm(request.form)
    if form.validate_on_submit():
        db = DB()
        resp = db.auth(form.project_name.data, form.password.data)
        if resp['status']:
            # Remember the project, then resolve its display name.
            session['project_id'] = resp['project_id']
            detail = db.get_project_detail(session['project_id'])
            return redirect(url_for('home', project_name=detail['project_name']))
        flash(resp['message'])
    return render_template('login.html', form=form)
def main():
    """Scrape the configured subreddits and persist submissions, comments, and streams."""
    limit = 10
    subreddits = ['mlbstreams', 'nbastreams', 'soccerstreams']
    # , 'nflstreams', 'nhlstreams', 'cfbstreams', 'ncaabballstreams', 'boxingstreams', 'mmastreams'

    db = DB.initialize(rebuild=False)

    # Connect once and reuse the handle for every subreddit.
    client = Reddit.connect()
    for subreddit in subreddits:
        Submission.create_with_comments(
            Reddit.scrape_posts(client, subreddit, limit), db)

    # Extract stream links from the stored comments.
    streams = Reddit.parse_comments_for_streams(Comment.get_all(db), db)
    for stream in streams:
        Stream.create(stream, db)

    # Show the matches table, then release the DB connection.
    Submission.print_all(db)
    DB.close(db)
def landing():
    """Landing page: rebuild the DB and seed four example Twitter users."""
    DB.drop_all()
    DB.create_all()
    example_users = ["yestrella14", "elonmusk", "rihanna", "katyperry"]
    for user in example_users:
        # NOTE(review): plural helper name — sibling apps use `add_or_update_user`;
        # confirm this is the intended function.
        add_or_update_users(user)
    return render_template("my_twitapp.html", title="Lambda Lesson 2 Unit 3 Sprint 3",
                           users=User.query.all())


@app.route('/compare', methods=['POST'])
def compare():
    """Predict which of two selected users more likely authored the tweet text."""
    user1 = request.values["selected_user_1"]
    user2 = request.values['selected_user_2']
    tweet_text = request.values['tweet_text']
    if user1 == user2:
        message = "Cannot compare the same to itself"
    else:
        # predict_user returns the more likely author's name.
        prediction = predict_user(user1, user2, tweet_text)
        message = prediction + " is more likely to have said " + tweet_text
    return render_template("prediction.html", title="compare tweets", message=message)
def config(self, api_port):
    """One-time setup: initialize the on-disk DB and persist default settings.

    api_port -- port for the API server; falls back to DEFAULT_PORT when falsy.
    """
    # NOTE(review): brace-style placeholder with lazy args suggests loguru-style
    # logging — confirm; stdlib logging would not interpolate '{}' here.
    logger.info('Configuring alcazard with state at {}', self.state_path)
    api_port = api_port or DEFAULT_PORT
    os.makedirs(self.state_path, exist_ok=True)
    DB.init(self.db_path)
    with DB:
        self._init_db()
        config = Config.select().first()
        if not config:
            # Fresh install: seed a default transmission-based configuration.
            from transmission.params import DEFAULT_TRANSMISSION_SETTINGS_TEMPLATE
            config = Config(
                is_fully_configured=True,
                transmission_settings=DEFAULT_TRANSMISSION_SETTINGS_TEMPLATE,
                is_dht_enabled=False,
                local_port_pools_fmt='9091-9291',
                peer_port_pools_fmt='21413-21613',
            )
        # Always refresh the port, even on an existing config row.
        config.api_port = api_port
        config.save()
    logger.info('Saved configuration - done.')
def create_app():
    """Create Flask Application."""
    app = Flask(__name__)
    app.config["SQLALCHEMY_DATABASE_URI"] = getenv("DATABASE_URI")
    app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
    DB.init_app(app)

    @app.route("/")
    def root():
        """Home page listing all stored users."""
        return render_template("base.html", title="Home", users=User.query.all())

    @app.route("/reset")
    def reset():
        """Rebuild the schema and reseed the two example users."""
        DB.drop_all()
        DB.create_all()
        for handle in ("elonmusk", "AOC"):
            add_or_update_user(handle)
        return render_template("base.html", title="RESET", users=User.query.all())

    return app
def collector_control(collector_id):
    """
    POST control route for collector forms.

    Dispatches start/stop/restart of a collector to Celery, then redirects back
    to the collector detail page with the queued task id (if any).
    """
    collector_form = ProcessControlForm(request.form)
    task = None
    network = None

    # On form submit controls the collector daemon.
    if request.method == 'POST' and collector_form.validate():
        command = request.form['control'].lower()
        task_args = {
            'process': 'collect',
            'project': g.project,
            'collector_id': collector_id
        }

        db = DB()
        collector = db.get_collector_detail(g.project['project_id'], collector_id)
        network = collector['collector']['network']

        if command == 'start':
            task = start_daemon.apply_async(kwargs=task_args, queue='stack-start')
        elif command == 'stop':
            task = stop_daemon.apply_async(kwargs=task_args, queue='stack-stop')
        elif command == 'restart':
            task = restart_daemon.apply_async(kwargs=task_args, queue='stack-start')

    # BUG FIX: the original dereferenced `task.task_id` (and `network`)
    # unconditionally, raising AttributeError/NameError when validation failed
    # or an unrecognized command was posted.
    return redirect(url_for('collector',
                            project_name=g.project['project_name'],
                            network=network,
                            collector_id=collector_id,
                            task_id=task.task_id if task else None))
def get(guild_id):
    """Return the Ban row for *guild_id*, or None when the guild has no ban."""
    db = DB()
    # Parameterized query (idiom fix: the original carried a needless
    # f-string prefix with no placeholders).
    result = db.cursor.execute('SELECT * FROM Ban WHERE guild_id = ?;', (guild_id, ))
    ban = result.fetchone()
    db.close()
    return ban
def comment():
    """Accept a product comment from the signed-in user and store it."""
    product_id = request.form['product_id']
    text = request.form['comment']
    # Screen the comment text before persisting it.
    detect_attack(text)
    DB.add_comment(current_user.email, product_id, text)
    return redirect(url_for('details', id=product_id))
def register_user():
    """
    Create a user from the JSON request body.

    Requires both "name" and "email"; responds with a 400 when either is
    missing or when the database insert fails.
    """
    data = request.get_json()
    database = DB()

    # BUG FIX: the original fell off the end of the function (returning None,
    # i.e. an HTTP 500) when "name" or "email" was absent; reject explicitly.
    if data is None or data.get("name") is None or data.get("email") is None:
        return bad_request_response(message="Failed! 'name' and 'email' are required")

    response = database.create_users(data)
    if not response:
        return bad_request_response(message="Failed! User was not created")
    return jsonify({"code": 200, "message": "Success", "data": response})
def decorated_function(*args, **kwargs):
    """Populate g.admin from the session before invoking the wrapped view."""
    g.admin = None
    if 'admin_project_id' in session:
        resp = DB().get_project_detail(session['admin_project_id'])
        if resp['status']:
            # The whole detail record (status flag included) becomes g.admin.
            g.admin = resp
    return f(*args, **kwargs)
def _aload_project(project_name):
    """Load *project_name*'s detail into g/session for an admin viewer."""
    db = DB()
    record = db.stack_config.find_one({'project_name': project_name})
    project_id = str(record['_id'])
    g.project = db.get_project_detail(project_id)
    session['project_id'] = project_id
def create_app(): app = Flask(__name__) #add config for database app.config['SQLALCHEMY_DATABASE_URI'] = config('DATABASE_URL') #stop tracking modifications on sqlalchemy config app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False #have the database know about the app DB.init_app(app) @app.route('/') def root(): users = User.query.all() return render_template('base.html', title='Home', users=users) @app.route('/reset') def reset(): DB.drop_all() DB.create_all() return render_template('base.html', title='Reset', users=[]) @app.route('/user', methods=['POST']) @app.route('/user/<name>', methods=['GET']) def user(name=None, message=''): name = name or request.values['user_name'] try: if request.method == 'POST': add_or_update_user(name) message = "User {} successfully added".format(name) tweets = User.query.filter(User.name == name).one().tweets except Exception as e: message = "Error adding {}: {}".format(name, e) tweets = [] return render_template('user.html', title=name, tweets=tweets, message=message) @app.route('/compare', methods=['POST']) def compare(message=''): user1, user2 = sorted( [request.values['user1'], request.values['user2']]) if user1 == user2: message = 'Cannot compare a user to themselves!' else: prediction = predict_user(user1, user2, request.values['tweet_text']) message = '"{}" is more likely to be said by {} than {}'.format( request.values['tweet_text'], user1 if prediction else user2, user2 if prediction else user1) return render_template('prediction.html', title='Prediction', message=message) return app
def about():
    """Render the About page with the bio stored in the main/bio collection."""
    bio = DB('main', 'bio').get_bio()
    # Fall back to the literal string 'None' when no bio record exists.
    text = bio['bio'] if bio['status'] else 'None'
    return render_template("about.html", bio=text)
def create_db():
    """Creates application DB, ensuring the sqlite file's directory exists."""
    from models import DB
    url = app.config.get('SQLALCHEMY_DATABASE_URI', 'sqlite://')
    if url.startswith('sqlite:////'):
        # Absolute sqlite path: 'sqlite:////abs/db.sqlite3' -> '/abs/db.sqlite3'.
        path = url[10:]
        # BUG FIX: the original ran os.makedirs on the database *file* path,
        # creating a directory where sqlite needs to create a file. Only the
        # parent directory must exist.
        parent = os.path.dirname(path)
        if parent and not os.path.exists(parent):
            os.makedirs(parent)
    DB.create_all()
    DB.session.commit()
def youtube_download_task(y_id, video_format):
    """Video download task (original docstring: "таск скачивания видео").

    y_id -- YouTube video id; video_format -- youtube-dl format code.
    """
    dbase = DB()
    # A non-falsy status means another worker already claimed this download.
    status = dbase.sget(y_id, video_format, 'status')
    if status:
        print('Already running?')
        return
    # 0 = download in progress.
    dbase.sset(y_id, video_format, 'status', 0)
    filename = YouTube.filename.format(y_id=y_id, format=video_format)
    url = YouTube.url_format.format(y_id)
    ydl_opts = {
        'postprocessors': [{
            'key': 'ExecAfterDownload',
            # TODO can be mkv
            # NOTE(review): '(unknown)' looks like a redacted template —
            # presumably this should reference `filename` (which is otherwise
            # unused); confirm against the original source.
            'exec_cmd': 'mv {} ' + f'(unknown).mp4',
        }],
        'format': f'{video_format}+bestaudio',
        'outtmpl': f'tmp{settings.FILE_DELIMITER}(unknown)',
        'progress_hooks': [yt_dl_hook],
        # 'merge_output_format': 'mp4',
    }
    # NOTE(review): process-wide chdir — racy if several tasks share a worker.
    os.chdir(settings.DOWNLOAD_PATH)
    with youtube_dl.YoutubeDL(ydl_opts) as ydl:
        try:
            ydl.download([url])
        except youtube_dl.utils.DownloadError as exc:
            # Record the failure, then re-raise for the task framework.
            dbase.sset(y_id, video_format, 'error', f'{exc.__class__.__name__}: {exc}')
            raise
    # 100 = finished successfully.
    dbase.sset(y_id, video_format, 'status', 100)
def network_home(project_name, network, task_id=None):
    """
    Renders a project account's homepage for one network: its collectors,
    processor/inserter status, storage counts, and any pending task status.
    """
    # Admins browse other projects; load the detail into the session first.
    if g.admin is not None:
        _aload_project(project_name)

    # Collectors for this network only (None when the project has none at all).
    if not g.project['collectors']:
        collectors = None
    else:
        collectors = [c for c in g.project['collectors'] if c['network'] == network]
        for collector in collectors:
            collector['num_terms'] = 0
            if collector['terms_list'] is not None:
                collector['num_terms'] = len(collector['terms_list'])
        g.project['num_collectors'] = len(collectors)

    processor_form = ProcessControlForm(request.form)
    inserter_form = ProcessControlForm(request.form)

    # Active status of the processor & inserter daemons for this network.
    db = DB()
    resp = db.check_process_status(g.project['project_id'], 'process', module=network)
    processor_active_status = resp['message']
    resp = db.check_process_status(g.project['project_id'], 'insert', module=network)
    inserter_active_status = resp['message']

    # Count of items stored for this network.
    count = db.get_storage_counts(g.project['project_id'], network)

    # If a start/stop/restart is in progress, display the status.
    task_status = None
    if task_id:
        resp = celery.AsyncResult(task_id)
        # BUG FIX: the original assigned `processor_task_status` but rendered
        # `task_status`, so the progress banner never appeared.
        if resp.state == 'PENDING':
            task_status = 'Processor/Inserter start/shutdown still in progress...'
        else:
            task_status = 'Processor/Inserter start/shutdown completed.'

    return render_template('network_home.html',
                           network=network,
                           collectors=collectors,
                           project_detail=g.project,
                           processor_active_status=processor_active_status,
                           inserter_active_status=inserter_active_status,
                           task_status=task_status,
                           count=count,
                           processor_form=processor_form,
                           inserter_form=inserter_form)
def create_app():
    """Create and configure an instance of the Flask application."""
    app = Flask(__name__)
    app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL')
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    DB.init_app(app)

    @app.route('/')
    def root():
        # Home: list all cached users.
        users = User.query.all()
        return render_template("base.html", title='Home', users=users)

    @app.route('/user', methods=['POST'])
    @app.route('/user/<name>', methods=['GET'])
    def user(name=None, message=''):
        # POST inserts/refreshes the user; GET only displays cached tweets.
        name = name or request.values['user_name']
        try:
            if request.method == "POST":
                add_or_update_user(name)
                message = "User {} successfully added!".format(name)
            tweets = User.query.filter(User.name == name).one().tweets
        except Exception as e:
            # Surface the failure in the page rather than a 500.
            message = "Error adding {}: {}".format(name, e)
            tweets = []
        return render_template("user.html", title=name, tweets=tweets,
                               message=message)

    @app.route('/compare', methods=['POST'])
    def compare(message=''):
        # Sorted so the user pair is order-independent for the model.
        user1, user2 = sorted(
            [request.values['user1'], request.values['user2']])
        if user1 == user2:
            message = 'Cannot compare a user to themselves!'
        else:
            prediction = predict_user(user1, user2, request.values['tweet_text'])
            message = '"{}" is more likely to be said by {} than {}'.format(
                request.values['tweet_text'],
                user1 if prediction else user2,
                user2 if prediction else user1)
        return render_template('prediction.html', title='Prediction',
                               message=message)

    @app.route('/reset')
    def reset():
        # Rebuild the schema and reseed via add_users().
        DB.drop_all()
        DB.create_all()
        add_users()
        return render_template('base.html', title='Reset database!', users=[])

    return app
def create_app():
    """Minimal application factory for TweetVsTweet."""
    app = Flask(__name__)
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///db.sqlite3'
    DB.init_app(app)

    @app.route('/')
    def root():
        # Plain-text greeting at the site root.
        return "Welcome to TweetVsTweet"

    return app
def create_app():
    """Minimal application factory for TwitOff."""
    app = Flask(__name__)
    # Have the app know the database location.
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///db.sqlite3'
    # Have the database know about the app.
    DB.init_app(app)

    @app.route('/')
    def root():
        return 'Welcome to TwitOff!'

    return app
async def load_rescues(cls, client):
    """Restore all persisted pet rescues from the DB, pruning unrecoverable ones.

    Returns the list of reconstructed PetRescue objects; rows whose Discord
    channel or messages can no longer be fetched are deleted from the DB.
    """
    db = DB()
    db_result = db.cursor.execute('SELECT * FROM PetRescue;').fetchall()
    rescues = []
    broken_rescues = []
    for i, entry in enumerate(db_result, start=1):
        log.debug(f'Loading pet rescue {i} of {len(db_result)}')
        # Copy + re-translate the pet payload into the rescue's stored language.
        pet = client.expander.pets[entry['pet_id']].copy()
        client.expander.translate_pet(pet, entry['lang'])
        try:
            channel = await client.fetch_channel(entry['channel_id'])
            guild = None
            if not isinstance(channel, discord.DMChannel):
                guild = channel.guild
            # Placeholder message in case the original one was deleted.
            message = FakeMessage('author', guild, channel, 'content')
            if entry['message_id']:
                message = await channel.fetch_message(entry['message_id'])
        except discord.errors.DiscordException:
            # Channel/message no longer reachable -> prune this rescue below.
            broken_rescues.append(entry['id'])
            continue
        rescue = PetRescue(
            pet=pet,
            time_left=0,
            message=message,
            mention=entry['mention'],
            lang=entry['lang'],
            answer_method=client.answer,
            config=client.pet_rescue_config,
        )
        try:
            # alert_message is optional; pet_message must exist.
            if entry['alert_message_id']:
                rescue.alert_message = await channel.fetch_message(
                    entry['alert_message_id'])
            rescue.pet_message = await channel.fetch_message(
                entry['pet_message_id'])
        except discord.errors.DiscordException:
            broken_rescues.append(entry['id'])
            continue
        rescue.start_time = entry['start_time']
        rescues.append(rescue)
    db.close()
    if broken_rescues:
        log.debug(
            f'Pruning {len(broken_rescues)} broken pet rescues from the database: {broken_rescues}.'
        )
        for rescue_id in broken_rescues:
            await cls.delete_by_id(rescue_id=rescue_id)
    return rescues
def reset_database():
    """Wipe the schema and seed a demo admin, machine, coffee catalogue, and one job."""
    DB.drop_all()
    DB.create_all()

    # Admin role + user.
    role = Role()
    role.name = 'Administrator'
    user = User()
    user.public_id = 'd38924fb-9417-4a50-b715-01f805c28063'
    # password
    user.password = '******'
    user.name = 'admin'
    user.email = '*****@*****.**'
    user.role = role

    # Demo coffee machine and product catalogue.
    coffee_machine = CoffeeMachine()
    coffee_machine.name = 'Winston'
    coffee_machine.repository = 'hidden-firefly'
    coffee_type = CoffeeType()
    coffee_type.name = 'Arabica'
    coffee_brand = CoffeeBrand()
    coffee_brand.name = 'Dallmayr'
    coffee_product = CoffeeProduct()
    coffee_product.name = 'Dallmayr Prodomo'
    coffee_product.coffee_brand = coffee_brand
    coffee_product.coffee_type = coffee_type

    # Default brewing profile for the admin user.
    profile = Profile()
    profile.name = 'Morning'
    # NOTE(review): a "percent" field holding 32 * 1000 looks inconsistent
    # with the other percent values — confirm the intended unit.
    profile.water_in_percent = 32 * 1000
    profile.coffee_strength_in_percent = 42
    profile.user = user

    # One historical job tying user, machine and product together.
    job = Job()
    current_time = time.time()
    job.create_date = current_time
    job.coffee_strength_in_percent = 66
    job.water_in_percent = 44
    job.price = 10
    job.doses = 1
    job.user = user
    job.coffee_machine = coffee_machine
    job.coffee_product = coffee_product

    # NOTE(review): only the job is added — presumably relationship cascades
    # persist the related objects; confirm the mapper configuration.
    DB.session.add(job)
    DB.session.commit()
def load_subscriptions(self):
    """Load every Subscription row into self._subscriptions, keyed '<guild>-<channel>'."""
    db = DB()
    rows = db.cursor.execute('SELECT * FROM Subscription;').fetchall()
    subscriptions = {}
    for row in rows:
        key = f'{row["guild_id"]}-{row["channel_id"]}'
        subscriptions[key] = {
            'guild_id': row['guild_id'],
            'guild_name': row['guild'],
            'channel_id': row['channel_id'],
            'channel_name': row['channel'],
            # Platform flags are stored as ints; expose them as bools.
            'pc': bool(row['pc']),
            'switch': bool(row['switch']),
        }
    self._subscriptions = subscriptions
    db.close()
def update(self):
    """Update (or create) the network-config row from POSTed values.

    Returns True on success; returns None implicitly on non-POST or on error.
    """
    if self.request.method == 'POST':
        try:
            _net_if = self.request.POST.get('net_if', '')
            if DB.objects.exists():
                # Update the most recent row in place.
                _db = DB.objects.latest('id')
                _db.net_if = _net_if
                _nets = psutil.net_if_addrs()
                # First address bound to the chosen interface.
                _db.net_address = _nets[_net_if][0].address
                _db.websocket_port = self.request.POST.get(
                    'websocket_port', 9002)
                _db.save()
            else:
                # No row yet: create one from the same POST data.
                _new_db = DB()
                _new_db.net_if = self.request.POST.get('net_if', '')
                _new_db.websocket_port = self.request.POST.get(
                    'websocket_port', 9002)
                _nets = psutil.net_if_addrs()
                _new_db.net_address = _nets[_net_if][0].address
                _new_db.save()
            return True
        # Python 2 except syntax; broad catch prints and falls through.
        except Exception, e:
            print "Exception(Control_System update) : ", e
def reconstruct(self):
    """Rebuild in-memory state from persisted data, then replay buffered updates."""
    self.ts = {}
    self.db = DB.from_data()
    self.executed_ids.clear()
    self.executed_uids.clear()
    # Swap: the old log becomes the replay buffer; start a fresh log.
    # Order matters — apply_updates presumably consumes self.buffer (confirm).
    self.log, self.buffer = [], self.log
    self.apply_updates()
def add(self):
    """Create a sensor row from POSTed form fields, with per-field fallbacks.

    Returns True on success; returns None implicitly on non-POST or on error.
    """
    if self.request.method == 'POST':
        try:
            new_db = DB()
            new_db.uid = uuid.uuid4().hex
            new_db.name = self.request.POST.get('sensor_name', '')
            # Numeric fields fall back to defaults on unparsable input.
            try:
                new_db.min = float(
                    self.request.POST.get('sensor_range_min', 0.0))
            except ValueError:
                new_db.min = 0.0
            try:
                new_db.max = float(
                    self.request.POST.get('sensor_range_max', 1.0))
            except ValueError:
                new_db.max = 1.0
            # NOTE(review): POST.get cannot raise ValueError — this except
            # branch is dead; presumably copied from the numeric fields.
            try:
                new_db.unit = self.request.POST.get('sensor_data_unit', '')
            except ValueError:
                new_db.unit = ''
            try:
                new_db.buffersize = int(
                    self.request.POST.get('sensor_data_buffer_size', 100))
            except ValueError:
                new_db.buffersize = 100
            new_db.save()
            return True
        # Python 2 except syntax; broad catch prints and falls through.
        except Exception, e:
            print "Exception : ", e
def load(self):
    """Load every Bookmark row into self.bookmarks, keyed by row id."""
    db = DB()
    rows = db.cursor.execute('SELECT * FROM Bookmark;').fetchall()
    bookmarks = {}
    for row in rows:
        bookmarks[row['id']] = {
            'id': row['id'],
            'author_id': row['author_id'],
            'author_name': row['author_name'],
            'description': row['description'],
            'team_code': row['team_code'],
            'created': row['created'],
        }
    self.bookmarks = bookmarks
    db.close()
def _extract_concepts(document,terminology,MaxMatcher):
    """
    Extract candidate concept ids from a tokenized document via MaxMatcher.

    document: list of tokens (used directly as doc_token)
    terminology: terminology table prefix ("<terminology>_mm")
    MaxMatcher: cache dict token -> [(cid, sig), ...]; mutated in place
    returns: Concept List (ids whose accumulated score exceeds the threshold)

    NOTE(review): nesting reconstructed from collapsed source — in particular
    whether `next_token_counter += 1` runs on the skip branch too; confirm
    against the original. Python 2 code (uses sets.Set and print statements).
    """
    # Set threshold
    op = Operation()
    threshold = 0.95
    doc_token = document
    #print "len(doc_token) " , len(doc_token)
    candidate_concepts = []
    # Prepare a dictionary for MaxMatcher result of tokens.
    # NOTE(review): SQL built by string concatenation — injection risk if
    # tokens are untrusted; should use a parameterized query.
    for token_row in doc_token:
        if token_row not in MaxMatcher.keys():
            extracted_concepts = DB._execute("select cid, sig from "+ terminology +"_mm where word = '" + token_row + "'")
            MaxMatcher[token_row] = extracted_concepts
    for current_token_counter in range(len(doc_token)-3):  #skip the last 3 token
        current_token = doc_token[current_token_counter]
        skip_counter = 0  # Number of skips
        skip_limit = 2  #Skip limit
        extracted_concepts = MaxMatcher[current_token]
        current_token_concepts = Set()
        current_token_score = dict()
        for c in extracted_concepts:  # Create T_c
            current_token_concepts.add(c[0])
            current_token_score[c[0]] = c[1]
        next_token_counter = 1  # Next word counter
        next_token = doc_token[ current_token_counter + next_token_counter ]  # t is the next word
        while (skip_counter < skip_limit):
            extracted_concepts = MaxMatcher[next_token]
            next_token_concepts = Set()
            next_token_score = dict()
            for c in extracted_concepts:
                next_token_concepts.add(c[0])
                next_token_score[c[0]] = c[1]
            mutual_concepts = next_token_concepts & current_token_concepts
            if len(mutual_concepts) == 0:
                # No overlap: consume one of the allowed skips.
                skip_counter = skip_counter + 1
            else:
                # Narrow to the overlap and accumulate its significance scores.
                current_token_concepts = mutual_concepts
                for c in current_token_concepts:
                    current_token_score[c] += next_token_score[c]
            next_token_counter += 1
            if (current_token_counter + next_token_counter) < len (doc_token):
                next_token = doc_token[ current_token_counter + next_token_counter ]
            else:
                break
        # Keep concepts whose accumulated score clears the threshold.
        candidate_concepts = op.union( candidate_concepts , [c for c in current_token_concepts if current_token_score[c]>threshold])
    #print "-----------------------------------------------"
    #print document
    #print candidate_concepts
    #print "-----------------------------------------------"
    return candidate_concepts
def admin_home(admin_id):
    """Homepage for an admin account: lists every non-admin project."""
    project_list = []
    resp = DB().get_project_list()
    if resp['status']:
        project_list = [p for p in resp['project_list']
                        if 'admin' in p.keys() and not p['admin']]
    return render_template('admin_home.html', admin_detail=g.admin,
                           project_list=project_list)
def __init__(self, project_id, process_name, network):
    """Processor daemon setup: config-DB handle, rotating log, data directories.

    project_id -- id of the owning project in the config DB
    process_name -- used to name this process's log file
    network -- network subdirectory (raw/archive/queue/error live under it)
    """
    self.project_id = project_id
    self.process_name = process_name
    self.network = network

    # Sets up connection w/ project config DB & loads in collector info
    self.db = DB()
    project = self.db.get_project_detail(self.project_id)
    self.project_name = project['project_name']
    configdb = project['project_config_db']
    project_db = self.db.connection[configdb]
    self.project_db = project_db.config

    # Sets up logdir and logging
    logdir = app.config['LOGDIR'] + '/' + self.project_name + '-' + self.project_id + '/logs'
    if not os.path.exists(logdir):
        os.makedirs(logdir)

    # Sets logger w/ name collector_name and level INFO
    self.logger = logging.getLogger('Processor')
    self.logger.setLevel(logging.INFO)

    # Sets up logging file handler
    logfile = logdir + '/%s.log' % self.process_name
    # TODO - port logging rotation params to Mongo for user control later / these default values good
    handler = logging.handlers.TimedRotatingFileHandler(logfile, when='D', backupCount=30)
    handler.setLevel(logging.INFO)

    # Formats
    format = '%(asctime)s %(name)-12s %(levelname)-8s %(message)s'
    dateformat = '%m-%d %H:%M'
    formatter = logging.Formatter(format, dateformat)
    handler.setFormatter(formatter)

    # Adds handler to logger to finish
    self.logger.addHandler(handler)
    self.log('STACK processor for project %s initiated.'
             % self.project_name)

    # Sets up data directory
    self.datadir = app.config['DATADIR'] + '/' + self.project_name + '-' + self.project_id

    # Establish connections to data directories
    self.raw = self.datadir + '/' + self.network + '/raw'
    self.archive = self.datadir + '/' + self.network + '/archive'
    self.queue = self.datadir + '/' + self.network + '/queue'
    self.error = self.datadir + '/' + self.network + '/error'

    if not os.path.exists(self.raw):
        os.makedirs(self.raw)
    if not os.path.exists(self.archive):
        os.makedirs(self.archive)
    if not os.path.exists(self.queue):
        os.makedirs(self.queue)
    if not os.path.exists(self.error):
        os.makedirs(self.error)

    self.log('STACK processor setup completed. Now starting...')
def secret(token):
    """Drop and re-import every site collection when *token* matches the config."""
    db = DB()
    names = ("category", "items", "pages", "news",
             "index_news", "index_slider", "dealers")
    collections = [db.get_db(name) for name in names]

    if token != app.config["FULL_RM_WRITE"]:
        return "your token - '%s' didn't pass" % token

    # Convert the source data, wipe all collections, then re-import each.
    run_convert()
    for collection in collections:
        collection.drop()
    utility.import_db.save_category_to_db()
    utility.import_db.save_items_to_db()
    utility.import_db.save_pages_to_db()
    utility.import_db.save_news_to_db()
    utility.import_db.save_index_news_to_db()
    utility.import_db.save_index_slider_to_db()
    utility.import_db.save_dealers_to_db()
    return "Full RM/WRITE BASE \n"
def setup():
    """First-run page that provisions the initial admin account."""
    form = SetupForm(request.form)
    if form.validate_on_submit():
        password = form.password.data
        # Store both the raw and hashed password, flagged as an admin account.
        resp = DB().create(form.project_name.data,
                           password,
                           generate_password_hash(password),
                           admin=True)
        if resp['status']:
            flash(u'Project successfully created!')
            return redirect(url_for('index'))
        flash(resp['message'])
    return render_template('setup.html', form=form)
def index():
    """STACK homepage: project list, or the first-run setup when no admin exists."""
    start_workers()
    resp = DB().get_project_list()

    project_list = None
    admins = None
    if resp and resp['project_list']:
        project_list = resp['project_list']
        admins = [p for p in project_list
                  if 'admin' in p.keys() and p['admin'] == 1]

    # Without at least one admin account this is a fresh install.
    if not admins:
        return redirect(url_for('setup'))
    return render_template('index.html', project_list=project_list)
def Indexing(self):
    '''
    IR Indexing Operations
    - Elimination of Stopwords
    -
    '''
    # Clear any previous index before rebuilding. (Python 2 module.)
    DB._execute("DELETE from collection_index")
    print "Indexing is started..."
    tp = TextProcessor()
    Collection._load()
    Collection._load_tags()
    #loading document with PMID, tags and abstracts
    for doc in Collection._documents:
        # Collect abstract terms (plus tag terms in global-context mode),
        # then stopword-filter and stem before storing the index.
        index_list = []
        for term in doc.abstract:
            index_list.append(term)
        if GlobalVariables.global_context_activated:
            for term in doc.tag:
                index_list.append(term)
        index_list = tp.EliminateStopWords(index_list)
        index_list = tp.Stem(index_list)
        doc.set_index(index_list)
    print "Indexing is Done!"
def create():
    """Page to create a new project account."""
    form = CreateForm(request.form)
    if form.validate_on_submit():
        password = form.password.data
        db = DB()
        # The store receives both the raw and the hashed password.
        resp = db.create(form.project_name.data,
                         password,
                         generate_password_hash(password),
                         description=form.description.data,
                         email=form.email.data)
        if resp['status']:
            flash(u'Project successfully created!')
            return redirect(url_for('admin_home', admin_id=g.admin['project_id']))
        flash(resp['message'])
    return render_template('create.html', form=form)
def collector(project_name, network, collector_id, task_id=None):
    """Detail / control page for a single collector."""
    # Admins arriving cold have no project loaded in the session yet.
    if g.project is None:
        flash(u'Please navigate to the New Collector page from your homepage panel.')
        return redirect(url_for('index'))

    form = ProcessControlForm(request.form)

    db = DB()
    detail = db.get_collector_detail(g.project['project_id'], collector_id)
    collector = detail['collector']

    # Daemon active status for this collector.
    status_resp = db.check_process_status(g.project['project_id'], 'collect',
                                          collector_id=collector_id)
    active_status = status_resp['message']

    # Surface progress of any pending start/stop/restart task.
    task_status = None
    if task_id:
        async_resp = celery.AsyncResult(task_id)
        if async_resp.state == 'PENDING':
            task_status = 'Collector start/shutdown still in progress...'
        else:
            task_status = 'Collector start/shutdown completed.'

    return render_template('collector.html',
                           collector=collector,
                           active_status=active_status,
                           form=form,
                           task_status=task_status)
def catalogue(query=None):
    """Render the product catalogue, pairing each product with a random image."""
    products = DB.get_products(query)[:20]
    image_dir = os.path.join(os.getcwd(), 'static/images/')
    available = os.listdir(image_dir)
    # One random image per product row.
    images = ['static/images/' + random.choice(available) for _ in products]
    page = make_response(
        render_template('catalogue.html', query=query, products=products,
                        images=images, dim=(140, 170), columns=3)
    )
    # NOTE(review): this deliberately disables the browser's XSS auditor —
    # presumably intentional for this app; confirm before reusing elsewhere.
    page.headers.set('X-XSS-Protection', '0')
    return page
def create_purchases_text(user_email, user_id):
    """Dump the user's purchase rows to purchase_records/<user_id>.csv.

    Each output line is "<col0>, <col1>"; only the first two columns of
    each purchase row are written.
    """
    purchases = DB.get_purchases(user_email)
    target = 'purchase_records/' + str(user_id) + '.csv'
    with open(target, 'w') as outfile:
        outfile.writelines('{}, {}\n'.format(entry[0], entry[1])
                           for entry in purchases)
from flask.ext.sqlalchemy import SQLAlchemy
from models import DB, Team
from fllipit import APP
import api
import random

DB.app = APP
DB.create_all()

# Seed 80 teams with random qualification-round scores.
teams = [
    Team(number=i,
         name="My Team %i" % (i),
         affiliation="Some %i School" % (i),
         round1=random.randrange(0, 400, 1),
         round2=random.randrange(0, 400, 1),
         round3=random.randrange(0, 400, 1),
         round4=random.randrange(0, 400, 1),
         round5=random.randrange(0, 400, 1))
    for i in range(80)
]

# Top 12 ranked teams advance and get a first elimination score.
for qualifier in api.rankTeams(teams)[:12]:
    qualifier.advanceTo4 = True
    qualifier.elim1 = random.randrange(0, 400, 1)

# Top 4 by elim1 advance to the final elimination round.
# NOTE(review): this sorts ALL teams by elim1, presumably relying on a
# model-level default for the 68 teams that never received one — confirm
# against the Team model definition.
for finalist in sorted(teams, key=lambda t: t.elim1, reverse=True)[:4]:
    finalist.advanceTo5 = True
    finalist.elim2 = random.randrange(0, 400, 1)
def index(request):
    """Landing page: render every DB document in the index template."""
    return render_to_response('index.html', {'dbs': DB.objects()})
def __init__(self, project_id, collector_id, process_name):
    """Load project and collector config from Mongo, then set up logging
    and the raw-data directory for this collector process.

    :param project_id: ID of the owning project in the config DB
    :param collector_id: ID of this collector's config document
    :param process_name: unique process name, used for the log filename
    """
    self.project_id = project_id
    self.collector_id = collector_id
    self.process_name = process_name
    # Flipped by the collection thread while data is being pulled.
    self.collecting_data = False

    # Sets up connection w/ project config DB & loads in collector info
    self.db = DB()
    project = self.db.get_project_detail(self.project_id)
    if project['status']:
        self.project_name = project['project_name']
        configdb = project['project_config_db']
        project_db = self.db.connection[configdb]
        self.project_db = project_db.config

    # NOTE(review): if either lookup below fails, later attribute reads
    # (e.g. self.collection_type) will raise AttributeError — confirm the
    # caller guarantees valid IDs.
    resp = self.db.get_collector_detail(self.project_id, self.collector_id)
    if resp['status']:
        collector_info = resp['collector']

        # Load in collector info
        self.collector_name = collector_info['collector_name']
        self.network = collector_info['network']
        self.api = collector_info['api']
        self.collection_type = collector_info['collection_type']
        self.params = collector_info['params']
        self.terms_list = collector_info['terms_list']
        self.languages = collector_info['languages']
        self.locations = collector_info['location']
        self.auth = collector_info['api_auth']

    # TODO - file format to Mongo
    # TODO - less then hour = warning
    # Hourly rotation pattern for raw filenames.
    self.file_format = '%Y%m%d-%H'

    # If this is a streaming collector, reset its stream-limit history.
    if self.collection_type == 'realtime':
        self.project_db.update({'_id': ObjectId(self.collector_id)},
                               {'$set': {'stream_limits': []}})

    # Sets up logdir and logging
    logdir = app.config['LOGDIR'] + '/' + self.project_name + '-' + self.project_id + '/logs'
    if not os.path.exists(logdir):
        os.makedirs(logdir)

    # Sets logger w/ name collector_name and level INFO
    self.logger = logging.getLogger(self.collector_name)
    self.logger.setLevel(logging.INFO)

    # Sets up logging file handler, rotated daily, keeping 30 days.
    logfile = logdir + '/%s.log' % self.process_name
    # TODO - logging params
    # TODO - port logging rotation params to Mongo for user control later / these default values good
    handler = logging.handlers.TimedRotatingFileHandler(logfile, when='D', backupCount=30)
    handler.setLevel(logging.INFO)

    # Formats
    # NOTE(review): this literal was split across lines in the source
    # chunk; reconstructed to match the identical duplicates elsewhere
    # in this file.
    format = '%(asctime)s %(name)-12s %(levelname)-8s %(message)s'
    dateformat = '%m-%d %H:%M'
    formatter = logging.Formatter(format, dateformat)
    handler.setFormatter(formatter)

    # Adds handler to logger to finish
    self.logger.addHandler(handler)
    self.log('STACK collector %s initiated.' % self.collector_name)

    # Sets up rawdir where raw JSON output files are written
    self.rawdir = app.config['DATADIR'] + '/' + self.project_name + '-' + self.project_id + '/' + self.network + '/raw'
    if not os.path.exists(self.rawdir):
        os.makedirs(self.rawdir)

    self.log('All raw files and directories set. Now starting collector...')
class BaseCollector(object):
    """
    Extensible base class for all STACK collectors
    """
    def __init__(self, project_id, collector_id, process_name):
        """Load project/collector config from Mongo, set up logging and
        the raw-data directory for this collector process."""
        self.project_id = project_id
        self.collector_id = collector_id
        self.process_name = process_name
        # Flipped by the collection thread while data is being pulled.
        self.collecting_data = False

        # Sets up connection w/ project config DB & loads in collector info
        self.db = DB()
        project = self.db.get_project_detail(self.project_id)
        if project['status']:
            self.project_name = project['project_name']
            configdb = project['project_config_db']
            project_db = self.db.connection[configdb]
            self.project_db = project_db.config

        resp = self.db.get_collector_detail(self.project_id, self.collector_id)
        if resp['status']:
            collector_info = resp['collector']

            # Load in collector info
            self.collector_name = collector_info['collector_name']
            self.network = collector_info['network']
            self.api = collector_info['api']
            self.collection_type = collector_info['collection_type']
            self.params = collector_info['params']
            self.terms_list = collector_info['terms_list']
            self.languages = collector_info['languages']
            self.locations = collector_info['location']
            self.auth = collector_info['api_auth']

        # TODO - file format to Mongo
        # TODO - less then hour = warning
        # Hourly rotation pattern for raw filenames.
        self.file_format = '%Y%m%d-%H'

        # If this is a streaming collector, reset its stream-limit history.
        if self.collection_type == 'realtime':
            self.project_db.update({'_id': ObjectId(self.collector_id)},
                                   {'$set': {'stream_limits': []}})

        # Sets up logdir and logging
        logdir = app.config['LOGDIR'] + '/' + self.project_name + '-' + self.project_id + '/logs'
        if not os.path.exists(logdir):
            os.makedirs(logdir)

        # Sets logger w/ name collector_name and level INFO
        self.logger = logging.getLogger(self.collector_name)
        self.logger.setLevel(logging.INFO)

        # Sets up logging file handler, rotated daily, keeping 30 days.
        logfile = logdir + '/%s.log' % self.process_name
        # TODO - logging params
        # TODO - port logging rotation params to Mongo for user control later / these default values good
        handler = logging.handlers.TimedRotatingFileHandler(logfile, when='D', backupCount=30)
        handler.setLevel(logging.INFO)

        # Formats
        format = '%(asctime)s %(name)-12s %(levelname)-8s %(message)s'
        dateformat = '%m-%d %H:%M'
        formatter = logging.Formatter(format, dateformat)
        handler.setFormatter(formatter)

        # Adds handler to logger to finish
        self.logger.addHandler(handler)
        self.log('STACK collector %s initiated.' % self.collector_name)

        # Sets up rawdir where raw JSON output files are written
        self.rawdir = app.config['DATADIR'] + '/' + self.project_name + '-' + self.project_id + '/' + self.network + '/raw'
        if not os.path.exists(self.rawdir):
            os.makedirs(self.rawdir)

        self.log('All raw files and directories set. Now starting collector...')

    def go(self):
        """
        Starts and maintains the loop that monitors the collection thread.
        Threads are maintained in the extended versions of the class
        """
        # Checks if we're supposed to be running
        self.run_flag = self.check_flags()['run']
        self.collect_flag = 0
        self.update_flag = 0

        if self.run_flag:
            self.log('Starting Facebook collector %s with signal %d' % (self.process_name, self.run_flag))
            self.set_active(1)

        # If run_flag is set - begin the loop, polling Mongo flags every 2s.
        while self.run_flag:
            try:
                flags = self.check_flags()
                self.run_flag = flags['run']
                self.collect_flag = flags['collect']
                self.update_flag = flags['update']
            except Exception as e:
                # Best-effort: keep looping on transient Mongo failures.
                self.log('Mongo connection refused with exception: %s' % e, level='warn')

            # If we've been flagged to stop or update and we're collecting - shut it down
            if self.collecting_data and (self.update_flag or not self.collect_flag or not self.run_flag):
                self.stop_thread()

            # If we've been flagged to start and we're not collecting - start it up
            # (activeCount() == 1 means only this monitor thread is alive).
            if self.collect_flag and threading.activeCount() == 1:
                self.start_thread()

            time.sleep(2)

        self.log('Exiting Facebook collection.')
        self.set_active(0)

    def write(self, data):
        """
        Called to write raw data to raw file - handles rotation
        """
        # Filename embeds the hour (self.file_format), so a new file is
        # started automatically each hour.
        timestr = time.strftime(self.file_format)
        filename = self.rawdir + '/' + timestr + '-' + self.collector_name + '-' + self.collector_id + '-out.json'
        if not os.path.isfile(filename):
            self.log('Creating new raw file: %s' % filename)

        # One JSON document per line, appended.
        with open(filename, 'a') as rawfile:
            rawfile.write(json.dumps(data).encode('utf-8'))
            rawfile.write('\n')

    def log(self, message, level='info', thread='MAIN:'):
        """
        Logs messages to process logfile

        :param message: text to log (coerced to str)
        :param level: 'info' (default), 'warn', or 'error'
        :param thread: label prefixed to the message, identifies the thread
        """
        message = str(message)
        if level == 'warn':
            self.logger.warning(thread + ' ' + message)
        elif level == 'error':
            self.logger.error(thread + ' ' + message)
        else:
            self.logger.info(thread + ' ' + message)

    def check_flags(self):
        """
        Quick method to grab and return all Mongo flags for given Collector instance
        """
        resp = self.db.get_collector_detail(self.project_id, self.collector_id)
        collector = resp['collector']
        return {
            'run': collector['collector']['run'],
            'collect': collector['collector']['collect'],
            'update': collector['collector']['update'],
            'active': collector['active']
        }

    def set_active(self, active):
        """
        Quick method to set the active flag to 1 or 0
        """
        self.project_db.update({'_id': ObjectId(self.collector_id)}, {'$set': {'active': active}})

    def start_thread(self):
        """
        Modify this method when extending the class to manage the actual collection thread
        """

    def stop_thread(self):
        """
        NOTE(review): original docstring truncated at the end of this
        source chunk — presumably the stop counterpart to start_thread,
        to be overridden by subclasses. Confirm against upstream.
        """
def main():
    """
    Main function to launch the parser

    Parser arguments :
        dimensions
        metrics
        application
        siteid
        table
        since

    Fetches a Google Analytics report via the API, derives a table name
    from the requested metrics/dimensions, and saves the report into the
    DB model for that table.
    """
    parser = argparse.ArgumentParser(description='Process resource to reach')
    parser.add_argument('--dimensions', type=str, default="",
                        help='Dimensions you want. Seperate by a ","')
    parser.add_argument('--metrics', type=str,
                        help='Metrics you want separate by a ","')
    parser.add_argument('--application', type=str,
                        help='ga for Google Analytic', default="ga")
    parser.add_argument('--siteid', type=str, help='Site id')
    parser.add_argument('--table', type=str, help='Table name')
    # BUG FIX: help text previously said 'Table name' (copy/paste error);
    # this flag actually selects the run mode.
    parser.add_argument('--mode', type=str,
                        help='Run mode ("prod" or "debug")', default="prod")
    parser.add_argument('--since', type=str, default="month",
                        help='Retrieve info since',
                        choices=['day', 'week', 'month', 'year'])
    args = parser.parse_args()

    # Setting mode (replaces the old `if args.mode == "debug": debug = True`)
    debug = args.mode == "debug"

    # NOTE(review): these two traces were unguarded in the original,
    # unlike every later trace which is debug-only; kept unconditional.
    print(time.time())
    print("get_token")

    # Retrieve token
    access_token = get_token()
    if debug:
        print(time.time())
        print("Make call google")

    # Retrieve api informations
    rep = call_google_api(access_token, args)
    if debug:
        print(time.time())
    content = rep.json()
    if debug:
        print(time.time())
        print(content)

    headers = content.get("columnHeaders")
    query_infos = content.get("query")

    # Strip the "ga:" prefix from the returned ids value.
    site_id = query_infos.get("ids")[3:]

    # Table name is derived from the requested metrics (plus dimensions,
    # when any were asked for). The original also assigned
    # query_infos.get("metrics") here first — dead code, removed.
    if args.dimensions != "":
        table_name = "_".join(args.metrics.split(",") + args.dimensions.split(","))
    else:
        table_name = "_".join(args.metrics.split(","))

    d = DB()
    elements = {"site_id": "INTEGER",
                "from_date": "DATE",
                "to_date": "DATE",
                "infos": "STRING",
                "data": "STRING"}
    model_to_save = d.get_model(table_name.encode('ascii', 'ignore'), elements)
    model_to_save.infos = headers
    model_to_save.site_id = site_id
    model_to_save.to_date = datetime.datetime.today().isoformat()
    model_to_save.from_date = get_start_date(args.since)
    # Strip the surrounding brackets from the serialized rows list.
    model_to_save.data = json.dumps(content.get(u"rows"))[1:-1]
    model_to_save.save()
    if debug:
        print(time.time())
        print("Finish")
import csv, sqlite3  # Will use these to dump database contents to csv file
import os
from models import (DB, Buyer, Supplier, Product, Staff, Courier, Unit,
                    Descriptor, Brand, Stock, Order)
from peewee import *

# Make sure the output directory exists.
try:
    os.mkdir('csv')
except FileExistsError:
    pass

connection = sqlite3.connect('peewee.db')

# One CSV file per table: header row from the cursor description,
# then every row of the table.
for table in DB.get_tables():
    with open('csv/{}s.csv'.format(table), 'w') as csv_file:
        try:
            cursor = connection.cursor()
            # Table names come from peewee's own registry, not user input,
            # so the interpolation below is not an injection risk.
            cursor.execute('SELECT * FROM {}'.format(table))
            dump = csv.writer(csv_file)
            dump.writerow([column[0] for column in cursor.description])
            dump.writerows(cursor)
        except sqlite3.OperationalError:
            # Skip tables that do not exist in the sqlite file.
            pass

print('Dump taken with much success')
def work():
    """Render the work page listing items from the work DB.

    Returns the rendered template. On a failed DB response the page is
    rendered with an empty list — previously the view fell through and
    implicitly returned None, which Flask turns into a 500 error.
    """
    db = DB('main', 'work')
    resp = db.get_work_list()
    if resp['status']:
        work_list = [item for item in resp['data']]
    else:
        # BUG FIX: explicit fallback instead of returning None.
        work_list = []
    return render_template("work.html", work_list=work_list)
class BaseInserter(object): """ Extensible base class for all STACK processors NOTE - when extending, must initiate connections to network specific data directories! """ def __init__(self, project_id, process_name, network): self.project_id = project_id self.process_name = process_name self.network = network # Sets up connection w/ project config DB & loads in collector info self.db = DB() project = self.db.get_project_detail(self.project_id) self.project_name = project['project_name'] # Grabs connection to project config DB configdb = project['project_config_db'] project_db = self.db.connection[configdb] self.project_db = project_db.config # Grabs connection to insertion DB # NOTE - on init, need to connect to appropriate network collection db_name = self.project_name + '_' + self.project_id self.insert_db = self.db.connection[db_name] # Sets up logdir and logging logdir = app.config['LOGDIR'] + '/' + self.project_name + '-' + self.project_id + '/logs' if not os.path.exists(logdir): os.makedirs(logdir) # Sets logger w/ name collector_name and level INFO self.logger = logging.getLogger('Inserter') self.logger.setLevel(logging.INFO) # Sets up logging file handler logfile = logdir + '/%s.log' % self.process_name # TODO - port logging rotation params to Mongo for user control later / these default values good handler = logging.handlers.TimedRotatingFileHandler(logfile, when='D', backupCount=30) handler.setLevel(logging.INFO) # Formats format = '%(asctime)s %(name)-12s %(levelname)-8s %(message)s' dateformat = '%m-%d %H:%M' formatter = logging.Formatter(format, dateformat) handler.setFormatter(formatter) # Adds handler to logger to finish self.logger.addHandler(handler) self.log('STACK inserter for project %s initiated.' 
% self.project_name) # Sets up data directory self.datadir = app.config['DATADIR'] + '/' + self.project_name + '-' + self.project_id # Establish connections to data directories self.raw = self.datadir + '/' + self.network + '/raw' self.archive = self.datadir + '/' + self.network + '/archive' self.queue = self.datadir + '/' + self.network + '/queue' self.error = self.datadir + '/' + self.network + '/error' if not os.path.exists(self.raw): os.makedirs(self.raw) if not os.path.exists(self.archive): os.makedirs(self.archive) if not os.path.exists(self.queue): os.makedirs(self.queue) if not os.path.exists(self.error): os.makedirs(self.error) self.log('STACK processor setup completed. Now starting...') def go(self): """ Runs the processor """ self.run_flag = self.check_flags()['run'] self.restart_flag = 0 if self.run_flag: self.log('Starting inserter %s with signal %d' % (self.process_name, self.run_flag)) self.set_active(1) while self.run_flag: # Call function to process files self.insert() # Lastly, see if the run status has changed try: flags = self.check_flags() self.run_flag = flags['run'] self.restart_flag = flags['restart'] except Exception as e: self.log('Mongo connection refused with exception when attempting to check flags: %s' % e, level='warn') self.log('Will keep running the processing until reconnect is established.', level='warn') # Clean up upon run loop conclude self.log('Exiting inserter.') self.set_active(0) def log(self, message, level='info', thread='MAIN:'): """ Logs messages to process logfile """ message = str(message) if level == 'warn': self.logger.warning(thread + ' ' + message) elif level == 'error': self.logger.error(thread + ' ' + message) else: self.logger.info(thread + ' ' + message) def check_flags(self): """ Quick method to grab and return all Mongo flags for given Collector instance """ resp = self.project_db.find_one({'module': self.network}) return { 'run': resp['inserter']['run'], 'restart': resp['inserter']['restart'] } def 
set_active(self, active): """ Quick method to set the active flag to 1 or 0 """ self.project_db.update({'module': self.network}, {'$set': {'inserter_active': active}}) def insert(self): """