def google_callback(resp):
    """Google OAuth callback: store the token, load/create the user, fetch
    the Reader subscription list, then redirect onward.

    Falls back to the login page when no token was granted or the
    userinfo request fails.
    """
    access_token = resp['access_token']
    # Flask-OAuth convention: tokens are stored as a (token, secret) pair.
    session['access_token'] = access_token, ''
    if access_token:
        auth = {'Authorization': 'OAuth ' + access_token}
        r = requests.get('https://www.googleapis.com/oauth2/v1/userinfo',
                         headers=auth)
        subscriptions = requests.get(
            'http://www.google.com/reader/api/0/subscription/list',
            headers=auth)
        # FIX: removed leftover debug `import pprint; pprint.pprint(...)`.
        if r.ok:
            data = loads(r.text)
            user = User.load(data['id']) or User.add(**data)
            login_user(user)
            if subscriptions.ok:
                from lxml import etree
                root = etree.XML(subscriptions.text)
                # NOTE(review): the predicate ["title"] is a constant truthy
                # string, so this matches EVERY <string> element; if only the
                # title entries are wanted this should probably be
                # '//string[@name="title"]' -- confirm against the feed XML
                # before changing (behavior kept as-is here).
                subs = [node.text for node in root.xpath('//string["title"]')]
                next_url = session.get('next') or url_for('index', subs=subs)
            else:
                next_url = session.get('next') or url_for('index')
            return redirect(next_url)
    return redirect(url_for('login'))
def google_callback(resp):
    """OAuth callback: persist the Google token, sign the user in, and
    redirect, attaching Reader subscription titles when available."""
    access_token = resp['access_token']
    # Stored as a (token, secret) tuple per the Flask-OAuth convention.
    session['access_token'] = access_token, ''
    if not access_token:
        return redirect(url_for('login'))
    auth_headers = {'Authorization': 'OAuth ' + access_token}
    userinfo = requests.get('https://www.googleapis.com/oauth2/v1/userinfo',
                            headers=auth_headers)
    subscriptions = requests.get(
        'http://www.google.com/reader/api/0/subscription/list',
        headers=auth_headers)
    # Debug dump of the raw subscriptions response (kept as-is).
    import pprint
    pprint.pprint(subscriptions)
    if not userinfo.ok:
        return redirect(url_for('login'))
    profile = loads(userinfo.text)
    account = User.load(profile['id']) or User.add(**profile)
    login_user(account)
    if subscriptions.ok:
        from lxml import etree
        root = etree.XML(subscriptions.text)
        titles = [node.text for node in root.xpath('//string["title"]')]
        next_url = session.get('next') or url_for('index', subs=titles)
    else:
        next_url = session.get('next') or url_for('index')
    return redirect(next_url)
def index():
    """Demo view: insert a user named '111' and echo it back as JSON."""
    # db.create_all()
    db.session.add(User(name='111'))  # add
    db.session.commit()
    # FIX: `User.query` is already a Query object, so `User.query(User)`
    # raised TypeError.  Also query for the name actually inserted ('111'),
    # not the literal 'name', which matched nothing and made
    # `user_info.name` crash on None.
    user_info = User.query.filter(User.name == '111').first()  # query
    print(user_info)
    if user_info is None:
        return jsonify({'user': None})
    return jsonify({'user': user_info.name})
def register():
    """Registration view: insert the submitted user on POST, otherwise
    render the form."""
    reg_form = LoginForm(request.form)
    if request.method != "POST":
        return render_template("login2.html", form=reg_form)
    new_user = User(reg_form.username.data, reg_form.password.data)
    new_user.insertOne()
    return "Register Success"
def setup_initial_data(db):
    """Seed the database: two users, two clients for user 1, two apps for
    client 1, and two messages targeting app 1."""
    from argon2 import PasswordHasher
    hasher = PasswordHasher()
    u1 = User(name="User 1", password=hasher.hash("password1"))
    u2 = User(name="User 2", password=hasher.hash("password2"))
    c1 = Client(name="client_u1_1", user=u1,
                token="aaaaAAAAbbbbBBBB0000111-C1")
    c2 = Client(name="client_u1_2", user=u1,
                token="aaaaAAAAbbbbBBBB0000111-C2")
    a1 = Application(registration_id="app_c1_1", client=c1,
                     routing_token="aaaaAAAAbbbbBBBB0000111-A1")
    a2 = Application(registration_id="app_c1_2", client=c1,
                     routing_token="aaaaAAAAbbbbBBBB0000111-A2")
    m1 = Message(data='{"name":"message1"}', priority=Priority.NORMAL,
                 time_to_live=0, target=a1)
    m2 = Message(data='{"name":"message2"}', priority=Priority.NORMAL,
                 time_to_live=0, target=a1)
    db.session.add_all([u1, u2, c1, c2, a1, a2, m1, m2])
    db.session.commit()
def db_test(session=None):
    """Session smoke test: insert a throwaway user, then delete it again.

    NOTE(review): assumes the caller supplies a live session; calling with
    the default None crashes on `session.add` -- presumably a decorator or
    fixture injects it. Confirm at the call sites.
    """
    throwaway = User()
    throwaway.username = '******'
    session.add(throwaway)
    session.commit()
    session.delete(throwaway)
    session.commit()
def user():
    """Login view: check the submitted credentials and redirect on
    success; re-render the form with a message on failure."""
    login_form = LoginForm(request.form)
    if request.method == "POST":
        candidate = User(login_form.username.data, login_form.password.data)
        if candidate.isExist():
            return redirect("http://www.jikexueyuan.com")
        return render_template("login2.html", message="Login Failed",
                               form=login_form)
    return render_template("login2.html", form=login_form)
def test_save_user():
    """Round-trip a User through save() and reload it by oauth id."""
    to_save = User("1234", "twit")
    to_save.save()
    # FIX: this line was commented out, leaving `loaded_user` undefined --
    # every assertion below raised NameError instead of testing anything.
    loaded_user = User.get_by_oauth_id("1234")
    assert to_save.screenname == loaded_user.screenname == "twit"
    assert loaded_user.created_date is not None
    assert loaded_user.updated_date is not None
    assert loaded_user.moderator is not None
def google_callback(resp):
    """OAuth callback: fetch the Google profile, log the user in, and
    redirect; fall back to the login page on failure."""
    access_token = resp['access_token']
    # Stored as a (token, secret) pair, per the Flask-OAuth convention.
    session['access_token'] = access_token, ''
    if access_token:
        r = requests.get('https://www.googleapis.com/oauth2/v1/userinfo',
                         headers={'Authorization': 'OAuth ' + access_token})
        if r.ok:
            profile = loads(r.text)
            account = User.load(profile['id']) or User.add(**profile)
            login_user(account)
            return redirect(session.get('next') or url_for('index'))
    return redirect(url_for('login'))
async def removeUser(loop):
    """Delete the user with id '1234' through the ORM connection pool."""
    credentials = {'user': '******', 'password': '', 'db': 'test'}
    await orm.create_pool(loop=loop, **credentials)
    target = User(id='1234')
    await target.remove()
    await orm.close_pool()
def make_mod(user):
    """Promote the user with the given oauth id to moderator.

    Returns "yep" on success, "no" when no such user exists.
    """
    found = User.get_by_oauth_id(user)
    if found is None:
        return "no"
    found.make_mod()
    return "yep"
def SignUp(self, request, context):
    """Creates a new db user object and returns a TokenReply.

    Duplicate emails and invalid input map to INVALID_ARGUMENT on the
    gRPC context; anything else maps to INTERNAL.
    """
    log = get_logger()
    result = TokenReply()
    try:
        # SECURITY FIX: never bind the plaintext password into the log.
        log = log.bind(email=request.email)
        if len(request.password) < 6:
            # SECURITY FIX: report the length -- the old message
            # interpolated the password itself into the error details.
            raise ValueError(
                f"Password too short. Received length {len(request.password)}")
        token = User.create(email=request.email.lower(),
                            password=request.password,
                            phone=request.phone,
                            name=request.name)
        if token:
            result = TokenReply(token=token)
    except IntegrityError:
        # Unique constraint on email.
        context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
        context.set_details(f"Duplicate email {request.email}")
    except ValueError as err:
        context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
        context.set_details(str(err))
    except Exception as err:
        context.set_code(grpc.StatusCode.INTERNAL)
        context.set_details(str(err))
    finally:
        handle_resp('signup', context, log)
    return result
def add_user():
    """Register a new user from the posted form.

    Re-renders the register page with an error when the name is taken,
    otherwise stores the user and renders the login page.
    """
    name = request.form.get('name', default=None)
    password = request.form.get('password', default=None)
    email = request.form.get('email', default=None)
    sex = request.form.get('sex', default=None)
    db = DBSession()
    # FIX: close the session in a finally block so it is not leaked when
    # the query or the commit raises.
    try:
        u = db.query(User).filter(User.name == name).first()
        if u is not None:
            # "username ... already exists, please re-enter"
            error_message = '用户名' + name + '已存在,请重新输入'
            res = {
                'code': 100,
                'message': error_message,
                'data': {
                    'email': email,
                    'name': name
                }
            }
            return render_template('user/register.html', result=res)
        # NOTE(review): the password is persisted in plaintext -- it should
        # be hashed (e.g. werkzeug generate_password_hash) before storage.
        user = User(id=get_uuid(), name=name, password=password, email=email,
                    sex=sex, create_time=datetime.now())
        db.add(user)
        db.commit()
        res = {'code': 200, 'message': 'success'}
        return render_template('login.html', result=res)
    finally:
        db.close()
async def editUser(loop):
    """Rename the user with id '1234' through the ORM connection pool."""
    credentials = {'user': '******', 'password': '', 'db': 'test'}
    await orm.create_pool(loop=loop, **credentials)
    target = User(id='1234', name='Test1234')
    await target.update()
    await orm.close_pool()
def create_user(username, password, first_name, last_name, is_admin=False,
                session=None):
    """Create and persist a User with a salted SHA-512 password digest.

    When *session* is supplied the caller owns the transaction (we only
    flush); otherwise a fresh session is opened and committed here.

    Returns the new User instance.
    """
    import secrets
    # SECURITY FIX: use a CSPRNG for the salt -- random.sample is
    # predictable and unsuitable for security-sensitive values.
    salt = ''.join(secrets.choice(string.printable) for _ in range(20))
    # FIX: renamed `hash` -> `hasher`; the old name shadowed the builtin.
    hasher = hashlib.sha512()
    hasher.update((password + salt).encode())
    salted_password = hasher.hexdigest()
    s = session or get_session()
    new_user = User(username=username, first_name=first_name,
                    last_name=last_name, password=salted_password,
                    salt=salt, is_admin=is_admin)
    s.add(new_user)
    s.flush()
    if session is None:
        s.commit()
    return new_user
async def addUser(loop):
    """Insert a new user row through the ORM connection pool."""
    credentials = {'user': '******', 'password': '', 'db': 'test'}
    await orm.create_pool(loop=loop, **credentials)
    new_user = User(id='9999', name='Test129999')
    await new_user.save()
    await orm.close_pool()
async def findUserById(loop):
    """Look up the user with id '9999' and print the result."""
    credentials = {'user': '******', 'password': '', 'db': 'test'}
    await orm.create_pool(loop=loop, **credentials)
    rows = await User().findById('9999')
    print(rows)
    await orm.close_pool()
def _start_callback(self, update, context):
    """Reply to /start, register the sender if unknown, and send a task."""
    tg_user = update.effective_user
    LOGGER.info('User {} started bot'.format(tg_user.username))
    self._send_message(update, context, self._answers['start'])
    with session_scope(self._session) as session:
        already_known = session.query(User).filter_by(id=tg_user.id).count()
        if not already_known:
            session.add(User(id=tg_user.id, name=tg_user.username))
    self._send_task(update, context)
def post(body):
    """Create a user from the request body and return its serialized form.

    Returns 400 on a database error, otherwise 201 with the stored row.
    """
    from argon2 import PasswordHasher
    name = body["name"]
    hashed = PasswordHasher().hash(body["password"])
    try:
        db.session.add(User(name=name, password=hashed))
        db.session.commit()
    except SQLAlchemyError:
        return NoContent, 400
    return jsonify(User.query.filter_by(name=name).one().as_dict()), 201
def generate():
    """ This generator progressively returns data to the browser. """
    try:
        rk.extract_code(request.url)
        yield yield_json(output="Requesting session token...")
        rk.request_token()
        # store to the database
        User.from_rk(rk)
        yield yield_json(output="Accessing profile...")
        profile = rk.get_profile()
        items = list(rk.get_fitness_items())
        nitems = len(items)
        yield yield_json(profile=profile, nitems=nitems, state='complete')
        # FIX: `raise StopIteration` inside a generator is a RuntimeError on
        # Python 3.7+ (PEP 479); a bare return ends the generator the same
        # way on both Python 2 and 3.
        return
        # FIXME pass this off to a celery task (deliberately dead code)
        for n, item in enumerate(items):
            item = rk.get_fitness_item(item)
            yield yield_json(n=n, nitems=nitems)
        yield yield_json(
            output="Transfer complete. Terminating connection",
            state='complete')
    except Exception as e:
        # FIX: StandardError is Python 2 only (NameError on Python 3);
        # `e.message` likewise -- fall back to str(e) when absent.
        yield yield_json(
            output="CARRIER TERMINATED",
            error=getattr(e, 'message', str(e)),
            state='failed')
        raise e
async def findUser(loop):
    """Query users named 'Test' and print each match."""
    credentials = {'user': '******', 'password': '', 'db': 'test'}
    await orm.create_pool(loop=loop, **credentials)
    # rs = await u.find()  # would fetch every user
    matches = await User().find(where='name = \'Test\'')
    for row in matches:
        print('name: %s, value: %s' % (row['name'], row['id']))
    await orm.close_pool()
def add_user(body):
    """
    Creates a new user

    Only the admin user can create users.

    :param body: The user account to create
    :type body: dict | bytes

    :rtype: None
    """
    try:
        if not connexion.request.is_json:
            return NoContent, 400
        get_db().add(User(**body))
        get_db().commit()
        return NoContent, 201
    except Exception as e:
        return {'message': str(e)}, 500
async def authenticate(
        self, request: HTTPConnection
) -> Optional[Tuple[AuthCredentials, AuthUser]]:
    """Resolve the JWT header into credentials; anonymous when the header
    is absent, and anonymous-with-error when verification fails."""
    header = request.headers.get(JWT_AUTH_HEADER)
    if header is None:
        return AuthCredentials(scopes=[]), AuthUser(user_id=None)
    try:
        scheme, token = header.split()
        payload = await _Authenticate.verify(token)
    except Exception as exc:
        anonymous = AuthUser(user_id=None)
        return AuthCredentials(scopes=[], error_message=str(exc)), anonymous
    scopes = User.get_permission(user_id=payload.user_id)
    return (AuthCredentials(scopes=scopes, logged_in=True),
            AuthUser(user_id=payload.user_id))
def oauth_authorized(resp):
    """Facebook OAuth callback: create or refresh the user, log them in,
    and redirect to the stored 'next' URL (or the root)."""
    if not resp:
        # Provider denied the grant; surface its reason to the user.
        flash('Access denied. Reason: {0} Error: {1}'.format(
            request.args['error_reason'],
            request.args['error_description']), 'danger')
        return redirect('/')
    session['oauth_token'] = (resp['access_token'], '')
    me = facebook.get('/me')
    # Existing account by oauth id, or a brand-new one from the profile.
    user = User.load(me.data['id']) or User(email=me.data['email'],
                                            oauth_id=me.data['id'],
                                            token=resp['access_token'])
    # Always refresh the stored token.
    user.token = resp['access_token']
    if not user.id:
        # Brand-new user (no primary key yet): seed a starter task.
        # NOTE(review): the Task is never added to the session explicitly --
        # presumably the `user=user` relationship cascades it; confirm.
        Task(title='Task 1', text='Task 1 text', priority=TaskPriority.MEDIUM,
             status=TaskStatus.PENDING, created=datetime.now(), user=user)
    dbsession.add(user)
    dbsession.commit()
    login_user(user)
    flash('Logged in as {0}'.format(me.data['email']), 'success')
    return redirect(session.get('next') or '/')
def Login(self, request, context):
    """Login user and return a JWT token if succeed"""
    log = get_logger()
    reply = TokenReply()
    try:
        email = request.email.lower()
        log = log.bind(email=email, method="login")
        token = User.login(email, request.password)
        if token:
            reply = TokenReply(token=token)
    except NoResultFound:
        context.set_code(grpc.StatusCode.NOT_FOUND)
        context.set_details('Invalid username')
    except ValueError as err:
        context.set_code(grpc.StatusCode.PERMISSION_DENIED)
        context.set_details(str(err))
    except Exception as err:
        context.set_code(grpc.StatusCode.INTERNAL)
        context.set_details(str(err))
    finally:
        handle_resp('login', context, log)
    return reply
def get_user(self, data, **kwargs):
    """Build a User from the deserialized *data* dict."""
    return User(**data)
def moderators():
    """Render the moderators page with every known user."""
    all_users = User.get_all()
    return render_template('moderators.html', users=all_users)
def load_user(user_id):
    """Look up and return the user with the given id."""
    return User.load(user_id)
    # Tail of create_app(): wire the API spec, config and database, then
    # hand the wrapped app back to the caller.
    app.add_api("openapi.yml", resolver=RestyResolver("api"),
                strict_validation=True)
    app.app.config.from_object(config_object)
    db.init_app(app.app)
    app.app.app_context().push()
    db.create_all()
    return app


def flask_app():
    """Returns an actual flask app for using 'flask shell'"""
    return create_app().app


if __name__ == "__main__":
    # Dev config when the debug flag is set, prod otherwise.
    CONFIG = configs.DevConfig if get_debug_flag() else configs.ProdConfig
    app = create_app(CONFIG)
    if CONFIG.USER and CONFIG.PASS:
        # Seed the configured account if it does not exist yet.
        try:
            User.query.filter_by(name=CONFIG.USER).one()
        except NoResultFound:
            from argon2 import PasswordHasher
            ph = PasswordHasher()
            user = User(name=CONFIG.USER, password=ph.hash(CONFIG.PASS))
            db.session.add(user)
            db.session.commit()
    app.run(port=CONFIG.APP_PORT, debug=CONFIG.DEBUG)
#!/usr/bin/env python
"""Create a User row for an email address read from stdin."""
__author__ = "Vilhelm Prytz"
__email__ = "*****@*****.**"

import sys
from pathlib import Path

# Make the repository root importable before pulling in app modules.
sys.path.append(str(Path(__file__).parent.parent.absolute()))

from app import db, app  # noqa: E402
from orm import User  # noqa: E402

new_user = User(email=input("Enter email: "))

with app.app_context():
    db.session.add(new_user)
    db.session.commit()
async def register(*_, create: dict = None) -> User:
    """Create a user from *create*, encoding the password first."""
    payload = CreateUser(**create)
    payload.password = Password.encode(payload.password)
    return User.create(**payload.dict())
def showUser(username, password):
    """Return the stored user matching the given credentials."""
    probe = User(username, password)
    return User.selectOne(probe)
def get_commits(slug, repos_folder):
    """Mine the local git clone for *slug*: persist commits that reference
    GitHub issues, per-line blame attribution for the changed files, and
    the contributors involved.

    Returns the slug in every case (also on error) so callers can track
    which repo was processed.
    """
    # Contributor identity -> small integer id, assigned in discovery order.
    contributors = {}
    counter = itertools.count(start=1)
    basic_classifier = BasicFileTypeClassifier()
    session = SessionWrapper.new()
    try:
        folder_name = slugToFolderName(slug)
        folder_path = os.path.join(repos_folder, folder_name)
        # Inverted range: the first commit seen updates both bounds.
        min_commit = datetime.now(timezone.utc)
        max_commit = min_commit - timedelta(days=100 * 365)
        total_commits = 0
        if not os.path.exists(folder_path):
            return slug
        try:
            db_repo = session.query(Repo).filter_by(slug=slug).one()
        except exc.NoResultFound:
            db_repo = Repo(slug, min_commit, max_commit, total_commits)
            session.add(db_repo)
            session.commit()
        except exc.MultipleResultsFound:
            logger.warning(msg="Found multiple results querying for repo %s." % slug)
            pass
        git_repo = pygit2.Repository(folder_path)
        last = git_repo[git_repo.head.target]
        # Fetch all commits as an iterator, and iterate it
        for c in git_repo.walk(last.id, pygit2.GIT_SORT_TIME):
            commit = CommitWrapper(c)
            total_commits += 1
            sha = commit.getSha()
            authored_datetime = commit.getAuthoredDate()
            committed_datetime = commit.getCommittedDate()
            if authored_datetime < min_commit:
                min_commit = authored_datetime
            if authored_datetime > max_commit:
                max_commit = authored_datetime
            # Assign stable small ids to (name, email) pairs, lowercased.
            (author_name, author_email) = commit.getAuthor()
            (author_name_l, author_email_l) = (author_name.lower(),
                                               author_email.lower())
            (committer_name, committer_email) = commit.getCommitter()
            (committer_name_l, committer_email_l) = (committer_name.lower(),
                                                     committer_email.lower())
            if (author_name_l, author_email_l) not in contributors:
                contributors[(author_name_l, author_email_l)] = next(counter)
            author_id = contributors[(author_name_l, author_email_l)]
            if (committer_name_l, committer_email_l) not in contributors:
                contributors[(committer_name_l, committer_email_l)] = next(counter)
            committer_id = contributors[(committer_name_l, committer_email_l)]
            message = commit.getMessage()
            if message is not None:
                issue_ids = commit.getIssueIds()
                if len(issue_ids) >= 1:
                    num_valid_issues = 0
                    for issue_id in issue_ids:
                        try:
                            # was session_travis
                            gh_issue = session.query(GhIssue).filter(
                                and_(GhIssue.slug == slug,
                                     GhIssue.issue_number == issue_id)).one()
                        except exc.MultipleResultsFound:
                            logger.warning(
                                msg="{0}: Issue {1} has multiple entries.".format(slug, issue_id))
                            continue
                        except exc.NoResultFound:
                            logger.warning(
                                msg="{0}: Issue {1} no entry found in the issue table.".format(slug, issue_id))
                            continue
                        try:
                            db_link = session.query(IssueLink).filter(
                                and_(IssueLink.repo_id == db_repo.id,
                                     IssueLink.sha == sha,
                                     IssueLink.issue_number == issue_id)).one()
                        except exc.NoResultFound:
                            # why authored_datetime and not commited_datetime ???????? ## TODO
                            delta_open = (
                                authored_datetime -
                                gh_issue.created_at.replace(tzinfo=pytz.utc)
                            ).total_seconds()
                            ### closed at is important!!!!!!!! ## TODO
                            if gh_issue.closed_at is not None:
                                delta_closed = (
                                    authored_datetime -
                                    gh_issue.closed_at.replace(tzinfo=pytz.utc)
                                ).total_seconds()
                                # Counts only when authored while the issue
                                # was open and the issue is not a PR.
                                if delta_open > 0 and delta_closed <= 0 and gh_issue.pr_num is None:
                                    num_valid_issues += 1
                            else:
                                delta_closed = None
                            db_link = IssueLink(db_repo.id, sha, issue_id,
                                                gh_issue.pr_num is not None,
                                                delta_open, delta_closed)
                            session.add(db_link)
                    # Commits with no valid issue reference are skipped.
                    if not num_valid_issues:
                        continue
                    first_msg_line = message.split('\n')[0]
                    parents = commit.getParents()
                    num_parents = len(parents)
                    # Skip root commits: blame below needs a parent.
                    if not num_parents:
                        continue
                    sha_parent = parents[0].hex
                    diff = commit.getDiff(git_repo)
                    try:
                        db_commit = session.query(Commit).filter_by(
                            sha=sha).one()
                    except exc.NoResultFound:
                        db_commit = Commit(db_repo.id, sha, authored_datetime,
                                           author_id, committer_id,
                                           first_msg_line, num_parents,
                                           diff.stats.insertions,
                                           diff.stats.deletions,
                                           diff.stats.files_changed)
                        session.add(db_commit)
                        session.commit()
                    # TODO: part to extract into a separate script
                    blamed_commits = {}
                    for patch in diff:
                        old_file = patch.delta.old_file.path
                        label = basic_classifier.labelFile(old_file)
                        # Ignore changes to documentation files
                        if label == basic_classifier.DOC:
                            continue
                        line_labels = {}
                        blame_counter = {}
                        for hunk in patch.hunks:
                            if hunk.old_lines:
                                # Classify each deleted line of the hunk.
                                for hl in hunk.lines:
                                    if hl.origin == '-':
                                        line_labels[hl.old_lineno] = basic_classifier.labelDiffLine(
                                            hl.content.replace('\r', '').replace('\n', ''))
                                try:
                                    # Blame the parent revision over the
                                    # hunk's old line range.
                                    for bh in git_repo.blame(
                                            old_file,
                                            newest_commit=sha_parent,
                                            min_line=hunk.old_start,
                                            max_line=hunk.old_start + hunk.old_lines - 1):
                                        blamed_sha = str(bh.final_commit_id)
                                        if blamed_sha in blamed_commits:
                                            blamed_commit = blamed_commits[blamed_sha]
                                        else:
                                            try:
                                                blamed_commit = CommitWrapper(
                                                    git_repo.revparse_single(blamed_sha))
                                                blamed_commits[blamed_sha] = blamed_commit
                                                blamed_parents = blamed_commit.getParents()
                                                blamed_num_parents = len(blamed_parents)
                                                if not blamed_num_parents:
                                                    ins = None
                                                    dels = None
                                                    files = None
                                                else:
                                                    blamed_diff = blamed_commit.getDiff(git_repo)
                                                    ins = blamed_diff.stats.insertions
                                                    dels = blamed_diff.stats.deletions
                                                    files = blamed_diff.stats.files_changed
                                                    # Ignore commits that changed more than 100 files
                                                    if files >= 100:
                                                        continue
                                                try:
                                                    blamed_db_commit = session.query(Commit).filter_by(
                                                        sha=blamed_sha).one()
                                                except exc.MultipleResultsFound:
                                                    logger.warning(
                                                        msg="{0}: Multiple rows for blamed sha {1}.".format(slug, blamed_sha))
                                                    traceback.print_exc()
                                                except exc.NoResultFound:
                                                    # Same contributor-id
                                                    # bookkeeping as above,
                                                    # for the blamed commit.
                                                    blamed_authored_datetime = blamed_commit.getAuthoredDate()
                                                    (blamed_author_name,
                                                     blamed_author_email) = blamed_commit.getAuthor()
                                                    (blamed_author_name_l,
                                                     blamed_author_email_l) = (
                                                        blamed_author_name.lower(),
                                                        blamed_author_email.lower())
                                                    (blamed_committer_name,
                                                     blamed_committer_email) = blamed_commit.getCommitter()
                                                    (blamed_committer_name_l,
                                                     blamed_committer_email_l) = (
                                                        blamed_committer_name.lower(),
                                                        blamed_committer_email.lower())
                                                    if (blamed_author_name_l,
                                                            blamed_author_email_l) not in contributors:
                                                        contributors[(blamed_author_name_l,
                                                                      blamed_author_email_l)] = next(counter)
                                                    blamed_author_id = contributors[
                                                        (blamed_author_name_l, blamed_author_email_l)]
                                                    if (blamed_committer_name_l,
                                                            blamed_committer_email_l) not in contributors:
                                                        contributors[(blamed_committer_name_l,
                                                                      blamed_committer_email_l)] = next(counter)
                                                    blamed_committer_id = contributors[
                                                        (blamed_committer_name_l, blamed_committer_email_l)]
                                                    blamed_message = blamed_commit.getMessage()
                                                    blamed_first_msg_line = blamed_message.split('\n')[0]
                                                    blamed_db_commit = Commit(
                                                        db_repo.id, blamed_sha,
                                                        blamed_authored_datetime,
                                                        blamed_author_id,
                                                        blamed_committer_id,
                                                        blamed_first_msg_line,
                                                        blamed_num_parents,
                                                        ins, dels, files)
                                                    session.add(blamed_db_commit)
                                                    session.commit()
                                            except Exception as e:
                                                logger.error(
                                                    msg="{0}: revparse error {1}:\t{2}".format(slug, blamed_sha, e))
                                                traceback.print_exc()
                                        # Count blamed lines classified as code.
                                        # NOTE(review): line_labels only holds
                                        # '-' lines, so this can KeyError for
                                        # context lines -- it is swallowed by
                                        # the blame-error handler below.
                                        for line_num in range(
                                                bh.final_start_line_number,
                                                bh.final_start_line_number + bh.lines_in_hunk):
                                            if line_labels[line_num] == basic_classifier.CG_CODE:
                                                blame_counter.setdefault(blamed_sha, 0)
                                                blame_counter[blamed_sha] += 1
                                except Exception as e:
                                    logger.error(
                                        msg="{0} blame error {1}:\t{2}".format(slug, sha, e))
                        # Persist per-file blame totals for this patch.
                        for blamed_sha, num_lines in blame_counter.items():
                            b = Blame(db_repo.id, sha, old_file, label,
                                      blamed_sha, num_lines)
                            session.add(b)
                        session.commit()
        # Persist the contributor map in id order.
        for (name, email), user_id in sorted(contributors.items(),
                                             key=lambda e: e[1]):
            try:
                db_user = session.query(User).filter(
                    and_(User.name == func.binary(name),
                         User.email == func.binary(email),
                         User.repo_id == db_repo.id)).one()
            except exc.NoResultFound:
                db_user = User(db_repo.id, user_id, name, email)
                session.add(db_user)
            except exc.MultipleResultsFound:
                # FIXME this should'nt be happening
                # is it because we allow name aliases during mining?
                # How do we deal with it now?
                logger.warning(
                    msg="Multiple entries for user \'{0}\' <{1}> in repo {2}".format(name, email, db_repo.slug))
        db_repo.min_commit = min_commit
        db_repo.max_commit = max_commit
        db_repo.total_commits = total_commits
        session.add(db_repo)
        session.commit()
        return slug
    except Exception as e:
        logger.error(msg="{0}: unknown error:\t{1}".format(slug, e))
        traceback.print_exc()
    finally:
        # NOTE(review): returning from finally swallows any in-flight exception.
        return slug
def get_commits(slug, repos_folder):
    """Mine the local git clone for *slug*: persist every non-root commit,
    its per-file change stats, links to referenced GitHub issues, and the
    contributors involved.

    Returns the slug in every case (also on error) so callers can track
    which repo was processed.
    """
    # Contributor identity -> small integer id, assigned in discovery order.
    contributors = {}
    counter = itertools.count(start=1)
    basic_classifier = BasicFileTypeClassifier()
    session = SessionWrapper.new()
    try:
        folder_name = slugToFolderName(slug)
        folder_path = os.path.join(repos_folder, folder_name)
        # Inverted range: the first commit seen updates both bounds.
        min_commit = datetime.now(timezone.utc)
        max_commit = min_commit - timedelta(days=100 * 365)
        total_commits = 0
        if not os.path.exists(folder_path):
            return slug
        try:
            db_repo = session.query(Repo).filter_by(slug=slug).one()
            # the reason why we return here is to skip analyzing
            # again a repo in case of crashing exception that forces
            # the script to be run again
            logger.info(
                msg="Skipping analysis of commits from %s, already in the db" % slug)
            #return slug
        except exc.NoResultFound:
            db_repo = Repo(slug, min_commit, max_commit, total_commits)
            session.add(db_repo)
            session.commit()
        except exc.MultipleResultsFound:
            logger.warning(msg="Found multiple results querying for repo %s." % slug)
            pass
        git_repo = pygit2.Repository(folder_path)
        last = git_repo[git_repo.head.target]
        # Fetch all commits as an iterator, and iterate it
        for c in git_repo.walk(last.id, pygit2.GIT_SORT_TIME):
            commit = CommitWrapper(c)
            total_commits += 1
            sha = commit.getSha()
            authored_datetime = commit.getAuthoredDate()
            committed_datetime = commit.getCommittedDate()
            if authored_datetime < min_commit:
                min_commit = authored_datetime
            if authored_datetime > max_commit:
                max_commit = authored_datetime
            # Assign stable small ids to (name, email) pairs, lowercased.
            (author_name, author_email) = commit.getAuthor()
            (author_name_l, author_email_l) = (author_name.lower(),
                                               author_email.lower())
            (committer_name, committer_email) = commit.getCommitter()
            (committer_name_l, committer_email_l) = (committer_name.lower(),
                                                     committer_email.lower())
            if (author_name_l, author_email_l) not in contributors:
                contributors[(author_name_l, author_email_l)] = next(counter)
            author_id = contributors[(author_name_l, author_email_l)]
            if (committer_name_l, committer_email_l) not in contributors:
                contributors[(committer_name_l, committer_email_l)] = next(counter)
            committer_id = contributors[(committer_name_l, committer_email_l)]
            parents = commit.getParents()
            num_parents = len(parents)
            # Skip root commits.
            if not num_parents:
                continue
            message = commit.getMessage().strip()
            try:
                db_commit = session.query(Commit).filter_by(
                    repo_id=db_repo.id, sha=sha).one()
                continue  # if already present, stop and go on analyzing the next one
            except exc.NoResultFound:
                diff = commit.getDiff(git_repo)
                loc_added = diff.stats.insertions
                loc_deleted = diff.stats.deletions
                num_files_touched = diff.stats.files_changed
                # get info about changes to src files in the new commit
                all_files, src_files, num_src_files_touched, src_loc_added, src_loc_deleted = \
                    CommitWrapper.get_src_changes(basic_classifier, diff)
                try:
                    db_commit = Commit(db_repo.id, sha, authored_datetime,
                                       author_id, committer_id, message,
                                       num_parents, loc_added, loc_deleted,
                                       num_files_touched, all_files,
                                       src_loc_added, src_loc_deleted,
                                       num_src_files_touched, src_files)
                    session.add(db_commit)
                    # required to flush the pending data before adding to the CommitFiles table below
                    session.commit()
                except:
                    # NOTE(review): bare except; retries the insert with the
                    # text fields emptied -- presumably to dodge encoding
                    # errors in message/file lists. Confirm the intent.
                    all_files = ""
                    src_files = ""
                    message = ""
                    db_commit = Commit(db_repo.id, sha, authored_datetime,
                                       author_id, committer_id, message,
                                       num_parents, loc_added, loc_deleted,
                                       num_files_touched, all_files,
                                       src_loc_added, src_loc_deleted,
                                       num_src_files_touched, src_files)
                    session.add(db_commit)
                    # required to flush the pending data before adding to the CommitFiles table below
                    session.commit()
                # parse changed files per diff
                for patch in diff:
                    commit_file = os.path.basename(patch.delta.new_file.path)
                    try:
                        commit_file = session.query(CommitFiles).filter_by(
                            commit_sha=sha, repo_slug=slug,
                            file=commit_file).one()
                        continue  # if already present, stop and go on analyzing the next one
                    except exc.NoResultFound:
                        lang = basic_classifier.labelFile(commit_file)
                        # Tally added/deleted lines from the hunks.
                        # NOTE(review): deletions are accumulated as a
                        # negative count (loc_del -= 1) -- confirm the
                        # CommitFiles schema expects that sign.
                        loc_ins = 0
                        loc_del = 0
                        for hunk in patch.hunks:
                            for hl in hunk.lines:
                                if hl.origin == '-':
                                    loc_del -= 1
                                elif hl.origin == '+':
                                    loc_ins += 1
                        commit_file = CommitFiles(db_repo.id, db_repo.slug,
                                                  sha, commit_file, loc_ins,
                                                  loc_del, lang)
                        session.add(commit_file)
                        session.commit()
                # Link this commit to any GitHub issues its message cites.
                if message is not None:
                    issue_id_results = commit.getIssueIds()
                    if len(issue_id_results) >= 1:
                        num_valid_issues = 0
                        for (line_num, issue_ids) in issue_id_results:
                            for issue_id in issue_ids:
                                logger.info(msg="Analyzing {0} issue {1}.".format(slug, issue_id))
                                try:
                                    gh_issue = session.query(GhIssue).filter(
                                        and_(GhIssue.slug == slug,
                                             GhIssue.issue_number == issue_id)).one()
                                except exc.MultipleResultsFound:
                                    logger.warning(
                                        msg="{0}: Issue {1} has multiple entries.".format(slug, issue_id))
                                    continue
                                except exc.NoResultFound:
                                    logger.warning(
                                        msg="{0}: Issue {1} no entry found in the issue table.".format(slug, issue_id))
                                    continue
                                try:
                                    db_link = session.query(IssueLink).filter(
                                        and_(IssueLink.repo_id == db_repo.id,
                                             IssueLink.sha == sha,
                                             IssueLink.issue_number == issue_id)).one()
                                    print(db_repo.id, "Touch")
                                    continue
                                except exc.NoResultFound:
                                    delta_open = (
                                        authored_datetime -
                                        gh_issue.created_at.replace(
                                            tzinfo=pytz.utc)).total_seconds()
                                    if gh_issue.closed_at is not None:
                                        delta_closed = (
                                            authored_datetime -
                                            gh_issue.closed_at.replace(
                                                tzinfo=pytz.utc)).total_seconds()
                                        # Counts only when authored while the
                                        # issue was open and it is not a PR.
                                        if delta_open > 0 and delta_closed <= 0 and gh_issue.pr_num is None:
                                            num_valid_issues += 1
                                    else:
                                        delta_closed = None
                                    db_link = IssueLink(
                                        db_repo.id, sha, line_num, issue_id,
                                        gh_issue.pr_num is not None,
                                        delta_open, delta_closed)
                                    session.add(db_link)
        # Persist the contributor map in id order.
        for (name, email), user_id in sorted(contributors.items(),
                                             key=lambda e: e[1]):
            try:
                db_user = session.query(User).filter(
                    and_(User.name == func.binary(name),
                         User.email == func.binary(email),
                         User.repo_id == db_repo.id)).one()
            except exc.NoResultFound:
                db_user = User(db_repo.id, user_id, name, email)
                session.add(db_user)
            except exc.MultipleResultsFound:
                # Would this happens because we allow name aliases during mining?
                # Should we deal with it? And how?
                logger.warning(
                    msg="Multiple entries for user \'{0}\' <{1}> in repo {2}".format(name, email, db_repo.slug))
        db_repo.min_commit = min_commit
        db_repo.max_commit = max_commit
        db_repo.total_commits = total_commits
        session.add(db_repo)
        session.commit()
        return slug
    except Exception as e:
        logger.error(msg="{0}: unknown error:\t{1}".format(slug, e))
        traceback.print_exc()
    finally:
        # NOTE(review): returning from finally swallows any in-flight exception.
        return slug
def import_user_dicts(users=None, _user=None, session=None):
    '''
    Import a list of dicts containing user info

    Args:
        users (list): user dictionaries
        _user (int): User id of admin making inventory changes

    Returns:
        dict: a dictionary that contains information about the function run
        ::
            data = {'status': either 'finished' or 'failed',
                    'message': message to be returned to the UI,
                    'log': message to be added to ShakeCast log
                           and should contain info on error}
    '''
    # The admin may be passed by id; resolve it to its User row.
    if isinstance(_user, int):
        _user = session.query(User).filter(User.shakecast_id == _user).first()
    if users is not None:
        for user in users:
            # Accept both upper-case (import file) and lower-case keys.
            username = user.get('USERNAME', user.get('username', ''))
            # input validation
            if not username:
                continue
            # get existing user
            u = session.query(User).filter(User.username == username).all()
            if u:
                u = u[0]
            else:
                u = User()
                u.username = username
            u.group_string = user.get('GROUP', user.get('group_string', ''))
            u.user_type = user.get('USER_TYPE', user.get('user_type', ''))
            u.full_name = user.get('FULL_NAME', user.get('full_name', ''))
            u.phone_number = user.get('PHONE_NUMBER', user.get('phone_number', ''))
            delivery = user.get('DELIVERY', user.get('delivery', False))
            if delivery:
                # MMS destination, with PAGER as a legacy fallback key.
                u.mms = delivery.get('MMS', delivery.get('mms', delivery.get('PAGER', delivery.get('pager', ''))))
            u.updated = time.time()
            # Record which admins have touched this account (CSV list).
            if _user is not None:
                if u.updated_by is None:
                    u.updated_by = _user.username
                elif _user.username not in u.updated_by:
                    updated_lst = u.updated_by.split(',')
                    updated_lst += [_user.username]
                    u.updated_by = ','.join(updated_lst)
            # set the user's password and email if they haven't changed it
            # themselves
            if (u.updated_by is None or
                    _user is None or
                    u.username not in u.updated_by or
                    _user.username == u.username):
                u.email = user.get('EMAIL_ADDRESS', user.get('email', ''))
                password = user.get('PASSWORD', user.get('password', None))
                if password is not None:
                    u.password = generate_password_hash(password,
                                                        method='pbkdf2:sha512')
            session.add(u)
        session.commit()
    # Re-resolve group membership now that users changed.
    add_users_to_groups(session=session)
    session.commit()
    log_message = ''
    status = 'finished'
    data = {'status': status,
            'message': {'from': 'import_user_dicts',
                        'title': 'User Upload',
                        'message': 'User update complete',
                        'success': True},
            'log': log_message}
    return data
def import_user_dicts(users=None, _user=None, session=None):
    '''
    Import a list of dicts containing user info

    Args:
        users (list): user dictionaries
        _user (int): User id of admin making inventory changes

    Returns:
        dict: a dictionary that contains information about the function run
        ::
            data = {'status': either 'finished' or 'failed',
                    'message': message to be returned to the UI,
                    'log': message to be added to ShakeCast log
                           and should contain info on error}
    '''
    # The admin may be passed by id; resolve it to its User row.
    if isinstance(_user, int):
        _user = session.query(User).filter(User.shakecast_id == _user).first()
    if users is not None:
        for user in users:
            # Accept both upper-case (import file) and lower-case keys.
            username = user.get('USERNAME', user.get('username', ''))
            # input validation
            if not username:
                continue
            # get existing user
            u = session.query(User).filter(User.username == username).all()
            if u:
                u = u[0]
            else:
                u = User()
                u.username = username
            u.group_string = user.get('GROUP', user.get('group_string', ''))
            u.user_type = user.get('USER_TYPE', user.get('user_type', ''))
            u.full_name = user.get('FULL_NAME', user.get('full_name', ''))
            u.phone_number = user.get('PHONE_NUMBER', user.get('phone_number', ''))
            delivery = user.get('DELIVERY', user.get('delivery', False))
            if delivery:
                # MMS destination, with PAGER as a legacy fallback key.
                u.mms = delivery.get(
                    'MMS',
                    delivery.get(
                        'mms',
                        delivery.get('PAGER', delivery.get('pager', ''))))
            u.updated = time.time()
            # Record which admins have touched this account (CSV list).
            if _user is not None:
                if u.updated_by is None:
                    u.updated_by = _user.username
                elif _user.username not in u.updated_by:
                    updated_lst = u.updated_by.split(',')
                    updated_lst += [_user.username]
                    u.updated_by = ','.join(updated_lst)
            # set the user's password and email if they haven't changed it
            # themselves
            if (u.updated_by is None or
                    _user is None or
                    u.username not in u.updated_by or
                    _user.username == u.username):
                u.email = user.get('EMAIL_ADDRESS', user.get('email', ''))
                password = user.get('PASSWORD', user.get('password', None))
                if password is not None:
                    u.password = generate_password_hash(password,
                                                        method='pbkdf2:sha512')
            session.add(u)
        session.commit()
    # Re-resolve group membership now that users changed.
    add_users_to_groups(session=session)
    session.commit()
    log_message = ''
    status = 'finished'
    data = {
        'status': status,
        'message': {
            'from': 'import_user_dicts',
            'title': 'User Upload',
            'message': 'User update complete',
            'success': True
        },
        'log': log_message
    }
    return data
def get_user():
    """Fetch the user with oauth id "12345", creating and saving it first
    when it does not exist yet."""
    found = User.get_by_oauth_id("12345")
    if found is not None:
        return found
    found = User("12345", "twit2")
    found.save()
    return found