示例#1
0
def getMockRewinder(logger, repoID, credentials):
    """Build a MockRewinder for *repoID* after deliberately issuing 1000
    identical GitHub API fetches.

    NOTE(review): the repeated-fetch loop appears intentional ("maxing out
    calls to github" in the debug message) — confirm before removing it.
    """
    logger.debug("getMock, repoID: " + str(repoID) +
                 " maxing out calls to github.")
    repo_url = "https://api.github.com/repositories/" + str(repoID)
    # Fetch the same repository record 1000 times on purpose.
    for _ in range(1000):
        repoJSON = util.get_json(logger, repo_url, credentials)
        repo_name = repoJSON['name']

    default_branch = repoJSON['default_branch']
    logger.debug("getMock - default branch name: " + default_branch)
    clone_url = repoJSON['clone_url']

    # Turn the templated branches_url ("...{/branch}") into the concrete URL
    # of the default branch, then pull its head commit.
    masterURL = re.sub(r'({.*)$', '/' + default_branch,
                       repoJSON['branches_url'])
    masterJSON = util.get_json(logger, masterURL, credentials)
    masterSHA = masterJSON['commit']['sha']
    masterDate = masterJSON['commit']['commit']['committer']['date']
    masterDateS = calendar.timegm(
        dateutil.parser.parse(masterDate).utctimetuple())

    meta = {
        "repoID": str(repoID),
        "default_branch": str(default_branch),
        "clone_url": clone_url,
        "name": repo_name
    }
    data = {"sha": masterSHA, "commitS": masterDateS}
    sourceJSON = json.dumps({"type": "Mock_Github", "meta": meta,
                             "data": data})
    return rewinder.MockRewinder(repoID, sourceJSON)
示例#2
0
    def refresh_repos(self, lastRepo):
        """Scan one page of GitHub's /repositories listing after *lastRepo*
        and record the IDs of projects that contain Python.

        Side effects: appends to self.repos, advances self.lastRepoID,
        bumps self.nProjects / self.nRRFailures, and sets self.exhausted
        when GitHub returns no more repositories.  Recurses onto the next
        page when this page yields no Python projects, and returns early
        once a repo ID exceeds self.stop.
        """
        self.log("refresh_repos, lastRepo: " + str(lastRepo))
        sinceLastJSON = util.get_json(
            self.logger,
            'https://api.github.com/repositories?since=' + str(lastRepo),
            self.credentials, True)
        # A missing or effectively-empty page means the listing is exhausted.
        if sinceLastJSON is None or len(str(sinceLastJSON)) < 3:
            self.exhausted = True
            return
        repoID = "errorRepoID"
        projectCounter = 0
        for projectJSON in sinceLastJSON:
            repoID = projectJSON['id']
            self.lastRepoID = repoID
            if repoID > self.stop:
                self.logger.info(
                    "GiPyS - refresh_repos, stopped refreshing with stop:" +
                    str(self.stop) + " and current repoID: " + str(repoID))
                return
            try:
                languagesJSON = util.get_json(self.logger,
                                              projectJSON['languages_url'],
                                              self.credentials, True)
                pythonProjectFound = "Python" in languagesJSON
                self.logger.info("GiPyS - refresh_repos, repoID:" +
                                 str(repoID) + " hasPython: " +
                                 str(pythonProjectFound) + " name: " +
                                 projectJSON['name'])
                self.logger.debug("GiPyS - refresh_repos, since:" +
                                  str(self.nextID) + " len(repos): " +
                                  str(len(self.repos)) + " languagesJSON: " +
                                  str(languagesJSON))
                if pythonProjectFound:
                    self.repos.append(projectJSON['id'])
            except KeyboardInterrupt:
                raise
            except Exception as ex:
                # Narrowed from a bare ``except:`` so SystemExit and friends
                # still propagate; the caught exception is now logged too.
                self.logger.warning(
                    "GiPyS - refresh_repos, failed to finish entire page of repos starting at lastRepo: "
                    + str(lastRepo) + " but succeeded up to but excluding: " +
                    str(repoID) + " which is the first " +
                    str(projectCounter) +
                    " projects.  this troublesome url will be skipped." +
                    " ex: " + str(ex))
                self.nRRFailures += 1
            projectCounter += 1
            self.nProjects += 1

        # repos can be empty if there are no python projects on this page
        if not self.repos:
            self.logger.info(
                "GiPyS - refresh_repos, no repos found on page starting at:" +
                str(lastRepo))
            self.refresh_repos(repoID)
            return
        self.logger.info("GiPyS - refresh_repos, finished refreshing, found " +
                         str(len(self.repos)) + " Python repos in " +
                         str(projectCounter) +
                         " Github projects with last scanned repoID: " +
                         str(repoID))
示例#3
0
def get_articles(
    article_id: str = None,
    article_title: str = None,
    tag: str = None,
    page_num: int = 1,
    page_size: int = 30,
    current_user: schemas.UserVerify = Depends(deps.get_current_user)
) -> JSONResponse:
    """Return one article (when article_id is given) or a paginated,
    optionally filtered, list of all articles."""
    if article_id is None:
        # List mode: filter by tag / title and paginate.
        page = crud_articles.get_all_articles(tag=tag,
                                              article_title=article_title,
                                              page_num=page_num,
                                              page_size=page_size)
        if page is None:
            return JSONResponse(status_code=500,
                                content={"message": "No Articles Found"})
        body = {
            "total_pages": page.pages,
            "total_items": page.total_items,
            "page_data": {
                "page_num": page_num,
                "item_count": page.page_size,
                "items": get_json(page.items)
            }
        }
        return JSONResponse(status_code=200, content=body)
    # Single-article mode.
    article = crud_articles.get_article(article_id=article_id)
    if article is None:
        return JSONResponse(status_code=500,
                            content={"message": "No Records Found"})
    return JSONResponse(status_code=200, content=jsonable_encoder(article))
示例#4
0
 def __init__(self):
     """Load the current data JSON plus every source workbook, then build
     all derived summary JSON blobs via the get_* extractor methods."""
     self.current_data_json = get_json(config.data_json_filename)
     # self.patients_file = get_xlsx(config.patients_xlsx, "patients.xlsx")
     self.patients_and_inspections_file = get_xlsx(
         config.patients_and_inspections_xlsx, "patients_and_inspections.xlsx"
     )
     # self.patients_sheet = self.patients_file["Sheet1"]
     # self.inspections_file = get_xlsx(config.inspections_xlsx, "inspections.xlsx")
     # self.inspections_sheet = self.inspections_file["モトデータ"]
     # self.main_summary_sheet = self.inspections_file["総括表"]
     # The keys below are the Japanese worksheet titles used in the published
     # workbook (patients / inspections / pivot-summary sheets respectively).
     self.patients_sheet = self.patients_and_inspections_file["【公開OK】府HP用(陽性者ベース)"]
     self.inspections_sheet = self.patients_and_inspections_file["【公開OK】コロナサイト用(日付ベース)"]
     self.main_summary_sheet = self.patients_and_inspections_file["【公開OK】ピポット集計"]
     self.contacts1_file = get_xlsx(config.contacts1_xlsx, "contacts1.xlsx")
     self.contacts1_sheet = self.contacts1_file["Sheet1"]
     self.contacts2_file = get_xlsx(config.contacts2_xlsx, "contacts2.xlsx")
     self.contacts2_sheet = self.contacts2_file["Sheet1"]
     # Per-sheet row offsets — presumably the first data row below the
     # headers in each worksheet; TODO confirm against the workbooks.
     self.patients_count = 3
     self.inspections_count = 3
     self.contacts1_count = 3
     self.contacts2_count = 4
     # Caches for the JSON fragments produced by the extractors below.
     self._data_json = {}
     self._patients_json = {}
     self._patients_summary_json = {}
     self._inspections_summary_json = {}
     self._contacts1_summary_json = {}
     self._contacts2_summary_json = {}
     self._treated_summary_json = {}
     self._main_summary_json = {}
     # JST timestamp recorded as the dataset's last-update marker.
     self.last_update = datetime.today().astimezone(jst).strftime("%Y/%m/%d %H:%M")
     self.get_patients()
     self.get_inspections()
     self.get_contacts1()
     self.get_contacts2()
示例#5
0
def get_users(
    user_id: str = None,
    page_num: int = 1,
    current_user: schemas.UserVerify = Depends(deps.get_current_user)
) -> JSONResponse:
    """Return one user (when user_id is given) or a paginated list of all
    users."""
    if user_id is None:
        # List mode: paginate over every user.
        page = crud_users.get_all_user(page_num=page_num)
        if page is None:
            return JSONResponse(status_code=500,
                                content={"message": "No Users Found"})
        body = {
            "total_pages": page.pages,
            "total_items": page.total_items,
            "page_data": {
                "page_num": page_num,
                "item_count": page.page_size,
                "items": get_json(page.items)
            }
        }
        return JSONResponse(status_code=200, content=body)
    # Single-user mode.
    record = crud_users.get_user_id(id=user_id)
    if record is None:
        return JSONResponse(status_code=500,
                            content={"message": "No User Found"})
    return JSONResponse(status_code=200, content=record.to_json())
def login_user(user: schemas.UserLogIn) -> JSONResponse:
    """Login user and Return Access Token.

    Validates the credentials, records a login session, and returns a
    Bearer JWT together with the session id and the serialized user record.
    Returns 400 with a generic message on any credential failure.
    """
    db_user = get_active_user(email=user.email)
    # BUGFIX: removed leftover debug ``print(db_user)`` — it dumped the raw
    # user record (potentially sensitive) to stdout on every login attempt.
    if db_user is None:
        return JSONResponse(status_code=400,
                            content={"message": "Invalid Credentials"})
    is_password_correct = crud_login.check_username_password(
        email=user.email,
        password=user.password)
    if is_password_correct is False:
        return JSONResponse(status_code=400,
                            content={"message": "Invalid Credentials"})
    # Credentials are valid: create a session and a signed access token.
    uid = str(uuid.uuid4().hex)
    crud_login.login_user(user=user, session_id=uid)
    access_token_expires = timedelta(
        minutes=ProjectSettings.ACCESS_TOKEN_EXPIRE_MINUTES)
    token = access_token.create_access_token(
        data={"sub": user.email},
        expires_delta=access_token_expires)
    return JSONResponse(status_code=200,
                        content={"access_token": token,
                                 "token_type": "Bearer",
                                 "session_id": uid,
                                 "user": get_json(get_user(
                                     email=user.email))})
示例#7
0
def get_price(horizon_host, pair):
    """return last trade price as DatedPrice"""
    # Single-argument parenthesized print behaves identically under Python 2.
    print("fetching latest price for:" + pair["name"])
    query = make_trade_params(pair)
    trades_json = get_json(horizon_host + "/trades", query)
    try:
        latest = trades_json["_embedded"]["records"][0]
    except IndexError:
        # No trades at all for this pair: report epoch date and zero price.
        return DatedPrice(date=datetime.utcfromtimestamp(0), price=0)
    ratio = float(latest["price"]["n"]) / float(latest["price"]["d"])
    closed_at = parser.parse(latest["ledger_close_time"], ignoretz=True)
    return DatedPrice(date=closed_at, price=ratio)
示例#8
0
 def get_prediction(input_path):
     """Run the model on the image at *input_path* and return the decoded
     predictions as a JSON response."""
     input_img = load_img(input_path, single_input_shape, grayscale)
     with get_evaluation_context():
         raw_predictions = model.predict(input_img)
         decoded = decode_predictions(raw_predictions)
         return jsonify(json.loads(get_json(decoded)))
示例#9
0
def aggregate_pair(horizon_host, pair, start, end, resolution):
    """
    fetch all trades from given time period and aggregate
    :return a tuple of (base_volume, counter_volume, trade_count)
    """
    print "aggregating pair:", pair["name"]
    values = (0, 0, 0)
    params = make_aggregation_params(pair, start, end, resolution)
    url = horizon_host + "/trade_aggregations?" + urlencode(params)
    consumed = False
    while not consumed:
        print "fetching url:", url
        json_result = get_json(url)
        records = json_result['_embedded']['records']
        for record in records:
            values = sum_tuples(values, record_to_tuple(record))
        consumed = len(records) < PAGE_LIMIT
        url = json_result["_links"]["next"]["href"]
    return values
示例#10
0
base_asset_code = "EURT"
base_asset_issuer = "GAP5LETOV6YIE62YAM56STDANPRDO7ZFDBGSNHJQIYGGKSMOZAHOOS2S"
counter_asset_code = "PHP"
counter_asset_issuer = "GBUQWP3BOUZX34TOND2QV7QQ7K7VJTG6VSE7WMLBTMDJLLAW7YKGU6EP"

[[pair]]
name = "BTC_XEL"
base_asset_code = "BTC"
base_asset_issuer = "GATEMHCCKCY67ZUCKTROYN24ZYT5GK4EQZ65JJLDHKHRUZI3EUEKMTCH"
counter_asset_code = "XEL"
counter_asset_issuer = "GAXELY4AOIRVONF7V25BUPDNKZYIVT6CWURG7R2I6NQU26IQSQODBVCS"
''')

next_url = "https://horizon.stellar.org/assets?limit=200"
while True:
    o = get_json(next_url)
    next_url = o['_links']['next']['href']
    records = o['_embedded']['records']

    for r in records:
        key = r['asset_code'] + ':' + r['asset_issuer']
        if key not in whitelist:
            if r['asset_code'] in blacklist_code:
                continue

            if r['flags']['auth_required']:
                account_limit = min_account_holders_auth_required
            else:
                account_limit = min_account_holders
            if r['num_accounts'] < account_limit:
                continue
示例#11
0
def nCommitsGithubRewinder(logger, repoID, repo_path, report_path,
                           uniqueSourceID, n, rewinder_type, credentials):
    """Clone GitHub repo *repoID* into a fresh directory under *repo_path*
    and return a GitRewinder over up to *n* commits, spaced roughly evenly
    through history (the most recent commit is always included).

    Raises RuntimeWarning whenever the clone cannot be verified, so the
    caller never accidentally rewinds the tour_de_source project itself.
    NOTE(review): *report_path* is unused here; kept for interface
    compatibility — confirm with callers before removing.
    """
    # this is the path of the new directory to clone the project into
    repoIDPath = repo_path + str(repoID) + "/"
    logger.debug("Factory, repoIDPath: " + repoIDPath)

    # get metadata for sourceJSON
    repoJSON = util.get_json(
        logger, "https://api.github.com/repositories/" + str(repoID),
        credentials, True)
    repo_name = repoJSON['name']
    default_branch = repoJSON['default_branch']
    logger.debug("nCoGiRe - default branch name: " + default_branch)
    clone_url = repoJSON['clone_url']

    # make sure that repo_path exists, or create it.
    if not os.path.exists(repo_path):
        logger.critical(
            "Factory - nCoGiRe, this path does not exist!! repo_path: " +
            repo_path)
        os.mkdir(repo_path)

    # try to empty this folder and clone into a new folder in repo_path
    for try_i in range(1, 4):
        if (try_i > 1):
            logger.info("nCoGiRe, attempt " + str(try_i) +
                        " to remove contents of repo_path: " + str(repo_path) +
                        " and then clone using clone_url: " + clone_url)
        try:
            # try to erase everything in repo_path
            repoPathContents = [
                os.path.join(repo_path, f) for f in os.listdir(repo_path)
            ]
            logger.info("nCoGiRe, repoPathContents: " + str(repoPathContents))
            if repoPathContents:
                # shuffle: don't always get stuck behind a problem directory
                random.shuffle(repoPathContents)
                for x in repoPathContents:
                    try:
                        util.erasePath(x, logger)
                    except Exception as ex:
                        logger.warning(
                            "exception when trying to erasePath on: " + x +
                            " ex: " + str(ex))
                # I've had enough problems erasing the repo_path.  forget it.
                # all that matters is that we can clone into an empty directory,
                # and in the long run of course we want to avoid running out of
                # disk space, so trying to erase all these directories is good

            # make a new directory just for this new project - this should
            # always succeed and allow git to clone, since all repoIDs are
            # unique
            os.mkdir(repoIDPath)
            git.repo.base.Repo.clone_from(clone_url,
                                          repoIDPath,
                                          None,
                                          branch=default_branch)
            break
        except Exception as e:
            logger.info("Factory - nCoGiRe, unexpected error: " + str(e))

    # just to be safe, always do these sanity checks.
    # the repoIDPath should exist and contain the new ".git" every time.
    if not os.path.exists(repoIDPath):
        logger.info("Factory - nCoGiRe, path missing!")
        raise RuntimeWarning(
            "do not proceed with repoIDPath directory missing - you will rewind the tour_de_source project!"
        )
    repoIDPathContents = [
        os.path.join(repoIDPath, f) for f in os.listdir(repoIDPath)
    ]
    logger.info("contents of repoIDPath: " + str(repoIDPathContents))
    if not repoIDPathContents:
        logger.info("Factory - nCoGiRe, repoIDPathContents is empty!")
        raise RuntimeWarning(
            "do not proceed with repoIDPath directory empty - you will rewind the tour_de_source project!"
        )
    if repoIDPath + ".git" not in repoIDPathContents:
        logger.info(
            "Factory - nCoGiRe, repoIDPathContents has no git!  cloning did not succeed."
        )
        raise RuntimeWarning(
            "do not proceed with repoIDPath directory having no .git folder - you will rewind the tour_de_source project!"
        )

    # now get the time and sha for commits
    commitList = []
    repoGit = git.Git(repoIDPath)
    hexshas = repoGit.log("--format=format:\"%ct %H\"").split('\n')
    for line in hexshas:
        cleanedLine = line[line.find('"') + 1:line.rfind('"')]
        pair = cleanedLine.split(' ')

        # BUGFIX: validate the line BEFORE indexing pair[1].  Previously
        # sourceJson was built first, so a blank or malformed log line
        # raised IndexError instead of being skipped by this check.
        if len(pair) != 2:
            continue

        # this json is what couples each rewinded data source to a particular
        # github project version.  For bitbucket or another source, many of
        # the inner fields should be different, but the three root fields:
        # type, meta and data should be the same for all future sourceJson
        # objects like this.
        sourceJson = json.dumps({
            "type": "Github",
            "meta": {
                "repoID": str(repoID),
                "default_branch": str(default_branch),
                "clone_url": clone_url,
                "name": repo_name
            },
            "data": {
                "sha": str(pair[1]),
                "commitS": str(pair[0])
            }
        })
        commitList.append((pair[0], pair[1], sourceJson))

    # here we are sorting by the first field of the tuple: the commit time
    commitList.sort(reverse=True)
    nCommits = len(commitList)
    if nCommits <= n:
        # return a rewinder with all commits
        stack = commitList
    else:
        # return a rewinder with n commits, spaced out about evenly
        unitFloat = nCommits / float(n - 1)
        stack = []

        # always put the most recent first
        stack.append(commitList[0])

        # put the rest
        for multiplier in range(1, n):
            commitIndex = int(multiplier * unitFloat) - 1
            stack.append(commitList[commitIndex])

    # remember python lists pop at the tail...
    stack.reverse()
    return rewinder.GitRewinder(repoIDPath, rewinder_type, uniqueSourceID,
                                stack)
示例#12
0
File: server.py  Project: wavelets/quiver
 def get_prediction(input_path):
     """Predict on the image at *input_path* and return the decoded result
     as a JSON response."""
     input_img = load_img(input_path, single_input_shape)
     with graph.as_default():
         raw_predictions = model.predict(input_img)
         decoded = decode_predictions(raw_predictions)
         return jsonify(json.loads(get_json(decoded)))