Example #1
async def get_dailies(session: ClientSession, header: dict,
                      wfwx_stations: List[WFWXWeatherStation],
                      time_of_interest: datetime) -> List[dict]:
    """ Get the daily actuals/forecasts for the given station ids. """
    # build a list of wfwx station id's
    wfwx_station_ids = [wfwx_station.wfwx_id for wfwx_station in wfwx_stations]

    timestamp_of_interest = math.floor(time_of_interest.timestamp() * 1000)

    # For local dev, we can use redis caching to reduce load on the wf1 api, and it generally
    # just makes development faster. For production it's more tricky - we don't want to put too
    # much load on the wf1 api, but we don't want stale values either. We default to 5 minutes,
    # or 300 seconds.
    cache_expiry_seconds = config.get(
        'REDIS_DAILIES_BY_STATION_CODE_CACHE_EXPIRY', 300)
    use_cache = cache_expiry_seconds is not None and config.get(
        'REDIS_USE') == 'True'

    dailies_iterator = fetch_paged_response_generator(
        session,
        header,
        BuildQueryDailiesByStationCode(timestamp_of_interest,
                                       timestamp_of_interest,
                                       wfwx_station_ids),
        'dailies',
        use_cache=use_cache,
        cache_expiry_seconds=cache_expiry_seconds)

    return dailies_iterator
Example #2
def install_split_artifacts(dn):
    '''Create the .meta files for a split system

    Given a list of artifacts to split, writes new .meta files to
    the baserock dir in dn['install'] and copies the files from the
    sandbox to the dn['install']

    '''
    all_splits = []
    for i in app.defs.defaults.get_split_rules('stratum'):
        all_splits += [i['artifact']]
    for index, content in enumerate(dn['contents']):
        for stratum, artifacts in content.items():
            if artifacts == []:
                if config.get('default-splits', []) != []:
                    for split in config.get('default-splits'):
                        artifacts += [app.defs.get(stratum)['name'] + split]
                else:
                    for split in all_splits:
                        artifacts += [os.path.basename(stratum) + split]

        dn['contents'][index] = {stratum: artifacts}

    for content in dn['contents']:
        key = content.keys()[0]
        stratum = app.defs.get(key)
        move_required_files(dn, stratum, content[key])
Example #3
def init_logging(flask_app, config):
    """
    Set up logging.

    Flask automatically writes to stderr in debug mode, so we only configure
    the Flask log in production mode.
    """

    log_string_format = '%(asctime)s [%(name)s] %(levelname)s: %(message)s'
    log_date_format = '%Y-%m-%d %H:%M:%S'
    log_formatter = logging.Formatter(log_string_format, log_date_format)

    log_level_name = config.get('logging', 'log_level')
    try:
        log_level = getattr(logging, log_level_name.upper())
    except AttributeError:
        raise ValueError("Invalid log level: %s" % log_level_name)

    if not flask_app.debug:
        log_file = config.get('logging', 'log_file')
        log_handler = logging.FileHandler(log_file)
        log_handler.setLevel(log_level)
        log_handler.setFormatter(log_formatter)

        flask_app.logger.addHandler(log_handler)

    if flask_app.debug_db:
        # SQL Alchemy logging is very verbose and is only turned on when
        # explicitly asked for.
        db_log_handler = logging.StreamHandler(sys.stderr)
        db_log_handler.setFormatter(log_formatter)

        db_logger = logging.getLogger('sqlalchemy.engine')
        db_logger.setLevel(logging.INFO)
        db_logger.addHandler(db_log_handler)
Example #4
def init_logging(flask_app, config):
    """
    Set up logging.

    Flask automatically writes to stderr in debug mode, so we only configure
    the Flask log in production mode.
    """

    log_string_format = '%(asctime)s [%(name)s] %(levelname)s: %(message)s'
    log_date_format = '%Y-%m-%d %H:%M:%S'
    log_formatter = logging.Formatter(log_string_format, log_date_format)

    log_level_name = config.get('logging', 'log_level')
    try:
        log_level = getattr(logging, log_level_name.upper())
    except AttributeError:
        raise ValueError("Invalid log level: %s" % log_level_name)

    if not flask_app.debug:
        log_file = config.get('logging', 'log_file')
        log_handler = logging.FileHandler(log_file)
        log_handler.setLevel(log_level)
        log_handler.setFormatter(log_formatter)

        flask_app.logger.addHandler(log_handler)

    if flask_app.debug_db:
        # SQL Alchemy logging is very verbose and is only turned on when
        # explicitly asked for.
        db_log_handler = logging.StreamHandler(sys.stderr)
        db_log_handler.setFormatter(log_formatter)

        db_logger = logging.getLogger('sqlalchemy.engine')
        db_logger.setLevel(logging.INFO)
        db_logger.addHandler(db_log_handler)
Example #5
    def post(self):
        args = password_reset_post.parse_args()
        email = args['email']
        user = user_model.get_by_email(email)
        if user is None:
            api.abort(404, 'User not found')
        token = user.get_session_token(expiration=21600, password_reset=True)
        if mail.is_email_valid(config.get('mail_sender')):
            sender = config.get('mail_sender')
        else:
            app_id = app_identity.get_application_id()
            sender = "%s <noreply@%s.appspotmail.com>" % (app_id, app_id)
        try:
            message = mail.EmailMessage()
            message.sender = sender
            message.subject = 'Password reset request'
            message.to = email
            message.body = '''
You're receiving this email because you requested a password reset.
Visit the following page to reset your password:

%s?reset_token=%s
''' % (config.get('password_reset_url'), token)
            message.send()
        except Exception:
            api.abort(500)
        return '', 200
Example #6
def send_rocketchat_notification(text: str, exc_info: Exception) -> dict:
    """ Sends message with specified text to configured Rocketchat channel.

    We don't want this method to raise any exceptions, as we don't want to
    unintentionally break any kind of error management flow. (We only use
    rocket chat notification when something goes wrong).

    If you want to know if this method worked or not, you'll have to inspect
    the response.
    """
    full_message = f"{datetime.now(tz=timezone.utc).isoformat()}\n{text}\n\
        {config.get('HOSTNAME')}: {exc_info}\n\
        {traceback.format_exception(etype=type(exc_info),value=exc_info,tb=exc_info.__traceback__)}"
    result = None
    try:
        response = requests.post(
            config.get('ROCKET_URL_POST_MESSAGE'),
            headers={
                'X-Auth-Token': config.get('ROCKET_AUTH_TOKEN'),
                'X-User-Id': config.get('ROCKET_USER_ID'),
                'Content-Type': 'application/json'
            },
            json={
                'channel': config.get('ROCKET_CHANNEL'),
                'text': full_message
            }
        )
        result = response.json()
    except Exception as exception:  # pylint: disable=broad-except
        # not doing exc_info=exception - as this causes a lot of noise, and we're more interested
        # in the main code!
        logger.error('failed to send rocket chat notification %s', exception)
    return result
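A minimal usage sketch (the failing function and message are hypothetical): the helper above is intended to be called from an error handler, once the ROCKET_* settings it reads are configured.

def do_risky_work():
    # Hypothetical operation that fails.
    raise RuntimeError('something went wrong')

try:
    do_risky_work()
except Exception as exc:  # pylint: disable=broad-except
    send_rocketchat_notification('do_risky_work failed', exc)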
Example #7
def install_split_artifacts(dn):
    '''Create the .meta files for a split system

    Given a list of artifacts to split, writes new .meta files to
    the baserock dir in dn['install'] and copies the files from the
    sandbox to the dn['install']

    '''
    all_splits = []
    for i in app.defs.defaults.get_split_rules('stratum'):
        all_splits += [i['artifact']]
    for index, content in enumerate(dn['contents']):
        for stratum, artifacts in content.items():
            if artifacts == []:
                if config.get('default-splits', []) != []:
                    for split in config.get('default-splits'):
                        artifacts += [app.defs.get(stratum)['name'] + split]
                else:
                    for split in all_splits:
                        artifacts += [os.path.basename(stratum) + split]

        dn['contents'][index] = {stratum: artifacts}

    for content in dn['contents']:
        key = content.keys()[0]
        stratum = app.defs.get(key)
        move_required_files(dn, stratum, content[key])
Example #8
def retrieve_metadata(bibcode, search_identifiers=False):
    """
    From the API retrieve the set of metadata we want to know about the record.
    """
    params = {
        "q": search_identifiers and 'identifier:"{0}"'.format(bibcode) or 'bibcode:"{0}"'.format(bibcode),
        "fl": "author,bibcode,identifier",
    }
    r = requests.get(
        config.get("API_SOLR_QUERY_ENDPOINT"),
        params=params,
        headers={"Accept": "application/json", "Authorization": "Bearer:%s" % config.get("API_TOKEN")},
    )
    if r.status_code != 200:
        raise Exception("{}\n{}\n{}".format(r.status_code, params, r.text))
    else:
        data = r.json().get("response", {})
        if data.get("numFound") == 1:
            docs = data.get("docs", [])
            return docs[0]
        elif data.get("numFound") == 0:
            if search_identifiers:
                bibcode_cache.setdefault(bibcode, {})  # insert to prevent failed retrievals
                raise IgnorableException(u"No metadata found for identifier:{0}".format(bibcode))
            else:
                return retrieve_metadata(bibcode, search_identifiers=True)
        else:
            if data.get("numFound") > 10:
                raise IgnorableException(u"Insane num of results for {0} ({1})".format(bibcode, data.get("numFound")))
            docs = data.get("docs", [])
            for d in docs:
                for ir in d.get("identifier", []):
                    if ir.lower().strip() == bibcode.lower().strip():
                        return d
            raise IgnorableException(u"More than one document found for {0}".format(bibcode))
Example #9
def retrieve_metadata(bibcode, search_identifiers=False):
    """
    From the API retrieve the set of metadata we want to know about the record.
    """
    params = {
        'q': search_identifiers and 'identifier:"{0}"'.format(bibcode) or 'bibcode:"{0}"'.format(bibcode),
        'fl': 'author,bibcode,identifier'
    }
    r = requests.get(config.get('API_SOLR_QUERY_ENDPOINT'),
                     params=params,
                     headers={'Accept': 'application/json',
                              'Authorization': 'Bearer:%s' % config.get('API_TOKEN')})
    if r.status_code != 200:
        raise Exception('{}\n{}\n{}'.format(r.status_code, params, r.text))
    else:
        data = r.json().get('response', {})
        if data.get('numFound') == 1:
            docs = data.get('docs', [])
            return docs[0]
        elif data.get('numFound') == 0:
            if search_identifiers:
                bibcode_cache.setdefault(bibcode, {}) # insert to prevent failed retrievals
                raise IgnorableException(u'No metadata found for identifier:{0}'.format(bibcode))
            else:
                return retrieve_metadata(bibcode, search_identifiers=True)
        else:
            if data.get('numFound') > 10:
                raise IgnorableException(u'Insane num of results for {0} ({1})'.format(bibcode, data.get('numFound')))
            docs = data.get('docs', [])
            for d in docs:
                for ir in d.get('identifier', []):
                    if ir.lower().strip() == bibcode.lower().strip():
                        return d
            raise IgnorableException(u'More than one document found for {0}'.format(bibcode))
Example #10
def relpath(path):
    """
    Prefix relative URLs with the Dash app name in case that
    the app is deployed on dev environments.
    """
    if config.get('base_path'):
        return '/{}{}'.format(config.get('base_path'), path)
    return path
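A minimal usage sketch, assuming config is the module's dict-like settings object and 'dash-app' is a hypothetical base path:

config['base_path'] = 'dash-app'
relpath('/assets/style.css')    # -> '/dash-app/assets/style.css'

config.pop('base_path', None)
relpath('/assets/style.css')    # -> '/assets/style.css'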
Example #11
def plain(dataset=None):
    dns = config.get('Instance')['PublicDNS']
    port = config.get('Instance')['ShinyPort']
    protocol = config.get('Instance')['Protocol']

    address = '{0}://{1}:{2}/{3}/'.format(protocol, dns, port, dataset)

    return render_template('plain_page.html', dataset=dataset,
                           analysis_address=address)
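A hedged sketch of the configuration shape this view assumes (all values are illustrative):

config = {
    'Instance': {
        'PublicDNS': 'shiny.example.com',
        'ShinyPort': 3838,
        'Protocol': 'https',
    }
}
# plain('iris') would then render plain_page.html with
# analysis_address == 'https://shiny.example.com:3838/iris/'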
Example #12
    def __init__(self):
        self.host = config.get("default", "FTP_HOST")
        self.user = config.get("default", "FTP_USER")
        passw = config.get("default", "FTP_PASSWORD")
        logging.info("setting up FTPClient for {}@{}".format(
            self.user, self.host))
        try:
            self.client = FTP(self.host, timeout=10)
            self.client.login(user=self.user, passwd=passw)
        except Exception as e:
            logging.error(e)
Example #13
    def validate_schema(self, schemas, data):
        if schemas == {} or \
                config.get('schema-validation', False) is False:
            return
        try:
            jsonschema.validate(data, schemas[data.get('kind', None)])
        except jsonschema.exceptions.ValidationError as e:
            if config.get('schema-validation') == 'strict':
                exit(data, 'ERROR: schema validation failed:\n', e)

            log(data, 'WARNING: schema validation failed:')
            print(e)
Example #14
    def post(self):
        req_data = self.post_parser.parse_args()
        user = User.query.filter_by(email=req_data.get("email")).first()
        if not user:
            return "Wrong email or password!", 404

        try:
            token = user.encode_user_token(user.id,
                                           config.get("TOKEN_EXPIRES"),
                                           config.get("JWT_SECRET"))
            return {"token": token.decode()}, 200
        except Exception as e:
            return "", 404
Example #15
    def _fix_keys(self, dn, base=None):
        '''Normalizes keys for a definition dict and its contents

        Some definitions have a 'morph' field which is a relative path. Others
        only have a 'name' field, which has no directory part. A few do not
        have a 'name'

        This sets our key to be 'path', and fixes any missed 'name' to be
        the same as 'path' but replacing '/' by '-'

        '''

        exit = (config.get('check-definitions') == 'exit')

        if dn.get('morph'):
            if not os.path.isfile(dn.get('morph')):
                log('DEFINITION', 'WARNING: missing', dn['morph'], exit=exit)
            dn['path'] = self._demorph(dn.pop('morph'))

        if 'path' not in dn:
            if 'name' not in dn:
                log(dn, 'No path, no name?', exit=True)
            if config.get('artifact-version', 0) in range(0, 4):
                dn['path'] = dn['name']
            else:
                dn['path'] = os.path.join(self._demorph(base), dn['name'])
                if os.path.isfile(dn['path'] + '.morph'):
                    # morph file exists, but is not mentioned in stratum
                    # so we ignore it
                    log(dn,
                        'WARNING: ignoring',
                        dn['path'] + '.morph',
                        exit=exit)
                    dn['path'] += '.default'

        dn['path'] = self._demorph(dn['path'])
        dn.setdefault('name', os.path.basename(dn['path']))

        if dn.get('name') == config['target']:
            config['target'] = dn['path']

        n = self._demorph(os.path.basename(dn['name']))
        p = self._demorph(os.path.basename(dn['path']))
        if os.path.splitext(p)[0] not in n:
            log('MORPHS',
                'WARNING: %s wrong name' % dn['path'],
                dn['name'],
                exit=exit)

        for system in (dn.get('systems', []) + dn.get('subsystems', [])):
            self._fix_keys(system)
Example #16
    def file_done(file):
        """
        will be called whenever a Process is finished
        :param file: the File object of the File that is done
        """

        # delete from "processes"
        ProcessRepository.processes.pop(file.id)

        # remove original file from disk if desired
        if config.getboolean("encoding", "delete_old_file"):
            os.remove(file.filename)

        # rename file if desired
        if config.getboolean("encoding", "rename_enabled"):
            rename_search = config.get("encoding", "rename_search")
            rename_replace = config.get("encoding", "rename_replace")

            # get pathinfo
            pathinfo = os.path.split(file.filename)
            path = pathinfo[0]
            old_filename = pathinfo[1]

            # only rename if match occurs
            if re.match(rename_search, old_filename):
                new_filename = re.sub(rename_search, rename_replace,
                                      old_filename)
                # rename output_filename (created by ffmpeg, see process.py) to new_filename
                os.rename(path + os.sep + file.output_filename,
                          path + os.sep + new_filename)

        # update status to "finished"
        db.session.query(File).filter_by(id=file.id).update(
            dict(status=StatusMap.finished.value))
        db.session.commit()

        # check if it's necessary to start new processes
        ProcessRepository.check_and_start_processes()

        # notify client
        socketio.emit(
            "file_done", {
                "data": {
                    "id": file.id,
                    "count_active": ProcessRepository.count_processes_active(),
                    "count_queued": ProcessRepository.count_processes_queued(),
                    "count_total": ProcessRepository.count_processes_total(),
                }
            })

        app.logger.debug("Done with encoding of %s" % file.filename)
Example #17
def prepare_fetch_hourlies_query(raw_station: dict, start_timestamp: datetime,
                                 end_timestamp: datetime):
    """ Prepare url and params to fetch hourly readings from the WFWX Fireweather API.
    """
    base_url = config.get('WFWX_BASE_URL')

    logger.debug('requesting historic data from %s to %s', start_timestamp,
                 end_timestamp)

    # Prepare query params and query:
    query_start_timestamp = math.floor(start_timestamp.timestamp() * 1000)
    query_end_timestamp = math.floor(end_timestamp.timestamp() * 1000)

    station_id = raw_station['id']
    params = {
        'startTimestamp': query_start_timestamp,
        'endTimestamp': query_end_timestamp,
        'stationId': station_id
    }
    endpoint = (
        '/v1/hourlies/search/'
        'findHourliesByWeatherTimestampBetweenAndStationIdEqualsOrderByWeatherTimestampAsc'
    )
    url = f'{base_url}{endpoint}'

    return url, params
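A hedged usage sketch (the coroutine name, time window and headers are assumptions): the returned url and params are handed straight to an HTTP GET against the WFWX API.

from datetime import datetime, timedelta, timezone
from aiohttp import ClientSession

async def fetch_hourlies(raw_station: dict, headers: dict) -> dict:
    end_timestamp = datetime.now(tz=timezone.utc)
    start_timestamp = end_timestamp - timedelta(hours=24)
    url, params = prepare_fetch_hourlies_query(raw_station, start_timestamp, end_timestamp)
    async with ClientSession() as session:
        async with session.get(url, params=params, headers=headers) as response:
            return await response.json()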
Example #18
def install_dependencies(dn, dependencies=None):
    '''Install recursed dependencies of dn into dn's sandbox.'''

    if dependencies is None:
        dependencies = dn.get('build-depends', [])

    log(dn, 'Installing dependencies\n', dependencies, verbose=True)
    shuffle(dependencies)
    for it in dependencies:
        dependency = app.defs.get(it)
        if os.path.exists(
                os.path.join(dn['sandbox'], 'baserock',
                             dependency['name'] + '.meta')):
            # dependency has already been installed
            log(dn, 'Already did', dependency['name'], verbose=True)
            continue

        install_dependencies(dn, dependency.get('build-depends', []))
        if (it in dn['build-depends']) or \
            (dependency.get('build-mode', 'staging') ==
                dn.get('build-mode', 'staging')):
            compose(dependency)
            if dependency.get('contents'):
                install_dependencies(dn, dependency['contents'])
            sandbox.install(dn, dependency)
    if config.get('log-verbose'):
        sandbox.list_files(dn)
Example #19
async def fetch_detailed_geojson_stations(
    session: ClientSession, headers: dict, query_builder: BuildQuery
) -> Tuple[Dict[int, GeoJsonDetailedWeatherStation], Dict[str, int]]:
    """ Fetch and marshall geojson station data"""
    stations = {}
    id_to_code_map = {}
    # 1 week seems a reasonable period to cache stations for.
    redis_station_cache_expiry: Final = int(
        config.get('REDIS_STATION_CACHE_EXPIRY', 604800))
    # Put the stations in a nice dictionary.
    async for raw_station in fetch_paged_response_generator(
        session, headers, query_builder, 'stations', True,
        redis_station_cache_expiry):
        station_code = raw_station.get('stationCode')
        station_status = raw_station.get('stationStatus', {}).get('id')
        # Because we can't filter on status in the RSQL, we have to manually exclude stations that are
        # not active.
        if is_station_valid(raw_station):
            id_to_code_map[raw_station.get('id')] = station_code
            geojson_station = GeoJsonDetailedWeatherStation(
                properties=DetailedWeatherStationProperties(
                    code=station_code, name=raw_station.get('displayLabel')),
                geometry=WeatherStationGeometry(coordinates=[
                    raw_station.get('longitude'),
                    raw_station.get('latitude')
                ]))
            stations[station_code] = geojson_station
        else:
            logger.debug('station %s, status %s', station_code, station_status)

    return stations, id_to_code_map
Example #20
def write_metadata(dn):
    if dn.get('kind', 'chunk') == 'chunk':
        write_chunk_metafile(dn)
    elif dn.get('kind', 'chunk') == 'stratum':
        write_stratum_metafiles(dn)
    if config.get('check-overlaps', 'ignore') != 'ignore':
        check_overlaps(dn)
Example #21
async def get_stations_by_codes(
        station_codes: List[int]) -> List[WeatherStation]:
    """ Get a list of stations by code, from WFWX Fireweather API. """
    logger.info('Using WFWX to retrieve stations by code')
    with EcodivisionSeasons(','.join([str(code) for code in station_codes
                                      ])) as eco_division:
        async with ClientSession() as session:
            header = await get_auth_header(session)
            stations = []
            # 1 week seems a reasonable period to cache stations for.
            redis_station_cache_expiry: Final = int(
                config.get('REDIS_STATION_CACHE_EXPIRY', 604800))
            # Iterate through "raw" station data.
            iterator = fetch_paged_response_generator(
                session,
                header,
                BuildQueryByStationCode(station_codes),
                'stations',
                use_cache=True,
                cache_expiry_seconds=redis_station_cache_expiry)
            async for raw_station in iterator:
                # If the station is valid, add it to our list of stations.
                if is_station_valid(raw_station):
                    stations.append(parse_station(raw_station, eco_division))
            logger.debug('total stations: %d', len(stations))
            return stations
Example #22
def get_current_user():
    try:
        if not config.get('DEV_SERVER'):
            return oauth.get_current_user(SCOPES)
    except oauth.Error as e:
        pass
    return users.get_current_user()
Example #23
def is_current_user_admin():
    try:
        if not config.get('DEV_SERVER'):
            return oauth.is_current_user_admin(SCOPES)
    except oauth.Error as e:
        pass
    return users.is_current_user_admin()
Example #24
def get_client_id():
    try:
        if not config.get('DEV_SERVER'):
            return oauth.get_client_id(SCOPES)
    except oauth.Error as e:
        pass
    return None
Example #25
def install_dependencies(dn, dependencies=None):
    '''Install recursed dependencies of dn into dn's sandbox.'''

    if dependencies is None:
        dependencies = dn.get('build-depends', [])

    log(dn, 'Installing dependencies\n', dependencies, verbose=True)
    shuffle(dependencies)
    for it in dependencies:
        dependency = app.defs.get(it)
        if os.path.exists(os.path.join(dn['sandbox'], 'baserock',
                                       dependency['name'] + '.meta')):
            # dependency has already been installed
            log(dn, 'Already did', dependency['name'], verbose=True)
            continue

        install_dependencies(dn, dependency.get('build-depends', []))
        if (it in dn['build-depends']) or \
            (dependency.get('build-mode', 'staging') ==
                dn.get('build-mode', 'staging')):
            compose(dependency)
            if dependency.get('contents'):
                install_dependencies(dn, dependency['contents'])
            sandbox.install(dn, dependency)
    if config.get('log-verbose'):
        sandbox.list_files(dn)
Example #26
def CreateAvatar(user):
    image_w = 500
    image_h = 500

    color = (random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))

    img = Image.new('RGB', (image_w, image_h), color=color)
    draw = ImageDraw.Draw(img)
    font = ImageFont.truetype(config.get('ROOT_PATH')+'/static/fonts/Roboto-Bold.ttf', size=300)

    user_letters = user['first_name'][0]+user['last_name'][0]

    text_w, text_h = draw.textsize(user_letters, font=font)

    draw.text(
        (
            (image_w - text_w)/2,
            (image_h - text_h - 80)/2,
        ),
        user_letters, 
        font=font, 
        fill=(int(color[0] - (color[0]/10)), int(color[1] - (color[1]/10)), int(color[2]  - (color[2]/10))),
    )

    image_path = user['id']+'/avatar.png'

    img.save(config['UPLOAD_FOLDER_PROFILE']+image_path)

    webp.cwebp(config['UPLOAD_FOLDER_PROFILE']+image_path, config['UPLOAD_FOLDER_PROFILE']+user['id']+'/avatar.webp', "-q 90")

    return image_path
Example #27
def run_build(dn):
    ''' This is where we run ./configure, make, make install (for example).
    By the time we get here, all dependencies for component have already
    been assembled.
    '''

    if config.get('mode', 'normal') == 'no-build':
        log(dn, 'SKIPPING BUILD: artifact will be empty')
        return

    if dn.get('build-mode') != 'bootstrap':
        sandbox.ldconfig(dn)

    if dn.get('repo'):
        repos.checkout(dn)
        dn['SOURCE_DATE_EPOCH'] = repos.source_date_epoch(dn['checkout'])

    get_build_commands(dn)
    env_vars = sandbox.env_vars_for_build(dn)

    log(dn, 'Logging build commands to %s' % dn['log'])
    for build_step in app.defs.defaults.build_steps:
        if dn.get(build_step):
            log(dn, 'Running', build_step)
        for command in dn.get(build_step, []):
            command = 'false' if command is False else command
            command = 'true' if command is True else command
            sandbox.run_sandboxed(dn, command, env=env_vars,
                                  allow_parallel=('build' in build_step))
    if dn.get('devices'):
        sandbox.create_devices(dn)

    with open(dn['log'], "a") as logfile:
        time_elapsed = elapsed(dn['start-time'])
        logfile.write('Elapsed_time: %s\n' % time_elapsed)
Example #28
def install_contents(dn, contents=None):
    ''' Install contents (recursively) into dn['sandbox'] '''

    if contents is None:
        contents = dn.get('contents', [])

    log(dn, 'Installing contents\n', contents, verbose=True)

    shuffle(contents)
    for it in contents:
        item = app.defs.get(it)
        if os.path.exists(os.path.join(dn['sandbox'],
                                       'baserock', item['name'] + '.meta')):
            # content has already been installed
            log(dn, 'Already installed', item['name'], verbose=True)
            continue

        for i in item.get('contents', []):
            install_contents(dn, [i])

        if item.get('build-mode', 'staging') != 'bootstrap':
            if not get_cache(item):
                compose(item)
            sandbox.install(dn, item)

    if config.get('log-verbose'):
        log(dn, 'Added contents\n', contents)
        sandbox.list_files(dn)
Example #29
    def parse_files(self, directory):
        schemas = self.load_schemas()
        with chdir(directory):
            for dirname, dirnames, filenames in os.walk('.'):
                filenames.sort()
                dirnames.sort()
                if '.git' in dirnames:
                    dirnames.remove('.git')
                for filename in filenames:
                    if filename.endswith(('.def', '.morph')):
                        path = os.path.join(dirname, filename)
                        data = self._load(path)
                        if data is not None:
                            self.validate_schema(schemas, data)
                            data['path'] = path[2:]
                            self._fix_keys(data)
                            self._tidy_and_insert_recursively(data)

        if config.get('mode') == 'parse-only':
            with open(config['result-file'], 'w') as f:
                f.write(
                    yaml.dump(self._data,
                              default_flow_style=False,
                              Dumper=ExplicitDumper))
            log('RESULT', 'Parsed definitions data in yaml format is at',
                config['result-file'])
            os._exit(0)
Example #30
def get_ads_orcid_profile(orcidid):
    r = requests.get(config.get('API_ORCID_EXPORT_PROFILE') % orcidid,
                     headers={'Accept': 'application/json',
                              'Authorization': 'Bearer:%s' % config.get('API_TOKEN')})
    if r.status_code != 200:
        return None
    else:
        return r.json()
Example #31
def install_contents(dn, contents=None):
    ''' Install contents (recursively) into dn['sandbox'] '''

    if contents is None:
        contents = dn.get('contents', [])

    log(dn, 'Installing contents\n', contents, verbose=True)

    shuffle(contents)
    for it in contents:
        item = app.defs.get(it)
        if os.path.exists(
                os.path.join(dn['sandbox'], 'baserock',
                             item['name'] + '.meta')):
            # content has already been installed
            log(dn, 'Already installed', item['name'], verbose=True)
            continue

        for i in item.get('contents', []):
            install_contents(dn, [i])

        if item.get('build-mode', 'staging') != 'bootstrap':
            if not get_cache(item):
                compose(item)
            sandbox.install(dn, item)

    if config.get('log-verbose'):
        log(dn, 'Added contents\n', contents)
        sandbox.list_files(dn)
Example #32
def layout(params):
    # This function must return a layout for the homepage

    return dbc.Container(
        [
            html.Div(
                [
                    html.H5('Choose a page to view'),
                ],
                className='px-3 py-3 pt-md-5 pb-md-4 mx-auto text-center'
            ),
            dbc.CardDeck(
                [
                    dbc.Card(
                        [
                            dbc.CardHeader(page.get('name', '')),
                            dbc.CardBody(
                                [
                                    html.P(page.get('description', '')),
                                    html.A('View', href=page.get('path', '/'), className='btn btn-block btn-outline-primary')
                                ],
                                className='d-flex flex-column'
                            )
                        ],
                        className='mb-3 mx-auto'
                    )
                    for page in config.get('pages', [])
                ],
                className='homepage mb-3'
            )
        ],
    )
Example #33
def get_public_orcid_profile(orcidid):
    r = requests.get(config.get('API_ORCID_PROFILE_ENDPOINT') % orcidid,
                     headers={'Accept': 'application/json'})
    if r.status_code != 200:
        return None
    else:
        return r.json()
Example #34
def write_metadata(defs, component):
    if component.get('kind', 'chunk') == 'chunk':
        write_chunk_metafile(defs, component)
    elif component.get('kind', 'chunk') == 'stratum':
        write_stratum_metafiles(defs, component)
    if config.get('check-overlaps', 'ignore') != 'ignore':
        check_overlaps(defs, component)
Example #35
    def run(self):
        """
        run the encoding
        """
        # probe file first
        frame_count = self.ffmpeg_probe_frame_count()

        if frame_count == -1:
            app.logger.debug("Probing of " + self.file.filename +
                             " failed - aborting...")
            ProcessRepository.file_failed(self.file)
            return

        # app.logger.debug("Probing of " + file.filename + " successful.. frame count: " + str(frame_count))
        split_path = os.path.split(self.file.filename)
        path = split_path[0]
        original_filename = split_path[1]
        filename_noext = os.path.split(
            os.path.splitext(original_filename)[0])[1]
        # form output filename and store it in self.file for later use
        self.file.output_filename = filename_noext + ".pyencode"

        cmd = ["ffmpeg"]
        cmd.extend(["-i", self.file.filename])
        # add parameters from config
        cmd.extend(shlex.split(config.get("encoding", "parameters")))
        cmd.extend(["-y", path + os.sep + self.file.output_filename])

        app.logger.debug("Starting encoding of " + self.file.filename +
                         " with %s" % " ".join(cmd))

        for info in self.run_ffmpeg(cmd, frame_count):
            if info["return_code"] != -1:
                app.logger.debug(
                    "Error occured while running ffmpeg. Last lines of output: "
                )
                app.logger.debug("\n".join(info["last_lines"]))
                ProcessRepository.file_failed(self.file)
                return

            # store information in database
            # convert kB to bytes
            info["ffmpeg_size"] *= 1024

            # we don't need the return_code anymore (and don't want to store it)
            info.pop("return_code")

            # update file in DB
            File.query.filter_by(id=self.file.id).update(info)
            db.session.commit()

            # update self.file
            for k in info:
                setattr(self.file, k, info[k])

            # tell ProcessRepository there's some progress going on
            ProcessRepository.file_progress(self.file)

        if self.active:
            ProcessRepository.file_done(self.file)
Example #36
    def file_done(file):
        """
        will be called whenever a Process is finished
        :param file: the File object of the File that is done
        """

        # delete from "processes"
        ProcessRepository.processes.pop(file.id)

        # remove original file from disk if desired
        if config.getboolean("encoding", "delete_old_file"):
            os.remove(file.filename)

        # rename file if desired
        if config.getboolean("encoding", "rename_enabled"):
            rename_search = config.get("encoding", "rename_search")
            rename_replace = config.get("encoding", "rename_replace")

            # get pathinfo
            pathinfo = os.path.split(file.filename)
            path = pathinfo[0]
            old_filename = pathinfo[1]

            # only rename if match occurs
            if re.match(rename_search, old_filename):
                new_filename = re.sub(rename_search, rename_replace, old_filename)
                # rename output_filename (created by ffmpeg, see process.py) to new_filename
                os.rename(path + os.sep + file.output_filename, path + os.sep + new_filename)

        # update status to "finished"
        db.session.query(File).filter_by(id=file.id).update(dict(status=StatusMap.finished.value))
        db.session.commit()

        # check if it's necessary to start new processes
        ProcessRepository.check_and_start_processes()

        # notify client
        socketio.emit("file_done", {
            "data": {
                "id": file.id,
                "count_active": ProcessRepository.count_processes_active(),
                "count_queued": ProcessRepository.count_processes_queued(),
                "count_total": ProcessRepository.count_processes_total(),
            }
        })

        app.logger.debug("Done with encoding of %s" % file.filename)
Example #37
    def _fix_keys(self, dn, base=None):
        '''Normalizes keys for a definition dict and its contents

        Some definitions have a 'morph' field which is a relative path. Others
        only have a 'name' field, which has no directory part. A few do not
        have a 'name'

        This sets our key to be 'path', and fixes any missed 'name' to be
        the same as 'path' but replacing '/' by '-'

        '''
        
        exit = (config.get('check-definitions') == 'exit')
        
        if dn.get('morph'):
            if not os.path.isfile(dn.get('morph')):
                log('DEFINITION', 'WARNING: missing', dn['morph'], exit=exit)
            dn['path'] = self._demorph(dn.pop('morph'))

        if 'path' not in dn:
            if 'name' not in dn:
                log(dn, 'No path, no name?', exit=True)
            if config.get('artifact-version') in range(0, 4):
                dn['path'] = dn['name']
            else:
                dn['path'] = os.path.join(self._demorph(base), dn['name'])
                if os.path.isfile(dn['path'] + '.morph'):
                    # morph file exists, but is not mentioned in stratum
                    # so we ignore it
                    log(dn, 'WARNING: ignoring', dn['path'] + '.morph',
                        exit=exit)
                    dn['path'] += '.default'

        dn['path'] = self._demorph(dn['path'])
        dn.setdefault('name', os.path.basename(dn['path']))

        if dn.get('name') == config['target']:
            config['target'] = dn['path']

        n = self._demorph(os.path.basename(dn['name']))
        p = self._demorph(os.path.basename(dn['path']))
        if os.path.splitext(p)[0] not in n:
            log('MORPHS', 'WARNING: %s wrong name' % dn['path'], dn['name'],
                exit=exit)

        for system in (dn.get('systems', []) + dn.get('subsystems', [])):
            self._fix_keys(system)
Example #38
    def _tidy_and_insert_recursively(self, dn):
        '''Insert a definition and its contents into the dictionary.

        Takes a dict containing the content of a definition file.

        Inserts the definitions referenced or defined in the
        'build-depends' and 'contents' keys of `definition` into the
        dictionary, and then inserts `definition` itself into the
        dictionary.

        '''
        # handle morph syntax oddities...
        for index, component in enumerate(dn.get('build-depends', [])):
            self._fix_keys(component)
            dn['build-depends'][index] = self._insert(component)

        # The 'contents' field in the internal data model corresponds to the
        # 'chunks' field in a stratum .morph file, or the 'strata' field in a
        # system .morph file.
        dn['contents'] = dn.get('contents', [])

        if type(dn.get('chunks', [])) is not list:
            log('DEFINITIONS', 'WARNING: %s chunks must be list:' % dn['path'],
                dn.get('chunks', []), exit=True)

        if type(dn.get('strata', [])) is not list:
            log('DEFINITIONS', 'WARNING: %s strata must be list:' % dn['path'],
                dn.get('strata', []), exit=True)

        dn['contents'] += dn.pop('chunks', []) + dn.pop('strata', [])

        lookup = {}
        for index, component in enumerate(dn['contents']):
            self._fix_keys(component, dn['path'])
            lookup[component['name']] = component['path']
            if component['name'] == dn['name']:
                log(dn, 'WARNING: %s contains' % dn['path'], dn['name'])

            for x, it in enumerate(component.get('build-depends', [])):
                if it not in lookup:
                    # it is defined as a build depend, but hasn't actually been
                    # defined yet...
                    dependency = {'name': it}
                    self._fix_keys(dependency,  dn['path'])
                    lookup[it] = dependency['path']
                component['build-depends'][x] = lookup[it]

            component['build-depends'] = (dn.get('build-depends', []) +
                                          component.get('build-depends', []))

            if config.get('artifact-version', 0) not in [0, 1, 2, 3, 4, 5]:
                c = self._data.get(component['path'])
                if c and 'build-depends' in c:
                    component['build-depends'] += c['build-depends']

            splits = component.get('artifacts', [])
            dn['contents'][index] = {self._insert(component): splits}

        return self._insert(dn)
Example #39
def prepare_fetch_dailies_for_all_stations_query(time_of_interest: datetime,
                                                 page_count: int):
    """ Prepare url and params for fetching dailies(that's forecast and observations for noon) for all.
    stations. """
    base_url = config.get('WFWX_BASE_URL')
    noon_date = _get_noon_date(time_of_interest)
    timestamp = int(noon_date.timestamp() * 1000)
    # one could filter on recordType.id==FORECAST or recordType.id==ACTUAL but we want it all.
    params = {
        'query': f'weatherTimestamp=={timestamp}',
        'page': page_count,
        'size': config.get('WFWX_MAX_PAGE_SIZE', 1000)
    }
    endpoint = ('/v1/dailies/rsql')
    url = f'{base_url}{endpoint}'
    logger.info('%s         %s', url, params)
    return url, params
Example #40
async def get_client() -> Generator[Tuple[AioBaseClient, str], None, None]:
    """ Return AioBaseClient client and bucket
    """
    server = config.get('OBJECT_STORE_SERVER')
    user_id = config.get('OBJECT_STORE_USER_ID')
    secret_key = config.get('OBJECT_STORE_SECRET')
    bucket = config.get('OBJECT_STORE_BUCKET')

    session = get_session()
    async with session.create_client('s3',
                                     endpoint_url=f'https://{server}',
                                     aws_secret_access_key=secret_key,
                                     aws_access_key_id=user_id) as client:
        try:
            yield client, bucket
        finally:
            del client
Example #41
def test_get(app: Flask) -> None:
    with app.app_context():  # type: ignore
        app.config = Mock()
        app.config.get = Mock(return_value='value')  # type: ignore

        assert get('key') == 'value'

        app.config.get.assert_called_once_with('key')  # type: ignore
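For context, a minimal sketch of the get helper a test like this presumably exercises (an assumption - the real module may differ): it simply proxies to the active Flask app's config.

from flask import current_app

def get(key: str) -> str:
    # Look the key up on the current application's config object.
    return current_app.config.get(key)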
Example #42
def packagemanager():
    # whether the license agreement has already been accepted
    license_config = {
        'True': packagemanager_already_accept,
        'False': pacekagemanager_new
    }
    # return the view
    return license_config[config.get("License", "License")](request)
Example #43
def get(obj):

    if obj.get('template'):
        template = obj['template']
    else:
        template = config.get()['defaults']['template']

    name = obj['name']
    return render_template(template + '''.html''', name=name)
Example #44
def compose(dn):
    '''Work through defs tree, building and assembling until target exists'''

    if type(dn) is not dict:
        dn = app.defs.get(dn)

    # if we can't calculate cache key, we can't create this component
    if cache_key(dn) is False:
        if 'tried' not in dn:
            log(dn, 'No cache_key, so skipping compose')
            dn['tried'] = True
        return False

    # if dn is already cached, we're done
    if get_cache(dn):
        return cache_key(dn)

    log(dn, "Composing", dn['name'], verbose=True)

    # if we have a kbas, look there to see if this component exists
    if config.get('kbas-url') and not config.get('reproduce'):
        with claim(dn):
            if get_remote(dn):
                config['counter'].increment()
                return cache_key(dn)

    # we only work with user-specified arch
    if 'arch' in dn and dn['arch'] != config['arch']:
        return None

    # Create composite components (strata, systems, clusters)
    systems = dn.get('systems', [])
    shuffle(systems)
    for system in systems:
        for s in system.get('subsystems', []):
            subsystem = app.defs.get(s['path'])
            compose(subsystem)
        compose(system['path'])

    with sandbox.setup(dn):
        install_contents(dn)
        build(dn)  # bring in 'build-depends', and run make

    return cache_key(dn)
Example #45
def compose(dn):
    '''Work through defs tree, building and assembling until target exists'''

    if type(dn) is not dict:
        dn = app.defs.get(dn)

    # if we can't calculate cache key, we can't create this component
    if cache_key(dn) is False:
        if 'tried' not in dn:
            log(dn, 'No cache_key, so skipping compose')
            dn['tried'] = True
        return False

    # if dn is already cached, we're done
    if get_cache(dn):
        return cache_key(dn)

    log(dn, "Composing", dn['name'], verbose=True)

    # if we have a kbas, look there to see if this component exists
    if config.get('kbas-url') and not config.get('reproduce'):
        with claim(dn):
            if get_remote(dn):
                config['counter'].increment()
                return cache_key(dn)

    # we only work with user-specified arch
    if 'arch' in dn and dn['arch'] != config['arch']:
        return None

    # Create composite components (strata, systems, clusters)
    systems = dn.get('systems', [])
    shuffle(systems)
    for system in systems:
        for s in system.get('subsystems', []):
            subsystem = app.defs.get(s['path'])
            compose(subsystem)
        compose(system['path'])

    with sandbox.setup(dn):
        install_contents(dn)
        build(dn)     # bring in 'build-depends', and run make

    return cache_key(dn)
Example #46
    def run(self):
        """
        run the encoding
        """
        # probe file first
        frame_count = self.ffmpeg_probe_frame_count()

        if frame_count == -1:
            app.logger.debug("Probing of " + self.file.filename + " failed - aborting...")
            ProcessRepository.file_failed(self.file)
            return

        # app.logger.debug("Probing of " + file.filename + " successful.. frame count: " + str(frame_count))
        split_path = os.path.split(self.file.filename)
        path = split_path[0]
        original_filename = split_path[1]
        filename_noext = os.path.split(os.path.splitext(original_filename)[0])[1]
        # form output filename and store it in self.file for later use
        self.file.output_filename = filename_noext + ".pyencode"

        cmd = ["ffmpeg"]
        cmd.extend(["-i", self.file.filename])
        # add parameters from config
        cmd.extend(shlex.split(config.get("encoding", "parameters")))
        cmd.extend(["-y", path + os.sep + self.file.output_filename])

        app.logger.debug("Starting encoding of " + self.file.filename + " with %s" % " ".join(cmd))

        for info in self.run_ffmpeg(cmd, frame_count):
            if info["return_code"] != -1:
                app.logger.debug("Error occured while running ffmpeg. Last lines of output: ")
                app.logger.debug("\n".join(info["last_lines"]))
                ProcessRepository.file_failed(self.file)
                return

            # store information in database
            # convert kB to bytes
            info["ffmpeg_size"] *= 1024

            # we don't need the return_code anymore (and don't want to store it)
            info.pop("return_code")

            # update file in DB
            File.query.filter_by(id=self.file.id).update(info)
            db.session.commit()

            # update self.file
            for k in info:
                setattr(self.file, k, info[k])

            # tell ProcessRepository there's some progress going on
            ProcessRepository.file_progress(self.file)

        if self.active:
            ProcessRepository.file_done(self.file)
Example #47
    def verify_session_token(cls, token):
        if token is None:
            return False, None
        s = Serializer(config.get('secret_key'))
        try:
            data = s.loads(token)
        except SignatureExpired:
            return False, None
        except BadSignature:
            return False, None
        return True, data
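A hedged companion sketch showing how a matching token might be issued (assuming Serializer is itsdangerous' TimedJSONWebSignatureSerializer, as the SignatureExpired/BadSignature handling suggests; the payload is illustrative):

from itsdangerous import TimedJSONWebSignatureSerializer as Serializer

def issue_session_token(secret_key, user_id, expiration=3600):
    # Tokens signed with the same secret can later be checked by
    # verify_session_token above.
    s = Serializer(secret_key, expires_in=expiration)
    return s.dumps({'user_id': user_id})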
Example #48
def write_metafile(rules, splits, dn):
    metadata = {'products': [{'artifact': a,
                              'components': sorted(set(splits[a]))}
                             for a, r in rules]}

    if dn.get('kind', 'chunk') == 'chunk':
        metadata['repo'] = dn.get('repo')
        metadata['ref'] = dn.get('ref')
    else:
        if config.get('artifact-version', 0) not in range(0, 2):
            metadata['repo'] = config['defdir']
            metadata['ref'] = config['def-version']

    if config.get('artifact-version', 0) not in range(0, 1):
        metadata['cache'] = dn.get('cache')

    meta = os.path.join(dn['baserockdir'], dn['name'] + '.meta')

    with open(meta, "w") as f:
        yaml.safe_dump(metadata, f, default_flow_style=False)
Example #49
def install_contents(defs, component, contents=None):
    ''' Install contents (recursively) into component['sandbox'] '''

    component = defs.get(component)
    if contents is None:
        contents = component.get('contents', [])

    log(component, 'Installing contents\n', contents, verbose=True)

    shuffle(contents)
    for it in contents:
        this = defs.get(it)
        if os.path.exists(os.path.join(component['sandbox'],
                                       'baserock', this['name'] + '.meta')):
            # content has already been installed
            log(component, 'Already installed', this['name'], verbose=True)
            continue

        if component.get('kind', 'chunk') == 'system':
            artifacts = []
            for content in component['contents']:
                if content.keys()[0] == this['path']:
                    artifacts = content[this['path']]
                    break

            if artifacts != [] or config.get('default-splits', []) != []:
                compose(defs, this)
                install_split_artifacts(defs, component, this, artifacts)
                continue

        for i in this.get('contents', []):
            install_contents(defs, component, [i])

        if this.get('build-mode', 'staging') != 'bootstrap':
            if not get_cache(defs, this):
                compose(defs, this)
            sandbox.install(defs, component, this)

    if config.get('log-verbose'):
        log(component, 'Added contents\n', contents)
        sandbox.list_files(component)
Example #50
    def _insert(self, new_def):
        '''Insert a new definition into the dictionary, return the key.

        Takes a dict representing a single definition.

        If a definition with the same 'path' doesn't exist, just add
        `new_def` to the dictionary.

        If a definition with the same 'path' already exists, extend the
        existing definition with the contents of `new_def` unless it
        and the new definition both contain a 'ref'. If any keys are
        duplicated in the existing definition, output a warning.

        If `new_def` contains a sha: field (which needs to be 40 chars),
        this overrides the ref: field.

        '''

        exit = (config.get('check-definitions') == 'exit')

        dn = self._data.get(new_def['path'])
        if dn:
            if (dn.get('ref') is None or new_def.get('ref') is None):
                for key in new_def:
                    if key != 'name':
                        dn[key] = new_def[key]

            # If a sha was specified, we want to build it instead of the ref
            # but preserve the ref in the output <target>.yml file.
            if dn.get('sha'):
                if len(dn['sha']) != 40:
                    log(new_def, 'ERROR: invalid sha:', dn['sha'], exit=True)
                dn['orig_ref'] = dn['ref']
                dn['ref'] = dn['sha']

            if dn['name'] != new_def['name']:
                log(new_def, 'WARNING: %s also named as' % new_def['name'],
                    dn['name'], exit=exit)
                dn['name'] = new_def['name']

            for key in new_def:
                if dn.get(key) and new_def[key] and dn[key] != new_def[key]:
                    log(new_def,
                        'WARNING: multiple definitions of %s \n' % key,
                        '%s | %s' % (dn.get(key), new_def[key]), exit=exit)
        else:
            self._data[new_def['path']] = new_def

        return new_def['path']
Example #51
def log_changes(dn, tmpdir, old_defs, ref):
    do_git_log = False
    old_def = old_defs.get(dn['path'])
    log_file = os.path.join(tmpdir, dn['name'])
    with open(log_file, 'w') as f:
        keys = set(dn) - set(['tree', 'cache'])
        for key in keys:
            try:
                old_value = old_def.get(key)
            except:
                old_value = None
            if dn[key] != old_value:
                f.write('[%s] Value changed: %s\n' % (dn['path'], key))
                if type(dn[key]) is str:
                    f.write('%s | %s\n' % (old_value, dn[key]))
                if type(dn[key]) is not str and type(dn[key]) is not float:
                    if old_value:
                        for x in old_value:
                            f.write(repr(x))
                    f.write('\n                vvv\n')
                    if dn[key]:
                        for x in dn[key]:
                            f.write(repr(x))
                f.write('\n\n')

        if dn.get('kind', 'chunk') == 'chunk' and config.get('release-cmd'):
            log(dn, 'Logging git change history', tmpdir)
            try:
                gitdir = os.path.join(config['gits'],
                                      get_repo_name(dn['repo']))
                if not os.path.exists(gitdir):
                    mirror(dn['name'], dn['repo'])
                elif not mirror_has_ref(gitdir, ref):
                    update_mirror(dn['name'], dn['repo'], gitdir)
                with chdir(gitdir):
                    text = dn['ref'] + '..'
                    if old_def and old_def.get('ref'):
                        text += old_def['ref']
                    f.write(check_output(config['release-command'] + [text]))
            except:
                log(dn, 'WARNING: Failed to log git changes')
    if os.stat(log_file).st_size == 0:
        os.remove(log_file)
Example #52
def write_stratum_metafiles(stratum):
    '''Write the .meta files for a stratum to the baserock dir

    The split rules are used to divide up the installed components into
    artifacts in the 'products' list in the stratum .meta file. Each artifact
    contains a list of chunk artifacts which match the stratum splitting rules

    '''

    log(stratum['name'], 'Splitting', stratum.get('kind'))
    rules, splits = compile_rules(stratum)

    for item in stratum['contents']:
        chunk = app.defs.get(item)
        if chunk.get('build-mode', 'staging') == 'bootstrap':
            continue

        metadata = get_metadata(chunk)
        split_metadata = {'ref': metadata.get('ref'),
                          'repo': metadata.get('repo'),
                          'products': []}

        if config.get('artifact-version', 0) not in range(0, 1):
            split_metadata['cache'] = metadata.get('cache')

        chunk_artifacts = app.defs.get(chunk).get('artifacts', {})
        for artifact, target in chunk_artifacts.items():
            splits[target].append(artifact)

        for product in metadata['products']:
            for artifact, rule in rules:
                if rule.match(product['artifact']):
                    split_metadata['products'].append(product)
                    splits[artifact].append(product['artifact'])
                    break

        meta = os.path.join(stratum['baserockdir'], chunk['name'] + '.meta')

        with open(meta, "w") as f:
            yaml.safe_dump(split_metadata, f, default_flow_style=False)

    write_metafile(rules, splits, stratum)
Example #53
def check_overlaps(dn):
    if set(config['new-overlaps']) <= set(config['overlaps']):
        config['new-overlaps'] = []
        return

    overlaps_found = False
    config['new-overlaps'] = list(set(config['new-overlaps']))
    for path in config['new-overlaps']:
        log(dn, 'WARNING: overlapping path', path)
        for filename in os.listdir(dn['baserockdir']):
            with open(os.path.join(dn['baserockdir'], filename)) as f:
                for line in f:
                    if path[1:] in line:
                        log(filename, 'WARNING: overlap at', path[1:])
                        overlaps_found = True
                        break
        if config.get('check-overlaps') == 'exit':
            log(dn, 'Overlaps found', config['new-overlaps'], exit=True)
    config['overlaps'] = list(set(config['new-overlaps'] + config['overlaps']))
    config['new-overlaps'] = []
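The behaviour above is driven by a single config flag; the accepted values below are an assumption inferred from the check in the code:

# Assumed configuration (illustrative): anything other than 'exit' only logs
# warnings about overlapping paths, while 'exit' aborts the build.
config['check-overlaps'] = 'exit'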
Example #54
0
def get_build_commands(dn):
    '''Get commands specified in dn, plus commands implied by build-system

    The containing definition may point to another definition file (using
    the 'path' field in YBD's internal data model) that contains build
    instructions, or it may only specify a predefined build system, using the
    'build-system' field.

    The definition containing build instructions can specify a predefined
    build-system and then override some or all of the command sequences it
    defines.

    If the definition file doesn't exist and no build-system is specified,
    this function will scan the contents of the checked-out source repo and try
    to autodetect what build system is used.

    '''

    if dn.get('kind', None) == "system":
        # Systems must run their integration scripts as install commands
        dn['install-commands'] = gather_integration_commands(dn)
        return

    exit = config.get('check-definitions') == 'exit'
    if 'build-system' in dn:
        bs = dn['build-system']
        log(dn, 'Defined build system is', bs)
    else:
        files = os.listdir(dn['checkout'])
        bs = app.defs.defaults.detect_build_system(files)
        if bs == 'manual' and 'install-commands' not in dn:
            if dn.get('kind', 'chunk') == 'chunk':
                print dn
                log(dn, 'WARNING: No install-commands, manual build-system',
                    exit=exit)
        log(dn, 'WARNING: Assumed build system is', bs)

    for build_step in app.defs.defaults.build_steps:
        if dn.get(build_step, None) is None:
            commands = app.defs.defaults.build_systems[bs].get(build_step, [])
            dn[build_step] = commands
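A minimal sketch of the defaults this function relies on, assuming a mapping from build-system names to per-step command lists; the names and commands below are invented for illustration:

# Hypothetical shape of app.defs.defaults (assumption, not the real data).
build_steps = ['configure-commands', 'build-commands', 'install-commands']
build_systems = {
    'autotools': {
        'configure-commands': ['./configure --prefix="$PREFIX"'],
        'build-commands': ['make'],
        'install-commands': ['make install DESTDIR="$DESTDIR"'],
    },
    'manual': {},
}
# A definition that names 'build-system: autotools' but supplies its own
# 'install-commands' keeps them; any step it leaves out is filled in from here.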
Example #55
0
def parse_article(article, share):
    if article.is_parsed():
        return notify_new_article(article, share)

    endpoint = config['ARTICLE_PARSE_ENDPOINT']

    payload = {
        'token': config.get('DIFFBOT_TOKEN', None),
        'url': article.url
    }

    resp = requests.get(endpoint, params=payload)

    if not resp.ok:
        app.logger.error('Failed to parse %s (error code: %s). Reason: %s'
                         % (article, resp.status_code, resp.text or 'unknown'))
        return

    json = resp.json()

    if 'resolved_url' in json:
        article.url = json['resolved_url']
    elif 'url' in json:
        article.url = json['url']
    else:
        app.logger.warning("Parse article did not return any url for %s (%s)" % (article, json))

    article.icon = json.get('icon')
    article.title = (json['title'][:252] + '...') if len(json['title']) > 255 else json['title']
    article.text = json['text']
    article.date = json['date']
    article.author = json.get('author', '')
    article.parse_date = datetime.utcnow()
    share.parsed = True

    article.save()
    share.save()

    notify_new_article(article, share)
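For context, the function above reads a handful of fields from the parser's JSON response; a hypothetical response shaped like the one it expects (all values invented) would be:

# Illustrative parser response consumed by parse_article() (assumption).
json = {
    'resolved_url': 'https://example.com/story',
    'icon': 'https://example.com/favicon.ico',
    'title': 'An example headline',
    'text': 'Body text of the article.',
    'date': '2018-01-01 00:00:00',
    'author': 'A. Writer',
}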
Example #56
0
    def _insert(self, new_def):
        '''Insert a new definition into the dictionary, return the key.

        Takes a dict representing a single definition.

        If a definition with the same 'path' doesn't exist, just add
        `new_def` to the dictionary.

        If a definition with the same 'path' already exists, extend the
        existing definition with the contents of `new_def` unless it
        and the new definition both contain a 'ref'. If any keys are
        duplicated in the existing definition, output a warning.

        '''
        
        exit = (config.get('check-definitions') == 'exit')
        
        dn = self._data.get(new_def['path'])
        if dn:
            if (dn.get('ref') is None or new_def.get('ref') is None):
                for key in new_def:
                    if key != 'name':
                        dn[key] = new_def[key]

            if dn['name'] != new_def['name']:
                log(new_def, 'WARNING: %s also named as' % new_def['name'],
                    dn['name'], exit=exit)
                dn['name'] = new_def['name']

            for key in new_def:
                if dn.get(key) and new_def[key] and dn[key] != new_def[key]:
                    log(new_def,
                        'WARNING: multiple definitions of %s \n' % key,
                        '%s | %s' % (dn.get(key), new_def[key]), exit=exit)
        else:
            self._data[new_def['path']] = new_def

        return new_def['path']
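A small usage sketch of the merge behaviour described in the docstring; the instance name and field values are hypothetical:

# Hypothetical calls: the second definition extends the first because only
# one of them carries a 'ref'.
defs._insert({'path': 'strata/core/foo.morph', 'name': 'foo',
              'build-system': 'autotools'})
defs._insert({'path': 'strata/core/foo.morph', 'name': 'foo',
              'ref': 'master'})
# defs._data['strata/core/foo.morph'] now holds both 'build-system' and 'ref'.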
Example #57
0
    def _run(self, args, config):
        """ Main entry point. """

        solr_url = None
        try:
            solr_url = config.get("solr", "url").rstrip("/") + "/"
            solr = scorched.SolrInterface(solr_url)
        except:
            raise cli.CliError("Unable to connect to solr: %s" % solr_url)

        if args.action in ("add", "add-all"):
            database_config = dict(config.items("database"))
            db = app.database.get_engine(database_config)

            if args.action == "add":
                self.add_models(db, solr, args.models.split(","))
            else:
                self.add_models(db, solr)

            solr.optimize()
            self._logger.info("Added requested documents and optimized index.")

        elif args.action in ("delete", "delete-all"):
            if args.action == "delete":
                self.delete_models(solr, args.models.split(","))
            else:
                solr.delete_all()

            solr.optimize()
            self._logger.info("Deleted requested documents and optimized index.")

        elif args.action == "optimize":
            solr.optimize()
            self._logger.info("Optimized index.")

        elif args.action == "schema":
            schema_url = urljoin(solr_url, "schema")
            self.schema(schema_url)
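Illustrative invocations of this entry point; the command name is an assumption, but the actions map directly onto the branches above:

# Hypothetical CLI usage (command name invented):
#   tool index add --models=Article,Author  -> add_models(db, solr, ['Article', 'Author'])
#   tool index add-all                      -> add_models(db, solr)
#   tool index delete --models=Article      -> delete_models(solr, ['Article'])
#   tool index delete-all                   -> solr.delete_all()
#   tool index optimize                     -> solr.optimize()
#   tool index schema                       -> self.schema(solr_url + 'schema')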
Example #58
0
def move_required_files(dn, stratum, artifacts):
    log(dn, 'Installing %s artifacts' % stratum['name'], artifacts)
    stratum_metadata = get_metadata(stratum)
    split_stratum_metadata = {}
    split_stratum_metadata['products'] = []
    to_keep = []
    for product in stratum_metadata['products']:
        for artifact in artifacts:
            if artifact == product['artifact']:
                to_keep += product['components']
                split_stratum_metadata['products'].append(product)

    log(dn, 'Splitting artifacts:', artifacts, verbose=True)
    log(dn, 'Splitting components:', to_keep, verbose=True)

    baserockpath = os.path.join(dn['install'], 'baserock')
    if not os.path.isdir(baserockpath):
        os.mkdir(baserockpath)
    split_stratum_metafile = os.path.join(baserockpath,
                                          stratum['name'] + '.meta')
    with open(split_stratum_metafile, "w") as f:
        yaml.safe_dump(split_stratum_metadata, f, default_flow_style=False)

    for path in stratum['contents']:
        chunk = app.defs.get(path)
        if chunk.get('build-mode', 'staging') == 'bootstrap':
            continue

        try:
            metafile = path_to_metafile(chunk)
            with open(metafile, "r") as f:
                filelist = []
                metadata = yaml.safe_load(f)
                split_metadata = {'ref': metadata.get('ref'),
                                  'repo': metadata.get('repo'),
                                  'products': []}
                if config.get('artifact-version', 0) not in range(0, 1):
                    metadata['cache'] = dn.get('cache')

                for product in metadata['products']:
                    if product['artifact'] in to_keep:
                        filelist += product.get('components', [])
                        # handle old artifacts still containing 'files'
                        filelist += product.get('files', [])

                        split_metadata['products'].append(product)

                if split_metadata['products'] != []:
                    split_metafile = os.path.join(baserockpath,
                                                  os.path.basename(metafile))
                    with open(split_metafile, "w") as f:
                        yaml.safe_dump(split_metadata, f,
                                       default_flow_style=False)
                    log(dn, 'Splits split_metadata is\n', split_metadata,
                        verbose=True)
                    log(dn, 'Splits filelist is\n', filelist, verbose=True)
                    copy_file_list(dn['sandbox'], dn['install'], filelist)
        except:
            import traceback
            traceback.print_exc()
            log(dn, 'Failed to install split components', exit=True)
Example #59
0
#! /usr/bin/python
# -*- coding:utf-8 -*-

from app import app
from app import config

if __name__ == '__main__':
    app.run(host=config.get('flask', 'url'),
            port=config.getint('flask', 'port'),
            debug=True)
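The launcher reads a ConfigParser-style [flask] section; a minimal example with placeholder values might look like this:

# Assumed configuration file section (values are placeholders):
#
#   [flask]
#   url = 127.0.0.1
#   port = 5000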
Example #60
0
''' Performs randomized requests

Will flip coins or roll dice as requested and configured.
'''
from app import bot, config
from random import choice

COIN_OPTIONS = config.get("coin_sides", ["heads", "tails"])
DIE = {die_type: map(str, range(1, sides + 1)) for (die_type, sides)
       in config.get("die", {"dice": 6}).items()}


@bot.listen("@me: flip coin")
def flip_coin(channel, user):
    ''' flips a coin for the user
    Will flip a coin for the user

    usage: `@me flip coin`
    '''
    result = choice(COIN_OPTIONS)
    channel.post("{!s}: flipped a {!s}".format(user, result))
    return result


@bot.listen("@me: roll <({}):die_type>"
            .format("|".join(DIE.keys())), regex=True)
@bot.listen("@me: roll <int:number> <({}):die_type>"
            .format("|".join(DIE.keys())), regex=True)
def roll_dice(channel, user, die_type, number=1):
    ''' rolls a variable number of various die types for the user
    Will roll one or many die of the requested type.