Example #1
def download(uris):
    """
    Download a file given a (url, write path) tuple.
    """
    url = uris[0]
    path = uris[1]
    info("dl: %s" % url)

    try:
        rfile = requests.get(url, stream=True, timeout=20)
    except (requests.exceptions.ConnectionError,
            requests.exceptions.ReadTimeout, ConnectionResetError) as err:
        warn('Caught exception: "%s". Retrying...' % err)
        return download(uris)

    if rfile.status_code != 200:
        warn('%s failed: %d' % (url, rfile.status_code))
        return

    makedirs(dirname(path), exist_ok=True)
    # chunk_size {sh,c}ould be more on gbit servers
    with open(path, 'wb') as lfile:
        for chunk in rfile.iter_content(chunk_size=1024):
            if chunk:
                lfile.write(chunk)
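
A minimal usage sketch (the URLs and paths here are hypothetical): the single-tuple signature is what makes download() usable directly with multiprocessing.Pool.map, as Example #14 below does.

from multiprocessing import Pool

# Hypothetical (url, path) pairs for illustration only.
jobs = [
    ('https://mirror.example.org/dists/stable/Release',
     '/tmp/spool/stable/Release'),
    ('https://mirror.example.org/dists/stable/main/binary-amd64/Packages.gz',
     '/tmp/spool/stable/main/binary-amd64/Packages.gz'),
]

with Pool(4) as pool:
    pool.map(download, jobs)  # each worker receives one (url, path) tuple
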
Example #2
def attribute_dblclick(self, value):
    callback = utils.rget(self.control, value)
    if not callback:
        log.warn('Invalid dblclick callback: %s' % value)
        return
    self.dblclick_enabled = True
    self._dblclick.connect(callback)
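
utils.rget here appears to resolve a dotted attribute path on self.control. A minimal sketch of such a helper, offered purely as an assumption about its behavior, not the project's actual implementation:

def rget(obj, path, default=None):
    # Hypothetical recursive getter: 'a.b.c' -> getattr(getattr(obj, 'a'), 'b')...
    for part in path.split('.'):
        obj = getattr(obj, part, None)
        if obj is None:
            return default
    return obj
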
Example #3
def handle_thresholded_url(url):
    if not db.was_sent(url):
        sources = db.get_source_urls(url)
        title = urlwork.fetch_title(url)
        if notifications.send_mail(url, sources, title):
            db.mark_sent(url)
    else:
        log.warn('Skipping already-seen URL %s', url)
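
A hedged usage sketch (the URL is hypothetical): because mark_sent() runs only when send_mail() reports success, a failed notification is retried on a later call, while an already-sent URL only produces the warning.

handle_thresholded_url('https://example.org/article')  # mails and marks as sent
handle_thresholded_url('https://example.org/article')  # logs 'Skipping already-seen URL'
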
Example #4
def read_stats(filename):
    """Read stats file."""
    try:
        with open(filename) as fd:
            return json.load(fd)
    except (OSError, ValueError):
        # Missing/unreadable file or invalid JSON: fall back to empty stats.
        warn('Cannot load stats file: {}'.format(filename))
        return []
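
A small usage sketch, assuming the stats file contains a JSON list (the filename is hypothetical); the empty-list fallback means callers can iterate without checking for failure.

for entry in read_stats('stats.json'):  # safe even if the file is missing or corrupt
    print(entry)
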
Example #6
def handle_message(message):
    if 'text' in message and message.get('entities'):
        return handle_message_with_entities(message)
    elif 'friends' in message:
        log.info('Got %d friends on startup', len(message['friends']))
    elif 'delete' in message:
        pass
    else:
        log.warn('Skipping message: %r', message)
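
Hypothetical payloads illustrating each dispatch branch (the dict shapes are assumptions about the streaming API, and the 'text' case requires handle_message_with_entities to be defined):

handle_message({'text': 'hi', 'entities': {'urls': []}})  # empty entities: falls through to the warn branch
handle_message({'friends': [1, 2, 3]})                    # friends list is logged once at startup
handle_message({'delete': {'status': {'id': 42}}})        # deletions are silently ignored
handle_message({'limit': {'track': 5}})                   # anything else is logged and skipped
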
Example #8
def update(self, oId, obj, transaction=None):
    log.warn("update is deprecated, please use updateWithId!")
    if "$set" not in obj:
        obj['$set'] = {}
    obj['$set']['updateAt'] = datetime.now()
    result = self.db.updateOne(self.coll, {"_id": oId}, obj, transaction)
    if result:
        CacheHelper.deleteCacheByOid(self.coll, oId)
    return result
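
A hedged usage sketch, assuming MongoDB-style update documents (dao and oid are hypothetical); the method injects updateAt into $set before delegating, so callers never set it themselves.

dao.update(oid, {'$set': {'status': 'done'}})  # updateAt is added to the existing $set
dao.update(oid, {'$inc': {'views': 1}})        # a $set holding only updateAt is created
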
Example #9
def main():
    try:
        for i, message in enumerate(streamer.iter_stream(STREAM_URL)):
            handle_message(message)
            if i and i % 100 == 0:
                log.info('Processed %d messages', i)
    except KeyboardInterrupt:
        log.info('Bye bye!')
    except Exception as e:
        log.exception('Error handling message: %s', e)
        log.warn('Exiting...')
        return 1
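
Since main() returns 1 on error, a standard entry-point guard propagates that as the process exit code (a sketch, not part of the original snippet):

if __name__ == '__main__':
    import sys
    sys.exit(main())  # exits 1 on error, None (i.e. 0) otherwise
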
Example #11
def filterAuTH(self, userId, obj):
    for auth in self.blacklistAuth:
        if self.classname == auth['classname']:
            classHelper = ClassHelper("Blacklist")
            item = classHelper.find_one({
                'user': obj[auth['foreign']],
                "blacker": userId
            })
            if item:
                log.warn("user %s is blacklisted by %s", obj[auth['foreign']],
                         userId)
                return None
    return obj
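
A hedged usage sketch (dao, user_id and record are hypothetical): callers treat a None return as "filtered out" and hide the record.

record = dao.filterAuTH(user_id, record)
if record is None:
    pass  # a blacklist entry matched this record's owner; do not display it
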
Example #12
def _validate(self, field, force=False):
    if not field.input:
        return
    value = None  # ensure 'value' exists for the finally block below
    try:
        # self._set_field_status(field, self.STATUS_LOADING, '')
        value = field.input.get_value()
        if field.validator:
            result = field.validator(field, value)
            value = value if result is None else result
            log.info('Validation passed for %s.%s', self.namespace,
                     field.name)
        self._set_field_status(field, self.STATUS_OK, '')
    except Exception as err:
        log.warn('Validation Error for %s.%s: %s', self.namespace,
                 field.name, err)
        self._set_field_status(field, self.STATUS_ERROR, str(err))
    finally:
        log.info('Setting value %s.%s: %s', self.namespace, field.name,
                 value)
        field.value = value
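
A sketch of a validator matching the contract above (port_validator is hypothetical): return None to keep the input value, return a replacement to coerce it, or raise to mark the field STATUS_ERROR.

def port_validator(field, value):
    port = int(value)  # a ValueError here surfaces as a validation error
    if not 0 < port < 65536:
        raise ValueError('port out of range')
    return port  # replaces the raw string with the parsed int
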
Example #13
def match_files(io: IO, input_paths: List[str]):
    """Match found files to analysis file contents"""
    analysis_file = io.get("analysis")
    logline(analysis_file)

    analysis = AnalysisFile(analysis_file)

    mapped: Dict[str, str] = {}
    reverse_map: Dict[str, str] = {}
    for in_path in input_paths:
        file_name = in_path.split("/")[-1].split(".")[0]
        for track_analysis in analysis.tracks:
            if track_analysis.name.lower() in file_name.lower():
                mapped[in_path] = track_analysis.name
                reverse_map[track_analysis.name] = file_name
                break

    logline("came up with the following mapping:")
    logline("")
    for in_path, track_name in mapped.items():
        logline('"{}" -> "{}"'.format(in_path, track_name))

    unmapped_amount: int = 0
    for in_path in input_paths:
        if in_path not in mapped:
            warn('input file "{}" not mapped'.format(in_path))
            unmapped_amount += 1
    for track_analysis in analysis.tracks:
        if track_analysis.name not in reverse_map:
            warn('analysed file "{}" not mapped'.format(track_analysis.name))
            unmapped_amount += 1
    logline("")
    if unmapped_amount > 0:
        try:
            correct = input("is this correct? Y/n")
            if correct.lower() == "n":
                return None
        except KeyboardInterrupt:
            return None

    return analysis, mapped
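
A hedged invocation sketch, assuming io.get("analysis") yields the analysis file path and the inputs are audio takes (the paths are hypothetical):

result = match_files(io, ['takes/01_kick.wav', 'takes/02_snare.wav'])
if result is not None:
    analysis, mapped = result  # None means the user rejected the mapping
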
Example #14
def perform_update(suite, paths):
    """
    Performs an incremental update and merge of a given suite
    """
    info('Checking for updates in %s' % suite)
    # print(paths)
    globalvars.suite = suite

    needsmerge = {}
    needsmerge['downloads'] = []  # all files that have to be downloaded
    regenrelease = False
    c = 0
    for i in repo_order:
        # i = repository name
        needsmerge[i] = {}
        needsmerge[i]['mergelist'] = []

        if paths[c]:
            info('Working on %s repo' % i)
            remote_path = paths[c].replace(spooldir, repos[i]['host'])
            try:
                remote_rel = requests.get(join(remote_path, 'Release'))
            except requests.exceptions.ConnectionError as err:
                warn('Caught exception: "%s". Retrying...' % err)
                return perform_update(suite, paths)

            with open(join(paths[c], 'Release')) as relf:
                local_rel_text = relf.read()

            diffs = {}
            if remote_is_newer(remote_rel.text, local_rel_text):
                download((join(remote_path,
                               'Release'), join(paths[c], 'Release')))
                regenrelease = True

                diffs = compare_dict(parse_release(remote_rel.text),
                                     parse_release(local_rel_text))
            if diffs:
                for k in diffs:
                    if k.endswith('Packages.gz') or k.endswith('Sources.gz'):
                        needsmerge[i]['mergelist'].append(k)
                    rmt = join(paths[c].replace(spooldir, repos[i]['host']), k)
                    loc = join(paths[c], k)
                    dlf = (rmt, loc)
                    needsmerge['downloads'].append(dlf)

        c += 1
        # break

    # download what needs to be downloaded
    if needsmerge['downloads']:
        info('Downloading updates...')
        dlpool = Pool(cpunm)
        dlpool.map(download, needsmerge['downloads'])

    # create union of our Packages.gz and Sources.gz files we will merge
    uni = []
    for i in repo_order:
        uni.append(needsmerge[i]['mergelist'])
    updpkg_list = set().union(*uni)

    # make a list of package lists to feed into merge()
    merge_list = []
    for i in updpkg_list:
        pkgs = []
        for j in repo_order:
            sui = suite
            # append valid aliases
            if repos[j]['aliases']:
                if suite in aliases[repos[j]['name']]:
                    sui = aliases[repos[j]['name']][suite]
                elif repos[j]['skipmissing']:
                    sui = None
                skips = ['jessie-security', 'ascii-security']  # hack
                if j == 'debian' and suite in skips:
                    sui = None

            if sui:
                pkgs.append(join(spooldir, repos[j]['dists'], sui, i))
            else:
                pkgs.append(None)

        merge_list.append(pkgs)

    # perform the actual merge
    if merge_list:
        info('Merging files...')
        mrgpool = Pool(cpunm)
        mrgpool.map(merge, merge_list)

    # generate Release files if we got any new files
    if needsmerge['downloads'] or regenrelease:
        info('Generating Release...')
        gen_release(suite)
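
A hypothetical driver loop, assuming a suites mapping from suite name to the per-repository path list that perform_update() indexes in repo_order position:

for suite, paths in suites.items():
    perform_update(suite, paths)  # e.g. suites = {'stable': [path_a, path_b], ...}
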