Пример #1
0
 def patch(self, package, args):
     """Locate, download and queue a single patch for *package*.

     :param package: configuration package object providing ``patches()``.
     :param args: patch directive arguments; ``args[0]`` is the patch tag
                  (its leading marker character is stripped) and
                  ``args[1:]`` are options passed to ``%{__patch}``.
     :raises error.general: if the patch tag is not found.
     """
     #
     # Scan the patches found in the config file for the one we are
     # after. Infos or tags are lists.
     #
     patches = package.patches()
     url = None
     for p in patches:
         if args[0][1:].lower() == p:
             url = patches[p][0]
             break
     if url is None:
         raise error.general('patch tag not found: %s' % (args[0]))
     #
     # Parse the URL first in the source builder's patch directory.
     #
     patch = download.parse_url(url, '_patchdir', self.config, self.opts)
     #
     # If not in the source builder package check the source directory.
     #
     # NOTE(review): this fallback repeats the '_patchdir' parse with
     # identical arguments, so it cannot resolve anything new; the comment
     # suggests '_sourcedir' may have been intended -- confirm against the
     # download module before changing.
     if not path.exists(patch['local']):
         patch = download.parse_url(url, '_patchdir', self.config, self.opts)
     download.get_file(patch['url'], patch['local'], self.opts, self.config)
     # Build a shell pipeline: decompress (or cat) the patch, then pipe it
     # into %{__patch} with the caller supplied options.
     if 'compressed' in patch:
         patch['script'] = patch['compressed'] + ' ' +  patch['local']
     else:
         patch['script'] = '%{__cat} ' + patch['local']
     patch['script'] += ' | %{__patch} ' + ' '.join(args[1:])
     self.script.append(self.config.expand(patch['script']))
Пример #2
0
 def source(self, name):
     """Return the list of download source entries for *name*.

     Macro defined sources are merged in as these may be overridden by
     user loaded macros. Each returned entry carries a 'script' shell
     fragment that unpacks (or symlinks) the downloaded source.

     :raises error.general: when no source is set for *name*.
     :raises error.internal: when a mapped source macro is missing.
     """
     _map = "source-%s" % (name)
     keys = self.macros.map_keys(_map)
     if not keys:
         raise error.general("no source set: %s (%s)" % (name, _map))
     collected = []
     for key in keys:
         macro = self.macros.get(key, globals=False, maps=_map)
         if macro is None:
             raise error.internal("source macro not found: %s in %s (%s)" % (key, name, _map))
         url = self.config.expand(macro[2])
         src = download.parse_url(url, "_sourcedir", self.config, self.opts)
         download.get_file(src["url"], src["local"], self.opts, self.config)
         if "symlink" in src:
             # Symlink the source into the build's source directory.
             src["script"] = "%%{__ln_s} %s ${source_dir_%s}" % (src["symlink"], name)
         elif "compressed" in src:
             # Zip files unpack as well so do not use tar.
             src["script"] = "%s %s" % (src["compressed"], src["local"])
             if src["compressed-type"] != "zip":
                 src["script"] += " | %{__tar_extract} -"
         else:
             src["script"] = "%%{__tar_extract} %s" % (src["local"])
         collected.append(src)
     return collected
 def patch_setup(self, package, args):
     """Download and queue the patches of the named patch group.

     :param package: configuration package object (part of the directive
                     interface; not used directly here).
     :param args: ``args[1]`` is the patch group name; ``args[2:]`` are
                  the default options for ``%{__patch}``.
     :raises error.general: on a missing URL or a bad --rsb-file option.
     :raises error.internal: when a mapped patch macro is missing.
     """
     name = args[1]
     args = args[2:]
     _map = 'patch-%s' % (name)
     default_opts = ' '.join(args)
     # 'setup' is the directive trigger key in the map, not a patch entry.
     patch_keys = [p for p in self.macros.map_keys(_map) if p != 'setup']
     patches = []
     for p in patch_keys:
         pm = self.macros.get(p, globals = False, maps = _map)
         if pm is None:
             raise error.internal('patch macro not found: %s in %s (%s)' % \
                                      (p, name, _map))
         # Split the macro value into leading '-' options and URL words.
         opts = []
         url = []
         for pp in pm[2].split():
             if len(url) == 0 and pp[0] == '-':
                 opts += [pp]
             else:
                 url += [pp]
         if len(url) == 0:
             raise error.general('patch URL not found: %s' % (' '.join(opts)))
         #
         # Look for --rsb-file as an option we use as a local file name.
         # This can be used if a URL has no reasonable file name the
         # download URL parser can figure out.
         #
         file_override = None
         if len(opts) > 0:
             for o in opts:
                 if o.startswith('--rsb-file'):
                    os_ = o.split('=')
                    if len(os_) != 2:
                        raise error.general('invalid --rsb-file option: %s' % \
                                            (' '.join(opts)))
                    if os_[0] != '--rsb-file':
                        raise error.general('invalid --rsb-file option: %s' % \
                                            (' '.join(opts)))
                    file_override = os_[1]
             # Strip all RSB-internal options before they reach %{__patch}.
             opts = [o for o in opts if not o.startswith('--rsb-')]
         if len(opts) == 0:
             opts = default_opts
         else:
             opts = ' '.join(opts)
         opts = self.config.expand(opts)
         url = self.config.expand(' '.join(url))
         #
         # Parse the URL first in the source builder's patch directory.
         #
         patch = download.parse_url(url, '_patchdir', self.config,
                                    self.opts, file_override)
         #
         # Download the patch
         #
         download.get_file(patch['url'], patch['local'], self.opts, self.config)
         # Decompress (or cat) the patch and pipe it into %{__patch}.
         if 'compressed' in patch:
             patch['script'] = patch['compressed'] + ' ' +  patch['local']
         else:
             patch['script'] = '%{__cat} ' + patch['local']
         patch['script'] += ' | %%{__patch} %s' % (opts)
         self.script_build.append(self.config.expand(patch['script']))
 def source(self, name):
     """Return the list of download source entries for *name*.

     Macro defined sources are merged in as these may be overridden by
     user loaded macros. Each entry gains a 'script' shell fragment used
     to unpack or symlink the downloaded source.

     :raises error.general: when no source is set for *name*.
     :raises error.internal: when a mapped source macro is missing.
     """
     _map = 'source-%s' % (name)
     keys = [k for k in self.macros.map_keys(_map) if k != 'setup']
     if not keys:
         raise error.general('no source set: %s (%s)' % (name, _map))
     collected = []
     for key in keys:
         macro = self.macros.get(key, globals=False, maps=_map)
         if macro is None:
             raise error.internal('source macro not found: %s in %s (%s)' % \
                                      (key, name, _map))
         url = self.config.expand(macro[2])
         src = download.parse_url(url, '_sourcedir', self.config, self.opts)
         download.get_file(src['url'], src['local'], self.opts, self.config)
         if 'symlink' in src:
             # '-' is replaced with '_' for the ${source_dir_...} reference.
             sname = name.replace('-', '_')
             src['script'] = '%%{__ln_s} %s ${source_dir_%s}' % (
                 src['symlink'], sname)
         elif 'compressed' in src:
             # Zip files unpack as well so do not use tar.
             src['script'] = '%s %s' % (src['compressed'], src['local'])
             if src['compressed-type'] != 'zip':
                 src['script'] += ' | %{__tar_extract} -'
         else:
             src['script'] = '%%{__tar_extract} %s' % (src['local'])
         collected.append(src)
     return collected
Пример #5
0
 def source(self, package, source_tag):
     """Return the download entry for the numbered source tag.

     Scans the sources found in the config file for the requested tag.
     Infos or tags are lists; macro defined sources are merged in as
     these may be overridden by user loaded macros.

     :raises error.general: when the tag has no matching source.
     """
     sources = package.sources()
     url = None
     for label in sources:
         suffix = label[len('source'):]
         if suffix.isdigit() and int(suffix) == source_tag:
             url = sources[label][0]
             break
     if url is None:
         raise error.general('source tag not found: source%d' %
                             (source_tag))
     entry = download.parse_url(url, '_sourcedir', self.config, self.opts)
     download.get_file(entry['url'], entry['local'], self.opts,
                       self.config)
     # Build the unpack (or symlink) shell fragment.
     if 'symlink' in entry:
         entry['script'] = '%%{__ln_s} %s ${source_dir_%d}' % (
             entry['symlink'], source_tag)
     elif 'compressed' in entry:
         entry['script'] = (entry['compressed'] + ' ' + entry['local'] +
                            ' | %{__tar_extract} -')
     else:
         entry['script'] = '%{__tar_extract} ' + entry['local']
     return entry
Пример #6
0
 def source(self, package, source_tag):
     """Return the download entry for the numbered source tag.

     Scans the sources found in the config file for the one requested.
     Infos or tags are lists; macro defined sources are merged in as
     they may be overridden by user loaded macros.

     :raises error.general: when the tag has no matching source.
     """
     sources = package.sources()
     url = None
     for key in sources:
         tag = key[len('source'):]
         if not tag.isdigit():
             continue
         if int(tag) == source_tag:
             url = sources[key][0]
             break
     if url is None:
         raise error.general('source tag not found: source%d' % (source_tag))
     src = download.parse_url(url, '_sourcedir', self.config, self.opts)
     download.get_file(src['url'], src['local'], self.opts, self.config)
     # Build the unpack (or symlink) shell fragment.
     if 'symlink' in src:
         src['script'] = '%%{__ln_s} %s ${source_dir_%d}' % (src['symlink'], source_tag)
     elif 'compressed' in src:
         src['script'] = (src['compressed'] + ' ' + src['local'] +
                          ' | %{__tar_extract} -')
     else:
         src['script'] = '%{__tar_extract} ' + src['local']
     return src
Пример #7
0
 def source(self, name):
     """Return the list of source entries for *name*.

     Merge in any macro defined sources as these may be overridden by
     user loaded macros. Each entry gains a 'script' fragment that
     unpacks or symlinks the downloaded file.

     :raises error.general: when no source is set for *name*.
     :raises error.internal: when a mapped source macro is missing.
     """
     _map = 'source-%s' % (name)
     mapped = self.macros.map_keys(_map)
     if not mapped:
         raise error.general('no source set: %s (%s)' % (name, _map))
     result = []
     for key in mapped:
         value = self.macros.get(key, globals = False, maps = _map)
         if value is None:
             raise error.internal('source macro not found: %s in %s (%s)' % \
                                      (key, name, _map))
         url = self.config.expand(value[2])
         src = download.parse_url(url, '_sourcedir', self.config, self.opts)
         download.get_file(src['url'], src['local'], self.opts, self.config)
         if 'symlink' in src:
             src['script'] = '%%{__ln_s} %s ${source_dir_%s}' % (src['symlink'], name)
         elif 'compressed' in src:
             # Zip files unpack as well so do not use tar.
             src['script'] = '%s %s' % (src['compressed'], src['local'])
             if src['compressed-type'] != 'zip':
                 src['script'] += ' | %{__tar_extract} -'
         else:
             src['script'] = '%%{__tar_extract} %s' % (src['local'])
         result.append(src)
     return result
Пример #8
0
 def patch(self, package, args):
     """Locate, download and queue a single patch for *package*.

     :param package: configuration package object providing ``patches()``.
     :param args: ``args[0]`` is the patch tag (its leading marker
                  character is stripped); ``args[1:]`` are options for
                  ``%{__patch}``.
     :raises error.general: if the patch tag is not found.
     """
     #
     # Scan the patches found in the config file for the one we are
     # after. Infos or tags are lists.
     #
     patches = package.patches()
     url = None
     for p in patches:
         if args[0][1:].lower() == p:
             url = patches[p][0]
             break
     if url is None:
         raise error.general('patch tag not found: %s' % (args[0]))
     #
     # Parse the URL first in the source builder's patch directory.
     #
     patch = download.parse_url(url, '_patchdir', self.config, self.opts)
     #
     # If not in the source builder package check the source directory.
     #
     # NOTE(review): the fallback repeats the '_patchdir' parse with the
     # same arguments, so it cannot find anything new; the comment hints
     # '_sourcedir' may have been intended -- confirm before changing.
     if not path.exists(patch['local']):
         patch = download.parse_url(url, '_patchdir', self.config,
                                    self.opts)
     download.get_file(patch['url'], patch['local'], self.opts, self.config)
     # Decompress (or cat) the patch, then pipe it into %{__patch}.
     if 'compressed' in patch:
         patch['script'] = patch['compressed'] + ' ' + patch['local']
     else:
         patch['script'] = '%{__cat} ' + patch['local']
     patch['script'] += ' | %{__patch} ' + ' '.join(args[1:])
     self.script.append(self.config.expand(patch['script']))
Пример #9
0
 def patch_setup(self, package, args):
     """Download and queue the patches of the named patch group.

     :param package: configuration package object (part of the directive
                     interface; not used directly here).
     :param args: ``args[1]`` is the patch group name; ``args[2:]`` are
                  the default options for ``%{__patch}``.
     :raises error.general: on a missing URL or a bad --rsb-file option.
     :raises error.internal: when a mapped patch macro is missing.
     """
     name = args[1]
     args = args[2:]
     _map = 'patch-%s' % (name)
     default_opts = ' '.join(args)
     # 'setup' is the directive trigger key in the map, not a patch entry.
     patch_keys = [p for p in self.macros.map_keys(_map) if p != 'setup']
     patches = []
     for p in patch_keys:
         pm = self.macros.get(p, globals = False, maps = _map)
         if pm is None:
             raise error.internal('patch macro not found: %s in %s (%s)' % \
                                      (p, name, _map))
         # Split the macro value into leading '-' options and URL words.
         opts = []
         url = []
         for pp in pm[2].split():
             if len(url) == 0 and pp[0] == '-':
                 opts += [pp]
             else:
                 url += [pp]
         if len(url) == 0:
             raise error.general('patch URL not found: %s' % (' '.join(args)))
         #
         # Look for --rsb-file as an option we use as a local file name.
         # This can be used if a URL has no reasonable file name the
         # download URL parser can figure out.
         #
         file_override = None
         if len(opts) > 0:
             for o in opts:
                 if o.startswith('--rsb-file'):
                    os_ = o.split('=')
                    if len(os_) != 2:
                        raise error.general('invalid --rsb-file option: %s' % (' '.join(args)))
                    if os_[0] != '--rsb-file':
                        raise error.general('invalid --rsb-file option: %s' % (' '.join(args)))
                    file_override = os_[1]
             # Strip all RSB-internal options before they reach %{__patch}.
             opts = [o for o in opts if not o.startswith('--rsb-')]
         if len(opts) == 0:
             opts = default_opts
         else:
             opts = ' '.join(opts)
         opts = self.config.expand(opts)
         url = self.config.expand(' '.join(url))
         #
         # Parse the URL first in the source builder's patch directory.
         #
         patch = download.parse_url(url, '_patchdir', self.config, self.opts, file_override)
         #
         # Download the patch
         #
         download.get_file(patch['url'], patch['local'], self.opts, self.config)
         # Decompress (or cat) the patch and pipe it into %{__patch}.
         if 'compressed' in patch:
             patch['script'] = patch['compressed'] + ' ' +  patch['local']
         else:
             patch['script'] = '%{__cat} ' + patch['local']
         patch['script'] += ' | %%{__patch} %s' % (opts)
         self.script.append(self.config.expand(patch['script']))
Пример #10
0
 def source(self, name):
     """Return the list of source entries for *name*.

     Merge in any macro defined sources as these may be overridden by
     user loaded macros. Each macro value is split into leading options
     and a URL; a '--rsb-file=NAME' option overrides the local file name
     the URL parser would otherwise derive.

     :raises error.general: on a missing URL or a bad --rsb-file option.
     :raises error.internal: when a mapped source macro is missing.
     """
     _map = 'source-%s' % (name)
     src_keys = [s for s in self.macros.map_keys(_map) if s != 'setup']
     if len(src_keys) == 0:
         raise error.general('no source set: %s (%s)' % (name, _map))
     srcs = []
     for s in src_keys:
         sm = self.macros.get(s, globals = False, maps = _map)
         if sm is None:
             raise error.internal('source macro not found: %s in %s (%s)' % \
                                      (s, name, _map))
         opts = []
         url = []
         for sp in sm[2].split():
             if len(url) == 0 and sp[0] == '-':
                 opts += [sp]
             else:
                 url += [sp]
         if len(url) == 0:
             # Bug fix: the original referenced 'args', which is not
             # defined in this function (NameError); report the macro
             # value instead.
             raise error.general('source URL not found: %s' % (sm[2]))
         #
         # Look for --rsb-file as an option we use as a local file name.
         # This can be used if a URL has no reasonable file name the
         # download URL parser can figure out.
         #
         file_override = None
         if len(opts) > 0:
             for o in opts:
                 if o.startswith('--rsb-file'):
                     os_ = o.split('=')
                     if len(os_) != 2:
                         # Bug fix: 'args' was undefined here too; show
                         # the offending option.
                         raise error.general('invalid --rsb-file option: %s' % (o))
                     if os_[0] != '--rsb-file':
                         raise error.general('invalid --rsb-file option: %s' % (o))
                     file_override = os_[1]
             opts = [o for o in opts if not o.startswith('--rsb-')]
         url = self.config.expand(' '.join(url))
         src = download.parse_url(url, '_sourcedir', self.config, self.opts, file_override)
         download.get_file(src['url'], src['local'], self.opts, self.config)
         if 'symlink' in src:
             # '-' is replaced with '_' for the ${source_dir_...} reference.
             sname = name.replace('-', '_')
             src['script'] = '%%{__ln_s} %s ${source_dir_%s}' % (src['symlink'], sname)
         elif 'compressed' in src:
             #
             # Zip files unpack as well so do not use tar.
             #
             src['script'] = '%s %s' % (src['compressed'], src['local'])
             if src['compressed-type'] != 'zip':
                 src['script'] += ' | %{__tar_extract} -'
         else:
             src['script'] = '%%{__tar_extract} %s' % (src['local'])
         srcs += [src]
     return srcs
Пример #11
0
    def test_download(self) -> None:
        """Download each bearing archive once and extract it.

        (Docstring translated from Japanese: load one data set and
        decompress it.)
        """
        NUM_ARCHIVE = 5
        URL_BASE = "http://data-acoustics.com/wp-content/uploads/2014/06/"
        EXPAND_DIR = pathlib.Path("data")

        for idx in range(1, NUM_ARCHIVE + 1):
            url = URL_BASE + f"hs_bearing_{idx}.zip"
            archive_file = EXPAND_DIR.joinpath(url.split("/")[-1])
            # Skip the download when the archive is already on disk.
            if not archive_file.exists():
                download.get_file(url, str(EXPAND_DIR))
            download.extract(str(archive_file), str(EXPAND_DIR))
Пример #12
0
def get_data(params):
    """Download the Nietzsche corpus and build one-hot training arrays.

    :param params: dict updated in place with 'batch_size' (512) and
                   'cnt_classes' (number of distinct characters).
    :returns: (X, y) where X has shape (samples, maxlen, chars) holding
              one-hot input windows and y has shape (samples, chars)
              holding the one-hot next character.
    """
    params["batch_size"] = 512
    path = get_file(
        'nietzsche.txt',
        origin='https://s3.amazonaws.com/text-datasets/nietzsche.txt')
    # Close the corpus file deterministically instead of leaking the handle.
    with open(path) as corpus:
        text = corpus.read().lower()
    print('corpus length:', len(text))

    chars = sorted(set(text))
    params["cnt_classes"] = len(chars)
    char_indices = dict((c, i) for i, c in enumerate(chars))

    # cut the text in semi-redundant sequences of maxlen characters
    maxlen = 40
    step = 3
    sentences = []
    next_chars = []
    for i in range(0, len(text) - maxlen, step):
        sentences.append(text[i:i + maxlen])
        next_chars.append(text[i + maxlen])

    # Bug fix: np.bool was deprecated and removed in NumPy 1.24; the
    # builtin bool is the supported dtype spelling.
    X = np.zeros((len(sentences), maxlen, len(chars)), dtype=bool)
    y = np.zeros((len(sentences), len(chars)), dtype=bool)
    for i, sentence in enumerate(sentences):
        for t, char in enumerate(sentence):
            X[i, t, char_indices[char]] = 1
        y[i, char_indices[next_chars[i]]] = 1
    return X, y
 def patch_setup(self, package, args):
     """Download and queue the patches of the named patch group.

     :param package: configuration package object (part of the directive
                     interface; not used directly here).
     :param args: ``args[1]`` is the patch group name; ``args[2:]`` are
                  the default options for ``%{__patch}``.
     :raises error.general: when a patch macro has no URL.
     :raises error.internal: when a mapped patch macro is missing.
     """
     name = args[1]
     args = args[2:]
     _map = 'patch-%s' % (name)
     default_opts = ' '.join(args)
     # 'setup' is the directive trigger key in the map, not a patch entry.
     patch_keys = [p for p in self.macros.map_keys(_map) if p != 'setup']
     patches = []
     for p in patch_keys:
         pm = self.macros.get(p, globals=False, maps=_map)
         if pm is None:
             raise error.internal('patch macro not found: %s in %s (%s)' % \
                                      (p, name, _map))
         # Split the macro value into leading '-' options and URL words.
         opts = []
         url = []
         for pp in pm[2].split():
             if len(url) == 0 and pp[0] == '-':
                 opts += [pp]
             else:
                 url += [pp]
         if len(url) == 0:
             raise error.general('patch URL not found: %s' %
                                 (' '.join(args)))
         if len(opts) == 0:
             opts = default_opts
         else:
             opts = ' '.join(opts)
         opts = self.config.expand(opts)
         url = self.config.expand(' '.join(url))
         #
         # Parse the URL first in the source builder's patch directory.
         #
         patch = download.parse_url(url, '_patchdir', self.config,
                                    self.opts)
         #
         # If not in the source builder package check the source directory.
         #
         # NOTE(review): the fallback repeats the '_patchdir' parse with
         # identical arguments, so it cannot find anything new; confirm
         # whether '_sourcedir' was intended.
         if not path.exists(patch['local']):
             patch = download.parse_url(url, '_patchdir', self.config,
                                        self.opts)
         download.get_file(patch['url'], patch['local'], self.opts,
                           self.config)
         # Decompress (or cat) the patch and pipe it into %{__patch}.
         if 'compressed' in patch:
             patch['script'] = patch['compressed'] + ' ' + patch['local']
         else:
             patch['script'] = '%{__cat} ' + patch['local']
         patch['script'] += ' | %%{__patch} %s' % (opts)
         self.script.append(self.config.expand(patch['script']))
Пример #14
0
 def patch_setup(self, package, args):
     """Download and queue the patches of the named patch group.

     :param package: configuration package object (part of the directive
                     interface; not used directly here).
     :param args: ``args[1]`` is the patch group name; ``args[2:]`` are
                  the default options for ``%{__patch}``.
     :raises error.general: when a patch macro has no URL.
     :raises error.internal: when a mapped patch macro is missing.
     """
     name = args[1]
     args = args[2:]
     _map = 'patch-%s' % (name)
     default_opts = ' '.join(args)
     patch_keys = self.macros.map_keys(_map)
     patches = []
     for p in patch_keys:
         pm = self.macros.get(p, globals = False, maps = _map)
         if pm is None:
             raise error.internal('patch macro not found: %s in %s (%s)' % \
                                      (p, name, _map))
         # Split the macro value into leading '-' options and URL words.
         opts = []
         url = []
         for pp in pm[2].split():
             if len(url) == 0 and pp[0] == '-':
                 opts += [pp]
             else:
                 url += [pp]
         if len(url) == 0:
             raise error.general('patch URL not found: %s' % (' '.join(args)))
         if len(opts) == 0:
             opts = default_opts
         else:
             opts = ' '.join(opts)
         opts = self.config.expand(opts)
         url = self.config.expand(' '.join(url))
         #
         # Parse the URL first in the source builder's patch directory.
         #
         patch = download.parse_url(url, '_patchdir', self.config, self.opts)
         #
         # If not in the source builder package check the source directory.
         #
         # NOTE(review): the fallback repeats the '_patchdir' parse with
         # identical arguments, so it cannot find anything new; confirm
         # whether '_sourcedir' was intended.
         if not path.exists(patch['local']):
             patch = download.parse_url(url, '_patchdir', self.config, self.opts)
         download.get_file(patch['url'], patch['local'], self.opts, self.config)
         # Decompress (or cat) the patch and pipe it into %{__patch}.
         if 'compressed' in patch:
             patch['script'] = patch['compressed'] + ' ' +  patch['local']
         else:
             patch['script'] = '%{__cat} ' + patch['local']
         patch['script'] += ' | %%{__patch} %s' % (opts)
         self.script.append(self.config.expand(patch['script']))
Пример #15
0
    def test_matfile(self):
        """Download the test archive and load the contained .mat file.

        (Docstring translated from Japanese: downloads a mat file for
        testing and performs the load.)
        """
        url = "http://data-acoustics.com/wp-content/uploads/2014/06/hs_bearing_1.zip"
        extract_dir = pathlib.Path("data")
        archive_path = extract_dir.joinpath(url.split("/")[-1])
        matfile = extract_dir.joinpath(
            archive_path.stem).joinpath("sensor-20130307T015746Z.mat")

        # download mat file
        if archive_path.exists() is False:
            download.get_file(url, str(extract_dir))
        if matfile.exists() is False:
            download.unzip(str(archive_path), str(extract_dir))

        # load mat file
        var = io.loadmat(str(matfile))
        for key, item in var.items():
            print(key, item)
Пример #16
0
 def patch_setup(self, package, args):
     """Download and queue the patches of the named patch group.

     ``args[1]`` names the group; ``args[2:]`` are the default options
     handed to ``%{__patch}`` when a patch macro supplies none.

     :raises error.general: when a patch macro has no URL.
     :raises error.internal: when a mapped patch macro is missing.
     """
     name = args[1]
     args = args[2:]
     _map = "patch-%s" % (name)
     default_opts = " ".join(args)
     patches = []
     for key in self.macros.map_keys(_map):
         value = self.macros.get(key, globals=False, maps=_map)
         if value is None:
             raise error.internal("patch macro not found: %s in %s (%s)" % (key, name, _map))
         # Leading '-' words are options; the remaining words form the URL.
         opts = []
         url = []
         for word in value[2].split():
             if not url and word[0] == "-":
                 opts += [word]
             else:
                 url += [word]
         if not url:
             raise error.general("patch URL not found: %s" % (" ".join(args)))
         opts = default_opts if not opts else " ".join(opts)
         opts = self.config.expand(opts)
         url = self.config.expand(" ".join(url))
         # Parse the URL first in the source builder's patch directory.
         patch = download.parse_url(url, "_patchdir", self.config, self.opts)
         # If not in the source builder package check the source directory.
         if not path.exists(patch["local"]):
             patch = download.parse_url(url, "_patchdir", self.config, self.opts)
         download.get_file(patch["url"], patch["local"], self.opts, self.config)
         # Decompress (or cat) the patch and pipe it into %{__patch}.
         if "compressed" in patch:
             patch["script"] = patch["compressed"] + " " + patch["local"]
         else:
             patch["script"] = "%{__cat} " + patch["local"]
         patch["script"] += " | %%{__patch} %s" % (opts)
         self.script.append(self.config.expand(patch["script"]))
Пример #17
0
async def help_handler(event):
    """Post any unseen classroom announcements as replies to *event*.

    Already-posted announcement ids are tracked per class in the
    'classes' shelf. Drive attachments are downloaded and re-sent;
    bare link materials are appended to the message text. Always ends
    by raising StopPropagation.
    """
    sent = 0
    shelf = shelve.open("classes")

    for CID in HASHTAGS:
        logging.info(f"Getting for {CID}")
        if not CID in shelf:
            logging.info(f"{CID} not in shelf")
            shelf[CID] = set()
            # Re-open to flush the new key to disk immediately.
            shelf.close()
            shelf = shelve.open("classes")

        ans = classr.get_announcements(CID)

        for ann in ans[::-1]:
            if ann["id"] in shelf[CID]:
                logging.debug("Already posted")
                continue
            else:
                logging.debug(f'Saving {ann["id"]} in shelf')
                # Bug fix: shelve does not persist in-place mutation of a
                # stored value unless opened with writeback=True, so the
                # updated set must be reassigned to the key.
                posted = shelf[CID]
                posted.add(ann["id"])
                shelf[CID] = posted
                shelf.close()
                shelf = shelve.open("classes")

            try:
                text = ann["text"]
            except KeyError:
                # Bug fix: a bare 'except' swallowed every error here;
                # only a missing "text" key should default to "".
                text = ""
            files = []
            for i in ann["materials"]:
                try:
                    file = download.get_file(i["driveFile"]["driveFile"]["id"])
                    file.name = i["driveFile"]["driveFile"]["title"]
                    file.seek(0)
                    files.append(file)
                except KeyError:
                    logging.debug("Key not found")
                    print(i)
                    try:
                        URL = i["link"]["url"]
                        text = f"{text}\n{URL}"
                    except KeyError:
                        logging.debug("No URL found")
            text = f'{text}\n{ann["alternateLink"]}\nUpdate time: {ann["updateTime"]}\n\n#DigitalClass {HASHTAGS[CID]}'
            print(text, files)
            text_m = await event.reply(message=text)
            sent += 1
            for file in files:
                await text_m.reply(file=file)
                sent += 1
            logging.debug("replied")

    await event.reply("No new updates")
    raise events.StopPropagation
Пример #18
0
    def do_POST(self):
        """Handle a POST: record page info, or mirror one URL to the CMS FTP.

        A ``{"PAGEINFO": ...}`` body stores page metadata in the global
        ``page_info``. A ``{"START": ...}`` body checks the given URL,
        downloads the target (or decodes a base64 data URI) and uploads
        it over FTP, updating the global ``current_status`` at each
        stage. Other bodies are ignored.
        """
        global current_status
        global dirlst
        global page_info
        global upl

        self.send_response(200)
        self.allow_CORS()

        # Read and decode the JSON request body.
        length = int(self.headers["Content-Length"])
        self.post_data = self.rfile.read(length)  # bytes
        # print(self.post_data.decode('utf-8'))
        recv = json.loads(self.post_data.decode("utf-8"))

        if type(recv) == dict:
            if "PAGEINFO" in recv:
                page_info = recv["PAGEINFO"]
            elif "START" in recv:

                # try:

                try:
                    url = recv["START"]["URL"]
                except KeyError:
                    url = None

                tag_name = recv["START"]["tagName"]

                # Build the remote directory list from the stored page
                # info; an <a> tag maps to "File", anything else "Image".
                try:
                    new_dirlst = [
                        page_info["prod"],
                        page_info["serverFolder"],
                        (lambda t: ("File" if t == "A" else "Image"))(tag_name),
                    ] + list(
                        map(
                            lambda x: remove_front_trail(ascii_normalize(replace_unsupported_symbols(x))),
                            page_info["pageList"],
                        )
                    )
                    print(time.asctime(), "Page info loaded")
                except (TypeError, KeyError):
                    # page_info missing or malformed: the CMS session is gone.
                    print("Cannot retrieve server number. You need to re-login into the CMS site.")
                    current_status = {"status": "Session Expired"}
                    sys.exit()

                    # Check URL

                current_status = {"status": "Checking"}

                URL = urlcheck.href_filter(url)

                if URL == None:
                    current_status = {"status": "Skipped", "URL": URL}
                else:

                    # Base64 img case:

                    if urlparse(URL).scheme == "data":
                        # Data URI: decode it straight to a local file.
                        filename = base64img.save_to_file(URL, directory=temp)

                        current_status = {"status": "Uploading"}
                        if new_dirlst != dirlst:
                            dirlst = new_dirlst
                            upl.jump(dirlst)

                        # Single retry: on an FTP error re-login, jump back
                        # to the directory and upload once more, then stop.
                        while True:
                            try:
                                upl.upload(filename, directory=temp)
                            except ftplib.all_errors:
                                print(time.asctime(), "Failed to upload {0}. Retrying.".format(filename))
                                upl = upload.Upload()
                                upl.login(ftp_json["server"], user=ftp_json["username"], pswd=ftp_json["password"])
                                upl.jump(dirlst)
                                upl.upload(filename, directory=temp)
                            break

                        URL = "/UserFiles/Servers/" + "/".join(dirlst[1:]) + "/" + filename
                        current_status = {"status": "Done", "URL": URL}
                    else:

                        # Non-Base64 img case:

                        chk = urlcheck.URL(URL, headers=request_headers, verify=cert_path)
                        chk.get_headers()

                        if chk.content_type in not_allowed_type or chk.filename == None:
                            if chk.status_code != None:
                                if chk.status_code // 100 == 4:
                                    current_status = {"status": "Error " + str(chk.status_code), "URL": URL}

                                else:
                                    current_status = {"status": "Skipped", "URL": URL}

                            else:
                                current_status = {"status": "Error"}
                        else:

                            # Download

                            current_status = {"status": "Downloading"}
                            filename = chk.filename
                            download.get_file(
                                chk.final_url,
                                fname=chk.filename,
                                directory=temp,
                                headers=request_headers,
                                verify=cert_path,
                            )

                            # Upload

                            current_status = {"status": "Uploading"}
                            if new_dirlst != dirlst:
                                dirlst = new_dirlst
                                upl.jump(dirlst)

                            # Single retry, as in the base64 branch above.
                            while True:
                                try:
                                    upl.upload(filename, directory=temp)
                                except ftplib.all_errors:
                                    print(time.asctime(), "Re-uploading {0} ...".format(filename))
                                    upl = upload.Upload()
                                    upl.login(ftp_json["server"], user=ftp_json["username"], pswd=ftp_json["password"])
                                    upl.jump(dirlst)
                                    upl.upload(filename, directory=temp)
                                break

                            URL = "/UserFiles/Servers/" + "/".join(dirlst[1:]) + "/" + filename
                            current_status = {"status": "Done", "URL": URL}
            else:

                # except:
                # current_status = {"status": "Error"}

                pass
Пример #19
0
 def gerar_arquivo_zip(self):
     """Return the value of the module-level ``get_file()``.

     (Portuguese name: "generate zip file" -- presumably ``get_file()``
     builds/returns the zip archive; confirm its contract.)
     """
     return get_file()
Пример #20
0
 def source(self, name, strip_components, download_only):
     """Return the list of source entries for *name*.

     Merge in any macro defined sources as these may be overridden by
     user loaded macros. Each macro value is split into leading options
     and a URL; a '--rsb-file=NAME' option overrides the local file name
     the URL parser would otherwise derive.

     :param strip_components: when > 0, '--strip-components N' is added
                              to the tar extract command.
     :param download_only: when True only download the sources; no
                           unpack scripts are built and the returned
                           list is empty.
     :raises error.general: on a missing URL or a bad --rsb-file option.
     :raises error.internal: when a mapped source macro is missing.
     """
     _map = 'source-%s' % (name)
     src_keys = [s for s in self.macros.map_keys(_map) if s != 'setup']
     if len(src_keys) == 0:
         raise error.general('no source set: %s (%s)' % (name, _map))
     srcs = []
     for s in src_keys:
         sm = self.macros.get(s, globals=False, maps=_map)
         if sm is None:
             raise error.internal('source macro not found: %s in %s (%s)' % \
                                      (s, name, _map))
         opts = []
         url = []
         for sp in sm[2].split():
             if len(url) == 0 and sp[0] == '-':
                 opts += [sp]
             else:
                 url += [sp]
         if len(url) == 0:
             # Bug fix: the original referenced 'args', which is not
             # defined in this function (NameError); report the macro
             # value instead.
             raise error.general('source URL not found: %s' % (sm[2]))
         #
         # Look for --rsb-file as an option we use as a local file name.
         # This can be used if a URL has no reasonable file name the
         # download URL parser can figure out.
         #
         file_override = None
         if len(opts) > 0:
             for o in opts:
                 if o.startswith('--rsb-file'):
                     os_ = o.split('=')
                     if len(os_) != 2:
                         # Bug fix: 'args' was undefined here too; show
                         # the offending option.
                         raise error.general('invalid --rsb-file option: %s' % (o))
                     if os_[0] != '--rsb-file':
                         raise error.general('invalid --rsb-file option: %s' % (o))
                     file_override = os_[1]
             opts = [o for o in opts if not o.startswith('--rsb-')]
         url = self.config.expand(' '.join(url))
         src = download.parse_url(url, '_sourcedir', self.config, self.opts,
                                  file_override)
         download.get_file(src['url'], src['local'], self.opts, self.config)
         # NOTE(review): sources are only appended when not download_only,
         # so download-only callers get an empty list -- confirm callers
         # expect that.
         if not download_only:
             if strip_components > 0:
                 tar_extract = '%%{__tar_extract} --strip-components %d' % \
                     (strip_components)
             else:
                 tar_extract = '%{__tar_extract}'
             if 'symlink' in src:
                 sname = name.replace('-', '_')
                 src['script'] = '%%{__ln_s} %s ${source_dir_%s}' % \
                     (src['symlink'], sname)
             elif 'compressed' in src:
                 #
                 # Zip files unpack as well so do not use tar.
                 #
                 src['script'] = '%s %s' % (src['compressed'], src['local'])
                 if src['compressed-type'] != 'zip':
                     src['script'] += ' | %s -f -' % (tar_extract)
             else:
                 src['script'] = '%s -f %s' % (tar_extract, src['local'])
             srcs += [src]
     return srcs