Example #1
    async def _tail(cls, filename: str, num_lines: int) -> list:
        """returns the number of lines defined as a parameter
        or the maximum the file has

        Args:
            filename (str): file name
            num_lines ([type]): number of lines
        Returns:
            list: lines
        """
        linesep = '\r\n' if os.name == 'nt' else '\n'
        lines = []

        async with AIOFile(filename, 'r') as afp:
            async for line in LineReader(afp, line_sep=linesep):
                lines.append(line)
                if num_lines == len(lines):
                    break
        return lines
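
A minimal call-site sketch for the coroutine above; the enclosing class is not shown in the snippet, so the class name LogTailer and the file app.log are placeholders (it assumes _tail is decorated as a classmethod):

import asyncio

async def main():
    # Returns at most 10 lines, or every line the file has if it is shorter.
    lines = await LogTailer._tail('app.log', 10)
    print(lines)

asyncio.run(main())

Example #2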
async def save_tracked_orders(dt, trackedOrders):
    filename = None  # defined up front so the except block can always reference it
    try:
        if len(Settings.tracked_orders.trackedOrders) == 0:
            return

        filename = get_tracked_order_filename(dt)

        data = json.dumps(trackedOrders, default=lambda o: o.__dict__, sort_keys=True, indent=4)

        async with AIOFile(filename, 'w+') as afp:
            await afp.write(data)
            await afp.fsync()

            LOGGER.info(f'Saved {len(Settings.tracked_orders.trackedOrders)} tracked orders in file {filename}')

    except Exception as ex:
        LOGGER.error(f'Failed to save tracked orders to file {filename}')
        LOGGER.error('An unexpected error occurred while saving tracked orders: {}'.format(ex))
        LOGGER.fatal(ex, exc_info=True)
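Example #3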
async def chunk_stories_from_file(file: str, batch_size: int = 100) -> AsyncIterator[Tuple[List[str], List[int]]]:
    """Asynchronously yield batches of line-separated stories, paired with
    their 1-based line numbers.
    """
    line_count = 1
    lines = []
    story_nums = []
    async with AIOFile(file, mode="rb") as f:
        async for line in LineReader(f):
            line = line.decode('utf-8', errors="ignore")
            line = line.replace("<newline>", "")
            lines.append(line)
            story_nums.append(line_count)
            line_count += 1
            if len(lines) == batch_size:
                yield lines, story_nums
                lines = []
                story_nums = []

    yield lines, story_nums
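
Because chunk_stories_from_file is an async generator, it is consumed with async for; a small sketch (stories.txt is a placeholder file name):

import asyncio

async def main():
    async for lines, story_nums in chunk_stories_from_file("stories.txt", batch_size=50):
        # Each batch pairs up to 50 decoded lines with their 1-based line numbers;
        # the final batch may be smaller or empty.
        print(f"got {len(lines)} stories starting at line {story_nums[0] if story_nums else '-'}")

asyncio.run(main())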
Example #4
    async def test_sia_mirror_2_blocks(self):
        storage = ss.SiapathStorage(self.session,
                                    TestSiaOperations.TEST_DIR,
                                    default_block_size=20 * 1000 * 1000)
        await storage.update()
        async with AIOFile('40MiBempty.img', mode='rb') as afp:
            reference = await afp.read()
            async for status in ss.siapath_mirror(storage, afp):
                pass

        uploaded = b''
        async for chunk in storage.download(0):
            uploaded += chunk
        async for chunk in storage.download(1):
            uploaded += chunk
        self.assertEqual(uploaded, reference)

        await storage.delete(0)
        await storage.delete(1)
Example #5
async def buffer_to_file(filename, buffer):
    """Writes the PCAP every .2 seconds to avoid synchronous writes."""
    file = None

    async with AIOFile(str(filename), 'ab') as pcap:
        while True:
            await asyncio.sleep(1)

            b = buffer.getvalue()
            buffer.truncate(0)
            buffer.seek(0)

            if not b:
                print("No data.")
                continue

            print(f"Has {len(b)} bytes.")

            await pcap.write(b)
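
A hedged sketch of how the coroutine above could be wired up: a producer writes packet bytes into a shared io.BytesIO buffer while buffer_to_file drains it. The fake_capture producer is a stand-in, not part of the original example, and both tasks run until interrupted:

import asyncio
import io

async def fake_capture(buffer):
    # Stand-in producer: appends dummy bytes to the shared buffer.
    while True:
        buffer.write(b"\x00" * 64)
        await asyncio.sleep(0.2)

async def main():
    buffer = io.BytesIO()
    await asyncio.gather(
        fake_capture(buffer),
        buffer_to_file("capture.pcap", buffer),
    )

asyncio.run(main())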
Example #6
async def updateStylesInFile(filepath, styleUpdates, game, componentGamedata,
                             pieceGamedata):
    if filepath is None:
        print("filepath cannot be None.")
        return

    if styleUpdates is None:
        print("styleUpdates cannot be None.")
        return

    if game is None:
        print("game cannot be None.")
        return

    if componentGamedata is None:
        print("componentGamedata cannot be None.")
        return

    if pieceGamedata is None:
        print("pieceGamedata cannot be None.")
        return

    try:
        parsedTree = ET.parse(filepath)
        tree = parsedTree.getroot()

        for styleUpdate in styleUpdates:
            findById = styleUpdate["id"]
            elementToUpdate = tree.find(".//*[@id='%s']" % findById)
            if elementToUpdate is not None:
                value = await getScopedValue(styleUpdate, game,
                                             componentGamedata, pieceGamedata)
                await replaceStyleAttributeForElement(elementToUpdate, "style",
                                                      styleUpdate["cssValue"],
                                                      value)
            else:
                print("Could not find element with id [%s]." % (findById))

        async with AIOFile(filepath, 'wb') as f:
            await f.write(ET.tostring(tree))
    except ET.ParseError as pe:
        print("Production failed!", pe, filepath)
Example #7
async def test_write_returned_negative(temp_file, loop):
    ctx = asynctest.Mock(caio.AbstractContext)
    ctx.loop = loop
    ctx.fdsync = asynctest.CoroutineMock(return_value=None)
    ctx.write = asynctest.CoroutineMock(side_effect=asyncio.InvalidStateError)

    async with AIOFile(temp_file, 'w', context=ctx) as afp:
        return_iter = iter((3, -27))
        ctx.write.side_effect = lambda *_, **__: next(return_iter)
        with pytest.raises(OSError) as raises:
            await afp.write('aiofile')
        assert raises.value.errno == 27
        assert raises.value.filename == temp_file

        ctx.write.reset_mock(side_effect=True)
        ctx.write.return_value = -122
        with pytest.raises(OSError) as raises:
            await afp.write('aiofile')
        assert raises.value.errno == 122
        assert raises.value.filename == temp_file
Example #8
async def get_skiplist():
    skiplist = {}
    path = "skiplist.txt"
    try:
        async with AIOFile(path, "r") as In:
            async for line in LineReader(In):
                line = line.strip()
                if line == "":
                    continue
                s = line.split(" ", maxsplit=1)
                if len(s) != 2:
                    continue
                debug_id, debug_file = s
                skiplist[debug_id] = debug_file.lower()
    except FileNotFoundError:
        pass

    log.debug(f"{path} contains {len(skiplist)} items")

    return skiplist
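
Based on the parsing above (each non-empty line is split on its first space), skiplist.txt is expected to hold one "<debug_id> <debug_file>" pair per line; an illustrative sample with placeholder values:

SOME_DEBUG_ID_1 module_one.pdb
SOME_DEBUG_ID_2 module_two.pdb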
Example #9
async def read_from_socket(host, port):
    timer = 0
    reader, writer = None, None
    async with AIOFile("text.txt", 'a') as _file:
        while True:
            try:
                if not reader or not writer:
                    reader, writer = await asyncio.open_connection(host=host, port=port)
                text = await reader.readline()
                time_now = datetime.datetime.now().strftime("%y.%m.%d %H.%M")
                await _file.write('[{}] {}'.format(time_now, text.decode("utf-8")))
                print(text.decode("utf-8"))
            except (ConnectionRefusedError, ConnectionResetError):
                logging.warning('sleep %s seconds', 2 ** timer)
                await asyncio.sleep(2 ** timer)
                reader, writer = None, None
                timer += 1
            except asyncio.CancelledError:
                writer.close()
                raise
Example #10
    async def start(self):
        """
        Starts the livelogging
        """
        logger.debug("build-{}: starting buildlogger".format(self.build_id))
        self.__up = True
        while self.__up:
            try:
                async with AIOFile(str(self.__filepath), "rb") as log_file:
                    reader = Reader(log_file, chunk_size=16384)
                    retries = 0
                    while self.__up:
                        async for data in reader:
                            message = {
                                "event": Event.added.value,
                                "subject": Subject.buildlog.value,
                                "data": str(data, 'utf-8', errors="ignore")
                            }
                            await self.__sender(json.dumps(message))

                        # EOF
                        if retries % 100 == 0:
                            retries = 0
                            if self.check_abort():
                                self.stop()
                                break
                        retries += 1
                        await asyncio.sleep(.1)
            except FileNotFoundError:
                await asyncio.sleep(1)
                self.check_abort()
            except Exception as exc:
                logger.error("buildlogger: error sending buildlogs")
                logger.exception(exc)
                self.stop()

        message = {
            "subject": Subject.buildlog.value,
            "event": Event.done.value
        }
        await self.__sender(json.dumps(message))
Example #11
    async def _get_blob_gh(ghclient, sem, path, url, cache_root):
        """Asynchronously get a blob from the Git tree through the GitHub
        API.

        This method should only be called from `SalXmlRepo.from_github`.
        """
        async with sem:
            data = await ghclient.getitem(url)
            try:
                content = base64.b64decode(data['content'])
            except KeyError:
                print('keyerror')
                print(data)
                raise
        # Write to cache
        cache_path = cache_root \
            / (base64.b64encode(url.encode()).decode('utf-8') + '.xml')
        async with AIOFile(cache_path, 'wb') as f:
            await f.write(content)
            await f.fsync()
        return path, content
Example #12
File: load.py Project: ppsirg/krai
async def check_server_load(settings):
    """Register server load
    """
    print(f"{settings['interval']} for load")
    await sleep(settings['interval'])
    mem = psutil.virtual_memory()
    server_load = {
        'time': time(),
        'cpu': psutil.cpu_percent(),
        'memory': {
            'percent': mem.percent,
            'used': mem.used,
            'free': mem.free,
            'buffers': mem.buffers,
            'cached': mem.cached,
        }
    }
    file_name = datetime.now().strftime(
        f'{settings["file_prefix"]}_%Y_%m_%d.log')
    async with AIOFile(file_name, 'a') as lg:
        await lg.write(f'{server_load}\n')
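Example #13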
async def analyze_sec(data, pool):
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            path = 'SEC_STATS/'
            csv_file = f'{data[0]}_{data[1]}.csv'

            await cur.callproc('sec', data)
            rows = await cur.fetchall()

            # Create the directory 'SEC_STATS'
            mask = os.umask(0)
            os.makedirs(path, exist_ok=True)
            os.umask(mask)

            # Write the analyzed document into the directory 'SEC_STATS'
            records = ''
            async with AIOFile(path + csv_file, 'w+') as afp:
                for row in rows:
                    row = map(str, row)
                    record = '|'.join(row)
                    records += record + '\n'

                await afp.write(records)
                await afp.fsync()

            await cur.execute(f"""
                load data local infile 'SEC_STATS/{csv_file}'
                into table sec_stats
                fields terminated by '|'
                lines terminated by '\n'
                ignore 1 lines
                (
                    cik, quarter, security, value, shares, weighting
                );
                """)

            print(
                f'stats.py: sec with cik "{data[0]}" on quarter "{data[1]}" analyzed'
            )
            return data
Example #14
    async def asetitem(self, k, v):
        """

        >>> from dol.filesys import mk_tmp_dol_dir
        >>> import os
        >>>
        >>> rootdir = mk_tmp_dol_dir('test')
        >>> rpath = lambda *p: os.path.join(rootdir, *p)
        >>> s = AioFileBytesPersister(rootdir)
        >>> k = rpath('foo')
        >>> if k in s:
        ...     del s[k]  # delete key if present
        ...
        >>> n = len(s)  # number of items in store
        >>> await s.asetitem(k, b'bar')
        >>> assert len(s) == n + 1  # there's one more item in store
        >>> assert k in s
        >>> assert (await s[k]) == b'bar'
        """
        async with AIOFile(k, **self._write_open_kwargs) as fp:
            await fp.write(v)
            await fp.fsync()
Example #15
async def debchanges_get_files(sourcepath, sourcename, version, arch="source"):
    changes_file = get_debchanges_filename(sourcepath, sourcename, version,
                                           arch)
    files = []
    try:
        async with AIOFile(changes_file, "rb") as f:
            data = await f.read()
            file_tag = False
            for line in str(data, 'utf-8').split('\n'):
                line = line.rstrip()
                if not file_tag:
                    if line == "Files:":
                        file_tag = True
                else:
                    if not line.startswith(" "):
                        break
                    line = line.lstrip()
                    parts = line.split(" ")
                    files.append(parts[4])
    except Exception as exc:
        logger.exception(exc)
    return files
Example #16
async def send_and_gen_sentence(*args):
    file, peer_id = args
    if not os.path.exists(file):
        message = "База слов для этой беседы ещё не существует"
        await get_api().messages.send(
            peer_id=peer_id, message=message, random_id=0
        )
        return
    async with AIOFile(file, encoding="utf-8") as f:
        text = await f.read()
        text_model = [sample.strip() for sample in text.split(",")]
    generator = mc.StringGenerator(samples=text_model)
    message = generator.generate_string(
        attempts=20,
        validator=mc.validators.words_count(minimal=1, maximal=15),
        formatter=mc.formatters.usual_syntax if USUAL_SYNTAX else None,
    )
    if not message:
        message = "База слов слишком мала для генерации"
    await get_api().messages.send(
        peer_id=peer_id, message=message, random_id=0
    )
Example #17
async def buildlog_writer(build_id):
    filename = get_log_file_path(build_id)
    if not filename:
        logger.error("buildlog_writer: cannot get path for build %s", str(build_id))
        del buildlogs[build_id]
        return
    try:
        async with AIOFile(filename, 'a') as afp:
            writer = Writer(afp)
            while True:
                msg = await dequeue(buildlogs[build_id])
                if msg is None:
                    await enqueue_backend({"logging_done": build_id})
                    continue
                elif msg is False:
                    break
                await writer(msg)
                await afp.fsync()
    except Exception as exc:
        logger.exception(exc)

    del buildlogs[build_id]
Example #18
    async def top_reacters(self, ctx: commands.Context) -> None:
        try:
            async with AIOFile("db/reacts.json", "r", encoding="utf-8") as f:
                r = json.loads(await f.read())
        except FileNotFoundError:
            raise CommandError(
                "Bot has no message reaction statistics!\n"
                f"Run `{self.bot.command_prefix}getreacts` to fetch it.")
        r = sorted([(id, user) for id, user in r.items()],
                   key=lambda i: i[1]["total_reacts"],
                   reverse=True)

        reacters = {}

        for reacter_id, reacter in r:
            reacters[int(reacter_id)] = User.fromdict(reacter)

        # Print top 5 reacters
        fields = []
        for idx, (uid, user) in enumerate(reacters.items(), start=1):
            u = self.bot.get_user(int(uid))
            if not u or not user.reacts:
                continue

            emoji = next(iter(user.reacts.values()))
            fields.append(
                EmbedField(
                    name=u.name,
                    value=
                    f"Total reacts: {user.total_reacts}\nFavorite emoji: <:{emoji.name}:{emoji.id}>"
                ))
            if idx == 5:
                break

        embed = await self.get_embed(ctx,
                                     title="Top Reacters",
                                     description="_",
                                     fields=fields)
        await ctx.send(embed=embed)
Example #19
    async def convert_to_yml(self, ctx=None, **variables) -> str:
        """
        Convert template to single docker-compose file text
        :param ctx: Context for filters
        :param variables: Variables to replace
        :return:
        """
        async with AIOFile(self.path / 'docker-compose.yml') as afp:
            compose_text = await afp.read()

        ni = StackerNodeInfo(self.path)
        info = await ni.load()
        compose_vars = info['vars']

        for k, v in compose_vars.items():
            compose_vars[k]['value'] = None
            # 1. Value = default if applicable
            if 'default' in v:
                compose_vars[k]['value'] = v['default']
            # 2. Override from kwargs
            if k in variables:
                compose_vars[k]['value'] = str(
                    variables[k] if variables[k] else '').replace('$', '$$')
            # 3. Apply filters
            compose_vars[k].update(await self._apply_component(
                compose_vars[k], 'filters', compose_vars[k]['value'], ctx))

        # Replace vars
        repl = {k: v['value'] for k, v in compose_vars.items()}

        compose_text = envsubst(compose_text, **repl)
        processed = StackerNodeInfo(compose_text)
        w = await processed.load()
        w['vars'] = repl
        w['from_template'] = self.name
        w.update(ctx)
        compose_text = await processed.save(w)

        return compose_text
Example #20
async def get_documents(number, total_count, file_name):
    async with sem:
        try:
            async with AIOFile(f"{BASE_DIR}\\{file_name}",
                               "r",
                               encoding="utf-8") as fd:
                data = await fd.read()
                case = Case(_id=file_name, html_document=data)
                for i, doc in enumerate(case.linked_documents):
                    response = requests.get(BASE_URL + doc)
                    if response.status_code == 200:
                        with open(
                                f"{CHILD_DIR}\\{file_name.split('.')[0]}__{i}.html",
                                "w",
                                encoding="utf-8") as f:
                            f.write(response.text)
                sleep_time = randint(20, 60) / 10
                logger.info(
                    f"Filename[{number}/{total_count}]: {case.id} / {case.linked_documents} finished. Sleep for [{sleep_time}]"
                )
                await asyncio.sleep(sleep_time)
        except Exception as error:
            logger.critical(error)
Example #21
File: util.py Project: jgmel/hyperglass
async def write_env(variables):
    """Write environment variables to temporary JSON file.

    Arguments:
        variables {dict} -- Environment variables to write.

    Raises:
        RuntimeError: Raised on any errors.
    """
    from aiofile import AIOFile
    import json

    env_file = Path("/tmp/hyperglass.env.json")  # noqa: S108
    env_vars = json.dumps(variables)

    try:
        async with AIOFile(env_file, "w+") as ef:
            await ef.write(env_vars)
            await ef.fsync()
    except Exception as e:
        raise RuntimeError(str(e))

    return f"Wrote {env_vars} to {str(env_file)}"
Example #22
async def check_connectivity(settings):
    """Register server connectivity.
    """
    timeoutLapse = settings['interval']
    await sleep(settings['interval'])
    print(f"{settings['interval']} for connection")
    try:
        timeout = aiohttp.ClientTimeout(total=timeoutLapse)
        async with aiohttp.ClientSession(timeout=timeout) as session:
            pings = [ping(session, url) for url in settings['checks']]
            results = await gather(*pings)
    except TimeoutError as err:
        print(f'el ping falló por timeout: {err}')
        now = datetime.now().strftime('%Y_%m_%d-%H:%M:%S')
        results = [{
            'url': url,
            'code': 502,
            'length': 0,
            'time': timeoutLapse,
            'datetime': now
        } for url in settings['checks']]
    except Exception as err:
        print(f'el ping falló porque: {err}')
        now = datetime.now().strftime('%Y_%m_%d-%H:%M:%S')
        results = [{
            'url': url,
            'code': 502,
            'length': 0,
            'time': 0,
            'datetime': now
        } for url in settings['checks']]
    finally:
        file_name = datetime.now().strftime(
            f'{settings["file_prefix"]}_%Y_%m_%d.log')
        async with AIOFile(file_name, 'a') as lg:
            await lg.write(f'{results}\n')
        print(['{url}: [{code}]'.format(**a) for a in results])
Example #23
    async def __call__(self, datum: str, timestamp: float, uuid: str):
        datum = json.loads(datum)
        if 'maker_order_id' not in datum:
            return
        datum.pop('type')
        datum.pop('maker_order_id')
        datum.pop('taker_order_id')
        datum = json.dumps(datum)

        self.data.append(datum)
        print(f'{sym} data is ', len(self.data), ' out of ', self.length)
        if len(self.data) > self.length:
            p = f"{self.path}/{sym}_USD.{uuid}.{self.file_counter}"
            async with AIOFile(p, mode='a') as fp:
                print('writing file', p)
                r = await fp.write('\n'.join([f'{d}' for d in self.data]) + '\n', offset=self.pointer)
                self.pointer += r
                self.data = []
                print('file written. pointer is ', self.pointer, ' out of ', self.rotate)
            
                if self.pointer >= self.rotate:
                    print('rotating file')
                    self.pointer = 0
                    self.file_counter += 1
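
The rotation logic above relies on AIOFile.write accepting an explicit offset and returning the number of bytes written (tracked via self.pointer). A minimal standalone sketch of that pattern; the file name ticks.txt and the records are placeholders:

import asyncio
from aiofile import AIOFile

async def append_records(path, records):
    pointer = 0
    async with AIOFile(path, mode='w') as fp:
        for record in records:
            # write() returns the number of bytes written; advance the offset
            # manually so each record lands right after the previous one.
            written = await fp.write(record + '\n', offset=pointer)
            pointer += written
        await fp.fsync()

asyncio.run(append_records('ticks.txt', ['{"price": 1}', '{"price": 2}']))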
Example #24
File: friend.py Project: monokrome/slick
    async def get_file(self, *, path, size, target):
        connection = self.connection()
        if not connection or not connection.active:
            logger.debug(f"cannot get connection {connection}")
            return False
        else:
            loop = asyncio.get_event_loop()
            queue = asyncio.Queue()
            chunk_count = math.ceil(size / file_chunk_size)

            for i in range(chunk_count):
                await queue.put(i)

            async with aiofiles.open(target, "wb") as fh:
                await fh.truncate(size)

            with tqdm(total=size, unit="B", unit_scale=True) as bar:
                async with AIOFile(target, "wb") as fh:
                    for i in range(concurrency):
                        worker = Worker(queue, fh, connection, size, path, bar)
                        loop.create_task(worker.run())

                    await queue.join()
                    await fh.fsync()
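Example #25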
    async def save(self, path: str):
        """Saves the image in the `path`.

        Parameters
        ----------
        path : str
            The path where the image will be saved.

        Notes
        -----
        THE `aiofile` LIBRARY IS NEEDED IN ORDER TO USE THIS METHOD.
        USE `sync_save` IF YOU DO NOT WANT TO INSTALL THE `aiofile` LIBRARY.
        """
        if not has_aiofile:
            raise RuntimeError(
                "aiofile library is needed in order to use this method.")

        path = join(path, self.full_name)
        async with AIOFile(path, "wb") as file:
            try:
                await file.write(self.bytes)
            except ValueError:
                raise RuntimeError(
                    "you need to get the image bytes to use this method")
Example #26
async def handle_connection(host, port_to_read, port_to_write, token, filepath, queues):
    while True:
        # Open new stream.
        async with get_stream(host, port_to_write, queues, gui.SendingConnectionStateChanged.CLOSED) as (reader, writer):
            queues['watchdog'].put_nowait('Connection is alive. Prompt before auth')
            # Authorization.
            nickname = await auth(reader, writer, token, queues)
            queues['statuses'].put_nowait(gui.NicknameReceived(nickname))
            queues['messages'].put_nowait(
                'WELCOME BACK {}!\nLoading chat history and connecting to chat '
                'in {} seconds...\n'.format(nickname, DELAY_TO_LOAD_HISTORY))
            await asyncio.sleep(DELAY_TO_LOAD_HISTORY)
            # Read chat history.
            async with AIOFile(filepath, 'a+') as afp:
                history = await afp.read()
                if history:
                    queues['messages'].put_nowait('*** CHAT HISTORY\n{}***\n'.format(history))
            # Run grandchildren tasks.
            async with create_handy_nursery() as nursery:
                nursery.start_soon(log_msgs(filepath, queues))
                nursery.start_soon(read_msgs(host, port_to_read, queues))
                nursery.start_soon(send_msgs(writer, queues))
                nursery.start_soon(watch_for_connection(queues))
                nursery.start_soon(ping_pong(reader, writer))
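Example #27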
async def download_document_and_save(session: ClientSession, url: str,
                                     dir: str) -> None:
    """
    This function downloads a document from a given link.

    This function uses asyncronous calls to obtain a downloadable object from a link and
    saves it at a certain location in the current computer

    Parameters:
    session (ClientSession): The async session used to obtain the webpage
    url (str): The url of the webpage to convert
    dir (str): The directory where to save this document

    Returns:
    None
    """
    async with session.get(url) as response:
        try:
            if response.status == 200:  # Document still exists
                document = await response.read()
                async with AIOFile(dir, 'wb') as downloaded_file:
                    await downloaded_file.write(document)
        except Exception as e:
            return
Example #28
    async def timer_loop(self):

        # Grab the UID of channel 0
        headers = {
            "client-id": get_client_id(),
            "Authorization": f"Bearer {get_oauth(remove_prefix=True)}",
        }

        channel = cfg.channels[0]

        async with ClientSession() as session:
            params = [("login", channel)]
            r = await session.get(
                url=f"https://api.twitch.tv/helix/users?login={channel}",
                headers=headers)
            data = await r.json()
            try:
                channel_id = data["data"][0]["id"]
            except KeyError:
                print(data["error"], data["status"], data["message"])
                return

        while True:
            while bot.live:
                try:
                    # try/except keeps the loop from crashing on unexpected errors.
                    url = "https://tmi.twitch.tv/hosts"
                    params = [("include_logins", "1")]

                    # Add channel message came from to the parameters
                    params.append(("target", channel_id))

                    async with ClientSession() as session:
                        response = await session.get(url=url,
                                                     params=params,
                                                     headers=headers)

                        if response.status == 401:
                            print("OAuth token error.")
                            return

                        # Get the response as a json object.
                        response_js = await response.json()
                        hosts = response_js["hosts"]

                        if len(hosts) > 0:  # Being hosted
                            for host in hosts:
                                hoster = host["host_display_name"]
                                if hoster not in self.hosters:
                                    self.hosters.append(hoster)
                                    print(f"Hosted by {hoster}")

                                    # Send to channel
                                    await channels[
                                        cfg.channels[0]
                                    ].send_message(
                                        f"{bot.msg_prefix} Thanks for hosting {hoster}"
                                    )

                                    # Log to json file
                                    async with AIOFile(
                                        f"irc_logs/{date.today().isoformat()}-{cfg.channels[0]}-hosted.log",
                                        "a") as afp:
                                        await afp.write(
                                            f"{datetime.now().isoformat()}:{hoster} \n"
                                        )
                                        await afp.fsync()

                                    # Don't spam the channel
                                    await sleep(1)

                            # Overwrite the hosters seen so if someone stops hosting they are removed from the list
                            self.hosters = [
                                host["host_display_name"] for host in hosts
                            ]

                    await sleep(60)  # Only check once a minute

                except Exception as e:
                    print(e)

            await sleep(60)  # Sleep before checking if live again.
Example #29
File: util.py Project: jgmel/hyperglass
async def build_frontend(  # noqa: C901
    dev_mode: bool,
    dev_url: str,
    prod_url: str,
    params: dict,
    app_path: Path,
    force: bool = False,
):
    """Perform full frontend UI build process.

    Securely creates temporary file, writes frontend configuration
    parameters to file as JSON. Then writes the name of the temporary
    file to /tmp/hyperglass.env.json as {"configFile": <file_name> }.

    Webpack reads /tmp/hyperglass.env.json, loads the temporary file,
    and sets its contents to Node environment variables during the build
    process.

    After the build is successful, the temporary file is automatically
    closed during garbage collection.

    Arguments:
        dev_mode {bool} -- Development Mode
        dev_url {str} -- Development Mode URL
        prod_url {str} -- Production Mode URL
        params {dict} -- Frontend Config parameters

    Raises:
        RuntimeError: Raised if errors occur during build process.

    Returns:
        {bool} -- True if successful
    """
    import hashlib
    import tempfile

    from aiofile import AIOFile
    import json
    from hyperglass.constants import __version__

    env_file = Path("/tmp/hyperglass.env.json")  # noqa: S108

    package_json = await read_package_json()

    env_vars = {
        "_HYPERGLASS_CONFIG_": params,
        "_HYPERGLASS_VERSION_": __version__,
        "_HYPERGLASS_PACKAGE_JSON_": package_json,
        "_HYPERGLASS_APP_PATH_": str(app_path),
    }

    # Set NextJS production/development mode and base URL based on
    # developer_mode setting.
    if dev_mode:
        env_vars.update({
            "NODE_ENV": "development",
            "_HYPERGLASS_URL_": dev_url
        })
    else:
        env_vars.update({
            "NODE_ENV": "production",
            "_HYPERGLASS_URL_": prod_url
        })

    # Check if hyperglass/ui/node_modules has been initialized. If not,
    # initialize it.
    initialized = await check_node_modules()
    if initialized:
        log.debug("node_modules is already initialized")
    elif not initialized:
        log.debug(
            "node_modules has not been initialized. Starting initialization..."
        )
        node_setup = await node_initial(dev_mode)
        if node_setup == "":
            log.debug("Re-initialized node_modules")

    try:
        env_json = json.dumps(env_vars, default=str)

        # Create SHA256 hash from all parameters passed to UI, use as
        # build identifier.
        build_id = hashlib.sha256(env_json.encode()).hexdigest()

        # Read hard-coded environment file from last build. If build ID
        # matches this build's ID, don't run a new build.
        if env_file.exists() and not force:
            async with AIOFile(env_file, "r") as ef:
                ef_json = await ef.read()
                ef_id = json.loads(ef_json).get("buildId", "empty")

                log.debug("Previous Build ID: {id}", id=ef_id)

                if ef_id == build_id:
                    log.debug(
                        "UI parameters unchanged since last build, skipping UI build..."
                    )
                    return True

        # Create temporary file. json file extension is added for easy
        # webpack JSON parsing.
        temp_file = tempfile.NamedTemporaryFile(mode="w+",
                                                prefix="hyperglass_",
                                                suffix=".json",
                                                delete=not dev_mode)
        log.info("Starting UI build...")
        log.debug(
            f"Created temporary UI config file: '{temp_file.name}' for build {build_id}"
        )

        async with AIOFile(temp_file.name, "w+") as temp:
            await temp.write(env_json)
            await temp.fsync()

            # Write "permanent" file (hard-coded named) for Node to read.
            async with AIOFile(env_file, "w+") as ef:
                await ef.write(
                    json.dumps({
                        "configFile": temp_file.name,
                        "buildId": build_id
                    }))
                await ef.fsync()

                # While temporary file is still open, initiate UI build process.
                if not dev_mode or force:
                    initialize_result = await node_initial(dev_mode)
                    build_result = await build_ui(app_path=app_path)

                    if initialize_result:
                        log.debug(initialize_result)
                    elif initialize_result == "":
                        log.debug("Re-initialized node_modules")

                    if build_result:
                        log.success("Completed UI build")
                elif dev_mode and not force:
                    log.debug(
                        "Running in developer mode, did not build new UI files"
                    )

        await migrate_images(app_path, params)

        generate_opengraph(
            Path(params["web"]["opengraph"]["image"]),
            1200,
            630,
            app_path / "static" / "images",
            params["web"]["theme"]["colors"]["black"],
        )

    except Exception as e:
        raise RuntimeError(str(e)) from None

    return True
Example #30
async def get_local_image(path):
    try:
        async with AIOFile(path, "rb") as afp:
            return await afp.read()
    except FileNotFoundError:
        raise ValueError(f"{path} does not exist")
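
A small usage sketch for the helper above (logo.png is a placeholder path):

import asyncio

async def main():
    try:
        data = await get_local_image("logo.png")
        print(f"read {len(data)} bytes")
    except ValueError as exc:
        print(exc)

asyncio.run(main())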