Ejemplo n.º 1
0
def recv_response_from_server(proxy_browser_connection, proxy_as_client_socket,
                              is_cache_required, decoded_request):
    """
    Receive a complete HTTP response from the origin server socket.

    Reads until at least ``Content-Length`` body bytes have arrived (or the
    server closes the connection), optionally caching every received chunk,
    then closes the server socket and returns the full raw response.

    Parameters
    ----------
    proxy_browser_connection :
        Browser-side socket (unused here; kept for caller compatibility).
    proxy_as_client_socket :
        Connected socket to the origin server; closed before returning.
    is_cache_required : bool
        When True, every received chunk is written to the cache.
    decoded_request :
        Cache key for the request this response answers.

    Returns
    -------
    bytes
        The raw response: headers, blank line, and body.
    """
    response = proxy_as_client_socket.recv(4096)
    if is_cache_required:
        cache.write_cache(decoded_request, response, LOCK)

    # Split only at the FIRST blank line: a body that itself contains
    # b'\r\n\r\n' must not be truncated (the old unbounded split made the
    # body-length bookkeeping undercount and the loop below over-read).
    header_bytes, _, body = response.partition(b'\r\n\r\n')
    header = header_bytes.decode('utf-8')

    content_length = int(get_content_length(header))

    while len(body) < content_length:
        chunk = proxy_as_client_socket.recv(4096)
        if not chunk:
            # Server closed the connection before Content-Length bytes
            # arrived; return what we have instead of spinning forever.
            break
        if is_cache_required:
            cache.write_cache(decoded_request, chunk, LOCK)
        body += chunk

    proxy_as_client_socket.close()
    return header.encode('utf-8') + b'\r\n\r\n' + body
def prepare_vivado_folder(project_id):
    """
    Ensure a fresh per-project Vivado folder exists on the build server.

    If the project is not yet in the local cache, the remote folder is
    recreated from the reference template over ssh and an empty component
    list is cached for it.

    Parameters
    ----------
    project_id :
        Identifier used to name the remote folder and the cache entry.

    Raises
    ------
    CompileError
        If the remote copy of the template folder fails.
    """
    if cache.project_exists(project_id):
        # Folder was already prepared for this project; nothing to do.
        return

    # Wipe any stale remote folder and recreate it from the template.
    # (Only the completion handle of the exec thread is needed here.)
    _, _, done = ssh.start_exec_thread(
        f"rm -rf /home/ls2715/vivado_projects/p_{project_id}; "
        f"cp -r /home/ls2715/vivado_template /home/ls2715/vivado_projects/p_{project_id}"
    )
    done.wait()
    if done.returncode > 0:
        raise CompileError(
            "Could not copy the reference Vivado files to the project location"
        )
    # Mark the project as prepared with no compiled components yet.
    cache.write_cache(project_id, [])
Ejemplo n.º 3
0
    def __new_vnic_info():
        """
        Create a new vnic info file

        Returns
        -------
        tuple
            (vnic info timestamp: datetime, vnic info: dict)
        """
        # Default, empty vnic info structure.
        vnic_info = {
            'ns': None,
            'sshd': False,
            'exclude': [],
            'sec_priv_ip': []
        }
        # 0 means "not yet written to disk"; replaced by the cache
        # timestamp once write_cache succeeds below.
        vnic_info_ts = 0

        # migration from oci-utils 0.5's net_exclude file
        excludes = cache.load_cache(VNICUtils.__net_exclude_file)[1]
        if excludes is not None:
            # Carry the old exclude list over and persist immediately so the
            # migration survives even if the API section below fails.
            vnic_info['exclude'] = excludes
            vnic_info_ts = \
                cache.write_cache(cache_content=vnic_info,
                                  cache_fname=VNICUtils.__vnic_info_file)
            try:
                # Best-effort removal of the legacy file; ignore failures
                # (e.g. missing file or insufficient permissions).
                os.remove(VNICUtils.__net_exclude_file)
            except Exception:
                pass

        # can we make API calls?
        oci_sess = None
        if HAVE_OCI_SDK:
            try:
                oci_sess = OCISession()
            except Exception:
                # No usable session (e.g. no credentials); fall back to an
                # info file without secondary private IPs.
                pass
        if oci_sess is not None:
            # Record [address, vnic OCID] pairs for all secondary private
            # IPs of this instance and persist the updated info file.
            p_ips = oci_sess.this_instance().all_private_ips(refresh=True)
            sec_priv_ip = \
                [[ip.get_address(), ip.get_vnic().get_ocid()] for ip in p_ips]
            vnic_info['sec_priv_ip'] = sec_priv_ip
            vnic_info_ts = \
                cache.write_cache(cache_content=vnic_info,
                                  cache_fname=VNICUtils.__vnic_info_file)
        return vnic_info_ts, vnic_info
Ejemplo n.º 4
0
def get_growth_data(year, quarter):
    """
    Fetch growth-capability data for one reporting quarter.

    Results are served from a CSV cache when available; otherwise they are
    downloaded, written to the cache, and returned.

    Parameters
    ----------
    year : int
        Reporting year, e.g. 2014.
    quarter : int
        Reporting quarter, one of 1, 2, 3, 4.

    Returns
    -------
    DataFrame or None
        Growth metrics (revenue/profit/asset growth rates, EPS figures,
        stock code and name), de-duplicated by stock code, with codes
        zero-padded to six digits. None when the input check fails or no
        data is available.
    """
    if ct._check_input(year, quarter) is not True:
        return None

    def _normalize(frame):
        # Drop duplicate rows per stock and left-pad codes to six digits.
        frame = frame.drop_duplicates('code')
        frame['code'] = frame['code'].map(lambda c: str(c).zfill(6))
        return frame

    cache_file = "growth_data_%d_%d.csv" % (year, quarter)

    cached = cache.read_cache(cache_file)
    if cached is not None:
        return _normalize(cached)

    # Cache miss: download the raw data and store it before normalizing.
    ct._write_head()
    fetched = _get_growth_data(year, quarter, 1, pd.DataFrame())
    cache.write_cache(fetched, cache_file)
    if fetched is None:
        return fetched
    return _normalize(fetched)
Ejemplo n.º 5
0
def get_debtpaying_data(year, quarter):
    """
    Fetch debt-paying-ability data for one reporting quarter.

    Results are served from a CSV cache when available; otherwise they are
    downloaded, cleaned, written to the cache, and returned.

    Parameters
    ----------
    year : int
        Reporting year, e.g. 2014.
    quarter : int
        Reporting quarter, one of 1, 2, 3, 4.

    Returns
    -------
    DataFrame or None
        Liquidity and leverage ratios (current/quick/cash ratio, interest
        coverage, equity and debt-to-asset ratios) keyed by stock symbol
        and name, de-duplicated with symbols zero-padded to six digits.
        None when the input check fails or no data is available.
    """
    if ct._check_input(year, quarter) is not True:
        return None

    cache_file = "debtpaying_data_%d_%d.csv" % (year, quarter)

    cached = cache.read_cache(cache_file)
    if cached is not None:
        return cached

    # Cache miss: download, then clean before caching.
    ct._write_head()
    frame = _get_debtpaying_data(year, quarter, 1, pd.DataFrame())
    if frame is not None:
        # Remove two unused ratio columns, dedupe by symbol, pad symbols.
        frame = frame.drop('FinancialRatios9', axis=1)
        frame = frame.drop('FinancialRatios18', axis=1)
        frame = frame.drop_duplicates('Symbol')
        frame['Symbol'] = frame['Symbol'].map(lambda s: str(s).zfill(6))
    cache.write_cache(frame, cache_file)
    return frame
Ejemplo n.º 6
0
    def save_vnic_info(self):
        """
        Save self.vnic_info in the vnic_info file.

        Returns
        -------
        int
            The timestamp of the file or None on failure.
        """
        _logger.debug("Saving vnic_info.")
        # write_cache returns the new file timestamp, or None when the
        # write failed.
        vnic_info_ts = cache.write_cache(cache_content=self.vnic_info,
                                         cache_fname=VNICUtils.__vnic_info_file)
        if vnic_info_ts is not None:
            self.vnic_info_ts = vnic_info_ts
        else:
            # Logger.warn() is deprecated; use warning() with lazy %-style
            # arguments instead of eager string formatting.
            _logger.warning("Failed to save VNIC info to %s",
                            VNICUtils.__vnic_info_file)
        return vnic_info_ts
def compile(project_id, verilog_sources, components, log, cancel_event):
    """
    Run a complete remote Vivado build for one project.

    NOTE(review): shadows the builtin ``compile``; kept as-is because
    callers elsewhere use this name.

    Parameters
    ----------
    project_id :
        Identifier naming the remote project folder and local cache entries.
    verilog_sources : dict
        Mapping of filename -> Verilog source text.
    components :
        Iterable of component objects; each is expected to expose ``name``,
        ``register_count()`` and ``has_video_out_port()`` (see usage below).
    log :
        Queue-like progress sink: ``put(str)`` appends a log line and the
        ``progress`` attribute holds a 0-100 percentage.
    cancel_event :
        Event-like object; when set, the build is aborted and an error
        report is written.

    All errors are reported through ``log``; nothing is raised to the
    caller. On exit (success or failure) progress is forced to 100 and the
    project's entry is removed from ``running_threads``.
    """
    try:
        log.put(
            "Build started at " +
            datetime.now(pytz.timezone('Europe/London')).strftime('%H:%M:%S') +
            "\n")

        log.progress = 2
        log.put("Preparing Vivado project folder\n")
        prepare_vivado_folder(project_id)

        log.put("Determining which Verilog modules to compile\n")
        tcl_script = tcl.TclScript(
            f"/home/ls2715/vivado_projects/p_{project_id}/Pynq-Z1/base/base")

        # Record the line span each source file will occupy in the combined
        # Verilog stream, so build errors can be mapped back to files.
        # (Starting offset 4 presumably accounts for generated header
        # lines -- TODO confirm against the file generator.)
        mappings = []
        line_number = 4
        for filename, source in verilog_sources.items():
            lines = len(source.splitlines())
            mappings.append({
                "file": filename,
                "start": line_number,
                "end": line_number + lines
            })
            line_number += lines + 1
        cache.put_source_mapping(project_id, mappings)

        # Remove IP blocks left over from a previous build of this project.
        for c in cache.cached_components(project_id):
            tcl_script.delete_IP(c)

        cache.clear_axi_wrappers(project_id)

        # For each component: generate its AXI wrapper Verilog (inner and
        # outer), cache both, and emit the TCL commands to (re)create and
        # wire up the corresponding IP block.
        for component in components:
            axi_wrapper = verilog.create_wrapper(component,
                                                 verilog_sources.values())
            outer_axi_wrapper = verilog_outer.create_wrapper(component)
            cache.write_axi_wrapper(project_id, component.name, axi_wrapper)
            cache.write_outer_axi_wrapper(project_id, component.name,
                                          outer_axi_wrapper)
            tcl_script.create_IP(component.name, component.register_count())
            tcl_script.edit_IP(
                component.name,
                f"/home/ls2715/vivado_projects/p_{project_id}/wrappers/{component.name}"
            )
            tcl_script.add_IP(component.name, component.has_video_out_port())

        tcl_script.compile()

        log.progress = 5
        log.put("Generating the automated build script for Vivado\n")
        cache.write_tcl_script(project_id, str(tcl_script))

        # Upload wrapper sources only when there is something to compile.
        if len(components) > 0:
            log.put("Uploading the source code to Vivado servers\n")
            sftp.put_file(cache.get_wrappers_dir(project_id),
                          f"/home/ls2715/vivado_projects/p_{project_id}")
        else:
            log.put("Skipping source code upload (no components to compile)\n")

        log.progress = 10
        log.put("Uploading the build script to Vivado servers\n")
        sftp.put_file(
            cache.get_script_path(project_id),
            f"/home/ls2715/vivado_projects/p_{project_id}/script.tcl")

        # Last chance to cancel cheaply, before kicking off the long
        # Vivado run on the remote server.
        if cancel_event.is_set():
            with open(cache.get_report_path(project_id), 'w') as f:
                f.write("ERROR\nBuild cancelled by user")
            raise RuntimeError("Cancelled by user")

        log.progress = 15
        log.put("Updating the local cache with the latest changes\n")
        cache.write_cache(project_id, [c.name for c in components])

        # Run Vivado remotely; its output is streamed into `log` and the
        # cancel event can kill the remote process.
        log.put("\nStarting Vivado build!\n")
        _, _, done = ssh.start_exec_thread(
            f"vivado -mode batch -nojournal -nolog -notrace -source /home/ls2715/vivado_projects/p_{project_id}/script.tcl",
            out_queue=log,
            kill_event=cancel_event,
            intercept_progress=True)

        done.wait()
        log.put(f"Build completed with return code {done.returncode}\n")
        log.progress = 95
        log.put("Downloading the build report from Vivado servers\n")
        sftp.get_file(
            f"/home/ls2715/vivado_projects/p_{project_id}/build_report.txt",
            cache.get_report_path(project_id))

        # Map the build script's exit codes to user-facing errors.
        if done.returncode == 2:
            raise CompileError("Synthesis failed")
        elif done.returncode == 3:
            raise CompileError("Hardware implementation failed")
        elif done.returncode == 4:
            raise CompileError("Syntax error")
        elif done.returncode > 0:
            raise CompileError("Build failed due to unknown error")

        if cancel_event.is_set():
            with open(cache.get_report_path(project_id), 'w') as f:
                f.write("ERROR\nBuild cancelled by user")
            raise RuntimeError("Cancelled by user")

        # Copy the generated bitstream and overlay TCL back to this server.
        log.put("Downloading the generated files from Vivado servers\n")
        sftp.get_file(
            f"/home/ls2715/vivado_projects/p_{project_id}/overlay.tcl",
            cache.get_overlay_tcl_path(project_id))
        sftp.get_file(
            f"/home/ls2715/vivado_projects/p_{project_id}/overlay.bit",
            cache.get_overlay_bit_path(project_id))

        log.put("Generating Python API\n")
        python_api.write_python_api_to_file(
            components, cache.get_python_api_path(project_id))

        log.put("All done.\n")
    except CompileError as e:
        # Expected build failures: message already user-readable.
        log.put(str(e.args[0]) + "\n")
    except RuntimeError as e:
        # Cancellation and similar operational aborts.
        log.put("Build failed: " + str(e.args[0]) + "\n")
    except Exception as e:
        # Unexpected failure: report the type and keep details in /tmp/log.
        log.put("Build failed with a " + type(e).__name__ + "\n")
        with open('/tmp/log', 'a') as f:
            f.write(str(e))

    # Always finish the progress bar and free the thread-tracking slot.
    log.progress = 100
    running_threads.pop(project_id, None)
Ejemplo n.º 8
0
def install(directoryvar,
            download_set,
            cache_obj,
            cache_obj_path=cache.DEFAULT_CACHE,
            cache_loc=cache.DEFAULT_CACHE_LOC,
            progressbar=None,
            progress_labelvar=None,
            finish_trigger_fn=None,
            credit_path=None,
            wait=1.0,
            outstream=sys.stdout):
    """
    Download and install the UML package and selected mods into a WoT folder.

    Steps: validate the chosen directory, detect the newest game version
    under ``res_mods``, download and extract the UML base there, optionally
    copy or symlink an existing ``ownModel.xml`` from an older version,
    download every mod in ``download_set``, then persist the updated
    settings cache and signal completion through the GUI hooks (or a
    message box when none are given).

    Parameters
    ----------
    directoryvar :
        tk.StringVar holding the World of Tanks install directory.
    download_set : iterable of (filename, link)
        Supplementary mods to download into the game's mods folder.
    cache_obj : dict
        Persistent installer settings; mutated and written to
        ``cache_obj_path`` near the end.
    cache_obj_path, cache_loc :
        Settings-cache file path and download-cache directory.
        ``cache_loc`` may be a tk.StringVar or a plain string.
    progressbar, progress_labelvar, finish_trigger_fn :
        Optional GUI hooks; when present, completion/errors are signalled
        through ``finish_trigger_fn`` instead of an info box.
    credit_path :
        When set and this game version was not installed before, a credit
        dialog is shown after installation.
    wait : float
        Delay passed to the downloader between requests.
    outstream :
        Text stream for log messages.
    """
    # check the directory again for good measure
    directory = directoryvar.get()
    if (not check_location(directoryvar, outstream=outstream)):
        messagebox.showerror(
            title="Wrong directory",
            message=
            "Directory {:s} is not a valid WOT instance. Try again. The directory to be used is one where you can see WorldOfTank.exe in the files."
            .format(directory))
        if (progressbar or progress_labelvar
            ):  # if wrong directory, immediately enable exit
            progress_labelvar.set(
                "Error found. Exit, and try again or check your version.")
            finish_trigger_fn(False)
        return
    # make certain cache_loc
    if (isinstance(cache_loc, tk.StringVar)):
        cache_loc = cache_loc.get()
    # progressbar, progress label and finish trigger is checked
    start = time.time()
    #if(progressbar):
    #progressbar.start(100) # 100ms interval
    # step_size = 400 / (len(download_set) + 1)

    session = requests.Session()
    # Download and extract the UML base to correct location (res_mod/vernumber/)
    resmod_folder = os.path.join(directory, "res_mods")
    subfolders = [
        os.path.basename(os.path.normpath(f.path))
        for f in os.scandir(resmod_folder) if f.is_dir()
    ]
    # Version folders are those whose names are made only of digits and
    # dots; the highest one is taken as the current game version.
    valid = sorted(
        [pth for pth in subfolders if all(c in "1234567890." for c in pth)],
        key=lambda x: version.parse(x),
        reverse=True)  # hack to search for game version
    if (len(valid) > 1):
        outstream.write(
            "Multiple game versions found, using the highest({:s} in {})\n".
            format(valid[0], valid))
    elif (len(valid) == 0):
        messagebox.showerror(
            title="No version available",
            message=
            "There is no version detected in the resmod folder (list of folder found: {}). Try to play a battle or something, I dunno."
            .format(subfolders))
        return
    # correct location to install, correct cache zip file
    UML_loc = os.path.join(resmod_folder, valid[0])
    uml_filepath = os.path.join(cache_loc, "src.zip")
    if (progress_labelvar):
        progress_labelvar.set("Downloading and extracting main UML file...")
    if (cache_obj.get("use_drive_UML", 0) == 1):  # Use inbuilt drive file
        filehandler.download(uml_filepath,
                             DRIVE_FILE_LOCATION,
                             progressbar=progressbar,
                             session=session)
    else:  # use the github file
        filehandler.download(uml_filepath,
                             GITHUB_PATTERN_DEFAULT.format(
                                 DEFAULT_REPO, "src.zip"),
                             progressbar=progressbar)
    filehandler.extractZip(uml_filepath, UML_loc)  # extract the file to resmod
    # delete the file after extraction. This prevent updates from being blocked by previous cached UML package
    # not needed this time as we check filesize
    # os.remove(uml_filepath)
    outstream.write("Base UML installed to {:s}\n".format(UML_loc))

    # if selected, attempt to find and copy old ownModel.xml from other valid directoryvar
    copy_ownModel, symlink_ownModel = cache_obj.get(
        "copy_ownModel", 0) == 1, cache_obj.get("symlink_ownModel", 0) == 1
    if (copy_ownModel or symlink_ownModel):
        if (symlink_ownModel):
            # filepath is the cached location (symlink)
            cached_ownmodel_filepath = os.path.join(cache_loc, "ownModel.xml")
            symlink_destination = os.path.join(UML_loc, "scripts", "client",
                                               "mods", "ownModel.xml")
            if (os.path.islink(symlink_destination)):
                # already symlinked, nothing to do
                outstream.write("Symlink already created, continuing.\n")
            else:
                if (not os.path.isfile(cached_ownmodel_filepath)):
                    # make sure cached_ownmodel_filepath have a file
                    with io.open(cached_ownmodel_filepath, "w") as temp:
                        pass
                if (os.path.isfile(symlink_destination)):
                    # existing ownModel.xml; this should already been copied by the oldversion section above
                    os.remove(symlink_destination)
                # attenmting symlink
                try:
                    os.symlink(cached_ownmodel_filepath, symlink_destination)
                except OSError as e:
                    # insufficient privilege, copy directly
                    outstream.write("OSError caught: " + str(e) + "\n")
                    messagebox.showerror(
                        title="Insufficient privilege",
                        message=
                        "The process do not have enough privilege to create a symlink. Falling back to common copying.\n"
                    )
                    shutil.copyfile(cached_ownmodel_filepath,
                                    symlink_destination)
        else:
            # filepath is the new location of ownModel (copy)
            new_ownmodel_filepath = os.path.join(UML_loc, "scripts", "client",
                                                 "mods", "ownModel.xml")
            copied = False
            for oldversion in (
                    valid[1:] if copy_ownModel else valid
            ):  # go back from the latest version; copy to correct location
                ownmodel_filepath = os.path.join(resmod_folder, oldversion,
                                                 "scripts", "client", "mods",
                                                 "ownModel.xml")
                if (os.path.isfile(ownmodel_filepath)
                        and (not os.path.islink(ownmodel_filepath)
                             or not os.path.islink(new_ownmodel_filepath))):
                    # if there is links already set up (in old or new ownModel), the copying is ignored
                    # last condition is because if there is 3 versions 1-2-3, (1) is symlinked, (2) is copied, and (3) is symlinked again, subsequent copy will override the symlinked (3) with old config (2)
                    shutil.copyfile(ownmodel_filepath, new_ownmodel_filepath)
                    outstream.write(
                        "Found ownModel.xml at {:s}, copied to {:s}".format(
                            oldversion, ownmodel_filepath))
                    copied = True
                    break
            if (not copied):
                outstream.write(
                    "Did not find any ownModel.xml on older directories. Continuing.\n"
                )

    # download all the supplementary mods recorded in download_set into the mods folder
    for i, (filename, link) in enumerate(download_set):
        fileloc = os.path.join(directory, "mods", valid[0], "UML", filename)
        if (progress_labelvar):
            progress_labelvar.set(
                "Downloading {:s} from {} to location {:s}...".format(
                    filename, link, fileloc))
        filehandler.download(
            fileloc,
            link,
            stream=True,
            cache_loc=cache_loc,
            wait=wait,
            progressbar=progressbar,
            session=session)  # check for file in specific locations as well
        outstream.write("Installed mod {:s} to {:s}.\n".format(
            filename, fileloc))
    # check the cache if the credit is already shown for this version (subsequent runs on the same version will no longer show the credit)
    show_credit = cache_obj.get("installed_version",
                                "") != valid[0] and credit_path is not None
    if (show_credit):
        cache_obj["installed_version"] = valid[0]
    # after finished installing, update the cache_obj and write it to disk
    cache_obj["WOT_location"] = directory
    cache_obj["mods"] = [name for name, link in download_set]
    cache.write_cache(cache_obj, cache_obj_path)
    # done
    session.close()
    if (progressbar or progress_labelvar
        ):  # if there are a progressbar in another thread, run its complete
        if (callable(finish_trigger_fn)):
            progress_labelvar.set("Everything finished.")
            if (show_credit):
                credit_dialog(download_set,
                              credit_path,
                              callback_fn=lambda: finish_trigger_fn(True))
            else:
                finish_trigger_fn(True)
    else:  # create a simple infobox
        messagebox.showinfo(
            title="Done",
            message="Installation complete in {:s}".format(directory))

    outstream.write("Finish installation.\n")