Code example #1
File: server.py Project: kennym/itools
 def stop(self):
     SoupServer.stop(self)
     self.main_loop.quit()
     if self.pid_file:
         remove_file(self.pid_file)
     if self.access_log:
         self.access_log_file.close()
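
Throughout these examples, `remove_file` is not a standard-library name of its own: where the import is visible (examples #17 and #26 below) it is simply `os.remove` bound to a friendlier alias. A minimal sketch of that convention, combined with the guarded-delete pattern several of the snippets use; `cleanup` is a hypothetical helper name, not taken from any of the projects listed here.

# Assumed convention (matches examples #17 and #26): remove_file is os.remove.
from os import remove as remove_file

def cleanup(path):
    """Delete `path` if it exists; an already-missing file is not an error."""
    try:
        remove_file(path)
    except OSError:
        pass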
Code example #2
def install(install_asset_info, install_path):
    old_install_files = listdir(install_path)
    mapping = {}

    for asset_info in install_asset_info:
        if not asset_info.install:
            continue
        try:
            file_hash = get_file_hash(asset_info.build_path)
            logical_path = asset_info.logical_path
            physical_path = '%s_%s.%s' % (splitext(basename(logical_path))[0],
                                          file_hash,
                                          asset_info.build_path.split('.', 1)[1])

            copy_file(asset_info.build_path, path_join(install_path, physical_path))
            mapping[logical_path] = physical_path

            try:
                old_install_files.remove(physical_path)
            except ValueError:
                pass

        except (IOError, TypeError):
            error('could not install %s' % asset_info.path)

    for path in old_install_files:
        asset_install_path = path_join(install_path, path)
        print('Removing old install file ' + asset_install_path)
        remove_file(asset_install_path)

    return mapping
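
The helper above publishes each asset under a content-derived name (`stem_hash.ext`), so a changed file never collides with a stale copy, and whatever is left in `old_install_files` afterwards is safe to delete. A hedged sketch of the naming step only; the original's `get_file_hash` is assumed to return a content digest, so `hashlib` stands in for it here.

import hashlib
from os.path import basename, splitext

def hashed_physical_name(logical_path, build_path):
    # Digest of the built file's contents; a changed asset gets a new name.
    with open(build_path, 'rb') as f:
        digest = hashlib.md5(f.read()).hexdigest()
    stem = splitext(basename(logical_path))[0]
    ext = build_path.split('.', 1)[1]   # keep compound extensions, as install() does
    return '%s_%s.%s' % (stem, digest, ext)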
Code example #3
def remove_old_build_files(build_asset_info, build_path):
    old_build_files = []
    excludes = [
        path_join(build_path, 'sourcehashes.json'),
        path_join(build_path, 'cgfx2json.version'),
        path_join(build_path, 'json2json.version'),
        path_join(build_path, 'obj2json.version'),
        path_join(build_path, 'tga2png.version'),
        path_join(build_path, 'bmfont2json.version'),
        path_join(build_path, 'dae2json.version'),
        path_join(build_path, 'material2json.version')
    ]
    for base, _, files in os_walk(build_path):
        dir_files = [path_join(base, filename) for filename in files]
        old_build_files.extend(f for f in dir_files if f not in excludes)

    for asset_info in build_asset_info:
        try:
            old_build_files.remove(asset_info.build_path)
        except ValueError:
            pass

    for path in old_build_files:
        print('Removing old build file ' + path)
        remove_file(path)

    for base, _, _ in os_walk(build_path, topdown=False):
        try:
            rmdir(base)
        except OSError:
            pass
        else:
            print('Removed old build directory ' + base)
Code example #4
 def test_file_write(self):
     # Check that the file exists after writing it
     write_text_file(file_name="test", file_format=".txt", file_lines="test string only", verbose=True)
     self.assertTrue(file_exists("test.txt"))
     # Check if the one string we passed is written correctly
     with open("test.txt", "r") as file:
         file_data = file.readline().strip()
     self.assertEqual("test string only", file_data)
     # Pass an empty list and see if it fails as expected
     state = write_text_file(file_name="test", file_format=".txt", file_lines=[], verbose=True)
     self.assertFalse(state)
     # Write one line, but passed as a list
     write_text_file(file_name="test", file_format=".txt", file_lines=["One line given"], verbose=True)
     with open("test.txt", "r") as file:
         file_data = file.readline().strip()
     self.assertEqual("One line given", file_data)
     # write a list of strings
     test_lines = ["hello", "my", "name", "is", "Simon"]
     write_text_file(file_name="test", file_format=".txt", file_lines=test_lines, verbose=True)
     # Read the lines and compare with the lines written
     with open("test.txt", "r") as file:
         file_data = file.readlines()
     for line_number in range(len(test_lines)):
         self.assertEqual(test_lines[line_number], file_data[line_number].strip())
     # Clean up after the test
     remove_file("test.txt")
Code example #5
File: conftest.py Project: Arbitrage0/augur-core
    def __init__(self):
        tester.GASPRICE = 0
        tester.STARTGAS = long(6.7 * 10**7)
        config_metropolis['GASLIMIT_ADJMAX_FACTOR'] = .000000000001
        config_metropolis['GENESIS_GAS_LIMIT'] = 2**60
        config_metropolis['MIN_GAS_LIMIT'] = 2**60
        config_metropolis['BLOCK_GAS_LIMIT'] = 2**60

        for a in range(10):
            tester.base_alloc[getattr(tester, 'a%i' % a)] = {'balance': 10**24}

        self.chain = Chain(env=Env(config=config_metropolis))
        self.contracts = {}
        self.testerAddress = self.generateTesterMap('a')
        self.testerKey = self.generateTesterMap('k')
        self.testerAddressToKey = dict(zip(self.testerAddress.values(), self.testerKey.values()))
        if path.isfile('./allFiredEvents'):
            remove_file('./allFiredEvents')
        self.relativeContractsPath = '../source/contracts'
        self.relativeTestContractsPath = 'solidity_test_helpers'
        self.coverageMode = pytest.config.option.cover
        if self.coverageMode:
            self.chain.head_state.log_listeners.append(self.writeLogToFile)
            self.relativeContractsPath = '../coverageEnv/contracts'
            self.relativeTestContractsPath = '../coverageEnv/solidity_test_helpers'
Code example #6
def build_asset(tool, asset_path, dst_path, verbose, args):
    try:
        create_dir(dirname(dst_path))
        tool.run(asset_path, dst_path, verbose, args)
    except:
        if path_exists(dst_path):
            remove_file(dst_path)
        raise
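
Example #6 is a small but useful pattern: if the conversion tool fails, the partially written output is deleted before the exception is re-raised, so a later incremental build cannot mistake a truncated file for a finished one. A hedged, standard-library-only sketch of the same idea; `shutil.copyfile` merely stands in for the real `tool.run` call.

import os
import shutil

def convert_with_cleanup(src_path, dst_path):
    out_dir = os.path.dirname(dst_path)
    if out_dir:
        os.makedirs(out_dir, exist_ok=True)
    try:
        shutil.copyfile(src_path, dst_path)   # placeholder for the actual conversion step
    except Exception:
        # Never leave a half-written output behind, then let the error propagate.
        if os.path.exists(dst_path):
            os.remove(dst_path)
        raise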
Code example #7
File: views.py Project: kapustkin/SMD_PCS
def material_delete(part_name):
    session = db.session()
    part = Material.query.filter_by(part=part_name).first()
    if part is not None:
        if path.exists(dir + '/app/static/img/material/%s_%s.png' % (part.part, part.vendor)):
            remove_file(dir + '/app/static/img/material/%s_%s.png' % (part.part, part.vendor))
        session.delete(part)
        session.commit()
    return redirect(url_for("material_list"))
Code example #8
File: CImportController.py Project: failys/cairis
  def post(self):
    session_id = get_session_id(session, request)
    json_dict = request.get_json(silent=True)
    if json_dict is False or json_dict is None:
      raise MalformedJSONHTTPError(data=request.get_data())

    cimport_params = json_dict.get('object', None)
    check_required_keys(cimport_params or {}, CImportParams.required)
    file_contents = cimport_params['urlenc_file_contents']
    file_contents = unquote(file_contents)
    file_contents = file_contents.replace("\u2018", "'").replace("\u2019", "'")
    overwrite = cimport_params['overwrite']
    type = cimport_params['type']

    if file_contents.startswith('<?xml'):
      fd, abs_path = mkstemp(suffix='.xml')
      fs_temp = codecs.open(abs_path, 'w','utf-8')
      fs_temp.write(file_contents)
      fs_temp.close()
      fd_close(fd)

      try:
        dao = ImportDAO(session_id)
        result = dao.file_import(abs_path, type, overwrite)
        dao.close()
      except DatabaseProxyException as ex:
        raise ARMHTTPError(ex)
      except ARMException as ex:
        raise ARMHTTPError(ex)
      except Exception as ex:
        raise CairisHTTPError(status_code=500,message=str(ex.message),status='Unknown error')

      remove_file(abs_path)

      resp_dict = {'message': str(result)}
      resp = make_response(json_serialize(resp_dict, session_id=session_id), OK)
      resp.headers['Content-Type'] = 'application/json'
      return resp
    elif type == 'Attack Tree (Dot)':
      try:
        environment_name = cimport_params['environment']
        contributor_name = cimport_params['contributor']
        dao = ImportDAO(session_id)
        result = dao.import_attack_tree(file_contents,environment_name,contributor_name)
        dao.close()
      except DatabaseProxyException as ex:
        raise ARMHTTPError(ex)
      except ARMException as ex:
        raise ARMHTTPError(ex)
      except Exception as ex:
        raise CairisHTTPError(status_code=500,message=str(ex.message),status='Unknown error')
    else:
      raise CairisHTTPError(status_code=BAD_REQUEST,message='The provided file is not a valid XML file',status='Invalid XML input')
Code example #9
File: svg.py Project: fossasia/knittingpattern
    def kivy_svg(self):
        """An SVG object.

        :return: an SVG object
        :rtype: kivy.graphics.svg.Svg
        :raises ImportError: if the module was not found
        """
        from kivy.graphics.svg import Svg
        path = self.temporary_path(".svg")
        try:
            return Svg(path)
        finally:
            remove_file(path)
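
The try/finally in example #9 guarantees the temporary .svg disappears even if `Svg(path)` raises. The same guarantee can come from the `tempfile` module; a hedged sketch in which `consume` is any caller-supplied function that reads the file before the cleanup runs.

import os
import tempfile

def with_temporary_svg(svg_markup, consume):
    """Write svg_markup to a throwaway file, pass its path to consume(), always clean up."""
    with tempfile.TemporaryDirectory() as tmp_dir:
        path = os.path.join(tmp_dir, "drawing.svg")
        with open(path, "w") as f:
            f.write(svg_markup)
        return consume(path)   # the directory and its file are removed when the block exits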
Code example #10
def remove_old_build_files(build_asset_info, build_paths):
    old_build_files = []
    for path in build_paths:
        old_build_files.extend(path_join(path, filename) for filename in listdir(path))

    for asset_info in build_asset_info:
        try:
            old_build_files.remove(asset_info.build_path)
        except ValueError:
            pass

    for path in old_build_files:
        print "Removing old build file " + path
        remove_file(path)
Code example #11
def encrypt(tofile=False):
    pubkey = input('Public key file: ')
    p = Popen(['gpg', '--import', pubkey],
              stdout=PIPE, stderr=PIPE)

    message = input('Message: ')
    with open('message', 'w') as f:
        f.write(message)

    p = Popen(['gpg', '--encrypt', 'message'],
              stdout=PIPE, stderr=PIPE)
    out, err = p.communicate()

    remove_file('message')
Code example #12
    def post(self):
        session_id = get_session_id(session, request)
        json_dict = request.get_json(silent=True)

        if json_dict is False or json_dict is None:
            raise MalformedJSONHTTPError(data=request.get_data())

        cimport_params = json_dict.get('object', None)
        check_required_keys(cimport_params or {}, CImportParams.required)
        file_contents = cimport_params['urlenc_file_contents']
        file_contents = unquote(file_contents)
        type = cimport_params['type']
        overwrite = cimport_params.get('overwrite', None)

        if file_contents.startswith('<?xml'):
            fd, abs_path = mkstemp(suffix='.xml')
            fs_temp = open(abs_path, 'w')
            fs_temp.write(file_contents)
            fs_temp.close()
            fd_close(fd)

            try:
                result = cimport.file_import(abs_path, type, overwrite, session_id=session_id)
            except DatabaseProxyException as ex:
                raise ARMHTTPError(ex)
            except ARMException as ex:
                raise ARMHTTPError(ex)
            except Exception as ex:
                raise CairisHTTPError(
                    status_code=500,
                    message=str(ex.message),
                    status='Unknown error'
                )

            remove_file(abs_path)

            resp_dict = {'message': result}
            resp = make_response(json_serialize(resp_dict, session_id=session_id), httplib.OK)
            resp.headers['Content-Type'] = 'application/json'
            return resp
        else:
            raise CairisHTTPError(
                status_code=httplib.BAD_REQUEST,
                message='The provided file is not a valid XML file',
                status='Invalid XML input'
            )
Code example #13
    def post(self, type):
        session_id = get_session_id(session, request)
        overwrite = request.form.get('overwrite', None)
        overwrite = request.args.get('overwrite', overwrite)
        try:
            if not request.files:
                raise LookupError()
            file = request.files['file']
        except LookupError:
            raise MissingParameterHTTPError(param_names=['file'])

        try:
            fd, abs_path = mkstemp(suffix='.xml')
            fs_temp = open(abs_path, 'w')
            xml_text = file.stream.read()
            fs_temp.write(xml_text)
            fs_temp.close()
            fd_close(fd)
        except IOError:
            raise CairisHTTPError(
                status_code=httplib.CONFLICT,
                status='Unable to load XML file',
                message='The XML file could not be loaded on the server. ' +
                        'Please check if the application has permission to write temporary files.'
            )

        try:
            result = cimport.file_import(abs_path, type, overwrite, session_id=session_id)
        except DatabaseProxyException as ex:
            raise ARMHTTPError(ex)
        except ARMException as ex:
            raise ARMHTTPError(ex)
        except Exception as ex:
            raise CairisHTTPError(
                status_code=500,
                message=str(ex.message),
                status='Unknown error'
            )

        remove_file(abs_path)

        resp_dict = { 'message': result }
        resp = make_response(json_serialize(resp_dict, session_id=session_id), httplib.OK)
        resp.headers['Content-Type'] = 'application/json'
        return resp
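
Examples #8, #12, #13 and #18 all repeat the same shape: `mkstemp` a temporary .xml file, write the payload, close it, run the import, then `remove_file` the path. A condensed, hedged sketch of that shape using `os.fdopen` so the descriptor returned by `mkstemp` is reused rather than closed separately; `import_xml` is a stand-in for the DAO/`cimport` call, not a real CAIRIS API.

import os
from tempfile import mkstemp

def import_from_string(file_contents, import_xml):
    fd, abs_path = mkstemp(suffix='.xml')
    try:
        with os.fdopen(fd, 'w') as fs_temp:   # wraps and later closes the mkstemp descriptor
            fs_temp.write(file_contents)
        return import_xml(abs_path)
    finally:
        os.remove(abs_path)                   # the temporary file is always discarded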
Code example #14
File: __init__.py Project: TheTypoMaster/paralarea
def remove(login, proj_name, fullpath):

    proj_path = "{0}/{1}/projects/{2}".format(HEAD_PATH, login, proj_name)

    if not path.exists(proj_path):
        return False

    fullpath = '{0}/{1}'.format(proj_path, fullpath)

    if not path.exists(fullpath):
        return False

    if path.isdir(fullpath):
        shutil.rmtree(fullpath)
    else:
        remove_file(fullpath)

    return True
Code example #15
File: server.py Project: halftanolger/screenly-ose
def delete_asset(asset_id):
    c = connection.cursor()

    c.execute("DELETE FROM assets WHERE asset_id=?", (asset_id,))
    try:
        connection.commit()

        # If file exist on disk, delete it.
        local_uri = path.join(asset_folder, asset_id)
        if path.isfile(local_uri):
            remove_file(local_uri)

        header = "Success!"
        message = "Deleted asset."
        return template('message', header=header, message=message)
    except:
        header = "Ops!"
        message = "Failed to delete asset."
        return template('message', header=header, message=message)
Code example #16
File: network.py Project: dprada/Aqua
def load_net():
   name_net= tkSimpleDialog.askstring('Loading Network',
                                                      'Please enter the name of the new object:')
   file_coors= open_it()
   net=aqua.network(file_coors)
   cmd.set("connect_mode", 1)
   nfile='/tmp/fake'+''.join(random.choice(string.ascii_uppercase) for i in range(6))+'.xyz'
   f = open(nfile, 'wb')
   f.write(str(net.num_nodes)+"\n")
   #f.write("FAKE\n")
   for ii in net.node:
      f.write("X\t%f\t%f\t%f\n" % (ii.coors[0],ii.coors[1],ii.coors[2]))
   f.close()
   cmd.load(nfile,name_net)
   remove_file(nfile)
   for ii in xrange(net.num_nodes):
      selec=name_net+' & id '+str(ii+1)
      #cmd.alter(selec,'name='+net.node[ii].label)
      cmd.alter(selec,'name='+str(ii))
      cmd.alter(selec,'resi='+str(net.node[ii].cluster))
      cmd.alter(selec,'q='+str(net.node[ii].cluster))
      cmd.alter(selec,'b='+str(net.node[ii].weight))
      #cmd.alter(selec,'q='+str(net.node[ii].att1))
      cmd.alter(selec,'ID='+str(ii))
Code example #17
 def delete_file(self, fpath):
     from os import remove as remove_file
     remove_file(fpath)
Code example #18
    def post(self):
        session_id = get_session_id(session, request)
        json_dict = request.get_json(silent=True)

        if json_dict is False or json_dict is None:
            raise MalformedJSONHTTPError(data=request.get_data())

        cimport_params = json_dict.get('object', None)
        check_required_keys(cimport_params or {}, CImportParams.required)
        file_contents = cimport_params['urlenc_file_contents']
        file_contents = unquote(file_contents)
        file_contents = file_contents.replace(u"\u2018",
                                              "'").replace(u"\u2019", "'")
        type = cimport_params['type']

        if file_contents.startswith('<?xml'):
            fd, abs_path = mkstemp(suffix='.xml')
            fs_temp = open(abs_path, 'w')
            fs_temp.write(file_contents)
            fs_temp.close()
            fd_close(fd)

            try:
                dao = ImportDAO(session_id)
                result = dao.file_import(abs_path, type, 1)
                dao.close()
            except DatabaseProxyException as ex:
                raise ARMHTTPError(ex)
            except ARMException as ex:
                raise ARMHTTPError(ex)
            except Exception as ex:
                raise CairisHTTPError(status_code=500,
                                      message=str(ex.message),
                                      status='Unknown error')

            remove_file(abs_path)

            resp_dict = {'message': str(result)}
            resp = make_response(
                json_serialize(resp_dict, session_id=session_id), httplib.OK)
            resp.headers['Content-Type'] = 'application/json'
            return resp
        elif type == 'Attack Tree (Dot)':
            try:
                environment_name = cimport_params['environment']
                contributor_name = cimport_params['contributor']
                dao = ImportDAO(session_id)
                result = dao.import_attack_tree(file_contents,
                                                environment_name,
                                                contributor_name)
                dao.close()
            except DatabaseProxyException as ex:
                raise ARMHTTPError(ex)
            except ARMException as ex:
                raise ARMHTTPError(ex)
            except Exception as ex:
                raise CairisHTTPError(status_code=500,
                                      message=str(ex.message),
                                      status='Unknown error')

        else:
            raise CairisHTTPError(
                status_code=httplib.BAD_REQUEST,
                message='The provided file is not a valid XML file',
                status='Invalid XML input')
Code example #19
param_grid = {
      'max_depth' : [3, None],
      'max_features' : [1, 3, 10],
      'min_samples_split' : [1, 3, 10],
      'min_samples_leaf' : [1, 3, 10],
      'bootstrap' : [True, False],
      'criterion' : ['gini', 'entropy']
}

clf = RandomForestClassifier()
gs = GridSearchCV(clf, param_grid=param_grid, n_jobs=-1, scoring='roc_auc')
gs.fit(X, Y)

# Recipe outputs
model_skikit = dataiku.Folder("70DO2Zpk").get_path() #Destination Folder

for file in os.listdir(model_skikit):
    # Clear any files left over from a previous export of the models
    try:
        os.remove(os.path.join(model_skikit, file))
    except OSError:
        pass

serials = [
           {'pkl': 'schema.pkl','obj': SCHEMA},
           {'pkl': 'trf_num.pkl', 'obj': trf_num},
           {'pkl': 'trf_cat.pkl','obj': trf_cat},
           {'pkl': 'model.pkl','obj': gs.best_estimator_},
           ]

for serial in serials:
    fp = os.path.join(model_skikit, serial['pkl'])
    joblib.dump(serial['obj'], fp)
Code example #20
File: datashare.py Project: Lungta21/turbulenz_local
 def _delete(self):
     try:
         remove_file(self.path)
     except OSError:
         pass
     self.deleted = True
Code example #21
 def _delete(self):
     try:
         remove_file(self.path)
     except OSError:
         pass
     self.deleted = True
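
Examples #20 and #21 treat an already-deleted file as a non-event by swallowing `OSError`. On Python 3 the same intent can be written more narrowly; a hedged sketch (note the originals catch the broader `OSError`, which also hides permission errors).

import contextlib
import os

def delete_quietly(path):
    # Ignore only the "file is already gone" case.
    with contextlib.suppress(FileNotFoundError):
        os.remove(path)

# On Python 3.8+ pathlib expresses the same thing directly:
#     pathlib.Path(path).unlink(missing_ok=True)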
Code example #22
def export_material(self, context):
    mat_list = []
    et = context.scene.export_materials_type
    folder_path = ""
    folder_name = ""
    # determine what all is being exported
    if et == "1" and context.material != None:
        mat_list.append(context.material.name)
    elif et == "2":
        for i in context.object.data.materials:
            mat_list.append(i.name)
    elif et == "3":
        for i in bpy.data.materials:
            mat_list.append(i.name)
    # export materials
    for mat_name in mat_list:
        mat = bpy.data.materials[mat_name]
        epath = context.scene.save_path_export
        if mat != None:
            if epath != "":
                # try open file
                error = True
                if "//" in epath:
                    epath = bpy.path.abspath(epath)
                if path.exists(epath):
                    error = False
                # error = True
                if error == False:
                    root = ET.Element("material")
                    names = {}
                    data = []
                    m_nodes = mat.node_tree.nodes
                    m_n = []  # main nodes
                    m_l = []  # main links
                    images = []
                    for n_main in m_nodes:  # nodes
                        out, is_group, im = export_node_type(n_main)
                        m_n.append(out)
                        images.append(im)
                        if is_group == True:  # group level 1
                            g_n = []
                            g_l = []
                            for n_group in n_main.node_tree.nodes:  # nodes
                                g_out, is_group1, im1 = export_node_type(n_group)
                                g_n.append(g_out)
                                images.append(im1)

                                if is_group1 == True:  # group level 2
                                    g_n2 = []
                                    g_l2 = []
                                    for n_group2 in n_group.node_tree.nodes:  # nodes
                                        g_out2, is_group2, im2 = export_node_type(n_group2)
                                        g_n2.append(g_out2)
                                        images.append(im2)

                                        if is_group2 == True:  # group level 3
                                            g_n3 = []
                                            g_l3 = []
                                            for n_group3 in n_group2.node_tree.nodes:  # nodes
                                                g_out3, is_group3, im3 = export_node_type(n_group3)
                                                g_n3.append(g_out3)
                                                images.append(im3)

                                                if is_group3 == True:  # group level 4
                                                    g_n4 = []
                                                    g_l4 = []
                                                    for n_group4 in n_group3.node_tree.nodes:  # nodes
                                                        g_out4, is_group4, im4 = export_node_type(n_group4)
                                                        g_n4.append(g_out4)
                                                        images.append(im4)

                                                    for l_group4 in n_group3.node_tree.links:  # links
                                                        out5 = link_info(l_group4)
                                                        g_l4.append(out5)
                                                    data.append([g_n4, g_l4])
                                                    names[c_name(n_group3.node_tree.name)] = len(data) - 1

                                            for l_group3 in n_group2.node_tree.links:  # links
                                                out4 = link_info(l_group3)
                                                g_l3.append(out4)
                                            data.append([g_n3, g_l3])
                                            names[c_name(n_group2.node_tree.name)] = len(data) - 1

                                    for l_group2 in n_group.node_tree.links:  # links
                                        out3 = link_info(l_group2)
                                        g_l2.append(out3)
                                    data.append([g_n2, g_l2])
                                    names[c_name(n_group.node_tree.name)] = len(data) - 1

                            for l_group in n_main.node_tree.links:  # links
                                out2 = link_info(l_group)
                                g_l.append(out2)
                            data.append([g_n, g_l])
                            names[c_name(n_main.node_tree.name)] = len(data) - 1

                    for l_main in mat.node_tree.links:  # links
                        out = link_info(l_main)
                        m_l.append(out)
                    data.append([m_n, m_l])
                    names["main"] = len(data) - 1

                    # write data
                    # material attribs
                    t = datetime.now()
                    date_string = "{}/{}/{} at {}:{}:{} in {}".format(t.month, t.day, t.year, t.hour, t.minute, t.second, tzname[0])
                    root.attrib = {"Render_Engine": context.scene.render.engine, "Material_Name": c_name(mat.name), "Date_Created": date_string, "Number_Of_Nodes": ""}
                    n = 0
                    num_nodes = 0
                    for group in names:
                        sub_e = ET.SubElement(root, group.replace("/", "_"))
                        d = data[names[group]]
                        sub_e_nodes = ET.SubElement(sub_e, group.replace("/", "_") + "_nodes")
                        for i in d[0]:  # nodes
                            ET.SubElement(sub_e_nodes, "node" + str(n), {"name": i["name"], "bl_idname": i["bl_idname"], "label": i["label"], "color": i["color"], "parent": str(i["parent"]), "location": i["location"],
                                                                         "height": i["height"], "width": i["width"], "mute": i["mute"], "hide": i["hide"], "inputs": i["inputs"], "outputs": i["outputs"], "node_specific": i["node_specific"], "use_custom_color": i["use_custom_color"]})
                            num_nodes += 1
                        sub_e_links = ET.SubElement(sub_e, group.replace("/", "_") + "_links")
                        for i in d[1]:  # links
                            ET.SubElement(sub_e_links, "link" + str(n), {"link_info": i})
                            n += 1
                    root.attrib["Number_Of_Nodes"] = str(num_nodes)
                    # get order of groups
                    pre_order = sorted(names.items(), key=operator.itemgetter(1))
                    order = [i[0].replace("/", "_") for i in pre_order]
                    root.attrib["Group_Order"] = str(order)
                    # images
                    img_out = []
                    save_path = epath + c_name(mat.name) + ".bmat"
                    # create folder if needed
                    if (et == "2" and len(context.object.data.materials) >= 2) or (et == "3" and len(bpy.data.materials) >= 2):
                        if not path.exists(epath + c_name(mat.name)) and folder_path == "":
                            try:
                                makedirs(epath + c_name(mat.name))
                                folder_path = epath + c_name(mat.name)
                                folder_name = c_name(mat.name)
                            except PermissionError:
                                pass
                        elif folder_path == "":
                            folder_path = epath + c_name(mat.name)
                    # set save path based on folder path
                    if folder_path != "":
                        save_path = folder_path + "\\" + c_name(mat.name) + ".bmat"
                    # image file paths
                    if context.scene.image_save_type == "1":  # absolute filepaths
                        root.attrib["Path_Type"] = "Absolute"
                        for i in images:
                            for i2 in i:
                                img_out.append([i2[0], bpy.path.abspath(i2[1])])
                    else:  # relative filepaths
                        error = False
                        for i in images:
                            if i != []:
                                error = True
                        if error == True:
                            save_path = epath + c_name(mat.name) + "\\" + c_name(mat.name) + ".bmat"
                            image_path = epath + c_name(mat.name)
                            if not path.exists(epath + c_name(mat.name)) and folder_path == "":
                                try:
                                    makedirs(epath + c_name(mat.name))
                                    folder_path = epath + c_name(mat.name)
                                    folder_name = c_name(mat.name)
                                except PermissionError:
                                    error = False
                            elif folder_path != "":
                                save_path = folder_path + "\\" + c_name(mat.name) + ".bmat"
                                image_path = folder_path
                            # make sure folder_path is correct
                            if path.exists(epath + c_name(mat.name)) and folder_path == "":
                                folder_path = epath + c_name(mat.name)
                        root.attrib["Path_Type"] = "Relative"
                        if error == True:
                            for i in images:
                                for i2 in i:
                                    i3 = bpy.path.abspath(i2[1])
                                    i2_l = i3.split("\\")
                                    img_out.append([i2[0], "//" + i2_l[len(i2_l) - 1]])
                                    if path.exists(image_path):
                                        copyfile(i3, image_path + "\\" + i2_l[len(i2_l) - 1])

                    root.attrib["Images"] = str(img_out)
                    tree = ET.ElementTree(root)
                    error2 = True
                    try:
                        tree.write(save_path)
                        error2 = False
                    except (PermissionError, FileNotFoundError):
                        self.report({"ERROR"}, "Permission Denied At That Location")
                    # if no error make text pretty
                    if error2 == False:
                        pretty_file = pretty_parse(save_path)
                        pretty_text = pretty_file.toprettyxml()
                        file = open(save_path, "w+")
                        file.write(pretty_text)
                        file.close()
                # if error
                elif error == True:
                    self.report({"ERROR"}, "Export Path Is Invalid")
    # zip folder
    if folder_path != "" and context.scene.compress_folder == True:
        if path.exists(epath + "\\" + folder_name + ".zip"):  # if file is already there, delete
            remove_file(epath + "\\" + folder_name + ".zip")
        zf = zipfile.ZipFile(epath + "\\" + folder_name + ".zip", "w", zipfile.ZIP_DEFLATED)
        for dirname, subdirs, files in walk(folder_path):
            for filename in files:
                zf.write(dirname + "\\" + filename, arcname=filename)
        zf.close()
        # delete non-compressed folder
        rmtree(folder_path)
Code example #23
File: loop.py Project: Nabellaleen/itools
 def quit(self):
     super(Loop, self).quit()
     if self.pid_file:
         remove_file(self.pid_file)
Code example #24
File: nsapi.py Project: Eluvatar/zombie-watchtower
def __unlock_rm():
  now = time.time()
  time.sleep( last_request + 0.625 - now)
  LOCK_FILE.close()
  remove_file(LOCK_FILE_PATH)
Code example #25
 def remove(self):
     remove_file(self.get_path())
Code example #26
File: HttpServers.py Project: BikerDroid/HttpServers
 def delete_file(self,fpath):
     from os import remove as remove_file
     remove_file(fpath)
Code example #27
 def remove_old_archive(name):
     remove_file(name)
     print "... removed old archive", name