Example #1
    def fetch(self, request, opener=None, summary=None):

        if not self.__enable_http:
            return (None, None)

        if opener is None:
            opener = OpenerDirector()
            opener.add_handler(HTTPDefaultErrorHandler())
            opener.add_handler(HTTPSHandler())

        t = time.clock()
        response = opener.open(request)
        body = response.read()
        t = timedelta(seconds=time.clock() - t)
        url = request.get_full_url()
        self.__context.get_logger().info('HTTP time: %s\n%s' % (t, url))

        if self.__log_http:
            log_dir = os.path.join(self.__context.get_config_dir(), 'http-log')
            makedirs(log_dir)
            log_file = os.path.join(log_dir,
                                    datetime.utcnow().strftime(
                                        '%Y-%m-%d-%H-%M-%S-%f'))
            if summary is not None:
                log_file += '-' + _safe_str(summary)
            fp = open(log_file, 'w')
            fp.write('\n\n'.join([
                request.get_full_url(),
                request.get_data() or 'No request data',
                body or 'No response body',
            ]))
            fp.close()

        return (response, body)
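
Note: most examples on this page call a project-local makedirs (or util.makedirs) wrapper rather than os.makedirs directly. The wrapper's implementation is not shown here; a minimal sketch of what such a helper usually looks like, assuming its only job is "create the directory tree unless it already exists", is:

import errno
import os

def makedirs(path):
    # Create path and any missing parents; ignore the error raised when the
    # directory is already there (equivalent to os.makedirs(path, exist_ok=True)
    # on Python 3).
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
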
Example #2
    def writeFile(self, f, decrypted_chunks):
        path = os.path.join(self.outputFolder,
                            re.sub(r'[:|*<>?"]', "_", f.RelativePath))
        print path
        makedirs(os.path.dirname(path))
        ff = open(path, "wb")
        h = hashlib.sha1()
        for i in xrange(len(decrypted_chunks)):
            d = decrypted_chunks[i]
            h.update(d)
            ff.write(d)
        ff.close()

        if f.Attributes.EncryptionKey:
            EncryptionKey = f.Attributes.EncryptionKey
            #ProtectionClass = f.Attributes.ProtectionClass
            hexdump(EncryptionKey)
            ProtectionClass = struct.unpack(">L", EncryptionKey[0x18:0x1C])[0]
            assert ProtectionClass == f.Attributes.ProtectionClass
            #EncryptionKeyVersion=2 => starts with keybag uuid
            if f.Attributes.EncryptionKeyVersion and f.Attributes.EncryptionKeyVersion == 2:
                assert self.kb.uuid == EncryptionKey[:0x10]
                keyLength = struct.unpack(">L", EncryptionKey[0x20:0x24])[0]
                assert keyLength == 0x48
                wrapped_key = EncryptionKey[0x24:]
            else:  #XXX old format ios 5 backup
                wrapped_key = EncryptionKey[0x1C:]
            print "ProtectionClass= %d" % ProtectionClass
            filekey = self.kb.unwrapCurve25519(ProtectionClass, wrapped_key)
            if not filekey:
                print "Failed to unwrap file key for file %s !!!" % f.RelativePath
            else:
                print "filekey", filekey.encode("hex")
                self.decryptProtectedFile(path, filekey,
                                          f.Attributes.DecryptedSize)
Example #3
def versionthis(filetoversion):
    global options
    try:
        if accesscontrollist.hasacl(filetoversion) and not options.ignoreacl:
            err = "filetoversion has a 'deny' in ACL permissions (ls -lde %s: %s) \n \
            This program is currently not clever enough to check if you have permission to move/delete this file. \n \
            To avoid this problem remove deny permissions from the access control entries \n \
            or rerun this command with --ignoreacl" % (filetoversion, accesscontrollist.getacl(filetoversion))
            raise SyncherException(err)

        # TODO: verify that this file is not already added
        logging.info("should: check for dups")

        filetoversionpath, repospathofversionedfile, repospathtoputnewfilein = settings.getFileToVersionPathes(filetoversion)

        util.makedirs(repospathtoputnewfilein)

        acl = None
        if options.ignoreacl:
            acl = accesscontrollist.removeacl(filetoversion)

        util.move(filetoversionpath, repospathofversionedfile)  # repospathtoputnewfilein)

        if acl is not None:
            accesscontrollist.setacl(repospathofversionedfile, acl)

        util.symlink(repospathofversionedfile, filetoversionpath)

        syncdb.add(filetoversionpath)

    except Exception as e:
        logging.warn("ROLLING BACK because of %s" % e)
        undo.rollback()
        raise
Example #4
def write_xenvmd_config(uuid, vg, devices, vgsize):
    global config_dir
    configfile = "%s/%s.xenvmd.config" % (config_dir, vg)
    sockpath = sockpath_of_sr_uuid(uuid)
    #Min host allocation quantum in MiB, i.e., 10 times
    #min_allocation_quantum (currently 16MiB):
    min_host_allocation_quantum = 160
    #host_allocation_quantum is 0.5% of SR size
    host_allocation_quantum = (vgsize * 0.005) / (1024 * 1024)
    #host_allocation_quantum should be bigger than 1GiB
    host_allocation_quantum = max(min_host_allocation_quantum,
                                  host_allocation_quantum)
    host_low_water_mark = (host_allocation_quantum * 0.5)
    config = """
(
 (listenPort ())
 (listenPath (Some %s))
 (host_allocation_quantum %d)
 (host_low_water_mark %d)
 (vg %s)
 (devices (%s))
 (rrd_ds_owner %s)
)
""" % (sockpath, host_allocation_quantum, host_low_water_mark, vg,
       " ".join(devices), uuid)
    if not os.path.exists(config_dir):
        util.makedirs(config_dir)
    if not os.path.exists(os.path.dirname(sockpath)):
        util.makedirs(os.path.dirname(sockpath))
    with open(configfile, 'w') as f:
        f.write(config)
Example #5
def runxenvm_local_allocator(uuid, vg, devices, uri):
    global config_dir
    configfile = "%s/%s.xenvm-local-allocator.config" % (config_dir, vg)
    uuid = util.get_this_host()
    socket_dir = "/var/run/sm/allocator"
    journal_dir = "/tmp/sm/allocator-journal"
    for d in [socket_dir, journal_dir]:
        if not os.path.exists(d):
            util.makedirs(d)
    local_allocator = "%s/%s" % (socket_dir, vg)
    config = """
(
 (socket %s)
 (allocation_quantum 16)
 (localJournal %s/%s)
 (devices (%s))
 (toLVM %s-toLVM)
 (fromLVM %s-fromLVM)
)
""" % (local_allocator, journal_dir, vg, "".join(devices), uuid, uuid)
    if not os.path.exists(config_dir):
        util.makedirs(config_dir)
    with open(configfile, 'w') as f:
        f.write(config)
    cmd = ["/bin/xenvm", "host-create", vg, uuid]
    util.pread2(cmd)
    cmd = ["/bin/xenvm", "host-connect", vg, uuid]
    util.pread2(cmd)
    cmd = ["/bin/xenvm-local-allocator", "--daemon", "--config", configfile]
    util.pread2(cmd)
    setvginfo(uuid, vg, devices, uri, local_allocator)
Example #6
    def __init__(self, ctx, path, state):
        self._path = path
        self._state = state
        r = ctx._repo
        root = r.wjoin(path)
        create = False
        if not os.path.exists(os.path.join(root, '.hg')):
            create = True
            util.makedirs(root)
        self._repo = hg.repository(r.ui, root, create=create)
        self._repo._subparent = r
        self._repo._subsource = state[0]

        if create:
            fp = self._repo.opener("hgrc", "w", text=True)
            fp.write('[paths]\n')

            def addpathconfig(key, value):
                if value:
                    fp.write('%s = %s\n' % (key, value))
                    self._repo.ui.setconfig('paths', key, value)

            defpath = _abssource(self._repo, abort=False)
            defpushpath = _abssource(self._repo, True, abort=False)
            addpathconfig('default', defpath)
            if defpath != defpushpath:
                addpathconfig('default-push', defpushpath)
            fp.close()
Example #7
    def build_preferences(self, params):
        """Build the XML files to setup Trepn and the data points"""
        current_dir = op.dirname(op.realpath(__file__))
        # lxml is not the most secure parser, it is up to the user for valid configurations
        # https://docs.python.org/2/library/xml.html#xml-vulnerabilities
        self.pref_dir = op.join(paths.OUTPUT_DIR, 'trepn.pref/')
        makedirs(self.pref_dir)

        preferences_file = et.parse(
            op.join(current_dir, 'trepn/preferences.xml'))
        if 'sample_interval' in params:
            for i in preferences_file.getroot().iter('int'):
                if i.get(
                        'name'
                ) == 'com.quicinc.preferences.general.profiling_interval':
                    i.set('value', str(params['sample_interval']))
        preferences_file.write(op.join(self.pref_dir,
                                       'com.quicinc.trepn_preferences.xml'),
                               encoding='utf-8',
                               xml_declaration=True,
                               standalone=True)

        datapoints_file = et.parse(
            op.join(current_dir, 'trepn/data_points.xml'))
        dp_root = datapoints_file.getroot()
        data_points = load_json(op.join(current_dir, 'trepn/data_points.json'))
        for dp in params['data_points']:
            dp = str(data_points[dp])
            dp_root.append(et.Element('int', {'name': dp, 'value': dp}))
        datapoints_file.write(op.join(
            self.pref_dir, 'com.quicinc.preferences.saved_data_points.xml'),
                              encoding='utf-8',
                              xml_declaration=True,
                              standalone=True)
Example #8
 def set_output(self):
     # TODO clean up!
     self.paths['OUTPUT_DIR'] = os.path.join(paths.OUTPUT_DIR, self.name)
     makedirs(self.paths['OUTPUT_DIR'])
     self.logger.debug('%s: Setting output: %s' %
                       (self.moduleName, self.paths['OUTPUT_DIR']))
     self.currentProfiler.set_output(self.paths['OUTPUT_DIR'])
Example #9
def extract(df, target_col='text', info_type='link', out_dir=''):
    ut.makedirs(out_dir)
    df = df[['com_id', target_col]]

    ut.out('target column: %s, info type: %s' % (target_col, info_type))

    if info_type == 'text':
        ut.out('writing info to csv...\n')
        df.to_csv(out_dir + info_type + '.csv', index=None)
        return

    d, i = {}, 0
    regex = _get_regex(info_type)

    df = df[['com_id', target_col]]
    for ndx, com_id, text in df.itertuples():
        i += 1
        if i % 100000 == 0:
            ut.out('(%d/%d)...' % (i, len(df)))

        info = _get_items(text, regex)
        if info != '':
            d[com_id] = info

    if len(d) > 0:
        info_df = pd.DataFrame.from_dict(d, orient='index').reset_index()
        info_df.columns = ['com_id', info_type]
        fname = info_type + '.csv'
        ut.out(str(info_df))
        ut.out('writing info to csv...\n')
        info_df.to_csv(out_dir + fname, index=None)
    else:
        ut.out('No extractions made...')
Example #10
def runxenvm_local_allocator(uuid, vg, devices, uri):
    global config_dir
    configfile = "%s/%s.xenvm-local-allocator.config" % (config_dir, vg)
    uuid = util.get_this_host ()
    socket_dir = "/var/run/sm/allocator"
    journal_dir = "/tmp/sm/allocator-journal"
    for d in [ socket_dir, journal_dir ]:
        if not os.path.exists(d):
            util.makedirs(d)
    local_allocator = "%s/%s" % (socket_dir, vg)
    config = """
(
 (socket %s)
 (allocation_quantum 16)
 (localJournal %s/%s)
 (devices (%s))
 (toLVM %s-toLVM)
 (fromLVM %s-fromLVM)
)
""" % (local_allocator, journal_dir, vg, "".join(devices), uuid, uuid)
    if not os.path.exists(config_dir):
      util.makedirs(config_dir)
    with open(configfile, 'w') as f:
        f.write(config)
    cmd = [ "/bin/xenvm", "host-create", vg, uuid ]
    util.pread2(cmd)
    cmd = [ "/bin/xenvm", "host-connect", vg, uuid ]
    util.pread2(cmd)
    cmd = [ "/bin/xenvm-local-allocator", "--daemon", "--config", configfile ]
    util.pread2(cmd)
    setvginfo(uuid,vg,devices,uri,local_allocator)
Example #11
def run():
    print("Creating timeline")
    util.makedirs("../dest/timeline")
    timeline_index.run()
    for monthdata in t_db:
        print(">>>>", monthdata["id"])
        timeline_month.run(monthdata["id"])
Example #12
    def writeFile(self, f, decrypted_chunks):
        path = os.path.join(self.outputFolder, re.sub(r'[:|*<>?"]', "_", f.RelativePath))
        print path
        makedirs(os.path.dirname(path))
        ff = open(path, "wb")
        h = hashlib.sha1()
        for i in xrange(len(decrypted_chunks)):
            d = decrypted_chunks[i]
            h.update(d)
            ff.write(d)
        ff.close()

        if f.Attributes.EncryptionKey:
            EncryptionKey = f.Attributes.EncryptionKey
            #ProtectionClass = f.Attributes.ProtectionClass
            hexdump(EncryptionKey)
            ProtectionClass = struct.unpack(">L", EncryptionKey[0x18:0x1C])[0]
            assert ProtectionClass == f.Attributes.ProtectionClass
            #EncryptionKeyVersion=2 => starts with keybag uuid
            if f.Attributes.EncryptionKeyVersion and f.Attributes.EncryptionKeyVersion == 2:
                assert self.kb.uuid == EncryptionKey[:0x10]
                keyLength = struct.unpack(">L", EncryptionKey[0x20:0x24])[0]
                assert keyLength == 0x48
                wrapped_key = EncryptionKey[0x24:]
            else:#XXX old format ios 5 backup
                wrapped_key = EncryptionKey[0x1C:]
            print "ProtectionClass= %d" % ProtectionClass
            filekey = self.kb.unwrapCurve25519(ProtectionClass, wrapped_key)
            if not filekey:
                print "Failed to unwrap file key for file %s !!!" % f.RelativePath
            else:
                print "filekey",filekey.encode("hex")
                self.decryptProtectedFile(path, filekey, f.Attributes.DecryptedSize)
Example #13
  def __call__(self, user):
    configDir = user + 'Config/'
    util.makedirs(configDir)

    with open(configDir + 'GCPadNew.ini', 'w') as f:
      f.write(generateGCPadNew(self.cpus))

    with open(configDir + 'Dolphin.ini', 'w') as f:
      config_args = dict(
        user=user,
        gfx=self.gfx,
        cpu_thread=self.cpu_thread,
        dump_frames=self.dump_frames,
        audio=self.audio,
        speed=self.speed
      )
      f.write(dolphinConfig.format(**config_args))

    # don't need memory card with netplay
    #gcDir = user + 'GC/'
    #os.makedirs(gcDir, exist_ok=True)
    #memcardName = 'MemoryCardA.USA.raw'
    #shutil.copyfile(memcardName, gcDir + memcardName)
    
    gameSettings = "GameSettings/"
    shutil.copytree(gameSettings, user + gameSettings)

    util.makedirs(user + 'Dump/Frames/')
Example #14
    def save_snapshot(self):
        print(self.session)
        naccupoints = int(3e7)
        nscans = len(self.velofiles)
        nmaxpoints = naccupoints / nscans
        accupoints = np.full([naccupoints, 3], np.nan)
        accuintensities = np.empty([naccupoints, 1])

        ipstart = 0
        with progressbar.ProgressBar(max_value=nscans) as bar:
            for i in range(nscans):
                points, intensities = self.get_velo(i)
                npoints = min(points.shape[0], nmaxpoints)
                ip = np.random.choice(points.shape[0], npoints, replace=False)
                points = np.hstack([points[ip], np.ones([npoints, 1])]).T
                points = self.T_w_r_gt_velo[i].dot(points)[:3].T
                accupoints[ipstart:ipstart + npoints] = points
                intensities = intensities[ip].reshape([-1, 1])
                accuintensities[ipstart:ipstart + npoints] = intensities
                ipstart += npoints
                bar.update(i)
        trajectory = self.T_w_r_gt[:, :3, 3]

        util.makedirs(self.dir)
        np.savez(os.path.join(self.dir, snapshotfile),
                 points=accupoints,
                 intensities=accuintensities,
                 trajectory=trajectory)
Example #15
    def download(self, backupUDID):
        mbsbackup = self.getBackup(backupUDID)
        print "Downloading backup %s" % backupUDID.encode("hex")
        self.outputFolder = os.path.join(self.outputFolder, backupUDID.encode("hex"))
        makedirs(self.outputFolder)
        print backup_summary(mbsbackup)
        #print mbsbackup.Snapshot.Attributes.KeybagUUID.encode("hex")
        keys = self.getKeys(backupUDID)
        if not keys or not len(keys.Key):
            print "getKeys FAILED!"
            return
        
        print "Got OTA Keybag"
        self.kb = Keybag(keys.Key[1].KeyData)
        if not self.kb.unlockBackupKeybagWithPasscode(keys.Key[0].KeyData):
            print "Unable to unlock OTA keybag !"
            return

        for snapshot in xrange(1, mbsbackup.Snapshot.SnapshotID+1):
            files = self.listFiles(backupUDID, snapshot)
            print "%d files" % len(files)
            files2 = []
            for f in files:
                if f.Attributes.EncryptionKey:
                    files2.append(f)
                    print f
            if len(files2):
                authTokens = self.getFiles(backupUDID, snapshot, files)
                self.authorizeGet(authTokens)
Example #16
    def __call__(self, user):
        configDir = user + 'Config/'
        util.makedirs(configDir)

        with open(configDir + 'GCPadNew.ini', 'w') as f:
            f.write(generateGCPadNew(self.cpus))

        with open(configDir + 'Dolphin.ini', 'w') as f:
            config_args = dict(user=user,
                               gfx=self.gfx,
                               cpu_thread=self.cpu_thread,
                               dump_frames=self.dump_frames,
                               audio=self.audio,
                               speed=self.speed)
            f.write(dolphinConfig.format(**config_args))

        # don't need memory card with netplay
        #gcDir = user + 'GC/'
        #os.makedirs(gcDir, exist_ok=True)
        #memcardName = 'MemoryCardA.USA.raw'
        #shutil.copyfile(memcardName, gcDir + memcardName)

        gameSettings = "GameSettings/"
        shutil.copytree(gameSettings, user + gameSettings)

        util.makedirs(user + 'Dump/Frames/')
Example #17
def open_new_output_files(target_name):
    global o
    global o1
    global o2
    global folder_index
    global file_in_folder_count
    folder_name = folder_names[folder_index]
    makedirs(pa.out+'/'+folder_name)
    if pa.paired:
        o1 = open('{out}/{folder_name}/{target_name}_R1.fastq'.format(
            out=pa.out,
            folder_name=folder_name,
            target_name=target_name,
        ), 'w')
        o2 = open('{out}/{folder_name}/{target_name}_R2.fastq'.format(
            out=pa.out,
            folder_name=folder_name,
            target_name=target_name,
        ), 'w')
    else:
        o = open('{out}/{folder_name}/{target_name}.fastq'.format(
            out=pa.out,
            folder_name=folder_name,
            target_name=target_name,
        ), 'w')
    # FIXME: either have an off-by-one error here, or am not correctly
    # handling skipped targets
    file_in_folder_count += 1
    if pa.paired:
        file_in_folder_count += 1
    if file_in_folder_count >= pa.max_files_per_folder:
        folder_index += 1
        file_in_folder_count = 0
Example #18
    def start_profiling(self, device, **kwargs):
        # Reset logs on the device
        device.shell('dumpsys batterystats --reset')
        device.shell('logcat -c')
        print('Batterystats cleared')
        print('Logcat cleared')

        # Create output directories
        global app
        global systrace_file
        global logcat_file
        global batterystats_file
        global results_file
        output_dir = op.join(paths.OUTPUT_DIR, 'android/')
        makedirs(output_dir)

        if self.type == 'native':
            app = kwargs.get('app', None)
        # TODO: add support for other browsers, required form: app = 'package.name'
        elif self.type == 'web':
            app = 'com.android.chrome'

        # Create files on system
        systrace_file = '{}systrace_{}_{}.html'.format(output_dir, device.id, time.strftime('%Y.%m.%d_%H%M%S'))
        logcat_file = '{}logcat_{}_{}.txt'.format(output_dir, device.id, time.strftime('%Y.%m.%d_%H%M%S'))
        batterystats_file = op.join(output_dir, 'batterystats_history_{}_{}.txt'.format(device.id, time.strftime(
            '%Y.%m.%d_%H%M%S')))
        results_file = op.join(output_dir, 'results_{}_{}.csv'
                               .format(device.id, time.strftime('%Y.%m.%d_%H%M%S')))

        super(Batterystats, self).start_profiling(device, **kwargs)
        self.profile = True
        self.get_data(device, app)
Example #19
    def download(self, backupUDID):
        mbsbackup = self.getBackup(backupUDID)
        print "Downloading backup %s" % backupUDID.encode("hex")
        self.outputFolder = os.path.join(self.outputFolder,
                                         backupUDID.encode("hex"))
        makedirs(self.outputFolder)
        print backup_summary(mbsbackup)
        #print mbsbackup.Snapshot.Attributes.KeybagUUID.encode("hex")
        keys = self.getKeys(backupUDID)
        if not keys or not len(keys.Key):
            print "getKeys FAILED!"
            return

        print "Got OTA Keybag"
        self.kb = Keybag(keys.Key[1].KeyData)
        if not self.kb.unlockBackupKeybagWithPasscode(keys.Key[0].KeyData):
            print "Unable to unlock OTA keybag !"
            return

        for snapshot in xrange(1, mbsbackup.Snapshot.SnapshotID + 1):
            files = self.listFiles(backupUDID, snapshot)
            print "%d files" % len(files)
            files2 = []
            for f in files:
                if f.Attributes.EncryptionKey:
                    files2.append(f)
                    print f
            if len(files2):
                authTokens = self.getFiles(backupUDID, snapshot, files)
                self.authorizeGet(authTokens)
Example #20
    def writeOperaSetup(self, sourceRoot, outputRoot):
        """
        Writes a small header file with the current time to
        {outputRoot}/modules/hardcore/base/operasetup.h
        That file's sole purpose is to help dependency resolving. If
        the platform needs it, it should be included from a
        platform-specific header file.

        If outputRoot equals sourceRoot, then modules/hardcore/module.generated
        is updated with the name of the generated file: "base/operasetup.h".
        """
        if outputRoot is None:
            outputRoot = sourceRoot
        opera_setup_h = os.path.join(outputRoot, 'modules', 'hardcore', 'base', 'operasetup.h')
        util.makedirs(os.path.dirname(opera_setup_h))
        f = None
        try:
            f = open(opera_setup_h, "w")
            print >>f, "#ifndef MODULES_HARDCORE_OPERA_OPERASETUP_H"
            print >>f, "#define MODULES_HARDCORE_OPERA_OPERASETUP_H"
            print >>f, "// Modified by operasetup.py %s" % time.ctime()
            print >>f, "#endif // !MODULES_HARDCORE_OPERA_OPERASETUP_H"
        finally:
            if f: f.close()
        if outputRoot == sourceRoot:
            util.updateModuleGenerated(os.path.join(sourceRoot, 'modules', 'hardcore'),
                                       ["base/operasetup.h"])
Example #21
    def writeOutputFiles(self, sourceRoot, outputRoot=None):
        import util
        hardcoreDir = os.path.join(sourceRoot, 'modules', 'hardcore')
        actionsDir = os.path.join(hardcoreDir, 'actions')
        if outputRoot is None: targetDir = actionsDir
        else:
            targetDir = os.path.join(outputRoot, 'modules', 'hardcore', 'actions')
            util.makedirs(targetDir)
        changed = util.readTemplate(os.path.join(actionsDir, 'actions_template.h'),
                                    os.path.join(targetDir, 'generated_actions.h'),
                                    HandleTemplateAction(self))

        changed = util.readTemplate(os.path.join(actionsDir, 'actions_enum_template.h'),
                                    os.path.join(targetDir, 'generated_actions_enum.h'),
                                    HandleTemplateAction(self)) or changed
        changed = util.readTemplate(os.path.join(actionsDir, 'actions_strings_template.h'),
                                    os.path.join(targetDir, 'generated_actions_strings.h'),
                                    HandleTemplateAction(self)) or changed
        # Ignore changes in this; this is just a template for a
        # platforms' actions.h and not used when building Opera:
        util.readTemplate(os.path.join(actionsDir, 'actions_template_template.h'),
                          os.path.join(targetDir, 'generated_actions_template.h'),
                          HandleTemplateAction(self))
        if targetDir == actionsDir:
            util.updateModuleGenerated(
                hardcoreDir,
                ['actions/generated_actions.h',
                 'actions/generated_actions_enum.h',
                 'actions/generated_actions_strings.h',
                 'actions/generated_actions_template.h'])
        return changed
Example #22
    def render_ebook(self, polish_fun=None):
        print "-- generating ebooks"
        makedirs(self.ebook_dir)

        this_dir = os.getcwd()
        os.chdir(self.extracted_dir)
#        for t in range(0, 99):
#            first_nr = 100 * t + 1
#            last_nr = 100 * (t + 1)
#            md_name = "%s.%04i.md" % (self.name, last_nr)
#            if not os.path.exists(md_name):
#                break
        import glob
        for md_name in sorted(glob.glob("%s.*.md" % self.name)):
            last_nr = int(re.sub("%s." % self.name, "", os.path.splitext(md_name)[0]))
            first_nr = last_nr - self.chunk_size + 1
            print "---- processing markdown %s" % md_name
            epub_name = os.path.join(self.ebook_dir, "%s.%04i.epub" % (self.name, last_nr))
            makedirs(os.path.dirname(epub_name))
            cmd = ["pandoc", "-S", md_name,  "-o", epub_name, "--smart"]
            if self.res_dir:
                cmd.append("--epub-stylesheet=%s/epub.css" % self.res_dir)
            if polish_fun:
                context = locals()
                context.update(vars(self))
                cmd = polish_fun(context)
            print "------ rendering %s" % epub_name
            subprocess.check_call(cmd)
        os.chdir(this_dir)
Example #23
def run():
    print("Creating timeline")
    util.makedirs("../timeline")
    timeline_index.run()
    timeline_2020.run()
    for yeardata in t_db:
        timeline_year.run(yeardata["year"])
Example #24
def plot_trajectories():
    trajectorydir = os.path.join(
        pynclt.resultdir, 'trajectories_est_{:.0f}_{:.0f}_{:.0f}'.format(
            n_mapdetections, 10 * poles.minscore, poles.polesides[-1]))
    # pgfdir = os.path.join(trajectorydir, 'pgf')
    util.makedirs(trajectorydir)
    # util.makedirs(pgfdir)
    mapdata = np.load(os.path.join(pynclt.resultdir, get_globalmapname() + '.npz'))
    polemap = mapdata['polemeans']
    # plt.rcParams.update(params)
    for sessionname in pynclt.sessions:
        try:
            session = pynclt.session(sessionname)
            files = [file for file \
                in os.listdir(os.path.join(pynclt.resultdir, sessionname)) \
                    if file.startswith(localization_name_start)]
                    # if file.startswith(get_locfileprefix())]
            for file in files:
                T_w_r_est = np.load(os.path.join(
                    pynclt.resultdir, sessionname, file))['T_w_r_est']
                plt.clf()
                plt.scatter(polemap[:, 0], polemap[:, 1], 
                    s=1, c='b', marker='.')
                plt.plot(session.T_w_r_gt[::20, 0, 3], 
                    session.T_w_r_gt[::20, 1, 3], color=(0.5, 0.5, 0.5))
                plt.plot(T_w_r_est[::20, 0, 3], T_w_r_est[::20, 1, 3], 'r')
                plt.xlabel('x [m]')
                plt.ylabel('y [m]')
                plt.gcf().subplots_adjust(
                    bottom=0.13, top=0.98, left=0.145, right=0.98)
                filename = sessionname + file[18:-4]
                plt.savefig(os.path.join(trajectorydir, filename + '.svg'))
                # plt.savefig(os.path.join(pgfdir, filename + '.pgf'))
        except:
            pass
Example #25
def write_xenvmd_config(uuid, vg, devices, vgsize):
    global config_dir
    configfile = "%s/%s.xenvmd.config" % (config_dir, vg)
    sockpath = sockpath_of_sr_uuid(uuid)
    #Min host allocation quantum in MiB, i.e., 10 times
    #min_allocation_quantum (currently 16MiB):
    min_host_allocation_quantum = 160
    #host_allocation_quantum is 0.5% of SR size
    host_allocation_quantum = (vgsize * 0.005) / (1024 * 1024)
    #host_allocation_quantum should be bigger than 1GiB
    host_allocation_quantum = max(min_host_allocation_quantum,
                                  host_allocation_quantum)
    host_low_water_mark = (host_allocation_quantum * 0.5)
    config = """
(
 (listenPort ())
 (listenPath (Some %s))
 (host_allocation_quantum %d)
 (host_low_water_mark %d)
 (vg %s)
 (devices (%s))
 (rrd_ds_owner %s)
)
""" % (sockpath, host_allocation_quantum, host_low_water_mark, vg, " ".join(devices), uuid)
    if not os.path.exists(config_dir):
      util.makedirs(config_dir)
    if not os.path.exists(os.path.dirname(sockpath)):
      util.makedirs(os.path.dirname(sockpath))
    with open(configfile,'w') as f:
        f.write(config)
Example #26
 def dir(self):
   d = os.path.join(
     self.__context.get_config_dir(),
     'cache',
     os.path.join(*self.__path)
   )
   makedirs(d)
   return d
Example #27
 def before_first_run(self, device, path, *args, **kwargs):
     super(NativeExperiment, self).before_first_run(device, path)
     filename = op.basename(path)
     paths.OUTPUT_DIR = op.join(paths.OUTPUT_DIR, slugify(filename))
     makedirs(paths.OUTPUT_DIR)
     self.logger.info('APK: %s' % filename)
     device.install(path)
     self.package = op.splitext(op.basename(path))[0]
Example #28
def run():
    print("Creating search")
    util.makedirs("../dest/search")
    util.copyfile("templates/search/search.js", "../dest/search/search.js")
    html = templates.get("search/index")
    html = templates.initial_replace(html, 3)
    html = templates.final_replace(html, "..")
    util.writefile("../dest/search/index.html", html)
Example #29
def setvginfo(uuid,vg,devices,uri, local_allocator=None):
    sockpath = sockpath_of_sr_uuid(uuid)

    try:
        util.makedirs(config_dir)
    except OSError, e:
        if e.errno != errno.EEXIST:
            raise
Example #30
def run():
    print("Creating contestants")
    util.makedirs("../dest/contestants")
    contestants_index.run()
    for data in database:
        os.makedirs(os.path.normpath("../dest/contestants/" + data["user-id"]),
                    exist_ok=True)
        contestants_profile.run(data["user-id"])
Example #31
def setvginfo(uuid, vg, devices, uri, local_allocator=None):
    sockpath = sockpath_of_sr_uuid(uuid)

    try:
        util.makedirs(config_dir)
    except OSError, e:
        if e.errno != errno.EEXIST:
            raise
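
Examples #29 and #31 wrap util.makedirs in a try/except that swallows errno.EEXIST, the classic Python 2 idiom for "create the directory unless it already exists". Example #30 uses the Python 3 form instead. Where only Python 3 matters, the whole guard collapses to a single call; the sketch below is illustrative, not taken from any of these projects, and the helper name ensure_dir is made up:

import os

def ensure_dir(path):
    # exist_ok=True replaces the explicit errno.EEXIST check and also avoids
    # the check-then-create race in the "if not os.path.exists(d)" pattern of
    # Examples #4 and #5.
    os.makedirs(path, exist_ok=True)
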
Example #32
	def set_options(self, opts):
		self._keep_on_close = opts["keep_images"]

		suf = time.strftime("-%y.%m.%d-%H.%M", time.localtime())
		util.makedirs(opts["img_path"])
		wdir = tempfile.mkdtemp(suf, "%s-" % self._typ, opts["img_path"])
		self._wdir = opendir(wdir)
		self._img_path = os.path.join(self._wdir.name(), "img")
		os.mkdir(self._img_path)
Example #33
 def set_dirs(self, device, profiler_name):
     self.dirs['subject'] = os.path.join(self.output_root, 'data',
                                         device.name,
                                         'test_dir_1', 'test_dir_2',
                                         profiler_name.lower())
     self.dirs['aggregated'] = os.path.join(
         paths.OUTPUT_DIR, '{}_aggregated.csv'.format(profiler_name))
     self.dirs['base'] = os.path.join(paths.OUTPUT_DIR, 'data')
     makedirs(self.dirs['subject'])
Example #34
 def logcat_to_file(self, path):
     """Dumps the last x lines of logcat into a file specified by path"""
     makedirs(path)
     with open(
             op.join(
                 path,
                 '%s_%s.txt' % (self.id, time.strftime('%Y.%m.%d_%H%M%S'))),
             'w+') as f:
         f.write(Adb.logcat(self.id))
Example #35
def save_local_maps(sessionname, visualize=False):
    print(sessionname)
    session = pynclt.session(sessionname)
    util.makedirs(session.dir)
    istart, imid, iend = get_map_indices(session)
    maps = []
    with progressbar.ProgressBar(max_value=len(iend)) as bar:
        for i in range(len(iend)):
            scans = []
            for idx, val in enumerate(range(istart[i], iend[i])):
                xyz, _ = session.get_velo(val)
                scan = o3.geometry.PointCloud()
                scan.points = o3.utility.Vector3dVector(xyz)
                scans.append(scan)

            T_w_mc = util.project_xy(
                session.T_w_r_odo_velo[imid[i]].dot(T_r_mc))
            T_w_m = T_w_mc.dot(T_mc_m)
            T_m_w = util.invert_ht(T_w_m)
            T_w_r = session.T_w_r_odo_velo[istart[i]:iend[i]]
            T_m_r = np.matmul(T_m_w, T_w_r)

            occupancymap = mapping.occupancymap(scans, T_m_r, mapshape,
                                                mapsize)
            poleparams = poles.detect_poles(occupancymap, mapsize)

            if visualize:
                cloud = o3.geometry.PointCloud()
                for T, scan in zip(T_w_r, scans):
                    s = copy.copy(scan)
                    s.transform(T)
                    cloud.points.extend(s.points)
                mapboundsvis = util.create_wire_box(mapextent, [0.0, 0.0, 1.0])
                mapboundsvis.transform(T_w_m)
                polevis = []
                for j in range(poleparams.shape[0]):
                    x, y, zs, ze, a = poleparams[j, :5]
                    pole = util.create_wire_box([a, a, ze - zs],
                                                color=[1.0, 1.0, 0.0])
                    T_m_p = np.identity(4)
                    T_m_p[:3, 3] = [x - 0.5 * a, y - 0.5 * a, zs]
                    pole.transform(T_w_m.dot(T_m_p))
                    polevis.append(pole)
                o3.visualization.draw_geometries(polevis +
                                                 [cloud, mapboundsvis])

            map = {
                'poleparams': poleparams,
                'T_w_m': T_w_m,
                'istart': istart[i],
                'imid': imid[i],
                'iend': iend[i]
            }
            maps.append(map)
            bar.update(i)
    np.savez(os.path.join(session.dir, get_localmapfile()), maps=maps)
Example #36
def download(item):
    path = item['path']
    if not os.path.exists(path):
        makedirs(path)

    cmd = 'cd "%s"; wget "%s" ' % (item['path'], item['url'])
    if item['fname']:
        cmd += '-O "%s"' % item['fname']
    print cmd
    return os.system(cmd.encode('utf8'))
Example #37
 def collect_results(self, device, path=None):
     super(Android, self).collect_results(device)
     output_dir = op.join(paths.OUTPUT_DIR, 'android/')
     makedirs(output_dir)
     filename = '{}_{}.csv'.format(device.id,
                                   time.strftime('%Y.%m.%d_%H%M%S'))
     with open(op.join(output_dir, filename), 'w+') as f:
         writer = csv.writer(f)
         for row in self.data:
             writer.writerow(row)
Example #38
        def close(self):
            written = self.__file.getvalue()

            try: existing = open(self.__path).read()
            except: existing = None

            if written != existing:
                self.__updated = True
                util.makedirs(os.path.dirname(self.__path))
                open(self.__path, "w").write(written)
Example #39
def plot_trajectories():
    trajectorydir = os.path.join(
        pynclt.resultdir,
        'trajectories_est_{:.0f}_{:.0f}_{:.0f}'.format(n_mapdetections,
                                                       10 * poles.minscore,
                                                       poles.polesides[-1]))
    # pgfdir = os.path.join(trajectorydir, 'pgf')
    util.makedirs(trajectorydir)
    # util.makedirs(pgfdir)
    mapdata = np.load(
        os.path.join(pynclt.resultdir,
                     get_globalmapname() + '.npz'))
    polemap = mapdata['polemeans']
    # plt.rcParams.update(params)
    for sessionname in pynclt.sessions:
        try:
            session = pynclt.session(sessionname)
            files = [file for file \
                in os.listdir(os.path.join(pynclt.resultdir, sessionname)) \
                    if file.startswith(localization_name_start)]

            for file in files:
                T_w_r_est = np.load(
                    os.path.join(pynclt.resultdir, sessionname,
                                 file))['T_w_r_est']
                plt.clf()
                landmarks = plt.scatter(polemap[:, 0],
                                        polemap[:, 1],
                                        s=1,
                                        c='m',
                                        marker='*',
                                        label='Landmarks')
                ground_truth = plt.plot(session.T_w_r_gt[::20, 0, 3],
                                        session.T_w_r_gt[::20, 1, 3],
                                        color=(0, 1, 0),
                                        label='Ground truth',
                                        linewidth=3.0)
                pos_estimation = plt.plot(T_w_r_est[::20, 0, 3],
                                          T_w_r_est[::20, 1, 3],
                                          'r',
                                          label='Estimated trajectory')
                plt.ylabel('North (Unit:m)')
                plt.xlabel('East (Unit:m)')
                plt.legend()
                plt.gcf().subplots_adjust(bottom=0.13,
                                          top=0.98,
                                          left=0.145,
                                          right=0.98)
                plt.grid(color=(0.5, 0.5, 0.5), linestyle='-', linewidth=1)
                filename = sessionname + file[18:-4]
                plt.savefig(os.path.join(trajectorydir, filename + '.svg'))
                plt.savefig(os.path.join(trajectorydir, filename + '.png'))
                # plt.savefig(os.path.join(pgfdir, filename + '.pgf'))
        except:
            pass
Example #40
def check_dir(path, force):
    if exists(path) and force:
        warnings.warn(f'{path} exists and will be overwritten')
        rmtree(path)
        makedirs(path)
    elif not exists(path):
        makedirs(path)
    else:
        warnings.warn(
            f'{path} exists, --force not specified, continuing with existing directory'
        )
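
The check_dir helper in Example #40 implements a common command-line convention: wipe and recreate the output directory only when --force is given, otherwise warn and reuse it. A minimal usage sketch follows; the argparse wiring is illustrative and not part of the original snippet:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('out_dir')
parser.add_argument('--force', action='store_true',
                    help='wipe out_dir if it already exists')
args = parser.parse_args()
check_dir(args.out_dir, args.force)
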
Example #41
 def __init__(self, ctx, path, state):
     self._path = path
     self._state = state
     r = ctx._repo
     root = r.wjoin(path)
     create = False
     if not os.path.exists(os.path.join(root, '.hg')):
         create = True
         util.makedirs(root)
     self._repo = hg.repository(r.ui, root, create=create)
     self._initrepo(r, state[0], create)
Example #42
def save_trajectories():
    trajectorydir = os.path.join(resultdir, 'trajectories_gt')
    util.makedirs(trajectorydir)

    trajectories = [session(s).T_w_r_gt[::20, :2, 3] for s in sessions]
    for i in range(len(trajectories)):
        plt.clf()
        [plt.plot(t[:, 0], t[:, 1], color=(0.5, 0.5, 0.5)) \
            for t in trajectories]
        plt.plot(trajectories[i][:, 0], trajectories[i][:, 1], color='y')
        plt.savefig(os.path.join(trajectorydir, sessions[i] + '.svg'))
Example #43
def run():
    print("Creating search")
    util.makedirs("../search")
    util.copyfile("database/countries.csv", "../search/countries.csv")
    util.copyfile("database/estudiantes.csv", "../search/estudiantes.csv")
    util.copyfile("templates/search/search.js", "../search/search.js")
    util.copyfile("templates/search/asciify.js", "../search/asciify.js")
    html = templates.get("search/index")
    html = templates.initial_replace(html, 3)
    html = templates.final_replace(html, "..")
    util.writefile("../search/index.html", html)
Example #44
 def __init__(self, ctx, path, state):
     self._path = path
     self._state = state
     r = ctx._repo
     root = r.wjoin(path)
     create = False
     if not os.path.exists(os.path.join(root, '.hg')):
         create = True
         util.makedirs(root)
     self._repo = hg.repository(r.ui, root, create=create)
     self._initrepo(r, state[0], create)
Example #45
def init_album(alb):
    path = alb['path']
    makedirs(path)

    with open(os.path.join(path, 'meta'), 'w') as f:
        f.write(json.dumps(alb))

    if alb['desc']:
        with open(os.path.join(path, 'desc'), 'w') as f:
            f.write(alb['desc'])

    '''
Example #46
 def writeUndeletedFile(self, filename, data):
     knownExtensions = (".m4a", ".plist",".sqlite",".sqlitedb", ".jpeg", ".jpg", ".png", ".db",".json",".xml",".sql")
     #windows invalid chars  \/:*?"<>|
     filename = str(filename.encode("utf-8")).translate(None, "\\/:*?\"<>|,")
     folder = self.outputdir
     if self.outputdir == "./":
         folder = folder + "/undelete"
     elif filename.lower().endswith(knownExtensions):
         ext = filename[filename.rfind(".")+1:]
         folder = folder + "/" + ext.lower()
     makedirs(folder)
     open(folder + "/" + filename, "wb").write(data)
Example #47
 def __init__(self, ctx, path, state):
     self._path = path
     self._state = state
     r = ctx._repo
     root = r.wjoin(path)
     if os.path.exists(os.path.join(root, '.hg')):
         self._repo = localrepo.localrepository(r.ui, root)
     else:
         util.makedirs(root)
         self._repo = localrepo.localrepository(r.ui, root, create=True)
     self._repo._subparent = r
     self._repo._subsource = state[0]
Example #48
 def extract_all(self,feature_type,image_sets,sizes, step_size):    
   if not os.path.isdir(self.data_dir):
     os.mkdir(self.data_dir)    
   for imset in image_sets:#'train.txt', 'test.txt']:
     d = Dataset(imset)
     images = d.images    
     for feature in feature_type:#, 'phow', 'cphow', 'phog180', 'phog360']:          
       ut.makedirs(self.data_dir + feature + '/' )
       ut.makedirs(self.data_dir + feature + '/times/')               
       for img in range(comm_rank, len(images), comm_size): # PARALLEL
         image = images[img]
         self.create_image_feature(image, feature, sizes, step_size)
Example #49
 def writeModuleSources(self, content):
     """
     Writes the specified content into the module.sources file in
     the temporary directory that was created in the setUp() step.
     The file is removed in tearDown().
     """
     path = os.path.join(self.__tmpdir, 'test', 'foo')
     util.makedirs(path)
     f = None
     try:
         f = open(self.moduleSourcesFile(), "w")
         f.write(content)
     finally:
         if f: f.close()
Example #50
    def prepare_file(cls, main_input, output_dir, verbose=False, overwrite=False):
        path, fname = os.path.realpath(main_input).rsplit('/', 1)
        provider = DirDocProvider(path)
        slug, ext = os.path.splitext(fname)

        if output_dir != '':
            makedirs(output_dir)
        outfile = os.path.join(output_dir, slug + '.' + cls.ext)
        if os.path.exists(outfile) and not overwrite:
            return

        doc = WLDocument.from_file(main_input, provider=provider)
        output_file = cls.transform(doc, cover=cls.cover, flags=cls.flags)
        doc.save_output_file(output_file, output_path=outfile)
Example #51
 def fetch_raw_pages(self):
     print "-- fetching raw pages from %s" % self.url
     makedirs(self.pages_dir)
     for nr in self.nr_range:
         if os.path.exists("%s/%s%s" % (self.pages_dir, self.page_name, str(nr))):
             print("---- nr %i cached\r" % nr)
             continue
         print "---- fetching comic nr %i" % nr
         print "------ %s%s" % (self.url, nr)
         return_code = subprocess.call(
             ["/usr/bin/wget", "-q", "-k", "-p", "-r", "-P", self.raw_dir, "--follow-tags=img", "%s%s" % (self.url, nr)]
         )
         if return_code:
             print "------ wget returns with exit code %i, assuming last comic" % return_code
             break
Example #52
    def create(self, sr_uuid, size):
        if util.ioretry(lambda: self._checkmount()):
            raise xs_errors.XenError('NFSAttached')

        # Set the target path temporarily to the base dir
        # so that we can create the target SR directory
        self.remotepath = self.dconf['serverpath']
        try:
            self.attach(sr_uuid)
        except:
            try:
                os.rmdir(self.path)
            except:
                pass
            raise xs_errors.XenError('NFSMount')
        newpath = os.path.join(self.path, sr_uuid)
        if util.ioretry(lambda: util.pathexists(newpath)):
            if len(util.ioretry(lambda: util.listdir(newpath))) != 0:
                self.detach(sr_uuid)
                raise xs_errors.XenError('SRExists')
        else:
            try:
                util.ioretry(lambda: util.makedirs(newpath))
            except util.CommandException, inst:
                if inst.code != errno.EEXIST:
                    self.detach(sr_uuid)
                    raise xs_errors.XenError('NFSCreate', \
                          opterr='remote directory creation error is %d' \
                          % inst.code)
Example #53
    def mount(self, mountpoint=None):
        """Mount the remote CIFS export at 'mountpoint'"""
        if mountpoint == None:
            mountpoint = self.mountpoint
        elif not util.is_string(mountpoint) or mountpoint == "":
            raise CifsException("mountpoint not a string object")

        missing_params = set()

        if not self.dconf.has_key('username'):
            missing_params.add('username')

        if not (self.dconf.has_key('password') or
                self.dconf.has_key('password_secret')):
            missing_params.add('password')

        if missing_params:
            errstr = 'device-config is missing the following parameters: ' + \
                     ', '.join([param for param in missing_params])
            raise xs_errors.XenError('ConfigParamsMissing', opterr=errstr)

        try:
            if not util.ioretry(lambda: util.isdir(mountpoint)):
                util.ioretry(lambda: util.makedirs(mountpoint))
        except util.CommandException, inst:
            raise CifsException("Failed to make directory: code is %d" %
                                inst.code)
Example #54
def extract_backup(backup_path, output_path, password=""):
    if not os.path.exists(backup_path + "/Manifest.plist"):
        print "Manifest.plist not found"
        return
    manifest = readPlist(backup_path + "/Manifest.plist")
    
    info = readPlist( backup_path + "/Info.plist")
    for i in showinfo:
        print i + " : " + unicode(info.get(i, "missing"))
    
#jsc
#    print "Extract backup to %s ? (y/n)" % output_path
#    if raw_input() == "n":
#        return
    
    print "Backup is %sencrypted" % (int(not manifest["IsEncrypted"]) * "not ")
    
#jsc
#    if manifest["IsEncrypted"] and password == "":
#        print "Enter backup password : "******"BackupKeyBag"):
        print "No BackupKeyBag in manifest, assuming iOS 3.x backup"
        decrypt_backup3(backup_path, output_path, password)
    else:    
        mbdb = MBDB(backup_path)
    
        kb = Keybag.createWithBackupManifest(manifest, password)
        if not kb:
            return
        
        #jsc
        password = kb.bfPassword

        manifest["password"] = password
        makedirs(output_path)
        plistlib.writePlist(manifest, output_path + "/Manifest.plist")
       
        mbdb.keybag = kb
        mbdb.extract_backup(output_path)
        
        #jsc
        print "Bruteforce successful, backup password : %s" % password
        
        print "You can decrypt the keychain using the following command : "
        print "python keychain_tool.py -d \"%s\" \"%s\"" % (output_path + "/KeychainDomain/keychain-backup.plist", output_path + "/Manifest.plist")
Example #55
def soft_mount(mountpoint, remoteserver, remotepath, transport):
    """Mount the remote NFS export at 'mountpoint'"""
    try:
        if not util.ioretry(lambda: util.isdir(mountpoint)):
            util.ioretry(lambda: util.makedirs(mountpoint))
    except util.CommandException, inst:
        raise NfsException("Failed to make directory: code is %d" % 
                            inst.code)
Example #56
    def __call__(self, sourceRoot, outputRoot=None, quiet=True, show_all=False):
        """
        Calling this instance will generate the source files from the
        modules/viewers/module.viewers file.

        @param sourceRoot is the root directory of the source tree
          that was parsed. Some of the output files are always
          generated relative to the sourceRoot.
        @param outputRoot root of the tree for generated files,
          defaults to sourceRoot
        @param quiet if False, print a message if no files were changed.
        @param show_all controls whether to show only modified files or all
          files which are inspected. Default is False.

        @return The convention of the "system-functions" are that the
          return value should be
          - 0 to indicate success
          - 1 to indicate an error
          - 2 to indicate that output files have changed.
        """
        self.startTiming()
        import viewers

        if outputRoot == None:
            outputRoot = sourceRoot

        modulepath = os.path.join(sourceRoot, "modules", "viewers")
        viewersfile = os.path.join(modulepath, "module.viewers")
        destpath = os.path.join(outputRoot, "modules", "viewers", "src")
        enumfile = os.path.join(destpath, "generated_viewers_enum.h")
        datafile = os.path.join(destpath, "generated_viewers_data.inc")
        result = 0
        util.fileTracker.addInput(viewersfile)
        if not os.path.exists(enumfile) or not os.path.exists(datafile) or \
            max(os.path.getmtime(viewersfile), os.path.getmtime(viewers.__file__)) > min(os.path.getmtime(enumfile), os.path.getmtime(datafile)):
            util.makedirs(destpath)
            viewers.BuildViewers(viewersfile, enumfile, datafile)
            result = 2

            # List the generated files in module.generated
            if outputRoot == sourceRoot:
                util.updateModuleGenerated(modulepath,
                                           [enumfile[len(modulepath) + 1:],
                                            datafile[len(modulepath) + 1:]])

        return self.endTiming(result, quiet=quiet)
Example #57
    def symlink(self, src, dst):
        self.auditor(dst)
        linkname = os.path.join(self.base, dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        dirname = os.path.dirname(linkname)
        if not os.path.exists(dirname):
            util.makedirs(dirname, self.createmode)

        if self._cansymlink:
            try:
                os.symlink(src, linkname)
            except OSError, err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)