def init():
    if not os.path.isfile(TRIBLER_DB_PATH_BACKUP):
        print >> sys.stderr, "Please download bak_tribler.sdb from http://www.st.ewi.tudelft.nl/~jyang/donotremove/bak_tribler.sdb and save it as", os.path.abspath(TRIBLER_DB_PATH_BACKUP)
        sys.exit(1)
    if os.path.isfile(TRIBLER_DB_PATH_BACKUP):
        copyFile(TRIBLER_DB_PATH_BACKUP, TRIBLER_DB_PATH)
        print "refresh sqlite db", TRIBLER_DB_PATH
Example #3
def __copyToDir(srcPath, destDirPath):
    """ Copies 1 item (recursively) """
    srcPath = absPath(srcPath)
    destDirPath = absPath(destDirPath)
    srcTail = fileBaseName(normpath(srcPath))
    destPath = joinPath(destDirPath, srcTail)
    if srcPath == destPath: return
    __removeFromDir(srcTail, destDirPath)
    if isFile(srcPath): copyFile(srcPath, destPath)
    elif isDir(srcPath): copyDir(srcPath, destPath)
    print('Copied "%s" to "%s"' % (srcPath, destPath))
    return destPath
Example #4
def init_bak_tribler_sdb():
    if not os.path.isfile(TRIBLER_DB_PATH_BACKUP):
        got = extract_db_files(FILES_DIR, 'bak_tribler.tar.gz')
        if not got:
            print >> sys.stderr, "Missing bak_tribler.tar.gz"
            sys.exit(1)

    if os.path.isfile(TRIBLER_DB_PATH_BACKUP):
        copyFile(TRIBLER_DB_PATH_BACKUP, TRIBLER_DB_PATH)
        #print "refresh sqlite db", TRIBLER_DB_PATH

    if os.path.exists(STATE_FILE_NAME_PATH):
        os.remove(STATE_FILE_NAME_PATH)
        print "remove journal file"
    def copyIcon(self):
        """>>> copyIcon() -> success
        Copy the custom icon of the Python program
        into the template, if one was specified.
        """
        if self.appIcon is None:
            return True
        self.logger.info('Copying icon file...')
        templateIconPath = os.path.join(self.templateDir, self.TEMPLATE_ICON_PATH)
        if not os.path.exists(templateIconPath):
            self.logger.error('Could not find the icon file of the template at {path}'
                              .format(path=templateIconPath))
            return False
        os.remove(templateIconPath)
        copyFile(self.appIcon, templateIconPath)
        return True

    def copyCustomManifestTemplate(self):
        """>>> copyCustomManifestTemplate() -> success
        Copy the custom manifest template of the Python
        program into the template, if one was specified.
        """
        if self.appManifestTemplate is None:
            return True
        templateManifestPath = os.path.join(self.templateDir, self.TEMPLATE_MANIFEST_PATH)
        self.logger.info('Copying manifest template...')
        if not os.path.exists(templateManifestPath):
            self.logger.error('Could not find the manifest file of the template at {path}'
                              .format(path=templateManifestPath))
            return False
        os.remove(templateManifestPath)
        copyFile(self.appManifestTemplate, templateManifestPath)
        return True
Example #8
def init_bak_tribler_sdb(backup='bak_tribler.sdb', destination='tribler.sdb', destination_path=FILES_DIR, overwrite=False):
    backup_path = os.path.join(FILES_DIR, backup)
    destination_path = os.path.join(destination_path, destination)

    if not os.path.isfile(backup_path) or overwrite:
        got = extract_db_files(FILES_DIR, backup_path + ".tar.gz", overwrite)
        if not got:
            print >> sys.stderr, "Missing", backup_path + ".tar.gz"
            sys.exit(1)

    for f in os.listdir(FILES_DIR):
        if f.startswith(destination):
            os.remove(os.path.join(FILES_DIR, f))

    if os.path.isfile(backup_path):
        copyFile(backup_path, destination_path)

    return destination_path
Example #9
def init_bak_tribler_sdb(backup='bak_tribler.sdb', destination='tribler.sdb', destination_path=TESTS_DATA_DIR, overwrite=False):
    backup_path = os.path.join(TESTS_DATA_DIR, backup)
    destination_path = os.path.join(destination_path, destination)

    if not os.path.isfile(backup_path) or overwrite:
        got = extract_db_files(TESTS_DATA_DIR, backup_path + ".tar.gz", overwrite)
        if not got:
            print >> sys.stderr, "Missing", backup_path + ".tar.gz"
            sys.exit(1)

    for f in os.listdir(TESTS_DATA_DIR):
        if f.startswith(destination):
            os.remove(os.path.join(TESTS_DATA_DIR, f))

    if os.path.isfile(backup_path):
        copyFile(backup_path, destination_path)

    return destination_path
Example #10
    def __init__(self,
                 aFileName,
                 aDelimiters = None,
                 aWorkOnCharacters = False):

        self.mDelimiters = aDelimiters
        if self.mDelimiters is not None:
            self.mDelimiters = self.mDelimiters.split(",")
            if len(self.mDelimiters) < 2:
                self.mDelimiters = ["DDBEGIN", "DDEND"]

        self.mFileName = aFileName
        self.mTmpFileName = self.mFileName + "-"
        copyFile(self.mFileName, self.mTmpFileName)

        self.mWorkOnCharacters = aWorkOnCharacters

        self.mRemovedBlocks = deque()
        self.mRemovedBlocks.append(None) # add a "None" element to mark the end
        self.clearChunkBoundaries()
Example #11
    def __init__(self,
                 aFileName,
                 aDelimiters = None,
                 aWorkOnCharacters = False):

        self.mDelimiters = aDelimiters
        if self.mDelimiters is not None:
            self.mDelimiters = self.mDelimiters.split(",")
            if len(self.mDelimiters) < 2:
                self.mDelimiters = ["DDBEGIN", "DDEND"]

        self.mFileName = aFileName
        self.mTmpFileName = self.mFileName + "-"
        copyFile(self.mFileName, self.mTmpFileName)

        self.mWorkOnCharacters = aWorkOnCharacters

        self.mRemovedBlocks = deque()
        self.mRemovedBlocks.append(None) # add a "None" element to mark the end
        self.clearChunkBoundaries()
Example #12
def CreateNodeFiles(srcFile, numMachines, numGPUs, uiMachine, uiGPU):
	
	# numGPUs is a list of the number of GPUs on each machine
	
	confirm = ui.messageBox('Create Node Files', 'Are you sure? This will overwrite previous node files.', buttons = ['Yes', 'No'])
	
	if confirm == 0:
	
		projectFolder = project.folder
		projectPath = projectFolder + '/' + srcFile
		
		name = ''
		
		from shutil import copyfile as copyFile
						
		for i in range(0, numMachines):

			# i.e. uiGPU if (i == 0 and uiMachine == 1) else -1
			UIGPU = [-1, uiGPU][i == 0 and uiMachine == 1]

			for n in range(0, numGPUs[i]):
			
				if n != UIGPU:
					name = 'M' + str(i) + '_GPU' + str(n) + '.toe'	
					copyFile(projectPath, name)
Example #13
    def install_syslinux(self, device, directory):
    
        boot_folder = directory+"boot"
        dev = device[:8]
        try:
            log.info("cp /usr/lib/syslinux/menu.c32 {0}/syslinux".format(boot_folder))
            copyFile("/usr/lib/syslinux/menu.c32", boot_folder+"/syslinux")
            log.info("cp /usr/lib/syslinux/chain.c32 {0}/syslinux".format(boot_folder))
            copyFile("/usr/lib/syslinux/chain.c32", boot_folder+"/syslinux")
            log.info("cp /usr/lib/syslinux/reboot.c32 {0}/syslinux".format(boot_folder))
            copyFile("/usr/lib/syslinux/reboot.c32", boot_folder+"/syslinux")
            log.info("cp /usr/lib/syslinux/poweroff.com {0}/syslinux".format(boot_folder))
            copyFile("/usr/lib/syslinux/poweroff.com", boot_folder+"/syslinux")

        except:
            log.error("Error copying files")
            return False
        try:
            aux = get_filesystem_type(device)
            if "fat" in aux:
               run_simple_command_echo("syslinux -d {0}/syslinux {1}".format(boot_folder,device),True)
            elif "ext" in aux:
                log.info("extlinux --install {0}/syslinux".format(boot_folder))                      
                run_simple_command_echo("extlinux --install {0}/syslinux".format(boot_folder),True)

            else:
                log.error("Filesystem not accepted.")
                return False

        except:
            log.error("installing syslinux in {0} failed".format(device))
            return False        
        try:
            run_simple_command_echo("dd if=/usr/lib/syslinux/mbr.bin of={0} \
                bs=440 conv=notrunc count=1".format(dev),True)
        except:
            log.error("dd failed")
            return False
            
        if not get_boot_flag(device):
            unset_all_boot_flags(device)
            if set_boot_flag(device):
                log.info("{0} has been set as bootable device".format(device))
        return True
Example #14
    def preProcess(self, _edObject=None):
        EDPluginExecProcessScript.preProcess(self)
        self.DEBUG("EDPluginBestv1_2.preProcess")

        self.setScriptLogFileName("best.log")

        if self.dataInput.bestFileContentPar is not None:
            self.setFileBestDat(
                os.path.join(self.getWorkingDirectory(), "bestfile.dat"))
            self.setFileBestPar(
                os.path.join(self.getWorkingDirectory(), "bestfile.par"))

            EDUtilsFile.writeFile(self.getFileBestDat(),
                                  self.dataInput.bestFileContentDat.value)
            EDUtilsFile.writeFile(self.getFileBestPar(),
                                  self.dataInput.bestFileContentPar.value)

            listBestFileContentHKL = self.dataInput.bestFileContentHKL

            iterator = 0
            for bestFileContentHKL in listBestFileContentHKL:
                iterator = iterator + 1
                bestFileHKL = os.path.join(self.getWorkingDirectory(),
                                           "bestfile" + str(iterator) + ".hkl")
                self.listFileBestHKL.append(bestFileHKL)
                EDUtilsFile.writeFile(bestFileHKL, bestFileContentHKL.value)

        elif self.dataInput.xdsCorrectLp is not None:
            self.strPathToCorrectLp = os.path.join(self.getWorkingDirectory(),
                                                   "CORRECT.LP")
            shutil.copyFile(self.dataInput.xdsCorrectLp.path.value,
                            self.strPathToCorrectLp)

            self.strPathToBkgpixCbf = os.path.join(self.getWorkingDirectory(),
                                                   "BKGPIX.cbf")
            shutil.copyFile(self.dataInput.xdsBkgpixCbf.path.value,
                            self.strPathToBkgpixCbf)

            self.strListFileXdsAsciiHkl = ""
            index = 1
            for xdsAsciiHkl in self.dataInput.xdsAsciiHkl:
                strPathToXdsAsciiHkl = os.path.join(
                    self.getWorkingDirectory(),
                    "XDS_ASCII_{0}.HKL".format(index))
                shutil.copyFile(xdsAsciiHkl.path.value, strPathToXdsAsciiHkl)
                self.strListFileXdsAsciiHkl += " " + strPathToXdsAsciiHkl
                index += 1

        if self.dataInput.complexity is not None:
            self.strComplexity = self.dataInput.complexity.value

        self.initializeCommands()
mkdir(__JOB_PATH__)
mkdir(__DEs_PATH__)


all_files = []
while True:
	all_files = all_files + fileopenbox("Séléctionnez les fiches PA à traiter", "Abtronics", filetypes= "*.xls", multiple=True)
	cont = boolbox("Séléctionner d'autres?", ['Oui', 'Non, c\'est tout'])
	if cont:
		pass
	else:
		break

for file in all_files:
	filename = ntpath.basename(file)
	copyFile(file, __JOB_PATH__ + filename)


__RG = {
	'NRO/PMZ/PA': r'NRO\s(?P<NRO>[A-Z]+)\s-\sPMZ\s(?P<PMZ>[0-9]+)\s-\sPA\s(?P<PA>[0-9]+)'
}

"""
Fichiers nécéssaires:
 * Un model descro PA
 * Une fiche PA remplie
Etapes:
1. Ouvrir la fiche PA
2. Aller à la feuille "synoptique-bilan µmodules"
3. Ouvrir tout les déssins Visio qu'il y a dedans un par un
4. Détecter le bloc PA (le point d'aboutement)
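
# A minimal sketch of steps 1-3 above, reusing the Excel COM calls that appear
# elsewhere in these examples (CreateObject("Excel.application"), Workbooks.Open,
# iterating Worksheets). The helper name list_embedded_drawings is hypothetical,
# step 4 (detecting the PA block) is not shown, and comtypes is assumed to
# provide CreateObject; treat this as an illustration only.
from comtypes.client import CreateObject

def list_embedded_drawings(pa_sheet_path):
    xl = CreateObject("Excel.application")
    xl.Visible = False
    wb = xl.Workbooks.Open(pa_sheet_path)
    for ws in wb.Worksheets:
        if ws.Name == "synoptique-bilan µmodules":
            # the embedded Visio drawings appear as shapes on this sheet
            for shape in ws.Shapes:
                print(shape.Name)
    wb.Close()
    xl.Quit()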
Example #17
def addNewModel(model_name=None, model_dir=None):
    """
    addNewModel(model_name = None, model_dir = None, db_file_name = ".rhml_db.sqlite3")

    0. The path is checked to be already present in the database. If true, the according status returned, workflow stops.
    1. Model wrapper script template copied to the model's directory.
    2. The record containing model name, model path is added to `models_table`.
    3. The record of version 0 is added to `versions_table`.
    4. The model folder is scanned recursiveliy, adding all the files found to the `files_table` (absolute paths). 
    5. The `.rhml_storage` folder created within specified model directory.
    6. The initial, 0-version archive is created.

    """
    if model_dir == "~":
        model_path = expandUser(model_dir)
    else:
        model_path = model_dir

    if ((not model_name) or (not model_path)):
        return "Can't add the model: model name or model path is missing."

    timestamp = str(datetime.now())
    #######################################################################
    templateSource = configuration.model_wrapper_class_file_name
    templateDestination = "{}/{}".format(
        model_path, configuration.model_wrapper_class_file_name)

    if not exists(templateDestination):
        copyFile(templateSource, templateDestination)
#######################################################################
#================= starting DB work =====================================

    with rhythmicDB(configuration.db_name, configuration.db_file_name) as db:

        probe = db.execute(
            "SELECT model_name FROM models_table WHERE model_path = '{}'".
            format(model_path))

        if len(probe) > 0:
            return "The model [ {} ] stored in [ {} ] is already in the base.".format(
                probe[0][0], model_path)

        new_model_id = db.execute("""
            INSERT INTO models_table
            (
                model_name, 
                model_path, 
                last_version_timestamp
            ) 
            VALUES 
            (
                '{}', '{}', '{}'
            );
            """.format(model_name, model_path, timestamp))

        new_model_version_id = db.execute("""
            INSERT INTO versions_table
            (
                model_id,
                created_timestamp
            )
            VALUES
            (
                '{}', '{}'
            );
            """.format(new_model_id, timestamp))

        files_record_request = \
        """
        INSERT INTO files_table
        (
            model_version_id,
            absolute_path,
            last_modified_time
        )
        VALUES
        """

        new_model_files = scanModelFolder(model_path)

        if len(new_model_files) > 0:
            files_record_values = ""

            for item_path in new_model_files:
                item = new_model_files[item_path]
                files_record_values += "('{}', '{}', '{}'), \n".format(
                    new_model_version_id, item_path,
                    item["last_modified_time"])

            files_record_request += files_record_values[:len(
                files_record_values) - 3] + ";"
            #truncating `, \n` from the end of request and adding `;`.

            db.execute(files_record_request)

    #================= finished DB work =====================================

    model_storage_path = model_path + "/{}".format(
        configuration.storage_folder_name)

    if ((not exists(model_storage_path)) or (not isDir(model_storage_path))):
        makeDir(model_storage_path)

    #================= Starting  building ver0 .zip in storage =====================================

    archive_name = model_storage_path + "/model_{}_ver0.zip".format(
        new_model_id)

    packFiles(model_path, archive_name, new_model_files)

    #================= Finished building ver0 .zip in storage =====================================

    return "Success"
from shutil import copy as copyFile
import abtronics
from easygui import fileopenbox
from subprocess import call
from ntpath import basename

title = 'Générateur word - déscription des travaux'

__models_folder__ = abtronics.__models_folder__
__model_name__ = 'descro_travaux_final.doc'
__model_path__ = __models_folder__ + sep + __model_name__
__jobs_folder__ = abtronics.__jobs_folder__
__JOB_ID__ = abtronics.__JOB_ID__
__JOB_PATH__ = __jobs_folder__ + sep + __JOB_ID__ + sep
mkdir(__JOB_PATH__)
copyFile(__model_path__, __JOB_PATH__ + sep + __model_name__)
descro_travaux_doc_path = __JOB_PATH__ + sep + __model_name__

mois = {1: u'janvier', 2: u'février', 3: u'mars', 4: u'avril', 5: u'mai', 6: u'juin', 7: u'juillet', 8: u'août', 9: u'septembre', 10: u'octobre', 11: u'novembre', 12: u'décembre'}
now = datetime.now()
TODAY = str(now.day) + ' ' + mois[now.month]  + ' ' + str(now.year)

def dump(obj):
	for attr in dir(obj):
		print("obj.%s = %s" % (attr, getattr(obj, attr)))


pp = pprint.PrettyPrinter(indent=4)
pp = pp.pprint
FI = fileopenbox("Séléctionnez la fiche FI à traiter", "Abtronics", filetypes= "*.xlsm")
copyFile(FI, __JOB_PATH__ + sep + basename(FI))
    def addTorrent(self):
        old_size = self.tdb.size()
        old_tracker_size = self.tdb._db.size('TrackerInfo')

        s_infohash = unhexlify('44865489ac16e2f34ea0cd3043cfd970cc24ec09')
        m_infohash = unhexlify('ed81da94d21ad1b305133f2726cdaec5a57fed98')

        sid = self.tdb.getTorrentID(s_infohash)
        mid = self.tdb.getTorrentID(m_infohash)

        single_torrent_file_path = os.path.join(self.getStateDir(), 'single.torrent')
        multiple_torrent_file_path = os.path.join(self.getStateDir(), 'multiple.torrent')

        copyFile(S_TORRENT_PATH_BACKUP, single_torrent_file_path)
        copyFile(M_TORRENT_PATH_BACKUP, multiple_torrent_file_path)

        single_tdef = TorrentDef.load(single_torrent_file_path)
        assert s_infohash == single_tdef.get_infohash()
        multiple_tdef = TorrentDef.load(multiple_torrent_file_path)
        assert m_infohash == multiple_tdef.get_infohash()

        self.tdb.addExternalTorrent(single_tdef)
        self.tdb.addExternalTorrent(multiple_tdef)

        single_torrent_id = self.tdb.getTorrentID(s_infohash)
        multiple_torrent_id = self.tdb.getTorrentID(m_infohash)

        assert self.tdb.getInfohash(single_torrent_id) == s_infohash

        single_name = 'Tribler_4.1.7_src.zip'
        multiple_name = 'Tribler_4.1.7_src'

        assert self.tdb.size() == old_size + 2, old_size - self.tdb.size()
        new_tracker_table_size = self.tdb._db.size('TrackerInfo')
        assert old_tracker_size < new_tracker_table_size, new_tracker_table_size - old_tracker_size

        sname = self.tdb.getOne('name', torrent_id=single_torrent_id)
        assert sname == single_name, (sname, single_name)
        mname = self.tdb.getOne('name', torrent_id=multiple_torrent_id)
        assert mname == multiple_name, (mname, multiple_name)

        s_size = self.tdb.getOne('length', torrent_id=single_torrent_id)
        assert s_size == 1583233, s_size
        m_size = self.tdb.getOne('length', torrent_id=multiple_torrent_id)
        assert m_size == 5358560, m_size

        cat = self.tdb.getOne('category', torrent_id=multiple_torrent_id)
        assert cat == u'xxx', cat

        s_status = self.tdb.getOne('status', torrent_id=single_torrent_id)
        assert s_status == u'unknown', s_status

        m_comment = self.tdb.getOne('comment', torrent_id=multiple_torrent_id)
        comments = 'www.tribler.org'
        assert m_comment.find(comments) > -1
        comments = 'something not inside'
        assert m_comment.find(comments) == -1

        m_trackers = self.tdb.getTrackerListByInfohash(m_infohash)
        assert len(m_trackers) == 8
        assert 'http://tpb.tracker.thepiratebay.org/announce' in m_trackers, m_trackers

        s_torrent = self.tdb.getTorrent(s_infohash)
        m_torrent = self.tdb.getTorrent(m_infohash)
        assert s_torrent['name'] == 'Tribler_4.1.7_src.zip', s_torrent['name']
        assert m_torrent['name'] == 'Tribler_4.1.7_src', m_torrent['name']
        assert m_torrent['last_tracker_check'] == 0
Example #20
try:
    mkdir(__FICHES_PATH__)
    chmod(__FICHES_PATH__, 0o777)
except:
    print("__FICHES_PATH__ éxiste déjà")


def dump(obj):
    for attr in dir(obj):
        print("obj.%s = %s" % (attr, getattr(obj, attr)))


pp = pprint.PrettyPrinter(indent=4)
pp = pp.pprint

CAP_FT = fileopenbox("Séléctionnez le fichier CAP-FT à traiter", "Abtronics", filetypes="*.xls", multiple=True)[0]
copyFile(CAP_FT, __JOB_PATH__ + ntpath.basename(CAP_FT))
xl = CreateObject("Excel.application")
xl.Visible = False
xl.DisplayAlerts = False
wb = xl.Workbooks.Open(__JOB_PATH__ + ntpath.basename(CAP_FT))
print(" " + str(wb.Sheets.Count) + " sheets found.")
for ws in wb.Worksheets:
    print("  >> Generating " + ws.name + ".xlsx ...")
    print("Saving " + __FICHES_PATH__ + ws.name + ".xlsx")
    ws.Copy()
    xl.ActiveWorkbook.SaveAs(__FICHES_PATH__ + ws.name + ".xlsx")
    xl.ActiveWorkbook.Close()

xl.Quit()

# At the very end, once everything has gone well, show the job folder
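
# A minimal sketch of that last step (assuming Windows, where these COM-driven
# scripts already run): os.startfile opens the job folder in Explorer.
from os import startfile
startfile(__JOB_PATH__)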
from Tribler.Core.BuddyCast.TorrentCollecting import SimpleTorrentCollecting

CREATE_SQL_FILE = os.path.join(BASE_DIR, 'tribler_sdb_v1.sql')
assert os.path.isfile(CREATE_SQL_FILE)
DB_FILE_NAME = 'tribler.sdb'
DB_DIR_NAME = None
FILES_DIR = os.path.join(BASE_DIR, 'Test/extend_db_dir/')
TRIBLER_DB_PATH = os.path.join(FILES_DIR, 'tribler.sdb')
TRIBLER_DB_PATH_BACKUP = os.path.join(FILES_DIR, 'bak_tribler.sdb')
if not os.path.isfile(TRIBLER_DB_PATH_BACKUP):
    print >> sys.stderr, "Please download bak_tribler.sdb from http://www.st.ewi.tudelft.nl/~jyang/donotremove/bak_tribler.sdb and save it as", os.path.abspath(
        TRIBLER_DB_PATH_BACKUP)
    sys.exit(1)
if os.path.isfile(TRIBLER_DB_PATH_BACKUP):
    from shutil import copy as copyFile
    copyFile(TRIBLER_DB_PATH_BACKUP, TRIBLER_DB_PATH)
    #print "refresh sqlite db", TRIBLER_DB_PATH

SQLiteCacheDB.DEBUG = False


class TestTorrentCollecting(unittest.TestCase):
    def setUp(self):
        self.db = SQLiteCacheDB.getInstance()
        self.db.initDB(TRIBLER_DB_PATH_BACKUP, lib=0)
        permid = {}
        permid[3127] = 'MFIwEAYHKoZIzj0CAQYFK4EEABoDPgAEAcPezgQ13k1MSOaUrCPisWRhYuNT7Tm+q5rUgHFvAWd9b+BcSut6TCniEgHYHDnQ6TH/vxQBqtY8Loag'
        permid[994] = 'MFIwEAYHKoZIzj0CAQYFK4EEABoDPgAEAJUNmwvDaigRaM4cj7cE2O7lessqnnFEQsan7df9AZS8xeNmVsP/XXVrEt4t7e2TNicYmjn34st/sx2P'
        permid[
Example #22
__VSD_PATH__ = __JOB_PATH__ + 'Dessin' + sep
mkdir(__JOB_PATH__)
mkdir(__VSD_PATH__)

all_files = []
while True:
	all_files = all_files + fileopenbox("Séléctionnez les fiches FI à traiter", "Abtronics", filetypes= "*.xlsm", multiple=True)
	cont = boolbox("Séléctionner d'autres?", ['Oui', 'Non, c\'est tout'])
	if cont:
		pass
	else:
		break

for file in all_files:
	filename = ntpath.basename(file)
	copyFile(file, __JOB_PATH__ + filename)

title = 'SCHEMA DE COLONNES MONTANTES'

def dump(obj):
	for attr in dir(obj):
		print("obj.%s = %s" % (attr, getattr(obj, attr)))

pp = pprint.PrettyPrinter(indent=4)
pp = pp.pprint

def add_pline(x_, y_, _x, _y):
	global page
	pline = page.DrawPolyline(
		array.array('d',
			[x_, y_] +

def file_get_contents(filename):
	with open(filename) as f:
		return f.read()

PA_PMR = fileopenbox("Séléctionnez la fiche PA ou PMR à traiter", "Abtronics", filetypes= "*.xls", multiple=True)[0]
if basename(PA_PMR)[:2] == 'PA':
	type_pf = 'PA'
elif basename(PA_PMR)[:2] == 'FI':
	type_pf = 'PMR'
else:
	print('type de point fonctionnel inconnu:', basename(PA_PMR)[:2])
	sys.exit()

copyFile(PA_PMR, __JOB_PATH__ + basename(PA_PMR))
msg = "Entrez les informations nécessaires"
title = "Comparateur ELR"
fieldNames = ["INSEE", "NRO", "PA"]
fieldValues = multenterbox(msg, title, fieldNames)

if fieldValues is None:
	exit(0)

while 1:
	errmsg = ""
	for i, name in enumerate(fieldNames):
		if fieldValues[i].strip() == "":
			errmsg += "{} is a required field.\n\n".format(name)
	if errmsg == "":
		break # no problems found
    def addTorrent(self):
        old_size = self.tdb.size()
        old_src_size = self.tdb._db.size('TorrentSource')
        old_tracker_size = self.tdb._db.size('TorrentTracker')

        s_infohash = unhexlify('44865489ac16e2f34ea0cd3043cfd970cc24ec09')
        m_infohash = unhexlify('ed81da94d21ad1b305133f2726cdaec5a57fed98')

        sid = self.tdb._db.getTorrentID(s_infohash)
        mid = self.tdb._db.getTorrentID(m_infohash)

        single_torrent_file_path = os.path.join(self.getStateDir(), 'single.torrent')
        multiple_torrent_file_path = os.path.join(self.getStateDir(), 'multiple.torrent')

        copyFile(S_TORRENT_PATH_BACKUP, single_torrent_file_path)
        copyFile(M_TORRENT_PATH_BACKUP, multiple_torrent_file_path)

        single_tdef = TorrentDef.load(single_torrent_file_path)
        assert s_infohash == single_tdef.get_infohash()
        src = 'http://www.rss.com/torrent.xml'
        multiple_tdef = TorrentDef.load(multiple_torrent_file_path)
        assert m_infohash == multiple_tdef.get_infohash()

        self.tdb.addExternalTorrent(single_tdef, extra_info={'filename': single_torrent_file_path})
        self.tdb.addExternalTorrent(multiple_tdef, source=src, extra_info={'filename': multiple_torrent_file_path})

        single_torrent_id = self.tdb._db.getTorrentID(s_infohash)
        multiple_torrent_id = self.tdb._db.getTorrentID(m_infohash)

        assert self.tdb.getInfohash(single_torrent_id) == s_infohash

        single_name = 'Tribler_4.1.7_src.zip'
        multiple_name = 'Tribler_4.1.7_src'

        assert self.tdb.size() == old_size + 2, old_size - self.tdb.size()
        assert old_src_size + 1 == self.tdb._db.size('TorrentSource')
        assert old_tracker_size + 2 == self.tdb._db.size('TorrentTracker'), self.tdb._db.size('TorrentTracker') - old_tracker_size

        sname = self.tdb.getOne('name', torrent_id=single_torrent_id)
        assert sname == single_name, (sname, single_name)
        mname = self.tdb.getOne('name', torrent_id=multiple_torrent_id)
        assert mname == multiple_name, (mname, multiple_name)

        s_size = self.tdb.getOne('length', torrent_id=single_torrent_id)
        assert s_size == 1583233, s_size
        m_size = self.tdb.getOne('length', torrent_id=multiple_torrent_id)
        assert m_size == 5358560, m_size

        # TODO: action is flagged as XXX causing this torrent to be XXX instead of other
        cat = self.tdb.getOne('category_id', torrent_id=multiple_torrent_id)
        # assert cat == 8, cat  # other

        sid = self.tdb._db.getOne('TorrentSource', 'source_id', name=src)
        assert sid > 1
        m_sid = self.tdb.getOne('source_id', torrent_id=multiple_torrent_id)
        assert sid == m_sid
        s_sid = self.tdb.getOne('source_id', torrent_id=single_torrent_id)
        assert 1 == s_sid
        s_status = self.tdb.getOne('status_id', torrent_id=single_torrent_id)
        assert s_status == 0

        m_comment = self.tdb.getOne('comment', torrent_id=multiple_torrent_id)
        comments = 'www.tribler.org'
        assert m_comment.find(comments) > -1
        comments = 'something not inside'
        assert m_comment.find(comments) == -1

        m_trackers = self.tdb.getTracker(m_infohash, 0)  # db._db.getAll('TorrentTracker', 'tracker', 'torrent_id=%d'%multiple_torrent_id)
        assert len(m_trackers) == 1
        assert ('http://tpb.tracker.thepiratebay.org/announce', 1) in m_trackers, m_trackers

        s_torrent = self.tdb.getTorrent(s_infohash)
        m_torrent = self.tdb.getTorrent(m_infohash)
        assert s_torrent['name'] == 'Tribler_4.1.7_src.zip', s_torrent['name']
        assert m_torrent['name'] == 'Tribler_4.1.7_src', m_torrent['name']
        assert m_torrent['last_check_time'] == 0
	print('__ARs_PATH__ éxiste déjà')

# Ask the user to select the PA sheets to process, collecting everything in all_files
all_files = []
while True:
	all_files = all_files + fileopenbox("Séléctionnez les fiches PA à traiter", "Abtronics", filetypes= "*.xls", multiple=True)
	cont = boolbox("Séléctionner d'autres?", ['Oui', 'Non, c\'est tout'])
	if cont:
		pass
	else:
		break

# Copy all the PA sheets into the job folder
for file in all_files:
	filename = ntpath.basename(file)
	copyFile(file, __JOB_PATH__ + filename)

# a lone regex!
__RG = {
	'NRO/PMZ/PA': r'NRO\s(?P<NRO>[A-Z]+)\s-\sPMZ\s(?P<PMZ>[0-9]+)\s-\sPA\s(?P<PA>[0-9]+)'
}

"""
Fichiers nécéssaires:
 * Un model descro PA
 * Une fiche PA remplie
Etapes:
1. Demander les paths vers toutes les fiches PA à traiter
2. Pour chaque fiche PA ouvrir la fiche
3. Aller à la feuille "synoptique-bilan µmodules"
4. Ouvrir tout les déssins Visio qu'il y a dedans un par un
    
from Core.CacheDB.sqlitecachedb import SQLiteCacheDB
    
CREATE_SQL_FILE = os.path.join(BASE_DIR, 'tribler_sdb_v1.sql')
assert os.path.isfile(CREATE_SQL_FILE)
DB_FILE_NAME = 'tribler.sdb'
DB_DIR_NAME = None
FILES_DIR = os.path.join(BASE_DIR, 'Test/extend_db_dir/')
TRIBLER_DB_PATH = os.path.join(FILES_DIR, 'tribler.sdb')
TRIBLER_DB_PATH_BACKUP = os.path.join(FILES_DIR, 'bak_tribler.sdb')
if not os.path.isfile(TRIBLER_DB_PATH_BACKUP):
    print >> sys.stderr, "Please download bak_tribler.sdb from http://www.st.ewi.tudelft.nl/~jyang/donotremove/bak_tribler.sdb and save it as", os.path.abspath(TRIBLER_DB_PATH_BACKUP)
    sys.exit(1)
if os.path.isfile(TRIBLER_DB_PATH_BACKUP):
    from shutil import copy as copyFile
    copyFile(TRIBLER_DB_PATH_BACKUP, TRIBLER_DB_PATH)
    #print "refresh sqlite db", TRIBLER_DB_PATH

SQLiteCacheDB.DEBUG = False

class SQLitePerformanceTest:
    def __init__(self):
        self.db = SQLiteCacheDB.getInstance()
        
    def openDB(self, *args, **argv):
        self.db.openDB(*args, **argv)
    
    def initDB(self, *args, **argv):
        self.db.initDB(*args, **argv)
        
    def close(self, clean=False):
    def _test_addTorrent(self):

        MyDBHandler.getInstance().put('torrent_dir', '.')

        copyFile(S_TORRENT_PATH_BACKUP, S_TORRENT_PATH)
        copyFile(M_TORRENT_PATH_BACKUP, M_TORRENT_PATH)

        db = TorrentDBHandler.getInstance()

        old_size = db.size()
        old_src_size = db._db.size('TorrentSource')
        old_tracker_size = db._db.size('TorrentTracker')

        s_infohash = unhexlify('44865489ac16e2f34ea0cd3043cfd970cc24ec09')
        m_infohash = unhexlify('ed81da94d21ad1b305133f2726cdaec5a57fed98')

        sid = db._db.getTorrentID(s_infohash)
        mid = db._db.getTorrentID(m_infohash)

        single_torrent_file_path = os.path.join(FILES_DIR, 'single.torrent')
        multiple_torrent_file_path = os.path.join(FILES_DIR, 'multiple.torrent')

        single_infohash, single_torrent = db._readTorrentData(single_torrent_file_path)
        assert s_infohash == single_infohash
        src = 'http://www.rss.com/torrent.xml'
        multiple_infohash, multiple_torrent = db._readTorrentData(multiple_torrent_file_path, src)
        assert m_infohash == multiple_infohash

        db._addTorrentToDB(single_infohash, single_torrent)
        db._addTorrentToDB(multiple_infohash, multiple_torrent)

        single_torrent_id = db._db.getTorrentID(s_infohash)
        multiple_torrent_id = db._db.getTorrentID(m_infohash)

        single_name = 'Tribler_4.1.7_src.zip'
        multiple_name = 'Tribler_4.1.7_src'

        assert db.size() == old_size + 2, old_size - db.size()
        assert old_src_size + 1 == db._db.size('TorrentSource')
        assert old_tracker_size + 2 == db._db.size('TorrentTracker'), db._db.size('TorrentTracker') - old_tracker_size

        sname = db.getOne('name', torrent_id=single_torrent_id)
        assert sname == single_name, (sname, single_name)
        mname = db.getOne('name', torrent_id=multiple_torrent_id)
        assert mname == multiple_name, (mname, multiple_name)

        s_size = db.getOne('length', torrent_id=single_torrent_id)
        assert s_size == 1583233, s_size
        m_size = db.getOne('length', torrent_id=multiple_torrent_id)
        assert m_size == 5358560, m_size

        cat = db.getOne('category_id', torrent_id=multiple_torrent_id)
        assert cat == 7, cat  # other
        sid = db._db.getOne('TorrentSource', 'source_id', name=src)
        assert sid > 1
        m_sid = db.getOne('source_id', torrent_id=multiple_torrent_id)
        assert sid == m_sid
        s_sid = db.getOne('source_id', torrent_id=single_torrent_id)
        assert 1 == s_sid
        s_status = db.getOne('status_id', torrent_id=single_torrent_id)
        assert s_status == 0

        m_comment = db.getOne('comment', torrent_id=multiple_torrent_id)
        comments = 'www.tribler.org'
        assert m_comment.find(comments) > -1
        comments = 'something not inside'
        assert m_comment.find(comments) == -1

        m_trackers = db.getTracker(m_infohash, 0)  # db._db.getAll('TorrentTracker', 'tracker', 'torrent_id=%d'%multiple_torrent_id)
        assert len(m_trackers) == 1
        assert ('http://tpb.tracker.thepiratebay.org/announce', 1) in m_trackers, m_trackers
print('Job ID is', __JOB_ID__)

# Create the folders that do not exist yet
try:
	mkdir(__JOB_PATH__)
except:
	print('__JOB_PATH__ éxiste déjà')
try:
	mkdir(__DEMANDE_PATH__)
except:
	print('__DEMANDE_PATH__ éxiste déjà')

# Ask the user to select the PA sheet to process
PA = fileopenbox("Séléctionnez la fiche PA à traiter", "Abtronics", filetypes= "*.xls", multiple=True)[0]
__DEMANDE_FILE__ = __DEMANDE_PATH__ + sep + 'Demande de création d\'IMB pour ' + ntpath.basename(PA)
copyFile(PA, __JOB_PATH__ + ntpath.basename(PA))
copyFile(__DEMANDE_model_path__, __DEMANDE_FILE__)
PA = __JOB_PATH__ + ntpath.basename(PA)

"""
Fichiers nécéssaires:
 * 
Etapes:
1. 
"""

# a few useful predefinitions
mois = {
	1: u'janvier', 2: u'février', 3: u'mars', 4: u'avril',5: u'mai',
	6: u'juin', 7: u'juillet', 8: u'août', 9: u'septembre',
	10: u'octobre', 11: u'novembre', 12: u'décembre' }
Example #29
    def addTorrent(self):
        old_size = self.tdb.size()
        old_tracker_size = self.tdb._db.size('TrackerInfo')

        s_infohash = unhexlify('44865489ac16e2f34ea0cd3043cfd970cc24ec09')
        m_infohash = unhexlify('ed81da94d21ad1b305133f2726cdaec5a57fed98')

        sid = self.tdb.getTorrentID(s_infohash)
        mid = self.tdb.getTorrentID(m_infohash)

        single_torrent_file_path = os.path.join(self.getStateDir(),
                                                'single.torrent')
        multiple_torrent_file_path = os.path.join(self.getStateDir(),
                                                  'multiple.torrent')

        copyFile(S_TORRENT_PATH_BACKUP, single_torrent_file_path)
        copyFile(M_TORRENT_PATH_BACKUP, multiple_torrent_file_path)

        single_tdef = TorrentDef.load(single_torrent_file_path)
        assert s_infohash == single_tdef.get_infohash()
        multiple_tdef = TorrentDef.load(multiple_torrent_file_path)
        assert m_infohash == multiple_tdef.get_infohash()

        self.tdb.addExternalTorrent(single_tdef)
        self.tdb.addExternalTorrent(multiple_tdef)

        single_torrent_id = self.tdb.getTorrentID(s_infohash)
        multiple_torrent_id = self.tdb.getTorrentID(m_infohash)

        assert self.tdb.getInfohash(single_torrent_id) == s_infohash

        single_name = 'Tribler_4.1.7_src.zip'
        multiple_name = 'Tribler_4.1.7_src'

        assert self.tdb.size() == old_size + 2, old_size - self.tdb.size()
        new_tracker_table_size = self.tdb._db.size('TrackerInfo')
        assert old_tracker_size < new_tracker_table_size, new_tracker_table_size - old_tracker_size

        sname = self.tdb.getOne('name', torrent_id=single_torrent_id)
        assert sname == single_name, (sname, single_name)
        mname = self.tdb.getOne('name', torrent_id=multiple_torrent_id)
        assert mname == multiple_name, (mname, multiple_name)

        s_size = self.tdb.getOne('length', torrent_id=single_torrent_id)
        assert s_size == 1583233, s_size
        m_size = self.tdb.getOne('length', torrent_id=multiple_torrent_id)
        assert m_size == 5358560, m_size

        cat = self.tdb.getOne('category', torrent_id=multiple_torrent_id)
        assert cat == u'xxx', cat

        s_status = self.tdb.getOne('status', torrent_id=single_torrent_id)
        assert s_status == u'unknown', s_status

        m_comment = self.tdb.getOne('comment', torrent_id=multiple_torrent_id)
        comments = 'www.tribler.org'
        assert m_comment.find(comments) > -1
        comments = 'something not inside'
        assert m_comment.find(comments) == -1

        m_trackers = self.tdb.getTrackerListByInfohash(m_infohash)
        assert len(m_trackers) == 8
        assert 'http://tpb.tracker.thepiratebay.org/announce' in m_trackers, m_trackers

        s_torrent = self.tdb.getTorrent(s_infohash)
        m_torrent = self.tdb.getTorrent(m_infohash)
        assert s_torrent['name'] == 'Tribler_4.1.7_src.zip', s_torrent['name']
        assert m_torrent['name'] == 'Tribler_4.1.7_src', m_torrent['name']
        assert m_torrent['last_tracker_check'] == 0
Example #30
    def _test_addTorrent(self):

        MyDBHandler.getInstance().put('torrent_dir', '.')

        copyFile(S_TORRENT_PATH_BACKUP, S_TORRENT_PATH)
        copyFile(M_TORRENT_PATH_BACKUP, M_TORRENT_PATH)

        db = TorrentDBHandler.getInstance()

        old_size = db.size()
        old_src_size = db._db.size('TorrentSource')
        old_tracker_size = db._db.size('TorrentTracker')

        s_infohash = unhexlify('44865489ac16e2f34ea0cd3043cfd970cc24ec09')
        m_infohash = unhexlify('ed81da94d21ad1b305133f2726cdaec5a57fed98')

        sid = db._db.getTorrentID(s_infohash)
        mid = db._db.getTorrentID(m_infohash)

        single_torrent_file_path = os.path.join(FILES_DIR, 'single.torrent')
        multiple_torrent_file_path = os.path.join(FILES_DIR,
                                                  'multiple.torrent')

        single_infohash, single_torrent = db._readTorrentData(
            single_torrent_file_path)
        assert s_infohash == single_infohash
        src = 'http://www.rss.com/torrent.xml'
        multiple_infohash, multiple_torrent = db._readTorrentData(
            multiple_torrent_file_path, src)
        assert m_infohash == multiple_infohash

        db._addTorrentToDB(single_infohash, single_torrent)
        db._addTorrentToDB(multiple_infohash, multiple_torrent)

        single_torrent_id = db._db.getTorrentID(s_infohash)
        multiple_torrent_id = db._db.getTorrentID(m_infohash)

        single_name = 'Tribler_4.1.7_src.zip'
        multiple_name = 'Tribler_4.1.7_src'

        assert db.size() == old_size + 2, old_size - db.size()
        assert old_src_size + 1 == db._db.size('TorrentSource')
        assert old_tracker_size + 2 == db._db.size(
            'TorrentTracker'), db._db.size('TorrentTracker') - old_tracker_size

        sname = db.getOne('name', torrent_id=single_torrent_id)
        assert sname == single_name, (sname, single_name)
        mname = db.getOne('name', torrent_id=multiple_torrent_id)
        assert mname == multiple_name, (mname, multiple_name)

        s_size = db.getOne('length', torrent_id=single_torrent_id)
        assert s_size == 1583233, s_size
        m_size = db.getOne('length', torrent_id=multiple_torrent_id)
        assert m_size == 5358560, m_size

        cat = db.getOne('category_id', torrent_id=multiple_torrent_id)
        assert cat == 7, cat  # other
        sid = db._db.getOne('TorrentSource', 'source_id', name=src)
        assert sid > 1
        m_sid = db.getOne('source_id', torrent_id=multiple_torrent_id)
        assert sid == m_sid
        s_sid = db.getOne('source_id', torrent_id=single_torrent_id)
        assert 1 == s_sid
        s_status = db.getOne('status_id', torrent_id=single_torrent_id)
        assert s_status == 0

        m_comment = db.getOne('comment', torrent_id=multiple_torrent_id)
        comments = 'www.tribler.org'
        assert m_comment.find(comments) > -1
        comments = 'something not inside'
        assert m_comment.find(comments) == -1

        m_trackers = db.getTracker(
            m_infohash, 0
        )  #db._db.getAll('TorrentTracker', 'tracker', 'torrent_id=%d'%multiple_torrent_id)
        assert len(m_trackers) == 1
        assert ('http://tpb.tracker.thepiratebay.org/announce',
                1) in m_trackers, m_trackers