Example #1
0
    def update_image(self,filepath):
        """ Adds or updates the given image in the index.

            Opens an index writer on demand: if no writer is currently
            active, one is created, used, committed and discarded by this
            call.  If a writer already exists (e.g. during a batch
            operation), the document is added to it and committing is
            left to whoever opened the writer.
        """
        # The index stores paths relative to the collection root.
        relpath  = os.path.relpath(filepath,self.root)
        folder   = os.path.dirname(relpath)

        # Extract title/content/tags from the image file itself.
        meta = Metadata(filepath)

        # Lazily open a writer and remember whether this call owns the
        # commit.  NOTE(review): assumes self.writer is (re)initialised
        # to a falsy value elsewhere -- after the `del` below, a missing
        # instance attribute would raise AttributeError on the next
        # call unless the class defines a default; confirm.
        if not self.writer:
            self.writer = self.index.writer()
            commit      = True
        else:
            commit      = False

        # add to index; update_document replaces any existing document
        # with the same unique key instead of duplicating it
        self.writer.update_document(
            path    = unicode(relpath),    # `unicode` => Python 2 codebase
            folder  = unicode(folder),
            time    = os.path.getmtime(filepath),
            title   = meta.get_title(),
            content = meta.get_content(),
            tags    = meta.get_tags()
            #FIXME add more EXIF data here
        )

        # Commit (and drop the writer) only if this call created it.
        if(commit):
            self.writer.commit()
            del self.writer
Example #2
0
 def table_row(bill_dic):
     """Build one flat table row (a dict) describing a single bill."""
     meta = Metadata()
     session = meta.session_name(bill_dic['session'])
     dates = bill_dic['action_dates']
     row = {}
     row['Title'] = bill_dic['bill_id'] + ', ' + session
     row['os_bill_id'] = bill_dic['id']
     row['bill'] = bill_dic['bill_id']
     row['bill_title'] = bill_title(bill_dic)
     row['chamber'] = state_utils.chamber_name(bill_dic['chamber'])
     row['website'] = bill_dic['sources'][-1]['url']
     row['session'] = session
     row['status'] = bill_status(bill_dic)
     row['latest_status'] = latest_status(bill_dic)
     row['bill_date'] = utils.datetime_to_date(dates['first'])
     row['date_passed_senate'] = utils.datetime_to_date(dates['passed_upper'])
     row['date_passed_assembly'] = utils.datetime_to_date(dates['passed_lower'])
     row['date_signed'] = utils.datetime_to_date(dates['signed'])
     row['summary'] = bill_summary(bill_dic)
     row['keywords'] = suggested_topics(bill_dic)
     return row
Example #3
0
 def parseMetadata(self, file_name, metadata):
     """Split a '-'-delimited metadata string into date, time,
     temperature and pressure, and wrap them in a Metadata record."""
     fields = metadata.split('-')
     return Metadata(file_name, fields[0], fields[1], fields[2], fields[3])
Example #4
0
	def query_by_event(self,eid):
		"""Return the SingleTakeover for event *eid* of the current subject.

		Events 0-3 come from data_path1, events 4+ from data_path2 (the
		event id is re-based for the second file).  Takeover intervals
		are detected from step changes in the 'User13' column.
		"""
		# Pick the data file; [1:] skips the extra header/units row.
		# Raw strings for the regex separator (avoids invalid-escape warnings).
		if eid < 4:
			df = pd.read_table(self.data_path1,sep=r"\s+",low_memory=False)[1:]
			eid_trans = eid
		else:
			df = pd.read_table(self.data_path2,sep=r"\s+",low_memory=False)[1:]
			eid_trans = eid - 4
		df = df[1:]
		# Convert every column that parses as numeric; leave the rest alone.
		# Narrowed from a bare `except:` so Ctrl-C etc. still propagate.
		for col in df.columns:
			try:
				df[col] = pd.to_numeric(df[col])
			except Exception:
				continue
		# A takeover starts where User13 steps up and ends where it steps down.
		tor_point = df[df['User13'].diff()>=1]
		tor_point = list(tor_point['FRAME_NUM'])
		end_point = df[df['User13'].diff()<=-1]
		end_point = list(end_point['FRAME_NUM'])
		# get the takeover period lists (pair each start with its end)
		takeover_list = []
		for i in range(len(tor_point)):
			takeover_list.append([tor_point[i], end_point[i]])
		# Situation id for this subject/event drives the metadata lookup.
		situation_id = self.metadata[self.metadata['subject']==self.sub_id].values[0][1:][eid]
		meta_org = self.meta_dict[situation_id[0]]
		meta = Metadata(self.sub_id,meta_org,eid)
		takeover = SingleTakeover(df,takeover_list[eid_trans],meta)
		return takeover
Example #5
0
def postMetadata(takid=-1):
    """Create or update metadata entries on a Tak.

    Two input modes:
      * ``key``/``value`` request parameters both set -> update (or add)
        that single key/value pair;
      * otherwise, a POST whose body is a JSON list of ``{key, value}``
        objects -> update/add each pair in bulk.

    Returns a JSON response: 200/success payload on success, 400 on any
    invalid input or parse failure.
    """
    if takid <= 0:
        return json_response(code=400)
    tak = Tak.get_by_id(takid)
    if tak is None:
        logging.info("tak is None")
        return json_response(code=400)
    key = getValue(request, "key", "")
    value = getValue(request, "value", "")
    if key != '' and value != '':
        # Single-pair mode: overwrite an existing key in place ...
        for mdata in tak.metadata:
            if mdata.key == key:
                mdata.value = value
                tak.put()
                return json_response(code=200)
        # ... or append a brand-new entry.
        metadata = Metadata(key=key, value=value)
        tak.metadata.append(metadata)
        tak.put()
        return json_response(code=200)
    else:
        if request.method == 'POST':
            try:
                logging.info("json")
                data = json.loads(request.data, object_hook=_decode_dict)
                logging.info(data)
                for datum in data:
                    # datum is a metadata object
                    logging.info(datum['key'])
                    logging.info(datum['value'])
                    found = False  # was bool(0)
                    for mdata in tak.metadata:
                        if datum['key'] == mdata.key:
                            mdata.value = datum['value']
                            found = True  # was bool(1)
                            break
                    if not found:
                        metadata = Metadata(key=datum['key'],
                                            value=datum['value'])
                        tak.metadata.append(metadata)
                # One put() after all updates -> a single datastore write.
                tak.put()

                return json_success(data)
            except Exception as e:
                logging.info(e)
                return json_response(code=400)
        return json_response(code=400)
Example #6
0
def __tree_hdfs(path):
    """Recursively walk an HDFS path via ``hadoop fs -ls`` and yield a
    Metadata record for every file that belongs to a configured pond.

    Directories are descended into; non-path lines (file counts etc.)
    and paths outside the configured ponds are skipped.
    """
    cmd = "hadoop fs -ls {}".format(path)
    txt, err = shell.execute(cmd)
    if err:
        print("Failed to read HDFS path {}".format(path))
        shell.log(os.path.basename(sys.argv[0]), os.path.basename(sys.argv[1]),
                  "WARN", "Failed to read HDFS path {}".format(err))
    # Parse `ls` output
    lines = txt.decode("utf-8").split("\n")

    for line in lines:
        # Ignore file count and other meta information (no "/" => not a path)
        if line.find("/") == -1:
            continue

        # The path is everything from the first "/" onwards.
        # (was: `_path = _path = ...`, a duplicated assignment)
        _path = line[line.find("/"):]

        # Check if the current path belongs to configured pond
        pond_path = _path[len(hdfs):]
        if pond_path.startswith("/"):
            pond_path = pond_path[1:]

        # Pond name is the first path component after the HDFS prefix.
        if pond_path.find("/") != -1:
            pond_end = pond_path.find("/")
        else:
            pond_end = len(pond_path)

        pond_name = pond_path[:pond_end]
        config_ponds = retentions.keys()
        if pond_name not in config_ponds:
            continue

        # drwx------
        if line.startswith("d"):
            print("Working on path: {}".format(_path))
            #shell.log(os.path.basename(sys.argv[0]), os.path.basename(sys.argv[1]), "INFO", "Working on path: {}". format(_path))
            yield from __tree_hdfs(_path)

        # -rwx------
        elif line.startswith("-"):
            print("Working on file: {}".format(_path))
            # Date and time are the two columns just before the path.
            _date = ' '.join(line[:line.find("/")].split(' ')[-3:-1])
            # 'data' directories report the name of their parent instead.
            if _path.split('/')[-2] == 'data':
                _name = _path.split('/')[-3]
            else:
                _name = _path.split('/')[-2]
            _pond = _path.split('/')[2]
            _last_used = datetime.strptime(_date, "%Y-%m-%d %H:%M")
            #shell.log(os.path.basename(sys.argv[0]), os.path.basename(sys.argv[1]), "INFO", "Working on file: {}". format(_path))
            yield Metadata(_name, _pond, _last_used, path)

        # Any other text or errors
        else:
            continue
Example #7
0
 def get_features(self, label):
     """Return the feature dict for *label*: decomposed features from the
     reduced pickle for most feature types, or a label-filtered slice of
     the full database when the feature is SIFT."""
     dataset_dir = os.path.abspath(
         os.path.join(Path(os.getcwd()).parent, self.labelled_dataset_path))
     image_names = list(
         misc.get_images_in_directory(dataset_dir).keys())
     metadata = Metadata(image_names)
     if self.feature_name == 'SIFT':
         # No decomposition for SIFT: load the raw feature database and
         # keep only the images whose metadata matches the label.
         database_features = misc.load_from_pickle(
             self.main_pickle_file_folder, self.feature_name)
         matching_images = metadata.get_specific_metadata_images_list(
             feature_dict={'aspectOfHand': label})
         return {name: database_features[name] for name in matching_images}
     # Otherwise make sure the label's decomposed features are saved,
     # then read them back from the reduced pickle.
     metadata.save_label_decomposed_features(label, self.decomposed_feature)
     return misc.load_from_pickle(
         self.reduced_pickle_file_folder,
         self.decomposed_feature + '_' + label)
 def setUp(self):
     """Create a Document plus a SuperDocument built from three copies
     of the same text/metadata, then sanity-check the component count."""
     self.test_metadata = Metadata()
     self.test_text = ("Here is some test text. Blah blah blah blah \n"
                       "1234567890987654321 Yea Alabama Drown 'em Tide!\n")
     self.test_filename = "test_superdoc.txt"
     self.test_document = Document(self.test_metadata, self.test_text,
                                   self.test_filename)
     # Three references to the same metadata object, as in the superdoc.
     self.test_metadata_list = [self.test_metadata] * 3
     self.test_superdoc_text = self.test_text * 3
     self.test_superdoc = SuperDocument(self.test_metadata_list,
                                        self.test_superdoc_text,
                                        self.test_filename)
     self.assertEqual(len(self.test_superdoc.component_metadata), 3)
Example #9
0
    def configDb(self,type,creds = None):
        """Select the u1db database file(s) for the given operation type
        and (re)create ``self.metadata`` accordingly.

        In test mode fixed test databases are always used.  ``name2`` is
        a secondary database, only set for operations that touch both
        the metadata store and the download-version store.
        """
        # Routing tables: operation name -> database file.
        calendar_ops = (
            "deleteEvent", "updateEvent", "selectEvent", "insertEvent",
            "insertCalendar", "deleteCalendar", "selectCalendar",
            "updateCalendar", "deleteCalendarUser",
            "selectCalendarsAndEvents",
        )
        download_ops = (
            "insertDownloadVersion", "updateDownloadVersion",
            "deleteDownloadVersion", "getDownloadVersion",
        )
        dual_db_ops = ("recursiveDeleteVersion", "deleteMetadataUser")
        lock_ops = ("getMetadataFile", "lockFile", "updateDateTime",
                    "unLockFile")

        if self.test == True:
            name = "test.u1db"
            name2 = "test1.u1db"
        else:
            name = "metadata.u1db"
            name2 = None
            # Membership tests replace the original unreadable chains of
            # `type == "..." or type == "..." ...` comparisons.
            if type in calendar_ops:
                name = "calendar.u1db"
            elif type in download_ops:
                name = "downloadfile.u1db"
            elif type in dual_db_ops:
                name2 = "downloadfile.u1db"
            elif type in lock_ops:
                name = "lockfile.u1db"

        self.metadata = Metadata(name,creds,name2)
Example #10
0
    def query(self, type='active', term_name=None):
        """
        Fetch raw legislator data from OpenStates.

        Args:
            term_name: term name as it comes from OpenStates API; when
                omitted in 'all' mode, the latest term is looked up.
            type: Either 'all' or 'active' (default).

        Side effect:
            Stores a lazy map of full legislator records in
            ``self.raw_dictionary``.
        """
        Tables.query(self)

        # Fields requested for every matched legislator.
        wanted_fields = [
            'id',
            'full_name',
            'url',
            'roles',
            # 'old_roles',
            'party',
            'district',
            'chamber',
            'offices',
            'email'
        ]

        if type == 'all':
            if term_name is None:
                term_name = Metadata().latest_term_name
            matches = pyopenstates.search_legislators(state=config.STATE,
                                                      term=term_name,
                                                      fields='id')
        else:  # 'active'
            matches = pyopenstates.search_legislators(
                state=config.STATE,
                active='true',  # default
                fields='id')

        self.raw_dictionary = map(
            lambda dic: pyopenstates.get_legislator(dic['id'],
                                                    fields=wanted_fields),
            matches)
Example #11
0
    def load_image(self):
        """ Load the image at the current position into the viewer.

        Saves any pending edits to the previously shown image first,
        then refreshes the title/content/tags widgets from the new
        image's metadata.  Does nothing for directories or when there
        is no current position.
        """
        self.save_image()

        panel = self.builder.get_object('imagepanel')
        panel.hide()
        self.image.hide()

        pos = self.get_currentpos()
        if pos is None:  # was: if(pos == None)
            return
        img = self.filemgr.get_itemat(pos)

        # Nothing to display for directories.
        if img['ft'] == 'dir':
            return

        self.meta = Metadata(img['fn'])

        obj = self.builder.get_object('imgtitle')
        obj.set_text(self.meta.get_title())
        obj.set_sensitive(self.meta.writable)

        obj = self.builder.get_object('imgcontent')
        obj.set_text(self.meta.get_content())
        obj = self.builder.get_object('imgcontentbox')
        obj.set_sensitive(self.meta.writable)

        obj = self.builder.get_object('imgtags')
        obj.set_text(self.meta.get_tags())
        obj.set_sensitive(self.meta.writable)

        obj = self.builder.get_object('imgname')
        obj.set_text(os.path.basename(img['fn']))

        panel.show()

        # Flush pending GTK events so the panel repaints before the
        # (potentially slow) image file load below.
        while gtk.events_pending():
            gtk.main_iteration(False)
        self.image.set_from_file(img['fn'])
        self.image.show()
Example #12
0
#! /usr/bin/python3

from Metadata import Metadata

# Driver script: parse the "left" dataset's format/metadata files and
# plot the result.
metadata = Metadata()

# NOTE(review): paths are relative to the working directory -- run this
# from the project root (confirm where data/ lives).
metadata.parse_metadata("data/left_dataformat.txt",
                        "data/left_all_metadata.txt")

# Presumably plots series/column 0; exact semantics are defined by
# Metadata.plot -- verify there.
metadata.plot(0)
Example #13
0
from Random import Random
from Regression import Regression
from LinUCB_Disjoint import LinUCB_Disjoint
from TS_Lin import TS_Lin
from GP_Clustered import GP_Clustered
from NN import NN

import TestBuilder
import Util
import MetricsCalculator

# Simulation setup for one bandit algorithm on one campaign.
detailed = False
campaign_id = 837817
algoName = "LinUCB_Disjoint"
# NOTE(review): neither `Metadata` nor `Path` (used below) appears in
# this file's visible imports -- confirm they are imported elsewhere.
meta = Metadata(algoName, campaign_id)
algo = LinUCB_Disjoint(meta)

testsMeta = TestBuilder.get_lin_tests_mini(meta)

# Click-through-rate scaling factors to sweep during simulation.
ctr_multipliers = [0.5, 1, 2, 5, 10]
# ctr_multipliers = [1]
simulation_ids = [2]

# Append to the results CSV if it already exists; otherwise create it
# and remember that a header row still has to be written.  The handle
# is left open -- presumably written to further down the script.
output_path = "./Results/{0}/Simulated/{1}_2.csv".format(meta.campaign_id, algoName)
output_column_names = False
if not Path(output_path).is_file():
	output = open(output_path, "w")	
	output_column_names = True;
else:
	output = open(output_path, "a")
Example #14
0
 def createAndLoadAnalysis(anaType, runConfig, folder, name=None):
     """
     Creates a new analysis of the specified anaType,
     with common analysis folder given.
     If name is specified, use this name,
     else use the default for this analysis anaType.

     The created analysis is then loaded and returned.

     raises AnalysisException_createFail if something goes wrong.
     """

     # Validate the requested type against the registry first.
     if anaType not in AnalysisInterface.getTypes():
         raise AnalysisException_createFail("Type '" + anaType + "' is not valid")

     # Resolve the final name and refuse to clobber an existing analysis.
     name = AnalysisInterface.getName(anaType, name)
     if os.path.exists(os.path.join(folder, name)):
         raise AnalysisException_createFail("Analysis file already created?!?")

     import acdOpti.AcdOptiRunConfig
     assert isinstance(runConfig, acdOpti.AcdOptiRunConfig.AcdOptiRunConfig)

     # Imports are deliberately local: each analysis module is only
     # loaded when its type is actually requested.
     if anaType == "Dummy":
         from Dummy import Dummy
         Dummy.createNew(folder, name)
         return Dummy(folder, name, runConfig)
     elif anaType == "DummyInput":  # was a plain `if`, inconsistent with the rest
         from DummyInput import DummyInput
         DummyInput.createNew(folder, name)
         return DummyInput(folder, name, runConfig)
     elif anaType == "FileList":
         from FileList import FileList
         FileList.createNew(folder, name)
         return FileList(folder, name, runConfig)
     elif anaType == "Omega3P_modeInfo":
         from Omega3P_modeInfo import Omega3P_modeInfo
         Omega3P_modeInfo.createNew(folder, name)
         return Omega3P_modeInfo(folder, name, runConfig)
     elif anaType == "RFpost":
         from RFpost import RFpost
         RFpost.createNew(folder, name)
         return RFpost(folder, name, runConfig)
     elif anaType == "RFpost_local":
         from RFpost_local import RFpost_local
         RFpost_local.createNew(folder, name)
         return RFpost_local(folder, name, runConfig)
     elif anaType == "GroupVelocity":
         from GroupVelocity import GroupVelocity
         GroupVelocity.createNew(folder, name)
         return GroupVelocity(folder, name, runConfig)
     elif anaType == "ScMax":
         from ScMax import ScMax
         ScMax.createNew(folder, name)
         return ScMax(folder, name, runConfig)
     elif anaType == "ModeFileRecalc":
         from ModeFileRecalc import ModeFileRecalc
         ModeFileRecalc.createNew(folder, name)
         return ModeFileRecalc(folder, name, runConfig)
     elif anaType == "Metadata":
         from Metadata import Metadata
         Metadata.createNew(folder, name)
         return Metadata(folder, name, runConfig)
     else:
         # Registered in getTypes() but not handled above.
         raise AnalysisException_createFail("Unknown analysis type '" + anaType + "'")
Example #15
0
class Manager(Lister):
    """
    class Manager:
       Main class to create DARball.
       - processes initial request,
       - decides whether it should  be an incremental distribution. For example, if the
         base distribution for the same release is found in the dar shared pool, and user's
         top release directory is in private development area.

       If yes:
         - creates incremental distribution, including a reference to the base release darball,
           and the private part of the application.
       If not:
         - creates a private base darball and incremental part, and generates a request for a
           public base darball. 

       The base  creation of a base darball includes following steps: 
         - create RTE directory structure with simlinks according to request,
         - create metadata,
         - calculate criteria for inital request,
         - check  if calculated  createria are satisfactory
         - run tests (optional)
         - adjust request and repeat the procedure until satisfied (optional in interactive, or
           semi-interactive mode)
         - create and save darball 
       """
    # Defaults:

    def __init__(self, darInput, pltf=None, cnf=None):
        """
        __init__
          Initializes DAR Manager aspects common to both create and install
          mode: validates the configured pools and tmp dir, creates the
          per-session build/log directories, starts the timing logger,
          builds the Request object and seeds the DAR metadata container
          from it.
        """
        Lister.__init__(self)

        # Set defaults:
        self.darballSuffix = '_dar.tar.gz'

        # and initialize variables:
        self.baseBomDict = {}  # need for incremental darball
        self.darpool = None    # 
        self.dar = None # need for creating darball

        self.config = cnf
        infoOut('Validating DAR configuration ...')
        if getVerbose():
            # print statement / has_key below => this module is Python 2.
            print "CONFIGURATION:", self.config
        # Check that dar shared pool is set up, exists and has right permissions:
        # DAR shared pool is to lookup for a base dar distributions and/or
        # installations . 
        if self.config.has_key("dar_shared_pool"):
            self.sharedPool = self.config["dar_shared_pool"]
        else:
            sys.exit("ERROR:dar shared pool is not defined in the configuration")
        if self.config.has_key("dar_dist_pool"):
            self.distPool = self.config["dar_dist_pool"]
        else:
            sys.exit("ERROR: dar dist pool is not defined in the configuration")
        if self.config.has_key("dar_inst_pool"):
            self.instPool = self.config["dar_inst_pool"]
        else:
            sys.exit("ERROR: dar inst pool is not defined in the configuration")

        # Check that dar tmpdir is set, exists, and has right permissions:
        if self.config.has_key("dar_tmp_dir"):
            self.tmpdir = self.config["dar_tmp_dir"]
        else:
            sys.exit("ERROR: dar_tmp_dir is not defined in the configuration")
        if notWritable(self.tmpdir):
            sys.exit(notWritable(self.tmpdir))

        # Each time when dar is called, it creates a new build directory
        # in tmpdir pool, using a unique datestamp:
        self.blddir = self.tmpdir+'/'+str(time.time())

        # Start logger and pre-set log files:
        self.logdir = self.blddir + '/logfiles' # one dir for all logfiles
        self.timingLog = Logger(self.logdir+'/session.timing')
        self.sessionStdoutLog = self.logdir+'/session.stdout'
        self.sessionHistoryLog = self.logdir+'/session.history'

        # Create necessary directories and (empty) log files:
        os.makedirs(self.blddir)
        os.makedirs(self.logdir)
        for logfile in (self.sessionStdoutLog,  self.sessionHistoryLog):
            infoOut('Creating ' + logfile + ' file')
            open(logfile, 'w').close()

        # Get platform info:
        self.platform = pltf

        self.timingLog('Completed configuration and setup ')

        # Create a request 
        self.currentRequest = Request(darInput, self.timingLog)
        self.timingLog('Created request object')

        # Initialize dar metadata
        self.darMeta = Metadata()
        self.darMeta.setDarVersion(getDarVersion())

        # Get release/version metadata from request and
        # put them into Metadata container:
        self.darMeta.setDarInput(darInput)
        self.darMeta.setBaseReleaseName(self.currentRequest.getBaseReleaseName())
        self.darMeta.setProjectName(self.currentRequest.getProjectName())
        self.darMeta.setVersionTag(self.currentRequest.getVersionTag())

        # Architecture
        self.darMeta.setArchitecture(self.currentRequest.getArchitecture())
        
    def prepareDistribution(self, method = 'copy'):
        """
        Manager.prepareDistribution:
          Creates DARball structure according to current request, using
          copy, or link method.

          For incremental darballs the base BOM (bill of materials) is
          loaded first -- from an installation directory, a darball file,
          or a release tag resolved against the distribution pool -- so
          that only files absent from the base end up in the new darball.
        """
        # Check  how  much  space is  left in tmp dir,
        # where the darball will be built:
        spaceLeft(self.tmpdir)
        self.timingLog('Checked space left in '+self.tmpdir)

        # If incremental darbal was requested, get its metadata,
        # and fill up base bomdictionary
        baseDar = self.currentRequest.baseDar
        self.darMeta.setBaseDar(baseDar)

        if baseDar:
            if os.path.isdir(baseDar):
                # This should be an installation directory:
                bomFile = baseDar+'/' + getBOMFileName()

                if os.path.isfile(bomFile):
                    # Create base bom dictionary directly from file 
                    # Each line is "<md5> <path>" (string.split => Python 2).
                    for entry in open(bomFile,'r').readlines():
                        # a list of lines
                        (md5, entryPath) = string.split(entry)
                        self.baseBomDict[entryPath] = md5
                else:
                    raise InputError(baseDar,
                                    'Could not find ' +
                                     getBOMFileName() + ' here.')
            else:
                if os.path.isfile(baseDar):
                    # This should be a DARball:
                    baseDarball = baseDar
                else:
                    # This should be a release tag of a base darball
                    # available from the dar pool.
                    # Lookup for base darball in the distribution pool:

                    if notReadable(self.distPool):
                        sys.exit(notReadable(self.distPool))
                    baseDarball = self.findBaseDarball(self.distPool, baseDar)
                    if not baseDarball:
                        sys.exit( 'Could not find base distribution for ' \
                                  +baseDar+' in '+self.sharedPool)

                # Create base bom dictionary on the flight from the archive:
                result = readFileFromArchive(baseDarball, getBOMFileName())

                for entry in string.split(result,'\n'):
                    md5, entryPath = string.split(entry)
                    self.baseBomDict[entryPath] = md5

        # Now create DAR directory structure:
        self.dar = Structure(self.blddir, self.currentRequest,
                             method, baseBom = self.baseBomDict)

        self.timingLog('Created DAR in ' + self.blddir +
                       ' using ' + method + ' method')
        instDir = self.dar.getTopInstDir()
        if method == 'copy':
            self.timingLog('Counted install. size ' +
                           'before cleanup in shared dir')
            # Make cleanup and create BOM only for the FSO image (shared 
            # between the environment variables):
            cleanup(self.dar.getSharedTop(),
                    os.path.join(self.dar.getTopInstDir(), getBOMFileName()))
            self.timingLog('Removed duplicates and created BOM for ' +
                           self.dar.getSharedTop())
        size = noteSize(instDir)
        self.timingLog('Counted size after cleanup in ' +
                       self.dar.getSharedTop())
        self.darMeta.setInstallationSize(size)
        # fakeScram()
        # including scram runtime command, which may replace setup scripts.
        self.saveMetadata(os.path.join(self.dar.getTopInstDir(),
                                       getMetaDataFile()))

        # - saves into metadata file info about creation of a darball,
        # project conifguration info. Adds a spec file (and darInput in
        # scram mode). DAR info and its source code go here.
        self.createReadmeFile()
        # DAR info, istallation instructions, reference to documentation.
        # self.rememberSize()

    def installApplication(self, installDir, testmode):
        """
        Manager.installApplication
          Installs the application by performing the following steps:
            - checks to see if the installation directory is writable
            - loads metadata
            - checks if enough disk space is available
            - checks to see if this is an incremental installation
              - looks for the base disribution for this release
            - checks for previous installation of this package
              and checks the md5sum <not implemented>
            - unpacks the installation package
            - publishes package metadata
            - sets up the environment scripts and runs setup scripts
            - creates links and checks installation size
        """
        if notWritable(installDir):
            sys.exit(notWritable(installDir))
        # Absolutize path if needed:
        if not os.path.isabs(installDir):
            installDir = os.path.abspath(installDir)
        # Extract metadata from distribution:
        metadata = loadMetadata(getMetaDataFile(),
                                archive = self.currentRequest.getDarFile())

        infoOut('Loaded DAR metadata from '+self.currentRequest.getDarFile())
        # If in test mode, print out users info and exit:
        if testmode:
            print metadata.userInfo()
            return
        # Check  that  there is enough space in the installation directory:
        available = spaceLeft(installDir)
        installSize = float(metadata.getInstallationSize())

        if available < installSize:
            sys.exit('Not enough space on the disk!\n Installation size: ' \
                     + str(installSize) +
                     ' KB\n Available: ' +
                     str(available) + ' KB')

        self.timingLog('Checked space left in '+installDir)
        ##########################################
        #   Handling  incremental DARballs:
        ##########################################
        # Check if darball metadata contain a reference to a base dar:
        baseDar = metadata.getBaseDar()
        if baseDar:
            infoOut("This is  incremental distribution based on "+baseDar)
            # This is an incremental darball, so 
            # we need matching base installation.
            baseInstallation = self.currentRequest.baseDar
            if  not baseInstallation:
                usageError ('Please specify a base .')
                # Lookup for base installation in the installation pool:
                # baseInstallation = self.findBaseInstallation(self.distPool,
                #                                           baseDar)
                # if not baseInstallation:
                #    sys.exit( 'Could not find base installation for ' +
                #                   baseDar + '
                #                   in '+self.sharedPool)
            infoOut("(todo)Verifying base installation "+baseInstallation)
        ##########################################
        #   General actions for all DARballs:
        ##########################################
        # Check if the installation already exists:
        releaseInstTop = os.path.join(installDir,
                                      metadata.getVersionTag(),
                                      metadata.getArchitecture())
        if os.path.exists( releaseInstTop):
            # TODO: validate the installation using  md5sum and
            # tell user the results
            sys.exit("ERROR: You already have installation here: \n   " \
                     +releaseInstTop+"\nExiting ....\n")
        # Unpack darball
        infoOut('Unpacking '+self.currentRequest.getDarFile()+' .... ')
        unpackCommand = 'tar -xz -C ' + \
                        installDir + ' -f ' + \
                        self.currentRequest.getDarFile()

        (status, out) = commands.getstatusoutput(unpackCommand)
        
        # Check that in unpacked into toInstallDir as expected:
        if status: # failed
            if out:
                # save command output in the logfile:
                unpackLogfile = os.path.join(self.logdir, 'dar_unpack.log')
                tarlog = open(unpackLogfile, 'w')
                tarlog.write('Output from unpacking command:\n' + \
                         unpackCommand + '\n' + out )
                tarlog.close()
            sys.exit ('Unpacking failed with exit status ' + status + \
                      '\nOutput can be found in \n' + unpackLogfile )
        elif not os.path.isdir(releaseInstTop):
            sys.exit ('Can not  find  '+releaseInstTop)            
        # Link to a base installation for incremental darballs 
        if baseDar:
            infoOut("Create a link to base installation:\n ln -s "
                     +baseInstallation+'/shared '+releaseInstTop+'/base')
            os.symlink(baseInstallation+'/shared', releaseInstTop+'/base')
        # Set up environment scripts:
        infoOut("Setting up the installation")
        templateStub = os.path.join(releaseInstTop, getSetupScriptBasename())
        newSetupScriptStub = os.path.join(releaseInstTop, 'envSetup')
        helpText = self.updateSetupScripts(\
                   templateStub, \
                   releaseInstTop, \
                   newSetupScriptStub )

        # For compatibility with the old Production tools:
        oldSetupScriptStub = os.path.join(releaseInstTop,
                                          metadata.getVersionTag() + '_env')
        self.updateSetupScripts(\
                   templateStub, \
                   releaseInstTop, \
                   oldSetupScriptStub )

        # Move script templates to the DAR admin directory. 
        #infoOut('Removing setup scripts templates ...')
        cmd = 'mv ' + templateStub + '.*sh' + ' ' \
              + installDir + '/' + getDarDirName()
        (status, out)=commands.getstatusoutput(cmd)
        if status != 0: # did not succeed
            DARInternalError("In installApplication: " +
                             "doing command" + cmd +
                             "\ncommand output :\n" + out)
        
        
        #infoOut("(todo) Do md5sum check of BOM in resulting installation")
        #infoOut("(todo) If successful, " +
        #        "register installation in publishing service ")
        self.publishMetadata(installDir + '/' + getDarDirName())
        # Publish installation metadata:
        self.publishMetadata(installDir + '/' + getDarDirName())

        #Print out runtime environment setup help (and exit):
        infoOut(helpText)
        infoOut("Installation completed.")
        
    def updateSetupScripts(self, template, installDir, newScriptStub):
        """
        Manager.updateSetupScript
          Copies the setup scripts for the different shell environments
          and prints instructions for using them
        """
        # Look into using shutils.copyfile(src,dest)
        # For bash shell:
        envScriptSh = newScriptStub + '.sh'
        fileRead = open(template + '.sh')
        contents = fileRead.readlines()
        contents.insert(0, 'export ' + getTopEnvName() +
                        '=\"' + installDir +'\";\n')
        fileRead.close()
        fileWrite = open(envScriptSh, 'w')
        fileWrite.writelines(contents)
        fileWrite.close()
        # For tcsh/csh shell:
        envScriptCsh = newScriptStub + '.csh'
        fileRead = open(template + '.csh')
        contents = fileRead.readlines()
        contents.insert(0, 'setenv ' + getTopEnvName() +
                        ' \"'+installDir+'\";\n')
        fileRead.close()
        fileWrite = open(envScriptCsh, 'w')
        fileWrite.writelines(contents)
        fileWrite.close()
        helpText = """
To set the runtime environment:
------------------------------
in  csh or tcsh:
         source """+envScriptCsh+"""
in  sh, bash, zsh:
         .  """+envScriptSh+"""
"""
        return helpText

    
    def publishMetadata(self, metadataDir):
        """
        Manager.publishMetadata:
          Publishing step after a successful install.
          Currently it simply removes the .DAR metadata directory;
          in the future it could update a database of dar installations.
        """
        # Use shutil/os instead of shelling out to 'rm -rf': the old
        # command concatenated an unquoted path, which broke on paths
        # containing spaces or shell metacharacters.
        import shutil
        try:
            if os.path.isdir(metadataDir):
                shutil.rmtree(metadataDir)
            elif os.path.exists(metadataDir):
                # 'rm -rf' also removed plain files; keep that behavior.
                os.remove(metadataDir)
            # A missing path is silently ignored, like 'rm -rf'.
        except OSError as err:
            DARInternalError("In publishMetadata: " +
                             "removing " + metadataDir +
                             "\nerror:\n" + str(err))
        
    def createReadmeFile(self):
        """
        Manager.createReadmeFile:
          Placeholder that will eventually write the users' README into
          the darball: DAR info, installation instructions and pointers
          to documentation to read after manually unpacking it.
          <not implemented>
        """
        print("Creating users README file in the darball (not implemented)")
        

    def rememberSize(self):
        """
        Manager.rememberSize:
          Placeholder for recording the total installation size.
          The FSO only reflects the size of the shared space; without
          the RTE structure, and in incremental installations, sizes
          above the base do not contribute to the total installation
          size, so the size has to be measured here and stored in the
          DAR metadata.
          <not implemented>
        """
        # Intentionally a no-op: measuring self.blddir and storing the
        # result via the darMeta container is still to be done.
        pass
            
    def saveAndQuit(self, darFile = 'useDefault'):
        """
        Creates the final DARball and returns.
        This is called when the user is satisfied with the result,
        or when the criteria are satisfied (if in non-interactive mode).
        If successful, it will return the name of the final darball that
        was created, otherwise it will return 'None'.
        """
        # Work out the output file name ...
        if darFile == 'useDefault':
            darFile = self.blddir + "/" +\
                      self.darMeta.getVersionTag() +\
                      "." + self.currentRequest.getArchitecture() +\
                      self.darballSuffix
        else:
            darFile = self.blddir + "/" +\
                      self.currentRequest.getDarName() +\
                      self.darballSuffix

        # ... and pack the DAR admin directory plus the versioned tree.
        # str.join replaces the deprecated string.join helper.
        tarCmd = ' '.join([
            'tar -cz -f', darFile,
            '-C',
            os.path.join(self.blddir, self.dar.getTopInstDir()),
            getDarDirName(),
            '-C', self.blddir, self.darMeta.getVersionTag()
            ])
        # Fixed: the original message was missing a space before "using".
        infoOut("Creating DAR distribution in " +
                self.blddir + " using tar command:")
        infoOut(tarCmd)

        (status, out) = commands.getstatusoutput(tarCmd)
        if status == 0: # successful
            infoOut("Created " + darFile)
            self.timingLog('Created darball: ' + darFile)
            return darFile
        else:
            infoOut("Creation of dar file failed!\n " + out)
            return None
        
    def setDarPool(self, location):
        """
        Manager.setDarPool:
          Mutator: records the given location as the DAR pool to use.
        """
        self.darpool = location

    def findBaseDarball(self, darPool, releaseName):
        """
        Manager.findBaseDarball
          Finds a proper darball in the darpool,
          based on the project release name
        """
        for filename in os.listdir(darPool):
            if filename == releaseName+self.darballSuffix:
                return darPool+'/'+filename

    def changeRequest(self):
        """
        Manager.changeRequest:
          Placeholder for interactively modifying the current request.
          <not implemented>
        """
        print("Changing  request (not implemented)")

    def runTest(self):
        """
        Manager.runTest:
          Placeholder for running a validation test on the build.
          <not implemented>
        """
        print("Running  test  (not implemented)")

    def checkCriteria(self):
        """
        Manager.checkCriteria:
          Placeholder for verifying that the acceptance criteria hold.
          <not implemented>
        """
        print("Checks if criteria  are  satisfied (not implemented)")

    def getCriteria(self):
        """
        Manager.getCriteria:
          Placeholder for computing the acceptance criteria.
          <not implemented>
        """
        print("Calculating  criteria   (not implemented)")

    def findBest(self):
        """
        Manager.findBest:
          Placeholder for selecting the request with the best criteria.
          <not implemented>
        """
        print("Finding request with best criteria  (not implemented)")

    def runDebug(self):
        """
        Manager.runDebug:
          Placeholder for running a debug test.
          <not implemented>
        """
        print("Running debug test  (not implemented)")

    def saveMetadata(self, metadataFile):
        """
        Manager.saveMetadata:
          Writes the DAR metadata to the given file by delegating to
          the Metadata container held in self.darMeta.
        """
        print("saving DAR metadata")
        self.darMeta.saveMetadata(metadataFile)
Example #16
0
# NOTE(review): this fragment runs a chain of lookup modules against the
# target held in ``val`` (defined earlier in the file), sleeping 10 seconds
# between modules -- presumably to stay under the queried services' rate
# limits (TODO confirm).
time.sleep(10)
ap = AdminPanel(val)
print "######Sleep 10 seconds"
time.sleep(10)
n = Netcraft(val)
print "######Sleep 10 seconds"
time.sleep(10)
sd = SubDomain(val)
print "######Sleep 10 seconds"
time.sleep(10)

gl = GeoLocation(val)
print "######Sleep 10 seconds"
time.sleep(10)

l = Linkedin(val)
print "######Sleep 10 seconds"
time.sleep(10)

f = Facebook(val)
print "######Sleep 10 seconds"
time.sleep(10)

wc = WebCr(val)
print "######Sleep 10 seconds"
time.sleep(10)

m = Metadata(val)
print "######Sleep 10 seconds"
time.sleep(10)
    makeColorMap(args.colormap, args.colors)
# Check the arguments for times: an explicit timestamp wins over a
# start/end range; the defaults (0 .. -1) select the whole series.
startTime = 0
endTime = -1
timestamp = None
if args.time_stamp is not None:
    timestamp = args.time_stamp
elif args.start_time is not None:
    startTime = args.start_time
if args.end_time is not None:
    endTime = args.end_time
#################### METADATA #####################
# Build the metadata reader either for a single timestamp or for a
# [startTime, endTime] window, persist it, and load the data to map.
meta = None
if args.time_stamp is not None:
    meta = Metadata(args.infile, timeStamp=timestamp)
else:
    meta = Metadata(args.infile, startTime=startTime, endTime=endTime)
meta.writeMetadata()
data = meta.loadData()
minVal = np.nanmin(data)
maxVal = np.nanmax(data)

# Read the colormap.
# NOTE(review): Windows-style '\\' path separators -- this will not work
# on POSIX systems; confirm the intended platform or use os.path.join.
PUcols = np.loadtxt(scriptDir + '\\..\\ColorMaps\\' + args.colormap + '.txt')

# Create the color map object
gendMap = ListedColormap(PUcols, N=len(PUcols))
# Can't make a color map with NaN values, so set them to a value outside
# the color map's range (intended to render NaN values as black).
data[np.isnan(data)] = minVal - 5
Example #18
0
def main(args):
    """
    Build and run the Snakemake analysis pipeline for a set of assemblies.

    Validates the working directory, collects paths and options into
    ``options_dict``, formats the ``Snakemake_analysis`` template with
    them, writes the resulting Snakefile and invokes snakemake on it
    (optionally with a SLURM cluster configuration).

    args: parsed command-line namespace; the code below reads
          working_dir, threads_per_job, ref_fasta, user_info, gff_file
          and slurm_config from it.

    Raises ValueError when the working directory is missing or no
    assemblies are found.
    """
    # Directory containing this script, used to locate bundled files.
    __location__ = os.path.realpath(
        os.path.join(os.getcwd(), os.path.dirname(__file__)))

    # additional argument checks
    if not os.path.isdir(args.working_dir):
        raise ValueError('Working directory not found')
    args.working_dir = os.path.realpath(args.working_dir) + '/'
    # Start from a clean analysis directory.
    if os.path.isdir(args.working_dir + 'analysis/'):
        shutil.rmtree(args.working_dir + 'analysis/')

    # Collect every value the Snakefile template will be formatted with.
    options_dict = dict()
    options_dict['wd_envs'] = hp.parse_output_path(args.working_dir + 'envs/')
    options_dict['threads'] = args.threads_per_job
    options_dict['ref_fasta'] = os.path.realpath(args.ref_fasta)
    options_dict['reads_fastq'] = args.working_dir + 'all_reads.fastq'
    options_dict['wd_analysis'] = hp.parse_output_path(args.working_dir +
                                                       'analysis/')
    options_dict[
        'wd_analysis_condas'] = __location__ + '/analysis_conda_files/'
    options_dict['__location__'] = __location__

    # --- create output directories
    # NOTE(review): wd_analysis was produced by parse_output_path just
    # above and is removed again here -- possibly redundant; confirm
    # whether parse_output_path creates the directory.
    if os.path.isdir(options_dict['wd_analysis']):
        shutil.rmtree(options_dict['wd_analysis'])
    _ = hp.parse_output_path(options_dict['wd_analysis'] + 'quast')
    _ = hp.parse_output_path(options_dict['wd_analysis'] + 'jellyfish')
    _ = hp.parse_output_path(options_dict['wd_analysis'] + 'readset_analysis')

    options_dict['wd_analysis_summary'] = hp.parse_output_path(
        options_dict['wd_analysis'] + 'summary/')
    options_dict[
        'wd_assembler_results'] = args.working_dir + 'assembler_results/'
    options_dict[
        'wd_assemblies'] = args.working_dir + 'assembler_results/assemblies/'
    assemblies_list = hp.parse_input_path(options_dict['wd_assemblies'],
                                          pattern='*.fasta')
    if len(assemblies_list) == 0:
        raise ValueError('No succesful assemblies found to analyze!')
    assemblies_names_list = [
        os.path.splitext(os.path.basename(af))[0] for af in assemblies_list
    ]
    options_dict['assemblies_string'] = ' '.join(assemblies_names_list)
    # NOTE(review): yaml.load without an explicit Loader is deprecated
    # and unsafe on untrusted input; consider yaml.safe_load here.
    with open(args.user_info, 'r') as f:
        md_yaml = yaml.load(f)
    md = Metadata(md_yaml)
    md.write_publication_info(options_dict['wd_analysis_summary'] +
                              'publication_info.yaml')
    # --- Quast ---
    options_dict['quast_options'] = ''
    if md.is_eukaryote:
        options_dict['quast_options'] += '-e '
    if args.gff_file:
        options_dict['quast_options'] += '-G ' + os.path.abspath(
            args.gff_file) + ' '
    # Build, per assembly, the snakemake output declarations and the
    # shell commands that copy quast's alignment plots/tables into the
    # summary directory (paths differ between quast <5 and >=5).
    quast_output = ''
    quast_output_cmd = ''
    for anl in assemblies_names_list:
        quast_output += (
            ',\n\t\t{anl}_fplot=\'{wd_analysis_summary}quast/{anl}.fplot\''
            ',\n\t\t{anl}_rplot=\'{wd_analysis_summary}quast/{anl}.rplot\''
        ).format(anl=anl,
                 wd_analysis_summary=options_dict['wd_analysis_summary'])
        quast_output_cmd += (
            'if [ -e contigs_reports/nucmer_output/{anl}.fplot ]; then '  # for quast <5.0.0
            'cp contigs_reports/nucmer_output/{anl}.fplot {wd_analysis_summary}quast/.\n'
            'cp contigs_reports/nucmer_output/{anl}.rplot {wd_analysis_summary}quast/.\n'
            'fi\n').format(
                anl=anl,
                wd_analysis_summary=options_dict['wd_analysis_summary'])
        quast_output_cmd += (
            'if [ -e contigs_reports/all_alignments_{anl}.tsv ]; then '  # for quast =>5.0.0
            'cp contigs_reports/all_alignments_{anl}.tsv {wd_analysis_summary}quast/.\n'
            'fi\n').format(
                anl=anl,
                wd_analysis_summary=options_dict['wd_analysis_summary'])
    options_dict['quast_output'] = quast_output
    options_dict['quast_output_cmd'] = quast_output_cmd

    # --- Construct snakemake file ---
    # Timestamped file name so repeated runs do not clobber each other.
    sf_fn = args.working_dir + 'Snakefile_analysis_' + datetime.datetime.now(
    ).strftime('%Y%m%d%H%M%S')
    with open(__location__ + '/Snakemake_analysis', 'r') as f:
        sf = f.read()

    sf = sf.format(**options_dict)
    with open(sf_fn, 'w') as sf_handle:
        sf_handle.write(sf)

    sm_dict = {'use_conda': True}

    # Optional SLURM cluster execution.
    if args.slurm_config is not None:
        sm_dict['cluster'] = 'sbatch'
        sm_dict['cluster_config'] = args.slurm_config
        sm_dict['nodes'] = 5

    snakemake.snakemake(sf_fn, **sm_dict)
Example #19
0
    palmar_cluster_visualization.plot()

    similarity_val2 = kmeans.get_similarity_val(
        labelled_dataset_features=palmar_features,
        unlabelled_dataset_features=unlabelled_features)
    result = {}
    for image_id in list(unlabelled_features.keys()):
        if similarity_val1[image_id] <= similarity_val2[image_id]:
            result[image_id] = 'dorsal'
        else:
            result[image_id] = 'palmar'

    print(result)

    #ACCURACY
    metadata = Metadata(metadatapath='Data/HandInfo.csv')
    images_dop_dict = metadata.getimagesdop_dict()
    print('Accuracy:', misc.getAccuracy(result, images_dop_dict))

elif task == '3':
    folder_path = input("Enter folder path: ")
    start_images = list(map(str, input("Enter 3 imageids: ").split()))
    k = int(input("Enter number of outgoing edges: "))
    m = int(input("Enter number of dominant images to show: "))
    pagerank = PageRankUtil(folder_path, k, m, start_images)
    pagerank.page_rank_util()
    pagerank.plot_k_similar()

elif task == '4':
    classifier = input("1.SVM\n2.DT\n3.PPR\nSelect Classifier: ")
    labelled_dataset_path = input('Enter labelled dataset path: ')
    decomposition = Decomposition(decomposition_model, k, model,
                                  test_dataset_path)
    similarity = Similarity(model, image_id, m)
    similarity.get_similar_images(test_dataset_path,
                                  decomposition,
                                  reduced_dimension=True)

elif task == '3':
    model = input("1.CM\n2.LBP\n3.HOG\n4.SIFT\nSelect model: ")
    decomposition_model = input(
        "1.PCA\n2.SVD\n3.NMF\n4.LDA\nSelect decomposition: ")
    test_dataset_folder_path = os.path.abspath(
        os.path.join(Path(os.getcwd()).parent, test_dataset_path))
    images_list = list(
        misc.get_images_in_directory(test_dataset_folder_path).keys())
    metadata = Metadata(images_list)
    label = int(
        input("1.Left-Hand\n2.Right-Hand\n3.Dorsal\n4.Palmar\n"
              "5.With accessories\n6.Without accessories\n7.Male\n8.Female\n"
              "Please choose an option: "))
    label_interpret_dict = {
        1: {
            "aspectOfHand": "left"
        },
        2: {
            "aspectOfHand": "right"
        },
        3: {
            "aspectOfHand": "dorsal"
        },
        4: {
 def setUp(self):
     self.sut = Metadata("test.u1db",{'oauth':{'token_key':'NKKN8XVZLP5X23X','token_secret':'59ZN54UEUD3ULRU','consumer_key':'keySebas','consumer_secret':'secretSebas'}},"test1.u1db")
Example #22
0
    def __init__(self, darInput, pltf=None, cnf=None):        
        """
        Manager.__init__:
          Initializes DAR Manager aspects common to both create and
          install mode: validates the configuration (shared/dist/inst
          pools and tmp dir), creates a unique build directory with log
          files, builds the Request and seeds the Metadata container.

          darInput - dar input specification, passed on to Request and
                     stored in the metadata
          pltf     - platform identifier (stored as self.platform)
          cnf      - configuration dictionary (pools, tmp dir, ...)
        """        
        Lister.__init__(self)

        # Set defaults:
        self.darballSuffix = '_dar.tar.gz'

        # and initialize variables:
        self.baseBomDict = {}  # needed for incremental darball
        self.darpool = None
        self.dar = None # needed for creating darball
        
        self.config = cnf
        infoOut('Validating DAR configuration ...')
        if getVerbose():
            print "CONFIGURATION:", self.config
        # Check that dar shared pool is set up, exists and has right permissions:
        # DAR shared pool is used to look up base dar distributions and/or
        # installations.
        # NOTE(review): dict.has_key() is Python 2 only; "key in dict"
        # would work on both 2 and 3.
        if self.config.has_key("dar_shared_pool"):
            self.sharedPool = self.config["dar_shared_pool"]
        else:
            sys.exit("ERROR:dar shared pool is not defined in the configuration")
        if self.config.has_key("dar_dist_pool"):
            self.distPool = self.config["dar_dist_pool"]
        else:
            sys.exit("ERROR: dar dist pool is not defined in the configuration")
        if self.config.has_key("dar_inst_pool"):
            self.instPool = self.config["dar_inst_pool"]
        else:
            sys.exit("ERROR: dar inst pool is not defined in the configuration")

        # Check that dar tmpdir is set, exists, and has right permissions:
        if self.config.has_key("dar_tmp_dir"):
            self.tmpdir = self.config["dar_tmp_dir"]
        else:
            sys.exit("ERROR: dar_tmp_dir is not defined in the configuration")
        if notWritable(self.tmpdir):
            sys.exit(notWritable(self.tmpdir))
            
        # Each time when dar is called, it creates a new build directory
        # in tmpdir pool, using a unique datestamp:
        self.blddir = self.tmpdir+'/'+str(time.time())

        # Start logger and pre-set log files:
        self.logdir = self.blddir + '/logfiles' # one dir for all logfiles
        self.timingLog = Logger(self.logdir+'/session.timing')
        self.sessionStdoutLog = self.logdir+'/session.stdout'
        self.sessionHistoryLog = self.logdir+'/session.history'

        # Create necessary directories and (empty) log files:
        os.makedirs(self.blddir)
        os.makedirs(self.logdir)
        for logfile in (self.sessionStdoutLog,  self.sessionHistoryLog):
            infoOut('Creating ' + logfile + ' file')
            open(logfile, 'w').close()

        # Get platform info:
        self.platform = pltf

        self.timingLog('Completed configuration and setup ')

        # Create a request 
        self.currentRequest = Request(darInput, self.timingLog)
        self.timingLog('Created request object')

        # Initialize dar metadata
        self.darMeta = Metadata()
        self.darMeta.setDarVersion(getDarVersion())
    
        # Get release/version metadata from request and
        # put them into Metadata container:
        self.darMeta.setDarInput(darInput)
        self.darMeta.setBaseReleaseName(self.currentRequest.getBaseReleaseName())
        self.darMeta.setProjectName(self.currentRequest.getProjectName())
        self.darMeta.setVersionTag(self.currentRequest.getVersionTag())

        # Architecture
        self.darMeta.setArchitecture(self.currentRequest.getArchitecture())
class MetadataTest (unittest.TestCase):

    def setUp(self):
        """Create the Metadata system under test, backed by two throwaway u1db files."""
        # NOTE(review): the oauth credentials are hardcoded test fixtures.
        self.sut = Metadata("test.u1db",{'oauth':{'token_key':'NKKN8XVZLP5X23X','token_secret':'59ZN54UEUD3ULRU','consumer_key':'keySebas','consumer_secret':'secretSebas'}},"test1.u1db")

    def tearDown(self):
        """Close the database and remove the files created by setUp."""
        self.sut.db.close()
        os.remove("test.u1db")
        os.remove("test1.u1db")

    """
    method: insert
    when: called
    with: array
    should: insertCorrect
    """
    def test_insert_called_array_insertCorrect(self):
        """insert() should store the documents so they can be found via the (id, user) index."""
        array = self.getArrayInsert()
        self.sut.insert(array)
        self.sut.db.create_index("by-id", "id", "user_eyeos")
        results = self.sut.db.get_from_index("by-id", "32565632156","eyeID_EyeosUser_2")
        self.assertEquals(array[2],results[0].content)

    """
    method: select
    when: called
    with: id
    should: returnArray
    """
    def test_select_called_id_returnArray(self):
        """select() without a 'cloud' key should return the matching documents (legacy code path)."""
        settings[ 'NEW_CODE' ] = "false"
        array = self.getArrayInsert()
        self.sut.insert(array)
        data = self.sut.select({u'id': 9873615, u'user_eyeos': u'eyeID_EyeosUser_2', u'path': u'/'})
        data.sort()
        self.assertEquals(2,len(data))

    """
    method: select
    when: called
    with: idAndUserAndCloudAndPath
    should: returnArray
    """
    def test_select_called_idAndUserAndCloudAndPath_returnArray(self):
        """select() with a 'cloud' key should return the matching documents."""
        array = self.getArrayInsert()
        self.sut.insert(array)
        data = self.sut.select({u'id': 9873615, u'user_eyeos': u'eyeID_EyeosUser_2', u'cloud': u'Stacksync', u'path': u'/'})
        data.sort()
        self.assertEquals(2,len(data))

    """
    method: update
    when: called
    with: arrayWithoutCloud
    should: updateCorrect
    """
    def test_update_called_arrayWithoutCloud_updateCorrect(self):
        """update() without a 'cloud' key should modify the stored document (legacy code path)."""
        settings[ 'NEW_CODE' ] = "false"
        array = self.getArrayInsert()
        update = self.getArrayUpdate()
        self.sut.insert(array)
        self.sut.update(update)
        self.sut.db.create_index("by-id", "id", "user_eyeos")
        files = self.sut.db.get_from_index("by-id", "32565632156", "eyeID_EyeosUser_2")
        # 'doc' instead of 'file' (a Python 2 builtin); the comprehension
        # replaces the append loop, whose emptiness guard was redundant.
        results = [doc.content for doc in files]
        self.assertEquals(update[1],results[0])

    """
    method: update
    when: called
    with: arrayWithCloud
    should: updateCorrect
    """
    def test_update_called_arrayWithCloud_updateCorrect(self):
        """update() with a 'cloud' key should modify the stored document."""
        array = self.getArrayInsert()
        update = self.getArrayUpdate()
        self.sut.insert(array)
        self.sut.update(update)
        self.sut.db.create_index("by-id", "id", "user_eyeos")
        files = self.sut.db.get_from_index("by-id", "32565632156", "eyeID_EyeosUser_2")
        # 'doc' instead of 'file' (a Python 2 builtin); the comprehension
        # replaces the append loop, whose emptiness guard was redundant.
        results = [doc.content for doc in files]
        self.assertEquals(update[1], results[0])

    """
    method: delete
    when: called
    with: arrayWithoutCloud
    should: deleteCorrect
    """
    def test_delete_called_arrayWithoutCloud_deleteCorrect(self):
        """delete() without a 'cloud' key should remove the user's documents (legacy code path)."""
        settings[ 'NEW_CODE' ] = "false"
        array = self.getArrayInsert()
        self.sut.insert(array)
        # Renamed from 'list', which shadowed the builtin.
        toDelete = self.getArrayDelete()
        self.sut.delete(toDelete)
        self.sut.db.create_index("by-user", "user_eyeos")
        files = self.sut.db.get_from_index("by-user", "eyeID_EyeosUser_2")
        self.assertEquals(0,len(files))

    """
    method: delete
    when: called
    with: arrayWithCloud
    should: deleteCorrect
    """
    def test_delete_called_arrayWithCloud_deleteCorrect(self):
        """delete() with a 'cloud' key should remove the user's documents for that cloud."""
        array = self.getArrayInsert()
        self.sut.insert(array)
        # Renamed from 'list', which shadowed the builtin.
        toDelete = self.getArrayDelete()
        self.sut.delete(toDelete)
        self.sut.db.create_index("by-user", "user_eyeos", "cloud")
        files = self.sut.db.get_from_index("by-user", "eyeID_EyeosUser_2", "Stacksync")
        self.assertEquals(0, len(files))

    """
    method: getParent
    when: called
    with: path
    should: returnArray
    """
    def test_getParent_called_path_returnArray(self):
        """getParent() with path/filename/user should return the parent entry (legacy code path)."""
        settings[ 'NEW_CODE' ] = "false"  # stray trailing semicolon removed
        array = self.getArrayParent()
        self.sut.insert(array)
        data = self.sut.getParent({u'path': u'/documents/', u'filename': u'clients', u'user_eyeos': u'eyeID_EyeosUser_2'})
        self.assertEquals(array[0],data[0])

    """
    method: getParent
    when: called
    with: cloudAndPathAndFilenameAndUser
    should: returnArray
    """
    def test_getParent_called_cloudAndPathAndFilenameAndUser_returnArray(self):
        """getParent() with a 'cloud' key should return the parent entry."""
        array = self.getArrayParent()
        self.sut.insert(array)
        data = self.sut.getParent({u'cloud': u'Stacksync', u'path': u'/documents/', u'filename': u'clients', u'user_eyeos': u'eyeID_EyeosUser_2'})
        self.assertEquals(array[0], data[0])


    """
    method: getParent
    when: called
    with: cloudAndPathAndFilenameAndUser
    should: returnArrayInteroperability
    """

    def test_getParent_called_cloudAndPathAndFilenameAndUser_returnArrayInteroperability(self):
        """getParent() on interoperability data should return the parent with its id as a string."""
        array = self.getArrayParentInteroperability()
        self.sut.insert(array)
        data = self.sut.getParent({u'cloud': u'Stacksync', u'path': u'/documents/', u'filename': u'clients', u'user_eyeos': u'eyeID_EyeosUser_2'})
        # getParent is expected to normalize the id to a string -- the
        # fixture is patched to match before comparing.
        array[0]['id'] = '9873615'
        self.assertEquals(array[0], data[0])


    """
    method: deleteFolder
    when: called
    with: idFolder
    should: returnCorrect
    """
    def test_deleteFolder_called_idFolder_returnCorrect(self):
        """deleteFolder() without a 'cloud' key should delete the folder and its contents (legacy code path)."""
        settings[ 'NEW_CODE' ] = "false"
        array = self.getArrayDeleteFolder()
        self.sut.insert(array)
        self.sut.deleteFolder({u'id': 9873615, u'user_eyeos': u'eyeID_EyeosUser_2', u'path': u'/documents/'})
        docs = self.sut.db.get_all_docs()
        # Exactly one unrelated document should survive the delete.
        self.assertEquals(1, len(docs[1]))

    """
    method: deleteFolder
    when: called
    with: idFolderAndUserAndCloudAndPath
    should: returnCorrect
    """
    def test_deleteFolder_called_idFolderAndUserAndCloudAndPath_returnCorrect(self):
        """deleteFolder() with a 'cloud' key should delete the folder and its contents."""
        array = self.getArrayDeleteFolder()
        self.sut.insert(array)
        self.sut.deleteFolder({u'id': 9873615, u'user_eyeos': u'eyeID_EyeosUser_2', u'cloud': u'Stacksync', u'path': u'/documents/'})
        docs = self.sut.db.get_all_docs()
        # Exactly one unrelated document should survive the delete.
        self.assertEquals(1, len(docs[1]))

    """
    method: deleteMetadataUser
    when: called
    with: user
    should: deleteCorrect
    """
    def test_deleteMetadataUser_called_user_deleteCorrect(self):
        """deleteMetadataUser() should purge both metadata (db) and version (db2) documents for the user."""
        array = self.getArrayInsert()
        self.sut.insert(array)
        arrayVersion = self.getArrayInsertVersionDeleteUser()
        for version in arrayVersion:
            self.sut.db2.create_doc_from_json(json.dumps(version))
        # Renamed from 'list', which shadowed the builtin.
        users = [{'user_eyeos' : 'eyeID_EyeosUser_2'}]
        self.sut.deleteMetadataUser(users)
        docs = self.sut.db.get_all_docs()
        result = len(docs[1])
        docs = self.sut.db2.get_all_docs()
        result += len(docs[1])
        self.assertEquals(0,result)

    """
    method: deleteMetadataUser
    when: called
    with: userAndCloud
    should: deleteCorrect
    """
    def test_deleteMetadataUser_called_userAndCloud_deleteCorrect(self):
        """deleteMetadataUser() with a cloud should only purge that cloud's documents (two remain)."""
        array = self.getArrayInsert()
        self.sut.insert(array)
        arrayVersion = self.getArrayInsertVersionDeleteUser()
        for version in arrayVersion:
            self.sut.db2.create_doc_from_json(json.dumps(version))
        # Renamed from 'list', which shadowed the builtin.
        users = [{'user_eyeos' : 'eyeID_EyeosUser_2', 'cloud' : 'Stacksync'}]
        self.sut.deleteMetadataUser(users)
        docs = self.sut.db.get_all_docs()
        result = len(docs[1])
        docs = self.sut.db2.get_all_docs()
        result += len(docs[1])
        self.assertEquals(2,result)

    """
    method: selectMetadataUser
    when: called
    with: user
    should: return Array
    """
    def test_selectMetadataUser_called_user_returnArray(self):
        """selectMetadataUser() should return every document stored for the user."""
        array = self.getArrayInsert()
        self.sut.insert(array)
        files = self.sut.selectMetadataUser('eyeID_EyeosUser_2')
        files.sort()
        self.assertEquals(array,files)

    """
    method: renameMetadata
    when: called
    with: userAndIdAndPathAndName
    should: renameFolderCorrect
    """
    def test_renameMetadata_called_userAndIdAndPathAndName_renameFolderCorrect(self):
        """renameMetadata() on a folder should rename it and update its children's paths."""
        array = self.getArrayInsertRename()
        self.sut.insert(array)
        expected = self.getArrayRenameFolder('/A 1/','A 1')
        # NOTE(review): the fixture dict below repeats the u'is_root' key;
        # the literal is kept as-is to preserve behavior.
        self.sut.renameMetadata({u'cloud': u'Stacksync', u'user_eyeos':u'eyeID_EyeosUser_2',u'status': u'CHANGED', u'is_root': False, u'version': 2, u'filename': u'A 1', u'parent_id': u'null', u'server_modified': u'2013-03-08 10:36:41.997', u'path': u'/', u'client_modified': u'2013-03-08 10:36:41.997', u'id': u'9873615', u'user': u'eyeID_EyeosUser_2', u'is_root':False, u'is_folder':True})
        files = self.sut.db.get_all_docs()
        # 'doc' instead of 'file' to avoid shadowing the Python 2 builtin.
        results = [doc.content for doc in files[1]]
        results.sort()
        self.assertEquals(expected,results)

    """
    method: renameMetadata
    when: called
    with: userAndIdAndPathAndName
    should: renameFileCorrect
    """
    def test_renameMetadata_called_userAndIdAndPathAndName_renameFileCorrect(self):
        """renameMetadata() on a file should rename just that file."""
        array = self.getArrayInsertRename()
        self.sut.insert(array)
        expected = self.getArrayRenameFile('B 1.txt')
        self.sut.renameMetadata({u'cloud': u'Stacksync', u'user_eyeos': u'eyeID_EyeosUser_2', u'filename': u'B 1.txt', u'path': u'/A/', u'id': u'32565632156', u'size': 775412, u'mimetype': u'application/pdf', u'status': u'CHANGED', u'version': 2, u'parent_id': u'9873615', u'user': u'eyeos', u'client_modified': u'2013-03-08 10:36:41.997', u'server_modified': u'2013-03-08 10:36:41.997', u'is_folder': False})
        files = self.sut.db.get_all_docs()
        # 'doc' instead of 'file' to avoid shadowing the Python 2 builtin.
        results = [doc.content for doc in files[1]]
        results.sort()
        self.assertEquals(expected,results)

    """
    method: insertDownloadVersion
    when: called
    with: metadataWithoutCloud
    should: insertCorrect
    """
    def test_insertDownloadVersion_called_metadataWithoutCloud_insertCorrect(self):
        """insertDownloadVersion() without a 'cloud' key should store the version record (legacy code path)."""
        settings[ 'NEW_CODE' ] = "false"
        expected = {u'id': u'12457988', u'user_eyeos': u'eyeID_EyeosUser_2', u'version': u'2', u'recover': False}
        self.sut.insertDownloadVersion(expected)
        results = ''
        files = self.sut.db.get_all_docs()
        if len(files[1]) > 0:
            results = files[1][0].content
        self.assertEquals(expected,results)

    """
    method: insertDownloadVersion
    when: called
    with: metadataWithCloud
    should: insertCorrect
    """
    def test_insertDownloadVersion_called_metadataWithCloud_insertCorrect(self):
        """insertDownloadVersion() with a 'cloud' key should store the version record."""
        expected = {u'id': u'12457988', u'cloud': u'Stacksync', u'user_eyeos': u'eyeID_EyeosUser_2', u'version': u'2', u'recover': False}
        self.sut.insertDownloadVersion(expected)
        results = ''
        files = self.sut.db.get_all_docs()
        if len(files[1]) > 0:
            results = files[1][0].content
        self.assertEquals(expected,results)


    """
    method: updateDownloadVersion
    when: called
    with: metadataWithoutCloud
    should: updateCorrect
    """
    def test_updateDownloadVersion_called_metadataWithoutCloud_updateCorrect(self):
        """updateDownloadVersion() without a 'cloud' key should overwrite the stored record (legacy code path)."""
        settings[ 'NEW_CODE' ] = "false"
        metadata = {u'id': u'12457988', u'user_eyeos': u'eyeID_EyeosUser_2', u'version': u'2', u'recover': False}
        expected = {u'id': u'12457988', u'user_eyeos': u'eyeID_EyeosUser_2', u'version': u'3', u'recover': True}
        self.sut.insertDownloadVersion(metadata)
        self.sut.updateDownloadVersion(expected)
        results = ''
        files = self.sut.db.get_all_docs()
        if len(files[1]) > 0:
            results = files[1][0].content
        self.assertEquals(expected,results)

    """
    method: updateDownloadVersion
    when: called
    with: metadataWithCloud
    should: updateCorrect
    """
    def test_updateDownloadVersion_called_metadataWithCloud_updateCorrect(self):
        """updateDownloadVersion() with a 'cloud' key should overwrite the stored record."""
        metadata = {u'id': u'12457988', u'cloud': u'Stacksync', u'user_eyeos': u'eyeID_EyeosUser_2', u'version': u'2', u'recover': False}
        expected = {u'id': u'12457988', u'cloud': u'Stacksync', u'user_eyeos': u'eyeID_EyeosUser_2', u'version': u'3', u'recover': True}
        self.sut.insertDownloadVersion(metadata)
        self.sut.updateDownloadVersion(expected)
        results = ''
        files = self.sut.db.get_all_docs()
        if len(files[1]) > 0:
            results = files[1][0].content
        self.assertEquals(expected,results)

    """
    method: deleteDownloadVersion
    when: called
    with: id
    should: deleteCorrect
    """
    def test_deleteDownloadVersion_called_id_deleteCorrect(self):
        """deleteDownloadVersion() should remove the record matching (id, user)."""
        metadata = {u'id':u'12457988',u'user_eyeos':u'eyeID_EyeosUser_2',u'version': u'2', u'recover': False}
        self.sut.insertDownloadVersion(metadata)
        self.sut.deleteDownloadVersion("12457988","eyeID_EyeosUser_2")
        files = self.sut.db.get_all_docs()
        self.assertEquals(0,len(files[1]))

    """
    method: getDownloadVersion
    when: called
    with: idAndUserEyeos
    should: returnMetadata
    """
    def test_getDownloadVersion_called_idAndUserEyeos_returnMetadata(self):
        """getDownloadVersion() without a 'cloud' key should return the stored record (legacy code path)."""
        settings[ 'NEW_CODE' ] = "false"
        metadata = {u'id': u'12457988', u'user_eyeos': u'eyeID_EyeosUser_2', u'version': u'2', u'recover': False}
        self.sut.insertDownloadVersion(metadata)
        result = self.sut.getDownloadVersion({u'id': u'12457988', u'user_eyeos': u'eyeID_EyeosUser_2'})
        self.assertEquals(metadata,result)

    def test_getDownloadVersion_called_idAndUserEyeosAndCloud_returnMetadata(self):
        """
        method: getDownloadVersion
        when: called
        with: idAndUserEyeosAndCloud
        should: returnMetadata
        """
        metadata = {u'id': u'12457988', u'cloud': u'Stacksync', u'user_eyeos': u'eyeID_EyeosUser_2', u'version': u'2', u'recover': False}
        self.sut.insertDownloadVersion(metadata)
        fetched = self.sut.getDownloadVersion({u'id': u'12457988', u'user_eyeos': u'eyeID_EyeosUser_2', u'cloud': u'Stacksync'})
        self.assertEqual(metadata, fetched)

    def test_recursiveDeleteVersion_called_idAndUser_deleteCorrect(self):
        """
        method: recursiveDeleteVersion
        when: called
        with: idAndUser
        should: deleteCorrect
        """
        # Exercise the legacy (no-cloud) code path.
        settings['NEW_CODE'] = "false"
        self.sut.insert(self.getArrayInsertVersionMetadata())
        # Seed the version store (db2) with matching version rows.
        for row in self.getArrayInsertVersion():
            self.sut.db2.create_doc_from_json(json.dumps(row))
        self.sut.recursiveDeleteVersion({u'id': 9873615, u'user_eyeos': u'eyeID_EyeosUser_2'})
        remaining = self.sut.db2.get_all_docs()
        self.assertEqual(0, len(remaining[1]))

    def test_recursiveDeleteVersion_called_idAndUserAndCloud_deleteCorrect(self):
        """
        method: recursiveDeleteVersion
        when: called
        with: idAndUserAndCloud
        should: deleteCorrect
        """
        self.sut.insert(self.getArrayInsertVersionMetadata())
        # Seed the version store (db2) with matching version rows.
        for row in self.getArrayInsertVersion():
            self.sut.db2.create_doc_from_json(json.dumps(row))
        self.sut.recursiveDeleteVersion({u'id': 9873615, u'user_eyeos': u'eyeID_EyeosUser_2', u'cloud': u'Stacksync'})
        remaining = self.sut.db2.get_all_docs()
        self.assertEqual(0, len(remaining[1]))


    def getArrayInsert(self):
        """Fixture: two Stacksync rows (folder 'clients' + a PDF inside it) and one Nec PDF."""
        array = [{u'user_eyeos': u'eyeID_EyeosUser_2', u'cloud':u'Stacksync', u'status': u'NEW', u'is_root': False, u'version': 1, u'filename': u'clients', u'parent_id': u'null', u'server_modified': u'2013-03-08 10:36:41.997', u'path': u'/', u'client_modified': u'2013-03-08 10:36:41.997', u'id': u'9873615', u'user': u'eyeID_EyeosUser_2',u'is_folder':True},
                 {u'user_eyeos': u'eyeID_EyeosUser_2', u'cloud':u'Stacksync', u'filename': u'Client1.pdf', u'path': u'/clients/', u'id': u'32565632156', u'size': 775412, u'mimetype': u'application/pdf', u'status': u'NEW', u'version': 3, u'parent_id': u'9873615', u'user': u'eyeos', u'client_modified': u'2013-03-08 10:36:41.997', u'server_modified': u'2013-03-08 10:36:41.997', u'is_folder': False},
                 {u'user_eyeos': u'eyeID_EyeosUser_2', u'cloud':u'Nec', u'filename': u'Client1.pdf', u'path': u'/', u'id': u'32565632157', u'size': 775412, u'mimetype': u'application/pdf', u'status': u'NEW', u'version': 3, u'parent_id': u'null', u'user': u'eyeos', u'client_modified': u'2013-03-08 10:36:41.997', u'server_modified': u'2013-03-08 10:36:41.997', u'is_folder': False}]
        # NOTE(review): sorting a list of dicts relies on Python 2 comparison rules;
        # this raises TypeError on Python 3. The tests compare lists sorted the same way.
        array.sort()
        return array


    def getArrayUpdate(self):
        """Fixture: update payload (old parent id + a CHANGED file row).

        The row carries a 'cloud' key only when settings['NEW_CODE'] is "true".
        """
        if settings[ 'NEW_CODE' ] == "true":
            array = [{u'parent_old': u'9873615'},
                     {u'user_eyeos': u'eyeID_EyeosUser_2', u'cloud': u'Stacksync', u'filename': u'Client2.pdf', u'path': u'/clients/', u'id': u'32565632156', u'size': 775412, u'mimetype': u'application/pdf', u'status': u'CHANGED', u'version': 3, u'parent_id': u'9873615', u'user': u'eyeos', u'client_modified': u'2013-03-08 10:36:41.997', u'server_modified': u'2013-03-08 10:36:41.997', u'is_folder': False}]
        else:
            array = [{u'parent_old': u'9873615'},
                     {u'user_eyeos': u'eyeID_EyeosUser_2', u'filename': u'Client2.pdf', u'path': u'/clients/', u'id': u'32565632156', u'size': 775412, u'mimetype': u'application/pdf', u'status': u'CHANGED', u'version': 3, u'parent_id': u'9873615', u'user': u'eyeos', u'client_modified': u'2013-03-08 10:36:41.997', u'server_modified': u'2013-03-08 10:36:41.997', u'is_folder': False}]
        return array

    def getArrayDelete(self):
        """Fixture: delete descriptors (id / user / parent) matching getArrayInsert.

        The rows carry a 'cloud' key only when settings['NEW_CODE'] is "true".
        """
        if settings[ 'NEW_CODE' ] == "true":
            array = [{u'id': u'9873615', u'user_eyeos': u'eyeID_EyeosUser_2', u'cloud':u'Stacksync', u'parent_id':u'null'},
                     {u'id': u'32565632156', u'user_eyeos': u'eyeID_EyeosUser_2', u'cloud':u'Stacksync', u'parent_id':u'9873615'},
                     {u'id': u'32565632157', u'user_eyeos': u'eyeID_EyeosUser_2', u'cloud':u'Nec', u'parent_id':u'null'}]
        else:
            array = [{u'id': u'9873615', u'user_eyeos': u'eyeID_EyeosUser_2', u'parent_id':u'null'},
                     {u'id': u'32565632156', u'user_eyeos': u'eyeID_EyeosUser_2', u'parent_id':u'9873615'},
                     {u'id': u'32565632157', u'user_eyeos': u'eyeID_EyeosUser_2', u'parent_id':u'null'}]
        # NOTE(review): dict-list sort is Python 2 only (TypeError on Python 3).
        array.sort()
        return array

    def getArrayParent(self):
        """Fixture: CHANGED folder + file rows under /documents/ for parent-update tests."""
        array = [{u'user_eyeos': u'eyeID_EyeosUser_2', u'cloud': u'Stacksync', u'status': u'CHANGED', u'is_root': False, u'version': 1, u'filename':u'clients', u'parent_id': u'null', u'server_modified': u'2013-03-08 10:36:41.997', u'path': u'/documents/', u'client_modified': u'2013-03-08 10:36:41.997', u'id': u'9873615', u'user': u'eyeID_EyeosUser_2', u'is_folder': True},
                 {u'user_eyeos': u'eyeID_EyeosUser_2', u'cloud': u'Stacksync', u'filename': u'Client1.pdf', u'path': u'/documents/clients/', u'id': u'32565632156', u'size': 775412, u'mimetype': u'application/pdf', u'status': u'CHANGED', u'version': 3, u'parent_id': u'null', u'user': u'eyeos', u'client_modified': u'2013-03-08 10:36:41.997', u'server_modified': u'2013-03-08 10:36:41.997', u'is_folder': False}]
        # NOTE(review): dict-list sort is Python 2 only (TypeError on Python 3).
        array.sort()
        return array

    def getArrayParentInteroperability(self):
        """Fixture: interoperability variant of getArrayParent.

        Differences: the folder row uses a 'name' key (not 'filename') and its id
        is suffixed with the cloud name ('9873615_Stacksync').
        """
        array = [{u'user_eyeos': u'eyeID_EyeosUser_2', u'cloud': u'Stacksync', u'status': u'CHANGED', u'is_root': False, u'version': 1, u'name':u'clients', u'parent_id': u'null', u'server_modified': u'2013-03-08 10:36:41.997', u'path': u'/documents/', u'client_modified': u'2013-03-08 10:36:41.997', u'id': u'9873615_Stacksync', u'user': u'eyeID_EyeosUser_2', u'is_folder': True},
                 {u'user_eyeos': u'eyeID_EyeosUser_2', u'cloud': u'Stacksync', u'filename': u'Client1.pdf', u'path': u'/documents/clients/', u'id': u'32565632156', u'size': 775412, u'mimetype': u'application/pdf', u'status': u'CHANGED', u'version': 3, u'parent_id': u'null', u'user': u'eyeos', u'client_modified': u'2013-03-08 10:36:41.997', u'server_modified': u'2013-03-08 10:36:41.997', u'is_folder': False}]
        # NOTE(review): dict-list sort is Python 2 only (TypeError on Python 3).
        array.sort()
        return array

    def getArrayDeleteFolder(self):
        """Fixture: two Stacksync rows and one Nec folder for folder-delete tests."""
        array = [{u'user_eyeos':u'eyeID_EyeosUser_2', u'cloud':u'Stacksync', u'status': u'CHANGED', u'is_root': False, u'version': 1, u'filename':u'clients', u'parent_id': u'474411411', u'server_modified': u'2013-03-08 10:36:41.997', u'path': u'/documents/', u'client_modified': u'2013-03-08 10:36:41.997', u'id': u'9873615', u'user': u'eyeID_EyeosUser_2',u'is_folder':True},
                 {u'user_eyeos':u'eyeID_EyeosUser_2', u'cloud':u'Stacksync', u'filename':u'Client1.pdf',u'path':u'/documents/clients/',u'id':u'32565632156',u'size':775412,u'mimetype':u'application/pdf',u'status':u'CHANGED',u'version':3,u'parent_id':u'9873615',u'user':u'eyeos',u'client_modified':u'2013-03-08 10:36:41.997',u'server_modified':u'2013-03-08 10:36:41.997',u'is_folder':False},
                 {u'user_eyeos':u'eyeID_EyeosUser_2', u'cloud':u'Nec', u'status': u'CHANGED', u'is_root': False, u'version': 1, u'filename':u'datos', u'parent_id': u'474411411', u'server_modified': u'2013-03-08 10:36:41.997', u'path': u'/documents/', u'client_modified': u'2013-03-08 10:36:41.997', u'id': u'1478526', u'user': u'eyeID_EyeosUser_2',u'is_folder':True}]
        # NOTE(review): dict-list sort is Python 2 only (TypeError on Python 3).
        array.sort()
        return array

    def getArrayInsertRename(self):
        """Fixture: tree A/{B.txt, D.txt, C/{E.txt}} used by the rename tests."""
        # NOTE(review): the first dict repeats the u'is_root' key (both False);
        # the later occurrence wins, so behavior is unchanged, but it is redundant.
        array = [{u'cloud': u'Stacksync', u'user_eyeos': u'eyeID_EyeosUser_2', u'status': u'NEW', u'is_root': False, u'version': 1, u'filename': u'A', u'parent_id': u'null', u'server_modified': u'2013-03-08 10:36:41.997', u'path': u'/', u'client_modified': u'2013-03-08 10:36:41.997', u'id': u'9873615', u'user': u'eyeID_EyeosUser_2', u'is_root':False, u'is_folder':True},
                 {u'cloud': u'Stacksync', u'user_eyeos': u'eyeID_EyeosUser_2', u'filename': u'B.txt', u'path': u'/A/', u'id': u'32565632156', u'size': 775412, u'mimetype': u'application/pdf', u'status': u'NEW', u'version': 1, u'parent_id': u'9873615', u'user': u'eyeos', u'client_modified': u'2013-03-08 10:36:41.997', u'server_modified': u'2013-03-08 10:36:41.997', u'is_folder': False},
                 {u'cloud': u'Stacksync', u'user_eyeos': u'eyeID_EyeosUser_2', u'filename': u'D.txt', u'path': u'/A/', u'id': u'444441714', u'size': 775412, u'mimetype': u'application/pdf', u'status': u'NEW', u'version': 1, u'parent_id': u'9873615', u'user': u'eyeos', u'client_modified': u'2013-03-08 10:36:41.997', u'server_modified': u'2013-03-08 10:36:41.997', u'is_folder': False},
                 {u'cloud': u'Stacksync', u'user_eyeos': u'eyeID_EyeosUser_2', u'filename': u'C', u'path': u'/A/', u'id': u'32565632157', u'size': 775412, u'mimetype': u'application/pdf', u'status': u'NEW', u'version': 1, u'parent_id': u'9873615', u'user': u'eyeos', u'client_modified': u'2013-03-08 10:36:41.997', u'server_modified': u'2013-03-08 10:36:41.997', u'is_root': False, u'is_folder': True},
                 {u'cloud': u'Stacksync', u'user_eyeos': u'eyeID_EyeosUser_2', u'filename': u'E.txt', u'path': u'/A/C/', u'id': u'4415512', u'size': 775412, u'mimetype': u'application/pdf', u'status': u'NEW', u'version': 1, u'parent_id': u'32565632157', u'user': u'eyeos', u'client_modified': u'2013-03-08 10:36:41.997', u'server_modified': u'2013-03-08 10:36:41.997', u'is_root': False, u'is_folder': False}]
        # NOTE(review): dict-list sort is Python 2 only (TypeError on Python 3).
        array.sort()
        return array

    def getArrayRenameFolder(self, path, foldername):
        """Expected tree after renaming folder A.

        :param path: new path prefix for the children (e.g. u'/NewA/').
        :param foldername: new name of the renamed folder (version bumped to 2).
        """
        array = [{u'cloud': u'Stacksync', u'user_eyeos': u'eyeID_EyeosUser_2', u'status': u'CHANGED', u'is_root': False, u'version': 2, u'filename': u'' + foldername + '', u'parent_id': u'null', u'server_modified': u'2013-03-08 10:36:41.997', u'path': u'/', u'client_modified': u'2013-03-08 10:36:41.997', u'id': u'9873615', u'user': u'eyeID_EyeosUser_2', u'is_root':False, u'is_folder':True},
                 {u'cloud': u'Stacksync', u'user_eyeos': u'eyeID_EyeosUser_2', u'filename': u'B.txt', u'path': u'' + path + '', u'id': u'32565632156', u'size': 775412, u'mimetype': u'application/pdf', u'status': u'NEW', u'version': 1,u'parent_id': u'9873615', u'user': u'eyeos', u'client_modified': u'2013-03-08 10:36:41.997', u'server_modified': u'2013-03-08 10:36:41.997', u'is_folder': False},
                 {u'cloud': u'Stacksync', u'user_eyeos': u'eyeID_EyeosUser_2', u'filename': u'D.txt', u'path': u'' + path + '', u'id': u'444441714', u'size': 775412, u'mimetype': u'application/pdf', u'status': u'NEW', u'version': 1,u'parent_id': u'9873615', u'user': u'eyeos', u'client_modified': u'2013-03-08 10:36:41.997', u'server_modified': u'2013-03-08 10:36:41.997', u'is_folder': False},
                 {u'cloud': u'Stacksync', u'user_eyeos': u'eyeID_EyeosUser_2', u'filename': u'C', u'path': u'' + path + '', u'id': u'32565632157', u'size': 775412, u'mimetype': u'application/pdf', u'status': u'NEW', u'version': 1,u'parent_id': u'9873615', u'user': u'eyeos', u'client_modified': u'2013-03-08 10:36:41.997', u'server_modified': u'2013-03-08 10:36:41.997', u'is_root': False, u'is_folder': True},
                 {u'cloud': u'Stacksync', u'user_eyeos': u'eyeID_EyeosUser_2', u'filename': u'E.txt', u'path': u'' + path + 'C/', u'id': u'4415512', u'size': 775412, u'mimetype': u'application/pdf', u'status': u'NEW', u'version': 1,u'parent_id': u'32565632157', u'user': u'eyeos', u'client_modified': u'2013-03-08 10:36:41.997', u'server_modified': u'2013-03-08 10:36:41.997', u'is_root': False, u'is_folder': False}]
        # NOTE(review): dict-list sort is Python 2 only (TypeError on Python 3).
        array.sort()
        return array

    def getArrayRenameFile(self, filename):
        """Expected tree after renaming file B.txt.

        :param filename: new name of the renamed file (status CHANGED, version 2).
        """
        array = [{u'cloud': u'Stacksync', u'user_eyeos': u'eyeID_EyeosUser_2', u'status': u'NEW', u'is_root': False, u'version': 1, u'filename': u'A', u'parent_id': u'null', u'server_modified': u'2013-03-08 10:36:41.997', u'path': u'/', u'client_modified': u'2013-03-08 10:36:41.997', u'id': u'9873615', u'user': u'eyeID_EyeosUser_2', u'is_root':False, u'is_folder':True},
                 {u'cloud': u'Stacksync', u'user_eyeos': u'eyeID_EyeosUser_2', u'filename': u'' + filename + '', u'path': u'/A/', u'id': u'32565632156', u'size': 775412, u'mimetype': u'application/pdf', u'status': u'CHANGED', u'version': 2, u'parent_id': u'9873615', u'user': u'eyeos', u'client_modified': u'2013-03-08 10:36:41.997', u'server_modified': u'2013-03-08 10:36:41.997', u'is_folder': False},
                 {u'cloud': u'Stacksync', u'user_eyeos': u'eyeID_EyeosUser_2', u'filename': u'D.txt', u'path': u'/A/', u'id': u'444441714', u'size': 775412, u'mimetype': u'application/pdf', u'status': u'NEW', u'version': 1, u'parent_id': u'9873615', u'user':u'eyeos', u'client_modified': u'2013-03-08 10:36:41.997', u'server_modified': u'2013-03-08 10:36:41.997', u'is_folder': False},
                 {u'cloud': u'Stacksync', u'user_eyeos': u'eyeID_EyeosUser_2', u'filename': u'C', u'path':u'/A/', u'id': u'32565632157', u'size': 775412, u'mimetype': u'application/pdf', u'status': u'NEW', u'version': 1, u'parent_id': u'9873615', u'user': u'eyeos', u'client_modified': u'2013-03-08 10:36:41.997', u'server_modified': u'2013-03-08 10:36:41.997', u'is_root': False, u'is_folder': True},
                 {u'cloud': u'Stacksync', u'user_eyeos': u'eyeID_EyeosUser_2', u'filename': u'E.txt', u'path': u'/A/C/', u'id': u'4415512', u'size': 775412, u'mimetype': u'application/pdf', u'status': u'NEW', u'version': 1, u'parent_id': u'32565632157', u'user': u'eyeos', u'client_modified': u'2013-03-08 10:36:41.997', u'server_modified': u'2013-03-08 10:36:41.997', u'is_root': False, u'is_folder': False}]
        # NOTE(review): dict-list sort is Python 2 only (TypeError on Python 3).
        array.sort()
        return array

    def getArrayInsertVersionMetadata(self):
        """Fixture: file-metadata rows whose ids match the version rows of getArrayInsertVersion."""
        array = [{u'user_eyeos':u'eyeID_EyeosUser_2',u'cloud':u'Stacksync',u'status': u'NEW', u'is_root': False, u'version': 1, u'filename': u'clients', u'parent_id': u'null', u'server_modified': u'2013-03-08 10:36:41.997', u'path': u'/', u'client_modified': u'2013-03-08 10:36:41.997', u'id': u'9873615', u'user': u'eyeID_EyeosUser_2',u'is_folder':True},
                 {u'user_eyeos':u'eyeID_EyeosUser_2',u'cloud':u'Stacksync',u'filename':u'Client1.pdf',u'path':u'/clients/',u'id':u'32565632156',u'size':775412,u'mimetype':u'application/pdf',u'status':u'NEW',u'version':3,u'parent_id':u'9873615',u'user':u'eyeos',u'client_modified':u'2013-03-08 10:36:41.997',u'server_modified':u'2013-03-08 10:36:41.997',u'is_folder':False},
                 {u'user_eyeos':u'eyeID_EyeosUser_2',u'cloud':u'Stacksync',u'filename':u'B',u'path':u'/',u'id':u'11111',u'size':0,u'mimetype':u'application/pdf',u'status':u'NEW',u'version':2,u'parent_id':u'9873615',u'user':u'eyeos',u'client_modified':u'2013-03-08 10:36:41.997',u'server_modified':u'2013-03-08 10:36:41.997',u'is_folder':True},
                 {u'user_eyeos':u'eyeID_EyeosUser_2',u'cloud':u'Stacksync',u'filename':u'c.pdf',u'path':u'/',u'id':u'222333',u'size':775412,u'mimetype':u'application/pdf',u'status':u'NEW',u'version':3,u'parent_id':u'11111',u'user':u'eyeos',u'client_modified':u'2013-03-08 10:36:41.997',u'server_modified':u'2013-03-08 10:36:41.997',u'is_folder':False},
                 {u'user_eyeos':u'eyeID_EyeosUser_2',u'cloud':u'Stacksync',u'filename':u'Client1.pdf',u'path':u'/',u'id':u'32565632157',u'size':775412,u'mimetype':u'application/pdf',u'status':u'NEW',u'version':3,u'parent_id':u'null',u'user':u'eyeos',u'client_modified':u'2013-03-08 10:36:41.997',u'server_modified':u'2013-03-08 10:36:41.997',u'is_folder':False}]
        # NOTE(review): dict-list sort is Python 2 only (TypeError on Python 3).
        array.sort()
        return array

    def getArrayInsertVersion(self):
        """Fixture: download-version rows for ids 32565632156 and 222333.

        The rows carry a 'cloud' key only when settings['NEW_CODE'] is "true".
        """
        if settings[ 'NEW_CODE' ] == "true":
            array = [{u'id': u'32565632156', u'user_eyeos': u'eyeID_EyeosUser_2', u'cloud': u'Stacksync', u'version': 2, u'recover': False},
                     {u'id': u'222333', u'user_eyeos': u'eyeID_EyeosUser_2', u'cloud': u'Stacksync', u'version': 2, u'recover': False}]
        else:
            array = [{u'id': u'32565632156', u'user_eyeos': u'eyeID_EyeosUser_2', u'version': 2, u'recover': False},
                     {u'id': u'222333', u'user_eyeos': u'eyeID_EyeosUser_2', u'version': 2, u'recover': False}]
        # NOTE(review): dict-list sort is Python 2 only (TypeError on Python 3).
        array.sort()
        return array

    def getArrayInsertVersionDeleteUser(self):
        """Fixture: download-version rows for one user across two clouds (Stacksync, Nec)."""
        return [
            {u'id': u'9873615', u'user_eyeos': u'eyeID_EyeosUser_2', u'cloud': u'Stacksync', u'version': 2, u'recover': False},
            {u'id': u'32565632156', u'user_eyeos': u'eyeID_EyeosUser_2', u'cloud': u'Stacksync', u'version': 2, u'recover': False},
            {u'id': u'32565632157', u'user_eyeos': u'eyeID_EyeosUser_2', u'cloud': u'Nec', u'version': 2, u'recover': False},
        ]

    """
    ##################################################################################################################################################
                                                                    TEST CALENDAR
    ##################################################################################################################################################
    """

    def test_deleteEvent_called_array_deleteCorrect(self):
        """
        method: deleteEvent
        when: called
        with: array
        should: deleteCorrect
        """
        self.sut.insert(self.getArrayInsertEvent())
        expected = self.getArrayDeleteEvent()
        self.sut.deleteEvent(expected)
        docs = self.sut.db.get_all_docs()
        # Deleted events remain stored, reduced to their DELETED descriptors.
        contents = sorted(doc.content for doc in docs[1])
        self.assertEqual(expected, contents)

    def test_updateEvent_called_array_updateCorrect(self):
        """
        method: updateEvent
        when: called
        with: array
        should: updateCorrect
        """
        self.sut.insert(self.getArrayInsertEvent())
        update = self.getArrayUpdateEvent()
        self.sut.updateEvent(update)
        docs = self.sut.db.get_all_docs()
        contents = sorted(doc.content for doc in docs[1])
        self.assertEqual(update, contents)

    def test_selectEvent_called_userAndIdCalendar_returnArray(self):
        """
        method: selectEvent
        when: called
        with: userAndIdCalendar
        should: returnArray
        """
        self.sut.insert(self.getArrayInsertEvent())
        # Two of the three seeded events belong to the 'laboral' calendar.
        selected = self.sut.selectEvent('event','eyeos','laboral')
        self.assertEqual(2, len(selected))


    def test_insertEvent_called_array_insertCorrect(self):
        """
        method: insertEvent
        when: called
        with: array
        should: insertCorrect
        """
        expected = self.getArrayInsertEvent()
        self.sut.insertEvent(expected)
        # Re-inserting an already-present event must not create a duplicate.
        duplicate = [{u'type':u'event',u'user_eyeos': u'eyeos',u'calendar': u'personal',u'status':u'NEW', u'isallday': u'0', u'timestart': u'201419160000', u'timeend':u'201419170000', u'repetition': u'None', u'finaltype': u'1', u'finalvalue': u'0', u'subject': u'Visita Médico', u'location': u'Barcelona', u'description': u'Llevar justificante'}]
        self.sut.insertEvent(duplicate)
        docs = self.sut.db.get_all_docs()
        contents = sorted(doc.content for doc in docs[1])
        self.assertEqual(expected, contents)

    def test_insertCalendar_called_array_insertCorrect(self):
        """
        method: insertCalendar
        when: called
        with: array
        should: insertCorrect
        """
        expected = self.getArrayInsertCalendar()
        self.sut.insertCalendar(expected)
        # Re-inserting an already-present calendar must not create a duplicate.
        duplicate = [{u'type':u'calendar',u'user_eyeos':u'eyeos',u'name':u'school',u'status':u'NEW',u'description':u'school calendar',u'timezone':0}]
        self.sut.insertCalendar(duplicate)
        docs = self.sut.db.get_all_docs()
        contents = sorted(doc.content for doc in docs[1])
        self.assertEqual(expected, contents)
    def test_insertCalendar_called_array_updateCorrect(self):
        """
        method: insertCalendar
        when: called
        with: array
        should: updateCorrect
        """
        deleted = [{u'type':u'calendar',u'user_eyeos':u'eyeos',u'name':u'school',u'status':u'DELETED',u'description':u'school calendar',u'timezone':0}]
        self.sut.insertCalendar(deleted)
        # Inserting the same calendar again must replace the DELETED row with the NEW one.
        replacement = [{u'type':u'calendar',u'user_eyeos':u'eyeos',u'name':u'school',u'status':u'NEW',u'description':u'school calendar',u'timezone':0}]
        self.sut.insertCalendar(replacement)
        docs = self.sut.db.get_all_docs()
        contents = sorted(doc.content for doc in docs[1])
        self.assertEqual(replacement, contents)


    def test_deleteCalendar_called_array_deleteCorrect(self):
        """
        method: deleteCalendar
        when: called
        with: array
        should: deleteCorrect
        """
        self.sut.insertCalendar(self.getArrayInsertCalendar())
        self.sut.insertEvent(self.getArrayInsertCalendarEvents())
        self.sut.deleteCalendar(self.getArrayDeleteCalendar())
        docs = self.sut.db.get_all_docs()
        # Both the calendars and their events must be flagged DELETED.
        contents = sorted(doc.content for doc in docs[1])
        self.assertEqual(self.getArrayDeleteCalendarAndEvents("DELETED"), contents)

    def test_selectCalendar_called_nameCalendar_returnArray(self):
        """
        method: selectCalendar
        when: called
        with: nameCalendar
        should: returnArray
        """
        expected = self.getArrayInsertCalendar()
        self.sut.insertCalendar(expected)
        found = self.sut.selectCalendar({u'type':u'calendar',u'user_eyeos':u'eyeos'})
        self.assertEqual(expected, sorted(found))

    def test_updateCalendar_called_array_updateCorrect(self):
        """
        method: updateCalendar
        when: called
        with: array
        should: updateCorrect
        """
        self.sut.insertCalendar(self.getArrayInsertCalendar())
        update = [{u'type':u'calendar',u'user_eyeos':u'eyeos',u'name':u'personal',u'status':u'DELETED',u'description':u'personal calendar',u'timezone':0}]
        self.sut.updateCalendar(update)
        stored = self.sut.getCalendar({u'type':u'calendar',u'user_eyeos':u'eyeos',u'name':u'personal'})
        self.assertEqual(update[0], stored[0].content)

    def test_deleteCalendarUser_called_user_deleteCorrect(self):
        """
        method: deleteCalendarUser
        when: called
        with: user
        should: deleteCorrect
        """
        self.sut.insertCalendar(self.getArrayInsertCalendar())
        self.sut.insertEvent(self.getArrayInsertCalendarEvents())
        # Deleting the user must wipe every calendar and event document.
        self.sut.deleteCalendarUser('eyeos')
        docs = self.sut.db.get_all_docs()
        self.assertEqual(0, len(docs[1]))

    def test_selectCalendarsAndEvents_called_user_returnArray(self):
        """
        method: selectCalendarsAndEvents
        when: called
        with: user
        should: returnArray
        """
        self.sut.insertCalendar(self.getArrayInsertCalendar())
        # DELETED calendar and event must be excluded from the selection below.
        self.sut.insertCalendar([{u'type':u'calendar',u'user_eyeos':u'eyeos',u'name':u'class',u'status':u'DELETED'}])
        self.sut.insertEvent(self.getArrayInsertCalendarEvents())
        self.sut.insertEvent([{u'type':u'event',u'user_eyeos': u'eyeos',u'calendar': u'class',u'status':u'DELETED', u'isallday': u'0', u'timestart': u'201419160000', u'timeend':u'201419170000', u'repetition': u'None', u'finaltype': u'1', u'finalvalue': u'0', u'subject': u'Visita Médico', u'location': u'Barcelona', u'description': u'Llevar justificante'}])
        found = self.sut.selectCalendarsAndEvents('eyeos')
        self.assertEqual(self.getArrayDeleteCalendarAndEvents("NEW"), sorted(found))

    def getArrayInsertEvent(self):
        """Fixture: three NEW events (one 'personal', two 'laboral') for user 'eyeos'."""
        array = [{u'type':u'event',u'user_eyeos': u'eyeos',u'calendar': u'personal',u'status':u'NEW', u'isallday': u'0', u'timestart': u'201419160000', u'timeend':u'201419170000', u'repetition': u'None', u'finaltype': u'1', u'finalvalue': u'0', u'subject': u'Visita Médico', u'location': u'Barcelona', u'description': u'Llevar justificante'},
                 {u'type':u'event',u'user_eyeos': u'eyeos',u'calendar': u'laboral', u'status':u'NEW',u'isallday': u'1', u'timestart': u'201420160000', u'timeend':u'201420170000', u'repetition': u'None', u'finaltype': u'1', u'finalvalue': u'0', u'subject': u'Excursión', u'location': u'Girona', u'description': u'Mochila'},
                 {u'type':u'event',u'user_eyeos': u'eyeos',u'calendar': u'laboral',u'status':u'NEW', u'isallday': u'0', u'timestart': u'201421173000', u'timeend':u'201421183000', u'repetition': u'EveryWeek', u'finaltype': u'1', u'finalvalue': u'0', u'subject': u'ClaseInglés', u'location': u'Hospitalet', u'description': u'Trimestre'}]
        """array = [{u'status': u'NEW', u'description': u'Medico', u'finalvalue': u'0', u'finaltype': 1, u'subject': u'Prueba', u'timeend': 1395930600, u'timestart': 1395928800, u'user_eyeos': u'eyeos', u'location': u'Barcelona', u'repeattype': u'n', u'calendar': u'eyeos', u'repetition': u'None', u'type': u'event', u'isallday': 0}]"""
        # NOTE(review): dict-list sort is Python 2 only (TypeError on Python 3).
        array.sort()
        return array

    def getArrayDeleteEvent(self):
        """Fixture: DELETED descriptors matching the three events of getArrayInsertEvent."""
        array = [{u'type': u'event',u'user_eyeos': u'eyeos',u'calendar':u'personal',u'status':u'DELETED',u'timestart':u'201419160000',u'timeend':u'201419170000',u'isallday':u'0'},
                 {u'type': u'event',u'user_eyeos': u'eyeos',u'calendar':u'laboral', u'status':u'DELETED',u'timestart':u'201420160000',u'timeend':u'201420170000',u'isallday':u'1'},
                 {u'type': u'event',u'user_eyeos': u'eyeos',u'calendar':u'laboral',u'status':u'DELETED',u'timestart':u'201421173000',u'timeend':u'201421183000',u'isallday':u'0'}]
        """array = [{u'type':u'event',u'user_eyeos':u'eyeos',u'calendar':u'eyeos',u'status':u'DELETED',u'isallday':0,u'timestart':1395928800,u'timeend':1395930600,u'repetition':u'None',u'finaltype':1,u'finalvalue':u'0',u'subject':u'Prueba',u'location':u'Barcelona',u'repeattype':u'n',u'description':u'Medico'}]"""
        # NOTE(review): dict-list sort is Python 2 only (TypeError on Python 3).
        array.sort()
        return array

    def getArrayUpdateEvent(self):
        """Fixture: CHANGED versions of the three events seeded by getArrayInsertEvent."""
        array = [{u'type':u'event',u'user_eyeos': u'eyeos',u'calendar': u'personal',u'status':u'CHANGED', u'isallday': u'0', u'timestart': u'201419160000', u'timeend':u'201419170000', u'repetition': u'None', u'finaltype': u'1', u'finalvalue': u'0', u'subject': u'Visita Museo', u'location': u'Esplugues de llobregat', u'description': u'Llevar Ticket'},
                 {u'type':u'event',u'user_eyeos': u'eyeos',u'calendar': u'laboral', u'status':u'CHANGED',u'isallday': u'1', u'timestart': u'201420160000', u'timeend':u'201420170000', u'repetition': u'None', u'finaltype': u'1', u'finalvalue': u'0', u'subject': u'Excursión', u'location': u'Girona', u'description': u'Mochila'},
                 {u'type':u'event',u'user_eyeos': u'eyeos',u'calendar': u'laboral', u'status':u'CHANGED',u'isallday': u'0',u'timestart': u'201421173000', u'timeend':u'201421183000',u'repetition': u'EveryMonth', u'finaltype': u'1', u'finalvalue': u'0', u'subject': u'ClaseFrancés', u'location': u'Hospitalet', u'description': u'Trimestre'}]
        # NOTE(review): dict-list sort is Python 2 only (TypeError on Python 3).
        array.sort()
        return array

    def getArrayInsertCalendar(self):
        """Fixture: two NEW calendars ('personal', 'school') for user 'eyeos'."""
        array =[{u'type':u'calendar',u'user_eyeos':u'eyeos',u'name':u'personal',u'status':u'NEW',u'description':u'personal calendar',u'timezone':0},
                {u'type':u'calendar',u'user_eyeos':u'eyeos',u'name':u'school',u'status':u'NEW',u'description':u'school calendar',u'timezone':0}]
        # NOTE(review): dict-list sort is Python 2 only (TypeError on Python 3).
        array.sort()
        return array

    def getArrayInsertCalendarEvents(self):
        """Fixture: two NEW events, both in the 'personal' calendar of user 'eyeos'."""
        array = [{u'type':u'event',u'user_eyeos': u'eyeos',u'calendar': u'personal',u'status':u'NEW', u'isallday': u'0', u'timestart': u'201419160000', u'timeend':u'201419170000', u'repetition': u'None', u'finaltype': u'1', u'finalvalue': u'0', u'subject': u'Visita Médico', u'location': u'Barcelona', u'description': u'Llevar justificante'},
                 {u'type':u'event',u'user_eyeos': u'eyeos',u'calendar': u'personal', u'status':u'NEW',u'isallday': u'1', u'timestart': u'201420160000', u'timeend':u'201420170000', u'repetition': u'None', u'finaltype': u'1', u'finalvalue': u'0', u'subject': u'Excursión', u'location': u'Girona', u'description': u'Mochila'}]
        # NOTE(review): dict-list sort is Python 2 only (TypeError on Python 3).
        array.sort()
        return array

    def getArrayDeleteCalendar(self):
        """Fixture: delete descriptors for the two calendars of getArrayInsertCalendar."""
        array =[{u'type':u'calendar',u'user_eyeos':u'eyeos',u'name':u'personal'},
                {u'type':u'calendar',u'user_eyeos':u'eyeos',u'name':u'school'}]
        # NOTE(review): dict-list sort is Python 2 only (TypeError on Python 3).
        array.sort()
        return array

    def getArrayDeleteCalendarAndEvents(self,status):
        """Expected calendars + events with every row's status set to *status*.

        :param status: status string stamped on all four rows (e.g. "NEW", "DELETED").
        """
        array =[{u'type':u'calendar',u'user_eyeos':u'eyeos',u'name':u'personal',u'status':u'' + status + '',u'description':u'personal calendar',u'timezone':0},
                {u'type':u'calendar',u'user_eyeos':u'eyeos',u'name':u'school',u'status':u'' + status +'',u'description':u'school calendar',u'timezone':0},
                {u'type':u'event',u'user_eyeos': u'eyeos',u'calendar': u'personal',u'status':u'' + status +'', u'isallday': u'0', u'timestart': u'201419160000', u'timeend':u'201419170000', u'repetition': u'None', u'finaltype': u'1', u'finalvalue': u'0', u'subject': u'Visita Médico', u'location': u'Barcelona', u'description': u'Llevar justificante'},
                {u'type':u'event',u'user_eyeos': u'eyeos',u'calendar': u'personal', u'status':u'' + status +'',u'isallday': u'1', u'timestart': u'201420160000', u'timeend':u'201420170000', u'repetition': u'None', u'finaltype': u'1', u'finalvalue': u'0', u'subject': u'Excursión', u'location': u'Girona', u'description': u'Mochila'}]
        # NOTE(review): dict-list sort is Python 2 only (TypeError on Python 3).
        array.sort()
        return array

    """
    ##################################################################################################################################################
                                                                    TEST LOCK FILE
    ##################################################################################################################################################
    """

    def test_getMetadataFile_called_user_returnArray(self):
        """
        method: getMetadataFile
        when: called
        with: idAndCloud
        should: returnArray
        """
        self.sut.db.create_doc_from_json(json.dumps({u'id':u'124568',u'cloud':u'Stacksync',u'username':u'eyeos',u'IpServer':u'192.168.56.101',u'datetime':u'2015-05-12 10:50:00',u'status':u'close'}))
        rows = self.sut.getMetadataFile('124568','Stacksync')
        rows.sort()
        self.assertEqual(1, len(rows))

    def test_lockFile_called_metadata_emptyData(self):
        """
        method: lockFile
        when: called
        with: metadata
        should: emptyData
        """
        lock = {u'id':u'124568',u'cloud':u'Stacksync',u'username':u'eyeos',u'IpServer':u'192.168.56.101',u'datetime':u'2015-05-12 10:50:00',u'status':u'close',u'timeLimit':10}
        self.sut.lockFile(lock)
        stored = self.sut.getMetadataFile('124568','Stacksync')
        self.assertEqual(lock, stored[0])

    def test_lockFile_called_metadata_updateData(self):
        """
        method: lockFile
        when: called
        with: metadata
        should: updateData
        """
        lock = {u'id':u'124568',u'cloud':u'Stacksync',u'username':u'eyeos',u'IpServer':u'192.168.56.101',u'datetime':u'2015-05-12 10:50:00',u'status':u'close'}
        self.sut.db.create_doc_from_json(json.dumps(lock))
        # A closed lock may be re-acquired: lockFile must update the stored row.
        lock['status'] = u'open'
        lock['timeLimit'] = 10
        self.sut.lockFile(lock)
        stored = self.sut.getMetadataFile('124568','Stacksync')
        self.assertEqual(lock, stored[0])

    """
    method: lockFile
    when: called
    with: metadata
    should: updateDataSameUser
    """

    def test_lockFile_called_metadata_updateDataSameUserAndServer(self):
        """lockFile refreshes the lock when user and server match the current owner."""
        metadata = {u'id': u'124568', u'cloud': u'Stacksync', u'username': u'eyeos',
                    u'IpServer': u'192.168.56.101', u'datetime': u'2015-05-12 10:50:00',
                    u'status': u'open'}
        self.sut.db.create_doc_from_json(json.dumps(metadata))
        metadata['datetime'] = u'2015-05-12 10:55:00'
        metadata['timeLimit'] = 10
        self.sut.lockFile(metadata)
        stored = self.sut.getMetadataFile('124568', 'Stacksync')
        self.assertEquals(metadata, stored[0])


    """
    method: lockFile
    when: called
    with: metadata
    should: updateDataTimeExpired
    """

    def test_lockFile_called_metadata_updateDataTimeExpired(self):
        """lockFile lets a different user take over once the old lock has expired."""
        metadata = {u'id': u'124568', u'cloud': u'Stacksync', u'username': u'tester',
                    u'IpServer': u'192.168.56.101', u'datetime': u'2015-05-12 10:50:00',
                    u'status': u'open'}
        self.sut.db.create_doc_from_json(json.dumps(metadata))
        metadata['username'] = u'eyeos'
        metadata['datetime'] = u'2015-05-12 11:05:00'
        metadata['timeLimit'] = 10
        self.sut.lockFile(metadata)
        stored = self.sut.getMetadataFile('124568', 'Stacksync')
        self.assertEquals(metadata, stored[0])

    """
    method: lockFile
    when: called
    with: metadata
    should: returnIncorrectDistinctUser
    """
    def test_lockFile_called_metadata_returnIncorrectDistinctUser(self):
        """lockFile returns False for a different user while the lock is live."""
        metadata = {u'id': u'124568', u'cloud': u'Stacksync', u'username': u'eyeos',
                    u'IpServer': u'192.168.56.101', u'datetime': u'2015-05-12 10:50:00',
                    u'status': u'open'}
        self.sut.db.create_doc_from_json(json.dumps(metadata))
        metadata['username'] = u'tester'
        metadata['timeLimit'] = 10
        metadata['datetime'] = u'2015-05-12 10:55:00'
        outcome = self.sut.lockFile(metadata)
        self.assertEquals(False, outcome)

    """
    method: lockFile
    when: called
    with: metadata
    should: returnIncorrectDistinctServer
    """

    def test_lockFile_called_metadata_returnIncorrectDistinctServer(self):
        """lockFile returns False for a different server while the lock is live."""
        metadata = {u'id': u'124568', u'cloud': u'Stacksync', u'username': u'eyeos',
                    u'IpServer': u'192.168.56.101', u'datetime': u'2015-05-12 10:50:00',
                    u'status': u'open'}
        self.sut.db.create_doc_from_json(json.dumps(metadata))
        metadata['IpServer'] = u'192.168.56.102'
        metadata['timeLimit'] = 10
        metadata['datetime'] = u'2015-05-12 10:55:00'
        outcome = self.sut.lockFile(metadata)
        self.assertEquals(False, outcome)


    """
    method: updateDateTime
    when: called
    with: metadata
    should: returnUpdateCorrect
    """
    def test_updateDateTime_called_metadata_returnUpdateCorrect(self):
        """updateDateTime refreshes the stored datetime for the lock owner."""
        metadata = {u'id': u'124568', u'cloud': u'Stacksync', u'username': u'eyeos',
                    u'IpServer': u'192.168.56.101', u'datetime': u'2015-05-12 10:50:00',
                    u'status': u'open'}
        self.sut.db.create_doc_from_json(json.dumps(metadata))
        metadata['datetime'] = u'2015-05-12 11:50:00'
        self.sut.updateDateTime(metadata)
        stored = self.sut.getMetadataFile('124568', 'Stacksync')
        self.assertEquals(metadata, stored[0])

    """
    method: updateDateTime
    when: called
    with: metadata
    should: returnIncorrectDistinctUsername
    """
    def test_updateDateTime_called_metadata_returnIncorrectDistinctUsername(self):
        """updateDateTime returns False when the requester is not the lock owner.

        Note: as in the original test, no document is inserted into the db here.
        """
        metadata = {u'id': u'124568', u'cloud': u'Stacksync', u'username': u'eyeos',
                    u'IpServer': u'192.168.56.101', u'datetime': u'2015-05-12 10:50:00',
                    u'status': u'open'}
        metadata['username'] = u'tester'
        outcome = self.sut.updateDateTime(metadata)
        self.assertEquals(False, outcome)

    """
    method: unLockFile
    when: called
    with: metadata
    should: returnIncorrectDistinctUsername
    """
    def test_unLockFile_called_metadata_returnCorrect(self):
        """unLockFile flips the stored document's status back to close."""
        metadata = {u'id': u'124568', u'cloud': u'Stacksync', u'username': u'eyeos',
                    u'IpServer': u'192.168.56.101', u'datetime': u'2015-05-12 10:50:00',
                    u'status': u'open'}
        self.sut.db.create_doc_from_json(json.dumps(metadata))
        metadata['status'] = u'close'
        self.sut.unLockFile(metadata)
        stored = self.sut.getMetadataFile('124568', 'Stacksync')
        self.assertEquals(metadata, stored[0])
#! /usr/bin/python3

from Metadata import Metadata
from ImageSynchronizer import ImageSynchronizer
import numpy as np
import matplotlib.pyplot as plt

# Load the tokamak (robot position) metadata and plot the trajectory.
tokamak = Metadata()
tokamak.parse_metadata('tokamak/dataformat.txt','tokamak/tokamak.txt')
x_tokamak = tokamak.get_nparray('x')
y_tokamak = tokamak.get_nparray('y')

fig1 = plt.figure()
# Bug fix: the original plotted undefined names `x` and `y` (NameError);
# the arrays extracted above are x_tokamak / y_tokamak.
plt.plot(x_tokamak, y_tokamak, label="robot position")
plt.legend(loc="upper right")
plt.xlabel("East (m)")
plt.ylabel("North (m)")
# Equal aspect so the East/North trajectory is not distorted.
plt.gca().set_aspect('equal', adjustable='box')

plt.show(block=False)
Example #25
0
# sync.export_extrinsic_synchronized_index_file("synchronized_cam_indexes.txt")
print(
    "Uncomment line 11 to generate syncronised image indexes file. See ImageSynchronizer for code details."
)

# Timestamp arrays for each stereo bench (left/right cameras).
# Front cam
frontLeftStamps = sync.dataFrontLeft.get_nparray('timestamp')
frontRightStamps = sync.dataFrontRight.get_nparray('timestamp')
# Rear cam
rearLeftStamps = sync.dataRearLeft.get_nparray('timestamp')
rearRightStamps = sync.dataRearRight.get_nparray('timestamp')
# Nav cam
navLeftStamps = sync.dataNavLeft.get_nparray('timestamp')
navRightStamps = sync.dataNavRight.get_nparray('timestamp')

tokamak = Metadata()
# tokamak.parse_metadata('tokamak/dataformat.txt','tokamak/tokamak.txt')

# time beginning of acquisition
# Bug fix: take the FIRST stamp of every bench. The original indexed
# rearLeftStamps[1] and navLeftStamps[2] (later frames), skewing t0.
t0 = min([frontLeftStamps[0], rearLeftStamps[0], navLeftStamps[0]])

# Plotting #######################################################

# Ploting image timestamps: elapsed seconds (stamps are in microseconds,
# per the 1e6 division below) against image index, one curve per bench.
fig0, axes = plt.subplots(2, 1, sharex=True, sharey=False)
axes[0].plot((frontLeftStamps - t0) / 1000000.0, label="front left stamp")
axes[0].plot((rearLeftStamps - t0) / 1000000.0, label="rear left stamp")
axes[0].plot((navLeftStamps - t0) / 1000000.0, label="nav left stamp")
axes[0].legend(loc="upper left")
axes[0].set_xlabel("image index")
axes[0].set_ylabel("time (s)")
Example #26
0
class PicThing:
    """Main application window of the PicThing image browser.

    Glues the Glade-built GTK UI to the FileManager (thumbnail store, search
    index) and Metadata (per-image title/description/tags) helpers.

    NOTE(review): this is Python 2 / PyGTK code (print statements, gtk,
    ConfigParser). gtk, gobject, ConfigParser, os, re, sys, urllib and the
    FileManager/Metadata/ResizableImage/ParseException/NoDirException names
    must be provided by imports elsewhere in this file.
    """

    # Widgets and collaborators; populated in __init__ / action_switchlibrary.
    window   = None   # main gtk window from the Glade file
    builder  = None   # gtk.Builder holding all Glade objects
    iconview = None   # thumbnail icon view bound to the FileManager store
    filemgr  = None   # FileManager of the currently active library
    meta     = None   # Metadata of the image currently shown (None when none)
    libs     = None   # list of (name, path) pairs from the ini [libraries] section
    image    = None   # ResizableImage canvas used on the image page

    def on_window_destroy(self, widget, data=None):
        """Signal handler: end the GTK main loop when the window is closed."""
        gtk.main_quit()


    def __init__(self):
        """Build the UI, read the library config and select the first library."""
        self.builder = gtk.Builder()
        self.builder.add_from_file("picthing.glade")

        self.window = self.builder.get_object("window")
        self.builder.connect_signals(self)

        # Libraries come from ./picthing.ini and/or ~/.picthing.ini.
        config = ConfigParser.ConfigParser()
        config.read(['picthing.ini', os.path.expanduser('~/.picthing.ini')])
        self.libs = config.items('libraries')

        place   = self.builder.get_object("place_librarypicker")
        libpick =  gtk.combo_box_new_text()
        libpick.connect('changed',self.action_switchlibrary)
        place.add(libpick)
        libpick.show()
        if(len(self.libs)):
            for lib in self.libs:
                libpick.append_text(lib[0])
            # Activating entry 0 fires 'changed' -> action_switchlibrary.
            libpick.set_active(0)
        else:
            #FIXME use some dialog here
            sys.exit()


        # add image canvas
        self.image = ResizableImage()
        self.image.show()
        self.builder.get_object("picframe").add(self.image)

    def action_switchlibrary(self, widget):
        """ Load a new library """
        library = self.libs[widget.get_active()]
        print "loading library '"+library[0]+"' in '"+library[1]+"'"
        self.meta     = None
        self.builder.get_object('notebook').set_current_page(0)
        # Close the previous library's index before opening the new one.
        if(self.filemgr != None):
            self.filemgr.index.close();
        self.filemgr  = FileManager(library[1])
        self.iconview = self.builder.get_object("iconview")
        self.iconview.set_model(self.filemgr.store)
        self.iconview.set_text_column(self.filemgr.COL_TITLE)
        self.iconview.set_pixbuf_column(self.filemgr.COL_PIXBUF)
        self.iconview.set_tooltip_column(self.filemgr.COL_PATH)
        # Empty query = browse the library root.
        self.new_query('')

        self.filemgr.index.tagcloud()


    def action_search(self,widget):
        """Run the query from the query box.

        folder:"..." switches to browse mode, an empty query browses the
        root, anything else is handed to the search index.
        """
        querybox = self.builder.get_object("querybox")
        self.set_status("Searching...")

        query = querybox.get_text()
        query = query.strip()
        # folder:"path" (group 2 is the quoted path, may be None for bare folder:)
        m = re.search('^folder:("([^"]*)")?$',query)

        try:
            if(m):
                query = m.group(2);
                self.filemgr.browse(query)
            elif(query == ''):
                self.filemgr.browse(query)
            else:
                    self.filemgr.search(query)

            self.set_status("okay")

        except ParseException:
            self.set_status("Couldn't parse query")
        except KeyError:
            self.set_status("Wrong field name")
        except NoDirException:
            self.set_status("No such directory")

        # Always land on the browse page after a query.
        self.builder.get_object('notebook').set_current_page(0)


    def action_iconclick(self,widget,item):
        """Icon activated: descend into folders, or switch to the image page."""
        model = widget.get_model()
        path  = model[item][self.filemgr.COL_PATH]
        ftype = model[item][self.filemgr.COL_TYPE]

        if(ftype == 'dir'):
            if(path):
                self.new_query('folder:"'+path+'"')
            else:
                # Empty path — presumably the "up" entry; back to the root.
                self.new_query('')
        else:
            # Switching to page 1 triggers action_pageswitch -> load_image.
            self.builder.get_object('notebook').set_current_page(1)

    def action_pageswitch(self,notebook, page, page_num):
        """ Signal handler. Activates when the notebok tab is switched """

        # Drop the current Metadata reference; the target page rebuilds state.
        self.meta = None
        if(page_num == 1):
            self.load_image()
        elif(page_num == 2):
            self.builder.get_object('tagcloud').set_markup(self.filemgr.get_tagcloudstring())



    def action_imgnext(self, button):
        """ Navigate to the next image in icon view """
        pos = self.get_currentpos()
        pos = self.filemgr.get_nextimagepos(pos)
        # pos is None when there is no further image to go to.
        if(pos != None):
            self.iconview.select_path(pos)
            self.load_image()


    def action_imgprev(self, button):
        """ Navigate to the previous image in icon view """
        pos = self.get_currentpos()
        pos = self.filemgr.get_previmagepos(pos)
        # pos is None when there is no previous image to go to.
        if(pos != None):
            self.iconview.select_path(pos)
            self.load_image()

    def action_activate_link(self, widget, link):
        """ handle clicks in label links """
        # Links carry url-encoded queries; decode before executing.
        link = urllib.unquote(link)
        self.new_query(link)

    def get_currentpos(self):
        """ return the number (position) of the currently selected icon

            returns None if nothing is selected
        """
        pos = self.iconview.get_selected_items()

        # NOTE(review): get_selected_items() returns a list; an empty
        # selection is [] (falsy, but not == None) and would reach
        # pos[0][0] below — confirm against PyGTK behavior.
        if(pos == None):
            return None;
        return pos[0][0] # array, tuple


    def load_image(self):
        """ Load the next image to show """
        # Persist any edits of the previously shown image first.
        self.save_image()

        panel = self.builder.get_object('imagepanel')
        panel.hide()
        self.image.hide()

        pos = self.get_currentpos()
        if(pos == None):
            return
        img = self.filemgr.get_itemat(pos)

        # Directories have no metadata/image to display.
        if(img['ft'] == 'dir'):
            return

        self.meta = Metadata(img['fn']);

        # Populate title / content / tags widgets; editing is only enabled
        # when the metadata backend reports the file as writable.
        obj = self.builder.get_object('imgtitle')
        obj.set_text(self.meta.get_title())
        obj.set_sensitive(self.meta.writable)

        obj = self.builder.get_object('imgcontent')
        obj.set_text(self.meta.get_content())
        obj = self.builder.get_object('imgcontentbox')
        obj.set_sensitive(self.meta.writable)

        obj = self.builder.get_object('imgtags')
        obj.set_text(self.meta.get_tags())
        obj.set_sensitive(self.meta.writable)

        obj = self.builder.get_object('imgname')
        obj.set_text(os.path.basename(img['fn']))

        panel.show()

        # Let GTK repaint the panel before the (potentially slow) image load.
        while gtk.events_pending():
            gtk.main_iteration(False)
        self.image.set_from_file(img['fn'])
        self.image.show()

    def save_image(self):
        """ Save metadata of the currently loaded image (if any) """
        if(self.meta == None):
            return
        cnt = self.builder.get_object('imgcontent');
        self.meta.set_title(   self.builder.get_object('imgtitle').get_text()   )
        self.meta.set_content( cnt.get_text(cnt.get_start_iter(),cnt.get_end_iter()) )
        self.meta.set_tags(    self.builder.get_object('imgtags').get_text()    )
        # conditional_write() presumably writes only when something changed.
        if self.meta.conditional_write():
            # update index with new data
            self.filemgr.index.update_image(self.meta.filename)


    def new_query(self,query):
        """ Set a new search or browse query and execute it """
        querybox = self.builder.get_object("querybox")
        querybox.set_text(query)
        self.action_search(None)


    def set_status(self,text,context=1):
        """Push `text` onto the status bar (context id defaults to 1)."""
        status = self.builder.get_object("statusbar")
        status.push(context,text)



    def action_scandialog(self,widget):
        """Open the rescan dialog, prefilled with the currently browsed folder."""

        # If the current query is a folder query, scan that folder; else root.
        query = self.builder.get_object("querybox").get_text()
        m = re.search('folder:("([^"]*)")?$',query)

        if(m):
            base = m.group(2)
            base = os.path.join(self.filemgr.root,base)
        else:
            base = self.filemgr.root
        self.builder.get_object("scandialog_folder").set_text(base);


        # Reset progress bar and re-enable the execute button for this run.
        prg = self.builder.get_object("scandialog_progress")
        prg.hide()
        prg.set_pulse_step(0.01)
        btn = self.builder.get_object("scandialog_execute")
        btn.set_sensitive(True)

        dialog = self.builder.get_object("scandialog")
        dialog.run()

    def action_scandialog_response(self,dialog,response_id):
        """Dialog response handler: negative ids cancel, otherwise start a scan."""
        print response_id
        if response_id < 0:
            dialog.hide()
            # abort any running scan:
            self.filemgr.index.scan_stop()
            return

        prg = self.builder.get_object("scandialog_progress")
        prg.set_text('')
        prg.show()
        btn = self.builder.get_object("scandialog_execute")
        btn.set_sensitive(False)

        base = self.builder.get_object("scandialog_folder").get_text();

        # Run the scan cooperatively on the GTK idle loop via a generator.
        self.filemgr.index.scan_start()
        scan = self.filemgr.index.scan_iterator(base,
                                                self.action_scan_loop,
                                                self.action_scan_exit)
        gobject.idle_add(scan.next)

    def action_scan_loop(self,fn,isimg):
        """Per-file scan callback: pulse the progress bar, show image names."""
        prg = self.builder.get_object("scandialog_progress")
        prg.show()
        prg.pulse()
        if isimg:
            prg.set_text(os.path.basename(fn))

    def action_scan_exit(self,isabort):
        """Scan-finished callback: close the scan dialog."""
        dialog = self.builder.get_object("scandialog")
        dialog.hide()
class ImageSynchronizer:
    """Synchronize the image streams of three stereo benches (front/rear/nav).

    Loads left/right camera metadata for each bench, then filters the streams:
    intrinsic_synchro() drops stereo pairs whose left/right stamps disagree,
    extrinsic_synchro() keeps only triples that are synchronized across the
    three benches.

    Fixes vs. the original:
      * all state is per-instance (the original kept mutable Metadata and
        OrderedDict objects as class attributes, shared by every instance);
      * intrinsic_synchro works on copies, so the raw*Stamps dicts keep the
        unfiltered data instead of being mutated through an alias;
      * extrinsic_synchro picks the queue to pop from via a dict instead of
        eval();
      * the export file is managed with `with` so it is closed on error.
    """

    def __init__(self, dataRootFolder="./"):
        """Load metadata for all three benches from `dataRootFolder`."""
        self.dataRootFolder = dataRootFolder

        self.dataFrontLeft  = Metadata()
        self.dataFrontRight = Metadata()
        self.dataRearLeft   = Metadata()
        self.dataRearRight  = Metadata()
        self.dataNavLeft    = Metadata()
        self.dataNavRight   = Metadata()

        # index -> StereoPairStamp, in acquisition order.
        self.rawFrontStamps = OrderedDict()
        self.rawRearStamps  = OrderedDict()
        self.rawNavStamps   = OrderedDict()

        # Filled by intrinsic_synchro().
        self.iSynchedFrontStamps = OrderedDict()
        self.iSynchedRearStamps  = OrderedDict()
        self.iSynchedNavStamps   = OrderedDict()

        # Filled by extrinsic_synchro().
        self.eSynchedFrontStamps = OrderedDict()
        self.eSynchedRearStamps  = OrderedDict()
        self.eSynchedNavStamps   = OrderedDict()

        self._load_bench("front_cam", self.dataFrontLeft, self.dataFrontRight, self.rawFrontStamps)
        self._load_bench("rear_cam",  self.dataRearLeft,  self.dataRearRight,  self.rawRearStamps)
        self._load_bench("nav_cam",   self.dataNavLeft,   self.dataNavRight,   self.rawNavStamps)

    def _load_bench(self, camFolder, left, right, stamps):
        """Parse left/right metadata of one bench and fill its stamp dict."""
        left.parse_metadata( self.dataRootFolder + camFolder + "/left/left_dataformat.txt",
                             self.dataRootFolder + camFolder + "/left/left_all_metadata.txt")
        right.parse_metadata(self.dataRootFolder + camFolder + "/right/right_dataformat.txt",
                             self.dataRootFolder + camFolder + "/right/right_all_metadata.txt")
        for i in range(len(left.timestamp)):
            stamps[left.index[i]] = StereoPairStamp(left.index[i], left.timestamp[i], right.timestamp[i])

    @staticmethod
    def _column(stamps, attr):
        """Return a (N, 1) numpy array of `attr` from each pair in `stamps`."""
        res = np.empty([len(stamps), 1])
        for i, pair in enumerate(stamps.values()):
            res[i] = getattr(pair, attr)
        return res

    def get_isynched_front_pair_stamps(self):
        """Left stamps of intrinsically synched front pairs, shape (N, 1)."""
        return self._column(self.iSynchedFrontStamps, 'leftStamp')

    def get_isynched_rear_pair_stamps(self):
        """Left stamps of intrinsically synched rear pairs, shape (N, 1)."""
        return self._column(self.iSynchedRearStamps, 'leftStamp')

    def get_isynched_nav_pair_stamps(self):
        """Left stamps of intrinsically synched nav pairs, shape (N, 1)."""
        return self._column(self.iSynchedNavStamps, 'leftStamp')

    def get_esynched_front_pair_stamps(self):
        """Left stamps of extrinsically synched front pairs, shape (N, 1)."""
        return self._column(self.eSynchedFrontStamps, 'leftStamp')

    def get_esynched_rear_pair_stamps(self):
        """Left stamps of extrinsically synched rear pairs, shape (N, 1)."""
        return self._column(self.eSynchedRearStamps, 'leftStamp')

    def get_esynched_nav_pair_stamps(self):
        """Left stamps of extrinsically synched nav pairs, shape (N, 1)."""
        return self._column(self.eSynchedNavStamps, 'leftStamp')

    # getting indexes
    def get_esynched_front_pair_indexes(self):
        """Image indexes of extrinsically synched front pairs, shape (N, 1)."""
        return self._column(self.eSynchedFrontStamps, 'index')

    def get_esynched_rear_pair_indexes(self):
        """Image indexes of extrinsically synched rear pairs, shape (N, 1)."""
        return self._column(self.eSynchedRearStamps, 'index')

    def get_esynched_nav_pair_indexes(self):
        """Image indexes of extrinsically synched nav pairs, shape (N, 1)."""
        return self._column(self.eSynchedNavStamps, 'index')

    @staticmethod
    def _intrinsic_filter(raw, tolerance):
        """Return a copy of `raw` without pairs exceeding the stamp tolerance."""
        synched = OrderedDict(raw)  # copy: deletions must not touch the raw dict
        for pair in list(synched.values()):
            # tolerance is in ms; stamps appear to be microseconds (hence *1000).
            if abs(pair.leftStamp - pair.rightStamp) > 1000 * tolerance:
                print("    Removing " + str(pair.index))
                del synched[pair.index]
        return synched

    # remove unsynchronized stereo pairs tolerance is expressed in milliseconds
    def intrinsic_synchro(self, tolerance=0):
        """Drop stereo pairs whose left/right stamps differ by > `tolerance` ms.

        Results go to the iSynched*Stamps dicts; raw*Stamps stay intact.
        """
        print("Stereo pairs intrisic synchronization :")

        print("Front cams :\n")
        self.iSynchedFrontStamps = self._intrinsic_filter(self.rawFrontStamps, tolerance)

        print("Rear cams :\n")
        self.iSynchedRearStamps = self._intrinsic_filter(self.rawRearStamps, tolerance)

        print("Nav cams :\n")
        self.iSynchedNavStamps = self._intrinsic_filter(self.rawNavStamps, tolerance)

    # remove stereo pairs to keep sync between stereo benches, tolerance is expressed in milliseconds
    def extrinsic_synchro(self, tolerance=75):
        """Keep only pair triples synchronized across the three benches.

        A triple is kept when each bench's left stamp lies within `tolerance`
        milliseconds of the triple's median; otherwise the benches that are
        running ahead are popped and the comparison is retried.
        """

        def are_synched(stamps, tol):
            """True when all three stamps lie within `tol` of their median."""
            m = median(stamps)
            return abs(stamps[0] - m) < tol and abs(stamps[1] - m) < tol and abs(stamps[2] - m) < tol

        class SyncInfo:
            """Bookkeeping for one bench's head-of-queue stamp vs. the median."""

            def __init__(self, distToMedian, name, tol):
                self.stamp = distToMedian
                self.name = name
                self.toKeep = True
                self.synched = abs(self.stamp) <= tol

            def __repr__(self):
                return "SyncInfo()"

            def __str__(self):
                return str(self.stamp) + " " + self.name + " synched=" + str(self.synched) + " toKeep=" + str(self.toKeep)

        print("Stereo benches extrinsic synchronization :")
        # ms -> stamp units (stamps appear to be microseconds — see intrinsic).
        tolerance = 1000 * tolerance

        iFront = list(self.iSynchedFrontStamps.values())
        iRear  = list(self.iSynchedRearStamps.values())
        iNav   = list(self.iSynchedNavStamps.values())
        # Name -> queue, replacing the original eval(inf.name) lookup.
        queues = {'iFront': iFront, 'iRear': iRear, 'iNav': iNav}

        while len(iFront) > 0 and len(iRear) > 0 and len(iNav) > 0:
            heads = [iFront[0].leftStamp, iRear[0].leftStamp, iNav[0].leftStamp]
            if are_synched(heads, tolerance):
                self.eSynchedFrontStamps[iFront[0].leftStamp] = iFront[0]
                self.eSynchedRearStamps [iRear[0].leftStamp]  = iRear[0]
                self.eSynchedNavStamps  [iNav[0].leftStamp]   = iNav[0]
                del iFront[0]
                del iRear[0]
                del iNav[0]
            else:
                m = median(heads)
                syncInfo = [
                    SyncInfo(iFront[0].leftStamp - m, 'iFront', tolerance),
                    SyncInfo(iRear[0].leftStamp  - m, 'iRear',  tolerance),
                    SyncInfo(iNav[0].leftStamp   - m, 'iNav',   tolerance),
                ]

                # Sort syncInfo by (signed) distance to the median.
                syncInfo.sort(key=lambda x: x.stamp)

                if syncInfo[0].synched:
                    if syncInfo[2].synched:
                        # All three within tolerance contradicts are_synched() above.
                        raise Exception("Fatal error extrinsic_synchro")
                    else:
                        # Latest bench is ahead: drop the two earlier heads.
                        syncInfo[0].toKeep = False
                        syncInfo[1].toKeep = False
                else:
                    if syncInfo[2].synched:
                        # Only the earliest head lags: drop it.
                        syncInfo[0].toKeep = False
                    else:
                        # Nothing agrees: drop all three heads.
                        syncInfo[0].toKeep = False
                        syncInfo[1].toKeep = False
                        syncInfo[2].toKeep = False

                for inf in syncInfo:
                    if not inf.toKeep:
                        cam = list(inf.name)
                        cam[0] = " "
                        queue = queues[inf.name]
                        print("    Removing " + str(queue[0].index) + "".join(cam))
                        del queue[0]

    def export_extrinsic_synchronized_index_file(self, filename):
        """Write one line per synched triple: front, rear and nav image index."""
        front = list(self.eSynchedFrontStamps.values())
        rear  = list(self.eSynchedRearStamps.values())
        nav   = list(self.eSynchedNavStamps.values())

        with open(filename, "w") as exportFile:
            for i in range(len(front)):
                exportFile.write(str(front[i].index) + " " + str(rear[i].index) + " " + str(nav[i].index) + "\n")
Example #28
0
from Metadata import Metadata
from TestMetadata import TestMetadata

from Random_Multi import Random_Multi
from LinUCB_Disjoint_Multi import LinUCB_Disjoint_Multi
from TS_Lin_Multi import TS_Lin_Multi

import MetricsCalculator
import TestBuilder
import Util

# Bug fix: pd.date_range is used below but pandas was never imported in
# this script's import block.
import pandas as pd

# Campaigns included in this multi-campaign run.
campaign_ids = set([866128, 856805, 847460, 858140, 865041])
campaign_ids_str = ",".join([str(x) for x in campaign_ids])

meta = Metadata("LinUCB_Disjoint_Multi_Target",
                campaign_id=5,
                initialize_user_embeddings=False)
# NOTE(review): start is day-first ('15/8/2018') while end is zero-padded
# ('20/08/2018'); both parse to August 2018, but ISO dates would be unambiguous.
days = pd.date_range(start='15/8/2018', end='20/08/2018')

# NOTE(review): `days[-1] + 1` relies on legacy integer-add on a Timestamp;
# newer pandas requires pd.Timedelta(days=1) — confirm the pinned version.
algo = LinUCB_Disjoint_Multi(meta, campaign_ids, days[0], days[-1] + 1)

testsMeta = TestBuilder.basic_feature_target_tests2(meta, 6)

# Output/log destinations, keyed by campaign id and algorithm name.
output_path = "./Results/{0}/{1}_Feature.csv".format(meta.campaign_id,
                                                     meta.algo_name)
output_log_path = "./Log/{0}/{1}_Feature.csv".format(meta.campaign_id,
                                                     meta.algo_name)
output_campaign_log_path = "./Log/{0}/Campaign_Log_Feature.csv".format(
    meta.campaign_id)

output_column_names = False
Example #29
0
from TestMetadata import TestMetadata

from Random import Random
from Regression import Regression
from LinUCB_Disjoint import LinUCB_Disjoint
from TS_Lin import TS_Lin
from GP_Clustered import GP_Clustered
from NN import NN

import MetricsCalculator
import TestBuilder
import Util

campaign_ids = [866128, 856805, 847460, 858140, 865041]
for campaign_id in campaign_ids:
    meta = Metadata("LinUCB_Disjoint", campaign_id)
    algo = LinUCB_Disjoint(meta)

    testsMeta = TestBuilder.get_lin_test(meta, 12)
    output_path = "./Results/{0}/{1}_Metrics.csv".format(
        meta.campaign_id, meta.algo_name)

    output_column_names = False
    if not Path(output_path).is_file():
        output = open(output_path, "w")
        output_column_names = True
    else:
        output = open(output_path, "a")

    # specials = read_csv("{0}//special_users.csv".format(meta.path), header=0)
    # specials = set(specials["UserHash"].values)