Code Example #1
File: GetBisqueInfo.py  Project: drmaize/compvision
def GetBisqueInfo():
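    # Look up the microImageSet row that corresponds to the image file passed
    # on the command line; return the row, or -1 if no entry exists yet.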

    if len(sys.argv) != 2:
        print "Usage: python GetBisqueInfo.py <image_file>"

    else:

        image_file = sys.argv[1].replace('\'', '').encode("utf8")

        # Setup BisqueHandlers
        script_path = os.path.dirname(os.path.realpath(__file__))
        config_path = os.path.join(script_path, ".ConfigOptions")
        uh = UploadHandler(config_file=config_path, debug=True)
        dbh = DBHandler(config_file=config_path, debug=True)

        #######################################################
        ## 1) Check to see all parameters are included for DB #
        #######################################################

        # Set the MicroImageSet DB info
        microImageSet_table = 'microImageSets'
        microImageSet_key = 'microImageSet_id'
        microImageSet_headers = dbh.get_columns(microImageSet_table)

        # Get info from filename
        file_basename = os.path.basename(image_file)
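        # Example basename (same convention as UploadToBisque below): e013SLBp01wA1x20_1506111930rc001.ome.tif
        #   groups 1-5 -> microImage_id, group 2 -> dataset name, group 6 -> 5-char image channel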
        regex = re.compile('(e)(.*?)(p)(.*?)(_.*?)(.....)\.')
        matches = regex.match(file_basename)
        dataset_name = matches.group(2)
        microImage_id = matches.group(1) + matches.group(2) + matches.group(
            3) + matches.group(4) + matches.group(5)
        reconstructedImage = microImage_id
        imageChannel = matches.group(6)
        microImageSet_id = reconstructedImage + imageChannel

        ############################################################
        ## 2) Search for this entry in the MicroImageSets Database #
        ############################################################

        # Search for existing entry
        search_dict = {microImageSet_key: microImageSet_id}
        try:
            print "Searching for: ", search_dict
            microImageSet_row = dbh.search_col(microImageSet_table,
                                               search_dict)[0]
            print "=> microImageSet entry", microImageSet_row
            return microImageSet_row
        except IndexError:
            # Entry doesn't exist; report it and return -1
            print ">>> microImageSet entry doesn't exist... returning -1"
            return -1
Code Example #2
def main():
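    # For each image in the directory, look up its row in the microImage table
    # (keyed by the sample id parsed from the filename) and build the metadata
    # dict that would accompany a Bisque upload.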

    if len(sys.argv) != 3:
        print "Usage: python UploadFromDB.py <experiment> <path_to_directory>"
    else:
        dataset_name = sys.argv[1]
        path = sys.argv[2]
        files = [
            f for f in os.listdir(path)
            if os.path.isfile(os.path.join(path, f)) and not f.startswith('.')
        ]

        uh = UploadHandler(config_file=".ConfigOptions", debug=True)
        dbh = DBHandler(config_file=".ConfigOptions", debug=True)

        table = 'microImage'
        p_key = 'sample'
        col_headers = dbh.get_columns(table)

        reg = re.compile('e(.*?)x.._')
        uri_list = []
        for index, f in enumerate(files):
            print "Uploading file: " + str(f) + " (" + str(
                index + 1) + " of " + str(len(files)) + ")"
            matches = reg.match(f)
            col = dbh.search_col(table, p_key, matches.group(1))

            print "\n\n========================"
            print path, f
            print len(col), col
            ri = raw_input()

            col = col[0]

            metadata = {}
            for i in range(len(col_headers)):
                metadata[col_headers[i]] = str(col[i])
            metadata['experiment'] = dataset_name

            #retval = uh.upload_image(str(os.path.join(path, f)), metadata=metadata)
            #uri_list.append((f,retval))
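            # NOTE: with the upload call above commented out, uri_list stays
            # empty and the *_uris.txt file written below will be blank.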
            print ">>>"
            print metadata

            ri = raw_input()

        print uri_list
        with open(dataset_name + "_uris.txt", "w+") as fp:
            for (f, uri) in uri_list:
                print f, uri
                fp.write(f + "\t" + uri + "\n")
Code Example #3
File: main.py  Project: lllilllilllilili/mongoTobq
def main():
    # Pull every document from the "gsn" collection into a list
    cursor_list = DBHandler().find_item(None, "gsn", "gsn")
    cursor_l = []
    for doc in cursor_list:
        cursor_l.append(doc)
    # Insert the buffered rows into the BigQuery table
    errors = BigQueryIns_rows('hig-bigqueryproject.hig.fhig', cursor_l)
    if not errors:
        print("New rows have been added.")
    else:
        print("Encountered errors while inserting rows: {}".format(errors))
Code Example #4
 def find(self):
     # Return one list of field values per document in the note collection
     docs = DBHandler.find_item(noteDB.my_client, None, "notedb", "notecol")
     dbListAll = []
     dbList = []
     for dic in docs:
         for i in dic.values():
             dbList.append(i)
         dbListAll.append(dbList)
         dbList = []
     return dbListAll
Code Example #5
def UploadToBisque():
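    # Upload one image to Bisque, first making sure the inventory, microImage,
    # and microImageSet tables each contain a row for it.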

    if len(sys.argv) < 3:
        print "Usage: python UploadToBisque.py <image_file> <dataset_name> [<meta_tag> <meta_data> ...]"
    else:

        #Set semi-globals
        reconnect_time = 5
        reconnection_attempts = 5

        image_file = sys.argv[1].replace('\'', '').encode("utf8")
        dataset_name = sys.argv[2]
        inp_metadata = sys.argv[3:]

        if len(inp_metadata) % 2 != 0:
            print "Error: Metadata needs to be passed in pairs (%2 == 0)"
            return

        print "**************************"
        print "\nPreparing to upload file", image_file

        # Setup metadata
        metadata = {}
        for tag, data in pairwise(inp_metadata):
            metadata[str(tag)] = str(data.replace('\'', ''))

        # Setup BisqueHandlers
        script_path = os.path.dirname(os.path.realpath(__file__))
        config_path = os.path.join(script_path, ".ConfigOptions")
        uh = UploadHandler(config_file=config_path, debug=True)
        dbh = DBHandler(config_file=config_path, debug=True)
        tclient = TwitterClient(config_file=config_path)

        #######################################################
        ## 1) Check to see all parameters are included for DB #
        #######################################################

        #
        ## Example File Name:
        #
        ## e013SLBp01wA1x20_1506111930rc001.ome.tif
        #

        # Set the Inventory DB info
        inventory_table = 'inventory'
        inventory_key = 'sample'
        inventory_headers = dbh.get_columns(inventory_table)

        # Set the MicroImage DB info
        microImage_table = 'microImage'
        microImage_key = 'microImage_id'
        microImage_headers = dbh.get_columns(microImage_table)

        # Set the MicroImageSet DB info
        microImageSet_table = 'microImageSets'
        microImageSet_key = 'microImageSet_id'
        microImageSet_headers = dbh.get_columns(microImageSet_table)

        # Get info from filename
        file_basename = os.path.basename(image_file)
        regex = re.compile('(e)(.*?)(p)(.*?)(_.*?)(.....)\.')
        matches = regex.match(file_basename)
        dataset_name = matches.group(2)
        microImage_id = matches.group(1) + matches.group(2) + matches.group(
            3) + matches.group(4) + matches.group(5)
        reconstructedImage = microImage_id
        imageChannel = matches.group(6)
        microImageSet_id = reconstructedImage + imageChannel

        # Add in filename info to argument dictionary
        metadata['dataset'] = dataset_name
        metadata['microImage_id'] = microImage_id
        metadata['microImageSet_id'] = microImageSet_id
        metadata['reconstructedImage'] = reconstructedImage
        metadata['imageChannel'] = imageChannel

        # Add in Bisque Data
        metadata['bisqueURI'] = "NULL"
        metadata['bisqueText'] = "NULL"
        metadata['bisqueGobj'] = "NULL"

        print "\nMetadata: "
        for k, v in metadata.iteritems():
            print k + ": " + v

        # Combine lists w/o duplicates
        all_headers = list(
            set(inventory_headers + microImage_headers +
                microImageSet_headers))

        # Check that all headers are satisfied
        for header in all_headers:
            if header not in metadata.keys():
                print ">>> Error! Header " + header + " was not included... exiting"
                return

        print "All arguments are included!\n"

        #######################################################
        ## 2) Search for this entry in the Inventory Database #
        #######################################################

        # Search for existing entry
        search_dict = {inventory_key: metadata[inventory_key]}
        try:
            print "Searching for: ", search_dict
            inventory_row = dbh.search_col(inventory_table,
                                           search_dict,
                                           mode=4)[0]
            print "=> inventory entry", inventory_row

        except IndexError:

            # If entry doesn't exist, attempt to add entry
            print ">>> inventory entry doesn't exist... adding"
            row_list = []
            for header in inventory_headers:
                row_list.append(metadata[str(header)])
            #print row_list
            dbh.insert_into(inventory_table, row_list)

        print ""
        ########################################################
        ## 3) Search for this entry in the MicroImage Database #
        ########################################################

        # Search for existing entry
        search_dict = {microImage_key: metadata[microImage_key]}
        try:
            print "Searching for: ", search_dict
            microImage_row = dbh.search_col(microImage_table,
                                            search_dict,
                                            mode=4)[0]
            print "=> microImage entry", microImage_row

        except IndexError:

            # If entry doesn't exist, attempt to add entry
            print ">>> microImage entry doesn't exist... adding"
            row_list = []
            for header in microImage_headers:
                row_list.append(metadata[str(header)])
            #print row_list
            dbh.insert_into(microImage_table, row_list)

        print ""
        ############################################################
        ## 4) Search for this entry in the MicroImageSets Database #
        ############################################################

        # Search for existing entry
        search_dict = {microImageSet_key: metadata[microImageSet_key]}
        try:
            print "Searching for: ", search_dict
            microImageSet_row = dbh.search_col(microImageSet_table,
                                               search_dict)[0]
            print "=> microImageSet entry", microImageSet_row
        except IndexError:

            # If entry doesn't exist, attempt to add entry
            print ">>> microImageSet entry doesn't exist... adding"
            row_list = [
                microImageSet_id, reconstructedImage, imageChannel, 'NULL',
                'NULL', 'NULL'
            ]
            #print row_list
            dbh.insert_into(microImageSet_table, row_list)

        print ""

        ##################################################
        ## 5) Upload file and obtain retval (Bisque URI) #
        ##################################################

        uri = 'NULL'
        # uri should never get added as "NULL"
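        # Retry the upload up to reconnection_attempts times, waiting
        # reconnect_time seconds between attempts.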
        for attempts in range(reconnection_attempts):
            try:
                print "Uploading File", image_file
                print sys.path
                retval = uh.upload_image(image_file, metadata=metadata)
                uri = retval[1]
                print ">>> BisqueURI is ", uri
                print ""

                break
            except Exception, e:
                print ">>> Error: " + str(e)
                print ">>> Could not upload... trying again... (" + str(
                    attempts + 1) + "/" + str(reconnection_attempts) + ")"
                for i in range(reconnect_time):
                    print "Retrying in... ", reconnect_time - i
                    time.sleep(1)
        else:
Code Example #6
File: UploadToBisque.py  Project: drmaize/compvision
def UploadToBisque():
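	# Upload one image to Bisque, first making sure the inventory, macroImage,
	# microImage, and microImageSet tables each contain a row for it; an
	# existing microImageSet entry marks the upload as a duplicate.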

	if len(sys.argv) < 3:
		print "Usage: python UploadToBisque.py <image_file> <dataset_name> [<meta_tag> <meta_data> ...]"
	else:
	
		#Set semi-globals
		reconnect_time = 5
		reconnection_attempts = 5
	
		image_file = sys.argv[1].replace('\'','').encode("utf8")
		dataset_name = sys.argv[2]
		inp_metadata = sys.argv[3:]
		
		if len(inp_metadata) % 2 != 0:
			print "Error: Metadata needs to be passed in pairs (%2 == 0)"
			return
		
		print "**************************"
		print "\nPreparing to upload file", image_file
		
		# Setup metadata
		metadata = {}
		for tag, data in pairwise(inp_metadata):
			metadata[str(tag)] = str(data.replace('\'',''))
		
		# Setup BisqueHandlers
		script_path = os.path.dirname(os.path.realpath(__file__))
		config_path = os.path.join(script_path, ".ConfigOptions")
		uh = UploadHandler(config_file=config_path, debug=True)
		dbh = DBHandler(config_file=config_path, debug=True)
		rm = ResourceManager(config_file=config_path, debug=True)
		tclient = TwitterClient(config_file=config_path)
		
		#######################################################
		## 1) Check to see all parameters are included for DB #
		#######################################################

		# Set the Inventory DB info
		inventory_table = 'inventory'
		inventory_key = 'sample'
		inventory_headers = dbh.get_columns(inventory_table)
		
		# Set the MacroImage DB info
		macroImage_table = 'macroImage'
		macroImage_key = 'macroImage_id'
		macroImage_headers = dbh.get_columns(macroImage_table)
		
		# Set the MicroImage DB info
		microImage_table = 'microImage'
		microImage_key = 'microImage_id'
		microImage_headers = dbh.get_columns(microImage_table)
		
		# Set the MicroImageSet DB info
		microImageSet_table = 'microImageSets'
		microImageSet_key = 'microImageSet_id'
		microImageSet_headers = dbh.get_columns(microImageSet_table)
		
		# Get info from filename (Ex: e013SLBp01wA1x20_1506111930rc001.ome.tif)
		file_basename = os.path.basename(image_file)
		regex = re.compile('(e)(.*?)(p)(.*?)(x)(.*?)(_.*?)(.....)\.')
		matches = regex.match(file_basename)
		dataset_name = matches.group(2)
		macroImage_id = matches.group(1) + matches.group(2) + matches.group(3) + matches.group(4)
		microImage_id = macroImage_id + matches.group(5) + matches.group(6) + matches.group(7)
		reconstructedImage = microImage_id
		imageChannel = matches.group(8)
		microImageSet_id = reconstructedImage + imageChannel
		
		# Add in filename info to argument dictionary
		metadata['dataset'] = dataset_name
		metadata['macroImage_id'] = macroImage_id
		metadata['microImage_id'] = microImage_id
		metadata['microImageSet_id'] = microImageSet_id
		metadata['reconstructedImage'] = reconstructedImage
		metadata['imageChannel'] = imageChannel
		
		# Add in filesize
		fileBytes = os.path.getsize(image_file)
		fileSize = '{0:.3f}'.format(fileBytes/1024.0/1024.0)
		fileSize += ' MB'
		metadata['file_size'] = fileSize
		
		# Add in Bisque Data
		metadata['bisqueURI'] = "NULL"
		metadata['bisqueText'] = "NULL"
		metadata['bisqueGobj'] = "NULL"
		
		print "\nMetadata: "
		for k,v in metadata.iteritems():
			print k + ": " + v
		
		# Combine lists w/o duplicates
		all_headers = list(set(inventory_headers + macroImage_headers + microImage_headers + microImageSet_headers))
		
		# Check that all headers are satisfied
		for header in all_headers:
			if header not in metadata.keys():
				print ">>> Error! Header " + header + " was not included... exiting"
				return
				
		print "All arguments are included!\n"
		
		#######################################################
		## 2) Search for this entry in the Inventory Database #
		#######################################################
		
		# Search for existing entry
		search_dict = {inventory_key:metadata[inventory_key]}
		try:
			print "Searching for: ", search_dict
			inventory_row = dbh.search_col(inventory_table, search_dict, mode=4)[0]
			print "=> inventory entry", inventory_row
			
		except IndexError:
			
			# If entry doesn't exist, attempt to add entry
			print ">>> inventory entry doesn't exist... adding"
			row_list = []
			for header in inventory_headers:
				row_list.append(metadata[str(header)])
			dbh.insert_into(inventory_table, row_list)
		
		print ""
		
		########################################################
		## 3) Search for this entry in the MacroImage Database #
		########################################################
		
		# Search for existing entry
		search_dict = {macroImage_key:metadata[macroImage_key]}
		try:
			print "Searching for: ", search_dict
			macroImage_row = dbh.search_col(macroImage_table, search_dict, mode=4)[0]
			print "=> macroImage entry", macroImage_row
			
		except IndexError:
		
			# If entry doesn't exist, attempt to add entry
			print ">>> macroImage entry doesn't exist... adding"
			row_list = []
			for header in macroImage_headers:
				row_list.append(metadata[str(header)])
			dbh.insert_into(macroImage_table, row_list)
		
		print ""
		
		########################################################
		## 4) Search for this entry in the MicroImage Database #
		########################################################
		
		# Search for existing entry
		search_dict = {microImage_key:metadata[microImage_key]}
		try:
			print "Searching for: ", search_dict
			microImage_row = dbh.search_col(microImage_table, search_dict, mode=4)[0]
			print "=> microImage entry", microImage_row
			
		except IndexError:
		
			# If entry doesn't exist, attempt to add entry
			print ">>> microImage entry doesn't exist... adding"
			row_list = []
			for header in microImage_headers:
				row_list.append(metadata[str(header)])
			dbh.insert_into(microImage_table, row_list)
		
		print ""
		
		############################################################
		## 5) Search for this entry in the MicroImageSets Database #
		############################################################
		
		# To see if this was a duplicate upload
		duplicate = False
		
		# Search for existing entry
		search_dict = {microImageSet_key:metadata[microImageSet_key]}
		try:
			print "Searching for: ", search_dict
			microImageSet_row = dbh.search_col(microImageSet_table, search_dict)[0]
			print "=> microImageSet entry", microImageSet_row
			print "This is a duplicate image. Standard procedure is to upload it, generating a new Bisque URI,"
			print "update the database with the new URI, -THEN- delete the old resource on Bisque (using the old URI)."
			print "If this is not appropriate, then update the workflow as you see fit."
			old_uri = microImageSet_row[3]
			duplicate = True
		except IndexError:
			# If entry doesn't exist, attempt to add entry
			print ">>> microImageSet entry doesn't exist... adding"
			row_list = [microImageSet_id, reconstructedImage, imageChannel, 'NULL', 'NULL', 'NULL']
			dbh.insert_into(microImageSet_table, row_list)
			
		print ""
		
		##################################################
		## 6) Upload file and obtain retval (Bisque URI) #
		##################################################
		
		uri = 'NULL'; # uri should never get added as "NULL"
		for attempts in range(reconnection_attempts):
			try:
				print "Uploading File", image_file
				print sys.path
				retval = uh.upload_image(image_file, metadata=metadata)
				uri = retval[1];
				print ">>> BisqueURI is ", uri
				print ""

				break;
			except Exception,e:
				print ">>> Error: " + str(e)
				print ">>> Could not upload... trying again... (" + str(attempts+1) + "/" + str(reconnection_attempts) + ")"
				for i in range(reconnect_time):
					print "Retrying in... ", reconnect_time-i
					time.sleep(1)
		else:
Code Example #7
 def __str__(self):
     # __str__ must return a string, so join the timer documents into one
     docs = DBHandler.find_item(timerDB.my_client, None, "timerdb", "timercol")
     return "\n".join(str(x) for x in docs)
Code Example #8
 def delete(self, title):
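     # Delete the timer document whose "title" field matches the given title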
     DBHandler.delete_item_one(timerDB.my_client, {"title": title},
                               "timerdb", "timercol")
Code Example #9
 def insert(self, time, date):
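     # Note: the timer's time value is stored under the "title" key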
     DBHandler.insert_item_one(timerDB.my_client, {
         "title": time,
         "date": date
     }, "timerdb", "timercol")
Code Example #10
File: UploadFromDB.py  Project: drmaize/compvision
def main():
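	# Walk a directory of images, assemble each file's metadata from the
	# microImageSet, microImage, and inventory tables, upload it to Bisque,
	# and write the returned URI back into its microImageSet row.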

	if len(sys.argv) != 3:
		print "Usage: python UploadFromDB.py <dataset_name> <path_to_directory>"
	else:
		dataset_name = sys.argv[1]
		path = sys.argv[2]
		files = [f for f in os.listdir(path)
			if os.path.isfile(os.path.join(path, f)) 
			and not f.startswith('.') 
			and not f.startswith('exp')]
			
		if os.path.isfile("retry_files"):
			print "Files that were not uploaded exist... uploading now"
			with open("retry_files.txt", "r+") as fp:
				files = [line.strip() for line in fp]

		uh = UploadHandler(config_file=".ConfigOptions", debug=True)
		dbh = DBHandler(config_file=".ConfigOptions", debug=True)
		
		# Set the MicroImageSet DB info
		microImageSet_table = 'microImageSets'
		microImageSet_key_1 = 'reconstructedImage'
		microImageSet_key_2 = 'imageChannel'
		microImageSet_headers = dbh.get_columns(microImageSet_table)
		reg = re.compile('(e.*?)(.....)\.')
		
		# Set the MicroImage DB info
		microImage_table = 'microImage'
		microImage_key = 'reconstructedImage'
		microImage_headers = dbh.get_columns(microImage_table)
		
		# Set the Inventory DB info
		inventory_table = 'inventory'
		inventory_key = 'sample'
		inventory_headers = dbh.get_columns(inventory_table)
		
		# If the first file doesn't upload correctly, this will control (stop) the flow
		inp = 'n'
		
		for index, f in enumerate(files):
			for retries in range(2):
				try:
					print "Uploading file: " + str(f) + " (" + str(index+1) + " of " + str(len(files)) + ")"
					
					# Search for entry in microImageSet table (unique)
					matches = reg.match(f)
					reconstructedImage = matches.group(1)
					imageChannel = matches.group(2)
					search_dict = {microImageSet_key_1 : reconstructedImage, microImageSet_key_2 : imageChannel}
					row = dbh.search_col(microImageSet_table, search_dict)[0]
					
					# Setup metadata dictionary
					metadata = {}
					metadata['dataset'] = dataset_name
					
					# Insert ONLY image channel and microimagesets_id info from microimagesets table 
					metadata['imageChannel'] = str(row[0])
					metadata['microImageSet_id'] = str(row[2])

					# Search for information in microImage table
					search_dict = {microImage_key : reconstructedImage}
					row = dbh.search_col(microImage_table, search_dict)[0]
					sample = row[1] # Get sample from microImage table
					for i in range(len(microImage_headers)): 
						metadata[microImage_headers[i]] = str(row[i])
					
					# Finally, search for info in inventory table
					search_dict = {inventory_key : sample}
					row = dbh.search_col(inventory_table, search_dict)[0]
					for i in range(len(inventory_headers)): 
						metadata[inventory_headers[i]] = str(row[i])
					
					# Upload the file and obtain retval (Bisque URI)
					try:
						retval = uh.upload_image(str(os.path.join(path, f)), metadata=metadata)
						uri = retval[1];
					except:
						print "Error Uploading... trying again in 2 minutes"
						time.sleep(120)
						retval = uh.upload_image(str(os.path.join(path, f)), metadata=metadata)
						uri = retval[1];
					
					print "Bisque URI:", uri
					set_dict = {"bisqueURI":uri}
					where_dict = {microImage_key : reconstructedImage, microImageSet_key_2 : imageChannel}
					dbh.update_entry(microImageSet_table, set_dict, where_dict)
					row = dbh.search_col(microImageSet_table, where_dict)[0]
					print "New entry:", row
					
					print "Update Process Complete!"
					print "========================================"
					print ""
					
				except Exception,e:
					if retries < 1:
						print "=========================="
						print "Error: ", str(e)
						print "Trying one more time..."
						print "=========================="
					else:
						print "=========================="
						print "Dumping remaining files to retry_files and errors to error_dump"
						with open("retry_files","w+") as fp:
							for elem in files[index:]:
								fp.write(elem + "\n")
						with open("error_dump", "w+") as fp:
							fp.write(str(e))
						return
Code Example #11
 def __str__(self):
     # __str__ must return a string, so join the note documents into one
     docs = DBHandler.find_item(noteDB.my_client, None, "notedb", "notecol")
     return "\n".join(str(x) for x in docs)
Code Example #12
 def delete(self, title):
     DBHandler.delete_item_one(noteDB.my_client, {"title": title}, "notedb",
                               "notecol")
Code Example #13
 def insert(self, title, content, date):
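     # Store a new note document with title, content, and date fields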
     DBHandler.insert_item_one(noteDB.my_client, {
         "title": title,
         "content": content,
         "date": date
     }, "notedb", "notecol")