Example #1
def parallel_executor(node_list):
    # This is responsible for the parallel execution of the test cases
    # Get the node list from the calling function and use it to fill the initial queue for execution

    #logger.info("start time ",start_time)
    global exitFlag
    global threadID
    logger.info("Starting the parallel execution")
    #print thread_list
    for tName in thread_list:
        thread = myThread(threadID, tName, workQueue)
        thread.start()
        threads.append(thread)
        threadID += 1

    queueLock.acquire()
    for node in node_list:
        workQueue.put(node)
        queue_list.append(node)
    queueLock.release()

    # Busy-wait until every test case has been executed
    while len(finished_list) != len(label_map):
        pass

    #print finished_list
    #print dictionary_list.keys()
    # Notify threads it's time to exit

    exitFlag = 1

    # Wait for all threads to complete
    for thread in threads:
        thread.join()
    print "Exiting Main Thread"
Example #2
def process_data(threadName, q):
    while not exitFlag:
        queueLock.acquire()
        if not workQueue.empty():
            #queueLock.acquire()
            data = q.get()

            queueLock.release()

            script_text = "mvn -Dtest=" + inverse_map[int(data)] + " test"
            directory = "cd " + test_orch_object.test_dir

            #out = os.popen(directory+ ";" +script_text)
            #print threadName, list(workQueue.queue)

            # print out.readlines()[-6]
            print directory + ";" + script_text
            queueLock.acquire()

            #if int(data) in dictionary_list.keys():
            finished_list.append(int(data))
            for edges in dictionary_list[int(data)]:
                if edges not in finished_list and edges not in queue_list:
                    q.put(edges)
                    queue_list.append(edges)
            queueLock.release()

            logger.info("%s processing test case%s" % (threadName, data))
        else:
            queueLock.release()
Example #3
 def get_override(self, commit, commit_date):
     override = self._calculate_override(commit_date)
     if override:
         logger.info("%s: Overriding %s to use version %s" % (commit, self.name, override['version']))
         return self._get_override_format() % override['path']
     else:
         return self._get_default_format()
Example #4
    def node_list_generator(self, reverse_adj_list):
        # This function outputs the nodes that do not have any incoming arcs
        logger.info("Generating the node list")
        node_list = []
        for node in reverse_adj_list:
            if len(reverse_adj_list[node]) == 0:
                node_list.append(str(node))

        return node_list
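A standalone illustration of the same root-finding logic, on hypothetical data:

# Nodes whose reverse adjacency list is empty have no incoming arcs.
reverse_adj_list = {0: [], 1: [0], 2: [0, 1], 3: []}
node_list = [str(node) for node, preds in reverse_adj_list.items() if len(preds) == 0]
print(node_list)  # ['0', '3']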
Example #5
 def update_db(self):
     """Update the Laptop in the table laptop of the db"""
     try:
         query = config.QUERY_UPDATE_LAPTOP
         self.cur.execute(query, (self.price, self.rating, self.reviews, datetime.now(), self.name))
         self.con.commit()
         logger.info('Table laptop: updated -> ' + self.name)
     except Exception as e:
         logger.error(f'Updating table laptop: {e} ')
Example #6
 def add_to_db(self):
     """Add the Laptop to the table laptop of the db"""
     try:
         query = config.QUERY_INSERT_LAPTOP
         records = (self.name, self.price, self.rating, self.reviews, self.link, datetime.now(), None, self.valid)
         self.cur.execute(query, records)
         self.con.commit()
         logger.info('Table laptop: added -> ' + self.name)
     except Exception as e:
         logger.error(f'Adding record to table laptop: {e} ')
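The execute(query, records) calls above rely on driver-side parameter binding. A minimal runnable sqlite3 sketch of the same pattern (hypothetical table; only values can be bound this way, not table or column names):

import sqlite3

con = sqlite3.connect(":memory:")
cur = con.cursor()
cur.execute("CREATE TABLE laptop (name TEXT, price REAL)")
cur.execute("INSERT INTO laptop VALUES (?, ?)", ("x1-carbon", 999.0))  # values bound by the driver
con.commit()
print(cur.execute("SELECT * FROM laptop").fetchall())  # [('x1-carbon', 999.0)]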
Example #7
def Create_Vul_Report_Url(file_path):
    Url = []
    #print('----Create Url----')
    logger.info('Creating URLs')
    with open(file_path, 'r', encoding='utf-8') as F:
        lines = F.readlines()
        for index in lines:
            Url.append('https://www.cert.ssi.gouv.fr/avis/' +
                       index.replace('\n', '') + '/')
    return Url
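An illustration with hypothetical advisory IDs (the real IDs come from the input file):

lines = ['CERTFR-2021-AVI-001\n', 'CERTFR-2021-AVI-002\n']  # hypothetical file contents
print(['https://www.cert.ssi.gouv.fr/avis/' + line.replace('\n', '') + '/'
       for line in lines])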
Example #8
    def add_to_db(self, laptop_id):
        """Add the Review to the table reviews of the db"""
        try:
            query = config.QUERY_INSERT_REVIEWS
            records = (laptop_id, self.user_id, self.username, self.location, self.date, self.rank, self.profile, self.content, datetime.now())
            self.cur.execute(query, records)
            self.con.commit()
            logger.info('Reviews added for laptop:-> ' + str(laptop_id))

        except Exception as e:
            logger.error(f'Adding record to the table Review: {e}')
Example #9
	def unloadAll(self):
		if not self.unloaded:
			for pluginName, pluginObject in self.pluginObjects.items():
				try:
					pluginObject.unload()
					logger.info("Unloaded plugin: " + pluginName)
				except:
					exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
					logger.error("Uncaught exception occurred while unloading plugin: " + traceback.format_exc())
				
			self.unloaded = True
Example #10
 def update_db(self):
     """Update the Profile in the table profile of the db"""
     try:
         query = config.QUERY_UPDATE_PROFILE
         records = (self.ranking, self.review, self.votes, datetime.now(),
                    self.username)
         self.cur.execute(query, records)
         self.con.commit()
         logger.info('Table profile: updated -> ' + self.username)
     except Exception as e:
         logger.error(f'Updating table Profile: {e}')
Example #11
	def processManifest(self, manifest):
		if manifest.Enabled:
	
			if manifest.Provides is not None:

				if manifest.Provides not in self.providers:
					self.providers[manifest.Provides] = {}
				self.providers[manifest.Provides][manifest.SymbolicName] = manifest
	
			self.manifests[manifest.SymbolicName] = manifest
		else:
			logger.info("Plugin disabled: " + manifest.Name)	
Example #12
    def add_to_db(self):
        """Add the Profile to the table profile of the db"""
        try:
            query = config.QUERY_INSERT_PROFILE
            records = (self.username, self.ranking, self.review, self.votes,
                       datetime.now(), None, self.valid)
            self.cur.execute(query, records)
            self.con.commit()
            logger.info('Table profile: added -> ' + self.username)

        except Exception as e:
            logger.error(f'Adding to Profile table: {e}')
Example #13
def create_db():
    db = mysql.connector.connect(host='localhost',
                                 user='******',
                                 passwd='Eacded9')

    logger.info("\n*** Connection was created successfully. ***\n")

    conn = db.cursor()
    conn.execute(f"DROP DATABASE IF EXISTS {config.DB_FILENAME}")
    conn.execute(f"CREATE DATABASE IF NOT EXISTS {config.DB_FILENAME}")
    db.commit()
    logger.info("\n*** Database was created successfully. ***\n")
Example #14
def read_csv():
    # Reads the csv and returns the file object
    logger.info(" reading the dependency file")
    try:
        fname = test_orch_object.data_path + test_orch_object.file_name
        file = open(fname, 'r')
        csv_f = csv.reader(file, delimiter=',')
        return csv_f

    except:
        logger.error("Error in reading the input dependency file")

    return None
Example #15
    def reverse_adjacency(self, adjacency_list):
        # This builds the reverse adjacency list so it can be used to compute the nodes with no
        # incoming arcs, which seed the parallel execution

        logger.info("Generating the reverse adjacency")
        reverse_list = {}
        for node in adjacency_list:
            reverse_list[node] = []

        for node in adjacency_list:
            for edge in adjacency_list[node]:
                reverse_list[edge].append(node)
        return reverse_list
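A standalone illustration on a hypothetical graph, where the edge 0 -> 1 becomes the entry 1: [0] in the reverse list:

adjacency_list = {0: [1, 2], 1: [2], 2: []}
reverse_list = {node: [] for node in adjacency_list}
for node, edges in adjacency_list.items():
    for edge in edges:
        reverse_list[edge].append(node)
print(reverse_list)  # {0: [], 1: [0], 2: [0, 1]}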
Example #16
 def inverse_label_generator(self, label_map):
     # This inverts the entire dictionary
     #inv_map = {v: k for k, v in label_map.iteritems()}
     logger.info("Generating the inverse label map")
     reverse_map = {}
     for key, value in label_map.iteritems():
         split_text = key.split('.')
         last_text = key.rsplit('.', 1)[0]
         #print last_text + "#" + split_text[-1]
         new_text = last_text + "#" + split_text[
             -1]  # This step is done for Maven test execution from command line
         reverse_map[value] = new_text
     return reverse_map
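An illustration of the key transformation with a hypothetical test name; Maven's command line addresses a single test method as Class#method:

key = 'com.example.OrderTest.testCheckout'  # hypothetical fully qualified test name
print(key.rsplit('.', 1)[0] + '#' + key.split('.')[-1])
# com.example.OrderTest#testCheckout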
Example #17
    def label_generator(self, csv_object):
        # Take the graph and give labels to each of the nodes
        # This is particularly useful for computing strongly connected components
        logger.info("Generating the label map")
        label_map = {}
        key = 0  # This is the start value
        for row in csv_object:
            if row[0] not in label_map:
                label_map[row[0]] = key
                key = key + 1
            if row[1] not in label_map:
                label_map[row[1]] = key
                key = key + 1

        return label_map
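A standalone illustration on hypothetical CSV rows; each distinct test name gets the next integer label, in order of first appearance:

csv_object = [('A', 'B'), ('B', 'C'), ('A', 'C')]  # hypothetical dependency rows
label_map, key = {}, 0
for row in csv_object:
    for name in (row[0], row[1]):
        if name not in label_map:
            label_map[name] = key
            key += 1
print(label_map)  # {'A': 0, 'B': 1, 'C': 2}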
Example #18
def get_test_cases(label_map):
    # Takes the label map as input to find the test cases that have to be executed in sequence
    logger.info("Checking for sequential test cases")
    path = test_orch_object.test_dir
    fileName = "test-execution-order"
    complete_name = path + fileName
    f = open(complete_name, 'r')
    difference_list = []
    file_list = []
    for line in f:
        # The script section
        item = line.strip("\n")
        if item not in label_map.keys():
            file_list.append(item)

    return file_list
Example #19
    def add_to_db(self, laptop_id, link):
        """Add the Features to the table laptop_features of the db"""
        try:
            query = config.QUERY_INSERT_FEATURES
            records = (laptop_id, link, self.screen_size,
                       self.max_screen_resolution, self.brand,
                       self.card_description, self.brand_name,
                       self.item_weight, self.operating_system,
                       self.computer_memory_type, self.batteries, self.date,
                       datetime.now(), self.valid)
            self.cur.execute(query, records)
            self.con.commit()
            logger.info('Table features laptop: added -> ' + str(laptop_id))

        except Exception as e:
            logger.error(f'Adding Laptop features: {e}')
Example #20
    def adjacency_list_generator(self, csv_obj, label_map):
        # Create the adjacency list representation
        # Use the csv_obj and label_map as input to create the list representation
        logger.info("Adjacency list generator")
        adjacency_map = {}  # Initialize the map with empty lists
        #for row in csv_obj:
        #    adjacency_map[label_map[row[0]]] = []
        #    adjacency_map[label_map[row[1]]] = []

        for key in label_map.values():
            adjacency_map[key] = []

        csv_obj2 = read_csv()
        for row2 in csv_obj2:
            adjacency_map[label_map[row2[1]]].append(label_map[row2[0]])

        return adjacency_map
Example #21
 def __init__(self):
     logger.info("Initializing the orchestrator")
     parser = SafeConfigParser()
     parser.read(CONFIG_FILE)
     try:
         global thread_list
         self.file_name = parser.get(CONFIG_SECTION, "fileName")
         self.data_path = parser.get(CONFIG_SECTION, "data")
         self.test_dir = parser.get(CONFIG_SECTION, "test_dir")
         self.num_threads = parser.get(CONFIG_SECTION, "num_threads")
         self.test_execution_order = parser.get(CONFIG_SECTION,
                                                "test_order_file")
         # Initialize the thread list as per the number of threads in the configuration
         for index in range(0, int(self.num_threads), 1):
             thread_list.append("Thread-" + str(index))
         #print thread_list
     except:
         traceback.print_exc()
         logger.error("Error in reading configuration file")
Example #22
 def __init__(self):
     # Initialize the data configurations module
     logger.info("Initializing the orchestrator")
     parser = SafeConfigParser()
     parser.read(CONFIG_FILE)
     self.data_path = parser.get(CONFIG_SECTION, "data_path")
     self.trust_file = parser.get(
         CONFIG_SECTION, "trust_file")  # This will form the signed network
     self.rating_file = parser.get(
         CONFIG_SECTION, "rating_file")  # For the user opinion matrix O
     self.author_file = parser.get(
         CONFIG_SECTION, "author_file")  # For the author content matrix A
     self.json_path = parser.get(CONFIG_SECTION, "json_path")
     self.num_users = int(parser.get(CONFIG_SECTION, "users"))
     self.num_posts = int(parser.get(CONFIG_SECTION, "posts"))
     self.feature_path = parser.get(CONFIG_SECTION, "feature_path")
     self.model_path = parser.get(CONFIG_SECTION, "model_path")
     self.generated_data_path = parser.get(CONFIG_SECTION,
                                           "generated_data_path")
Example #23
    def sequential_test_set_generator(self, label_map):
        # This finds the test cases that need to be executed sequentially if they are not in the dependency file
        # Take label map as input and then compare
        logger.info("inside the sequential test case executor")
        reverse_dict = {value: key for key, value in label_map.iteritems()}
        file_name = self.test_dir + self.test_execution_order
        try:
            file_object = open(file_name, 'r')
            sequence_list = []  # To find the test cases that need to be executed in sequence
            for line in file_object:
                test_case = line.strip("\n")
                if test_case not in reverse_dict:
                    sequence_list.append(test_case)

            return sequence_list  # Returns the sequence of test cases that need to be executed

        except Exception as e:
            logger.error("Error in executing test cases in sequence ", e)
Example #24
def create_tables():
    db = connect_to_db()
    conn = db.cursor(buffered=True)

    try:
        conn.execute(config.TABLE1)
        db.commit()
        conn.execute(config.TABLE2)
        db.commit()
        conn.execute(config.TABLE3)
        db.commit()
        conn.execute(config.TABLE4)
        db.commit()
        conn.execute(config.KEY_TABLE1)
        conn.execute(config.KEY_TABLE2)
        db.commit()
        logger.info("\n*** Created tables successfully ***\n")
    except Exception as e:
        logger.error(f'{e} ')

    finally:
        db.close()
Example #25
def setupSyncService(usernames, varName, dirNameVariants):
  log.debug("Setting up sync service: {}".format(varName))
  searchDirs = []
  for rootDirName in dirNameVariants:
    searchDirs.append(getPath(usernames[0], "$HOME/{}".format(rootDirName), False))
    searchDirs.append(getPath(usernames[0], "$DOCUMENTS/{}".format(rootDirName), False))
    searchDirs.append(getPath(usernames[0], "{}/Users/{}/{}".format(os.getenv("HOMEDRIVE"), os.getenv("USERNAME"), rootDirName), False))
    searchDirs.append(getPath(usernames[0], "{}/Users/{}/Documents/{}".format(os.getenv("HOMEDRIVE"), os.getenv("USERNAME"), rootDirName), False))
    searchDirs.append(getPath(usernames[0], "{}/{}".format(os.getenv("HOMEDRIVE"), rootDirName), False))
    searchDirs.append(getPath(usernames[0], "{}/{}".format(os.getenv("SYSTEMDRIVE"), rootDirName), False))
    for driveLetter in string.ascii_uppercase:
      searchDirs.append(getPath(usernames[0], "{}:/{}".format(driveLetter, rootDirName), False))
      for username in usernames:
        searchDirs.append(getPath(username, "{}:/Users/{}/{}".format(driveLetter, username, rootDirName), False))
  for searchDir in searchDirs:
    log.debug("Checking: {}".format(searchDir))
    if os.path.exists(searchDir):
      config[varName] = searchDir
      log.debug("{}: \"{}\"".format(varName, searchDir))
      break
  if not os.path.exists(config[varName]):
    log.info("Could not find {} directory.".format(varName))
Example #26
 def run(self):
     while True:
         if self.urlQ.empty():
             break
         url = self.urlQ.get()
         logger.info(f'Connecting to {url}...')
         try:
             report_info = []
             page = requests.get(url,
                                 timeout=60,
                                 headers={'User-Agent': "Magic Browser"})
             tree = html.fromstring(page.content)
             #print(page.content)
             report_title = tree.xpath(
                 '/html/body/div[1]/div/article/section[1]/div[7]/div/div/table/tbody/tr[2]/td[2]/text()'
             )
             report_id = tree.xpath(
                 '/html/body/div[1]/div/article/section[1]/div[7]/div/div/table/tbody/tr[1]/td[2]/text()'
             )
             temp_affect = tree.xpath(
                 '/html/body/div/div/article/section[2]/div/div/ul[2]/li')
             if not temp_affect:
                 continue
             else:
                 temp_affect = check_element_number(temp_affect)
                 report_affect = '; '.join(temp_affect)
             temp_cve = tree.xpath(
                 '/html/body/div/div/article/section[2]/div/div/ul[3]/li')
             temp_cve = check_element_number(temp_cve)
             report_cve = '; '.join(temp_cve)
             #report_version = tree.xpath('/html/body/div/div/article/section[2]/div/div/ul[2]/li/text()')
             #print('title: '+report_title[0])
             if report_id and report_title and report_affect and report_cve:
                 report_info.append(report_id[0])
                 report_info.append(report_title[0])
                 report_info.append(report_affect)
                 report_info.append(report_cve)
             self.dataQ.put(report_info)
             report_info = []
             self.dataQ.task_done()
         except:
             #print(url + 'does not connect!')
             logger.info(f'Error: {url} could not be reached!')
     #print('----It is ready to Write!---------------')
     logger.info('Ready to write data!')
     row, col = 1, 0
     while True:
         if self.dataQ.empty():
             break
         info_list = self.dataQ.get()
         #print(info_list)
         with self.lock:
             if info_list:
                 self.sheet.write(row, col, info_list[0])
                 self.sheet.write(row, col + 1, info_list[1])
                 self.sheet.write(row, col + 2, info_list[2])
                 self.sheet.write(row, col + 3, info_list[3])
                 row += 1
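One caveat worth flagging: queue.Queue.task_done() is meant to be called by a consumer after a matching get(), so that join() can tell when every queued item has been fully processed; above it is called right after put(). A minimal sketch of the intended handshake:

import queue

q = queue.Queue()
q.put('item')
item = q.get()
q.task_done()  # signals that the item fetched by get() is done
q.join()       # returns once every put() has a matching task_done()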
Example #27
    def train_test(self):
        #training
        model = self.classifier
        criterion = nn.CrossEntropyLoss()
        optims = torch.optim.Adam(model.parameters(), lr=self.learning_rate)
        for epoch in range(self.num_epoch):
            for i, (images, labels) in enumerate(self.train_loader):
                images = images.to(device)
                labels = labels.to(device)

                outputs = model(images)
                loss = criterion(outputs, labels)

                optims.zero_grad()
                loss.backward()
                optims.step()

                if (i + 1) % 100 == 0:
                    logger.info(
                        f"Epoch [{epoch+1}/{self.num_epoch}], Step [{i+1}/{len(self.train_loader)}], Loss: {loss.item():.4}"
                    )

        #testing
        model.eval()
        with torch.no_grad():
            correct = 0
            total = 0
            for images, labels in self.test_loader:
                images = images.to(device)
                labels = labels.to(device)
                outputs = model(images)
                _, predicted = torch.max(outputs.data, 1)
                total += labels.size(0)
                correct += (predicted == labels).sum().item()
            logger.info(
                'Test Accuracy of the model on the 10000 test images: {} %'.
                format(100 * correct / total))
        logger.info('Save model')
        torch.save(model.state_dict(), 'model_save/model.ckpt')
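Design note: model.eval() switches layers such as dropout and batch normalization to inference behavior, and torch.no_grad() disables gradient tracking, so the test pass above is both correct and memory-efficient.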
Example #28
def setupLink(linkPath, targetPath):
  log.debug("\"{}\" -> \"{}\"".format(linkPath, targetPath))
  if os.path.islink(linkPath):
    log.debug("Skipping \"{}\" because it is already a symlink".format(linkPath))
    return False
  if not pathlib.Path(linkPath).exists():
    log.debug("Skipping \"{}\" because it doesn't exist".format(linkPath))
    return False
  copyPath = targetPath
  if pathlib.Path(targetPath).exists():
    remotePathBak = targetPath
    i=1
    while pathlib.Path(remotePathBak).exists():
      remotePathBak = "{}-bak-{}".format(targetPath, i)
      i+=1
    log.info("\"{}\" already exists. Will backup local files to \"{}\"".format(targetPath, remotePathBak))
    copyPath = remotePathBak
  parentPath = pathlib.Path(copyPath).parent.absolute()
  if not parentPath.exists():
    os.makedirs(parentPath)
  if os.path.isdir(linkPath):
    log.debug("\"{}\" is directory".format(linkPath))
    shutil.copytree(linkPath, copyPath)
    shutil.rmtree(linkPath)
    log.info("Linking \"{}\" -> \"{}\"".format(linkPath, targetPath))
    mklinkDir(linkPath, targetPath)
  elif os.path.isfile(linkPath):
    log.debug("\"{}\" is file".format(linkPath))
    shutil.copy(linkPath, copyPath)
    os.remove(linkPath)
    log.info("Linking \"{}\" -> \"{}\"".format(linkPath, targetPath))
    mklink(linkPath, targetPath)
  else:
    log.error("\"{}\" is neither symlink, directory, nor file".format(linkPath))
    return False
  return True
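A standalone illustration of the "-bak-N" backup-naming loop used above, with a hypothetical target path:

import pathlib

def backup_name(target):
    # Probe target, target-bak-1, target-bak-2, ... until a free name is found.
    candidate, i = target, 1
    while pathlib.Path(candidate).exists():
        candidate = "{}-bak-{}".format(target, i)
        i += 1
    return candidate

print(backup_name("/tmp/some-target"))  # unchanged if the path doesn't exist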
Example #29
# -*- coding: utf-8 -*-
from Logging import logger
from model.LeNet import lenet
from model.ResNet import ResNet18
from NetWork import Test_nn
import torch

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
batch_size = 64
learning_rate = 0.01
kernel = 5
in_channels = 3
num_epochs = 10


def Test_NetWork(classifier, batch_size, learning_rate, num_epochs):
    test_nn = Test_nn(classifier, learning_rate, num_epochs)
    test_nn.add_data(batch_size)
    test_nn.train_test()


if __name__ == '__main__':
    logger.level = 'debug'
    logger.addFileHandler(path='log/Cifar.log')
    model = lenet(in_channels, kernel).to(device)
    #model = ResNet18().to(device)
    Test_NetWork(model, batch_size, learning_rate, num_epochs)
    logger.info('The testing process is over!')
Example #30
from ConfigParser import SafeConfigParser
from Logging import logger

def read_conf():
   logger.debug("Reading properties file")

   CONFIG_FILE = "fluxa_properties.conf"
   config = SafeConfigParser()
   config.read(CONFIG_FILE)

   logger.debug("Properties file read")

   return config

if __name__ == '__main__':
   logger.info("Started")
   config = read_conf()
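   # JoinJson is assumed to be provided elsewhere in the project; its import is not shown in this excerpt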

   JoinJson(config).join_jsons()
   logger.info("Completed")
Example #31
if __name__ == '__main__':
    logger.level = 'debug'
    logger.addFileHandler(path='DataSet/TestLog.log')
    id_file_path = 'C:\\Users\\85670\\Desktop\\test_id.txt'
    Url_Queue = queue.Queue()  # Queue holding the vulnerability report URLs
    Data_Queue = queue.Queue()  # Queue holding the scraped report information
    Thread_List = []  # Thread list
    Thread_Num = 4  # Number of threads
    lock = threading.RLock()
    info_table = xlsxwriter.Workbook(
        'DataSet/French_Vulnerability_Report.xlsx')
    sheet = info_table.add_worksheet()  # New sheet
    bold = info_table.add_format({'bold': True})
    sheet.write('A1', 'Id', bold)
    sheet.write('B1', 'Title', bold)
    sheet.write('C1', 'Affect', bold)
    sheet.write('D1', 'CVE', bold)
    Url = Create_Vul_Report_Url(id_file_path)
    for index in Url:
        Url_Queue.put(index)
    for i in range(Thread_Num):
        thread = MyThread(Url_Queue, Data_Queue, lock, sheet)
        thread.start()
        Thread_List.append(thread)
    logger.info('The multi-threaded run starts')
    for thread in Thread_List:
        thread.join()
    info_table.close()
Example #32
	def __loadPlugin(self, symbolicName, dependency, dependList):
		#dependList holds the list of dependencies along the depth-first cross section of the tree. Used to find cycles.
		dependList = dependList[:]
		dependList.append(symbolicName)
		
		#get the manifest from the manifest list
		try:
			manifest = self.manifests[symbolicName]
		except KeyError:
			self.failed.append(symbolicName)
			raise UnsatisfiedDependency(symbolicName + ":" + dependency.dependencyString)
		
		#to check whether the dependency can actually be satisfied by loading this plugin
		if dependency is not None:
			if dependency.satisfied(manifest.SymbolicName, manifest.Version):
				pass #dependency is satisfied
			else:
				self.failed.append(manifest.SymbolicName)
				raise UnsatisfiedDependency(symbolicName + ":" + dependency.dependencyString + ". Version present is: " + manifest.Version)
		
		#preliminary checks done. Start actually loading the plugin now
		if manifest.SymbolicName not in self.plugins:
			
			#load the dependencies
			self.loadDependencies(manifest, dependList)
			
			#load the requests
			self.loadRequests(manifest, dependList)
			
			#import the plugin
			try:
				pluginModule = __import__(manifest.SymbolicName)
			except ImportError:
				exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()	
				logger.error('Uncaught exception occurred while importing the plugin.')
				logger.error(traceback.format_exc())
				raise MalformedPlugin(manifest.SymbolicName + ": failed to import.")
			
			#get the plugin class from the module
			try:
				#pluginObjectClass = pluginModule.__getattribute__(manifest.SymbolicName)
				pluginObjectClass = pluginModule.__getattribute__("Plugin")
			except AttributeError:
				self.failed.append(manifest.SymbolicName)
				raise MalformedPlugin(manifest.SymbolicName + ": class is not present.")
			
			#check that the plugin class is a subclass of Plugin
			if not issubclass(pluginObjectClass, Plugin):
				self.failed.append(manifest.SymbolicName)
				raise MalformedPlugin(manifest.SymbolicName + ": is not derived from Plugin.")
			
			#add the plugin object and plugin module to the correct dictionaries
			self.pluginObjects[manifest.SymbolicName] = pluginObjectClass()
			self.plugins[manifest.SymbolicName] = pluginModule
			
			#load the actual plugin
			self.pluginObjects[manifest.SymbolicName].load()
			logger.info("Loaded plugin: " + manifest.Name)
			self.reloadOrder.append(manifest.SymbolicName)
		else:
			pass
Example #33
    def uploadData(self):
        try:
            finalData = {}
            finalData["stationId"] = self.stationId
            finalData["subStationId"] = self.subStationId
            finalData["data"] = self.finalDataJson
            finalData["timestamp"] = str(datetime.now()).split(".")[0]

            jsonFileObj = open(self.dataDumpJsonFilename, "r+")
            try:
                jsonDumpData = json.load(jsonFileObj)
            except:
                jsonDumpData = []

            #print "FinalData: ", finalData
            #print "finaldatajson: ", str(finalData["data"])
            #print "previousData : ", str(self.previousDataJson)
            if self.compare == 1:
                if not self.compareJson(finalData["data"],self.previousDataJson):

                    try:
                        tempDataJson = copy.deepcopy(finalData["data"])
                        for everyItem in tempDataJson:
                            if self.previousDataJson!= '' and finalData["data"][everyItem] == self.previousDataJson[everyItem]:
                                finalData["data"].pop(everyItem)
                    except Exception as e:
                        traceback.print_exc()
                        print e
                        pass
                    print "Uploading: ", str(finalData).replace("ON", "T").replace("OFF", "F").replace("True", "T").replace("False", "F")

                    try:
                        resp = requests.post(self.serviceUrl + "updateCircuit",
                                         data=str(finalData).replace("ON", "T").replace("OFF", "F").replace("True", "T").replace("False", "F"))
                        logger.debug(resp)
                    except:
                        # store the finalDataJson until connectivity returns and gets pushed back to the server
                        pass

                    if self.connectServer():
                        MQTTresponse = self.mqttc.publish(self.topic, str(self.finalDataJson).replace("ON", "T").replace("OFF", "F").replace("True", "T").replace("False", "F"))
                        if not MQTTresponse.is_published():
                            self.mqttc.reconnect()
                            MQTTresponse = self.mqttc.publish(self.topic,
                                                              str(self.finalDataJson).replace("ON", "T").replace("OFF", "F").replace("True", "T").replace("False", "F"))
                        logger.debug(MQTTresponse.is_published())
                        self.mqttc.loop(2)  # timeout = 2s
                        self.previousDataJson = copy.deepcopy(finalData["data"])
                        print "Uploaded: ", MQTTresponse.is_published()
                        self.connectivity = True         # use this flag to check for internet connectivity
                    else:
                        print "Failed to connect to mqtt"
                        logger.debug("Error while uploading to the mqtt server")
                        logger.debug("Failed to connect to the mqtt server")
                        self.connectivity = False        # use this flag to check for internet connectivity
                        # in that case, store the finalDataJson until connectivity returns and gets pushed back to the server
                        # write JSON into the file
                        jsonFileObj.seek(0)
                        jsonDumpData.append(finalData)
                        json.dump(jsonDumpData, jsonFileObj)
                        fileUploaderObj.writeintoJSON(finalData, self.stationId)
                        self.uploadHistorical = True

            else:
                tempData = {}
                tempData["values"] = self.finalDataJson
                tempData["stationId"] = self.stationId
                tempData["subStationId"] = self.subStationId
                tempData["timeStamp"] = int(time.mktime(datetime.now().timetuple()))
                data = json.dumps(tempData).replace("ON", "T").replace("OFF", "F").replace("True", "T").replace("False", "F")

                print "data:", data
                if self.connectServer():
                    self.mqttc.publish(self.topic, data)
                    self.mqttc.loop(2)  # timeout = 2s
                    self.disconnectServer()
                    self.previousDataJson = copy.deepcopy(finalData["data"])
                    print ("Uploaded")
                    self.connectivity = True  # use this flag to check for internet connectivity
                else:
                    print "Failed to connect to mqtt"
                    logger.debug("Error while uploading to the mqtt server")
                    logger.debug("Failed to connect to the mqtt server")
                    self.connectivity = False  # use this flag to check for internet connectivity
                    # if device fails to transmit the data, save it into a JSON for future publication
                    # write JSON into the file
                    logger.info("Writing data into json")
                    jsonFileObj.seek(0)
                    jsonDumpData.append(tempData)
                    json.dump(jsonDumpData, jsonFileObj)
                    fileUploaderObj.writeintoJSON(finalData, self.stationId)
                    self.uploadHistorical = True         # True if data dump from json file is required

        except Exception as e:
            traceback.print_exc()
            print "Exception"
            logger.exception(e)
            try:
                self.mqttc.disconnect()
                self.connectionFailed = 1
            except Exception as e:
                traceback.print_exc()
                logger.exception(e)
                self.connectionFailed = 1
        finally:
            try:
                jsonFileObj.close()
            except:
                pass