def loadAlterSQL(self, dbConnection):
    fileList = os.listdir(ResourceLocation.AlterDatabaseSQLs.value)
    if len(fileList) > 0:
        print("Choose the file number:\n")
        foundSQLScript = False
        index = 0
        for fileName in fileList:
            # Number only the SQL files so the prompt matches the order returned by getFilePaths
            if "sql" in fileName:
                index += 1
                print(str(index) + ".) " + fileName + "\n")
                foundSQLScript = True
        if foundSQLScript:
            chosenFileIndex = input()
            filePaths = self.getFilePaths(
                fileList, "sql", ResourceLocation.AlterDatabaseSQLs.value)
            try:
                filePath = filePaths[int(chosenFileIndex) - 1]
            except Exception:
                er = Error("You have chosen an invalid file number as input.",
                           traceback.format_exc())
                er.handleError()
            else:
                file = FileUtil(filePath, "r")
                self.executeAndCommitToDatabase(dbConnection, file)
        else:
            print("Sorry, no SQL files exist in the folder.")
    else:
        print("Sorry, no SQL files exist in the folder.")
def init(use_base_dir=False):
    args = parser.parse_args()
    setup = ExperimentSetups.parse(args.setup)
    dirname = fileutil.base_dir(args.dest_dir, setup.name, args.max_quantifier_length, args.model_size) if use_base_dir \
        else fileutil.run_dir(args.dest_dir, setup.name, args.max_quantifier_length, args.model_size, args.name)
    file_util = FileUtil(dirname)
    return args, setup, file_util
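For orientation, a minimal sketch of how this helper might be called from one of the experiment scripts; the unpacking pattern follows its return value and the load_dill call mirrors Example #15 (this usage is an assumption, not taken from the source project):

# Hypothetical caller of init(); assumes the module-level `parser` is already configured
# as in the argparse snippets further down.
args, setup, file_util = init(use_base_dir=True)
expressions = file_util.load_dill('expressions.dill')  # same load as in Example #15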
Example #3
File: pipes.py Project: oprema/OpenDoor
def main():
    # Create directory if it doesn't exist
    futil = FileUtil(".opendoord")

    # Get access to the database handler
    logger = Logger.get(verbose=True)
    db = Sqlite(futil.path + "/opendoor.db", logger)
    port = Port(logger)
    pipes = Pipes(logger, port, db)

    i = 0
    logger.debug("Send commands via pipe with 10 sec delay")
    while i < 100:
        i += 1
        pipes.send_to_app("OPEN DOOR\n", i)
        logger.debug("OPEN DOOR")
        time.sleep(10)
        i += 1
        pipes.send_to_app("DOORBELL PRESSED\n", i)
        logger.debug("DOORBELL PRESSED")
        time.sleep(10)
        i += 1
        pipes.send_to_app("DOW RING WITH AUTO OPEN\n", i)
        logger.debug("DOW RING WITH AUTO OPEN")
        time.sleep(10)
Example #4
 def writeLogs(self, fileLocation, message, content, access, doComplete):
     # Append a timestamped message and its content to the log file
     t = time.localtime()
     logCommands = FileUtil(fileLocation, access)
     logCommands.writeFileContent(
         time.strftime("%H:%M:%S", t) + " : " + message + "\n" + content +
         "\n")
     if doComplete:
         logCommands.writeFileContent("\n" + LogMessage.Seperator.value +
                                      "\n")
 def createSchema(self, dbConnection, schemaName):
     utility = Utility()
     self.createDataTablesSQLScript(schemaName)
     sqlRead = FileUtil(ResourceLocation.DatabaseScript.value, "r")
     utility.writeLogs(ResourceLocation.LogFileLocation.value, "",
                       LogMessage.DBDatabaseCreation.value, "a", False)
     self.executeAndCommitToDatabase(dbConnection, sqlRead)
     utility.writeLogs(ResourceLocation.LogFileLocation.value, "",
                       LogMessage.Completed.value, "a", True)
 def readConfig(self):
     # Read the file and config the connection variables
     file = FileUtil(self.configFileName, "r")
     dbConfiguration = file.getFileContent()
     config = dbConfiguration[int(self.configLine)]
     configArray = config.split(self.configDelimiter)
     self.hostName = configArray[0]
     self.portValue = configArray[1]
     self.dbName = configArray[2]
     self.userName = configArray[3]
     self.password = configArray[4]
     self.schemaName = configArray[5]
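The config file itself is never shown in these snippets, but readConfig implies a single delimiter-separated line with six fields in a fixed order. A hypothetical line, assuming a comma as configDelimiter (the values are illustrative only):

# Hypothetical config line that readConfig would parse, assuming "," as configDelimiter:
#   localhost,5432,analytics,db_user,db_password,public
# which maps, in order, to hostName, portValue, dbName, userName, password, schemaName.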
Example #7
def main():
  global log, gpio, lirc, ping, db, gammu

  # Parse arguments, use file docstring as a parameter definition
  args = docopt.docopt(__doc__, version='0.1a')
  #print args

  # Create directory if it doesn't exist
  futil = FileUtil("/home/pi/.resq-pi")
  gammu = None

  # Create a logger
  if args["--verbose"]:
    log = Logger.get(verbose = True)
  else:
    log = Logger.get(futil.path + "/resq-pi.log", False)
  log.info("*** Start ResQ-Pi ***")

  # Be sure we have root privileges
  if os.geteuid() != 0:
    exit("You need to have root privileges. Exiting.")
    
  # Ctrl-C and SIGTERM handler
  signal.signal(signal.SIGINT, signal_handler)
  signal.signal(signal.SIGTERM, signal_handler)

  # Get access to the resq-pi database
  db = ResqStore(futil.path + "/resq-pi.db")
  if not db.exist():
    log.info("No database found. Will create one.")
    db.create_tables() # if not already created
    db.reset_tables()  # and initialize

  # Initialize GPIO, Lirc, GooglePing ...
  gpio = ResqGpio()
  gpio.led(0, False) # all LEDs off
  lirc = ResqLirc(log, gpio)
  ping = GooglePing(log, gpio)

  test = False
  if args["--alarm"]:
    test = True

  if args["--resetdb"]:
    log.info("Reset database")
    db.reset_tables()
  elif args["--resetpass"]:
    log.info("Reset password")
    db.reset_password()
  elif args["--credits"]:
    get_sms_credits()
  else:
    resqpi_endless_loop(test)
 def getTableHeader(self, fileList):
     utility = Utility()
     filePaths = self.getFilePaths(fileList, "csv",
                                   ResourceLocation.DatabaseLocation.value)
     utility.writeLogs(ResourceLocation.LogFileLocation.value,
                       ("\n").join(filePaths), LogMessage.Files.value, "a",
                       False)
     tableHeaders = []
     for filePath in filePaths:
         fileHeader = ((FileUtil(filePath, "r")).getFileContent())[0]
         tableHeaders.append(fileHeader)
     return tableHeaders
Example #9
def main():
    global log, port, pipes, db, test_mode

    # Be sure we have root privileges
    if os.geteuid() != 0:
        exit("You need to have root privileges. Exiting.")

    # Ctrl-C and SIGTERM handler
    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    # Parse arguments, use file docstring as a parameter definition
    args = docopt.docopt(__doc__, version='0.1a')

    # Create directory if it doesn't exist
    futil = FileUtil(".opendoord")

    print("Path: %s, args: %s" % (futil.path, args))
    # Create a logger
    if args["--console"]:
        log = Logger.get(verbose=True)
    else:
        log = Logger.get(futil.path + "/opendoor.log",
                         verbose=args["--verbose"])
    log.info("*** Start OpenDoor ***")

    # Get access to the database handler
    db = Sqlite(futil.path + "/opendoor.db", log)
    if not db.exist():
        log.info("No database found. Will create one.")
        db.create_tables()  # if not already created
        db.reset_tables()  # and initialize

    if args["--test"]:
        test_mode = True

    # Let's initialize the gpio's
    port = Port(log, test_mode)

    # Open the pipes
    pipes = Pipes(log, port, db)

    if args["--resetdb"]:
        db.reset_tables()
        log.info("Database has been reset.")
    else:
        log.info("Watch door events in an endless loop.")
        opendoor_endless_loop()
 def processToExtractData(self, dbConnection):
     print("Processing....")
     utility = Utility()
     queriesConfigList = self.getConfiguration(
         ProcessLocation.ProcessConfigExtractData.value)
     queries = self.createQueries(queriesConfigList, "simple")
     utility.writeCommandSqlScript(
         ("\n").join(queries),
         ProcessLocation.ProcessResultExtractDataQuery.value)
     queriesIndex = 1
     for query in queries:
         fileName = (ProcessLocation.ProcessResultExtractData.value +
                     "Query" + str(queriesIndex) + ".csv")
         file = FileUtil(fileName, "w")
         dbConnection.copyToCSVs(file.getFile(), query, ",")
         queriesIndex += 1
     print("Processing Completed.")
 def loadDataFromCSV(self, dbConnection, schemaName):
     print("Loading Data....")
     utility = Utility()
     fileList = os.listdir(ResourceLocation.DatabaseLocation.value)
     filePaths = self.getFilePaths(fileList, "csv",
                                   ResourceLocation.DatabaseLocation.value)
     for filePath in filePaths:
         file = FileUtil(filePath, "r").getFile()
         # The table name is taken from the CSV file name (third path component, extension stripped)
         tableName = (filePath.split("/")[2]).split(".")[0]
         dbConnection.copyFromCSVs(file, schemaName + "." + tableName, ",")
         dbConnection.commitTransaction()
     dbConnection.closeDBConnection()
     utility.writeLogs(ResourceLocation.LogFileLocation.value, "",
                       LogMessage.Completed.value, "a", True)
     print("Data Loaded.")
Example #12
parser.add_argument('max_quantifier_length', type=int)
parser.add_argument('model_size', type=int)
parser.add_argument('max_words', type=int)
parser.add_argument('comp_strat')
parser.add_argument('inf_strat')
parser.add_argument('--sample', type=int, default=None)
parser.add_argument('--dest_dir', default='results')
parser.add_argument('--processes', default=4, type=int)
parser.add_argument('--name', default='run_0')

args = parser.parse_args()

setup = ExperimentSetups.parse(args.setup)

file_util = FileUtil(
    fileutil.run_dir(args.dest_dir, setup.name, args.max_quantifier_length,
                     args.model_size, args.name))

universe = Generator.generate_simplified_models(args.model_size)

if args.inf_strat == 'exact':
    informativeness_measurer = InformativenessMeasurer(len(universe))
elif args.inf_strat == 'simmax':
    informativeness_measurer = SimMaxInformativenessMeasurer(universe)
else:
    raise ValueError('{0} is not a valid informativeness strategy.'.format(
        args.inf_strat))

if args.comp_strat == 'wordcount':
    complexity_measurer = WordCountComplexityMeasurer(args.max_words)
elif args.comp_strat == 'wordcomplexity':
 def getConfiguration(self, fileLocation):
     fileRead = FileUtil(fileLocation, "r")
     return fileRead.getCSVReader()
Example #14
#coding:utf-8
__author__ = 'jason'
# Extract the content starting at a given character from a log file, then write it to a new file
from fileutil import FileUtil

path = "D:\\techworkspace\\python\\domainHandler\\"
print('start!')
with open(path + 'parent-cn.txt', 'r', encoding="utf-8") as fp:
    for line in fp:
        s = line.find('"')
        if s > -1:
            result = line[s:]

            # write content
            fileUtil = FileUtil(path, result)  # Creating a new FileUtil for every line? Is there another way to pass this in?
            fileUtil.write_content()
        # else:
        #     print('not found --> ' + line)
print('completed!')
Example #15
parser = argparse.ArgumentParser(description="Generate Quantifiers")
parser.add_argument('setup', help='Path to the setup json file.')
parser.add_argument('max_quantifier_length', type=int)
parser.add_argument('model_size', type=int)
parser.add_argument('--dest_dir', default='results')
parser.add_argument('--processes', default=4, type=int)

args = parser.parse_args()

processes = args.processes
setup = ExperimentSetups.parse(args.setup)
max_quantifier_length = args.max_quantifier_length
model_size = args.model_size

file_util = FileUtil(
    fileutil.base_dir(args.dest_dir, setup.name, max_quantifier_length,
                      model_size))

folderName = "{0}/{1}_length={2}_size={3}".format(args.dest_dir, setup.name,
                                                  max_quantifier_length,
                                                  model_size)

processpool = ProcessPool(nodes=processes)

expressions = file_util.load_dill('expressions.dill')

complexities = processpool.map(
    lambda ex: setup.measure_expression_complexity(ex, max_quantifier_length),
    expressions)

file_util.dump_dill(complexities, 'expression_complexities.dill')
Example #16
 def writeCommandSqlScript(self, content, fileLocation):
     fileTo = FileUtil(fileLocation, "w")
     fileTo.writeFileContent(content)