Example #1
 def db(self):
     """ Create a database connection when a request handler is called
     and store the connection in the application object.
     """
     if not hasattr(self.application, 'db'):
         self.application.db = database.DatabaseManager()
     return self.application.db
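Example #1 is a method shown out of its class; judging by self.application, it likely comes from a Tornado-style request handler that caches one DatabaseManager per application. A minimal sketch of such a handler is below; the class names and the get() body are assumptions for illustration, not part of the original project.

import tornado.web

import database  # assumed to provide DatabaseManager, as in the example above


class BaseHandler(tornado.web.RequestHandler):

    def db(self):
        """ Create a database connection when a request handler is called
        and store the connection in the application object.
        """
        if not hasattr(self.application, 'db'):
            self.application.db = database.DatabaseManager()
        return self.application.db


class MainHandler(BaseHandler):

    def get(self):
        # Every handler instance reuses the single application-wide connection.
        self.write("db is %r" % self.db())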
Example #2
 def testNoSystemFiles(self):
     # This test shows that stubbing out sqlite classes is quite laborious.
     saved_s = database.m.Srv4FileStats
     srv4_file_stats_mock_factory = self.mox.CreateMockAnything()
     database.m.Srv4FileStats = srv4_file_stats_mock_factory
     q_mock = self.mox.CreateMockAnything()
     q_mock.use_to_generate_catalogs = self.mox.CreateMockAnything()
     database.m.Srv4FileStats.q = q_mock
     # We would prefer to use self.mox.CreateMock(models.OsRelease).  The
     # reason it doesn't work is that mox tries to inspect the class, and
     # sqlobject overrides the __get__ method of that class, where it tries to
     # verify that a connection to a database exists.  In our tests we don't
     # have a connection, and sqlobject throws an exception.
     osrel_mock = self.mox.CreateMockAnything()
     arch_mock = self.mox.CreateMockAnything()
     osrel_mock.short_name = 'AlienOS5.3'
     arch_mock.name = 'amd65'
     dm = database.DatabaseManager()
     result_mock = self.mox.CreateMockAnything()
     srv4_file_stats_mock_factory.select(0).AndReturn(result_mock)
     # This is where we return the number of system files (0)
     result_mock.count().AndReturn(0)
     self.mox.ReplayAll()
     self.assertRaises(database.DatabaseError, dm.VerifyContents,
                       osrel_mock, arch_mock)
     database.m.Srv4FileStats = saved_s
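The comments above explain why mox cannot simply CreateMock the sqlobject-backed class: mox inspects the class, and sqlobject's descriptors try to reach a database connection that does not exist under test. On Python 3, the standard library's unittest.mock can do the same stubbing with less bookkeeping, because mock.patch.object swaps the attribute without inspecting it and restores it automatically. This is a different technique from the mox test above and only a rough sketch; it assumes database.m.Srv4FileStats, DatabaseManager.VerifyContents and DatabaseError behave as in that test.

import unittest
from unittest import mock

import database  # assumed module layout, as in the test above


class NoSystemFilesTest(unittest.TestCase):

    def testNoSystemFiles(self):
        with mock.patch.object(database.m, 'Srv4FileStats') as stats_mock:
            # select(...).count() reports zero system files, as in the mox-based test.
            stats_mock.select.return_value.count.return_value = 0
            osrel_mock = mock.Mock()
            osrel_mock.short_name = 'AlienOS5.3'
            arch_mock = mock.Mock()
            arch_mock.name = 'amd65'
            dm = database.DatabaseManager()
            self.assertRaises(database.DatabaseError,
                              dm.VerifyContents, osrel_mock, arch_mock)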
Example #3
def populate_database(filename):
    with open(filename, "r") as fd:
        text = fd.read()
    start_time = time.time()
    with open("data/words.txt") as fd:
        ultra_word_list = fd.read().split()
    wordlist = nlp.nltk_word_tokenizer(text)
    wait_to_save = {}
    db = database.DatabaseManager()
    db.open()
    saved_count = 0
    for word in wordlist:
        word_lower = word.lower()
        if word_lower not in ultra_word_list:
            continue

        if db.query(word_lower) is not None:
            continue
        elif word_lower in wait_to_save:
            continue
        else:
            pos_pron = cambridge_crawler.crawler(word_lower)
            wait_to_save[word_lower] = ';'.join(pos_pron)
            saved_count += 1

    db.save_to_db(wait_to_save)
    db.close()

    text_wc = len(wordlist)
    end_time = time.time()
    elapsed_time = end_time - start_time
    print("Total Word Count: %d\nElapsed Time: %fs\nSaved Word Count: %d" %
          (text_wc, elapsed_time, saved_count))
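Example #3 (and Example #10 further down) drives DatabaseManager through a small word-cache API: open(), query(word), save_to_db(dict) and close(). That class is not shown on this page; the following is only a plausible sqlite3-backed stand-in, with an invented table name and schema, to illustrate how the 'pos1:pron1;pos2:pron2' strings built above could be stored and looked up.

import sqlite3


class DatabaseManager:
    """Hypothetical sketch of the word-cache interface used in Examples #3 and #10."""

    def __init__(self, path='words.db'):
        self.path = path
        self.conn = None

    def open(self):
        self.conn = sqlite3.connect(self.path)
        self.conn.execute(
            "CREATE TABLE IF NOT EXISTS words (word TEXT PRIMARY KEY, pos_pron TEXT)")

    def query(self, word):
        # Return the stored "pos1:pron1;pos2:pron2" string, or None for unknown words.
        row = self.conn.execute(
            "SELECT pos_pron FROM words WHERE word = ?", (word,)).fetchone()
        return row[0] if row else None

    def save_to_db(self, word_dict):
        # word_dict maps word -> "pos1:pron1;pos2:pron2", as built in the loop above.
        self.conn.executemany(
            "INSERT OR REPLACE INTO words (word, pos_pron) VALUES (?, ?)",
            word_dict.items())
        self.conn.commit()

    def close(self):
        self.conn.close()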
Example #4
def main():
    _daemon = BMP180()
    if len(sys.argv) > 1:
        if sys.argv[1] == "start":
            _daemon.start()
        elif sys.argv[1] == "stop":
            _daemon.quit()
            time.sleep(0.5)
            _daemon.stop()
        elif sys.argv[1] == "backup":
            import database
            with database.DatabaseManager() as db:
                db.BACKUP()
        elif sys.argv[1] == "restart":
            _daemon.quit()
            time.sleep(0.5)
            _daemon.stop()
            time.sleep(0.5)
            _daemon.start()
    else:
        print("Usage: start | stop | backup | restart")
Example #5
def main():
    parser = optparse.OptionParser(USAGE)
    parser.add_option("-d",
                      "--debug",
                      dest="debug",
                      default=False,
                      action="store_true",
                      help="Turn on debugging messages")
    parser.add_option(
        "-t",
        "--pkg-review-template",
        dest="pkg_review_template",
        help="A Cheetah template used for package review reports.")
    parser.add_option("-r",
                      "--os-release",
                      dest="osrel",
                      default="SunOS5.9",
                      help="E.g. SunOS5.9")
    parser.add_option("-a",
                      "--arch",
                      dest="arch",
                      default="sparc",
                      help="'i386' or 'sparc'")
    parser.add_option("-c",
                      "--catalog-release",
                      dest="catrel",
                      default="unstable",
                      help="E.g. unstable, dublin")
    parser.add_option("--replace",
                      dest="replace",
                      default=False,
                      action="store_true",
                      help="Replace packages when importing (importpkg)")
    parser.add_option("--profile",
                      dest="profile",
                      default=False,
                      action="store_true",
                      help="Turn on profiling")
    parser.add_option("--force-unpack",
                      dest="force_unpack",
                      default=False,
                      action="store_true",
                      help="Force unpacking of packages")
    options, args = parser.parse_args()
    if options.debug:
        logging.basicConfig(level=logging.DEBUG)
        logging.debug("Debugging on")
    else:
        logging.basicConfig(level=logging.INFO)
    if not args:
        raise UsageError("Please specify a command.  Se --help.")
    # SetUpSqlobjectConnection needs to be called after
    # logging.basicConfig
    configuration.SetUpSqlobjectConnection()
    command = args[0]
    args = args[1:]
    if command == 'show':
        subcommand = args[0]
        args = args[1:]
    elif command == 'pkg':
        subcommand = args[0]
        args = args[1:]
    else:
        subcommand = None

    md5_sums = args

    dm = database.DatabaseManager()
    # AutoManage is not what we want if the intention is to initialize
    # the database.
    if command != 'initdb':
        dm.AutoManage()

    if (command, subcommand) == ('show', 'errors'):
        for md5_sum in md5_sums:
            srv4 = GetPkg(md5_sum)
            res = m.CheckpkgErrorTag.select(
                m.CheckpkgErrorTag.q.srv4_file == srv4)
            for row in res:
                print row.pkgname, row.tag_name, row.tag_info, row.catrel.name, row.arch.name,
                print row.os_rel.short_name
    elif (command, subcommand) == ('show', 'overrides'):
        for md5_sum in md5_sums:
            srv4 = GetPkg(md5_sum)
            res = m.CheckpkgOverride.select(
                m.CheckpkgOverride.q.srv4_file == srv4)
            for row in res:
                print row.pkgname, row.tag_name, row.tag_info
    elif (command, subcommand) == ('show', 'pkg'):
        for md5_sum in md5_sums:
            srv4 = GetPkg(md5_sum)
            t = Template(SHOW_PKG_TMPL, searchList=[srv4])
            sys.stdout.write(unicode(t))
    elif command == 'gen-html':
        g = HtmlGenerator(md5_sums, options.pkg_review_template)
        sys.stdout.write(g.GenerateHtml())
    elif command == 'initdb':
        config = configuration.GetConfig()
        db_uri = configuration.ComposeDatabaseUri(config)
        dbc = database.CatalogDatabase(uri=db_uri)
        dbc.CreateTables()
        dbc.InitialDataImport()
    elif command == 'importpkg':
        collector = package_stats.StatsCollector(logger=logging,
                                                 debug=options.debug)
        file_list = args
        try:
            stats_list = collector.CollectStatsFromFiles(
                file_list, None, force_unpack=options.force_unpack)
        except sqlobject.dberrors.OperationalError, e:
            exception_msg = (
                "DELETE command denied to user "
                "'pkg_maintainer'@'192.168.1.2' for table 'csw_file'")
            if exception_msg in str(e):
                logging.fatal(
                    "You don't have sufficient privileges to overwrite previously "
                    "imported package. Did you run checkpkg before running "
                    "csw-upload-pkg?")
                sys.exit(1)
            else:
                raise e
        for stats in stats_list:
            logging.debug("Importing %s, %s", stats["basic_stats"]["md5_sum"],
                          stats["basic_stats"]["pkg_basename"])
            try:
                package_stats.PackageStats.ImportPkg(stats, options.replace)
            except sqlobject.dberrors.OperationalError, e:
                logging.fatal(
                    "A problem when importing package data has occurred: %s",
                    e)
                sys.exit(1)
Example #6
 def runsim(self, halfClocks=100, outfile='results.db'):
     dbmgr = database.DatabaseManager(outfile)
     for i in range(halfClocks):
         self.sim.advanceOneHalfClock()
         dbmgr.commit(i, self.sim.sim6507.getWires(),
                      self.sim.sim6507.getTransistors())
Example #7
def main():
  parser = optparse.OptionParser(USAGE)
  parser.add_option("-d", "--debug",
      dest="debug",
      action="store_true",
      default=False,
      help="Switch on debugging messages")
  parser.add_option("-q", "--quiet",
      dest="quiet",
      action="store_true",
      default=False,
      help="Display less messages")
  parser.add_option("--catalog-release",
      dest="catrel",
      default="current",
      help="A catalog release: current, unstable, testing, stable.")
  parser.add_option("-r", "--os-releases",
      dest="osrel_commas",
      help=("Comma separated list of ['SunOS5.9', 'SunOS5.10'], "
            "e.g. 'SunOS5.9,SunOS5.10'."))
  parser.add_option("-a", "--architecture",
      dest="arch",
      help="Architecture: i386, sparc.")
  parser.add_option("--profile", dest="profile",
      default=False, action="store_true",
      help="Enable profiling (a developer option).")
  options, args = parser.parse_args()
  assert len(args), "The list of files or md5 sums must be not empty."
  logging_level = logging.INFO
  if options.quiet:
    logging_level = logging.WARNING
  elif options.debug:
    # If both flags are set, debug wins.
    logging_level = logging.DEBUG
  logging.basicConfig(level=logging_level)
  logging.debug("Starting.")

  configuration.SetUpSqlobjectConnection()
  dm = database.DatabaseManager()
  dm.AutoManage()


  err_msg_list = []
  if not options.osrel_commas:
    err_msg_list.append("Please specify --os-releases.")
  if not options.arch:
    err_msg_list.append("Please specify --architecture.")
  if options.arch not in cc.PHYSICAL_ARCHITECTURES:
    err_msg_list.append(
        "Valid --architecture values are: %s, you passed: %r"
        % (cc.PHYSICAL_ARCHITECTURES, options.arch))
  if err_msg_list:
    raise UsageError(" ".join(err_msg_list))

  stats_list = []
  collector = package_stats.StatsCollector(
      logger=logging,
      debug=options.debug)
  # We need to separate files and md5 sums.
  md5_sums, file_list = [], []
  for arg in args:
    if struct_util.IsMd5(arg):
      md5_sums.append(arg)
    else:
      file_list.append(arg)
  if file_list:
    stats_list = collector.CollectStatsFromFiles(file_list, None)
  # We need the md5 sums of these files
  md5_sums.extend([x["basic_stats"]["md5_sum"] for x in stats_list])
  assert md5_sums, "The list of md5 sums must not be empty."
  logging.debug("md5_sums: %s", md5_sums)
  osrel_list = options.osrel_commas.split(",")
  logging.debug("Reading packages data from the database.")
  # This part might need improvements in order to handle a whole
  # catalog.  On the other hand, if we already have the whole catalog in
  # the database, we can do it altogether differently.
  # Transforming the result to a list in order to force object
  # retrieval.
  sqo_pkgs = list(models.Srv4FileStats.select(
    sqlobject.IN(models.Srv4FileStats.q.md5_sum, md5_sums)))
  tags_for_all_osrels = []
  try:
    sqo_catrel = models.CatalogRelease.selectBy(name=options.catrel).getOne()
  except sqlobject.main.SQLObjectNotFound as e:
    logging.fatal("Fetching from the db has failed: catrel=%s",
                  repr(str(options.catrel)))
    logging.fatal("Available catalog releases:")
    sqo_catrels = models.CatalogRelease.select()
    for sqo_catrel in sqo_catrels:
      logging.fatal(" - %s", sqo_catrel.name)
    raise
  sqo_arch = models.Architecture.selectBy(name=options.arch).getOne()
  for osrel in osrel_list:
    sqo_osrel = models.OsRelease.selectBy(short_name=osrel).getOne()
    dm.VerifyContents(sqo_osrel, sqo_arch)
    check_manager = checkpkg_lib.CheckpkgManager2(
        CHECKPKG_MODULE_NAME,
        sqo_pkgs,
        osrel,
        options.arch,
        options.catrel,
        debug=options.debug,
        show_progress=(os.isatty(1) and not options.quiet))
    # Running the checks, reporting and exiting.
    exit_code, screen_report, tags_report = check_manager.Run()
    screen_report = unicode(screen_report)
    if not options.quiet and screen_report:
      # TODO: Write this to screen only after overrides are applied.
      sys.stdout.write(screen_report)
    else:
      logging.debug("No screen report.")

    overrides_list = [list(pkg.GetOverridesResult()) for pkg in sqo_pkgs]
    override_list = reduce(operator.add, overrides_list)
    args = (sqo_osrel, sqo_arch, sqo_catrel)
    tag_lists = [list(pkg.GetErrorTagsResult(*args)) for pkg in sqo_pkgs]
    error_tags = reduce(operator.add, tag_lists)
    (tags_after_overrides,
     unapplied_overrides) = overrides.ApplyOverrides(error_tags, override_list)
    tags_for_all_osrels.extend(tags_after_overrides)
    if not options.quiet:
      if tags_after_overrides:
        print(textwrap.fill(BEFORE_OVERRIDES, 80))
        for checkpkg_tag in tags_after_overrides:
          print checkpkg_tag.ToGarSyntax()
        print textwrap.fill(AFTER_OVERRIDES, 80)
      if unapplied_overrides:
        print textwrap.fill(UNAPPLIED_OVERRIDES, 80)
        for override in unapplied_overrides:
          print u"* Unused %s" % override
  exit_code = bool(tags_for_all_osrels)
  sys.exit(exit_code)
Example #8
"""
This file adds all the CSV entries to the main sqlite database file.
"""

import database
import csv
import os

#_path_ = '/home/ubuntu/pybmp180/pyscript/data'
_path_ = 'data'

with database.DatabaseManager('data') as db:
    for file in os.listdir(_path_):
        if 'csv' in file:
            _p = os.path.join(_path_, file)
            print(_p)
            with open(_p, 'rt', encoding='utf-8') as _csv:
                dr = csv.DictReader(_csv)
                for line in dr:
                    db.add_data(line['Time(date)'], line[' degree'],
                                line[' df'], line[' pascals'],
                                line[' hectopascals'], line[' humidity'])
                    if len(db) > 500:
                        db.commit()
            db.commit()
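Note the keys used with the DictReader above: ' degree', ' df', ' pascals', ' hectopascals' and ' humidity' all start with a space, which implies the CSV header has a space after each comma. A small self-contained check of that assumption, with the header invented to match those keys and the values borrowed from Example #12 below:

import csv
import io

sample = (
    "Time(date), degree, df, pascals, hectopascals, humidity\n"
    "2018-11-24 07:48:54, 22.755369503429392, 72.9596651061729,"
    " 84456.90144499036, 844.5690144499036, 21.803302389095357\n"
)
for line in csv.DictReader(io.StringIO(sample)):
    # The leading space is part of the field name, exactly as in the loop above.
    print(line['Time(date)'], line[' degree'], line[' humidity'])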
Example #9
def main():
    parser = optparse.OptionParser(USAGE)
    parser.add_option("-d",
                      "--debug",
                      dest="debug",
                      default=False,
                      action="store_true",
                      help="Turn on debugging messages")
    parser.add_option(
        "-t",
        "--pkg-review-template",
        dest="pkg_review_template",
        help="A Cheetah template used for package review reports.")
    parser.add_option("-r",
                      "--os-release",
                      dest="osrel",
                      default="SunOS5.9",
                      help="E.g. SunOS5.9")
    parser.add_option("-a",
                      "--arch",
                      dest="arch",
                      default="sparc",
                      help="'i386' or 'sparc'")
    parser.add_option("-c",
                      "--catalog-release",
                      dest="catrel",
                      default="current",
                      help="E.g. current, unstable, testing, stable")
    parser.add_option("--replace",
                      dest="replace",
                      default=False,
                      action="store_true",
                      help="Replace packages when importing (importpkg)")
    parser.add_option("--profile",
                      dest="profile",
                      default=False,
                      action="store_true",
                      help="Turn on profiling")
    parser.add_option("--force-unpack",
                      dest="force_unpack",
                      default=False,
                      action="store_true",
                      help="Force unpacking of packages")
    options, args = parser.parse_args()
    if options.debug:
        logging.basicConfig(level=logging.DEBUG)
        logging.debug("Debugging on")
    else:
        logging.basicConfig(level=logging.INFO)
    if not args:
        raise UsageError("Please specify a command.  Se --help.")
    # SetUpSqlobjectConnection needs to be called after
    # logging.basicConfig
    configuration.SetUpSqlobjectConnection()
    command = args[0]
    args = args[1:]
    if command == 'show':
        subcommand = args[0]
        args = args[1:]
    elif command == 'pkg':
        subcommand = args[0]
        args = args[1:]
    else:
        subcommand = None

    md5_sums = args

    dm = database.DatabaseManager()
    # AutoManage is not what we want if the intention is to initialize
    # the database.
    if command != 'initdb':
        dm.AutoManage()

    if (command, subcommand) == ('show', 'errors'):
        for md5_sum in md5_sums:
            srv4 = GetPkg(md5_sum)
            res = m.CheckpkgErrorTag.select(
                m.CheckpkgErrorTag.q.srv4_file == srv4)
            for row in res:
                print row.pkgname, row.tag_name, row.tag_info, row.catrel.name, row.arch.name,
                print row.os_rel.short_name
    elif (command, subcommand) == ('show', 'overrides'):
        for md5_sum in md5_sums:
            srv4 = GetPkg(md5_sum)
            res = m.CheckpkgOverride.select(
                m.CheckpkgOverride.q.srv4_file == srv4)
            for row in res:
                print row.pkgname, row.tag_name, row.tag_info
    elif (command, subcommand) == ('show', 'pkg'):
        for md5_sum in md5_sums:
            srv4 = GetPkg(md5_sum)
            t = Template(SHOW_PKG_TMPL, searchList=[srv4])
            sys.stdout.write(unicode(t))
    elif command == 'gen-html':
        g = HtmlGenerator(md5_sums, options.pkg_review_template)
        sys.stdout.write(g.GenerateHtml())
    elif command == 'initdb':
        config = configuration.GetConfig()
        db_uri = configuration.ComposeDatabaseUri(config)
        dbc = database.CatalogDatabase(uri=db_uri)
        dbc.CreateTables()
        dbc.InitialDataImport()
    elif command == 'importpkg':
        collector = package_stats.StatsCollector(logger=logging,
                                                 debug=options.debug)
        file_list = args
        stats_list = collector.CollectStatsFromFiles(
            file_list, None, force_unpack=options.force_unpack)
        for stats in stats_list:
            logging.debug("Importing %s, %s", stats["basic_stats"]["md5_sum"],
                          stats["basic_stats"]["pkg_basename"])
            package_stats.PackageStats.ImportPkg(stats, options.replace)
    elif command == 'removepkg':
        for md5_sum in md5_sums:
            srv4 = GetPkg(md5_sum)
            in_catalogs = list(srv4.in_catalogs)
            if in_catalogs:
                for in_catalog in in_catalogs:
                    logging.warning("%s", in_catalog)
                logging.warning(
                    "Not removing from the database, because the package "
                    "in question is part of at least one catalog.")
            else:
                logging.info("Removing %s", srv4)
                srv4.DeleteAllDependentObjects()
                srv4.destroySelf()
    elif command == 'add-to-cat':
        if len(args) <= 3:
            raise UsageError("Not enough arguments, see usage.")
        osrel, arch, catrel = args[:3]
        c = checkpkg_lib.Catalog()
        md5_sums = args[3:]
        for md5_sum in md5_sums:
            logging.debug("Adding %s to the catalog", md5_sum)
            try:
                sqo_srv4 = m.Srv4FileStats.select(
                    m.Srv4FileStats.q.md5_sum == md5_sum).getOne()
                c.AddSrv4ToCatalog(sqo_srv4, osrel, arch, catrel)
            except sqlobject.main.SQLObjectNotFound, e:
                logging.warning("Srv4 file %s was not found in the database.",
                                md5_sum)
Example #10
def processing():
    start_time = time.time()  # timing start
    # Load the master word list used for spell checking
    with open("data/words.txt") as fd:
        ultra_word_list = fd.read().split()
    text = request.form["text"]
    text = text[0:2000]
    wordlist = nlp.nltk_word_tokenizer(text)
    '''
    Build the reference dictionary, whose format is:
    refer_dict = '{
        "word1": {"pos_pron": ["pos1:pron1", "pos2:pron2,pron3"], "index": 0},
        "word2": {"pos_pron": ["pos1:pron1", "pos2:pron2,pron3"], "index": 0},
        "word3": {"pos_pron": ["pos1:pron1", "pos2:pron2,pron3"], "index": 0}
    }'
    '''
    # refer_dict is converted to a JSON string and embedded in the HTML page for JavaScript
    refer_dict = {}
    # wait_to_save temporarily holds new word data to be written to the database: {"word": "pos_pron", ...}
    wait_to_save = {}
    db = database.DatabaseManager()
    db.open()
    for word in wordlist:
        word_lower = word.lower()  # pronunciations are looked up in lowercase
        #print("Current Word: %s" %word)
        # First check whether the word is an English word at all
        if word_lower not in ultra_word_list:
            refer_dict[word_lower] = {"pos_pron": [":"], "index": 0}
            #print("%s 不是英语单词" %word)
            continue
        pos_pron_str = db.query(word_lower)
        if pos_pron_str is not None:
            # The database returns a string; convert it to a list
            pos_pron = pos_pron_str.split(';')
            #print("Hit in DB cache.")
        else:
            pos_pron = cambridge_crawler.crawler(word_lower)  # list
            # Make sure keys and values are plain strings before handing them to the database
            wait_to_save[word_lower] = ';'.join(pos_pron)
            #print("Crawl from URL.")
        # pos_pron is a list like ["pos1:pron1", "pos2:pron2"]
        refer_dict[word_lower] = {"pos_pron": pos_pron, "index": 0}

    # Save the new data to the database
    db.save_to_db(wait_to_save)
    db.close()

    # Assemble the HTML fragment
    content_block = ""
    for word in wordlist:
        pos_pron = refer_dict[word.lower()]["pos_pron"]
        pos_pron_element = pos_pron[0]  # take the first POS/pronunciation pair, i.e. pos1:pron1
        pos, pron = pos_pron_element.split(":")  # split into part of speech and pronunciation
        multipos = "" if len(pos_pron) == 1 else " multipos"  # add an extra class for words with multiple parts of speech
        content_block += "<div class=\"group%s\" title=\"%s\">\n"	\
                "\t<p class=\"word\">%s</p>"		\
                "\t<p class=\"pronunciation\">%s</p>"	\
                "</div>\n"				\
                %(multipos, pos, word, pron)
    # After the loop, the HTML fragment is complete

    # Convert the dict to a JSON string
    refer_dict_str = json.dumps(refer_dict)

    # print(refer_dict_str)
    text_wc = len(wordlist)
    end_time = time.time()  # timing end
    elapsed_time = end_time - start_time
    content_block = ("<h5>输入词汇数: %d 个</h5>" %text_wc) \
            + ("<h5>执行时间: %f 秒</h5>" %elapsed_time) \
            + content_block
    return render_template('pti_result.html',
                           refer_dict_str=refer_dict_str,
                           content_block=content_block)
Example #11
def main():
    timers = {
        "bme280": Timer(180),
    }

    _file_name = "data"

    # We have to override get_default_bus for Adafruit_GPIO.I2C.
    # The Pine64 uses the secondary I2C bus, so this needs to return 1; change the return value if your board uses a different bus.
    import Adafruit_GPIO.I2C as I2C

    def get_default_bus():
        return 1

    I2C.get_default_bus = get_default_bus

    def get_sensor():
        return BME280(t_mode=BME280_OSAMPLE_8, p_mode=BME280_OSAMPLE_8, h_mode=BME280_OSAMPLE_8, address=0x76)

    try:
        sensor = get_sensor()
    except OSError:
        sys.exit(-1)
    except RuntimeError:
        sys.exit(-2)

    def gather_data():
        degrees = sensor.read_temperature()
        df = sensor.read_temperature_f()
        pascals = sensor.read_pressure()
        hectopascals = pascals / 100
        humidity = sensor.read_humidity()
        return degrees, df, pascals, hectopascals, humidity

    out = {
        "tempc": 'Temp      = {0:0.3f} deg C',
        "tempf": 'Temp      = {0:0.3f} deg F',
        "pressure": 'Pressure  = {0:0.2f} hPa',
        "humidity": 'Humidity  = {0:0.2f} %'
    }

    for timer in timers.values():
        timer.start()

    _wait = True
    try:
        global signal_watch
        global BACKUP

        with database.DatabaseManager() as _db:
            while not signal_watch.kill:
                if timers["bme280"].check > timers["bme280"].tdelta:
                    if DEBUG:
                        data = gather_data()
                        print(data)
                        print(out['tempc'].format(data[0]))
                        print(out['tempf'].format(data[1]))
                        print(out['pressure'].format(data[2]))
                        print(out['humidity'].format(data[4]))
                    else:
                        _db.add_data(timers["bme280"].time.strftime("%Y-%m-%d %H:%M:%S"), *gather_data())
                        _db.commit()

                    timers["bme280"].start()

                if _wait:
                    sleep(1)
                    _db.backup = BACKUP

                if _db.backup:
                    _db.BACKUP()
    except FileNotFoundError as err:
        print(err)
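The loop above relies on a Timer helper that is not shown: Timer(180) is constructed with an interval, start() marks the beginning of a period, check is compared against tdelta, and time is formatted with strftime. The sketch below is a guess at that helper, consistent with those call sites but not taken from the original project.

import datetime


class Timer:
    """Hypothetical sketch of the Timer used in the loop above."""

    def __init__(self, tdelta):
        self.tdelta = tdelta                 # interval length in seconds
        self.time = datetime.datetime.now()  # when the current interval began

    def start(self):
        self.time = datetime.datetime.now()

    @property
    def check(self):
        # Seconds elapsed since the last start(); compared against tdelta above.
        return (datetime.datetime.now() - self.time).total_seconds()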
Example #12

if __name__ == "__main__":
    with database.DatabaseManager() as db:
        dt = "2018-11-24 07:48:54"
        c = '22.755369503429392'
        f = '72.9596651061729'
        p = '84456.90144499036'
        hp = '844.5690144499036'
        h = '21.803302389095357'

        db.add_data(dt, c, f, p, hp, h)
        print(db)


Example #13
def main():
  parser = optparse.OptionParser(USAGE)
  parser.add_option("-d", "--debug",
      dest="debug",
      action="store_true",
      default=False,
      help="Switch on debugging messages")
  parser.add_option("-q", "--quiet",
      dest="quiet",
      action="store_true",
      default=False,
      help="Display less messages")
  parser.add_option("--catalog-release",
      dest="catrel",
      default="current",
      help="A catalog release: current, unstable, testing, stable.")
  parser.add_option("-r", "--os-releases",
      dest="osrel_commas",
      help=("Comma separated list of ['SunOS5.9', 'SunOS5.10'], "
            "e.g. 'SunOS5.9,SunOS5.10'."))
  parser.add_option("-a", "--architecture",
      dest="arch",
      help="Architecture: i386, sparc.")
  parser.add_option("--profile", dest="profile",
      default=False, action="store_true",
      help="Enable profiling (a developer option).")
  options, args = parser.parse_args()
  assert len(args), "The list of files or md5 sums must be not empty."
  logging_level = logging.INFO
  if options.quiet:
    logging_level = logging.WARNING
  elif options.debug:
    # If both flags are set, debug wins.
    logging_level = logging.DEBUG
  logging.basicConfig(level=logging_level)
  logging.debug("Starting.")

  configuration.SetUpSqlobjectConnection()
  dm = database.DatabaseManager()
  dm.AutoManage()


  err_msg_list = []
  if not options.osrel_commas:
    err_msg_list.append("Please specify --os-releases.")
  if not options.arch:
    err_msg_list.append("Please specify --architecture.")
  if err_msg_list:
    raise UsageError(" ".join(err_msg_list))

  stats_list = []
  collector = package_stats.StatsCollector(
      logger=logging,
      debug=options.debug)
  # We need to separate files and md5 sums.
  md5_sums, file_list = [], []
  for arg in args:
    if struct_util.IsMd5(arg):
      md5_sums.append(arg)
    else:
      file_list.append(arg)
  if file_list:
    stats_list = collector.CollectStatsFromFiles(file_list, None)
  # We need the md5 sums of these files
  md5_sums.extend([x["basic_stats"]["md5_sum"] for x in stats_list])
  assert md5_sums, "The list of md5 sums must not be empty."
  logging.debug("md5_sums: %s", md5_sums)
  osrel_list = options.osrel_commas.split(",")
  logging.debug("Reading packages data from the database.")
  # This part might need improvements in order to handle a whole
  # catalog.  On the other hand, if we already have the whole catalog in
  # the database, we can do it altogether differently.
  # Transforming the result to a list in order to force object
  # retrieval.
  sqo_pkgs = list(models.Srv4FileStats.select(
    sqlobject.IN(models.Srv4FileStats.q.md5_sum, md5_sums)))
  tags_for_all_osrels = []
  try:
    sqo_catrel = models.CatalogRelease.selectBy(name=options.catrel).getOne()
  except sqlobject.main.SQLObjectNotFound, e:
    logging.fatal("Fetching from the db has failed: catrel=%s",
                  repr(str(options.catrel)))
    logging.fatal("Available catalog releases:")
    sqo_catrels = models.CatalogRelease.select()
    for sqo_catrel in sqo_catrels:
      logging.fatal(" - %s", sqo_catrel.name)
    raise