Example 1
def cmdrun(cmd, errorText='', loglevel=0):
  PIPE = subprocess.PIPE
  p1 = subprocess.Popen(cmd, shell = True)
  returncode = p1.wait()
  if returncode != 0:
    log.add (text=errorText, level=loglevel, file=file_log)
    raise Exception('returncode: ' + str(returncode) + ', text error: ' + str(errorText))
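The run-and-raise pattern in cmdrun() can be reproduced with nothing but the standard library; the sketch below is a self-contained variant (the command string and the use of the logging module are illustrative, not taken from the original project):
import logging
import subprocess

logging.basicConfig(level=logging.INFO)

def run_checked(cmd, error_text=''):
    # Run the command through the shell and wait for it to finish.
    process = subprocess.Popen(cmd, shell=True)
    return_code = process.wait()
    if return_code != 0:
        logging.error(error_text)
        raise Exception('returncode: %d, text error: %s' % (return_code, error_text))

run_checked('echo hello', error_text='! error running echo')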
Example 2
def add_user_to_exim_db(db_host, db_name, db_user, db_passwd, email_prefix, email_domain, email_passwd, email_descr):
	try:
		con = mdb.connect(host=db_host, user=db_user, passwd=db_passwd, db=db_name, charset='utf8', init_command='SET NAMES UTF8');
		cur = con.cursor()
	except mdb.Error, e:
		log.add("ERROR mysql connect: %d: %s" % (e.args[0],e.args[1]))
		return STATUS_INTERNAL_ERROR
Example 3
def set_email_in_ad(
    username,
    email,
    domain=conf.domain,
    employee_num="1",
    base_dn=conf.base_user_dn,
    group_acl_base=conf.group_acl_base,
    group_rbl_base=conf.group_rbl_base,
):
    # LDAP connection
    try:
        # without ssl:
        # ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, 0)
        # ldap_connection = ldap.initialize(LDAP_SERVER)

        # ssl:
        # ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER)
        # ldap_connection = ldap.initialize(LDAP_SERVER)
        # ldap_connection.set_option(ldap.OPT_REFERRALS, 0)
        # ldap_connection.set_option(ldap.OPT_PROTOCOL_VERSION, 3)
        # ldap_connection.set_option(ldap.OPT_X_TLS,ldap.OPT_X_TLS_DEMAND)
        # ldap_connection.set_option( ldap.OPT_X_TLS_DEMAND, True )
        # ldap_connection.set_option( ldap.OPT_DEBUG_LEVEL, 255 )
        ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER)
        ldap_connection = ldap.initialize(conf.LDAP_SERVER)
        ldap_connection.simple_bind_s(conf.BIND_DN, conf.BIND_PASS)
    except ldap.LDAPError, error_message:
        log.add(u"Error connecting to LDAP server: %s" % error_message)
        return STATUS_INTERNAL_ERROR
Example 4
def loadSingle(fileName, package=None):
    """Load a plugin that consists of a single .py module in a plugins
    directory.

    @param fileName File name of the plugin.
    """
    name = paths.getBase(fileName)

    obsoletePluginNames = ['tab1_summary', 'tab2_addons', 'tab3_settings', 'tab30_addons']
    
    # Old plugin names.
    if name in obsoletePluginNames:
        logger.add(logger.MEDIUM, 'warning-obsolete-plugin', name, fileName)
    
    sysPath = sys.path
    if os.path.dirname(fileName) not in sysPath:
        sysPath = [os.path.dirname(fileName)] + sys.path   
        
    if package:
        prefix = package + '.'
    else:
        prefix = ''
   
    # Initialization will be done after everything has been loaded.
    initCommands.append(('import ' + prefix + name, prefix + name + '.init()', sysPath))
Example 5
    def readMetaData(self):
        """Read the metadata files (e.g. Contents/Info)."""

        try:
            for info in META_NAMES:
                try:
                    conf = file(self.__makePath(info)).read()
                    self.parseConfiguration(conf)
                except OSError:
                    pass
                except IOError:
                    pass
                except Exception, x:
                    logger.add(logger.HIGH, 'error-read-info-file',
                               self.__makePath(info), self.getId())
        except:
            # Log a warning?
            traceback.print_exc()

        # Load a readme from a separate file.
        readme = self.__makePath('Readme.html')
        if not os.path.exists(readme):
            readme = self.__makePath('readme.html')
        if os.path.exists(readme):
            language.define('english',
                            self.makeLocalId('readme'),
                            file(readme).read())
Example 6
    def readMetaData(self):
        """Read the metadata files (e.g. Contents/Info)."""

        try:
            for info in META_NAMES:
                try:
                    conf = file(self.__makePath(info)).read()
                    self.parseConfiguration(conf)
                except OSError:
                    pass
                except IOError:
                    pass
                except Exception, x:
                    logger.add(logger.HIGH, 'error-read-info-file',
                               self.__makePath(info), self.getId())
        except:
            # Log a warning?
            traceback.print_exc()

        # Load a readme from a separate file.
        readme = self.__makePath('Readme.html')
        if not os.path.exists(readme):
            readme = self.__makePath('readme.html')
        if os.path.exists(readme):
            language.define('english', self.makeLocalId('readme'),
                            file(readme).read())
Example 7
def set_email_in_ad(username,
                    email,
                    domain=conf.domain,
                    employee_num="1",
                    base_dn=conf.base_user_dn,
                    group_acl_base=conf.group_acl_base,
                    group_rbl_base=conf.group_rbl_base):
    # LDAP connection
    try:
        # without ssl:
        #ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, 0)
        #ldap_connection = ldap.initialize(LDAP_SERVER)

        # ssl:
        #ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER)
        #ldap_connection = ldap.initialize(LDAP_SERVER)
        #ldap_connection.set_option(ldap.OPT_REFERRALS, 0)
        #ldap_connection.set_option(ldap.OPT_PROTOCOL_VERSION, 3)
        #ldap_connection.set_option(ldap.OPT_X_TLS,ldap.OPT_X_TLS_DEMAND)
        #ldap_connection.set_option( ldap.OPT_X_TLS_DEMAND, True )
        #ldap_connection.set_option( ldap.OPT_DEBUG_LEVEL, 255 )
        ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER)
        ldap_connection = ldap.initialize(conf.LDAP_SERVER)
        ldap_connection.simple_bind_s(conf.BIND_DN, conf.BIND_PASS)
    except ldap.LDAPError, error_message:
        log.add(u"Error connecting to LDAP server: %s" % error_message)
        return STATUS_INTERNAL_ERROR
Example 8
def load(fileName):
    """Loads a language file and stores the strings into the database.
    @param fileName Name of the language file.
    """
    p = cfparser.FileParser(fileName)

    # Construct a block element to hold all the strings.
    languageName = paths.getBase(fileName)
    impliedBlock = cfparser.BlockElement('language', languageName)

    try:
        while True:
            # Get the next string from the language file.
            e = p.get()
            
            # Only key elements are recognized.
            if e.isKey():
                impliedBlock.add(e)
    
    except cfparser.OutOfElements:
        # All OK.
        pass

    except:
        import logger
        logger.add(logger.MEDIUM, 'warning-read-language', languageName, 
                   fileName)

    processLanguageBlock(impliedBlock)
Example 9
def controlAuto(isEnd=False, isError=False, loglevel=0):
  if isEnd:
    if not isError:
      if os.path.exists(conf.workactual + 'work.dat'):
        os.remove(conf.workactual + 'work.dat')
    
    dir = conf.workdir + 'data'
    if dir[-1] == os.sep: dir = dir[:-1]
    files = os.listdir(dir)
    for file in files:
      if file == '.' or file == '..': continue
      os.remove(dir + os.sep + file)
    
    if conf.runAfter:
      cmdrun(cmd=conf.runAfter, errorText='! error run after', loglevel=loglevel)
    
  else:
    if os.path.exists(conf.workactual + 'work.dat'):
      texterror = '! previous run did not complete or ended with an error: "work.dat" exists'
      log.add (text=texterror, level=loglevel, file=file_log)
      raise Exception(texterror)
  
    file_log_d = open(conf.workactual + 'work.dat','w')
    file_log_d.write(str(datetime.datetime.now().strftime(conf.format_datetime)))
    file_log_d.close()
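The work.dat handling in controlAuto() is essentially a marker (lock) file: refuse to start while the marker from an unfinished run is still present, create it at start, and remove it only on a clean finish. A minimal self-contained sketch of the same idea (the /tmp path is hypothetical):
import datetime
import os

MARKER = '/tmp/work.dat'  # hypothetical marker location

def start_run():
    # Refuse to start if a previous run left its marker behind.
    if os.path.exists(MARKER):
        raise Exception('previous run did not complete: "%s" exists' % MARKER)
    with open(MARKER, 'w') as f:
        f.write(datetime.datetime.now().isoformat())

def finish_run(had_error=False):
    # Remove the marker only when the run finished without errors.
    if not had_error and os.path.exists(MARKER):
        os.remove(MARKER)

start_run()
finish_run()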
Example 10
def load(path, notify=True):
    """Loads a single profile from disk.  After the profile has been
    loaded, sends a "profile-updated" notification so that the plugins
    may react to the new profile.  If a profile with the same
    identifier already exists, it is updated with the new information.

    @param path The file name of the profile to load.
    """

    # Parameter "path" stands for profile's file name. Every time a
    # profile is being loaded, we'll have to check if the profile
    # already exists. In that case, the old profile will be updated with
    # the new profile's information. Finally, when the loading is
    # complete, a "profile-updated" notification is sent.

    # The identifier of the profile (the base name of the file).
    identifier = paths.getBase(path)

    # Parse the file.
    elements = []
    p = cfparser.FileParser(path)
    try:
        while True:
            elements.append(p.get())

    except cfparser.OutOfElements:
        # The file ended normally.
        pass

    except cfparser.ParseFailed, ex:
        # Show the error dialog.
        logger.add(logger.HIGH, 'error-parsing-profile', path, str(ex))
        return
Example 11
def load(path, notify=True):
    """Loads a single profile from disk.  After the profile has been
    loaded, sends a "profile-updated" notification so that the plugins
    may react to the new profile.  If a profile with the same
    identifier already exists, it is updated with the new information.

    @param path The file name of the profile to load.
    """

    # Parameter "path" stands for profile's file name. Every time a
    # profile is being loaded, we'll have to check if the profile
    # already exists. In that case, the old profile will be updated with
    # the new profile's information. Finally, when the loading is
    # complete, a "profile-updated" notification is sent.

    # The identifier of the profile (the base name of the file).
    identifier = paths.getBase(path)

    # Parse the file.
    elements = []
    p = cfparser.FileParser(path)
    try:
        while True:
            elements.append(p.get())

    except cfparser.OutOfElements:
        # The file ended normally.
        pass

    except cfparser.ParseFailed, ex:
        # Show the error dialog.
        logger.add(logger.HIGH, 'error-parsing-profile', path, str(ex))
        return
Example 12
def control_auto(is_end=False, is_error=False, log_level=0):
    if is_end:
        if not is_error:
            if os.path.exists(conf.workactual + 'work.dat'):
                os.remove(conf.workactual + 'work.dat')

        data_dir = conf.workdir + '/data'
        if data_dir[-1] == os.sep:
            data_dir = data_dir[:-1]
        files = os.listdir(data_dir)
        for _file in files:
            if _file == '.' or _file == '..' or _file == '.gitkeep':
                continue
            os.remove(data_dir + os.sep + _file)

        if conf.runAfter:
            cmd_run(cmd=conf.runAfter,
                    error_text='! error run after',
                    log_level=log_level)

    else:
        if os.path.exists(conf.workactual + 'work.dat'):
            text_error = '! previous run did not complete or ended with an error: "work.dat" exists'
            log.add(text=text_error, level=log_level, file=file_log)
            raise Exception(text_error)

        file_log_d = open(conf.workactual + 'work.dat', 'w')
        file_log_d.write(
            str(datetime.datetime.now().strftime(conf.format_datetime)))
        file_log_d.close()
Example 13
def train(args, data_loader, model, global_stats, logger):
    """Run through one epoch of model training with the provided data loader."""
    # Initialize meters + timers
    ml_loss = AverageMeter()
    loc_loss = AverageMeter()
    fix_loss = AverageMeter()
    epoch_time = Timer()

    current_epoch = global_stats['epoch']
    if args.use_tqdm:
        pbar = tqdm(data_loader)
        pbar.set_description(
            "%s" %
            'Epoch = %d [tot_loss = x.xx loc_loss = x.xx fix_loss = x.xx]' %
            current_epoch)
    else:
        pbar = data_loader

    # Run one epoch
    for idx, ex in enumerate(pbar):
        bsz = ex['batch_size']
        if args.optimizer in ['sgd', 'adam'
                              ] and current_epoch <= args.warmup_epochs:
            cur_lrate = global_stats['warmup_factor'] * (model.updates + 1)
            for param_group in model.optimizer.param_groups:
                param_group['lr'] = cur_lrate

        net_loss = model.update(ex)
        ml_loss.update(net_loss["loss"].detach().item(), bsz)
        loc_loss.update(net_loss["loc_loss"].detach().item(), bsz)
        fix_loss.update(net_loss["fix_loss"].detach().item(), bsz)
        log_info = 'Epoch = %d [tot_loss = %.2f loc_loss = %.2f fix_loss = %.2f]' % \
                   (current_epoch, ml_loss.avg, loc_loss.avg, fix_loss.avg)

        if args.use_tqdm:
            pbar.set_description("%s" % log_info)

        if idx % 1000 == 0:
            logger.print(
                'train: Epoch %d | tot_loss = %.2f | loc_loss = %.2f | fix_loss = %.2f'
                % (current_epoch, ml_loss.avg, loc_loss.avg, fix_loss.avg))

    kvs = [("ml_lo_tr", ml_loss.avg), ("loc_lo_tr", loc_loss.avg), ("fix_lo_tr", fix_loss.avg),\
               ("epoch_time", epoch_time.time())]

    for k, v in kvs:
        logger.add(current_epoch, **{k: v})
    logger.print(
        'train: Epoch %d | tot_loss = %.2f | loc_loss = %.2f | fix_loss = %.2f | '
        'Time for epoch = %.2f (s)' %
        (current_epoch, ml_loss.avg, loc_loss.avg, fix_loss.avg,
         epoch_time.time()))

    # Checkpoint
    if args.checkpoint:
        model.checkpoint(logger.path + '/model.cpt.checkpoint',
                         current_epoch + 1)

    gc.collect()
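For reference, the warm-up branch in the loop above simply grows the learning rate linearly with the number of optimizer updates until the warm-up epochs are over; a tiny standalone illustration of that schedule (the numbers are made up):
# Linear learning-rate warm-up, as used in train() above (illustrative values).
base_lr = 1e-3
warmup_updates = 1000
warmup_factor = base_lr / warmup_updates

for updates in range(0, 1001, 250):
    cur_lrate = warmup_factor * (updates + 1)
    print('updates=%d lr=%.6f' % (updates, cur_lrate))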
Example 14
def cmd_run(cmd, error_text='', log_level=0):
    print '!>> ' + cmd
    p1 = subprocess.Popen(cmd, shell=True)
    return_code = p1.wait()
    if not return_code == 0:
        log.add(text=error_text, level=log_level, file=file_log)
        raise Exception('return code: ' + str(return_code) + ', text error: ' +
                        str(error_text))
Example 15
def findImgMarker(loglevel=0):
    log.add('start findImgMarker', level=loglevel, file=file_log)
    markers = []
    for fd in os.listdir(conf.path_markers):
        markers.append(fd[:-4])

    write_file = open(conf.file_marker_json, "w")
    write_file.write(json.dumps(markers))
    log.add('end findImgMarker', level=loglevel, file=file_log)
Example 16
def findImgMarker(loglevel=0):
  log.add ('start findImgMarker', level=loglevel, file=file_log)
  markers = []
  for fd in os.listdir(conf.path_markers):
    markers.append(fd[:-4])
  
  write_file = open(conf.file_marker_json, "w")
  write_file.write(json.dumps(markers))
  log.add ('end findImgMarker', level=loglevel, file=file_log)
Example 17
def run():
    # Open the file selection dialog.
    for selection in chooseAddons('install-addon-dialog',
                                  language.translate('install-addon-title'),
                                  'install'):
        try:
            ao.install(selection)
        except Exception, ex:
            logger.add(logger.HIGH, 'error-addon-installation-failed',
                       selection, str(ex))
Example 18
def run():
    # Open the file selection dialog.
    for selection in chooseAddons('install-addon-dialog',
                                  language.translate('install-addon-title'),
                                  'install'):
        try:
            ao.install(selection)
        except Exception, ex:
            logger.add(logger.HIGH, 'error-addon-installation-failed',
                       selection, str(ex))
Example 19
def get_users_phones_from_site():
#  Start
	log.add("get_users_phones_from_site()")
	try:
		con = mdb.connect(conf.user_phone_db_host, conf.user_phone_db_user, conf.user_phone_db_passwd, conf.user_phone_db_name);
		cur = con.cursor()
	except mdb.Error, e:
		print "Error %d: %s" % (e.args[0],e.args[1])
		log.add("error connect to mysql user phone db: %d: %s" % (e.args[0],e.args[1]))
		sys.exit(1)
Example 20
def connection():
    """Connects to the database."""
    conn = sqlite3.connect(DB_NAME)
    # cur=conn.cursor()
    conn.execute('PRAGMA foreign_keys = ON;')
    conn.commit()
    try:
        logger.add("Connected to database: %s" % (DB_NAME), 4)
        return conn
    except sqlite3.Error as e:
        pass
        logger.add("I am unable to connect to the database: %s" % e.args[0], 1)
Example 21
def borrow(bookID, personID, start=None):
    """Description """
    c = connection()
    cur = c.cursor()
    cur.execute(
        'SELECT * FROM borrowings WHERE id_book=? AND start_date IS NOT NULL AND end_date IS NULL;',
        (bookID, ))
    row = cur.fetchone()
    available = (row is None)
    if not available:
        logger.add(
            "Book with id=%d is not available (someone is reading it)." %
            (bookID, ), 1)
        return
    # print(row,available)
    if (start):
        try:
            cur.execute(
                'INSERT  INTO borrowings(id_book,id_people,start_date) VALUES(?,?,?);',
                (bookID, personID, start))
        except sqlite3.Error as er:
            logger.add('SQLite error: %s' % (' '.join(er.args)), 1)
    else:
        try:
            cur.execute(
                'INSERT  INTO borrowings(id_book,id_people) VALUES(?,?);',
                (bookID, personID))
        except sqlite3.Error as er:
            logger.add('SQLite error: %s' % (' '.join(er.args)), 1)

    logger.add("The book with id=%d has been borrowed" % (bookID, ), 3)
    c.commit()
    c.close()
Example 22
def createTree(loglevel=0):
    log.add('start createTree', level=loglevel, file=file_log)

    def fnChildren(catalog_json, dictionary_json, parent=''):
        data = []
        for icat in catalog_json:
            if len(icat['parent']) == 0:
                icat['parent'] = ['']
            for ipar in icat['parent']:
                if ipar == parent:
                    rec = {}
                    rec['data'] = dictionary_json['catalog'][
                        icat['name']]['name']
                    rec['attr'] = {'nclass': icat['name']}
                    children = fnChildren(catalog_json=catalog_json,
                                          dictionary_json=dictionary_json,
                                          parent=icat['name'])
                    if (children):
                        rec['children'] = children
                    data.append(rec)
        data.sort(key=lambda x: x['data'])
        return data

    with codecs.open(conf.file_catalog_json, 'rt', encoding='utf-8') as f:
        catalog_json = json.load(f)
    for fd in os.listdir(conf.path_dictionary_json):
        with codecs.open(conf.path_dictionary_json + fd,
                         'rt',
                         encoding='utf-8') as f:
            fjson = json.load(f)
            if (fjson['language'] == lang[0]):
                dictionary_json = fjson
                break

    datajson = fnChildren(catalog_json=catalog_json,
                          dictionary_json=dictionary_json)

    datajson = [{
        'data': u'выбрать все',
        'attr': {
            'class': 'root',
            'nclass': 'root'
        },
        'state': 'open',
        'children': datajson
    }]

    write_file = open(conf.file_tree_json, "w")
    write_file.write(json.dumps(datajson))
    log.add('end createTree', level=loglevel, file=file_log)
Example 23
def createListPermalink(loglevel=0):
    log.add('start createListPermalink', level=loglevel, file=file_log)
    with codecs.open(conf.file_catalog_json, 'rt', encoding='utf-8') as f:
        catalog_json = json.load(f)
    with codecs.open(conf.file_listPerm_json, 'rt', encoding='utf-8') as f:
        listPerm_json = json.load(f)

    for icat in catalog_json:
        if listPerm_json.count(icat['name']) == 0:
            listPerm_json.append(icat['name'])

    write_file = open(conf.file_listPerm_json, "w")
    write_file.write(json.dumps(listPerm_json))

    log.add('end createListPermalink', level=loglevel, file=file_log)
Example 24
def createListPermalink(loglevel=0):
  log.add ('start createListPermalink', level=loglevel, file=file_log)
  with codecs.open(conf.file_catalog_json, 'rt', encoding='utf-8') as f:
    catalog_json = json.load(f)
  with codecs.open(conf.file_listPerm_json, 'rt', encoding='utf-8') as f:
    listPerm_json = json.load(f)
  
  for icat in catalog_json:
    if listPerm_json.count(icat['name']) == 0:
      listPerm_json.append(icat['name'])
  
  write_file = open(conf.file_listPerm_json, "w")
  write_file.write(json.dumps(listPerm_json))
  
  log.add ('end createListPermalink', level=loglevel, file=file_log)
Example 25
def readConfigFile(fileName):
    """Reads a configuration file from the specified path.  The file
    must be a .conf file that contains either settings or components,
    or both."""
    p = cfparser.FileParser(fileName)

    # We'll collect all the elements into this array.
    elements = []
    
    try:
        # Loop through all the elements in the config file.
        while True:
            e = p.get()

            if e.isKey() and e.getName() == 'required-platform':
                # This configuration file is specific to a particular
                # platform.
                if (e.getValue() == 'windows' and not host.isWindows()) or \
                   (e.getValue() == 'mac' and not host.isMac()) or \
                   (e.getValue() == 'unix' and not host.isUnix()):
                    # Look, we ran out early.
                    raise cfparser.OutOfElements

            # Config files may contain blocks that group similar
            # settings together.
            if e.isBlock() and e.getType() == 'group':
                # Add everything inside the group into the elements.
                # A group can only contain settings.
                for sub in e.getContents():
                    # There can only be blocks in a group.
                    if not sub.isBlock():
                        continue
                    # Add the 'group' key element to the setting.
                    sub.add(cfparser.KeyElement('group', e.getName()))
                    elements.append(sub)
            else:
                elements.append(e)

    except cfparser.OutOfElements:
        # Parsing was completed.
        pass

    except Exception, exception:
        # Log an error message.
        import traceback
        traceback.print_exc()
        logger.add(logger.HIGH, 'error-read-config-file', fileName,
                  str(exception))
Example 26
def insertPOI(conn, loglevel=0):
  log.add ('start insert poi', level=loglevel, file=file_log)

  cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)

  # timestamp of the most recent object, plus deleted entries
  cur.execute("""
    SELECT MAX(tstamp) as nupd, (SELECT MAX(deleted_at) FROM deleted_entries) as ndel
    FROM
    (
      SELECT MAX(tstamp) as tstamp
        FROM relations
      UNION
      SELECT MAX(updated_at)
        FROM relations
      UNION
      SELECT MAX(tstamp)
        FROM ways
      UNION
      SELECT MAX(updated_at)
        FROM ways
      UNION
      SELECT MAX(tstamp)
        FROM nodes
    ) as t1
  """)
  whereTime = cur.fetchone()
  if not whereTime['ndel']:
    whereTime['ndel'] = datetime.datetime.now()

  catalog(cur, loglevel=loglevel+1)
  addr(cur, loglevel=loglevel+1)
  conn.commit()
  
  cur = conn.cursor()
  log.add ('clear table ershkus_poi', level=loglevel+1, file=file_log)
  cur.execute("DELETE FROM ershkus_poi")
  
  log.add ('copy table ershkus_poi_c to ershkus_poi', level=loglevel+1, file=file_log)
  cur.execute("""
    INSERT INTO ershkus_poi(
      id, class, tags, name_ru, operator, tags_ru, opening_hours, class_ru, addr_region, addr_district, addr_city, addr_full_name, index_name, addr_region_id, addr_district_id, addr_city_id, addr_house_id, addr_in_id, addr_in_type, brand, phone, fax, website, addr_village, addr_street, addr_house, c_geom, addr_country, email, description, wikipedia, osm_id)
    SELECT
      id, class, tags, name_ru, operator, tags_ru, opening_hours, class_ru, addr_region, addr_district, addr_city, addr_full_name, index_name, addr_region_id, addr_district_id, addr_city_id, addr_house_id, addr_in_id, addr_in_type, brand, phone, fax, website, addr_village, addr_street, addr_house, c_geom, addr_country, email, description, wikipedia, osm_id
    FROM
      ershkus_poi_c
    ;  
  """)
  conn.commit()  
  

  # save the current position
  utils.saveDate(whereTime=whereTime, file=fupd_time, key_config='datepoi')
  # f = open(conf.workactual + fupd_time,'w')
  # f.write(json.dumps(whereTime, default=utils.jsondumps))
  # f.close()

  log.add ('end insert poi', level=loglevel, file=file_log)
Example 27
def loadAll():
    "Load all the plugins from the plugins directory."

    global initCommands
    initCommands = []

    # We'll only use the modified sys.path during this function.
    oldPath = sys.path

    # Make the plugins directory the one where to look first.
    sys.path = [PLUGIN_PATH] + sys.path

    found = []

    for fileName in paths.listFiles(paths.PLUGINS):
        if paths.hasExtension('py', fileName) or \
           (paths.hasExtension('plugin', fileName) and os.path.isdir(fileName)):
            if fileName not in found:
                found.append(fileName)
                
    # Sort alphabetically across all located plugins.
    found.sort(key=sortKey)
    for fileName in found:
        if paths.hasExtension('py', fileName):
            loadSingle(fileName) # Single Python module.
        elif paths.hasExtension('plugin', fileName):            
            loadBundle(fileName) # A plugin bundle.

    # Import all plugin modules.
    for importStatement, initStatement, searchPath in initCommands:
        sys.path = searchPath
        try:
            exec importStatement
        except:
            logger.add(logger.HIGH, 'error-plugin-init-failed', importStatement,
                       logger.formatTraceback())

    # Initialize all plugins.
    for importStatement, initStatement, searchPath in initCommands:
        sys.path = searchPath
        try:
            exec initStatement
        except AttributeError, ex:
            if "'init'" not in str(ex):
                logger.add(logger.HIGH, 'error-plugin-init-failed', initStatement,
                           logger.formatTraceback())
        except:
Example 28
    def readMetaData(self):
        """Read the metadata file(s)."""

        # Check if there is an Info lump.
        try:
            z = zipfile.ZipFile(self.source)
            for info in META_NAMES:
                try:
                    self.parseConfiguration(z.read(info))
                except KeyError:
                    pass
                except:
                    logger.add(logger.HIGH, 'error-read-zip-info-file',
                               self.source, self.getId())

        except:
            # Log a warning?
            print "%s: Failed to open archive." % self.source
Example 29
    def readMetaData(self):
        """Read the metadata file(s)."""

        # Check if there is an Info lump.
        try:
            z = zipfile.ZipFile(self.source)
            for info in META_NAMES:
                try:
                    self.parseConfiguration(z.read(info))
                except KeyError:
                    pass
                except:
                    logger.add(logger.HIGH, 'error-read-zip-info-file',
                               self.source, self.getId())

        except:
            # Log a warning?
            print "%s: Failed to open archive." % self.source
Example 30
def train(args, data_loader, model, global_stats, logger):
    """Run through one epoch of model training with the provided data loader."""
    # Initialize meters + timers
    ml_loss = AverageMeter()
    perplexity = AverageMeter()
    epoch_time = Timer()

    current_epoch = global_stats['epoch']
    pbar = tqdm(data_loader)

    pbar.set_description("%s" %
                         'Epoch = %d [perplexity = x.xx, ml_loss = x.xx]' %
                         current_epoch)

    # Run one epoch
    for idx, ex in enumerate(pbar):
        bsz = ex['batch_size']
        if args.optimizer in ['sgd', 'adam'
                              ] and current_epoch <= args.warmup_epochs:
            cur_lrate = global_stats['warmup_factor'] * (model.updates + 1)
            for param_group in model.optimizer.param_groups:
                param_group['lr'] = cur_lrate

        net_loss = model.update(ex)
        ml_loss.update(net_loss['ml_loss'], bsz)
        perplexity.update(net_loss['perplexity'], bsz)
        log_info = 'Epoch = %d [perplexity = %.2f, ml_loss = %.2f]' % \
                   (current_epoch, perplexity.avg, ml_loss.avg)

        pbar.set_description("%s" % log_info)
        #break
    kvs = [("perp_tr", perplexity.avg), ("ml_lo_tr", ml_loss.avg),\
               ("epoch_time", epoch_time.time())]
    for k, v in kvs:
        logger.add(current_epoch, **{k: v})
    logger.print(
        'train: Epoch %d | perplexity = %.2f | ml_loss = %.2f | '
        'Time for epoch = %.2f (s)' %
        (current_epoch, perplexity.avg, ml_loss.avg, epoch_time.time()))

    # Checkpoint
    if args.checkpoint:
        model.checkpoint(logger.path + '/best_model.cpt.checkpoint',
                         current_epoch + 1)
Example 31
def send(event):
    """Broadcast an event to all listeners of the appropriate type.
    The event is processed synchronously: all listeners will be
    notified immediately during the execution of this function.

    @param event The event to send.  All listeners will get the same
    event object.
    """
    if areEventsMuted:
        return # Discard.

    global sendDepth, queuedEvents

    sendDepth += 1
    
    # Commands and Notifys go to a different set of listeners.
    if event.myClass == Command:
        listeners = commandListeners
    else:
        listeners = notifyListeners

    # The identifier of the event to be sent.
    sendId = event.getId()
    
    # Always include the unfiltered callbacks.
    callbacks = [c for c in listeners[None]]
    
    if listeners.has_key(sendId):
        # The specialized callbacks.
        callbacks += listeners[sendId]

    #print "Sending " + sendId + ":"
    #print callbacks

    # Send the event to all the appropriate listeners.
    for callback in callbacks:
        try:
            callback(event)
        except Exception, x:
            # Report errors.
            import logger
            logger.add(logger.HIGH, 'error-runtime-exception-during-event',
                       sendId, str(x), logger.formatTraceback())
            logger.show()
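The dispatch logic in send() boils down to a dictionary of callback lists keyed by event identifier, with the None key holding the callbacks that want every event; a stripped-down, self-contained sketch of that lookup (names here are illustrative):
def log_any(event_id):
    print('unfiltered listener saw: %s' % event_id)

def log_quit(event_id):
    print('quit listener saw: %s' % event_id)

listeners = {None: [log_any], 'quit': [log_quit]}

def send(event_id):
    # Unfiltered callbacks always run; specialized ones only for their id.
    callbacks = list(listeners[None])
    if event_id in listeners:
        callbacks += listeners[event_id]
    for callback in callbacks:
        callback(event_id)

send('quit')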
Example 32
def insertPOI(conn, loglevel=0):
  log.add ('start insert poi', level=loglevel, file=file_log)

  cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)

  # timestamp of the most recent object, plus deleted entries
  cur.execute("""
    SELECT MAX(tstamp) as nupd, (SELECT MAX(deleted_at) FROM deleted_entries) as ndel
    FROM
    (
      SELECT MAX(tstamp) as tstamp
        FROM relations
      UNION
      SELECT MAX(updated_at)
        FROM relations
      UNION
      SELECT MAX(tstamp)
        FROM ways
      UNION
      SELECT MAX(updated_at)
        FROM ways
      UNION
      SELECT MAX(tstamp)
        FROM nodes
    ) as t1
  """)
  whereTime = cur.fetchone()
  if not whereTime['ndel']:
    whereTime['ndel'] = datetime.datetime.now()

  catalog(cur, loglevel=loglevel+1)
  addr(cur, loglevel=loglevel+1)
  conn.commit()

  # save the current position
  utils.saveDate(whereTime=whereTime, file=fupd_time, key_config='datepoi')
  # f = open(conf.workactual + fupd_time,'w')
  # f.write(json.dumps(whereTime, default=utils.jsondumps))
  # f.close()

  log.add ('end insert poi', level=loglevel, file=file_log)
Example 33
def insert_poi(conn, log_level=0):
    log.add('start insert poi', level=log_level, file=file_log)

    cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)

    # timestamp of the most recent object, plus deleted entries
    cur.execute("""
    SELECT MAX(tstamp) as nupd, (SELECT MAX(deleted_at) FROM deleted_entries) as ndel
    FROM
    (
      SELECT MAX(tstamp) as tstamp
        FROM relations
      UNION
      SELECT MAX(updated_at)
        FROM relations
      UNION
      SELECT MAX(tstamp)
        FROM ways
      UNION
      SELECT MAX(updated_at)
        FROM ways
      UNION
      SELECT MAX(tstamp)
        FROM nodes
    ) as t1
  """)
    whereTime = cur.fetchone()
    if not whereTime['ndel']:
        whereTime['ndel'] = datetime.datetime.now()

    catalog(cur, loglevel=log_level + 1)
    addr(cur, loglevel=log_level + 1)
    conn.commit()

    # save the current position
    utils.saveDate(whereTime=whereTime, file=fupd_time, key_config='datepoi')
    # f = open(conf.workactual + fupd_time,'w')
    # f.write(json.dumps(whereTime, default=utils.jsondumps))
    # f.close()

    log.add('end insert poi', level=log_level, file=file_log)
Example 34
def main():
    try:
        parser = argparse.ArgumentParser(add_help=True, version='0.1')
        parser.add_argument(
            'action',
            metavar='action',
            type=str,
            choices=['insert', 'update', 'createTree'],
            help='action operations `insert` or `update` or `createTree`')
        args = parser.parse_args()

        log.add('start main', file=file_log)
        conn = psycopg2.connect(host=conf.addrfull_host,
                                database=conf.addrfull_database,
                                user=conf.addrfull_user,
                                password=conf.addrfull_password)

        if args.action == 'insert':
            insert_poi(conn, log_level=1)
        elif args.action == 'update':
            update_poi(conn, log_level=1)
        elif args.action == 'createTree':
            createTree(loglevel=1)
            createListPermalink(loglevel=1)
            findImgMarker(loglevel=1)

        log.add('end main', file=file_log)
    except:
        log.add('! error: ' + str(traceback.format_exc()), file=file_log)
Example 35
def add_tag2book(tagID, bookID):
    """Description """
    c = connection()
    cur = c.cursor()
    cur.execute('SELECT COUNT(*),title FROM "book" WHERE id=?', (bookID, ))
    temp = cur.fetchone()
    bookEXIST = temp[0]
    bookTITLE = temp[1]
    cur.execute('SELECT COUNT(*),name FROM "tag" WHERE id=?', (tagID, ))
    temp = cur.fetchone()
    tagEXIST = temp[0]
    tagNAME = temp[1]
    if (tagEXIST * bookEXIST == 1):
        try:
            cur.execute('INSERT  INTO book_R_tag(id_book,id_tag) VALUES(?,?);',
                        (bookID, tagID))
        except sqlite3.Error as er:
            logger.add('SQLite error: %s' % (' '.join(er.args)), 1)

    if (cur.rowcount == 1):
        logger.add("Tag '%s' added to the book '%s'" % (tagNAME, bookTITLE), 3)
    else:
        logger.add(
            "Tag '%s' already is connected to the book  '%s'" %
            (tagNAME, bookTITLE), 2)
    c.commit()
    c.close()
    return cur.lastrowid
Example 36
def main():
  try:
    # parser = argparse.ArgumentParser(add_help=True, version='0.1')
    parser = argparse.ArgumentParser()
    parser.add_argument('action', metavar='action', type=str, choices=['insert', 'update', 'createTree'], help='action operations `insert` or `update` or `createTree`')
    args = parser.parse_args()

    log.add ('start main', file=file_log)
    conn = psycopg2.connect(host=conf.addrfull_host, database=conf.addrfull_database, user=conf.addrfull_user, password=conf.addrfull_password)
    conn.set_client_encoding('UTF8')



    if args.action == 'insert':
      insertPOI(conn, loglevel = 1)
    elif args.action == 'update':
      updatePOI(conn,  loglevel = 1)
    elif args.action == 'createTree':
      createTree(loglevel = 1)
      findImgMarker(loglevel = 1)




    log.add ('end main', file=file_log)
  except :
    log.add ('! error: '+str(traceback.format_exc()), file=file_log)
Example 37
def main():
  parser = argparse.ArgumentParser(add_help=True, version='0.1')
  parser.add_argument("--noLoad",action="store_true", help="Не загружать файлы и не грузить osmosis-ом",default=False)
  parser.add_argument("--onlyAddr",action="store_true", help="Обработка только адресной информации",default=False)
  parser.add_argument("--onlyPOI",action="store_true", help="Обработка только POI",default=False)
  parser.add_argument("--isAuto",action="store_true", help="Проверка предыдущего завершения, отказ если были ошибки",default=False)
  parser.add_argument('action', metavar='action', type=str, choices=['insert', 'update', 'load-insert', 'load-update', 'install', 'test'], help='action operations `insert` or `update` or `load-insert` or `load-update` or `install`')
  args = parser.parse_args()

  try:
    log.add ('start main', file=file_log)
    
    if args.isAuto:
      controlAuto(loglevel=1)
    
    if args.action == 'insert':
      insert(loglevel = 1, noLoad=args.noLoad, onlyAddr=args.onlyAddr, onlyPOI=args.onlyPOI)
    elif args.action == 'update':
      update(loglevel = 1, noLoad=args.noLoad, onlyAddr=args.onlyAddr)
    elif args.action == 'load-insert':
      load(update = False, loglevel = 1)
    elif args.action == 'load-update':
      load(update = True, loglevel = 1)
    elif args.action == 'install':
      install(loglevel = 1)
    elif args.action == 'test':
      print 'test'
      testStep(loglevel = 1)
    
    controlAuto(isEnd=True, loglevel=1)
    
    log.add ('end main', file=file_log, finish=True)
  except :
    controlAuto(isEnd=True, isError=True, loglevel=1)
    log.add ('! error: '+str(traceback.format_exc()), file=file_log)
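The command-line handling in main() relies on argparse's positional choices to restrict the action; a minimal self-contained sketch of the same setup (without the version= keyword, which newer argparse versions no longer accept, as Example 36 also notes) is:
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--noLoad', action='store_true', default=False,
                    help='do not download files')
parser.add_argument('action', metavar='action', type=str,
                    choices=['insert', 'update', 'load-insert',
                             'load-update', 'install', 'test'],
                    help='operation to perform')
# Parse an example command line instead of sys.argv.
args = parser.parse_args(['--noLoad', 'update'])
print('%s %s' % (args.action, args.noLoad))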
Example 38
def createTree(loglevel=0):
  log.add ('start createTree', level=loglevel, file=file_log)
  def fnChildren(catalog_json, dictionary_json, parent=''):
    data = []
    for icat in catalog_json:
      if len(icat['parent']) == 0:
        icat['parent'] = ['']
      for ipar in icat['parent']:
        if ipar == parent:
          rec={}
          rec['data'] = dictionary_json['catalog'][icat['name']]['name']
          rec['attr'] = {'nclass':icat['name']}
          children = fnChildren(
                          catalog_json=catalog_json,
                          dictionary_json=dictionary_json,
                          parent=icat['name'])
          if (children):
            rec['children'] = children
          data.append(rec)
    data.sort(key=lambda x: x['data'])
    return data
  
  with codecs.open(conf.file_catalog_json, 'rt', encoding='utf-8') as f:
    catalog_json = json.load(f)
  for fd in os.listdir(conf.path_dictionary_json):
    with codecs.open(conf.path_dictionary_json+fd, 'rt', encoding='utf-8') as f:
      fjson = json.load(f)
      if (fjson['language'] == lang[0]):
        dictionary_json = fjson
        break

  datajson = fnChildren(catalog_json=catalog_json,
                        dictionary_json=dictionary_json)

  datajson = [{'data':u'выбрать все', 'attr':{'class':'root', 'nclass':'root'}, 'state':'open', 'children':datajson}]
  
  write_file = open(conf.file_tree_json, "w")
  write_file.write(json.dumps(datajson))
  log.add ('end createTree', level=loglevel, file=file_log)
Example 39
def returnbook(bookID, end=None):
    """Description """
    c = connection()
    cur = c.cursor()
    cur.execute(
        'SELECT * FROM borrowings WHERE id_book=? AND start_date IS NOT NULL AND end_date IS NULL;',
        (bookID, ))
    if (cur.fetchone() is None):
        logger.add(
            "Book with id=%d is not borrowed, it's on the shelf." % (bookID, ),
            1)
        return

    if (end):
        cur.execute(
            'UPDATE borrowings SET end_date=? WHERE id_book=? AND end_date IS NULL;',
            (end, bookID))
    else:
        cur.execute(
            'UPDATE borrowings SET end_date=date("now") WHERE id_book=? AND end_date IS NULL;',
            (bookID, ))
    c.commit()
    logger.add("The book has been returned.", 3)
    c.close()
Example 40
def loadManifest(fileName):
    """Manifests contain metadata for other addons.  A manifest may
    augment or replace the metadata of an existing addon, or define an
    entirely new addon.

    @param fileName  Path of the manifest file.

    @return  Identifier of the addon the manifest is associated with.
    """
    identifier = paths.getBase(fileName)

    if exists(identifier):
        a = get(identifier)
    else:
        # Create a new addon.
        a = ao.Addon(identifier, fileName)
        addons[identifier] = a

    # The manifest contains metadata configuration.
    try:
        a.parseConfiguration(file(fileName).read())

    except Exception, x:
        logger.add(logger.HIGH, 'error-read-manifest', fileName, str(x))
Example 41
def loadManifest(fileName):
    """Manifests contain metadata for other addons.  A manifest may
    augment or replace the metadata of an existing addon, or define an
    entirely new addon.

    @param fileName  Path of the manifest file.

    @return  Identifier of the addon the manifest is associated with.
    """
    identifier = paths.getBase(fileName)

    if exists(identifier):
        a = get(identifier)
    else:
        # Create a new addon.
        a = ao.Addon(identifier, fileName)
        addons[identifier] = a

    # The manifest contains metadata configuration.
    try:
        a.parseConfiguration(file(fileName).read())

    except Exception, x:
        logger.add(logger.HIGH, "error-read-manifest", fileName, str(x))
Example 42
def test_step(log_level=0):
    log_file = 'test.log'
    conn = psycopg2.connect(host=conf.addrfull_host,
                            database=conf.addrfull_database,
                            user=conf.addrfull_user,
                            password=conf.addrfull_password)
    cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
    cur.execute("""SELECT min(id) as min, max(id) as max FROM %(table)s;""",
                {'table': conf.addr_table})
    limit = cur.fetchone()

    limit['max'] = limit['min'] + 300000
    step = 300000

    log.add('start round, step=' + str(step) + '  range=' +
            str(limit['max'] - limit['min']),
            level=log_level,
            file=log_file)  # # # # #
    for n in range(limit['min'], limit['max'], step):
        if limit['min'] >= limit['max']:
            break

        log.add('step = ' + str(n) + ' / ' + str(
            int((float(n) - limit['min']) /
                (limit['max'] - limit['min']) * 100)) + '%',
                level=log_level + 1,
                file=log_file)

        cur = conn.cursor()
        cur.execute(
            """
      UPDATE %(table)s AS search1
        SET
          country=search2.country, country_id=search2.country_id,
          region=search2.region, region_id=search2.region_id,
          district=search2.district, district_id=search2.district_id,
          city=search2.city, city_id=search2.id
        FROM %(table)s AS search2
        WHERE ((search2.geom && search1.geom) AND ST_Covers(search2.geom, search1.geom))
          AND search2.city is not null
          --AND search1.city is null
          --AND (search1.street is not null or search1.housenumber is not null)
          AND search2.addr_type_id = 20 --city
          AND (ST_IsValid(search2.geom))
          AND (ST_IsValid(search1.geom))
          AND (search1.id>=%(min)s AND search1.id<%(max)s)
          AND (search1.member_role = 'outer' OR search1.member_role is null);
    """, {
                'table': conf.addr_table,
                'min': n,
                'max': n + step
            })

    conn.rollback()
    log.add('end round', level=log_level, file=log_file)  # # # # #
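One caveat worth spelling out for test_step(): psycopg2 can only bind values, not identifiers, which is why the table name is interpolated into the SQL text above (the min/max integers are interpolated the same way for simplicity). With psycopg2 >= 2.7 the identifier part can be handled more safely through the psycopg2.sql helpers; a short sketch with a placeholder table name and a commented-out execute:
from psycopg2 import sql  # psycopg2 >= 2.7

# Identifiers go through sql.Identifier; values stay as bound parameters.
query = sql.SQL(
    'SELECT min(id) AS min, max(id) AS max FROM {} WHERE id >= %(min)s'
).format(sql.Identifier('ershkus_search_addr'))
# cur.execute(query, {'min': 0})  # with an open psycopg2 cursor `cur`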
Example 43
def update(loglevel=0, noLoad=False, onlyAddr=False):

  if not noLoad:
    load(update = True, loglevel = loglevel+1)

  conn = psycopg2.connect(host=conf.addrfull_host, database=conf.addrfull_database, user=conf.addrfull_user, password=conf.addrfull_password)

  log.add ('update addr', level=loglevel, file=file_log)
  ershkus_addr.updateAddr(conn, loglevel=loglevel+1)

  if not onlyAddr:
    log.add ('update poi', level=loglevel, file=file_log)
    ershkus_poi.updatePOI(conn, loglevel=loglevel+1)

  if conf.sphinx_reindex:
    log.add ('update sphinx index', level=loglevel, file=file_log)
    cmdrun(cmd=conf.cmdindexrotate, errorText='! error update sphinx index', loglevel=loglevel)
    log.add ('update sphinx index complete', level=loglevel, file=file_log)
Example 44
def insert(loglevel=0, noLoad=False, onlyAddr=False, onlyPOI=False):
  if not noLoad:
    load(update = False, loglevel = loglevel+1)

  conn = psycopg2.connect(host=conf.addrfull_host, database=conf.addrfull_database, user=conf.addrfull_user, password=conf.addrfull_password)
  conn.set_client_encoding('UTF8')

  if not onlyPOI:
    log.add ('insert addr', level=loglevel, file=file_log)
    ershkus_addr.insertAddr(conn, loglevel=loglevel+1)

  if not onlyAddr:
    log.add ('insert poi', level=loglevel, file=file_log)
    ershkus_poi.insertPOI(conn, loglevel=loglevel+1)

  if conf.sphinx_reindex:
    log.add ('update sphinx index', level=loglevel, file=file_log)
    cmdrun(cmd=conf.cmdindexrotate, errorText='! error update sphinx index', loglevel=loglevel)
    log.add ('update sphinx index complete', level=loglevel, file=file_log)
Example 45
def update(log_level=0, load_files=True, only_addr=False):
    if load_files:
        load(to_update=True, log_level=log_level + 1)

    conn = psycopg2.connect(host=conf.addrfull_host,
                            database=conf.addrfull_database,
                            user=conf.addrfull_user,
                            password=conf.addrfull_password)

    log.add('update addr', level=log_level, file=file_log)
    stapio_addr.update_addr(conn, log_level=log_level + 1)

    if not only_addr:
        log.add('update poi', level=log_level, file=file_log)
        stapio_poi.update_poi(conn, log_level=log_level + 1)

    if conf.sphinx_reindex:
        log.add('update sphinx index', level=log_level, file=file_log)
        cmd_run(cmd=conf.cmdindexrotate,
                error_text='! error update sphinx index',
                log_level=log_level)
        log.add('update sphinx index complete', level=log_level, file=file_log)
Example 46
def add_tag(tag):
    """Description """
    c = connection()
    cur = c.cursor()
    try:
        cur.execute('INSERT  INTO tag(name) VALUES(?);', (tag, ))
    except sqlite3.Error as er:
        logger.add('SQLite error: %s' % (' '.join(er.args)), 1)

    if (cur.rowcount == 1):
        logger.add("Tag '%s' added to database %s" % (tag, DB_NAME), 3)
    else:
        logger.add(
            "Tag '%s' already exists in the database %s" % (tag, DB_NAME), 2)
    c.commit()
    c.close()
    return cur.lastrowid
Example 47
def add_author(fname, lname):
    """Description """
    c = connection()
    cur = c.cursor()
    try:
        cur.execute('INSERT  INTO author(first_name,last_name) VALUES(?,?);',
                    (fname, lname))
    except sqlite3.Error as er:
        logger.add('SQLite error: %s' % (' '.join(er.args)), 1)

    if (cur.rowcount == 1):
        logger.add(
            "Author '%s %s' added to database %s" % (fname, lname, DB_NAME), 3)
    else:
        logger.add(
            "Author '%s %s' already exists in the database %s" %
            (fname, lname, DB_NAME), 3)
    c.commit()
    c.close()
    return cur.lastrowid
Example 48
def testStep(loglevel=0):
  fileLog = 'test.log'
  conn = psycopg2.connect(host=conf.addrfull_host, database=conf.addrfull_database, user=conf.addrfull_user, password=conf.addrfull_password)
  conn.set_client_encoding('UTF8')
  cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
  cur.execute("""SELECT min(id) as min, max(id) as max FROM ershkus_search_addr;""")
  limit = cur.fetchone()
  
  
  limit['max'] = limit['min']+300000
  step = 300000

  log.add ('start round, step='+str(step)+'  range='+str(limit['max']-limit['min']), level=loglevel, file=fileLog) # # # # #
  for n in range(limit['min'], limit['max'], step):
    if limit['min'] >= limit['max']:
      break
      
    log.add ('step = '+str(n)+' / '+str(int((float(n)-limit['min'])/(limit['max']-limit['min'])*100))+'%', level=loglevel+1, file=fileLog)
    
    cur = conn.cursor()
    cur.execute("""
      UPDATE ershkus_search_addr AS search1
        SET
          country=search2.country, country_id=search2.country_id,
          region=search2.region, region_id=search2.region_id,
          district=search2.district, district_id=search2.district_id,
          city=search2.city, city_id=search2.id
        FROM ershkus_search_addr AS search2
        WHERE ((search2.geom && search1.geom) AND ST_Covers(search2.geom, search1.geom))
          AND search2.city is not null
          --AND search1.city is null
          --AND (search1.street is not null or search1.housenumber is not null)
          AND search2.addr_type_id = 20 --city
          AND (ST_IsValid(search2.geom))
          AND (ST_IsValid(search1.geom))
          AND (search1.id>=%(min)s AND search1.id<%(max)s)
          AND (search1.member_role = 'outer' OR search1.member_role is null);
    """, {'min': n, 'max': n+step})
      
  conn.rollback()
  log.add ('end round', level=loglevel, file=fileLog) # # # # #
Example 49
def add_person(fname, lname, email=None, phone=None):
    """Description """
    c = connection()
    cur = c.cursor()
    try:
        cur.execute(
            'INSERT  INTO people(first_name,last_name,email,phone) VALUES(?,?,?,?);',
            (fname, lname, email, phone))
    except sqlite3.Error as er:
        logger.add('SQLite error: %s' % (' '.join(er.args)), 1)

    if (cur.rowcount == 1):
        logger.add(
            "Person '%s %s (Email:%s, Phone:%s)' added to database %s" %
            (fname, lname, email, phone, DB_NAME), 3)
    else:
        logger.add(
            "Person '%s %s (Email:%s, Phone:%s)' already exists in the database %s"
            % (fname, lname, email, phone, DB_NAME), 2)
    c.commit()
    c.close()
    return cur.lastrowid
Example 50
def get_ad_user_list_by_login():
	try:
		if config.DEBUG:
			log.add("user_ad_postgres_db.py connect to: dbname='" + config.user_list_db_name + "' user='******' host='" + config.user_list_db_host + "' password='******'")
		conn = psycopg2.connect("dbname='" + config.user_list_db_name + "' user='******' host='" + config.user_list_db_host + "' password='******'")
		cur = conn.cursor()
	except psycopg2.Error as e:
		log.add("user_ad_postgres_db.py I am unable to connect to the database: %s" % e.pgerror);return False
	try:
		sql="""select
			fio,
			name,
			familiya,
			otchestvo,
			login,
			old_login,
			passwd,
			drsk_email,
			drsk_email_passwd,
			rsprim_email,
			rsprim_email_passwd,
			hostname,
			ip,
			os,
			os_version,
			patches,
			doljnost,
			add_time,
			add_ip,
			add_user_name,
			old_familiya
		from ad_users
		"""
		if config.DEBUG:
			log.add("user_ad_postgres_db.py sql=%s" % sql)
		cur.execute(sql)
		data = cur.fetchall()
	except psycopg2.Error as e:
		log.add("user_ad_postgres_db.py I am unable select data from db: %s" % e.pgerror);return False
	user_list={}
	for line in data:
		user={}
		user["fio"]=line[0]
		user["name"]=line[1]
		user["familiya"]=line[2]
		user["otchestvo"]=line[3]
		user["login"]=line[4]
		user["old_login"]=line[5]
		user["passwd"]=line[6]
		user["drsk_email"]=line[7]
		user["drsk_email_passwd"]=line[8]
		user["rsprim_email"]=line[9]
		user["rsprim_email_passwd"]=line[10]
		user["hostname"]=line[11]
		user["ip"]=line[12]
		user["os"]=line[13]
		user["os_version"]=line[14]
		user["patches"]=line[15]
		user["doljnost"]=line[16]
		user["add_time"]=line[17]
		user["add_ip"]=line[18]
		user["add_user_name"]=line[19]
		user["old_familiya"]=line[20]
		user_list[user["login"]]=user
	return user_list
Example 51
def add_ad_user(name, familiya, otchestvo, login, old_login, passwd, drsk_email, drsk_email_passwd, rsprim_email, rsprim_email_passwd, hostname, ip, os, os_version, patches, doljnost, add_ip, add_user_name):
	try:
		if config.DEBUG:
			log.add("user_ad_postgres_db.py connect to: dbname='" + config.user_list_db_name + "' user='******' host='" + config.user_list_db_host + "' password='******'")
		conn = psycopg2.connect("dbname='" + config.user_list_db_name + "' user='******' host='" + config.user_list_db_host + "' password='******'")
		cur = conn.cursor()
	except psycopg2.Error as e:
		log.add("user_ad_postgres_db.py I am unable to connect to the database: %s" % e.pgerror);return STATUS_INTERNAL_ERROR
		
	fio=familiya + " " + name + " " + otchestvo

	# Check whether such a user already exists:
	result=[]
	try:
		sql="""select rsprim_email from ad_users where rsprim_email='%(rsprim_email)s'""" \
			 % {\
				 "rsprim_email":rsprim_email \
			 }
		if config.DEBUG:
			log.add("user_ad_postgres_db.py user_ad_postgres_db.py add_ad_user() exec sql: %s" % sql)
		cur.execute(sql)
		result=cur.fetchall()
	except psycopg2.Error as e:
		log.add("user_ad_postgres_db.py ERROR postgres select: %s" %  e.pgerror)
		return STATUS_INTERNAL_ERROR
	if len(result) > 0:
		# Such an account already exists:
		return STATUS_USER_EXIST

	try:
		sql="""insert into ad_users (
			fio,
			name,
			familiya,
			otchestvo,
			login,
			old_login,
			passwd,
			drsk_email,
			drsk_email_passwd,
			rsprim_email,
			rsprim_email_passwd,
			hostname,
			ip,
			os,
			os_version,
			patches,
			doljnost,
			add_time,
			add_ip,
			add_user_name
		) 
		VALUES (
			'%(fio)s',
			'%(name)s',
			'%(familiya)s',
			'%(otchestvo)s',
			'%(login)s',
			'%(old_login)s',
			'%(passwd)s',
			'%(drsk_email)s',
			'%(drsk_email_passwd)s',
			'%(rsprim_email)s',
			'%(rsprim_email_passwd)s',
			'%(hostname)s',
			'%(ip)s',
			'%(os)s',
			'%(os_version)s',
			'%(patches)s',
			'%(doljnost)s',
			now(),
			'%(add_ip)s',
			'%(add_user_name)s'
		)""" % \
		{\
			"fio":fio,\
			"name":name,\
			"familiya":familiya,\
			"otchestvo":otchestvo,\
			"login":login,\
			"old_login":old_login,\
			"passwd":passwd,\
			"drsk_email":drsk_email,\
			"drsk_email_passwd":drsk_email_passwd,\
			"rsprim_email":rsprim_email,\
			"rsprim_email_passwd":rsprim_email_passwd,\
			"hostname":hostname,\
			"ip":ip,\
			"os":os,\
			"os_version":os_version,\
			"patches":patches,\
			"doljnost":doljnost,\
			"add_ip":add_ip,\
			"add_user_name":add_user_name\
		}
		if config.DEBUG:
			log.add("user_ad_postgres_db.py sql=%s" % sql)
		cur.execute(sql)
		conn.commit()
	except psycopg2.Error as e:
		log.add("user_ad_postgres_db.py I am unable insert data to db: %s" % e.pgerror);return STATUS_INTERNAL_ERROR
	return STATUS_SUCCESS
Example 52
def validate_official(
    args,
    data_loader,
    model,
    global_stats,
    logger,
    mode='dev',
):
    """Run one full validation.
    """
    eval_time = Timer()
    # Run through examples

    global_pred_loc, global_target_loc, is_buggy, global_target_probs, \
    global_correct_fix = None, None, None, None, None
    with torch.no_grad():
        if args.use_tqdm:
            pbar = tqdm(data_loader)
        else:
            pbar = data_loader
        for idx, ex in enumerate(pbar):
            batch_size = ex['batch_size']
            logits_loc, logits_fix = model.predict(ex)
            pred_loc = np.argmax(logits_loc.cpu().numpy(), axis=1) - 1
            pred_fix = np.argmax(logits_fix.cpu().numpy(), axis=1)
            scope_mask = ex["scope_t"]  # batch x seq_len
            logits_fix = logits_fix.masked_fill(~scope_mask, -1e18)
            pointer_probs = F.softmax(logits_fix, dim=1)  # batch x seq_len
            target_mask = ex["fixes_t"]  # batch x seq_len
            target_probs = (target_mask * pointer_probs).sum(dim=-1)  # batch
            target_fix = ex["target_fix"].cpu().numpy()
            correct_fix = target_fix[np.arange(target_fix.shape[0]), pred_fix]
            if global_pred_loc is None:
                global_pred_loc = pred_loc
                global_target_loc = ex["target_pos"].cpu().numpy()
                global_correct_fix = correct_fix
                is_buggy = ex["mask_incorrect"].cpu().numpy()
                global_target_probs = target_probs.cpu().numpy()
            else:
                global_pred_loc = np.hstack((global_pred_loc, pred_loc))
                global_target_loc = np.hstack((global_target_loc,\
                                               ex["target_pos"].cpu().numpy()))
                global_correct_fix = np.hstack(
                    (global_correct_fix, correct_fix))
                is_buggy = np.hstack(
                    (is_buggy, ex["mask_incorrect"].cpu().numpy()))
                global_target_probs = np.hstack((global_target_probs, \
                                                target_probs.cpu().numpy()))
    # Store two metrics: the accuracy at predicting specifically the non-buggy samples correctly (to measure false alarm rate), and the accuracy at detecting the real bugs.
    loc_correct = (global_pred_loc == global_target_loc)
    no_bug_pred_acc = (
        (1 - is_buggy) * loc_correct).sum() / (1e-9 +
                                               (1 - is_buggy).sum()) * 100
    bug_loc_acc = (is_buggy * loc_correct).sum() / (1e-9 +
                                                    (is_buggy).sum()) * 100

    # Version by Hellendoorn et al:
    # To simplify the comparison, accuracy is computed as achieving >= 50% probability for the top guess
    # (as opposed to the slightly more accurate, but hard to compute quickly, greatest probability among distinct variable names).
    fix_correct = (global_target_probs >= 0.5)
    target_fix_acc = (is_buggy * fix_correct).sum() / (1e-9 +
                                                       (is_buggy).sum()) * 100

    joint_acc_bug = (is_buggy * loc_correct *
                     fix_correct).sum() / (1e-9 + (is_buggy).sum()) * 100
    result = dict()
    result['no_bug_pred_acc'] = no_bug_pred_acc
    result['bug_loc_acc'] = bug_loc_acc
    result['bug_fix_acc'] = target_fix_acc
    result['joint_acc_bug'] = joint_acc_bug
    result["ev_time"] = eval_time.time()
    logger.add(global_stats['epoch'], **result)

    logger.print("%s valid official: " % mode +
                 "no_bug_pred_acc = %.2f | bug_loc_acc = %.2f " %
                 (no_bug_pred_acc, bug_loc_acc) +
                 "target_fix_acc = %.2f | joint_acc_bug = %.2f " %
                 (target_fix_acc, joint_acc_bug) +
                 'test time = %.2f (s)' % eval_time.time())

    gc.collect()

    return result
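
The comment above (Hellendoorn et al.) counts a fix as correct when the pointer assigns at least 50% probability to the acceptable target positions. A small standalone illustration of how that threshold combines with the bug mask into the reported metrics, using made-up arrays instead of model output:

import numpy as np

# Toy values, not model output: 4 examples, the last two actually contain a bug.
is_buggy            = np.array([0, 0, 1, 1])
loc_correct         = np.array([1, 0, 1, 1])    # predicted bug location == target location
global_target_probs = np.array([0.9, 0.2, 0.7, 0.4])

fix_correct     = (global_target_probs >= 0.5)  # the >= 50% rule
no_bug_pred_acc = ((1 - is_buggy) * loc_correct).sum() / (1e-9 + (1 - is_buggy).sum()) * 100
bug_fix_acc     = (is_buggy * fix_correct).sum() / (1e-9 + is_buggy.sum()) * 100
joint_acc_bug   = (is_buggy * loc_correct * fix_correct).sum() / (1e-9 + is_buggy.sum()) * 100
print(no_bug_pred_acc, bug_fix_acc, joint_acc_bug)  # each evaluates to roughly 50.0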
Esempio n. 53
0
def load(update, today=False, loglevel=0):
  log.add (('load start (update=%s)' % update), level=loglevel, file=file_log)
  file={'temp':conf.tempdir, 'authFileOsmosis':conf.authFileOsmosis}
  if not os.path.exists('data'):
    os.mkdir('data')
  i=0
  if update:
    file['name_d'] = conf.workdir + 'data/load%s.osc.gz'
    file['name_e'] = conf.workdir + 'data/load%se.osc'
    file['url_list'] = conf.urlmaskosc
    file['osmosis_read'] = 'read-xml-change'
    file['osmosis_merge'] = 'merge-change --sort-change'
    file['osmosis_write'] = 'write-xml-change'
    file['osmosis_writedb'] = 'write-pgsql-change'

    # load the previous position
    f = open(conf.workactual + 'upd_date.dat','r')
    file['date_s'] = datetime.datetime.strptime(f.readline(), conf.format_datetime)
    f.close()
    
    file['date_e'] = file['date_s'] + datetime.timedelta(days=1)
    file['daystart'] = file['date_s'].strftime("%y%m%d")
    file['dayend'] = file['date_e'].strftime("%y%m%d")
  else:
    file['name_d'] = conf.workdir + 'data/load%s.pbf'
    file['name_e'] = conf.workdir + 'data/load%se.pbf'
    file['url_list'] = conf.urlpbf
    file['osmosis_read'] = 'read-pbf'
    file['osmosis_merge'] = 'merge --sort'
    file['osmosis_write'] = 'write-pbf'
    file['osmosis_writedb'] = 'write-pgsql'

  info = {'load':False, 'next_load':True}
  if not update:
    urllib.urlretrieve(conf.urlpbfmeta, conf.workdir + "data/load.pbf.meta")
  while info['next_load']:
    if update:
      log.add ('load date at ' + file['date_s'].strftime(conf.format_datetime), level=loglevel, file=file_log)
    for url_file in file['url_list']:
      i += 1
      file['end'] = (file['name_e'] % i)
      log.add (('load, i=%s' % i), level=loglevel+1, file=file_log)
      file['now'] = (file['name_d'] % i)
      url_file = url_file % file
      try:
        asock = urllib2.urlopen(url_file)
      except urllib2.HTTPError, e:
        if e.code == 404:
          info['next_load'] = False
          file['date_e'] = file['date_s'] - datetime.timedelta(days=1)
          break
        log.add (('! error download (code=%s)' % e.code), level=loglevel+1, file=file_log)
        raise e
      print url_file
      urllib.urlretrieve(url_file, file['now'])
      if update:
        log.add ('decompress', level=loglevel+1, file=file_log)
        cmdrun(cmd=('gzip -df '+file['now']), errorText=('! error decompress, i=%s' % i), loglevel=loglevel+1)
        file['now'] = file['now'][:-3]
      if i == 1:
        file['in'] = file['now']
        continue
      log.add (('merge, i=%s' % i), level=loglevel+1, file=file_log)
      file['n'] = file['now']
      cmd  = 'osmosis -quiet --%(osmosis_read)s file=%(in)s '
      cmd += '--%(osmosis_read)s file=%(n)s  --%(osmosis_merge)s '
      cmd += '--%(osmosis_write)s file=%(end)s'
      cmd = cmd % file
      # print cmd
      cmdrun(cmd, errorText=('! error merge, i=%s' % i), loglevel=loglevel+1)

      file['in'] = file['end']
      
    if info['next_load']:
      info['load'] = True
    if update:
      file['date_s'] = file['date_e']
      file['date_e'] = file['date_s'] + datetime.timedelta(days=1)
      file['daystart'] = file['date_s'].strftime("%y%m%d")
      file['dayend'] = file['date_e'].strftime("%y%m%d")
    else:
      info['next_load'] = False
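
Every osmosis invocation in this script is built by %-formatting a command template against the `file` dict, so the same loop body serves both the .osc update path and the .pbf full-load path. A self-contained sketch of that templating step with invented paths (only the pattern is taken from the code above):

# Stand-in values; in the real script these come from conf and the loop counter i.
file = {
    'osmosis_read':  'read-xml-change',
    'osmosis_merge': 'merge-change --sort-change',
    'osmosis_write': 'write-xml-change',
    'in':  '/tmp/load1e.osc',   # result of the previous merge
    'n':   '/tmp/load2.osc',    # file downloaded in this iteration
    'end': '/tmp/load2e.osc',   # merge output, becomes 'in' next time
}

cmd  = 'osmosis -quiet --%(osmosis_read)s file=%(in)s '
cmd += '--%(osmosis_read)s file=%(n)s  --%(osmosis_merge)s '
cmd += '--%(osmosis_write)s file=%(end)s'
print(cmd % file)
# osmosis -quiet --read-xml-change file=/tmp/load1e.osc --read-xml-change file=/tmp/load2.osc  --merge-change --sort-change --write-xml-change file=/tmp/load2e.osc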
Esempio n. 54
0
      file['date_s'] = file['date_e']
      file['date_e'] = file['date_s'] + datetime.timedelta(days=1)
      file['daystart'] = file['date_s'].strftime("%y%m%d")
      file['dayend'] = file['date_e'].strftime("%y%m%d")
    else:
      info['next_load'] = False
      
  if not info['load']:
    raise Exception('no load from pbf/osc')
  
  conn = psycopg2.connect(host=conf.addrfull_host, database=conf.addrfull_database, user=conf.addrfull_user, password=conf.addrfull_password)
  if not update:
    pbfmeta = ConfigParser.RawConfigParser()
    pbfmeta.read(conf.workdir + 'data/load.pbf.meta')
    file['date_e'] = datetime.datetime.strptime(pbfmeta.get('DEFAULT', 'version'), '%Y-%m-%d %H:%M:%S')
    log.add ('pbf at ' + file['date_e'].strftime(conf.format_datetime), level=loglevel, file=file_log)
    log.add ('clear db', level=loglevel, file=file_log)
    cur = conn.cursor()
    cur.execute("""
      TRUNCATE TABLE nodes;
      TRUNCATE TABLE relation_members;
      TRUNCATE TABLE relations;
      TRUNCATE TABLE users;
      TRUNCATE TABLE way_nodes;
      TRUNCATE TABLE ways;
      TRUNCATE TABLE deleted_entries;
    """)
    conn.commit()

  log.add ('load in db', level=loglevel, file=file_log)
  cmd = 'osmosis -quiet --%(osmosis_read)s file=%(in)s '
Esempio n. 55
0
def load(update, today=False, loglevel=0):
  log.add (('load start (update=%s)' % update), level=loglevel, file=file_log)
  file={'temp':conf.tempdir, 'authFileOsmosis':conf.authFileOsmosis}
  if not os.path.exists('data'):
    os.mkdir('data')
  i=0
  if update:
    file['name_d'] = conf.workdir + 'data/load%s.osc.gz'
    file['name_e'] = conf.workdir + 'data/load%se.osc'
    file['url_list'] = conf.urlmaskosc
    file['osmosis_read'] = 'read-xml-change'
    file['osmosis_merge'] = 'merge-change --sort-change'
    file['osmosis_write'] = 'write-xml-change'
    file['osmosis_writedb'] = 'write-pgsql-change'

    # load the previous position
    f = open(conf.workactual + 'upd_date.dat','r')
    file['date_s'] = datetime.datetime.strptime(f.readline(), conf.format_datetime)
    f.close()
    
    file['date_e'] = file['date_s'] + datetime.timedelta(days=1)
    file['daystart'] = file['date_s'].strftime("%y%m%d")
    file['dayend'] = file['date_e'].strftime("%y%m%d")
  else:
    file['name_d'] = conf.workdir + 'data/load%s.pbf'
    file['name_e'] = conf.workdir + 'data/load%se.pbf'
    file['url_list'] = conf.urlpbf
    file['osmosis_read'] = 'read-pbf'
    file['osmosis_merge'] = 'merge --sort'
    file['osmosis_write'] = 'write-pbf'
    file['osmosis_writedb'] = 'write-pgsql'

  info = {'load':False, 'next_load':True}
  # meta download disabled
  # if not update:
  #   urllib.urlretrieve(conf.urlpbfmeta, conf.workdir + "data/load.pbf.meta")
  while info['next_load']:
    if update:
      log.add ('load date at ' + file['date_s'].strftime(conf.format_datetime), level=loglevel, file=file_log)
    for url_file in file['url_list']:
      i += 1
      file['end'] = (file['name_e'] % i)
      log.add (('load, i=%s' % i), level=loglevel+1, file=file_log)
      file['now'] = (file['name_d'] % i)
      url_file = url_file % file
      # try:
        # asock = urllib2.urlopen(url_file)
      # except urllib2.HTTPError, e:
        # if e.code == 404:
          # info['next_load'] = False
          # file['date_e'] = file['date_s'] - datetime.timedelta(days=1)
          # break
        # log.add (('! error download (code=%s)' % e.code), level=loglevel+1, file=file_log)
        # raise e
      # print url_file
      urllib.request.urlretrieve(url_file, file['now'])
      if update:
        log.add ('decompress', level=loglevel+1, file=file_log)
        cmdrun(cmd=('gzip -df '+file['now']), errorText=('! error decompress, i=%s' % i), loglevel=loglevel+1)
        file['now'] = file['now'][:-3]
      if i == 1:
        file['in'] = file['now']
        continue
      log.add (('merge, i=%s' % i), level=loglevel+1, file=file_log)
      file['n'] = file['now']
      cmd  = 'osmosis -quiet --%(osmosis_read)s file=%(in)s '
      cmd += '--%(osmosis_read)s file=%(n)s  --%(osmosis_merge)s '
      cmd += '--%(osmosis_write)s file=%(end)s'
      cmd = cmd % file
      # print cmd
      cmdrun(cmd, errorText=('! error merge, i=%s' % i), loglevel=loglevel+1)

      file['in'] = file['end']
      
    if info['next_load']:
      info['load'] = True
    if update:
      file['date_s'] = file['date_e']
      file['date_e'] = file['date_s'] + datetime.timedelta(days=1)
      file['daystart'] = file['date_s'].strftime("%y%m%d")
      file['dayend'] = file['date_e'].strftime("%y%m%d")
    else:
      info['next_load'] = False
      
  if not info['load']:
    raise Exception('no load from pbf/osc')
  
  conn = psycopg2.connect(host=conf.addrfull_host, database=conf.addrfull_database, user=conf.addrfull_user, password=conf.addrfull_password)
  conn.set_client_encoding('UTF8')
  if not update:
    # meta parsing disabled
    # pbfmeta = ConfigParser.RawConfigParser()
    # pbfmeta.read(conf.workdir + 'data/load.pbf.meta')
    # file['date_e'] = datetime.datetime.strptime(pbfmeta.get('DEFAULT', 'version'), '%Y-%m-%d %H:%M:%S')
    # log.add ('pbf at ' + file['date_e'].strftime(conf.format_datetime), level=loglevel, file=file_log)
    log.add ('clear db', level=loglevel, file=file_log)
    cur = conn.cursor()
    cur.execute("""
      TRUNCATE TABLE nodes;
      TRUNCATE TABLE relation_members;
      TRUNCATE TABLE relations;
      TRUNCATE TABLE users;
      TRUNCATE TABLE way_nodes;
      TRUNCATE TABLE ways;
      TRUNCATE TABLE deleted_entries;
    """)
    conn.commit()

  log.add ('load in db', level=loglevel, file=file_log)
  
  #cmd = 'export JAVACMD_OPTIONS="-Djava.io.tmpdir=' + conf.workdir + 'tmp" && '
  #cmd = 'export JAVA_OPTIONS="-Xmx8G -server" && '
  cmd = ''
  
  cmd += 'osmosis -quiet --%(osmosis_read)s file=%(in)s '
  # cmd += '--buffer --%(osmosis_writedb)s authFile=%(authFileOsmosis)s nodeLocationStoreType=TempFile validateSchemaVersion=no'
  cmd += '--%(osmosis_writedb)s authFile=%(authFileOsmosis)s nodeLocationStoreType=TempFile validateSchemaVersion=no'
  cmd = cmd % file
  # log.add ('cmd: ' + cmd, level=loglevel, file=file_log)
  cmdrun(cmd, errorText='! error load in db', loglevel=loglevel)

  # save the current position
  # disabled together with meta
  # log.add ('save date', level=loglevel, file=file_log)
  # f = open(conf.workactual + 'upd_date.dat','w')
  # f.write(file['date_e'].strftime(conf.format_datetime))
  # f.close()

  log.add ('load complete', level=loglevel, file=file_log)
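
In the update branch, the download window advances one day per iteration: `date_s` is read from `upd_date.dat`, `date_e` is one day later, and the %y%m%d-formatted pair is substituted into the OSC URL mask. A small standalone sketch of that date bookkeeping, with an invented URL mask in place of `conf.urlmaskosc`:

import datetime

# Invented mask; the real one comes from conf.urlmaskosc and may differ.
url_mask = 'https://example.org/replication/%(daystart)s-%(dayend)s.osc.gz'

date_s = datetime.datetime(2023, 5, 1)    # normally parsed from upd_date.dat
for _ in range(3):                        # three daily increments
    date_e = date_s + datetime.timedelta(days=1)
    window = {'daystart': date_s.strftime('%y%m%d'),
              'dayend':   date_e.strftime('%y%m%d')}
    print(url_mask % window)
    date_s = date_e                       # same advance as at the end of the while loop
# https://example.org/replication/230501-230502.osc.gz
# https://example.org/replication/230502-230503.osc.gz
# https://example.org/replication/230503-230504.osc.gz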