Example #1
def parseSolConfig():
    """Parses the solenopsis config file"""
    if not hasConfigFile():
        logger.critical('Unable to open solenopsis config file "%s"' % (solenopsis_path,))
        sys.exit(-1)

    try:
        config = ConfigParser.ConfigParser()
        config.readfp(FakeSecHead(open(os.path.expanduser(getDefaultConfig()))))

        setHome(config.get('section', 'solenopsis.env.HOME'))
        setMaster(config.get('section', 'solenopsis.env.MASTER'))

        if getDependent() is None:
            setDependent(config.get('section', 'solenopsis.env.DEPENDENT'))

        raw_config = {}

        for (name, value) in config.items('section'):
            raw_config[name.lower()] = value

        fname = os.path.expanduser('~/.solenopsis/environments/' + getDependent() + '.properties')
        if os.path.isfile(fname):
            config.readfp(FakeSecHead(open(fname)))
            for (name, value) in config.items('section'):
                raw_config[name.lower()] = value

        setRawConfig(raw_config)
    except:
        logger.critical('Unable to parse config file')
        sys.exit(-1)
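Note: the config.readfp(FakeSecHead(open(...))) calls above rely on a small helper that fakes an INI section header, because ConfigParser cannot parse a sectionless .properties file. A minimal sketch of such a helper (an assumption; the actual Solenopsis class may differ):

# Sketch of the FakeSecHead wrapper assumed by the example above (hypothetical;
# the real implementation may differ). It prepends a fake "[section]" header so
# ConfigParser.readfp() can parse a sectionless .properties file.
class FakeSecHead(object):
    def __init__(self, fp):
        self.fp = fp
        self.sechead = '[section]\n'

    def readline(self):
        # Return the fake header exactly once, then read from the wrapped file.
        if self.sechead:
            try:
                return self.sechead
            finally:
                self.sechead = None
        return self.fp.readline()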
Example #2
 def load_protocol(self):
     """ hook a protocol module into the framework """
     
     modu = var.c.get('uplink', 'protocol')
     with warnings.catch_warnings():
         warnings.simplefilter('ignore')
         self.mod = load_source(modu, modu)
     self.protocol = self.mod.Protocol()
     if not hasattr(self.protocol, 'negotiate'):
         logger.critical('protocol: invalid protocol module: missing negotiation block')
         exit(1)
     if not hasattr(self.protocol, 'parse'):
         logger.critical('protocol: invalid protocol module: missing parse block')
         exit(1)
     if not hasattr(self.protocol, 'introduce'):
         logger.critical('protocol: invalid protocol module: missing introduce block')
         exit(1)
     if not hasattr(self.protocol, 'protocol_init'):
         logger.critical('protocol: invalid protocol module: missing protocol_init block')
         exit(1)
     if not hasattr(self.protocol, 'protocol_close'):
         logger.critical('protocol: invalid protocol module: missing protocol_close block')
         exit(1)
     self.protocol.protocol_init()
     logger.info('protocol: loaded %s' % (self.mod.__name__))
     self.proto_loaded = True
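load_protocol() above only checks that the module's Protocol class exposes negotiate, parse, introduce, protocol_init and protocol_close. A hypothetical skeleton that would pass those checks (method signatures are assumptions, since the example only tests attribute presence):

class Protocol(object):
    """Hypothetical minimal protocol module accepted by load_protocol()."""

    def negotiate(self):
        pass  # begin the uplink handshake

    def parse(self, line):
        pass  # handle one line received from the uplink

    def introduce(self):
        pass  # introduce this server/its clients to the uplink

    def protocol_init(self):
        pass  # called once right after loading

    def protocol_close(self):
        pass  # called on shutdown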
Example #3
def main():
    logger.info(f"Starting '{{ cookiecutter.project_name }}' {settings.version}")
    logger.debug("this is a debugging message")
    logger.info("this is an informational message")
    logger.warning("this is a warning message")
    logger.error("this is an error message")
    logger.critical("this is a critical message")
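These snippets assume a module-level logger has already been configured elsewhere. A typical stdlib setup (a sketch, not the actual configuration used by these projects) would be:

import logging

# Minimal logging setup so calls like the ones above produce output; the
# projects shown here configure their own handlers and formats elsewhere.
logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s %(levelname)s %(name)s: %(message)s')
logger = logging.getLogger(__name__)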
Example #4
 def __load_db_categories(self):
     result = self.__database.query("SELECT id, title FROM categories")
     if not result[0]:
         critical(self.__database.error())
         exit(1)
     for cat_id, title in result[1]:
         self.__db_categories[cat_id] = title
Example #5
def _run_manifest(jobs: List[Job], job_config: JobConfiguration):
    """
    End-to-end run mode. 
    - foreach entry in the manifest, submit a job using the pre-defined template
    """
    try:
        abs_path = os.path.abspath(
            job_config.template_settings.parameter_mapper_file_path)
        logger.debug("loading parameter mapper module: %s", abs_path)

        # dynamically load the custom module for mapping job parameters
        spec = importlib.util.spec_from_file_location(
            job_config.template_settings.mapper_module_name, abs_path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)

        # create an instance of the mapper and execute the map_parameters method
        mapper = module.ParameterMapper()
    except Exception as ex:
        logger.critical("failed to dynamically load the parameter mapper: %s",
                        ex)
        raise

    logger.debug("about to submit '{}' jobs to '{}'".format(
        len(jobs), job_config.batch_client.config.base_url))
    for job in jobs:
        try:
            _submit_job(job, job_config, mapper)
        except Exception:
            logger.error("caught exception while processing job: %s", job.job_id)
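_run_manifest() loads the mapper module dynamically and expects it to define a ParameterMapper class whose map_parameters method is invoked per manifest entry, per the comments above. A hypothetical module satisfying that contract, with an assumed method signature and field names:

# Hypothetical parameter mapper module, loaded via importlib in _run_manifest().
# The class name ParameterMapper comes from the example above; the name and
# signature of map_parameters, and the manifest field names, are assumptions.
class ParameterMapper(object):
    def map_parameters(self, manifest_entry):
        # Translate one manifest entry into the parameters of the job template.
        return {
            'input_file': manifest_entry.get('input'),
            'output_container': manifest_entry.get('output'),
        }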
Example #6
def parseSolConfig():
    """Parses the solenopsis config file"""
    if not hasConfigFile():
        logger.critical('Unable to open solenopsis config file "%s"' %
                        (solenopsis_path, ))
        sys.exit(-1)

    try:
        config = ConfigParser.ConfigParser()
        config.readfp(FakeSecHead(open(os.path.expanduser(
            getDefaultConfig()))))

        setHome(config.get('section', 'solenopsis.env.HOME'))
        setMaster(config.get('section', 'solenopsis.env.MASTER'))
        setDependent(config.get('section', 'solenopsis.env.DEPENDENT'))

        raw_config = {}

        for (name, value) in config.items('section'):
            raw_config[name.lower()] = value

        setRawConfig(raw_config)
    except:
        logger.critical('Unable to parse config file')
        sys.exit(-1)
Example #7
def generateFile(fname, params):
    """Returns the generated file body

    fname - The template file name to read from
    params - The params passed in
            [1] label/name
            [2] object name
    """
    file_path = '%s/%s' % (getTemplateDir(), fname,)
    try:
        f = open(file_path, 'r')
    except:
        logger.critical('Unable to open %s' % (file_path,))
        sys.exit(-1)

    template = Template(f.read())

    f.close()

    body = template.safe_substitute(name=params[1], label=params[1],
                    api_version=getApiVersion())

    if (len(params) == 3):
        template = Template(body)
        body = template.safe_substitute(object=params[2])

    return body
Example #8
def filePush(fileList):
    """Pushes individual files to SFDC

    fileList - An array of file names to push
    """
    if len(fileList) == 0:
        logger.critical('No files listed to push')
        sys.exit(-1)

    file_list = ''

    for fname in fileList:
        file_path = os.path.join(os.path.expanduser(getRootDir()), fname)
        if os.path.exists(file_path):
            file_list = "%s%s%s" % (
                file_list,
                fname,
                os.pathsep,
            )
        else:
            logger.warning('Unable to find file "%s".  Skipping.' %
                           (file_path, ))

    if file_list != '':
        # strip the trailing path separator
        file_list = file_list.rstrip(os.pathsep)
        addFlag('%s=\'%s\'' % (
            'sf.files2push',
            file_list,
        ))
        print getFlags()
        runAnt('file-push')
    else:
        logger.critical('Unable to find any files to push.')
        sys.exit(-1)
Example #9
 def _parse_num_results(self):
     # try to get the number of results for our search query
     try:
         self.SEARCH_RESULTS['num_results_for_kw'] = \
             self.dom.xpath(self._xp('div#resultStats'))[0].text_content()
     except Exception as e:
         logger.critical(e)
Example #10
def filePush(fileList):
    """Pushes individual files to SFDC

    fileList - An array of file names to push
    """
    if len(fileList) == 0:
        logger.critical('No files listed to push')
        sys.exit(-1)

    file_list = ''

    for fname in fileList:
        file_path = os.path.join(os.path.expanduser(getRootDir()), fname)
        if os.path.exists(file_path):
            file_list = "%s%s%s" %(file_list, fname, os.pathsep,)
        else:
            logger.warning('Unable to find file "%s".  Skipping.' % (file_path,))

    if file_list != '':
        # strip the trailing path separator
        file_list = file_list.rstrip(os.pathsep)
        addFlag('%s=\'%s\'' % ('sf.files2push', file_list,))
        print getFlags()
        runAnt('file-push')
    else:
        logger.critical('Unable to find any files to push.')
        sys.exit(-1)
Example #11
def newConfig(name, root_path):
    """Creates a new solenopsis config file

    name - The name of the base environment
    root_path - The root path for where the SFDC data lives"""
    solenopsis_path = os.path.expanduser(getDefaultConfig())
    home_path = os.path.expanduser(getHome())
    if os.path.exists(solenopsis_path) and not isForced():
        logger.critical('Solenopsis config file already exists "%s"' % (solenopsis_path,))
        sys.exit(-1)

    try:
        f = open(solenopsis_path, 'w')
        f.write("%s=%s %s\n" % (CONFIG_NAMES["env"], name, 'local',))
        f.write("%s=%s\n" % (CONFIG_NAMES["home"], home_path,))
        f.write("%s=%s\n" % (CONFIG_NAMES["master"], 'local',))
        f.write("%s=%s\n" % (CONFIG_NAMES["dependent"], name,))
        f.write("%s=%s\n" % (CONFIG_NAMES["local-home"], root_path,))
        f.close()

        cred_path = os.path.expanduser(os.path.join(getHome(), 'credentials'))
        env_path = os.path.expanduser(os.path.join(getHome(), 'env'))

        if not os.path.exists(home_path):
            os.mkdir(home_path)

        if not os.path.exists(cred_path):
            os.mkdir(cred_path)

        if not os.path.exists(env_path):
            os.mkdir(env_path)
    except:
        logger.critical('An error occurred trying to write to "%s"' % (solenopsis_path,))
        sys.exit(-1)
Example #12
def demo():
    logger.info(f"Starting 'Name of the project' {settings.version}")
    logger.debug("this is a debugging message")
    logger.info("this is an informational message")
    logger.warning("this is a warning message")
    logger.error("this is an error message")
    logger.critical("this is a critical message")
Example #13
def main():
    logger.info(' ')
    logger.info('NEW EXECUTION Printing ' + wlan_interface + ' probe:')
    wlan_probe = pynmcli.get_data(pynmcli.NetworkManager.Device().wifi().execute())
    ssid_in_use, ssid_in_use_priority, available_ssid_priority, available_ssid = check_connection_priority(wlan_probe)
    logger.info(pynmcli.NetworkManager.Device().wifi().execute())

    if ssid_in_use is None and available_ssid is None:
        logger.critical('None of the requested SSIDs is available: %s', ' '.join(list(ssids)))
        exit(1)

    if ping.is_connected(wlan_interface):
        logger.info('Already connected to %s, which has priority %s', ssid_in_use, ssid_in_use_priority)
        if available_ssid_priority < ssid_in_use_priority:
            logger.info('Found a more preferred wlan available: %s, which has priority %s',
                        available_ssid, available_ssid_priority)
            logger.info('Reconnecting...')
            connect(available_ssid)
        else:
            logger.info('Already connected to the best available wlan. Nothing to do...')
    else:
        connect(available_ssid)
        logger.info('Connected to %s, which has priority %s', available_ssid, available_ssid_priority)

    subprocess.run(['iwconfig', wlan_interface, 'power', 'off'])
    start_vnc()
Example #14
 def __init__(self):
     self.c = ConfigParser.ConfigParser()
     ck = self.c.read('etc/labere.conf')
     if not ck:
         logger.critical('labere configuration file is non-existent!')
         logger.critical('please rename etc/labere.conf.example to' + \
                        ' etc/labere.conf and configure it.')
         exit(1)
Example #15
    def abbyy(self, text):
        try:
            parsing_result = json.loads(text)['Translation']['Translation']
        except Exception:
            logger.critical(errors.parse_error['error'])

            parsing_result = errors.unspecified_error

        return parsing_result
Example #16
    def yandex(self, text):
        try:
            parsing_result = json.loads(text)['languageCode']
        except Exception:
            logger.critical('Response parsing failed')

            parsing_result = errors.unspecified_error

        return parsing_result
Example #17
    def google(self, text):
        try:
            parsing_result = json.loads(text)['data']['translations'][0]['translatedText']
        except Exception:
            logger.critical(errors.parse_error['error'])

            parsing_result = errors.unspecified_error

        return parsing_result
Example #18
    def google(self, text):
        try:
            parsing_result = json.loads(text)['data']['detections'][0][0]['language']
        except Exception:
            logger.critical('Response parsing failed')

            parsing_result = errors.unspecified_error

        return parsing_result
Example #19
 def __error(self):
     if self.__err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
         critical('Something is wrong with your user name or password')
         exit(1)
     elif self.__err.errno == errorcode.ER_BAD_DB_ERROR:
         critical('Database does not exist')
         exit(1)
     else:
         error(self.__err)
Example #20
def _handle_error(e, k, _k, row, index, dt, tablename, exit_on_error):
    # details = {"k": k, "_k": _k, "error_type": type(e), "error": e}
    # logger.error(False, '', '', details)

    # logger.error(False, "row: %s" % row)
    logger.error(False, "row#: %s, col: %s, type: %s, value: %s" % (index, k, dt, row[k]))

    if exit_on_error:
        logger.critical(True, "exit_on_error for row is true, exiting!")
        sys.exit(1)
Example #21
def create_table_user_statistics():
    try:
        cursor.execute("""CREATE TABLE IF NOT EXISTS statistic
            (user_id INT UNIQUE NOT NULL,
            requests_count INT DEFAULT 0,
            symbols_count INT DEFAULT 0);""")
    except Exception as error:
        logger.critical(f'Error creating the statistics table.\n{error}')

    connection.commit()
Example #22
 def support_check(self):
     # check whether this file is a supported (PE) format
     with open(self.FILE, 'rb') as f:
         f.seek(0)
         magic = f.read(2)
     if magic != b'\x4d\x5a':
         logger.critical("This is not a PE file")
         sys.exit(1)
     else:
         return True
Example #23
def refresh_abbyy_api_token():

    new_token = get_abbyy_api_token()

    if utils.is_response_failed(new_token):
        logger.critical(new_token['error'])
    else:
        config.ABBYY_API_TOKEN = new_token

        logger.info('Token refreshed successfully')
Example #24
def is_response_not_ok(response):
    if not response.ok:
        status_code = str(response.status_code)
        text = str(response.text)

        logger.critical(
            f'Received a response with status code {status_code} and body:\n{text}')

        return True

    return False
Example #25
def create_table_users():
    try:
        cursor.execute("""CREATE TABLE IF NOT EXISTS USERS
            (USER_ID INT UNIQUE NOT NULL,
            SELECTED_LANGUAGE_PAIR TEXT,
            IS_PREMIUM_USER BOOLEAN NOT NULL DEFAULT FALSE,
            PREMIUM_EXPIRED_DATE INT,
            CREATED_AT TIMESTAMP DEFAULT CURRENT_TIMESTAMP);""")
    except Exception as error:
        logger.critical(f'Error creating the users table.\n{error}')

    connection.commit()
Example #26
def _handle_error(e, k, _k, row, index, dt, tablename, exit_on_error):
    # details = {"k": k, "_k": _k, "error_type": type(e), "error": e}
    # logger.error(False, '', '', details)

    # logger.error(False, "row: %s" % row)
    logger.error(
        False,
        "row#: %s, col: %s, type: %s, value: %s" % (index, k, dt, row[k]))

    if exit_on_error:
        logger.critical(True, "exit_on_error for row is true, exiting!")
        sys.exit(1)
Example #27
 def __load_db_channels(self):
     result = self.__database.query("SELECT * FROM channels")
     if not result[0]:
         critical(self.__database.error())
         exit(1)
     for channel_id, title, lang, icon in result[1]:
         self.__db_channels[channel_id] = {
             'title': title,
             'lang': lang,
             'icon': icon,
             'delete': True
         }
Example #28
def newConfig(name, root_path):
    """Creates a new solenopsis config file

    name - The name of the base environment
    root_path - The root path for where the SFDC data lives"""
    solenopsis_path = os.path.expanduser(getDefaultConfig())
    home_path = os.path.expanduser(getHome())
    if os.path.exists(solenopsis_path) and not isForced():
        logger.critical('Solenopsis config file already exists "%s"' %
                        (solenopsis_path, ))
        sys.exit(-1)

    try:
        f = open(solenopsis_path, 'w')
        f.write("%s=%s %s\n" % (
            CONFIG_NAMES["env"],
            name,
            'local',
        ))
        f.write("%s=%s\n" % (
            CONFIG_NAMES["home"],
            home_path,
        ))
        f.write("%s=%s\n" % (
            CONFIG_NAMES["master"],
            'local',
        ))
        f.write("%s=%s\n" % (
            CONFIG_NAMES["dependent"],
            name,
        ))
        f.write("%s=%s\n" % (
            CONFIG_NAMES["local-home"],
            root_path,
        ))
        f.close()

        cred_path = os.path.expanduser(os.path.join(getHome(), 'credentials'))
        env_path = os.path.expanduser(os.path.join(getHome(), 'env'))

        if not os.path.exists(home_path):
            os.mkdir(home_path)

        if not os.path.exists(cred_path):
            os.mkdir(cred_path)

        if not os.path.exists(env_path):
            os.mkdir(env_path)
    except:
        logger.critical('An error occurred trying to write to "%s"' %
                        (solenopsis_path, ))
        sys.exit(-1)
Example #29
def send_request(method, url, query_parameters, headers, payload):
    response = None
    try:
        response = requests.request(
            method=method,
            url=url,
            params=query_parameters,
            headers=headers,
            json=payload,
        )
    except Exception:
        logger.critical('Request failed')

    return response
Example #30
def check_user_key(api_key, context):
    try:
        session = SESSION()
        api_key = session.query(ApiKey).filter_by(api_key=api_key).one()
        logger.debug("Authenticated user with key: %s" % api_key)
        user = api_key.user
        session.close()
        return user
    except MultipleResultsFound:
        traceback.print_exc()
        logger.critical("Impossible")
    except NoResultFound:
        logger.debug("Cannot authenticate user with key: %s" % api_key)
    return False
Example #31
def setupInteractive():
    """An interactive setup for the initial solenopsis config file"""
    if hasConfigFile() and not isForced():
        logger.critical('config file already exists')
        sys.exit(-1)

    name = raw_input("Please enter your environment name: ")
    username = raw_input("Please enter your salesforce username: ")
    password = raw_input("Please enter your salesforce password: ")
    token = raw_input("Please enter your salesforce token: ")
    isProd = raw_input("Is this a production instance? (Y/N): ")
    root_path = raw_input("Please enter the path to your src directory: ")
    
    setup(name, username, password, token, isProd, root_path)
Example #32
    def get_config(self):
        # get config file
        settings_file = os.path.abspath(
            os.path.dirname(__file__)) + '/config.ini'
        found = self.parser.read(settings_file)
        if not found:
            critical(
                "Configuration file not found, check your config.ini file")
            exit()

        # get configs
        self.radarr_url = self.check_config('radarr_url')
        self.radarr_api_key = self.check_config('radarr_api_key')
        self.trailer_folder = self.check_config('trailer_folder')
        self.interval = int(self.check_config('interval'))
Example #33
def setupInteractive():
    """An interactive setup for the initial solenopsis config file"""
    if hasConfigFile() and not isForced():
        logger.critical('config file already exists')
        sys.exit(-1)

    name = raw_input("Please enter your environment name: ")
    username = raw_input("Please enter your salesforce username: ")
    password = raw_input("Please enter your salesforce password: ")
    token = raw_input("Please enter your salesforce token: ")
    isProd = raw_input("Is this a production instance? (Y/N): ")
    root_path = raw_input("Please enter the path to your src directory: ")

    setup(name, username, password, token, isProd, root_path)
Example #34
    def init_env(self):
        """ initial test environments"""

        logger.info('Initial test environments')

        #create log_base_dir >> /Temp/${USER}/${JOBNAME}_${BuildID}
        self._get_log_base()
        self._create_log_directory(self._log_base)

        #parse CASE_PARAM get from env
        txt_case_param = os.environ.get('CASE_PARAM')
        if txt_case_param:
            self._case_param = self._txt_parse_to_dict(txt_case_param)
            #return case_param={'IMAGE':['E201-vx.x.x.bin','V501-vx.x.x.bin']} or {'IMAGE':'E201'} or {'IMAGE':['E201','V501']} or {'IMAGE':''}
            logger.info('self._case_param: %s' % self._case_param)
            if self._case_param['IMAGE']:
                self._image_name = self._case_param['IMAGE']
                logger.info("DETECT CASE_PARAM: IMAGE >> %s" %
                            self._image_name)
            else:  #self._case_param['IMAGE']==''or None
                logger.info(
                    "Not detect param IMAGE, upgrade CPE will NOT handle!")
        else:
            logger.warn('NO CASE PARAM, upgrade CPE will NOT handle!')

        #parse test_plan get from env
        txt_test_plan = os.environ.get('TEST_PLAN')
        if txt_test_plan is None:
            logger.critical('NO TEST PLAN found, will end job process!')
            raise AssertionError('NO TEST PLAN found!')
        else:
            self._group_test_plan(txt_test_plan)
            #return {'tb_index':[{'feature':'AP_VLAN','argfile':'/testplan/apvalan_case_for_dit.txt'},{}],}

        #parse autocase scripts local workspace path which gitlab fetch branch to
        self._jenkins_job_path = os.path.join(
            '/home', self._job_owner, 'workspace',
            self._job_name)  #/home/USER/workspace/JOB_NAME
        logger.info('current jenkins job workspace >> %s' %
                    self._jenkins_job_path)
        self._featrue_dir = os.path.join(self._jenkins_job_path, 'features')
        logger.info('feature scripts directory >> %s' % self._featrue_dir)
        #self._testplan_dir = os.path.join(self._featrue_dir,'testplan')
        #logger.info('testplan directory >> %s' % self._testplan_dir)
        self._topo_dir = os.path.join(self._jenkins_job_path, 'public/topo')
        logger.info('public topo directory >> %s' % self._topo_dir)
        self._basic_dir = os.path.join(self._jenkins_job_path, 'public/basic')
        logger.info('preprocess script directory >> %s' % self._basic_dir)
Example #35
def writeTemplate(body, dest):
    """Writes the template to disk

    body - The body to write
    dest - The file destination to write
    """
    if os.path.exists(dest) and not isForced():
        logger.critical('Destination exists "%s" skipping...' % (dest,))
        sys.exit(-1)

    try:
        f = open(dest, 'w')
        f.write(body)
        f.close()
    except:
        logger.critical('Unable to open %s for writing' % (dest,))
        sys.exit(-1)
Example #36
    def handle_error(self):
        '''Record the traceback and exit.'''

        logger.critical('Internal asyncore failure, writing traceback to %s' %
                        var.conf.get('options', 'tbfile')[0])

        try:
            tracefile = open(var.conf.get('options', 'tbfile')[0], 'w')
            traceback.print_exc(file=tracefile)
            tracefile.close()

            # Print one to the screen if we're not forked.
            if not var.fork:
                traceback.print_exc()
        except:
            raise

        shutdown(os.EX_SOFTWARE, 'asyncore failure')
Example #37
def prettyQuery(q, dependent = None):
    result = query(q, dependent)

    if len(result) == 0:
        logger.critical('No rows returned')
        sys.exit(-1)

    col_width = {}
    keys = result[0].keys()
    keys.remove('type')
    for key in keys:
        col_width[key] = get_max_width(result, key)

    for key in keys:
        print key.ljust(col_width[key] + 1),
    print ""

    for row in result:
        for key in keys:
            print row[key].ljust(col_width[key] + 1),
        print ""
Example #38
def prettyQuery(q, dependent = None):
    result = query(q, dependent)

    if len(result) == 0:
        logger.critical('No rows returned')
        sys.exit(-1)

    col_width = {}
    keys = result[0].keys()
    keys.remove('type')
    for key in keys:
        col_width[key] = get_max_width(result, key)

    for key in keys:
        print key.ljust(col_width[key] + 1),
    print ""

    for row in result:
        for key in keys:
            print row[key].ljust(col_width[key] + 1),
        print ""
Example #39
    def __init__(self, config_file, dev_config_file):
        """Builder method."""
        super(SettingsLoader, self).__init__()

        self.Config = configparser.ConfigParser(delimiters=('='))
        self.conf_file = None
        self.settings = {}

        if os.path.isfile(config_file):
            self.Config.read([config_file])
            self.conf_file = config_file
        elif os.path.isfile(dev_config_file):
            logger.info('Loading dev config file %s' % dev_config_file)
            self.Config.read([dev_config_file])
            self.conf_file = dev_config_file
        else:
            logger.critical('Configuration file not found: (%s or %s)' % (config_file, dev_config_file))

            sys.exit()

        self.load_settings()
Example #40
def query(q, dependent = None):
    result = None

    environment.parseSolConfig()
    creds = environment.getDependentCreds()

    if dependent is not None:
        creds = environment.getCreds(dependent)
        if creds is None:
            logger.critical("Could not read configuration for '%s'" % (dependent,))
            sys.exit(-1)

    if not creds["url"]:
        logger.critical("Url not specified in solenopsis configuration")
        sys.exit(-1)

    server_url = urlparse.urljoin(creds["url"], 'services/Soap/u/20.0')
    connection = beatbox.PythonClient(serverUrl=server_url)
    connection.login(creds["username"], '%s%s' % (creds["password"], creds["token"],))
    try:
        result = connection.query(q)
    except beatbox.SoapFaultError:
        logger.critical("Error with query\n %s" % sys.exc_info()[1])
        sys.exit(-1)
    return result
Example #41
def parseCreds(name):
    """Reads the credentials file

    name - The environment name"""
    if not getHome():
        logger.critical('Home not set')
        sys.exit(-1)

    cred_filename = '%s.properties' % (name,)
    cred_path = os.path.expanduser(os.path.join(getHome(), 'credentials', cred_filename))

    try:
        config = ConfigParser.ConfigParser()
        config.readfp(FakeSecHead(open(cred_path)))
        setUsername(config.get('section', 'username'))
        setPassword(config.get('section', 'password'))
        setToken(config.get('section', 'token'))
        if config.has_option('section', 'url'):
            setUrl(config.get('section', 'url'))
    except:
        logger.critical('An error occurred trying to read "%s"' % (cred_path))
        sys.exit(-1)
Example #42
    def handle_error(self):
        '''Record a normal traceback and exit.'''

        logger.critical('asyncore failure (BUG)')

        try:
            traceback_file = var.conf.get('options', 'traceback_file')[0]
        except conf.VariableNotFound:
            raise

        try:
            tracefile = open(traceback_file, 'w')
            traceback.print_exc(file=tracefile)
            tracefile.close()

            # Print one to the screen if we're not forked.
            if not var.fork:
                traceback.print_exc()
        except:
            raise

        shutdown(os.EX_SOFTWARE, 'asyncore failure')
Example #43
def config(secondary):
    """The main function for running config commands

    secondary - The rest of the command after 'config'"""
    if len(secondary) == 0:
        logger.critical('No secondary command passed to config')
        sys.exit(-1)

    if secondary[0] == 'new':
        if len(secondary) < 2:
            logger.critical('No type of new config')
            sys.exit(-1)

        if secondary[1] == 'credential':
            newCredsInteractive()
    elif secondary[0] == 'setup':
        setupInteractive()
    else:
        logger.critical('Command "config %s" not found' % (secondary[0],))
Example #44
def newCreds(name, username, password, token, isProd):
    """Generates a new credentials file

    name - The name of the new credential file
    username - The username
    password - The password
    token - The token
    isProd - (Y/N)"""
    if not getHome():
        logger.critical('Home not set')
        sys.exit(-1)

    new_filename = '%s.properties' % (name,)
    cred_path = os.path.expanduser(os.path.join(getHome(), 'credentials', new_filename))

    is_production = False

    if isProd.upper() == 'Y' or isProd.upper() == 'YES':
        is_production = True

    if os.path.exists(cred_path) and not isForced():
        logger.critical('Credential file already exists "%s"' % (cred_path,))
        sys.exit(-1)

    url = URL_MAP["dev"]
    if is_production:
        url = URL_MAP["prod"]

    try:
        f = open(cred_path, 'w')
        f.write("%s=%s\n" % (CREDENTIAL_NAMES["username"], username,))
        f.write("%s=%s\n" % (CREDENTIAL_NAMES["password"], password,))
        f.write("%s=%s\n" % (CREDENTIAL_NAMES["token"], token,))
        f.write("%s=%s\n" % (CREDENTIAL_NAMES["url"], url,))

        f.close()
    except:
        logger.critical('An error occurred trying to write to "%s"' % (cred_path,))
        sys.exit(-1)
Example #45
def createFile(params):
    """Creates the file from a template

    params - The parameters
        [0] type
        [n] See generate file
    """
    if params[0] == 'class' and len(params) != 2:
        logger.critical('Invalid parameters.\n Usage: create class NAME')
        sys.exit(-1)
    elif params[0] == 'trigger' and len(params) != 3:
        if len(params) == 2:
            logger.warning('No object name given.  Using generic name')
            params.append('MyObject__c')
        else:
            logger.critical('Invalid parameters.\n Usage: create trigger NAME OBJECT')
            sys.exit(-1)

    template = None
    try:
        template = TEMPLATE_TXT[params[0]]
    except KeyError as err:
        logger.critical('Unable to find template for %s' % (err,))
        sys.exit(-1)
Example #46
def parse_raw_abstracts_and_authors(fname):
	"""
	Hack to grab the unadulterated 'abstract' and 'author' keys from bib files.
	"""

	def fgenerator(fname):
		for line in open(fname):
			yield line

	raw_abstracts = {}
	raw_authors = {}

	proc = None
	fg = fgenerator(fname)
	for line in fg:
		if line[0] == '@':
			line = line.rstrip()
			if line[-1] != ',':
				logger.critical_leader('Citation declaration must be on its own line')
				logger.critical_leader('This line must end with a comma')
				logger.critical_leader('Offending line:')
				logger.critical_leader('\t>>>'+line+'<<<')
				logger.critical('Failed to parse ' + fname)
			proc = line.split('{')[1].split(',')[0]
		line = line.lstrip()
		if line[0:6] == 'author':
			if proc is None:
				logger.critical_leader('Found an author before a citation?')
				logger.critical_leader('Something has gone wrong. I found this line:')
				logger.critical_leader('\t>>>'+line.rstrip()+'<<<')
				logger.critical_leader('Outside of a citation?')
				logger.critical('Failed to parse ' + fname)
			raw_authors[proc] = ''
			line = line.split('=')[1].lstrip()[1:]
			scope = 1 # We have already stripped the leading '{'
			while True:
				scope += line.count('{') - line.count('}')
				line = line.lstrip()
				if scope == 0:
					line = line.rstrip()
					if line[-2:] != '},':
						logger.critical_leader('Author last line must end with "},"')
						logger.critical_leader('Instead, I think the author ends with line:')
						logger.critical_leader('\t>>>'+line+'<<<')
						logger.critical('Failed to parse ' + fname)
					line = line[:-2]
					raw_authors[proc] += line
					break
				else:
					if len(line) == 0:
						raw_authors[proc] += '\n'
					else:
						raw_authors[proc] += line
					line = next(fg)
		elif line[0:8] == 'abstract':
			if proc is None:
				logger.critical_leader('Found an abstract before a citation?')
				logger.critical_leader('Something has gone wrong. I found this line:')
				logger.critical_leader('\t>>>'+line.rstrip()+'<<<')
				logger.critical_leader('Outside of a citation?')
				logger.critical('Failed to parse ' + fname)
			raw_abstracts[proc] = ''
			line = line.split('=')[1].lstrip()[1:]
			scope = 1 # We have already stripped the leading '{'
			while True:
				scope += line.count('{') - line.count('}')
				line = line.lstrip()
				if scope == 0:
					line = line.rstrip()
					if line[-2:] != '},':
						logger.critical_leader('Abstract block last line must end with "},"')
						logger.critical_leader('Instead, I think the abstract ends with line:')
						logger.critical_leader('\t>>>'+line+'<<<')
						logger.critical('Failed to parse ' + fname)
					line = line[:-2]
					raw_abstracts[proc] += line
					break
				else:
					if len(line) == 0:
						raw_abstracts[proc] += '\n'
					else:
						raw_abstracts[proc] += line
					line = next(fg)
			proc = None

	return raw_abstracts, raw_authors
Example #47
        else:
            logger.critical('Invalid parameters.\n Usage: create trigger NAME OBJECT')
            sys.exit(-1)

    template = None
    try:
        template = TEMPLATE_TXT[params[0]]
    except KeyError as err:
        logger.critical('Unable to find template for %s' % (err,))
        sys.exit(-1)

    extension = None
    try:
        extension = TEMPLATE_EXT[params[0]]
    except KeyError as err:
        logger.critical('Unable to find extension for %s' % (err,))
        sys.exit(-1)

    xml = None
    try:
        xml = TEMPLATE_XML[params[0]]
    except KeyError as err:
        logger.critical('Unable to find template for %s' % (err,))
        sys.exit(-1)

    template_dest = None

    if not isRelative():
        if not getSourceDir() or len(getSourceDir()) == 0:
            logger.critical('Absolute flag set, but source dir not specified')
            sys.exit(-1)
Example #48
def factory_install(device_barcode):
    '''main factory installer'''
    start_time = time.time()

    script_dir = os.path.dirname(os.path.abspath(__file__))

    ete.command_bytes('test_work')

    if not args.test:
        colour_text.clear_screen()

    # start a new log directory on each run
    logger.new_log_dir()
    logger.reopen_logfile()

    logdir = logger.get_log_dir()
    logger.info("Logging to %s" % logdir)
    #logger.info("Device barcode %s" % device_barcode)

    colour_text.print_blue('''
==================================================
| Starting installation. Barcode is %s
==================================================
''' % device_barcode)

    logger.info(time.ctime())
    
    if args.erase:
        if not jtag.erase_firmwares():
            colour_text.print_fail('''
======================================
| FAILED: JTAG firmware erase failed
======================================
''')
            logger.critical("JTAG firmware erase failed")
            ete.command_bytes('test_fail')
            ete.command_bytes('reset')
            ete.accel(10000)
            ete.yawspeed(5000)
            ete.rollspeed(10000)
            return False
    
    if not args.nofw and not jtag.load_all_firmwares(retries=3):
        colour_text.print_fail('''
======================================
| FAILED: JTAG firmware install failed
======================================
''')
        logger.critical("JTAG firmware install failed")
        ete.command_bytes('test_fail')
        ete.command_bytes('reset')
        ete.accel(100000)
        ete.yawspeed(5000)
        ete.rollspeed(10000)
        try:
            conn = connection.Connection(ref_only=True)
            rotate.center_servos(conn)
        except Exception as ex:
            print("Failed to center servos: %s" % ex)
            pass
        return False

    if args.erase:
        if not connection.erase_parameters():
            colour_text.print_fail('''
==========================================
| FAILED: Failed to erase parameters
==========================================
''')
            logger.critical("Failed to erase parameters")
            ete.command_bytes('test_fail')
            ete.position(0, 0)
            ete.command_bytes('reset')
            ete.accel(100000)
            ete.yawspeed(5000)
            ete.rollspeed(10000)
            return False

    if not accelcal.accel_calibrate_retries(retries=4):
        colour_text.print_fail('''
==========================================
| FAILED: Accelerometer calibration failed
==========================================
''')
        logger.critical("Accelerometer calibration failed")
        ete.command_bytes('test_fail')
        ete.position(0, 0)
        ete.command_bytes('reset')
        ete.accel(100000)
        ete.yawspeed(5000)
        ete.rollspeed(10000)
        return False

    # all OK
    logger.info("Writing OTP region in STM32")
    #Add OTP HERE
    script_dir = os.path.dirname(os.path.abspath(__file__))
    if args.otp_show:
        p1 = Popen(['python', script_dir + '/otp_program.py', '--port', FMU_DEBUG,'--only-display',"abc"], stdin=PIPE, stdout=PIPE, stderr=PIPE)
        output, err = p1.communicate()
        logger.info(output)
    time.sleep(1)
    #conn = connection.Connection(ref_only=False)
    #otp_program_mod.Display_OTP(conn)


    

    def getMacAddress(): 
        if sys.platform == 'win32': 
            for line in os.popen("ipconfig /all"): 
                if line.lstrip().startswith('Physical Address'): 
                    mac = line.split(':')[1].strip().replace('-',':') 
                    break 
        else: 
            for line in os.popen("/sbin/ifconfig"): 
                if line.find('Ether') > -1: 
                    mac = line.split()[4] 
                    break 
        return mac 

    print("Manufacturer info : Hex Technology, \xA9 ProfiCNC 2016")
    print "MAC Address:",getMacAddress() #.join(['{:02x}'.format((uuid.getnode() >> i) & 0xff) for i in range(0,8*6,8)][::-1])

    colour_text.print_blue('''Barcode is %s''' % device_barcode)
    print("date of testing :" + time.strftime("%x"))
    print("time of testing :" + time.strftime("%X"))
    accel_data0 = "%f,%f,%f,%f,%f,%f" % (test_sensors.offset[0][0] ,test_sensors.offset[0][1] ,test_sensors.offset[0][2],test_sensors.scale_factor[0][0],test_sensors.scale_factor[0][1],test_sensors.scale_factor[0][2])
    accel_data2 = "%f,%f,%f,%f,%f,%f" % (test_sensors.offset[2][0] ,test_sensors.offset[2][1] ,test_sensors.offset[2][2],test_sensors.scale_factor[2][0],test_sensors.scale_factor[2][1],test_sensors.scale_factor[2][2])
    print "Accel :", accel_data0
    print "Accel :", accel_data2
    if True:  # args.otp_write:
        # Manufacturing Info
        p2 = Popen(['python', script_dir + '/otp_program.py', '--port', FMU_DEBUG,'Hex Technology, \xA9 ProfiCNC 2016',getMacAddress(),device_barcode,time.strftime("%x"),time.strftime("%X"),'--',str(accel_data0),str(accel_data2)], stdin=PIPE, stdout=PIPE, stderr=PIPE)
        output, err = p2.communicate()
        logger.info(output)
        logger.info(err)
        time.sleep(1)
        #Display
        #p3 = Popen(['python', script_dir + '/otp_program.py', '--port', FMU_DEBUG,'--only-display',"abc"], stdin=PIPE, stdout=PIPE, stderr=PIPE)
        #output, err = p3.communicate()
        #logger.info(output)

    colour_text.print_green('''
================================================
| Device: %s
| PASSED: Factory install complete (%u seconds)
================================================
''' %  (device_barcode, (time.time() - start_time)))
    logger.info("Factory install complete (%u seconds)" % (time.time() - start_time))
    ete.command_bytes('test_pass')
    ete.position(0, 0)
    ete.command_bytes('reset')
    ete.accel(100000)
    ete.yawspeed(5000)
    ete.rollspeed(10000)
    return True
Example #49
Created on 2011-7-25
Last Modified By xiang.ye#renren-inc.com on 2011-9-16
'''

import os,sys,traceback
import logger

from xml.dom import minidom  
from Queue import Queue
from taskdata import PathFactory,PathInfo,UrlFactory,UrlInfo,TaskFactory
from workmanager import WorkMonitor,WorkEvent,WorkEventHandler
from taskmanager import TaskEventHandler,TaskInitor

if __name__ == "__main__":
    if len(sys.argv) < 2:
        logger.critical('Usage: python inspector.py [etcfile] parameters needed, quitting...')
        sys.exit(1)
    etcfile = sys.argv[1]

    '''Read the config file to get six paths:
        spath: directory watched for incoming source files
        wpath: working directory for encoding
        bpath: backup directory for source files
        vpath: output directory for generated video files
        ipath: output directory for generated video thumbnails
        lpath: directory for encoding logs
    '''
    try:
        xmlfile = open( etcfile, 'r' )
        try:
            invalid_place = 'not_a_xmlfile'
Example #50
    logger.info('Test data file: ' + test_file)
    logger.info('Train data file: ' + train_file)

    logger.info('Start loading data...')
    test_df = pd.read_csv(test_file)
    train_df = pd.read_csv(train_file)
    logger.info('Done loading')

    test_productid_array = test_df.product_productid.unique()
    logger.info('Number of products in test data: %d' % len(test_productid_array))

    square_errors_array = []
    for target_productid in test_productid_array:
        # check whether this product id exists
        if target_productid not in train_df.product_productid.unique():
            logger.critical('Cannot find product in data with id: ' + target_productid)
            continue

        # compute similarity between this product and all others
        logger.debug('Computing similarity for product: ' + target_productid)

        # get products with which have common reviewers
        # instead of looping through all other products
        # to improve performance
        common_reviewers = train_df[train_df.product_productid == target_productid].review_userid
        productid_array = train_df[train_df.review_userid.isin(common_reviewers)].product_productid.unique()
        logger.debug('Number of compared products: %d' % len(productid_array))

        result_sim = pd.Series()
        for index, productid in enumerate(productid_array):
            df = train_df[train_df.product_productid.isin([target_productid, productid])]
Example #51
	def __init__ (self, bibkey, entry, bibgroup, raw_authors, raw_abstracts):
		self.bibkey = bibkey
		self.raw_abstract = raw_abstracts

		authors = []
		if 'author' in entry.persons:
			for person in entry.persons['author']:
				authors.append(get_name(person))
		entry.fields['authors'] = ', '.join(authors)

		entry.fields['badge'] = bibgroup[0].upper()
		if entry.fields['badge'] in ('P', 'D'):
			entry.fields['badge'] = 'PD'
		entry.fields['type'] = bibgroup
		entry.fields['display-type'] = WORK_TYPES[bibgroup]


		self.paths = {}

		# Try to copy the PDF to the content directory
		if os.path.exists(os.path.join('static', 'cv', bibkey + '.pdf')):
			cp(os.path.join('static', 'cv', bibkey + '.pdf'), LOCAL_CONTENT_DIR)
			self.paths['pdf'] = os.path.join(CONTENT_DIR, bibkey + '.pdf')
		else:
			if 'to-appear' in entry.fields and entry.fields['to-appear'] == '1':
				logger.warn('No PDF for "To Appear" paper {}'.format(bibkey))
			else:
				logger.critical_leader('Unable to find {}'.format(bibkey + '.pdf'))
				logger.critical_leader('\tYou need to add a copy of your paper to the cv/ directory')
				logger.critical('\tYour paper should be named the same as the key to the bib entry')

		# Try to copy the paper source to the content directory
		self.missing_zip = True
		for ext in ['.zip', '.tgz', '.tar.gz']:
			if os.path.exists(os.path.join('cv', bibkey + ext)):
				cp(os.path.join('cv', bibkey + ext), LOCAL_CONTENT_DIR)
				self.paths['tex_source'] = os.path.join(CONTENT_DIR, bibkey + ext)
				self.missing_zip = False
				break

		# Grab a ref to the talk if it exists
		if 'series' in entry.fields:
			series_short = entry.fields['series'].lower().replace(' ', '').replace("'", '')
			if os.path.exists(os.path.join('static', 'talks', series_short+'.pdf')):
				# talk named after conference
				self.paths['talk'] = '/talks.html#{}'.format(series_short)
		if 'talk' not in self.paths:
			if os.path.exists(os.path.join('static', 'talks', bibkey+'.pdf')):
				# talk named after bibkey
				self.paths['talk'] = '/talks.html#{}'.format(bibkey)

		# Possibly remove \url{} from the url entry if needed
		try:
			if entry.fields['conference-url'][0:5] == '\\url{':
				entry.fields['conference-url'] = entry.fields['conference-url'][5:-1]
		except KeyError:
			logger.warn("Unable to find conference URL for {}".format(bibkey))
			logger.warn('\tThis entry will be missing a link to the conference')

		# Possibly remove \url{} from the video link if needed
		try:
			if entry.fields['video-url'][0:5] == '\\url{':
				entry.fields['video-url'] = entry.fields['video-url'][5:-1]
		except KeyError:
			pass

		# Construct the best date we can for this publication
		try:
			year = entry.fields['year']
		except KeyError:
			logger.critical_leader("Bib entry {} is missing publication year.".format(bibkey))
			logger.critical("\tPlease add a year entry and try again.")

		try:
			month = entry.fields['month']
		except KeyError:
			try:
				month = entry.fields['mon']
			except KeyError:
				month = None
				logger.warn("Bib entry {} is missing publication month.".format(bibkey))
				logger.warn("\tPublication sort order may be affected. Please add a month entry")
		if month:
			if month.isalpha():
				month = MONTH_CONV[month.lower()[0:3]]
			else:
				month = int(month)

		# good enough to sort
		self.date = 365 * int(year)
		if month:
			self.date += 30 * (month - 1)

		# Get values for content that starts out hidden
		self.hiddens = {}
		hiddens_add_bibtex(self.hiddens, bibkey, entry, raw_authors)
		hiddens_add_abstract(self.hiddens, bibkey, entry, raw_abstracts)

		# Add html-friendly entries; note this must be done *after* generating hiddens
		entry.fields['title-html']   = latex_to_html(entry.fields['title'])

		authors = latex_to_html(entry.fields['authors'])
		if len(authors.split(',')) == 1:
			entry.fields['authors-html'] = authors
		elif len(authors.split(',')) == 2:
			entry.fields['authors-html'] = ' and'.join(authors.split(','))
		else:
			authors = authors.split(',')
			authors.insert(len(authors)-1, ' and')
			entry.fields['authors-html'] = ','.join(authors[:-1]) + authors[-1]

		try:
			entry.fields['booktitle-html'] = latex_to_html(entry.fields['booktitle'])
			try:
				series = latex_to_html(entry.fields['series'])
				series = series.replace(' ', '&nbsp;')
				entry.fields['booktitle-html'] += ' (' + series + ')'
			except KeyError:
				pass
		except KeyError:
			pass

		if 'journal' in entry.fields:
			entry.fields['journal-html'] = latex_to_html(entry.fields['journal'])
			if 'series' in entry.fields:
				series = latex_to_html(entry.fields['series'])
				series = series.replace(' ', '&nbsp;')
				entry.fields['journal-html'] += ' (' + series + ')'
			if 'volume' not in entry.fields:
				logger.error('{}: a volume key is required for journals'.format(bibkey))
			if 'number' not in entry.fields:
				logger.error('{}: a number key (issue) is required for journals'.format(bibkey))

		if 'journal' in entry.fields and 'booktitle' in entry.fields:
			logger.error('{} has a journal and booktitle entry.'.format(bibkey))
			logger.error('This is probably not what you want.')


		try:
			entry.fields['acceptance-percent'] =\
					float(entry.fields['acceptance-accepted']) /\
					float(entry.fields['acceptance-total']) * 100
		except KeyError:
			pass

		self.entry = entry
Example #52
	def generatePublicationSidebarHTML (self, bibkey):
		for paper in self.papers:
			if paper.bibkey == bibkey:
				return self.spotlight_tmpl.render(pub=paper.entry.fields,
					bibkey=paper.bibkey)
		logger.critical('Paper with key {} not found'.format(bibkey))
Example #53
    device_barcode = args.barcode
    if not args.test and device_barcode is None:
        colour_text.print_blue('''
==========================================
| PLEASE SWIPE DEVICE BARCODE
==========================================
''')
        device_barcode = barcode.barcode_read()
        if device_barcode is None:
            colour_text.print_fail('''
            ==========================================
            | FAILED: Barcode not detected
            ==========================================
            ''')
            logger.critical("Barcode not detected")
            time.sleep(2)
            continue
        
        # log the barcode
        logger.info("Barcode detected: %s" % device_barcode)
    
    # wait for the power to come on again
    while not util.wait_devices([FMU_JTAG, IO_JTAG, FMU_DEBUG]):
        logger.info("waiting for power up....")

    ret = factory_install(device_barcode)

    # increment the cycles counters
    savedstate.incr('current_cycles')
    savedstate.incr('total_cycles')
Example #54
def factory_install(device_barcode):
    '''main factory installer'''
    start_time = time.time()

    if not args.test:
        colour_text.clear_screen()

    # start a new log directory on each run
    logger.new_log_dir()
    logger.reopen_logfile()

    logdir = logger.get_log_dir()
    logger.info("Logging to %s" % logdir)
    logger.info("Device barcode %s" % device_barcode)

    colour_text.print_blue('''
==================================================
| Starting installation. Barcode is %s
==================================================
''' % device_barcode)

    logger.info(time.ctime())
    
    if args.erase:
        if not jtag.erase_firmwares():
            colour_text.print_fail('''
======================================
| FAILED: JTAG firmware erase failed
======================================
''')
            logger.critical("JTAG firmware erase failed")
            return False
    
    if not args.nofw and not jtag.load_all_firmwares(retries=3):
        colour_text.print_fail('''
======================================
| FAILED: JTAG firmware install failed
======================================
''')
        logger.critical("JTAG firmware install failed")
        return False

    if args.erase:
        if not connection.erase_parameters():
            colour_text.print_fail('''
==========================================
| FAILED: Failed to erase parameters
==========================================
''')
            logger.critical("Failed to erase parameters")
            return False

    if not accelcal.accel_calibrate_retries(retries=4):
        colour_text.print_fail('''
==========================================
| FAILED: Accelerometer calibration failed
==========================================
''')
        logger.critical("Accelerometer calibration failed")
        return False

    # all OK
    colour_text.print_green('''
================================================
| Device: %s
| PASSED: Factory install complete (%u seconds)
================================================
''' %  (device_barcode, (time.time() - start_time)))
    logger.info("Factory install complete (%u seconds)" % (time.time() - start_time))
    return True
Example #55
    logger.info('Test data file: ' + test_file)
    logger.info('Train data file: ' + train_file)

    logger.info('Start loading data...')
    test_df = pd.read_csv(test_file)
    train_df = pd.read_csv(train_file)
    logger.info('Done loading')

    test_userid_array = test_df.review_userid.unique()
    logger.info('Number of users in test data: %d' % len(test_userid_array))

    square_errors_array = []
    for target_userid in test_userid_array:
        # check whether this user id exists
        if target_userid not in train_df.review_userid.unique():
            logger.critical('Cannot find user in data with id: ' + target_userid)
            continue

        # compute similarity between this user and all others
        logger.debug('Computing similarity for user: ' + target_userid)

        # get users who reviewed the same products
        # instead of looping through all other users
        # to improve performance
        common_products = train_df[train_df.review_userid == target_userid].product_productid
        userid_array = train_df[train_df.product_productid.isin(common_products)].review_userid.unique()
        logger.debug('Number of compared users: %d' % len(userid_array))

        result_sim = pd.Series()
        for index, userid in enumerate(userid_array):
            df = train_df[train_df.review_userid.isin([target_userid, userid])]