Example #1
    def process(self):
        """Process all keystone accounts to sync."""
        orig_auth_url = get_config("auth", "keystone_origin")
        orig_admin_tenant, orig_admin_user, orig_admin_password = get_config(
            "auth", "keystone_origin_admin_credentials"
        ).split(":")
        oa_st_url, orig_admin_token = self.get_swift_auth(
            orig_auth_url, orig_admin_tenant, orig_admin_user, orig_admin_password
        )
        dest_auth_url = get_config("auth", "keystone_dest")

        # We assume the orig and dest passwords are the same, i.e. kept synchronized.
        dst_st_url, dest_admin_token = self.get_swift_auth(
            dest_auth_url, orig_admin_tenant, orig_admin_user, orig_admin_password
        )

        bare_oa_st_url = oa_st_url[: oa_st_url.find("AUTH_")] + "AUTH_"
        bare_dst_st_url = dst_st_url[: dst_st_url.find("AUTH_")] + "AUTH_"

        self.keystone_cnx = self.get_ks_auth_orig()

        for tenant in self.keystone_cnx.tenants.list():
            user_orig_st_url = bare_oa_st_url + tenant.id
            user_dst_st_url = bare_dst_st_url + tenant.id

            self.sync_account(user_orig_st_url, orig_admin_token, user_dst_st_url, dest_admin_token)
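
For context, the get_config("auth", ...) calls above read options from an INI-style [auth] section, with the admin credentials packed into a single colon-separated value. A minimal stand-in with that shape (the helper and file name below are assumptions for illustration, not the project's actual config module):

# Minimal INI-backed get_config (assumed shape, for illustration only).
import configparser

def get_config(section, option, path="swiftsync.cfg"):
    parser = configparser.ConfigParser()
    parser.read(path)
    return parser.get(section, option)

# The credentials option is expected to look like "tenant:user:password".
tenant, user, password = get_config(
    "auth", "keystone_origin_admin_credentials").split(":")
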
Example #2
	def start_gpio(self):
		if mock_gpio:
			return self.__on_gpio_status_changed(True, mocked_gpio=True)

		gpio_mappings = get_config('gpio_mappings')
		cmd = [
			"python",
			os.path.join(BASE_DIR, "core", "interact", "gpio_builder.py"),
			json.dumps(gpio_mappings),
			str(self.conf['api_port'])
		]

		bouncetime = get_config('bouncetime')
		if bouncetime is not None:
			cmd.append(str(bouncetime))
		
		# signal start
		from subprocess import Popen

		DEV_NULL = open(os.devnull, 'w')
		gpio_process = Popen(cmd, shell=False, stdout=DEV_NULL, stderr=DEV_NULL)

		with open(self.conf['d_files']['gpio']['pid'], 'wb+') as gpio_pid:			
			gpio_pid.write(str(gpio_process.pid))
		
		return self.__on_gpio_status_changed(True)
Example #3
def swift_cnx(acc, user):
    ks_url = get_config('auth', 'keystone_origin')
    cnx = sclient.Connection(ks_url,
                             user=user,
                             key=get_config('filler', 'default_user_password'),
                             tenant_name=acc[0],
                             auth_version=2)
    return cnx
Example #4
    def get_ks_auth_orig(self):
        """Get keystone cnx from config."""
        orig_auth_url = get_config("auth", "keystone_origin")
        cfg = get_config("auth", "keystone_origin_admin_credentials")
        (tenant_name, username, password) = cfg.split(":")

        return keystoneclient.v2_0.client.Client(
            auth_url=orig_auth_url, username=username, password=password, tenant_name=tenant_name
        )
Example #5
    def __init__(self):
        self.split_chan = False

        if get_config("split_audio_channels"):
            self.split_chan = True
            self.split_map = get_config("split_map")

        self.max_audio_level = get_config("max_audio_level")
        if self.max_audio_level is None:
            self.max_audio_level = MAX_AUDIO_LEVEL

        logging.basicConfig(filename=self.conf["d_files"]["audio"]["log"], level=logging.DEBUG)
Example #6
    def __init__(self):
        #signal.signal(signal.SIGINT, self._exit_gracefully)
        #signal.signal(signal.SIGQUIT, self._exit_gracefully)
        #signal.signal(signal.SIGTERM, self._exit_gracefully)

        _config_client = utils.get_config(settings.CONF_FILE, 'client')

        self._debug = False
        _log_level = logging.INFO
        if type(_config_client) is dict:
            self.migas_server = _config_client.get('server', 'migasfree.org')
            self.migas_proxy = _config_client.get('proxy', None)
            self.migas_ssl_cert = _config_client.get('ssl_cert', None)
            if _config_client.get('debug') in ('True', '1', 'On'):
                self._debug = True
                _log_level = logging.DEBUG

        _config_packager = utils.get_config(settings.CONF_FILE, 'packager')
        if type(_config_packager) is dict:
            self.packager_user = _config_packager.get('user', None)
            self.packager_pwd = _config_packager.get('password', None)
            self.packager_version = _config_packager.get('version', None)
            self.packager_store = _config_packager.get('store', None)

        logging.basicConfig(
            format='%(asctime)s - %(levelname)s - %(message)s',
            level=_log_level,
            filename=settings.LOG_FILE
        )
        logging.info('*' * 76)
        logging.info('%s in execution', self.CMD)
        logging.debug('Config client: %s', _config_client)
        logging.debug('Config packager: %s', _config_packager)

        # init UrlRequest
        _url_base = '%s/api/' % str(self.migas_server)
        if self.migas_ssl_cert:
            _url_base = '%s://%s' % ('https', _url_base)
        else:
            _url_base = '%s://%s' % ('http', _url_base)
        self._url_request = url_request.UrlRequest(
            debug=self._debug,
            url_base=_url_base,
            proxy=self.migas_proxy,
            info_keys={
                'path': settings.KEYS_PATH,
                'private': self.PRIVATE_KEY,
                'public': self.PUBLIC_KEY
            },
            cert=self.migas_ssl_cert
        )
Example #7
def update_cdn():
	use_cdn = get_config("use_cdn")
	if use_cdn is not None and use_cdn is False:
		print "Not pulling from cdn!"
		return

	media_manifest, cdn = get_config(['media_manifest', 'cdn'])
	# download media from "cdn"
	if media_manifest is None or len(media_manifest) == 0:
		media_manifest = ["prompts"]
	else:
		media_manifest.append("prompts")

	ftp = FTP()
	ftp.connect(cdn['addr'], cdn['port'])
	ftp.login(cdn['user'])
	ftp.cwd(cdn['home_dir'])

	for mm in media_manifest:
		out_dir = os.path.join(BASE_DIR, "media", mm)
		
		if mm == "video":
			out_dir = os.path.join(out_dir, "viz")
		
		if not os.path.exists(out_dir):
			os.makedirs(out_dir)
			print "initialized empty directory \"%s\" at %s" % (mm, out_dir)

		try:
			ftp.cwd(mm)
		except Exception as e:
			print "directory \"%s\" doesn't exist in CDN" % mm		
			continue

		dir_list = []
		ftp.dir(dir_list.append)
		dir_list = [d for d in dir_list if d not in UNPLAYABLE_FILES]

		for l in [l.split(" ")[-1] for l in dir_list]:
			out_file = os.path.join(out_dir, l)

			try:
				with open(out_file, 'wb+') as O:
					ftp.retrbinary("RETR %s" % l, O.write)
			except Exception as e:
				print "could not download %s to %s" % (l, out_file)
				print e, type(e)

				continue

		ftp.cwd('..')

	ftp.quit()
Example #8
    def _create_user(account_name, account_id):
        user = get_rand_str(mode='user_')
        # Create a user in that tenant
        uid = client.users.create(user,
                                  get_config('filler',
                                             'default_user_password'),
                                  get_config('filler', 'default_user_email'),
                                  account_id)
        # Get swift_operator_role id
        roleid = [role.id for role in client.roles.list()
                  if role.name == get_config('filler', 'swift_operator_role')]
        roleid = roleid[0]
        # Add tenant/user in swift operator role/group
        client.roles.add_user_role(uid.id, roleid, account_id)
        return (user, uid.id, roleid)
Example #9
def reload_plugins(*args, **kwargs):
    """Module function that'll reload all of the plugins"""
    config = utils.get_config()

    # When the modules are reloaded, the meta class will append
    # all of the classes again, so we need to make sure this is empty
    Plugin._plugin_classes = []
    _reset_variables()

    # Now reload all of the plugins
    plugins_to_reload = []
    plugindir = "pyhole.plugins"

    # Reload existing plugins
    for mod, val in sys.modules.items():
        if plugindir in mod and val and mod != plugindir:
            mod_file = val.__file__
            if not os.path.isfile(mod_file):
                continue
            for p in config.get("plugins", type="list"):
                if plugindir + "." + p == mod:
                    plugins_to_reload.append(mod)

    for plugin in plugins_to_reload:
        try:
            reload(sys.modules[plugin])
        except Exception, exc:
            LOG.error(exc)
Example #10
def main(handle):
	parser = argparse.ArgumentParser(description='Fetch tweets for a given twitter account.')
	parser.add_argument('handle', metavar='handle', type=str,
	                    help='name of twitter account')
	args = parser.parse_args()

	config = utils.get_config('../config.ini')
	host = config['db']['host']
	user = config['db']['user']
	password = config['db']['password']
	db_name = config['db']['dbname']

	conn = MySQLdb.connect(host,user,password,db_name,charset='utf8',use_unicode=True)
	c = conn.cursor()

	consumer_key = config['twitter']['consumer_key']
	consumer_secret = config['twitter']['consumer_secret'] 
	access_token = config['twitter']['access_token']
	access_secret = config['twitter']['access_secret']

	auth = OAuthHandler(consumer_key, consumer_secret)
	auth.set_access_token(access_token, access_secret)

	api = tweepy.API(auth)

	stuff = api.user_timeline(screen_name=handle, count=8000, include_rts=False)

	for status in stuff:
	    tweet = status.text
	    screen_name = handle
	    c.execute("INSERT INTO tweets_by_user (HANDLE, TWEET) VALUES (%s,%s)", (screen_name, tweet))
	    conn.commit()

	conn.close()
Example #11
    def __init__(self):

        self.choices = [
            "Whole Phamerated Phage",
            "Whole Unphamerated Phage",
            "One Phamerated Gene",
            "One Unphamerated Gene",
            "Pham",
        ]

        # self.set_border_width(10)
        self.config_info = utils.get_config()

        builder = Gtk.Builder()
        builder.add_from_file(utils.glade_file)
        self.window = builder.get_object("StartWindow")
        self.window.set_icon_from_file(utils.icon_file)
        choice_box = builder.get_object("choicebox")
        choice_list = Gtk.ListStore(str)
        for choice in self.choices:
            choice_list.append([choice])
        choice_box.set_model(choice_list)
        renderer = Gtk.CellRendererText()
        choice_box.pack_start(renderer, True)
        choice_box.add_attribute(renderer, "text", 0)
        choice_box.set_active(0)
        builder.connect_signals(self)
        self.window.show_all()
        # self.vbox.pack_start(vbox, True, True, 0)
        # self.config_info = {}
        # print utils.get_configuration
        self.check_blast_type()
Example #12
def start_tracker():
    """Start the Torrent Tracker.
    """
    # parse commandline options
    parser = OptionParser()
    parser.add_option('-p', '--port', help='Tracker Port', default=0)
    parser.add_option('-b', '--background', action='store_true', default=False,
                      help='Start in background')
    parser.add_option('-d', '--debug', action='store_true', default=False,
                      help='Debug mode')
    (options, args) = parser.parse_args()

    # setup directories
    utils.create_pytt_dirs()
    # setup logging
    utils.setup_logging(options.debug)

    try:
        # start the torrent tracker
        run_app(int(options.port) or utils.get_config().getint('tracker',
                                                               'port'))
    except KeyboardInterrupt:
        logging.info('Tracker Stopped.')
        utils.close_db()
        sys.exit(0)
    except Exception, ex:
        logging.fatal('%s' % str(ex))
        utils.close_db()
        sys.exit(-1)
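
Note that here get_config() returns the parser object itself, so callers chain typed accessors such as getint('tracker', 'port'). A minimal sketch under that assumption (the config path is hypothetical, and this is not pytt's actual implementation):

# Sketch of a get_config() that hands back the parser itself (an assumption
# inferred from the .getint('tracker', 'port') call above).
import configparser
import os

def get_config(path=os.path.expanduser("~/.pytt/pytt.conf")):
    parser = configparser.ConfigParser()
    parser.read(path)
    return parser
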
Example #13
def team_accept_invite_request():
	params = utils.flat_multi(request.form)
	_user = user.get_user().first()
	if not user.in_team(_user):
		raise WebException("You must be in a team!")
	_team = get_team(tid=_user.tid).first()
	tid = _team.tid
	if _user.uid != _team.owner:
		raise WebException("You must be the captain of your team to rescind invitations!")
	if _team.finalized:
		raise WebException("This team is finalized.")

	if len(_team.get_members()) >= utils.get_config("team_size"):
		raise WebException("Your team is full.")

	uid = params.get("uid")
	_user2 = user.get_user(uid=uid).first()
	if user.in_team(_user2):
		raise WebException("This user is already in a team!")

	invitation = TeamInvitations.query.filter_by(rtype=1, frid=_user2.uid, toid=tid).first()
	if invitation is None:
		raise WebException("Invitation doesn't exist.")

	with app.app_context():
		_user2 = Users.query.filter_by(uid=_user2.uid).first()
		_user2.tid = tid
		db.session.delete(invitation)
		invitation2 = TeamInvitations.query.filter_by(rtype=0, frid=tid, toid=_user2.uid).first()
		if invitation2 is not None:
			db.session.delete(invitation2)
		db.session.commit()
		db.session.close()

	return { "success": 1, "message": "Success!" }
def register_server():
    app = Flask(__name__)

    settings_entry = os.environ.get('SKELETONS_SETTINGS_ENTRY', 'skeletons')
    server_settings = get_config(settings_entry)
    app.config['server_settings'] = server_settings

    app.config['SESSION_COOKIE_NAME'] = server_settings.cookie_name
    app.secret_key = server_settings.secret_key

    app.register_blueprint(greeting_blueprint, url_prefix='/greeting')

    @app.before_request
    def before_request():
        pass

    @app.teardown_request
    def teardown_request(error=None):
        pass

    @app.after_request
    def after_request(response):
        return response

    @app.errorhandler(404)
    def page_not_found(e):
        return jsonify({'error': 'Invalid API path'}), 404

    @app.errorhandler(HTTPError)
    def http_error(e):
        return jsonify({'error': e.msg}), e.status_code

    return app
Example #15
def set_autostart_info():
	info = get_config('info')
	if info is None:
		return False

	# setup auto-start
	for f in [".profile", ".mp_profile"]:
		Popen(["cp", os.path.join(BASE_DIR, "core", "lib", "autostart", f), os.path.expanduser('~')])

	with open(os.path.join(os.path.expanduser('~'), ".mp_autostart"), 'wb+') as A:
		A.write("cd %s && python %s.py --start" % (info['dir'], info['module']))

	with open(os.path.join(os.path.expanduser('~'), ".profile"), 'ab') as A:
		A.write("\nsleep 15 && ~/.mp_autostart")

	Popen(["sudo", "cp", os.path.join(BASE_DIR, "core", "lib", "autostart", "rc.local"), os.path.join("/", "etc", "rc.local")])

	# set media info
	if "sculpture" in info.keys():
		info_directives = [
			"export SCULPTURE_TITLE=\"%s\"" % info['sculpture']['title'],
			"export SCULPTURE_LINK=\"%s\"" % info['sculpture']['link']
		]

		with open(os.path.join(os.path.expanduser('~'), ".mp_profile"), 'ab') as I:
			I.write("\n%s" % "\n".join(info_directives))

	return True
Example #16
    def _read_conf_file(self):
        _config = utils.get_config(settings.CONF_FILE, 'client')
        _log_level = logging.INFO
        self._debug = False

        self.migas_version = utils.get_mfc_version()
        self.migas_computer_name = utils.get_mfc_computer_name()
        if type(_config) is dict:
            self.migas_server = _config.get('server', 'migasfree.org')
            self.migas_proxy = _config.get('proxy', None)
            self.migas_ssl_cert = _config.get('ssl_cert', None)

            self.migas_gui_verbose = True  # by default
            if _config.get('gui_verbose') in ('False', '0', 'Off'):
                self.migas_gui_verbose = False

            if _config.get('debug') in ('True', '1', 'On'):
                self._debug = True
                _log_level = logging.DEBUG

        # http://www.lightbird.net/py-by-example/logging.html
        logging.basicConfig(
            format='%(asctime)s - %(levelname)s - %(module)s - %(funcName)s - %(message)s',
            level=_log_level,
            filename=settings.LOG_FILE
        )
        logging.info('*' * 20)
        logging.info('%s in execution', self.CMD)
        logging.debug('Config: %s', _config)
Example #17
def team_accept_invite():
	params = utils.flat_multi(request.form)
	_user = user.get_user().first()
	if user.in_team(_user):
		raise WebException("You're already in a team!")

	tid = params.get("tid")
	_team = get_team(tid=tid).first()
	if _team is None:
		raise WebException("Team not found.")

	if len(_team.get_members()) >= utils.get_config("team_size"):
		raise WebException("This team is full.")

	invitation = TeamInvitations.query.filter_by(rtype=0, frid=tid, toid=_user.uid).first()
	if invitation is None:
		raise WebException("Invitation doesn't exist.")

	with app.app_context():
		_user = Users.query.filter_by(uid=_user.uid).first()
		_user.tid = tid
		db.session.delete(invitation)
		invitation2 = TeamInvitations.query.filter_by(rtype=1, frid=_user.uid, toid=tid).first()
		if invitation2 is not None:
			db.session.delete(invitation2)
		db.session.add(Activity(_user.uid, 6, _team.tid, -1))
		db.session.commit()
		db.session.close()

	return { "success": 1, "message": "Success!" }
Example #18
def setup_logger(name="Pyhole"):
    """Log handler"""
    # NOTE(jk0): Disable unnecessary requests logging.
    requests.packages.urllib3.disable_warnings()
    requests_log = logging.getLogger("requests")
    requests_log.setLevel(logging.WARNING)

    debug_option = utils.get_option("debug")
    debug_config = utils.get_config().get("debug", type="bool")
    debug = debug_option or debug_config

    log_dir = utils.get_directory("logs")
    log_level = logging.DEBUG if debug else logging.INFO
    log_format = "%(asctime)s [%(name)s] %(message)s"
    log_datefmt = "%H:%M:%S"

    logging.basicConfig(level=log_level, format=log_format,
                        datefmt=log_datefmt)

    log_file = "%s/%s.log"
    log = logging.handlers.TimedRotatingFileHandler(log_file % (log_dir,
                                                    name.lower()), "midnight")
    log.setLevel(log_level)
    formatter = logging.Formatter(log_format, log_datefmt)
    log.setFormatter(formatter)
    logging.getLogger(name).addHandler(log)
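
The get_config().get(key, type=..., default=...) calls here and in Example #28 below imply a config object with a typed get(). A minimal wrapper with that interface (an assumption for illustration, not pyhole's actual config class):

# Minimal typed-config wrapper matching get(key, type=..., default=...) above
# (an assumption for illustration, not pyhole's actual config class).
import configparser

class Config(object):
    def __init__(self, path, section="Pyhole"):
        self._parser = configparser.ConfigParser()
        self._parser.read(path)
        self._section = section

    def get(self, key, type="str", default=None):
        if not self._parser.has_option(self._section, key):
            return default
        if type == "bool":
            return self._parser.getboolean(self._section, key)
        if type == "int":
            return self._parser.getint(self._section, key)
        if type == "list":
            value = self._parser.get(self._section, key)
            return [item.strip() for item in value.split(",")]
        return self._parser.get(self._section, key)
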
Example #19
def get_fprop_fn(variable_shape=False, include_pool=True):
    """
    build a theano function that use SAE weights to get convolved(or pooled if
    include_pool is True) features from a given input
    """
    conf = utils.get_config()
    paths = utils.get_paths()
    ae = serial.load(paths['sae']['model'])
    cnn_layer = 'cnn_layer_%i' % (conf['cnn_layers'])
    batch_size = conf[cnn_layer]['batch_size']
    nhid = conf['sae']['nhid']
    patch_size = conf['patch_size']
    region_size = conf['region_size']

    input = T.tensor4('input')
    filter_shape = (nhid, 1, patch_size, patch_size)
    filters = theano.shared(ae.get_weights().T.reshape(filter_shape))

    if variable_shape:
        out = conv.conv2d(input, filters)
    else:
        image_shape = [batch_size, 1, region_size, region_size]
        out = conv.conv2d(input, filters, filter_shape=filter_shape,
                          image_shape=image_shape)

    if include_pool:
        pool_fn = getattr(out, conf['pool_fn'])
        out = pool_fn(axis=(2, 3))
    return theano.function([input], out)
Example #20
def get_feats_from_cnn(rows, model=None):
    """
    fprop rows using best trained model and returns activations of the
    penultimate layer
    """
    conf = utils.get_config()
    patch_size = conf['patch_size']
    region_size = conf['region_size']
    batch_size = None
    preds = utils.get_predictor(model=model, return_all=True)
    y = np.zeros(len(rows))
    samples = np.zeros(
        (len(rows), region_size, region_size, 1), dtype=np.float32)
    for i, row in enumerate(rows):
        print 'processing %i-th image: %s' % (i, row['image_filename'])
        try:
            samples[i] = utils.get_samples_from_image(row, False)[0]
        except ValueError as e:
            print '{1} Value error: {0}'.format(str(e), row['image_filename'])
        y[i] = utils.is_positive(row)
    ds = DenseDesignMatrix(topo_view=samples)
    pipeline = utils.get_pipeline(
        ds.X_topo_space.shape, patch_size, batch_size)
    pipeline.apply(ds)
    return preds[-2](ds.get_topological_view()), y
Example #21
def get_feats_from_imagenet_in_partitions():
    conf = utils.get_config()
    imagenet_data = os.path.join(
        conf['models_path'], 'decafnet', 'imagenet.decafnet.epoch90')
    imagenet_meta = os.path.join(
        conf['models_path'], 'decafnet', 'imagenet.decafnet.meta')
    net = DecafNet(imagenet_data, imagenet_meta)
    rows = utils.get_filtered_rows()
    sets = utils.split_dataset(
        rows, conf['valid_percent'], conf['test_percent'], rng=conf['rng_seed'])
    feats = []
    ys = []
    for s in sets:
        X = np.zeros((len(s), 4096))
        y = np.zeros(len(s))
        for i, row in enumerate(s):
            try:
                log.info('processing %i-th of %i' % (i, len(s)))
                origin, im = utils.extract_roi(row, 30, True)
                scores = net.classify(np.asarray(im), center_only=True)
                X[i] = net.feature('fc7_cudanet_out')
                y[i] = utils.is_positive(row)
            except Exception:
                continue
        feats.append(X)
        ys.append(y)

    return feats[0], ys[0], feats[1], ys[1], feats[2], ys[2]
Example #22
def main():
    args = get_cli_arguments(for_client=True)
    config = get_config(args.config)

    if not os.path.exists(args.file):
        sys.exit("error: file doesn't exist: {}".format(args.file))

    if 'client' not in config:
        sys.exit("error: client section is missing from your config file.")

    request_parameters = {}

    if 'user' in config.get("client") and 'pass' in config.get("client"):
        request_parameters.update({
            "auth": requests.auth.HTTPBasicAuth(config["client"]["user"], config["client"]["pass"]),
        })

    with open(args.file, 'r') as payload:
        request_parameters.update({
            "files": {"file": payload},
        })
        try:
            r = requests.post(config["client"]["endpoint"], **request_parameters)
        except requests.exceptions.ConnectionError:
            # @todo: how exactly does the requests library distinguish bad auth from a bad URL?
            sys.exit("invalid URL or invalid credentials.")

        if r.status_code == 201:
            print "file successfully uploaded. {}{}".format(config.get("SERVE_URL"), r.content)
        else:
            sys.exit("error: {}".format(r.content))
Example #23
def main():
    """ Main entry point.

    Used in the console script we setup.

    """

    from zulipRequestHandler import ZulipRequestHandler
    from utils import get_config

    config = get_config()
    ZULIP_USERNAME = config.get('zulip', 'username')
    ZULIP_API_KEY = config.get('zulip', 'api_key')
    LED_SCREEN_ADDRESS = config.get('main', 'led_screen_address')

    zulipRequestHandler = ZulipRequestHandler(ZULIP_USERNAME, ZULIP_API_KEY)

    led_bot = LEDBot(
        address=LED_SCREEN_ADDRESS, listeners=[zulipRequestHandler]
    )

    ## Uncomment the lines below to be able to test the bot from the CLI.
    # from cli_handler import CLIHandler
    # led_bot = LEDBot(
    #     address=LED_SCREEN_ADDRESS,
    #     listeners=[CLIHandler(), zulipRequestHandler]
    # )

    led_bot.run()
Example #24
def main():
    utils.check_python()

    # fix py2exe
    if hasattr(sys, "frozen") and sys.frozen in \
            ("windows_exe", "console_exe"):
        p = os.path.dirname(os.path.abspath(sys.executable))
        os.chdir(p)

    config = utils.get_config(True)

    utils.print_shadowsocks()

    encrypt.init_table(config['password'], config['method'])

    try:
        logging.info("starting local at %s:%d" %
                     (config['local_address'], config['local_port']))

        tcp_server = tcprelay.TCPRelay(config, True)
        udp_server = udprelay.UDPRelay(config, True)
        loop = eventloop.EventLoop()
        tcp_server.add_to_loop(loop)
        udp_server.add_to_loop(loop)
        loop.run()
    except (KeyboardInterrupt, IOError, OSError) as e:
        logging.error(e)
        os._exit(0)
Example #25
def get_feats_in_partitions():
    """
    Extracts features from all dataset and split them in train validation and
    test sets
    """
    conf = utils.get_config()
    paths = utils.get_paths()
    rows = utils.load_csv()
    filters = conf['filters']
    region_size = conf['region_size']
    region_stride = conf['region_stride']

    filtered_rows = [
        row for row in rows if utils.check_filter(row, conf['filters'])]
    train_rows, valid_rows, test_rows = utils.split_dataset(
        filtered_rows, conf['valid_percent'], conf['test_percent'], rng=conf['rng_seed'])

    conv = get_fprop_fn(False)
    print 'Getting features from train...'
    X_train = get_feats_from_rows(
        train_rows, conv, conf['stride'])
    print 'Getting features from valid...'
    X_valid = get_feats_from_rows(
        valid_rows, conv, conf['stride'])
    print 'Getting features from test...'
    X_test = get_feats_from_rows(
        test_rows, conv, conf['stride'])
    y_train = [row['classification'] == 'Malign' for row in train_rows]
    y_valid = [row['classification'] == 'Malign' for row in valid_rows]
    y_test = [row['classification'] == 'Malign' for row in test_rows]
    return X_train, y_train, X_valid, y_valid, X_test, y_test
Example #26
    def setup_zenoss(self):
        try:
            self.config = utils.get_config()
            u = urlparse(self.config["config"]["server_uri"])
            p = urlparse(self.config["config"]["proxy_uri"])
            protocol = u.scheme
            host = u.hostname
            port = u.port
            username = self.config["config"]["username"]
            password = self.config["config"]["password"]
            proxy_protocol = p.scheme
            proxy_host = p.hostname
            proxy_port = p.port
            debug = self.config["config"]["debug"]
        except Exception:
            self.error(1, "Could not setup configuration")

        if self.config["config"]["uid_type"] not in ["ip", "fqdn"]:
            self.error(1, "Configuration error: uid_type is not one of 'ip' or 'fqdn'")

        try:
            self.api = api.ZenossController(
                protocol, host, port, username, password, proxy_protocol, proxy_host, proxy_port, debug
            )
        except URLError, e:
            self.error(1, e)
Example #27
	def __init__(self):
		api_port, num_processes, redis_port, rpi_id, custom_test_pad = \
			get_config(['api_port', 'num_processes', 'redis_port', 'rpi_id', 'custom_test_pad'])

		self.conf = {
			'rpi_id' : rpi_id,
			'd_files' : {
				'api' : {
					'pid' : os.path.join(BASE_DIR, ".monitor", "server.pid.txt"),
					'log' : os.path.join(BASE_DIR, ".monitor", "%s.log.txt" % rpi_id),
					'ports' : api_port
				},
				'gpio' : {
					'log' : os.path.join(BASE_DIR, ".monitor", "%s.log.txt" % rpi_id),
					'pid' : os.path.join(BASE_DIR, ".monitor", "gpio.pid.txt")
				},
				'ivr' : {
					'log' : os.path.join(BASE_DIR, ".monitor", "%s.log.txt" % rpi_id)
				},
				'module' : {
					'log' : os.path.join(BASE_DIR, ".monitor", "%s.log.txt" % rpi_id),
					'pid' : os.path.join(BASE_DIR, ".monitor", "%s.pid.txt" % rpi_id)
				},
				'ap_recorder' : {
					'pid' : os.path.join(BASE_DIR, ".monitor", "ap_recorder.pid.txt")
				},
				'ap_player' : {
					'pid' : os.path.join(BASE_DIR, ".monitor", "ap_player.pid.txt")
				},
				'audio' : {
					'pid' : os.path.join(BASE_DIR, ".monitor", "audio.pid.txt"),
					'log' : os.path.join(BASE_DIR, ".monitor", "%s.log.txt" % rpi_id)
				}
			},
			'api_port' : api_port,
			'num_processes' : num_processes,
			'redis_port' : redis_port,
			'media_dir' : os.path.join(BASE_DIR, "core", "media"),
			'crontab' : os.path.join(BASE_DIR, '.monitor', 'crontab')
		}

		if custom_test_pad is not None:
			self.test_pad_dir = os.path.join(BASE_DIR, custom_test_pad)
			print "CUSTOM TEST PAD: %s" % self.test_pad_dir
		else:
			self.test_pad_dir = os.path.join(BASE_DIR, "core", "test_pad")

		self.routes = [
			("/", self.TestHandler),
			(r'/js/(.*)', tornado.web.StaticFileHandler, \
				{ 'path' : os.path.join(self.test_pad_dir, "js")}),
			("/status", self.StatusHandler),
			("/pick_up", self.PickUpHandler),
			("/hang_up", self.HangUpHandler),
			(r'/mapping/(\d+)', self.MappingHandler)
		]

		self.db = redis.StrictRedis(host='localhost', port=self.conf['redis_port'], db=0)
		
		logging.basicConfig(filename=self.conf['d_files']['api']['log'], level=logging.DEBUG)
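
Here get_config accepts a list of keys and returns the corresponding values in order, which is what makes the tuple unpacking at the top of __init__ work. A minimal sketch of that behavior (assumed, not this project's actual helper; the JSON file name is hypothetical):

# Sketch of a get_config that takes one key or a list of keys (an assumption
# inferred from the tuple unpacking above).
import json

def get_config(keys, path="config.json"):
    with open(path) as f:
        conf = json.load(f)
    if isinstance(keys, list):
        return [conf.get(key) for key in keys]
    return conf.get(keys)
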
Example #28
    def __init__(self, network):
        irclib.SimpleIRCClient.__init__(self)
        network_config = utils.get_config(network)

        self.log = log.get_logger(str(network))
        self.version = version.version_string()
        self.source = None
        self.target = None
        self.addressed = False

        self.admins = CONFIG.get("admins", type="list")
        self.command_prefix = CONFIG.get("command_prefix")
        self.reconnect_delay = CONFIG.get("reconnect_delay", type="int")
        self.rejoin_delay = CONFIG.get("rejoin_delay", type="int")

        self.server = network_config.get("server")
        self.password = network_config.get("password", default=None)
        self.port = network_config.get("port", type="int", default=6667)
        self.ssl = network_config.get("ssl", type="bool", default=False)
        self.ipv6 = network_config.get("ipv6", type="bool", default=False)
        self.bind_to = network_config.get("bind_to", default=None)
        self.nick = network_config.get("nick")
        self.username = network_config.get("username", default=None)
        self.identify_password = network_config.get("identify_password",
                default=None)
        self.channels = network_config.get("channels", type="list")

        self.load_plugins()

        self.log.info("Connecting to %s:%d as %s" % (self.server, self.port,
                self.nick))
        self.connect(self.server, self.port, self.nick, self.password,
                ssl=self.ssl, ipv6=self.ipv6, localaddress=self.bind_to,
                username=self.username)
Example #29
def _securedrop_crawl():
    config = get_config()['crawler']
    if config.getboolean("use_database"):
        fpdb = RawStorage()
        class_data = fpdb.get_onions(config["hs_history_lookback"])
    else:
        fpdb = None
        with open(join(_log_dir, config["class_data"]), 'rb') as pj:
            class_data = pickle.load(pj)

    nonmonitored_name, monitored_name = class_data.keys()
    nonmonitored_class, monitored_class = class_data.values()

    with Crawler(page_load_timeout=config.getint("page_load_timeout"),
                 wait_on_page=config.getint("wait_on_page"),
                 wait_after_closing_circuits=config.getint("wait_after_closing_circuits"),
                 restart_on_sketchy_exception=config.getboolean("restart_on_sketchy_exception"),
                 db_handler=fpdb,
                 torrc_config={"CookieAuthentication": "1",
                               "EntryNodes": config["entry_nodes"]}) as crawler:
        crawler.crawl_monitored_nonmonitored(monitored_class,
                                             nonmonitored_class,
                                             monitored_name=monitored_name,
                                             nonmonitored_name=nonmonitored_name,
                                             ratio=config.getint("monitored_nonmonitored_ratio"))
Example #30
    def test_read_config(self):
        config = get_config()
        self.assertTrue(config.has_section('sorter'))
        self.assertTrue(config.has_section('crawler'))
        self.assertIsInstance(config.getint('sorter', 'page_load_timeout'),
                              int)
        entry_nodes = config['crawler']['entry_nodes'].split(',')
        self.assertRegex(entry_nodes[0], "[0-9A-F]{40}")
Example #31
def main():
    cudnn.benchmark = True
    # Load experiment setting
    config = get_config(opts.config)
    max_iter = config['max_iter']
    display_size = config['display_size']
    config['vgg_model_path'] = opts.output_path

    # Setup model and data loader
    trainer = UNIT_Trainer(config)
    if torch.cuda.is_available():
        trainer.cuda(config['gpuID'])
    train_loader_a, train_loader_b, test_loader_a, test_loader_b = get_all_data_loaders(
        config)

    # Setup logger and output folders
    model_name = os.path.splitext(os.path.basename(opts.config))[0]
    writer = SummaryWriter(os.path.join(opts.output_path + "/logs",
                                        model_name))
    output_directory = os.path.join(opts.output_path + "/outputs", model_name)
    checkpoint_directory, image_directory = prepare_sub_folder(
        output_directory)
    shutil.copy(opts.config, os.path.join(
        output_directory, 'config.yaml'))  # copy config file to output folder

    print('start training !!')
    # Start training
    iterations = trainer.resume(checkpoint_directory,
                                hyperparameters=config) if opts.resume else 0

    TraindataA = data_prefetcher(train_loader_a)
    TraindataB = data_prefetcher(train_loader_b)
    testdataA = data_prefetcher(test_loader_a)
    testdataB = data_prefetcher(test_loader_b)

    while True:
        dataA = TraindataA.next()
        dataB = TraindataB.next()
        if dataA is None or dataB is None:
            TraindataA = data_prefetcher(train_loader_a)
            TraindataB = data_prefetcher(train_loader_b)
            dataA = TraindataA.next()
            dataB = TraindataB.next()
        with Timer("Elapsed time in update: %f"):
            # Main training code
            for _ in range(3):
                trainer.content_update(dataA, dataB, config)
            trainer.dis_update(dataA, dataB, config)
            trainer.gen_update(dataA, dataB, config)
            # torch.cuda.synchronize()
        trainer.update_learning_rate()
        # Dump training stats in log file
        if (iterations + 1) % config['log_iter'] == 0:
            print("Iteration: %08d/%08d" % (iterations + 1, max_iter))
            write_loss(iterations, trainer, writer)
        if (iterations + 1) % config['image_save_iter'] == 0:
            testa = testdataA.next()
            testb = testdataB.next()
            if testa is None or testb is None or testa.size(
                    0) != display_size or testb.size(0) != display_size:
                testdataA = data_prefetcher(test_loader_a)
                testdataB = data_prefetcher(test_loader_b)
                testa = testdataA.next()
                testb = testdataB.next()
            with torch.no_grad():
                test_image_outputs = trainer.sample(testa, testb)
                train_image_outputs = trainer.sample(dataA, dataB)
            if test_image_outputs is not None and train_image_outputs is not None:
                write_2images(test_image_outputs, display_size,
                              image_directory, 'test_%08d' % (iterations + 1))
                write_2images(train_image_outputs, display_size,
                              image_directory, 'train_%08d' % (iterations + 1))
                # HTML
                write_html(output_directory + "/index.html", iterations + 1,
                           config['image_save_iter'], 'images')

        if (iterations + 1) % config['image_display_iter'] == 0:
            with torch.no_grad():
                image_outputs = trainer.sample(dataA, dataB)
            if image_outputs is not None:
                write_2images(image_outputs, display_size, image_directory,
                              'train_current')

            # Save network weights
        if (iterations + 1) % config['snapshot_save_iter'] == 0:
            trainer.save(checkpoint_directory, iterations)

        iterations += 1
        if iterations >= max_iter:
            writer.close()
            sys.exit('Finish training')
Example #32
import os
import json
import pickle

from utils import NodeType, EdgeType, Encoder
# cmd_args, get_config, Graph and SceneDataset come from this project's
# own modules in the full script.

if __name__ == "__main__":
    
    data_dir = os.path.abspath(__file__ + "../../../../data")
    raw_path = os.path.abspath(os.path.join(data_dir, "./processed_dataset/raw"))
    scenes_path = os.path.abspath(os.path.join(raw_path, cmd_args.scene_file_name))
    graphs_path = os.path.join(raw_path, cmd_args.graph_file_name)

    # In the pytorch geometry package, only int and tensor seems to be allowed to save
    # we process all the graphs and save them to a file.
    
    with open(scenes_path, 'r') as scenes_file:
        scenes = json.load(scenes_file)

    config = get_config()

    graphs = []
    attr_encoder = Encoder(config)

    for scene in scenes:
        for target_id in range(len(scene["objects"])):
            graph = Graph(config, scene, target_id)
            graphs.append(graph)
    
    with open(graphs_path, 'wb') as graphs_file:
        pickle.dump(graphs, graphs_file) 

    root = os.path.join(data_dir, "./processed_dataset")
    scene_dataset = SceneDataset(root, config)
Example #33
# -*- coding: utf-8 -*-
# __author__:Song Zhenqi
# 2021-01-20

from argparse import ArgumentParser
import paddle
from utils import get_logger, get_config
from models.architectures import build_model
import torch


def get_args():
    parser = ArgumentParser()
    parser.add_argument('--paddle', '-p', help='Paddle Model path')
    parser.add_argument('--cfg', '-c', help='Config File')

    return parser.parse_args()


if __name__ == '__main__':
    args = get_args()
    print(args.cfg)
    config = get_config(args.cfg)
    config['Architecture']["Head"]['out_channels'] = 6625
    net = build_model(config['Architecture'])
    # static_dict = torch.load('./test.pth')
    paddle_dict = paddle.load(args.paddle)
    # net.load_state_dict(static_dict)
    net.load_paddle_state_dict(paddle_dict)
    torch.save(net.state_dict(), 'mobilev3_crnn_ctc.pth')
Example #34
def main():
    config = get_config()
    db_type = 'mariadb'
    db_port = config.get('db_port', 3306)
    db_host = config.get('db_host')
    site_name = os.environ.get("SITE_NAME", 'site1.localhost')
    db_root_username = os.environ.get("DB_ROOT_USER", 'root')
    mariadb_root_password = get_password("MYSQL_ROOT_PASSWORD", 'admin')
    postgres_root_password = get_password("POSTGRES_PASSWORD")
    db_root_password = mariadb_root_password

    if postgres_root_password:
        db_type = 'postgres'
        db_host = os.environ.get("POSTGRES_HOST")
        db_port = 5432
        db_root_password = postgres_root_password
        if not db_host:
            db_host = config.get('db_host')
            print('Environment variable POSTGRES_HOST not found.')
            print('Using db_host from common_site_config.json')

        sites_path = os.getcwd()
        common_site_config_path = os.path.join(sites_path,
                                               COMMON_SITE_CONFIG_FILE)
        update_site_config("root_login",
                           db_root_username,
                           validate=False,
                           site_config_path=common_site_config_path)
        update_site_config("root_password",
                           db_root_password,
                           validate=False,
                           site_config_path=common_site_config_path)

    force = True if os.environ.get("FORCE", None) else False
    install_apps = os.environ.get("INSTALL_APPS", None)
    install_apps = install_apps.split(',') if install_apps else []
    frappe.init(site_name, new_site=True)

    if semantic_version.Version(frappe.__version__).major > 11:
        _new_site(
            None,
            site_name,
            mariadb_root_username=db_root_username,
            mariadb_root_password=db_root_password,
            admin_password=get_password("ADMIN_PASSWORD", 'admin'),
            verbose=True,
            install_apps=install_apps,
            source_sql=None,
            force=force,
            db_type=db_type,
            reinstall=False,
            db_host=db_host,
            db_port=db_port,
        )
    else:
        _new_site(
            None,
            site_name,
            mariadb_root_username=db_root_username,
            mariadb_root_password=db_root_password,
            admin_password=get_password("ADMIN_PASSWORD", 'admin'),
            verbose=True,
            install_apps=install_apps,
            source_sql=None,
            force=force,
            reinstall=False,
        )

    if db_type == "mariadb":
        site_config = get_site_config(site_name)
        db_name = site_config.get('db_name')
        db_password = site_config.get('db_password')

        mysql_command = [
            "mysql", f"-h{db_host}", f"-u{db_root_username}",
            f"-p{mariadb_root_password}", "-e"
        ]

        # Drop User if exists
        command = mysql_command + [
            f"DROP USER IF EXISTS '{db_name}'; FLUSH PRIVILEGES;"
        ]
        run_command(command)

        # Grant permission to database and set password
        grant_privileges = "ALL PRIVILEGES"

        # for Amazon RDS
        if config.get(RDS_DB) or site_config.get(RDS_DB):
            grant_privileges = RDS_PRIVILEGES

        command = mysql_command + [
            f"GRANT {grant_privileges} ON `{db_name}`.* TO '{db_name}'@'%' IDENTIFIED BY '{db_password}'; FLUSH PRIVILEGES;"
        ]
        run_command(command)

    if frappe.redis_server:
        frappe.redis_server.connection_pool.disconnect()

    exit(0)
Example #35
import utils

aws_config = utils.get_config('/config/aws_info.json')
result = utils.iot_put(aws_config, {'foo': utils.get_date_time_stamp()})
print("status_code: {}".format(result.status_code))
Example #36
def main(args, repo_target, bid):
    config = utils.get_config(args.config_file)
    server_address = config['AUTH']['server_address']
    token = config['AUTH']['token']
    headers = {'Content-Type': 'application/json;', 'Authorization': token}
    path = repo_target
    pk = bid
    block_size = int(config['FILE']['block_size'])
    key = config['CRYPTO']['key']
    wpath = ""

    # start a restore
    logger.info("Start restore session")
    init = init_restore(server_address, headers, pk, path)
    if init.status_code == 200:
        # logger.debug(json.dumps(init.json(), indent=4))
        for value in init.json().values():
            wpath = utils.convert_linuxtowin_path(value['path'])

            logger.info("Restore: {}".format(wpath))
            if value['type'] == 'directory':
                # make directories recursively
                if not os.path.isdir(wpath):
                    os.makedirs(wpath, exist_ok=True)
                    logger.debug("Directory {} created".format(wpath))

                # add attributes
                add_attribute(wpath, value['attr'])
                logger.info("DONE: {} restore done".format(wpath))
            elif value['type'] == 'file':
                if not os.path.isfile(wpath):

                    # touch empty file
                    basedir = os.path.dirname(wpath)
                    if not os.path.exists(basedir):
                        os.makedirs(basedir, exist_ok=True)
                    print("wpath: ", wpath)
                    open(wpath, 'wb').close()
                    logger.debug("Empty file {} created".format(wpath))

                # compare checksum list
                f = utils.FileDir(wpath, block_size)
                checksum_list = f.list_checksum()
                need_data = {"need": need_blocks(value['checksum'], checksum_list), \
                            "path": value['path']}
                #print (value['path'])
                need_data_json = str(need_data).replace(
                    "'", '"')  # convert to json format
                url = "http://{}/rest/api/download_data/{}/".format(
                    server_address, pk)

                logger.debug("Get data {} - {}".format(wpath,
                                                       value['checksum']))
                response = requests.request("GET",
                                            url,
                                            data=need_data_json,
                                            headers=headers)

                response_json = response.json()
                logger.debug("Download")
                logger.debug(response_json['url'])
                logger.debug("Existed")
                logger.debug(existed_blocks(value['checksum'], checksum_list))

                file_read = open(wpath, 'rb')
                print("checksum:", value['checksum'])
                data_existed = list(utils.read_in_blocks(file_read, \
                            list_block_id_existed(value['checksum'], checksum_list), block_size))
                print("data_existed: ", data_existed)
                data_need = list(get_data(server_address,
                                          response_json['url']))
                print("data_need: ", data_need)

                data = data_existed + data_need  # list tuple [(data, block_id), (), ()]

                print("data: ", data)
                data_sorted = sorted(data, key=lambda x: x[1])

                attrs = win32api.GetFileAttributes(wpath)
                if (attrs & win32con.FILE_ATTRIBUTE_READONLY) != 0:
                    os.chmod(wpath, stat.S_IWRITE)  # file read only
                if (
                        attrs & win32con.FILE_ATTRIBUTE_HIDDEN
                ) != 0:  # if the file is hidden, drop the hidden attribute so the file can be read
                    subprocess.check_call(["attrib", "-H",
                                           wpath])  # file hidden

                if data_need != []:
                    # write to file
                    join_file(wpath, data_sorted, key)

                    # add attributes
                    add_attribute(wpath, value['attr'])
                    logger.info("DONE: {} restore done".format(wpath))
                else:
                    add_attribute(wpath, value['attr'])
                    logger.info("DONE: {} not change".format(wpath))

            elif value['type'] == 'symlink':
                logger.info("PASS: Restore link: {} pass".format(wpath))
    else:
        logger.warning("{} - {}".format(init.text, str(init.status_code)))

    result_restore = {
        "job_id": args.job_id,
        "status_code": init.status_code,
        "backup_id": pk,
        "path": wpath
    }
    print(result_restore)

    # Send restore result to Controller
    ctl_address = config['CONTROLLER']['address']
    url_result = "http://{}/api/result-restore/".format(ctl_address)
    response = requests.request("POST",
                                url_result,
                                data=json.dumps(result_restore),
                                headers=headers)
    logger.debug("Send result to Controller: " + str(response.status_code))
Example #37
    async def handle_lcd(self):
        while 1:
            wait = 500
            if self.devices.lcd is None:
                return
            try:
                if self.devices.lcd_light != -1:
                    text = utils.czas(False, True, False)
                    self.devices.write_lcd_at_pos(text, 0, 0)
                    if self.curr_temp < 10:
                        self.devices.write_lcd_at_pos(" %1.1f" % self.curr_temp, 7, 0)
                    else:
                        self.devices.write_lcd_at_pos("%02.1f" % self.curr_temp, 7, 0)
                    self.devices.write_lcd_char_at_pos(chr(7), 11, 0)
                    self.devices.write_lcd_at_pos("C", 12, 0)
                    ws = 0
                    if utils.wifi_signal() >= -30:
                        ws = 4
                    elif utils.wifi_signal() >= -67:
                        ws = 3
                    elif utils.wifi_signal() >= -70:
                        ws = 2
                    elif utils.wifi_signal() >= -80:
                        ws = 1
                    elif utils.wifi_signal() >= -90:
                        ws = 0
                    self.devices.write_lcd_char_at_pos(chr(ws), 15, 0)

                    if self.state < 2:
                        if self.lcd_state < 20:
                            if self.lcd_state == 0:
                                self.devices.write_lcd_at_pos("Piec:           ", 0, 1)
                                self.devices.write_lcd_at_pos("o", 9, 1)
                                ctemp = int(utils.get_config("piec_temperatura", 0))
                                self.devices.write_lcd_at_pos("%02d" % ctemp, 6, 1)
                                ctime = utils.get_config('piec_ostatnia_aktualizacja', '')
                                self.devices.write_lcd_at_pos(ctime[11:16], 11, 1)
                        elif self.lcd_state < 40:
                            if self.lcd_state == 20:
                                self.devices.write_lcd_at_pos("Nast.:          ", 0, 1)
                                self.devices.write_lcd_at_pos("o", 9, 1)
                                (ntime, ntemp) = self.find_next_temp()
                                if ntemp <= -1:
                                    self.lcd_state = 40
                                else:
                                    self.devices.write_lcd_at_pos("%02d" % ntemp, 6, 1)
                                    self.devices.write_lcd_at_pos(ntime, 11, 1)

                        self.lcd_state += 1
                        if self.lcd_state >= 40:
                            self.lcd_state = 0
                    else:
                        if self.lcd_state == 0:
                            self.devices.write_lcd_at_pos("Zmiana na:      ", 0, 1)
                            self.lcd_state = -1
                        elif self.lcd_state > 0:
                            self.lcd_state = 0
                        self.devices.write_lcd_at_pos("%02d" % self.edit_temp, 11, 1)
                        wait = 250
            except Exception as err:
                print(err)
            if wait == 500:
                if self.devices.lcd_light != -1:
                    self.devices.lcd_light_tick()
            await uasyncio.sleep_ms(wait)
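
On this MicroPython controller, get_config(key, default) returns a single value with a fallback. A minimal JSON-backed sketch (an assumption; the file name is hypothetical, and the real helper may cache or lock the file):

# Sketch of a get_config(key, default=None) backed by a JSON file (an assumption).
import json

def get_config(key, default=None):
    try:
        with open("config.json") as f:
            return json.load(f).get(key, default)
    except OSError:
        return default
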
Example #38
async def send_chart_data(req, writer):
    cmax = 10
    sqr = False
    req.parse_qs()
    file_name = req.form["file"]
    aliases = utils.get_config("aliases", {})
    data_alias = ""
    gc.collect(generation=2)
    if file_name == 'termometr.hist':
        termometr = sensors.Sensory()
        curr = await termometr.pomiar_temperatury()
        data_alias = "Piec - termometr"
        del termometr
    elif file_name == 'piec.hist':
        curr = int(utils.get_config("piec_temperatura", 40))
        data_alias = "Piec - temperatura"
        sqr = True
    else:
        data_alias = aliases[file_name]
        curr = None
    prev = None
    data = """{"name": "%s", "data": [""" % data_alias
    await writer.awrite(data.encode('utf-8'))
    await writer.drain()
    data = ""
    tc = 0
    try:
        await utils.lock_file(file_name)

        with open(file_name, 'r') as fi:
            c = 0

            data = ""

            while 1:
                buf = fi.readline()
                if str(buf) == '':
                    break
                else:
                    d = buf.rstrip().split(" - ")

                    if sqr and prev is not None:
                        dp = buf.rstrip().split(" - ")
                        dp[1] = prev
                        dp[0] += " GMT"
                        if tc != 0:
                            data += ","
                        else:
                            tc = 1
                        data += json.dumps(dp)

                    prev = d[1]
                    d[0] += " GMT"
                    if tc != 0:
                        data += ","
                    else:
                        tc = 1
                    data += json.dumps(d)
                    c += 1

                    if c == cmax:
                        await writer.awrite(data.encode('utf-8'))
                        await writer.drain()
                        c = 0
                        del data
                        gc.collect()
                        data = ""

            fi.close()
            print('1')
            utils.unlock_file(file_name)
    except Exception as eee:
        print('e')
        utils.log_exception(eee, 1)

    if utils.dst_time()[0] > 2000 and curr is not None:
        czas = utils.czas(True)
        if sqr:
            d = [czas + ' GMT', prev]
            if tc != 0:
                data += ","
            else:
                tc = 1
            data += (json.dumps(d))

        d = [czas + ' GMT', curr]
        if tc != 0:
            data += ","
        data += json.dumps(d)

    await writer.awrite(data.encode('utf-8'))
    del data

    await writer.drain()

    await writer.awrite("""]}""".encode('utf-8'))

    await writer.drain()
    print('f')
    utils.unlock_file(file_name)
Example #39
from training_loop import training_loop, testing_loop
from datasets import load_datasets
from utils import load_net, log, lambda_schedule, torch_optimizer, get_config, get_run_id, copy_src_files, log_losses

parser = argparse.ArgumentParser()
parser.add_argument('--source_dataset', default="SVHN", type=str)
parser.add_argument('--target_dataset', default="MNIST", type=str)
args = parser.parse_args()

out_name = "%s2%s"%(args.source_dataset, args.target_dataset)
config_name = "configs/" + out_name + ".yaml"
out_dir = 'results/'
if not os.path.exists(out_dir):
    os.makedirs(out_dir)

config_init = get_config(config_name)
run = get_run_id(out_dir)
out_name = "%04d_" %(run) + out_name
copy_src_files(out_name)
config_init["num_iter_per_epoch"] = 400 * 200 // (config_init["b_source"] + config_init["b_target"])
config_init['nb_class'] = 9 if args.source_dataset in ["CIFAR", "STL"] else 10
config_init['out_path'] = out_dir + out_name + ".txt"
config_init['source_dataset'] = args.source_dataset
config_init['target_dataset'] = args.target_dataset

# log experiment settings:
for key in sorted(config_init):
    conf = key + " -> " + str(config_init[key])
    log(conf, config_init['out_path'])

# load datasets:
Example #40
                                data_generator,
                                n_batches,
                                data_format=data_format,
                                **config)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Evaluate PredNet model.')
    parser.add_argument('config',
                        help='experiment config name defined in settings.py')
    parser.add_argument('--stateful',
                        help='use stateful PredNet model',
                        action='store_true')
    parser.add_argument('--pretrained',
                        help='choose pre-trained model dataset',
                        type=str)
    parser.add_argument('--image-dir',
                        help='stimulus directory path',
                        type=str)
    FLAGS, unparsed = parser.parse_known_args()
    args = vars(FLAGS)
    config_name, config = utils.get_config(args)

    print('\n==> Starting experiment: {}\n'.format(config['description']))
    config_str = utils.get_config_str(config)
    print('==> Using configuration:\n{}'.format(config_str))

    evaluate(config_name, args['image_dir'], **config)
    utils.save_experiment_config(config_name, config['base_results_dir'],
                                 config)
Example #41
#!/usr/bin/python
#coding:utf-8
from web import app
import os, sys
import utils
# Flask session key
app.secret_key = 'A0Zr98j/3yX R~XHH!jmN]LWX/,?RT'
# Load the 'web' section of the config
config = utils.get_config('web')
# Merge the config into app.config
app.config.update(config)

if __name__ == '__main__':
    app.run(host=app.config.get('bind', '0.0.0.0'),
            port=int(app.config.get('port')),
            debug=True)
Example #42
import pika
import utils
import os
import sys

WORK_DIR = os.path.abspath(
    os.path.join(os.path.dirname(__file__), os.path.pardir))
ES_DIR = os.path.join(WORK_DIR, "es")
sys.path.append(ES_DIR)
from core import ESConnect

es_until = ESConnect()
exchange_name = 'ex.healthcare.fanout.logs'

conf = utils.get_config()

credentials = pika.PlainCredentials(conf["username"], conf["password"])

conn = pika.BlockingConnection(
    pika.ConnectionParameters(host=conf["url"],
                              port=conf["port"],
                              virtual_host=conf["vhost"],
                              credentials=credentials))

channel = conn.channel()

channel.exchange_declare(exchange=exchange_name, exchange_type='fanout')
result = channel.queue_declare(queue='', durable=True, exclusive=True)

queue_name = result.method.queue
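# The snippet stops after declaring the queue; a typical continuation (a
# hedged sketch, not part of the original) binds it to the fanout exchange
# and starts consuming:
def on_log(ch, method, properties, body):
    # index each received log line into Elasticsearch via the helper above
    es_until.index_log(body)  # hypothetical method on ESConnect

channel.queue_bind(exchange=exchange_name, queue=queue_name)
channel.basic_consume(queue=queue_name, on_message_callback=on_log,
                      auto_ack=True)
channel.start_consuming()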
Example #43
0
    type=str,
    default='.',
    help="path to the pretrained inception network for domain A")
parser.add_argument(
    '--inception_b',
    type=str,
    default='.',
    help="path to the pretrained inception network for domain B")

opts = parser.parse_args()

torch.manual_seed(opts.seed)
torch.cuda.manual_seed(opts.seed)

# Load experiment setting
config = get_config(opts.config)
input_dim = config['input_dim_a'] if opts.a2b else config['input_dim_b']

# Load the inception networks if we need to compute IS or CIIS
if opts.compute_IS or opts.compute_CIIS:
    inception = load_inception(
        opts.inception_b) if opts.a2b else load_inception(opts.inception_a)
    # freeze the inception models and set eval mode
    inception.eval()
    for param in inception.parameters():
        param.requires_grad = False
    inception_up = nn.Upsample(size=(299, 299), mode='bilinear')

# Setup model and data loader
image_names = ImageFolder(opts.input_folder, transform=None, return_paths=True)
data_loader = get_data_loader_folder(opts.input_folder,
Example #44
0
def extract_page_entities(graph) -> dict:
    utils.get_logger().info(
        f'LISTING/EXTRACT: Extracting types and relations for page entities..')

    page_entities = defaultdict(
        lambda: {
            'labels': set(),
            'origins': set(),
            'types': set(),
            'in': set(),
            'out': set()
        })

    df = context.retrieve_page_entity_context(graph)

    # extract list page entities
    utils.get_logger().info(
        f'LISTING/EXTRACT: Extracting types of list page entities..')
    df_lps = df[df['P_type'] == 'List']
    for lp, df_lp in df_lps.groupby(by='P'):
        clg_types = {
            clg_util.clg_type2name(t)
            for t in graph.get_nodes_for_part(dbp_util.name2resource(lp))
        }
        if clg_types:
            for _, row in df_lp.iterrows():
                name = row['E_ent']
                page_entities[name]['labels'].add(row['E_text'])
                page_entities[name]['origins'].add(lp)
                page_entities[name]['types'].update(clg_types)

    df = df.loc[df['P_type'] != 'List']  # ignore list pages in subsequent steps

    # compute valid combinations of types and NE tags
    df_types = context.get_entity_types(df, graph)
    dft = pd.merge(left=df, right=df_types, on='E_ent')
    valid_tags = context.get_valid_tags_for_entity_types(
        dft, graph, utils.get_config('listing.valid_tag_threshold'))

    # extract types
    utils.get_logger().info(
        f'LISTING/EXTRACT: Extracting types of page entities..')
    df_new_types = _compute_new_types(df, dft, df_types, valid_tags)
    for ent, df_ent in df_new_types.groupby(by='E_ent'):
        page_entities[ent]['labels'].update(set(df_ent['E_text'].unique()))
        page_entities[ent]['origins'].update(_get_origins_for_entity(df_ent))
        new_types = set(df_ent['E_enttype'].unique())
        transitive_types = {
            clg_util.clg_type2name(tt)
            for t in new_types
            for tt in graph.ancestors(clg_util.name2clg_type(t))
        }
        new_types = new_types.difference(transitive_types)  # remove transitive types
        page_entities[ent]['types'].update(new_types)

    # extract relations
    utils.get_logger().info(
        f'LISTING/EXTRACT: Extracting relations of page entities..')
    df_rels = context.get_entity_relations()
    df_new_relations = _compute_new_relations(df, df_rels, 'P', valid_tags)
    df_new_relations = pd.concat([
        df_new_relations,
        _compute_new_relations(df, df_rels, 'TS_ent', valid_tags)
    ])
    df_new_relations = pd.concat([
        df_new_relations,
        _compute_new_relations(df, df_rels, 'S_ent', valid_tags)
    ])
    for ent, df_ent in df_new_relations.groupby(by='E_ent'):
        page_entities[ent]['labels'].update(set(df_ent['E_text'].unique()))
        page_entities[ent]['origins'].update(_get_origins_for_entity(df_ent))
        rels_in = set(
            map(tuple, df_ent[~df_ent['inv']][['pred', 'target']].values))
        page_entities[ent]['in'].update(rels_in)
        rels_out = set(
            map(tuple, df_ent[df_ent['inv']][['pred', 'target']].values))
        page_entities[ent]['out'].update(rels_out)

    return dict(page_entities)
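# For illustration, the mapping returned above has this shape (the entity
# name is hypothetical; values are Python sets):
# {'Some_Entity': {'labels': {'Some entity'},
#                  'origins': {'List_of_examples'},
#                  'types': {'Person'},
#                  'in': {('pred', 'target'), ...},
#                  'out': {('pred', 'target'), ...}}}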
Example #45
0
    async def web_save(self, temp, times):
        tmp = int(temp)
        curr_tmp = int(utils.get_config("piec_temperatura", tmp))
        if tmp != curr_tmp:
            await self.set_temperature(tmp)
        self.save_times(times)
Example #46
0
        while True:
            process_once(config)
            log.info("Sleeping for %ds", config['JOB_INTERVAL_SECONDS'])
            time.sleep(config['JOB_INTERVAL_SECONDS'])
    except Exception as e:
        send_email(config,
                   "FATAL ERROR. Daemon will need to be fixed.\n%s" % str(e))
        raise


def connect_db():
    url = model.get_database_url()
    model.connect_db(url)


if __name__ == "__main__":
    try:
        log.info("Starting service")
        connect_db()
        if '--test' in sys.argv:
            log.info("Sending test email")
            config = utils.get_config("recalculate.yaml")
            send_email(config, "Test")
            sys.exit(0)
        if '--no-delay' not in sys.argv:
            log.info("Sleeping for %ds", STARTUP_DELAY)
            time.sleep(STARTUP_DELAY)
        process_loop()
    except KeyboardInterrupt:
        log.info("Shutting down due to user request (e.g. Ctrl-C)")
Example #47
0
import logging
from pathlib import Path
import pdb

from elasticsearch_dsl import Search, A, Q
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
# from wordcloud import WordCloud, STOPWORDS

import utils
import es_utils
import plot_utils

# from utils
CONFIG = utils.get_config()
CTRS = utils.get_containers()
PLACEBOS = utils.get_placebos()
NONPLACEBOS = utils.get_nonplacebos()

# constants
INDICES = ["ftp-*", "telnet-*"]
SEP = CONFIG["IO"]["CSV_SEP"]
MOST_COMMON_ROCKYOU = 20

# artifacts
BASE_DIR = Path(CONFIG["ARTIFACT_DIR"]) / "ftp-telnet"
DATA_DIR = BASE_DIR / "es"
PLOTS_DIR = BASE_DIR / "plots"
FILTERED_DIR = BASE_DIR / "filtered"
FTP_BOT_IPS = DATA_DIR / "ftp_bot_ips.txt"
Example #48
0
def handler(event, context) -> dict:
    del context  # unused

    try:
        cognito_code = event['queryStringParameters']['code']
        state = event['queryStringParameters']['state']

    except (TypeError, KeyError):
        return bad_request('', 'missing required parameter')

    try:
        state = jwt.decode(
            state,
            get_state_jwt_secret(),
            algorithms=['HS256'],
        )
    except jwt.InvalidTokenError:
        return bad_request('', 'invalid state token')

    try:
        cognito_token = exchange_cognito_code(event, cognito_code)
    except BadRequest:
        return bad_request()
    except InternalServerError:
        return internal_server_error()

    # Issue a token valid for 180 days. This allows the user to issue delegate
    # tokens for up to this time.
    # But set the expiration of the Cookie itself to the validity of the
    # Cognito token.
    # Unless the user actively safeguards his cookie, he will have to
    # re-authenticate with Cognito. If this is malicious intent, the user
    # could delegate the same access to himself, and get the same result.
    now = int(time.time())
    refresh_token = {
        'iat': now,  # Issued AT
        'exp': now + 180 * 24 * 60 * 60,  # EXPire: 180 days, maximum duration of delegated tokens
        'azp': cognito_token['cognito:username'],  # AuthoriZed Party
    }
    raw_refresh_token = jwt.encode(
        refresh_token,
        get_refresh_token_jwt_secret(),
        algorithm='HS256',
    ).decode('ascii')

    structlog.get_logger().msg(
        "Cognito Code exchanged succesfully, issuing refresh_token",
        refresh_token=refresh_token)  # Don't log signed token, only payload

    try:
        if state['action'] == 'index':
            location = f"https://{os.environ['DOMAIN_NAME']}/"
        elif state['action'] == 'delegate':
            location = f"https://{os.environ['DOMAIN_NAME']}/delegate"
        elif state['action'] == 'authorize':
            location = f"https://{os.environ['DOMAIN_NAME']}/authorize?" + \
                f"redirect_uri={urllib.parse.quote_plus(state['redirect_uri'])}"
        else:
            raise ValueError(f"Invalid action `{state['action']}`")
    except (KeyError, ValueError) as e:
        structlog.get_logger().msg("state is invalid", exception=e)
        return internal_server_error()

    return {
        'statusCode': 302,
        'headers': {
            'Content-Type': 'text/plain',
            'Location': location,
            'Set-Cookie': generate_cookie(
                get_config().cookie_name_refresh_token,
                raw_refresh_token,
                max_age=int(cognito_token['exp'] - now)),
        },
        'body': 'Redirecting...',
    }
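# generate_cookie is used above but not shown; a plausible sketch (an
# assumption, not the source's actual helper) could be:
def generate_cookie(name: str, value: str, max_age: int) -> str:
    # build a hardened Set-Cookie header value
    return (f"{name}={value}; Max-Age={max_age}; "
            "Path=/; Secure; HttpOnly; SameSite=strict")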
Example #49
0
face_cascade = cv2.CascadeClassifier()
if not face_cascade.load(
        cv2.samples.findFile('haarcascade_frontalface_alt.xml')):
    print('--(!)Error loading face cascade')
    exit(1)  # exit non-zero: the cascade failed to load

# alpha mask
mask = cv2.imread("alpha_mask2.png")
mask = np.float32(mask) / 255
# mask = mask * 255

# Calculate class codes for specified image
ckpt = 'pretrained/animal149_gen.pt'
class_image_folder = 'images/golden_retrievers'

config = get_config('configs/funit_animals.yaml')
config['batch_size'] = 1
config['gpus'] = 1

trainer = Trainer(config)
trainer.load_ckpt(ckpt)
trainer.eval()

transform_list = [
    transforms.ToTensor(),
    transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
]
transform_list = [transforms.Resize((128, 128))] + transform_list
transform = transforms.Compose(transform_list)

print('Compute average class codes for images in %s' % class_image_folder)
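# A hedged sketch of the averaging step the print statement announces; the
# compute_k_style call is an assumed trainer API, the rest is standard
# torch/PIL (the os/torch imports sit in the elided part of the example):
import glob
from PIL import Image

paths = sorted(glob.glob(os.path.join(class_image_folder, '*')))
with torch.no_grad():
    codes = []
    for p in paths:
        img = transform(Image.open(p).convert('RGB')).unsqueeze(0).cuda()
        codes.append(trainer.model.compute_k_style(img, 1))  # assumed API
    class_code = torch.cat(codes).mean(dim=0, keepdim=True)  # average class code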
Example #50
0
            img_s = image_utils.scale_maxside(img, maxside=maxside)
            gt_s = image_utils.scale_maxside(gt, maxside=maxside)

        box = image_utils.random_crop(img_s)
        data = image_utils.crop_array(img_s, box)
        label = image_utils.crop_array(gt_s, box)

        # Batch size of 1
        data = data[np.newaxis, ...]
        label = label[np.newaxis, ...]
        yield (data, label)


if __name__ == "__main__":
    project = "local"
    config = utils.get_config(project)
    im_list = utils.open_im_list(config["im_list"])
    datasource = DataSource(config)
    generator = DataGenerator(im_list, datasource, maxside=512)

    data, label = next(generator)
    print(data.shape)
    print(label.shape)

    data = data[0]
    label = label[0]

    unlabeled = np.max(label, axis=2) == 0
    gt = np.argmax(label, axis=2) + 1
    gt[unlabeled] = 0
    gt = utils.add_color(gt)
Example #51
0
import argparse
import os
import random
import shutil

import numpy as np
import torch

# user-defined
from models.crnn import load_model
import dataset
import utils
from models.Synth_gan import ImageDiscriminator
from models.Synth_gan import FeatureDiscriminator
from models.Synth_gan import ImageGenerator

parser = argparse.ArgumentParser()
parser.add_argument('--config',
                    type=str,
                    required=True,
                    help='path of config file')
opt = parser.parse_args()

config = utils.get_config(opt.config)
start_epoch = 0

# make output folder
if not os.path.exists(config['model']['exp_path']):
    os.mkdir(config['model']['exp_path'])

shutil.copy(opt.config, os.path.join(config['model']['exp_path'],
                                     'config.yaml'))

# set random seed
random.seed(config['hyperparameters']['random_seed'])
np.random.seed(config['hyperparameters']['random_seed'])
torch.manual_seed(config['hyperparameters']['random_seed'])
torch.cuda.manual_seed(config['hyperparameters']['random_seed'])
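# Optional hardening, not in the original: seed every visible GPU and pin
# cuDNN so multi-GPU runs stay reproducible.
torch.cuda.manual_seed_all(config['hyperparameters']['random_seed'])
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False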
Example #52
0
################
# Parameters
################
# config_path = '/home/test/program/self-driving/munit/configs/rainy.yaml'
# checkpoint_path = '/home/test/program/self-driving/munit/checkpoints/rainy/gen_01000000.pt'
# config_path = '/home/test/program/self-driving/munit/configs/night.yaml'
# checkpoint_path = '/home/test/program/self-driving/munit/checkpoints/night/gen_01000000.pt'
# config_path = '/home/test/program/self-driving/munit/configs/snowy.yaml'
# checkpoint_path = '/home/test/program/self-driving/munit/checkpoints/snowy/gen_01000000.pt'
# config_path = '/home/test/program/self-driving/munit/configs/sunny.yaml'
# checkpoint_path = '/home/test/program/self-driving/munit/checkpoints/sunny/gen_01250000.pt'
# checkpoint_path = '/home/test/program/MUNIT-master/outputs/day2snow/checkpoints/gen_00260000.pt'
config_path = '/home/test/program/self-driving/munit/configs/snow_night.yaml'
checkpoint_path = '/home/test/program/self-driving/munit/checkpoints/snow_night/gen_01000000.pt'

config = utils.get_config(config_path)

model = MUNIT(config)
try:
    state_dict = torch.load(checkpoint_path)
    model.gen_a.load_state_dict(state_dict['a'])
    model.gen_b.load_state_dict(state_dict['b'])
except Exception as e:
    raise RuntimeError('load model failed') from e

model.cuda()
new_size = config['new_size']
style_dim = config['gen']['style_dim']
encode = model.gen_a.encode
style_encode = model.gen_b.encode
decode = model.gen_b.decode
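# A hedged example of driving the handles above for a single translation;
# `image` is an assumed 1x3xHxW tensor already resized to new_size and
# normalized like the training data:
with torch.no_grad():
    content, _ = encode(image)                      # content code of the input
    style = torch.randn(1, style_dim, 1, 1).cuda()  # random style for domain B
    translated = decode(content, style)             # stylized output image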
Example #53
0
    def ssl_check(self, dir):
        """
            Check the ssl.log for:
                * SSL connections which don't use port 443.
                * "Free" certificate issuers (taken from the config).
                * Self-signed certificates.
            :return: nothing - all alerts are appended to self.alerts
        """
        ssl_default_ports = get_config(("analysis", "ssl_default_ports"))
        free_issuers = get_config(("analysis", "free_issuers"))

        if os.path.isfile(os.path.join(dir, "ssl.log")):
            for record in ParseZeekLogs(os.path.join(dir, "ssl.log"),
                                        output_format="json",
                                        safe_headers=False):
                if record is not None:
                    c = {
                        "host": record['id.resp_h'],
                        "port": record['id.resp_p'],
                        "issuer": record["issuer"],
                        "validation_status": record["validation_status"]
                    }
                    if c not in self.ssl:
                        self.ssl.append(c)

        if self.heuristics_analysis:
            for cert in self.ssl:
                host = self.resolve(cert["host"])

                # If the associated host is not whitelisted, check the cert.
                for c in self.conns:
                    if host in c["resolution"]:
                        # Check for non generic SSL port.
                        if cert["port"] not in ssl_default_ports:
                            c["alert_tiggered"] = True
                            self.alerts.append({
                                "title":
                                "SSL connection on a non-standard port ({}) to {}"
                                .format(cert["port"], host),
                                "description":
                                "It is not common to see SSL connections issued from smartphones using non-standard ports. Even if this can be totally legit,"
                                +
                                " we recommend to check the reputation of {}, by looking at its WHOIS record, the associated autonomous system, its creation date, and "
                                .format(host) +
                                " by searching for it on the internet.",
                                "host":
                                host,
                                "level":
                                "Moderate",
                                "id":
                                "SSL-01"
                            })
                        # Check Free SSL certificates.
                        if cert["issuer"] in free_issuers:
                            c["alert_tiggered"] = True
                            self.alerts.append({
                                "title":
                                "An SSL connection to {} is using a free certificate."
                                .format(host),
                                "description":
                                "Free certificates — such as Let's Encrypt — are wildly used by command and control servers associated to "
                                +
                                "malicious implants or phishing web pages. We recommend to check the host associated to this certificate, "
                                +
                                "by looking at the domain name, its creation date, or by checking its reputation on the internet.",
                                "host":
                                host,
                                "level":
                                "Moderate",
                                "id":
                                "SSL-02"
                            })
                        # Check for self-signed certificates.
                        if cert["validation_status"] == "self signed certificate in certificate chain":
                            c["alert_tiggered"] = True
                            self.alerts.append({
                                "title":
                                "The certificate associated to {} is self-signed."
                                .format(host),
                                "description":
                                "The use of self-signed certificates is a common thing for attacker infrastructure. We recommend to check the host {} "
                                .format(host) +
                                "which is associated to this certificate, by looking at the domain name (if any), its WHOIS record, its creation date, and "
                                +
                                " by checking its reputation on the internet.",
                                "host":
                                host,
                                "level":
                                "Moderate",
                                "id":
                                "SSL-03"
                            })
Example #54
0
  x = torch.sigmoid(x)
  show(x[0].permute(1, 2, 0).detach().cpu(), "test", 0)
  # print(x.size())



if __name__ == '__main__':
  os.environ["CUDA_VISIBLE_DEVICES"]="0,1,2,3"
  parser = argparse.ArgumentParser()
  parser.add_argument("--no_cuda", default=False, help="Specify if you want to use cuda")
  parser.add_argument("--root", default="./")
  opt = parser.parse_args()

  root = opt.root
  config_path = os.path.join(opt.root, "config.yaml")
  configs = get_config(config_path)
  batch_size = configs["params"]["batch_size"]
  n_block = configs["params"]["n_block"]
  n_flows = configs["params"]["n_flows"]
  num_scales = configs["params"]["num_scales"]
  in_channel = configs["params"]["in_channel"]
  lr = float(configs["params"]["lr"])
  data_root = configs["params"]["data_root"]
  img_sz = configs["params"]["img_sz"]
  epochs = configs["params"]["epochs"]
  test_model = configs["params"]["test"]
  model_path = configs["params"]["model_path"]

  use_cuda = (torch.cuda.is_available())
  device = "cuda" if use_cuda else "cpu"
  kwargs = {'num_workers': 1, 'pin_memory': True} if device=="cuda" else {}
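  # A hedged sketch of how these settings are typically consumed; `dataset`
  # is an assumption, since the dataset class is not shown in this excerpt:
  from torch.utils.data import DataLoader
  loader = DataLoader(dataset, batch_size=batch_size, shuffle=True, **kwargs)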
Example #55
0
    def netflow_check(self, dir):
        """
            Enrich and check the netflow from the conn.log against whitelist and IOCs.
            :return: nothing - all alerts are appended to self.alerts
        """
        max_ports = get_config(("analysis", "max_ports"))
        http_default_port = get_config(("analysis", "http_default_port"))

        # Get the netflow from conn.log.
        if os.path.isfile(os.path.join(dir, "conn.log")):
            for record in ParseZeekLogs(os.path.join(dir, "conn.log"),
                                        output_format="json",
                                        safe_headers=False):
                if record is not None:
                    c = {
                        "ip_dst": record["id.resp_h"],
                        "proto": record["proto"],
                        "port_dst": record["id.resp_p"],
                        "service": record["service"],
                        "alert_tiggered": False
                    }
                    if c not in self.conns:
                        self.conns.append(c)

        # Let's add some dns resolutions.
        for c in self.conns:
            c["resolution"] = self.resolve(c["ip_dst"])

        # Order the conns list by the resolution field.
        self.conns = sorted(self.conns, key=lambda c: c["resolution"])

        # Check for whitelisted assets, if any, delete the record.
        if self.whitelist_analysis:

            wl_cidrs = [IPNetwork(cidr) for cidr in get_whitelist("cidr")]
            wl_hosts = get_whitelist("ip4addr") + get_whitelist("ip6addr")
            wl_domains = get_whitelist("domain")

            for i, c in enumerate(self.conns):
                if c["ip_dst"] in [ip for ip in wl_hosts]:
                    self.whitelist.append(self.conns[i])
                    self.conns[i] = False
                elif c["resolution"] in wl_domains:
                    self.whitelist.append(self.conns[i])
                    self.conns[i] = False
                elif any(c["resolution"].endswith("." + dom)
                         for dom in wl_domains):
                    self.whitelist.append(self.conns[i])
                    self.conns[i] = False
                elif any(IPAddress(c["ip_dst"]) in cidr for cidr in wl_cidrs):
                    self.whitelist.append(self.conns[i])
                    self.conns[i] = False

            # Let's delete whitelisted connections.
            self.conns = [c for c in self.conns if c is not False]

        if self.heuristics_analysis:
            for c in self.conns:
                # Check for UDP / ICMP (unusual for a smartphone).
                if c["proto"] in ["UDP", "ICMP"]:
                    c["alert_tiggered"] = True
                    self.alerts.append({
                        "title":
                        "{} communication going outside the local network to {}."
                        .format(c["proto"].upper(), c["resolution"]),
                        "description":
                        "The {} protocol is commonly used in internal networks. Please, verify if the host {} leveraged other alerts which may "
                        .format(c["proto"].upper(), c["resolution"]) +
                        "indicates a possible malicious behavior.",
                        "host":
                        c["resolution"],
                        "level":
                        "Moderate",
                        "id":
                        "PROTO-01"
                    })
                # Check for use of ports over 1024.
                if c["port_dst"] >= max_ports:
                    c["alert_tiggered"] = True
                    self.alerts.append({
                        "title":
                        "{} connection to {} to a port over or equal to {}.".
                        format(c["proto"].upper(), c["resolution"], max_ports),
                        "description":
                        "{} connections have been seen to {} by using the port {}. The use of non-standard port can be sometimes associated to malicious activities. "
                        .format(c["proto"].upper(), c["resolution"],
                                c["port_dst"]) +
                        "We recommend to check if this host has a good reputation by looking on other alerts and search it on the internet.",
                        "host":
                        c["resolution"],
                        "level":
                        "Low",
                        "id":
                        "PROTO-02"
                    })
                # Check for use of HTTP.
                if c["service"] == "http" and c[
                        "port_dst"] == http_default_port:
                    c["alert_tiggered"] = True
                    self.alerts.append({
                        "title":
                        "HTTP communications have been done to the host {}".
                        format(c["resolution"]),
                        "description":
                        "Your device exchanged with the host {} by using HTTP, an unencrypted protocol. "
                        .format(c["resolution"]) +
                        "Even if this behavior is not malicious by itself, it is unusual to see HTTP communications issued from smartphone applications "
                        +
                        "running in the background. Please check the host reputation by searching it on the internet.",
                        "host":
                        c["resolution"],
                        "level":
                        "Low",
                        "id":
                        "PROTO-03"
                    })

                # Check for use of HTTP on a non standard port.
                if c["service"] == "http" and c[
                        "port_dst"] != http_default_port:
                    c["alert_tiggered"] = True
                    self.alerts.append({
                        "title":
                        "HTTP communications have been seen to the host {} on a non standard port ({})."
                        .format(c["resolution"], c["port_dst"]),
                        "description":
                        "Your device exchanged with the host {} by using HTTP, an unencrypted protocol on the port {}. "
                        .format(c["resolution"], c["port_dst"]) +
                        "This behavior is quite unusual. Please check the host reputation by searching it on the internet.",
                        "host":
                        c["resolution"],
                        "level":
                        "Moderate",
                        "id":
                        "PROTO-04"
                    })
                # Check for non-resolved IP address.
                if c["ip_dst"] == c["resolution"]:
                    c["alert_tiggered"] = True
                    self.alerts.append({
                        "title":
                        "The server {} hasn't been resolved by any DNS query during the session"
                        .format(c["ip_dst"]),
                        "description":
                        "It means that the server {} is likely not resolved by any domain name or the resolution has already been cached by "
                        .format(c["ip_dst"]) +
                        "the device. If the host appears in other alerts, please check it.",
                        "host":
                        c["ip_dst"],
                        "level":
                        "Low",
                        "id":
                        "PROTO-05"
                    })

        if self.iocs_analysis:

            bl_cidrs = [[IPNetwork(cidr[0]), cidr[1]]
                        for cidr in get_iocs("cidr")]
            bl_hosts = get_iocs("ip4addr") + get_iocs("ip6addr")
            bl_domains = get_iocs("domain")
            bl_freedns = get_iocs("freedns")
            bl_nameservers = get_iocs("ns")
            bl_tlds = get_iocs("tld")

            for c in self.conns:
                # Check for blacklisted IP address.
                for host in bl_hosts:
                    if c["ip_dst"] == host[0]:
                        c["alert_tiggered"] = True
                        self.alerts.append({
                            "title":
                            "A connection has been made to {} ({}) which is tagged as {}."
                            .format(c["resolution"], c["ip_dst"],
                                    host[1].upper()),
                            "description":
                            "The host {} has been explicitly blacklisted for malicious activities. Your device is likely compromised "
                            .format(c["ip_dst"]) +
                            "and needs to be investigated more deeply by IT security professionals.",
                            "host":
                            c["resolution"],
                            "level":
                            "High",
                            "id":
                            "IOC-01"
                        })
                        break
                # Check for blacklisted CIDR.
                for cidr in bl_cidrs:
                    if IPAddress(c["ip_dst"]) in cidr[0]:
                        c["alert_tiggered"] = True
                        self.alerts.append({
                            "title":
                            "Communication to {} under the CIDR {} which is tagged as {}."
                            .format(c["resolution"], cidr[0], cidr[1].upper()),
                            "description":
                            "The server {} is hosted under a network which is known to host malicious activities. Even if this behavior is not malicious by itself, "
                            .format(c["resolution"]) +
                            "you need to check if other alerts are also mentioning this host. If you have some doubts, please "
                            +
                            "search this host on the internet to see if its legit or not.",
                            "host":
                            c["resolution"],
                            "level":
                            "Moderate",
                            "id":
                            "IOC-02"
                        })
                # Check for blacklisted domain.
                for domain in bl_domains:
                    if c["resolution"].endswith(domain[0]):
                        if domain[1] != "tracker":
                            c["alert_tiggered"] = True
                            self.alerts.append({
                                "title":
                                "A DNS request have been done to {} which is tagged as {}."
                                .format(c["resolution"], domain[1].upper()),
                                "description":
                                "The domain name {} seen in the capture has been explicitly tagged as malicious. This indicates that "
                                .format(c["resolution"]) +
                                "your device is likely compromised and needs to be investigated deeply.",
                                "host":
                                c["resolution"],
                                "level":
                                "High",
                                "id":
                                "IOC-03"
                            })
                        else:
                            c["alert_tiggered"] = True
                            self.alerts.append({
                                "title":
                                "A DNS request have been done to {} which is tagged as {}."
                                .format(c["resolution"], domain[1].upper()),
                                "description":
                                "The domain name {} seen in the capture has been explicitly tagged as a Tracker. This "
                                .format(c["resolution"]) +
                                "indicates that one of the active apps is geo-tracking your moves.",
                                "host":
                                c["resolution"],
                                "level":
                                "Moderate",
                                "id":
                                "IOC-03"
                            })
                # Check for blacklisted FreeDNS.
                for domain in bl_freedns:
                    if c["resolution"].endswith("." + domain[0]):
                        c["alert_tiggered"] = True
                        self.alerts.append({
                            "title":
                            "A DNS request have been done to the domain {} which is a Free DNS."
                            .format(c["resolution"]),
                            "description":
                            "The domain name {} is using a Free DNS service. This kind of service is commonly used by cybercriminals "
                            .format(c["resolution"]) +
                            "or state-sponsored threat actors during their operations. It is very suspicious that an application running in background use this kind of service, please investigate.",
                            "host":
                            c["resolution"],
                            "level":
                            "Moderate",
                            "id":
                            "IOC-04"
                        })

                # Check for suspect tlds.
                for tld in bl_tlds:
                    if c["resolution"].endswith(tld[0]):
                        c["alert_tiggered"] = True
                        self.alerts.append({
                            "title":
                            "A DNS request have been done to the domain {} which contains a suspect TLD."
                            .format(c["resolution"]),
                            "description":
                            "The domain name {} is using a suspect Top Level Domain ({}). Even not malicious, this non-generic TLD is used regularly by cybercrime "
                            .format(c["resolution"], tld[0]) +
                            "or state-sponsored operations. Please check this domain by searching it on an internet search engine. If other alerts are related to this "
                            + "host, please consider it as very suspicious.",
                            "host":
                            c["resolution"],
                            "level":
                            "Low",
                            "id":
                            "IOC-05"
                        })

                # Check for use of suspect nameservers.
                try:
                    name_servers = pydig.query(c["resolution"], "NS")
                except Exception:
                    name_servers = []

                if name_servers:
                    for ns in bl_nameservers:
                        if name_servers[0].endswith(".{}.".format(ns[0])):
                            c["alert_tiggered"] = True
                            self.alerts.append({
                                "title":
                                "The domain {} is using a suspect nameserver ({})."
                                .format(c["resolution"], name_servers[0]),
                                "description":
                                "The domain name {} is using a nameserver that has been explicitly tagged to be associated to malicious activities. "
                                .format(c["resolution"]) +
                                "Many cybercriminals and state-sponsored threat actors are using this kind of registrars because they allow cryptocurrencies and anonymous payments. It"
                                +
                                " is adviced to investigate on this domain and the associated running application by doing a forensic analysis of the phone.",
                                "host":
                                c["resolution"],
                                "level":
                                "Moderate",
                                "id":
                                "IOC-06"
                            })
Example #56
0
                               end=month_end.strftime('%s'),
                               query=','.join(query))
    data = results['series'][0].get('pointlist', [])
    senvs = [_get_env(series) for series in results['series']]
    headers = ['Month'] + ['Avg data on "{}" (TB)'.format(env) for env in senvs]
    print(','.join(headers))
    for i in range(len(data)):
        posix_time = data[i][0]
        datespan = datetime.utcfromtimestamp(
            posix_time / 1000).date()  # python datetime POSIX TZ issue
        print(", ".join([str(datespan)] + [
            '{}'.format(
                int(series['pointlist'][i][1]) / 1000000000000
                if series['pointlist'][i][1] is not None else '---')
            for series in results['series']
        ]))


if __name__ == "__main__":

    args = _get_args()

    month_start = get_date(args.month_start)
    month_end = get_date(args.month_end)

    config = get_config(args.config)
    init_datadog(config)

    print_requests(args.env, month_start, month_end)
Example #57
0
    def get_source(self, environment, template):
        if template.startswith('admin/'):
            return super(ThemeLoader, self).get_source(environment, template)
        theme = get_config('ctf_theme')
        template = "/".join([theme, template])
        return super(ThemeLoader, self).get_source(environment, template)
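# A hedged usage example: wiring the loader into a Flask app so themed
# templates resolve through it (assumes ThemeLoader subclasses
# jinja2.FileSystemLoader and that `app` is the Flask instance):
app.jinja_loader = ThemeLoader(os.path.join(app.root_path, 'templates'))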
Example #58
0
        comments = ''
    else:
        num = len(comments['comments']['comment'])
        comments = ' || '.join([x['_content'] for x in comments['comments']['comment']])
    print(photo_id, num)
    return num, comments

def get_num_favorites(photo_id,flickr):
    favs=flickr.photos.getFavorites(photo_id=str(photo_id))
    #num = len(favs['photo']['person'])
    num = favs['photo']['total']
    print(photo_id, num)
    return num

if __name__=='__main__':
    config = utils.get_config('config.ini')
    
    host = config['db']['host']
    user = config['db']['user']
    password = config['db']['password']
    db_name = config['db']['db']
    table_name = config['db']['table']
    api_key = config['flickr']['api_key']
    api_secret = config['flickr']['api_secret']    
    
    flickr = flickrapi.FlickrAPI(api_key, api_secret, format='parsed-json')

    con = mdb.connect(host=host,user=user,password=password,db=db_name,charset='utf8mb4')
    cur = con.cursor()
    
    query = "SELECT id,num_comments,num_favs,comments FROM photos where location='sf' and date_taken between '2012-01-01' and NOW()"
Example #59
0
                    return


if __name__ == '__main__':
    # TODO check for comfortband height and whether correctly implemented
    building = sys.argv[1]

    # read from config file
    try:
        yaml_filename = "Buildings/%s/%s.yml" % (sys.argv[1], sys.argv[1])
    except IndexError:
        sys.exit(
            "Please specify the configuration file as: python2 controller.py config_file.yaml"
        )

    cfg = utils.get_config(building)

    client = utils.choose_client()  # TODO add config

    hc = HodClient("xbos/hod", client)

    tstats = utils.get_thermostats(client, hc, cfg["Building"])

    # --- Thermal Model Init ------------
    # initialize and fit thermal model

    # only single stage cooling buildings get to retrive data. otherwise takes too long.
    # if building in ["north-berkeley-senior-center", "ciee", "avenal-veterans-hall", "orinda-community-center",
    #                 "avenal-recreation-center", "word-of-faith-cc", "jesse-turner-center", "berkeley-corporate-yard"]:
    if building != "south-berkeley-senior-center":
        thermal_data = utils.get_data(cfg=cfg,
Example #60
0
import pathlib
import warnings
from argparse import ArgumentParser

import pytorch_lightning as pl
from pytorch_lightning.callbacks import ModelCheckpoint
from pytorch_lightning.loggers import TensorBoardLogger

from utils import get_config, module_from_config, datamodule_from_config

if __name__ == '__main__':
    warnings.simplefilter('ignore')

    parser = ArgumentParser()
    parser.add_argument('-c', '--config_path', type=str, required=True)
    parser.add_argument('-d', '--data_dir', type=str, required=True)
    parser.add_argument('-m', '--model_dir', type=str, default='./models')
    args = parser.parse_args()

    config = get_config(args.config_path)
    config.data_dir = args.data_dir
    config.model_dir = args.model_dir

    pl.seed_everything(config.seed)

    model = module_from_config(config)
    print(model)

    vcon_dm = datamodule_from_config(config)

    model_dir = pathlib.Path(config.model_dir)
    if not model_dir.exists():
        model_dir.mkdir(parents=True)
    save_fn = str(model_dir / 'vc_{epoch:04d}-{val_loss:.6f}')
    mc = ModelCheckpoint(filepath=save_fn,