Example #1
def test_load_old_checkpoint(cls):
    dataset = AmazonReviewSentimentCrossLingualDataset()
    sha1sum_id = "4ba096cdf6bd76c06386f2c27140db055e59c91b"
    checkpoint_name = "mdeberta-v3-base-checkpoint"
    save_path = os.path.join(get_home_dir(), "checkpoints")
    file_path = os.path.join(save_path, f"{checkpoint_name}.zip")
    checkpoint_path = os.path.join(get_home_dir(), "checkpoints",
                                   checkpoint_name)
    if os.path.exists(save_path):
        shutil.rmtree(save_path)
    download(
        url=f"s3://automl-mm-bench/unit-tests-0.4/checkpoints/{checkpoint_name}.zip",
        path=file_path,
        sha1_hash=sha1sum_id,
    )
    protected_zip_extraction(
        file_path,
        sha1_hash=sha1sum_id,
        folder=save_path,
    )
    predictor = cls.load(checkpoint_path)
    verify_predictor_save_load(predictor, dataset.test_df, cls=cls)

    # continuous training
    predictor.fit(
        dataset.train_df,
        presets="multilingual",
        time_limit=10,
        hyperparameters={"optimization.top_k_average_method": "uniform_soup"},
    )
    verify_predictor_save_load(predictor, dataset.test_df, cls=cls)
Example #2
 def _get_config_files(self):
     if string.find(sys.platform, 'sunos') > -1:
         return [
             "/opt/redhat/rhn/solaris/etc/sysconfig/rhn/%s.conf" % self.section,
             os.path.join(utils.get_home_dir(), self._local_config_file_name),
             self._local_config_file_name,
         ]
     else:
         return [
             "/etc/sysconfig/rhn/%s.conf" % self.section,
             os.path.join(utils.get_home_dir(), self._local_config_file_name),
             self._local_config_file_name,
         ]
Example #3
 def _get_config_files(self):
     if string.find(sys.platform, 'sunos') > -1:
         return [
             "/opt/redhat/rhn/solaris/etc/sysconfig/rhn/%s.conf" %
             self.section,
             os.path.join(utils.get_home_dir(),
                          self._local_config_file_name),
             self._local_config_file_name,
         ]
     else:
         return [
             "/etc/sysconfig/rhn/%s.conf" % self.section,
             os.path.join(utils.get_home_dir(),
                          self._local_config_file_name),
             self._local_config_file_name,
         ]
Example #4
	def slot_playlist(self):
		dialog = QtGui.QFileDialog()
		dialog.setFileMode(QtGui.QFileDialog.ExistingFile)
		if config.post_download_playlist_path:
			dialog.setDirectory(os.path.dirname(config.post_download_playlist_path))
		else:
			# The default playlist directory
			dialog.setDirectory(r'%s\My Documents\My Music\My Playlists' % utils.get_home_dir())
		
		f = unicode(dialog.getOpenFileName(caption=tr("Open Playlist"), filter=tr("Supported Playlist Files") + " (*.m3u *.wpl)"))
		f = f.replace('/','\\')
		
		if f:
			try:
				log.debug("Adding %s to the %s playlist..." % (self.audio_path, f))
				utils.add_item_to_playlist(f, self.audio_path)
				self.statusBar_append = tr("; Saved to playlist")
			except (IOError, RuntimeError), e:
				log.error(str(e))
				QtGui.QMessageBox.critical(self, tr("Error"), str(e), QtGui.QMessageBox.Ok)
				return
				
			config.post_download_playlist_path = f
				
			if self.saveSelection_CheckBox.isChecked():
				config.post_download_action = 'addPlaylist'
			
			self.close()
Example #5
	def slot_post_download_action_changed(self, i):
		val = config.post_download_action_dict.keys()[i]
		if val == 'addItunes' and not config.is_itunes_installed:
			QtGui.QMessageBox.critical(self, tr("Error"), tr("iTunes is not installed on this system."), QtGui.QMessageBox.Ok)
			self.post_download_action.setCurrentIndex(config.post_download_action_dict.keys().index(config.post_download_action))
		elif val == 'addPlaylist':
			dialog = QtGui.QFileDialog()
			dialog.setFileMode(QtGui.QFileDialog.ExistingFile)
			if config.post_download_playlist_path:
				dialog.setDirectory(os.path.dirname(config.post_download_playlist_path))
			else:
				dialog.setDirectory(r'%s\My Documents\My Music\My Playlists' % utils.get_home_dir()) # The default playlist directory
			
			f = unicode(dialog.getOpenFileName(caption=tr("Open Playlist"), filter=tr("Supported Playlist Files") + " (*.m3u *.wpl)"))
			f = f.replace('/','\\')
			
			if f:
				config.post_download_playlist_path = f
			elif not config.post_download_playlist_path:
				# if a new playlist is not chosen, and there is NO playlist path in the config file, we should revert post_download_action to the last one.
				self.post_download_action.setCurrentIndex(config.post_download_action_dict.keys().index(config.post_download_action))
				return
		
		config.post_download_action = val
		self.slot_changed_checkbox()
Example #6
 def __init__(self, host=None):
     self.host = host
     ssl_dir = os.path.join(get_home_dir(), "ssl")
     self.key_path = os.path.join(ssl_dir, "dodo.key")
     self.cert_path = os.path.join(ssl_dir, "dodo.crt")
     if not os.path.exists(ssl_dir):
         os.makedirs(ssl_dir)
Example #7
 def __init__(self, host=None):
     self.host = host
     ssl_dir = os.path.join(get_home_dir(), "ssl")
     self.key_path = os.path.join(ssl_dir, "dodo.key")
     self.cert_path = os.path.join(ssl_dir, "dodo.crt")
     if not os.path.exists(ssl_dir):
         os.makedirs(ssl_dir)
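Neither snippet above actually creates the key or certificate files; a hedged sketch of how they could be generated with the standard openssl CLI (the helper name and subject string are illustrative, not part of the original project):

import os
import subprocess

def ensure_self_signed_cert(key_path, cert_path):
    # create a throwaway self-signed certificate/key pair if missing
    if not (os.path.exists(key_path) and os.path.exists(cert_path)):
        subprocess.check_call([
            "openssl", "req", "-x509", "-newkey", "rsa:2048", "-nodes",
            "-keyout", key_path, "-out", cert_path,
            "-days", "365", "-subj", "/CN=localhost",
        ])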
Example #8
    def test_move_most_recent_files(self):

        self.data1 = pd.DataFrame({'data': [1, 2, 3, 4, 5]})
        self.data1.to_csv(get_home_dir() + '/Downloads/test1.csv')

        download_start = datetime.now().timestamp()

        self.data2 = pd.DataFrame({'data': [1, 2, 3, 4, 5]})
        self.data2.to_csv(
            get_home_dir() +
            '/Downloads/Britain Coronavirus Disease Prevention Map Mar 06 2020 Id Id Colocation Map_2020-03-31.csv'
        )

        self.assertTrue(os.path.exists(get_home_dir() +
                                       '/Downloads/test1.csv'))
        self.assertTrue(
            os.path.exists(
                get_home_dir() +
                '/Downloads/Britain Coronavirus Disease Prevention Map Mar 06 2020 Id Id Colocation Map_2020-03-31.csv'
            ))

        move_most_recent_files('./tmp1', ['url'], download_start)

        self.assertTrue(os.path.exists(get_home_dir() +
                                       '/Downloads/test1.csv'))
        self.assertTrue(os.path.exists('./tmp1/Britain_2020_03_31.csv'))

        os.remove(get_home_dir() + '/Downloads/test1.csv')
Example #9
    def __init__(self, optionparser, store_true_list):
        sysdir = '/etc/sysconfig/rhn'
        homedir = utils.get_home_dir()
        default = 'rhnpushrc'
        regular = '.rhnpushrc'
        deffile = os.path.join(sysdir, default)
        regfile = os.path.join(homedir, regular)
        cwdfile = os.path.join(os.getcwd(), regular)

        self.cfgFileList = [deffile, regfile, cwdfile]
        self.defaultconfig = rhnpush_config.rhnpushConfigParser(ensure_consistency=True)

        # Get a reference to the object containing command-line options
        self.cmdconfig = optionparser
        self.store_true_list = store_true_list
Example #10
    def __init__(self, optionparser, store_true_list):
        sysdir = '/etc/sysconfig/rhn'
        homedir = utils.get_home_dir()
        default = 'rhnpushrc'
        regular = '.rhnpushrc'
        deffile = os.path.join(sysdir, default)
        regfile = os.path.join(homedir, regular)
        cwdfile = os.path.join(os.getcwd(), regular)

        self.cfgFileList = [deffile, regfile, cwdfile]
        self.defaultconfig = rhnpush_config.rhnpushConfigParser(ensure_consistency=True)

        # Get a reference to the object containing command-line options
        self.cmdconfig = optionparser
        self.store_true_list = store_true_list
Example #11
def test_hpo(searcher, scheduler):
    dataset = PetFinderDataset()

    hyperparameters = {
        "optimization.learning_rate": tune.uniform(0.0001, 0.01),
        "optimization.max_epochs": 1,
        "model.names": ["numerical_mlp", "categorical_mlp", "fusion_mlp"],
        "data.categorical.convert_to_text": False,
        "data.numerical.convert_to_text": False,
        "env.num_workers": 0,
        "env.num_workers_evaluation": 0,
    }

    hyperparameter_tune_kwargs = {
        "searcher": searcher,
        "scheduler": scheduler,
        "num_trials": 2,
    }

    predictor = MultiModalPredictor(
        label=dataset.label_columns[0],
        problem_type=dataset.problem_type,
        eval_metric=dataset.metric,
    )

    save_path = os.path.join(get_home_dir(), "hpo", f"_{searcher}",
                             f"_{scheduler}")
    if os.path.exists(save_path):
        shutil.rmtree(save_path)

    predictor = predictor.fit(
        train_data=dataset.train_df,
        hyperparameters=hyperparameters,
        time_limit=60,
        save_path=save_path,
        hyperparameter_tune_kwargs=hyperparameter_tune_kwargs,
    )

    score = predictor.evaluate(dataset.test_df)
    verify_predictor_save_load(predictor, dataset.test_df)

    # test for continuous training
    predictor = predictor.fit(
        train_data=dataset.train_df,
        hyperparameters=hyperparameters,
        time_limit=60,
        hyperparameter_tune_kwargs=hyperparameter_tune_kwargs,
    )
Example #12
    def slot_post_download_action_changed(self, i):
        val = config.post_download_action_dict.keys()[i]
        if val == 'addItunes' and not config.is_itunes_installed:
            QtGui.QMessageBox.critical(
                self, tr("Error"),
                tr("iTunes is not installed on this system."),
                QtGui.QMessageBox.Ok)
            self.post_download_action.setCurrentIndex(
                config.post_download_action_dict.keys().index(
                    config.post_download_action))
        elif val == 'addPlaylist':
            dialog = QtGui.QFileDialog()
            dialog.setFileMode(QtGui.QFileDialog.ExistingFile)
            if config.post_download_playlist_path:
                dialog.setDirectory(
                    os.path.dirname(config.post_download_playlist_path))
            else:
                dialog.setDirectory(
                    r'%s\My Documents\My Music\My Playlists' %
                    utils.get_home_dir())  # The default playlist directory

            f = unicode(
                dialog.getOpenFileName(caption=tr("Open Playlist"),
                                       filter=tr("Supported Playlist Files") +
                                       " (*.m3u *.wpl)"))
            f = f.replace('/', '\\')

            if f:
                config.post_download_playlist_path = f
            elif not config.post_download_playlist_path:
                # if a new playlist is not chosen, and there is NO playlist path in the config file, we should revert post_download_action to the last one.
                self.post_download_action.setCurrentIndex(
                    config.post_download_action_dict.keys().index(
                        config.post_download_action))
                return

        config.post_download_action = val
        self.slot_changed_checkbox()
Example #13
def test_standalone():  # test standalone feature in MultiModalPredictor.save()
    from unittest import mock
    import torch

    requests_gag = mock.patch(
        'requests.Session.request',
        mock.Mock(side_effect=RuntimeError(
            'Please use the `responses` library to mock HTTP in your tests.')))

    dataset = PetFinderDataset()

    config = {
        MODEL: f"fusion_mlp_image_text_tabular",
        DATA: "default",
        OPTIMIZATION: "adamw",
        ENVIRONMENT: "default",
    }

    hyperparameters = {
        "optimization.max_epochs": 1,
        "model.names": [
            "numerical_mlp", "categorical_mlp", "timm_image", "hf_text",
            "clip", "fusion_mlp"
        ],
        "model.hf_text.checkpoint_name": "prajjwal1/bert-tiny",
        "model.timm_image.checkpoint_name": "swin_tiny_patch4_window7_224",
        "env.num_workers": 0,
        "env.num_workers_evaluation": 0,
    }

    predictor = MultiModalPredictor(
        label=dataset.label_columns[0],
        problem_type=dataset.problem_type,
        eval_metric=dataset.metric,
    )

    save_path = os.path.join(get_home_dir(), "standalone", "false")
    if os.path.exists(save_path):
        shutil.rmtree(save_path)

    predictor.fit(
        train_data=dataset.train_df,
        config=config,
        hyperparameters=hyperparameters,
        time_limit=30,
        save_path=save_path,
    )

    save_path_standalone = os.path.join(get_home_dir(), "standalone", "true")

    predictor.save(
        path=save_path_standalone,
        standalone=True,
    )

    del predictor
    torch.cuda.empty_cache()

    loaded_online_predictor = MultiModalPredictor.load(path=save_path)
    online_predictions = loaded_online_predictor.predict(dataset.test_df,
                                                         as_pandas=False)
    del loaded_online_predictor

    # Check if the predictor can be loaded from an offline environment.
    with requests_gag:
        # No internet connection here. If any command requires an internet connection, a RuntimeError will be raised.
        with tempfile.TemporaryDirectory() as tmpdirname:
            torch.hub.set_dir(tmpdirname)  # block reading files in `.cache`
            loaded_offline_predictor = MultiModalPredictor.load(
                path=save_path_standalone)

    offline_predictions = loaded_offline_predictor.predict(dataset.test_df,
                                                           as_pandas=False)
    del loaded_offline_predictor

    # check if save with standalone=True coincide with standalone=False
    npt.assert_equal(online_predictions, offline_predictions)
Example #14
def sanity_check():
	"Sanity Check for script."
	config.count_application_runs += 1
	_warnings = []
	
	### LOCAL CHECKS ###
	
	# Windows version check
	winver = sys.getwindowsversion()
	log.debug('Running iQuality v%s (r%d) on Windows %d.%d.%d %s' % (__version__, __rev__, winver.major, winver.minor, winver.build, winver.service_pack))
	
	# Python version check
	if sys.version_info < (2, 6) or sys.version_info >= (3, 0):
		msg = "must use python 2.7"
		log.critical(msg)
		raise Exception(msg)
	log.debug('CPython version is %d.%d.%d' % (sys.version_info.major, sys.version_info.minor, sys.version_info.micro))
	log.debug('PyQt version is %s, Qt version is %s' % (QtCore.PYQT_VERSION_STR, QtCore.QT_VERSION_STR))
		
	# youtube-dl check
	try:
		import youtube_dl
		log.debug("youtube-dl version is %s" % youtube_dl.__version__)
	except ImportError:
		log.warning("Could not load the youtube-dl module")
		
	# Phonon version check
	try:
		from PyQt4.phonon import Phonon
		if Phonon.BackendCapabilities.isMimeTypeAvailable('video/x-flv'):
			log.debug('Phonon version is %s. video/x-flv is supported.' % Phonon.phononVersion())
		else:
			log.warning('Phonon version is %s. video/x-flv is not supported.' % Phonon.phononVersion())
	except ImportError:
		log.warning("Could not load the phonon module")
	
	# Free space check
	freespace = utils.get_free_space(config.temp_dir)
	if freespace < 200*1024**2: # 200 MB
		drive = os.path.splitdrive(config.temp_dir)[0]
		log.warning("There are less than 200MB available in drive %s (%.2fMB left)." % (drive, freespace/1024.0**2))
		_warnings.append(NoSpaceWarning(drive, freespace))

	# iTunes' availability check
	itunesPath = r'%s\My Documents\My Music\iTunes\iTunes Media\Automatically Add to iTunes' % utils.get_home_dir()
	if not os.path.exists(itunesPath):
		config.is_itunes_installed = False
		if config.post_download_action == 'addItunes':
			config.post_download_action = 'ask'
		log.warning("iTunes Media not found. setting is_itunes_installed to False")
		
	# Context Menu check
	try: # IMPROVE: REMOVE THE TRY-EXCEPT BLOCK
		if config.id3editor_in_context_menu and not utils.check_context_menu_status():
			log.debug("Registering Context Menu Object...")
			try:
				utils.register_with_context_menu()
			except WindowsError, e:
				if e.winerror == 5: # Access is denied
					log.debug("Access is denied. Setting id3editor_in_context_menu to False.")
					config.id3editor_in_context_menu = False
				else:
					raise
		if not config.id3editor_in_context_menu and utils.check_context_menu_status():
			log.debug("Unregistering Context Menu Object...")
			try:
				utils.unregister_with_context_menu()
			except WindowsError, e:
				if e.winerror == 5: # Access is denied
					log.debug("Access is denied. Setting id3editor_in_context_menu to True.")
					config.id3editor_in_context_menu = True
				else:
					raise
Example #15
def get_config_dir():
    if not os.path.exists('%s/.config/' % get_home_dir() ):
        os.makedirs( '%s/.config/' % get_home_dir() )
    if not os.path.exists('%s/.config/mypyapp' % get_home_dir() ):
        os.makedirs( '%s/.config/mypyapp' % get_home_dir() )
    return '%s/.config/mypyapp' % get_home_dir()
Example #16
 def test_get_home_dir(self):
     self.assertTrue('~' not in get_home_dir())
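For reference, the helper exercised throughout these examples is usually a thin wrapper around os.path.expanduser; a minimal sketch (the exact implementation differs per project) might look like this:

import os

def get_home_dir():
    # Expand '~' to the current user's home directory; the test above
    # checks that the returned path no longer contains the tilde.
    return os.path.expanduser("~")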
Example #17
 def merge_data(self):
     add_data_filename = unicode(QFileDialog.getOpenFileName(self, "Open File", get_home_dir(), get_format_filter()))
     self._merge_data(decode_file(add_data_filename))
Example #18
 def excel_import(self):
     excel_filename = unicode(QFileDialog.getOpenFileName(self, "Open File", get_home_dir(),  "Excel files (*.xls)"))
     if excel_filename:
         data_to_merge = import_from_excel(self.scheme, excel_filename)
         self.parent()._merge_data(data_to_merge)
Example #19
from concurrent.futures import ProcessPoolExecutor

# custom
from utils import get_home_dir
import dl_quandl_EOD as dq

stocks = dq.load_stocks()

DEFAULT_STORAGE = '/home/nate/eod_data/'
# get todays date for checking if files up-to-date
MTN = timezone('America/Denver')
TODAY = datetime.datetime.now(MTN)
WEEKDAY = TODAY.weekday()
HOUR = TODAY.hour

HOME_DIR = get_home_dir()

Q_KEY = os.environ.get('quandl_api')

quandl.ApiConfig.api_key = Q_KEY

spy_vix = {}
closes = {}
dates = {}
for i in range(1, 10):
    print(i)
    spy_vix[i] = quandl.get("CHRIS/CBOE_VX" + str(i))
    # spy_vix[i].to_csv()
    closes[i] = spy_vix[i]['Close']
    dates[i] = spy_vix[i].index
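A hedged follow-up sketch (the file name and the use of DEFAULT_STORAGE are illustrative) showing how the per-contract closes collected above could be persisted:

import os
import pandas as pd

# combine the Close series of each VIX futures contract into one frame
vix_closes = pd.DataFrame(closes)
vix_closes.to_csv(os.path.join(DEFAULT_STORAGE, 'vix_futures_closes.csv'))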
Example #20
import argparse
import os
import re
import shutil
import utils

utils.append_home_to_path(__file__)

from tomahawk.rsync import RsyncMain
from tomahawk.constants import TimeoutError
from tomahawk.expect import CommandWithExpect

TESTS_DIR = os.path.dirname(os.path.abspath(__file__))
TMP_DIR = os.path.join(utils.get_home_dir(__file__), 'tmp')
if not os.path.exists(TMP_DIR):
    os.mkdir(TMP_DIR)

env = os.environ
if env.get('TOMAHAWK_ENV') != None:
    del env['TOMAHAWK_ENV']

hello_file = os.path.join(TMP_DIR, 'hello')
hello_file_copied = os.path.join(TMP_DIR, 'hello.copied')
if os.path.exists(hello_file_copied):
    os.remove(hello_file_copied)
handle = open(hello_file, 'w')
handle.write('hello world')
handle.close()

def test_00_run(monkeypatch):
    EXPECTED = {
Example #21
__author__ = 'CSPF'

from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from os import path, makedirs, stat
from utils import get_home_dir

db_dir = path.join(get_home_dir(), "db")
if not path.exists(db_dir):
    #make sure to create home directory and database directory
    makedirs(db_dir)
db_path = path.join(db_dir, "Database.sqlite")

app = Flask("DodoBank")
app.config["SECRET_KEY"] = "1234"
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + db_path

db = SQLAlchemy(app)
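As a hedged usage sketch (the Account model below is hypothetical, not part of the original project), the db object configured above would typically be used like this:

class Account(db.Model):
    # illustrative model only
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(80), nullable=False)

with app.app_context():
    db.create_all()  # creates Database.sqlite inside the "db" directory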
Example #22
 def __init__(self):
     self.homedir = utils.get_home_dir()
Example #23
def sanity_check():
    "Sanity Check for script."
    config.count_application_runs += 1
    _warnings = []

    ### LOCAL CHECKS ###

    # Windows version check
    winver = sys.getwindowsversion()
    log.debug('Running iQuality v%s (r%d) on Windows %d.%d.%d %s' %
              (__version__, __rev__, winver.major, winver.minor, winver.build,
               winver.service_pack))

    # Python version check
    if sys.version_info < (2, 6) or sys.version_info >= (3, 0):
        msg = "must use python 2.7"
        log.critical(msg)
        raise Exception(msg)
    log.debug('CPython version is %d.%d.%d' %
              (sys.version_info.major, sys.version_info.minor,
               sys.version_info.micro))
    log.debug('PyQt version is %s, Qt version is %s' %
              (QtCore.PYQT_VERSION_STR, QtCore.QT_VERSION_STR))

    # youtube-dl check
    try:
        import youtube_dl
        log.debug("youtube-dl version is %s" % youtube_dl.__version__)
    except ImportError:
        log.warning("Could not load the youtube-dl module")

    # Phonon version check
    try:
        from PyQt4.phonon import Phonon
        if Phonon.BackendCapabilities.isMimeTypeAvailable('video/x-flv'):
            log.debug('Phonon version is %s. video/x-flv is supported.' %
                      Phonon.phononVersion())
        else:
            log.warning('Phonon version is %s. video/x-flv is not supported.' %
                        Phonon.phononVersion())
    except ImportError:
        log.warning("Could not load the phonon module")

    # Free space check
    freespace = utils.get_free_space(config.temp_dir)
    if freespace < 200 * 1024**2:  # 200 MB
        drive = os.path.splitdrive(config.temp_dir)[0]
        log.warning(
            "There are less than 200MB available in drive %s (%.2fMB left)." %
            (drive, freespace / 1024.0**2))
        _warnings.append(NoSpaceWarning(drive, freespace))

    # iTunes' availability check
    itunesPath = r'%s\My Documents\My Music\iTunes\iTunes Media\Automatically Add to iTunes' % utils.get_home_dir()
    if not os.path.exists(itunesPath):
        config.is_itunes_installed = False
        if config.post_download_action == 'addItunes':
            config.post_download_action = 'ask'
        log.warning(
            "iTunes Media not found. setting is_itunes_installed to False")

    # Context Menu check
    try:  # IMPROVE: REMOVE THE TRY-EXCEPT BLOCK
        if config.id3editor_in_context_menu and not utils.check_context_menu_status():
            log.debug("Registering Context Menu Object...")
            try:
                utils.register_with_context_menu()
            except WindowsError, e:
                if e.winerror == 5:  # Access is denied
                    log.debug(
                        "Access is denied. Setting id3editor_in_context_menu to False."
                    )
                    config.id3editor_in_context_menu = False
                else:
                    raise
        if not config.id3editor_in_context_menu and utils.check_context_menu_status():
            log.debug("Unregistering Context Menu Object...")
            try:
                utils.unregister_with_context_menu()
            except WindowsError, e:
                if e.winerror == 5:  # Access is denied
                    log.debug(
                        "Access is denied. Setting id3editor_in_context_menu to True."
                    )
                    config.id3editor_in_context_menu = True
                else:
                    raise
Example #24
 def __init__(self):
     self.homedir = utils.get_home_dir()
     self.desktop = os.getenv('XDG_CURRENT_DESKTOP')
     if self.desktop is None:
         self.desktop = os.getenv('XDG_SESSION_DESKTOP')
Example #25
def test_predictor(
    dataset_name,
    model_names,
    text_backbone,
    image_backbone,
    top_k_average_method,
    efficient_finetune,
    loss_function,
):
    dataset = ALL_DATASETS[dataset_name]()
    metric_name = dataset.metric

    predictor = MultiModalPredictor(
        label=dataset.label_columns[0],
        problem_type=dataset.problem_type,
        eval_metric=metric_name,
    )
    config = {
        MODEL: f"fusion_mlp_image_text_tabular",
        DATA: "default",
        OPTIMIZATION: "adamw",
        ENVIRONMENT: "default",
    }
    hyperparameters = {
        "optimization.max_epochs": 1,
        "model.names": model_names,
        "env.num_workers": 0,
        "env.num_workers_evaluation": 0,
        "optimization.top_k_average_method": top_k_average_method,
        "optimization.efficient_finetune": efficient_finetune,
        "optimization.loss_function": loss_function,
    }
    if text_backbone is not None:
        hyperparameters.update({
            "model.hf_text.checkpoint_name": text_backbone,
        })
    if image_backbone is not None:
        hyperparameters.update({
            "model.timm_image.checkpoint_name": image_backbone,
        })
    save_path = os.path.join(get_home_dir(), "outputs", dataset_name)
    if text_backbone is not None:
        save_path = os.path.join(save_path, text_backbone)
    if image_backbone is not None:
        save_path = os.path.join(save_path, image_backbone)

    if os.path.exists(save_path):
        shutil.rmtree(save_path)
    predictor.fit(
        train_data=dataset.train_df,
        config=config,
        hyperparameters=hyperparameters,
        time_limit=30,
        save_path=save_path,
    )

    score = predictor.evaluate(dataset.test_df)
    verify_predictor_save_load(predictor, dataset.test_df)

    # Test for continuous fit
    predictor.fit(
        train_data=dataset.train_df,
        config=config,
        hyperparameters=hyperparameters,
        time_limit=30,
    )
    verify_predictor_save_load(predictor, dataset.test_df)

    # Saving to folder, loading the saved model and call fit again (continuous fit)
    with tempfile.TemporaryDirectory() as root:
        predictor.save(root)
        predictor = MultiModalPredictor.load(root)
        predictor.fit(
            train_data=dataset.train_df,
            config=config,
            hyperparameters=hyperparameters,
            time_limit=30,
        )
Example #26
 def __init__(self):
     self.homedir = utils.get_home_dir()
     self.desktop = os.getenv('XDG_CURRENT_DESKTOP')
     if self.desktop is None:
         self.desktop = os.getenv('XDG_SESSION_DESKTOP')
Example #27
 def load_data(self):
     data_filename = unicode(QFileDialog.getOpenFileName(self, "Open File", get_home_dir(), get_format_filter()))
     if data_filename:
         self.__load_data(data_filename)
Example #28
import argparse
import os
import re
import shutil
import utils

utils.append_home_to_path(__file__)

from tomahawk.rsync import RsyncMain
from tomahawk.constants import TimeoutError
from tomahawk.expect import CommandWithExpect

TESTS_DIR = os.path.dirname(os.path.abspath(__file__))
TMP_DIR = os.path.join(utils.get_home_dir(__file__), 'tmp')
if not os.path.exists(TMP_DIR):
    os.mkdir(TMP_DIR)

env = os.environ
if env.get('TOMAHAWK_ENV') != None:
    del env['TOMAHAWK_ENV']

hello_file = os.path.join(TMP_DIR, 'hello')
hello_file_copied = os.path.join(TMP_DIR, 'hello.copied')
if os.path.exists(hello_file_copied):
    os.remove(hello_file_copied)
handle = open(hello_file, 'w')
handle.write('hello world')
handle.close()


def test_00_run(monkeypatch):
Example #29
def main(argv):
    """Main function"""

    ## Get user information ##
    global op_system
    op_system = os.name
    if op_system != 'nt':
        op_system = subprocess.Popen(['uname', '-s'],
                                     stdout=subprocess.PIPE).communicate()[0]
        op_system = op_system.rstrip('\n')
        # change Cygwin to say 'CYGWIN' always
        under_score_idx = op_system.find('_')
        if under_score_idx != -1:
            op_system = op_system[:under_score_idx]

    if op_system != 'Linux' and op_system != 'Darwin':
        print('Error: %s is not a supported operating system at this time.'
              % op_system)
        exit(5)

    global data_dir
    data_dir = os.path.join(utils.get_home_dir(), '.pimessage/')

    opt1 = grab_opt(argv, 1)
    if opt1 == 'uninstall':
        exit(uninstall())

    script_name = argv[0]
    if not os.path.isdir(data_dir):
        install(script_name, data_dir)

    dir_files = subprocess.Popen(['ls', '-A', data_dir],
                                 stdout=subprocess.PIPE).communicate()[0]

    # must be in correct ls -A order
    correct_dir_files = """contacts
conversations
daemonError.log
editor
"""

    alt_dir_files = """contacts
conversations
editor
"""

    if dir_files != correct_dir_files and dir_files != alt_dir_files:
        install(script_name, data_dir)

    # get user's chosen editor
    edit_cmd = open(data_dir+'editor', 'r').read().rstrip('\n')
    if not subprocess.Popen(['which', edit_cmd],
                            stdout=subprocess.PIPE).communicate()[0]:
        print('Error: %s is not a valid editor. Please adjust your editor '
              'value' % edit_cmd)
        exit(2)

    global host_ip
    host_ip = ip.get_host_ip()
    #if host_ip == IP_FAILURE:
    #    print 'Error: your IP address could not be correctly retrieved.'
    #    exit(2)

    ## Option parsing ##
    # figure out which option was called
    parse_opts(argv, edit_cmd)
    return 0
Example #30
# core
import time
import sys
from concurrent.futures import ProcessPoolExecutor

# custom
sys.path.append('../code')
import dl_quandl_EOD as dq
import data_processing as dp
import short_squeeze_eda as sse
from utils import get_home_dir
HOME_DIR = get_home_dir(repo_name='scrape_stocks')

# installed
import numpy as np
import pandas as pd
from tqdm import tqdm
import talib  # needed by calc_vol below (talib.SMA)

# plotting
import matplotlib.pyplot as plt
%matplotlib inline
from plotly.offline import init_notebook_mode, iplot
from plotly.graph_objs import Scatter, Scattergl, Figure, Layout

def calc_vol(st, mean_vol):
    """
    takes dataframe of stock data (st) and calculates tp, 50d-mva, and volatility
    also takes dictionary (mean_vol) as arg
    """
    st['typical_price'] = st[['Adj_High', 'Adj_Low', 'Adj_Close']].mean(axis=1)
    st['50d_mva'] = talib.SMA(st['typical_price'].values, timeperiod=50)
Example #31
__author__ = 'CSPF'

from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from os import path,makedirs,stat
from utils import get_home_dir

db_dir = path.join(get_home_dir(),"db")
if not path.exists(db_dir):
    #make sure to create home directory and database directory
    makedirs(db_dir)
db_path = path.join(db_dir,"Database.sqlite")

app = Flask("DodoBank")
app.config["SECRET_KEY"]="1234"
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///'+db_path

db = SQLAlchemy(app)
Example #32
def test_distillation():
    dataset = PetFinderDataset()

    hyperparameters = {
        "optimization.max_epochs": 1,
        "model.names": ["hf_text", "timm_image", "fusion_mlp"],
        "model.hf_text.checkpoint_name": "prajjwal1/bert-tiny",
        "model.timm_image.checkpoint_name": "swin_tiny_patch4_window7_224",
        "env.num_workers": 0,
        "env.num_workers_evaluation": 0,
    }

    teacher_predictor = MultiModalPredictor(
        label=dataset.label_columns[0],
        problem_type=dataset.problem_type,
        eval_metric=dataset.metric,
    )

    teacher_save_path = os.path.join(get_home_dir(), "petfinder", "teacher")
    if os.path.exists(teacher_save_path):
        shutil.rmtree(teacher_save_path)

    teacher_predictor = teacher_predictor.fit(
        train_data=dataset.train_df,
        hyperparameters=hyperparameters,
        time_limit=30,
        save_path=teacher_save_path,
    )

    # test for distillation
    predictor = MultiModalPredictor(
        label=dataset.label_columns[0],
        problem_type=dataset.problem_type,
        eval_metric=dataset.metric,
    )

    student_save_path = os.path.join(get_home_dir(), "petfinder", "student")
    if os.path.exists(student_save_path):
        shutil.rmtree(student_save_path)

    predictor = predictor.fit(
        train_data=dataset.train_df,
        teacher_predictor=teacher_predictor,
        hyperparameters=hyperparameters,
        time_limit=30,
        save_path=student_save_path,
    )
    verify_predictor_save_load(predictor, dataset.test_df)

    # test for distillation with teacher predictor path
    predictor = MultiModalPredictor(
        label=dataset.label_columns[0],
        problem_type=dataset.problem_type,
        eval_metric=dataset.metric,
    )

    student_save_path = os.path.join(get_home_dir(), "petfinder", "student")
    if os.path.exists(student_save_path):
        shutil.rmtree(student_save_path)

    predictor = predictor.fit(
        train_data=dataset.train_df,
        teacher_predictor=teacher_predictor.path,
        hyperparameters=hyperparameters,
        time_limit=30,
        save_path=student_save_path,
    )
    verify_predictor_save_load(predictor, dataset.test_df)
Example #33
File: sound.py  Project: fanyujiao/CMS
 def __init__(self):
     self.homedir = utils.get_home_dir()
Example #34
 def __init__(self):
     self.location = os.path.join(utils.get_home_dir(), ".rhnpushcache")
     self.session = None
Example #35
def install(script_name, homedir):
    """Installs the project"""
    print 'Initializing new user'

    # Let's make sure everyone knows for sure that this is being installed
    print """
    Do you grant piMessage permission to install on your computer?

    PiMessage will have access to:

    - your IP address
    - any contacts you enter into PiMessage
    - all of your conversations through PiMessage

    PiMessage will create on your computer:

    - a directory to save the following PiMessage information
    - a file to save your contacts and their saved IP addresses
    - your conversations with contacts
    - your preferred text editor

    """

    decision = raw_input('Do you want to install PiMessage? (y/n): ')
    if decision != 'y':
        print 'Not installing. Terminating PiMessage.'
        exit(1)

    # make the directory
    if not os.path.isdir(homedir):
        try:
            os.mkdir(homedir)
        except OSError:
            exit(DIR_FAILURE)

    # generate user's keys
    # todo


    if os.system('which vim >/dev/null 2>&1') == 0:
        my_editor = 'vim'
    else:
        my_editor = 'nano'

    os.system('clear')
    for k in [0, 1, 2, 3]:
        if k == 3:
            print 'Error: too many tries for editor.'
            exit(1)
        # Get user input
        prompt = '\nWhat is your preferred text editor? Press enter to '
        prompt += 'default to '+my_editor+", enter 'cancel' to cancel the "
        prompt += 'installation. '
        resp = raw_input(prompt)
        if resp == 'cancel' or resp == "'cancel'":
            # cancel the installation now
            exit(1)
        elif not resp:
            print 'Selecting', my_editor
            break
        else:
            # check if their editor is a valid command
            if os.system('which '+resp+' >/dev/null 2>&1') == 0:
                my_editor = resp
                break
            else:
                print resp, 'is not a recognized command.'

    # write info to files
    with open(homedir+'editor', 'w') as fname:
        fname.write(my_editor) # doesn't terminate in newline

    if not os.path.isdir(homedir + 'conversations'):
        try:
            os.mkdir(homedir + 'conversations', 0700)
        except OSError:
            exit(DIR_FAILURE)
    with open(homedir+'contacts', 'w') as fname:
        fname.write('') # doesn't terminate in newline

    dir_path = os.path.abspath(os.path.dirname(sys.argv[0]))
    script_name = dir_path+'/pimessage.py'

    # alias `pimessage' to point to this script
    _bashrc = os.path.join(utils.get_home_dir(), '.bashrc')

    grep_results = subprocess.Popen(['grep', '^alias \\+pimessage='+script_name,
                                     _bashrc],
                                    stdout=subprocess.PIPE).communicate()[0]
    if grep_results == '':
        # must append alias command
        try:
            with open(_bashrc, 'a') as fname:
                fname.write('\n# For PiMessage -- do not delete\n')
                fname.write('alias pimessage='+script_name+'\n')
        except IOError:
            print 'Error applying shell alias for pimessage'

    # start pmdaemon at startup
    _profile = os.path.join(utils.get_home_dir(), '.profile')
    grep_daemon_cmd = ['grep', '^'+dir_path+'/pmdaemon.py', _profile]
    grep_results = subprocess.Popen(grep_daemon_cmd,
                                    stdout=subprocess.PIPE).communicate()[0]
    if not grep_results:
        # must append alias command
        start_daemon_cmd = dir_path+'/pmdaemon.py &'
        flag_cmd = dir_path+'/pmdaemon.py -f'
        try:
            with open(_profile, 'a') as fname:
                fname.write('\n'.join(['#start pimessage daemon',
                                       start_daemon_cmd, flag_cmd, '']))
        except IOError:
            print 'Error loading PiMessage daemon in .profile'

        # start the daemon manually this time
        os.system(start_daemon_cmd)

    print 'PiMessage has been successfully installed.'
    exit(0)
Example #36
 def load_scheme(self):
     scheme_filename = unicode(QFileDialog.getOpenFileName(self, "Open File", get_home_dir(), get_format_filter()))
     if scheme_filename:
         self.__load_scheme(scheme_filename)
Example #37
def uninstall():
    """Uninstaller for pimessage"""
    status = 0
    try:
        shutil.rmtree(data_dir, ignore_errors=True)
    except OSError:
        print 'Error in removing ~/.pimessage'
        return 1

    # Remove daemon from .profile
    try:
        _profile = os.path.join(utils.get_home_dir(), '.profile')
        with open(_profile, 'r') as fname:
            buf = fname.read()

        # process buffer
        lines = buf.split('\n')
        dir_path = os.path.abspath(os.path.dirname(sys.argv[0]))
        daemon_line0 = '#start pimessage daemon'
        daemon_line1 = dir_path+'/pmdaemon.py &'
        daemon_line2 = dir_path+'/pmdaemon.py -f'

        lines_to_append = []
        for line in lines:
            if (line != daemon_line0 and line != daemon_line1 and
                    line != daemon_line2):
                lines_to_append.append(line)

        buf = '\n'.join(lines_to_append)

        with open(_profile, 'w') as fname:
            fname.write(buf)

    except Exception as err:
        print 'Error in handling ~/.profile'
        print '%s' % str(err)
        status = 1

    # Remove pimessage alias from .bashrc
    try:
        _bashrc = os.path.join(utils.get_home_dir(), '.bashrc')
        with open(_bashrc, 'r') as fname:
            buf = fname.read()

        # process buffer
        lines = buf.split('\n')
        alias_line0 = '# For PiMessage -- do not delete'
        alias_line1 = 'alias pimessage='+dir_path+'/pimessage.py'

        lines_to_append = []
        for line in lines:
            if line != alias_line0 and line != alias_line1:
                lines_to_append.append(line)

        buf = '\n'.join(lines_to_append)

        with open(_bashrc, 'w') as fname:
            fname.write(buf)

    except Exception as err:
        print 'Error in handling ~/.bashrc'
        print '%s' % str(err)
        status = 1


    if status != 0:
        print 'Error removing PiMessage.'
    else:
        print 'PiMessage has been successfully uninstalled.'
    return status
Example #38
 def __init__(self):
     self.location = os.path.join(utils.get_home_dir(), ".rhnpushcache")
     self.session = None
Example #39
def check_for_new_excel(driver):
    """
    checks for new excel files to download, and if they aren't in the data folder,
    downloads them
    """
    while True:
        try:
            driver.get('http://shortsqueeze.com/ShortFiles.php')
            break
        except TimeoutException:
            pass

    years = get_years(driver)
    # get currently downloaded files
    dates_df = pd.read_excel(
        get_home_dir(repo_name='scrape_stocks') +
        'short_squeeze_release_dates.xlsx', None)
    cal_dict = {v: k for k, v in enumerate(calendar.month_name)}
    del cal_dict['']
    rev_cal_dict = {v: k for k, v in cal_dict.items()}

    bimonthly_files = glob.glob(HOME_DIR + 'short_squeeze.com/*.xlsx')
    bimonthly_filenames = set([f.split('/')[-1] for f in bimonthly_files])
    bimo_dates = [
        parse_bimo_dates(f, dates_df, rev_cal_dict) for f in bimonthly_files
    ]
    # if any dates none, it's because that year isn't in excel file of release dates yet
    all_none = [d is None for d in bimo_dates]
    if any(all_none):
        print('latest release date info not in excel file; need to update it')
        return

    latest_date = max(bimo_dates).date()
    latest_year = latest_date.year
    check_years = years[years >= latest_year]

    files_to_dl = []
    filenames = []
    for y in check_years:
        driver.get('http://shortsqueeze.com/' + str(y) + '.php')
        links = driver.find_elements_by_partial_link_text('Download')
        for l in links:
            link = l.get_attribute('href')
            if link == 'http://shortsqueeze.com/ShortFiles.php':
                continue

            filename = link.split('/')[-1]
            if filename in bimonthly_filenames:
                continue

            files_to_dl.append(link)
            filenames.append(filename)

    if len(files_to_dl) == 0:
        print('no new files to download')

    # seems to hang on download, so this will make it continue
    driver.set_page_load_timeout(4)
    for l in files_to_dl:
        try:
            print('downloading', l)
            driver.get(l)  # saves to downloads folder
        except TimeoutException:
            pass

    for f in filenames:
        full_fn = '/home/nate/Downloads/' + f
        print(full_fn)
        if os.path.exists(full_fn):
            # os.rename(full_fn, HOME_DIR + 'short_squeeze.com/' + f)
            shutil.copy(full_fn, HOME_DIR + 'short_squeeze.com/' + f)
            os.remove(full_fn)
Example #40
computer to receive PiMessages sent over LAN.

2014 Nate Fischer, Ivan De Dios

"""

import socket
import sys

import ip # local file
import utils
import datetime
import time
import os

ERR_FILE = os.path.join(utils.get_home_dir(), '.pimessage', 'daemonError.log')
FLAG_FILE = os.path.join(utils.get_home_dir(), '.pimessage', 'flag')

def err_log(msg):
    """log an error with its time followed by the message describing it"""

    now = datetime.datetime.now().time()
    formatted_stamp = now.strftime('%H:%M %m/%d/%Y')
    err_msg = '\t'.join([formatted_stamp, msg])
    # print >>sys.stderr, err_msg
    with open(ERR_FILE, 'a') as fname:
        fname.write(err_msg+'\n')

def check_startup():
    """
    Checks if the daemon is starting in detached mode
Example #41
def test_customizing_model_names(hyperparameters):
    dataset = ALL_DATASETS["petfinder"]()
    metric_name = dataset.metric

    predictor = MultiModalPredictor(
        label=dataset.label_columns[0],
        problem_type=dataset.problem_type,
        eval_metric=metric_name,
    )
    config = {
        MODEL: f"fusion_mlp_image_text_tabular",
        DATA: "default",
        OPTIMIZATION: "adamw",
        ENVIRONMENT: "default",
    }
    hyperparameters.update({
        "env.num_workers": 0,
        "env.num_workers_evaluation": 0,
    })
    hyperparameters_gt = copy.deepcopy(hyperparameters)
    if isinstance(hyperparameters_gt["model.names"], str):
        hyperparameters_gt["model.names"] = OmegaConf.from_dotlist(
            [f'names={hyperparameters["model.names"]}']).names

    save_path = os.path.join(get_home_dir(), "outputs", "petfinder")
    if os.path.exists(save_path):
        shutil.rmtree(save_path)
    predictor.fit(
        train_data=dataset.train_df,
        config=config,
        hyperparameters=hyperparameters,
        time_limit=10,
        save_path=save_path,
    )

    assert sorted(predictor._config.model.names) == sorted(
        hyperparameters_gt["model.names"])
    for per_name in hyperparameters_gt["model.names"]:
        assert hasattr(predictor._config.model, per_name)

    score = predictor.evaluate(dataset.test_df)
    verify_predictor_save_load(predictor, dataset.test_df)

    # Test for continuous fit
    predictor.fit(
        train_data=dataset.train_df,
        config=config,
        hyperparameters=hyperparameters,
        time_limit=10,
    )
    assert sorted(predictor._config.model.names) == sorted(
        hyperparameters_gt["model.names"])
    for per_name in hyperparameters_gt["model.names"]:
        assert hasattr(predictor._config.model, per_name)
    verify_predictor_save_load(predictor, dataset.test_df)

    # Saving to folder, loading the saved model and call fit again (continuous fit)
    with tempfile.TemporaryDirectory() as root:
        predictor.save(root)
        predictor = MultiModalPredictor.load(root)
        predictor.fit(
            train_data=dataset.train_df,
            config=config,
            hyperparameters=hyperparameters,
            time_limit=10,
        )
        assert sorted(predictor._config.model.names) == sorted(
            hyperparameters_gt["model.names"])
        for per_name in hyperparameters_gt["model.names"]:
            assert hasattr(predictor._config.model, per_name)