# Beispiel #1
# 0
# Make the parent directory importable so the shared `common` package resolves.
import os, sys

base_path = os.path.dirname(__file__)
sys.path.append(os.path.abspath(os.path.join(base_path, '..')))

from common.pogs_analysis_tables import *
from common.database import db_init
from sqlalchemy.engine import create_engine
from sqlalchemy import select, exists
from sqlalchemy import func
from sqlalchemy.sql.expression import func as ffunc

# Source: remote POGS analysis MySQL database (credentials redacted upstream).
pogs_connection = create_engine(
    'mysql://*****:*****@munro.icrar.org/pogs_analysis').connect()
# Destination: local SQLite database, opened via the project's db_init helper.
nn_connection = db_init('sqlite:///Database_run01.db')

# Maps numeric filter IDs to band names (GALEX fuv/nuv, ugrizy, WISE bands —
# presumably; verify against the filter table upstream).
# NOTE(review): this literal is truncated in this chunk — the closing brace and
# any remaining entries lie outside the visible source.
filter_map = {
    123: 'fuv',
    124: 'nuv',
    229: 'u',
    230: 'g',
    323: 'g',
    324: 'r',
    231: 'r',
    325: 'i',
    232: 'i',
    326: 'z',
    233: 'z',
    327: 'y',
    280: 'WISEW1',
# Beispiel #2
# 0
)
parser.add_argument("-n", dest="num_to_load", type=int, nargs=1, help="Number of galaxies to load")
parser.add_argument(
    "-r",
    dest="run_id",
    nargs="*",
    help="Run ID folders to load. If ommitted, simply searches for all .fit files in subdirectories of the working directory.",
)
parser.add_argument("-d", dest="database", nargs=1, help="SQLite database to use")
args = vars(parser.parse_args())
working_directory = args["working_directory"][0]
num_to_load = args["num_to_load"][0]
run_ids = args["run_id"]

# Fire up the DB based on our command line args
db_init(config.DB_LOGIN + args["database"][0])

# Accumulators for the load pass below.
run_dirs = []
num_added = 0
sh_files = 0
current_run_dir = ""

# Is everything valid?
if os.path.exists(working_directory):
    # Check each of the run_ids and check if they're valid.
    invalid_folders = []
    # `run_ids or []` guards the crash when -r is omitted (argparse then leaves
    # run_ids as None); `run_id` avoids shadowing the builtin `id`.
    for run_id in run_ids or []:
        full_path = os.path.join(working_directory, run_id)
        run_dirs.append(full_path)
        if not os.path.exists(full_path):
            invalid_folders.append(full_path)
#

# Make the parent directory importable so the shared `common` package resolves.
import os, sys

base_path = os.path.dirname(__file__)
sys.path.append(os.path.abspath(os.path.join(base_path, '..')))

from common.pogs_analysis_tables import *
from common.database import db_init
from sqlalchemy.engine import create_engine
from sqlalchemy import select, exists
from sqlalchemy import func
from sqlalchemy.sql.expression import func as ffunc

# Source: remote POGS analysis MySQL database (credentials redacted upstream).
pogs_connection = create_engine('mysql://*****:*****@munro.icrar.org/pogs_analysis').connect()
# Destination: local SQLite database, opened via the project's db_init helper.
nn_connection = db_init('sqlite:///Database_run01.db')

# Maps numeric filter IDs to band names (GALEX fuv/nuv, ugrizy, WISE bands —
# presumably; verify against the filter table upstream).
# NOTE(review): this literal is truncated in this chunk — the closing brace and
# any remaining entries lie outside the visible source.
filter_map = {123: 'fuv',
              124: 'nuv',
              229: 'u',
              230: 'g',
              323: 'g',
              324: 'r',
              231: 'r',
              325: 'i',
              232: 'i',
              326: 'z',
              233: 'z',
              327: 'y',
              280: 'WISEW1',
              281: 'WISEW2',
# Beispiel #4
# 0
    # NOTE(review): tail of a function whose `def` line lies outside this chunk.
    # It appears to apply the index permutation `perm` to every item in `args`
    # and return the permuted items as a tuple — TODO confirm against the
    # missing signature.
    out_tuple = list()
    for count, item in enumerate(args):
        # Apply permutation to each item, then append them to a list for outputting
        out_tuple.append(item[perm])

    return tuple(out_tuple)


# Ad-hoc smoke test of the grouping helpers (Python 2 print statements).
inp = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
inp2 = [500, 700, 300, 900, 400, 150, 300, 700, 200, 600, 800, 900, 800]

print get_percentile_groups(inp2, 0.1)
print get_dataspace_groups(inp2, 0.1)
# NOTE(review): exit() here makes everything below unreachable — the db_init
# call and the rest of the script never run while this debug block is in place.
exit()

db_init('sqlite:///Database_run06.db')

# Configuration for training/evaluation run 06.
DatabaseConfig = {
    'database_connection_string': 'sqlite:///Database_run06.db',
    'train_data': 200000,  # number of training samples
    'test_data': 1000,     # number of test samples
    'run_id': '06',
    'output_type': 'median',  # median, best_fit, best_fit_model, best_fit_inputs
    'input_type': 'normal',   # normal, Jy
    'include_sigma': False,   # True, False
    'unknown_input_handler': None,
    'input_filter_types': None,
}

# NOTE(review): truncated here — the body of this `if` lies outside the chunk.
# check_temp presumably tests a cached temp file against the {1: 1} config key —
# TODO confirm against its definition.
if check_temp('nn_last_tmp_input3.tmp', {1: 1}):
# Beispiel #5
# 0
    dest='run_id',
    nargs='*',
    help=
    'Run ID folders to load. If ommitted, simply searches for all .fit files in subdirectories of the working directory.'
)
parser.add_argument('-d',
                    dest='database',
                    nargs=1,
                    help='SQLite database to use')
args = vars(parser.parse_args())
working_directory = args['working_directory'][0]
num_to_load = args['num_to_load'][0]
run_ids = args['run_id']

# Fire up the DB based on our command line args
db_init(config.DB_LOGIN + args['database'][0])

# Accumulators for the load pass below.
run_dirs = []
num_added = 0
sh_files = 0
current_run_dir = ''

# Is everything valid?
if os.path.exists(working_directory):
    # Check each of the run_ids and check if they're valid.
    invalid_folders = []
    # `run_ids or []` guards the crash when -r is omitted (argparse then leaves
    # run_ids as None); `run_id` avoids shadowing the builtin `id`.
    for run_id in run_ids or []:
        full_path = os.path.join(working_directory, run_id)
        run_dirs.append(full_path)
        if not os.path.exists(full_path):
            invalid_folders.append(full_path)
    # NOTE(review): tail of a function whose `def` line lies outside this chunk.
    # It appears to apply the index permutation `perm` to every item in `args`
    # and return the permuted items as a tuple — TODO confirm against the
    # missing signature.
    out_tuple = list()
    for count, item in enumerate(args):
        # Apply permutation to each item, then append them to a list for outputting
        out_tuple.append(item[perm])

    return tuple(out_tuple)


# Ad-hoc smoke test of the grouping helpers (Python 2 print statements).
inp = [0,1,2,3,4,5,6,7,8,9,10]
inp2 = [500,700,300,900,400,150,300,700,200,600,800,900,800]

print get_percentile_groups(inp2, 0.1)
print get_dataspace_groups(inp2, 0.1)
# NOTE(review): exit() here makes everything below unreachable — the db_init
# call and the rest of the script never run while this debug block is in place.
exit()

db_init('sqlite:///Database_run06.db')

# Configuration for training/evaluation run 06.
DatabaseConfig = {
    'database_connection_string': 'sqlite:///Database_run06.db',
    'train_data': 200000,  # number of training samples
    'test_data': 1000,     # number of test samples
    'run_id': '06',
    'output_type': 'median',  # median, best_fit, best_fit_model, best_fit_inputs
    'input_type': 'normal',   # normal, Jy
    'include_sigma': False,   # True, False
    'unknown_input_handler': None,
    'input_filter_types': None,
}

# Reuse cached NN inputs when a matching temp file exists (presumably keyed on
# the {1:1} config dict — TODO confirm against check_temp's definition).
# NOTE(review): the else-branch body is truncated — it lies outside this chunk.
if check_temp('nn_last_tmp_input3.tmp', {1:1}):
    all_in, all_out, redshifts, galaxy_ids = load_from_file('nn_last_tmp_input3.tmp')
else: