Example 1
def main():
    args = parse_args()
    setup_logging(args.logging_config, args.debug)

    if 'schedule' not in args:
        run_reporter(args)
    else:
        schedule_reporter(args)
Example 2
def main(_args=None):
    """enter the dragon, is what I imagine the main method saying"""
    try:
        _logger = setup_logging('info')
        if _args is None:
            _args = sys.argv
        files = _args
        result_count = 100
        for file in files:
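            # when falling back to sys.argv, the first entry is the invoking
            # script itself (typically __main__.py), so it is skipped below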
            if "__main__.py" not in file:
                _logger.info(f"Received path to file: {file}")
                results = filer.outputter(some_collection=filer.reader(file),
                                          this_many_results=result_count)
                print(f'Top {result_count} matches for file {file}')
                for result in results:
                    print(result)

    except InsufficientArguments:
        _logger.error("Received no file input")
        raise
    except ArgumentTypeException:
        _logger.error("Not a valid file path")
        raise
    except:
        _logger.error("Unexpected error: %s", sys.exc_info()[0])
        print("Unexpected error:", sys.exc_info()[0])
        raise
Example 3
def main(parameter_list):
    ''' main entry point '''
    start_time = datetime.now()

    config = utils.get_config(parameter_list)

    utils.setup_logging(config['LOG_DIRECTORY'], config['PROGRAM_NAME'],
                        config['FILE_LOGGING_LEVEL'],
                        config['CONSOLE_LOGGING_LEVEL'])

    # debug print config file params
    for key, value in config.items():
        logging.debug('cfg param %s : %s', key, value)

    logging.info("Start time %s", start_time.strftime("%Y-%m-%d %H:%M:%S"))

    connection_string = utils.make_connection_string(
        config['DB_DRIVER'], config['DB_SERVER'], config['DB_NAME'],
        config['DB_USER'], config['DB_PASS'], config['DB_TRUSTED'])

    # import the FARS accident data
    import_table_name = config['ACCIDENT_IMPORT_TABLENAME']
    fars_field_specs = utils.get_field_spec(FARS_FIELD_SPEC)
    sql = utils.make_create_table_sql(fars_field_specs, import_table_name)
    utils.create_table(connection_string, import_table_name, sql, True)
    utils.bulk_insert_csv_file_to_db(connection_string, import_table_name,
                                     config['ACCIDENT_DATAFILE'], 2)
    add_crash_datetime(connection_string, import_table_name)
    update_crash_datetime(connection_string, import_table_name)

    # import the state code lookup data
    import_table_name = config['STATE_CODE_IMPORT_TABLENAME']
    fars_field_specs = utils.get_field_spec(STATES_FIELD_SPEC)
    sql = utils.make_create_table_sql(fars_field_specs, import_table_name)
    utils.create_table(connection_string, import_table_name, sql, True)
    utils.bulk_insert_csv_file_to_db(connection_string, import_table_name,
                                     config['STATE_CODE_DATAFILE'], 1)

    # report timing and finish
    utils.report_runtime(start_time)

    print('\n')  # blank line after run is done
    return
Example 4
            else:  # start and middle lines, 3 values per line
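                # right-justify each day/flux value so the IRRPROFI card keeps
                # fixed-width columns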
                irrprofi = ("IRRPROFI" +
                            (str(countdays[x]) + ".").rjust(12) +
                            str(flux[x]).rjust(10) +
                            (str(countdays[x + 1]) + ".").rjust(10) +
                            str(flux[x + 1]).rjust(10) +
                            (str(countdays[x + 2]) + ".").rjust(10) +
                            str(flux[x + 2]).rjust(10) + "\n")

            lines.insert(2 * j + 1, irrprofi)

        lines = "".join(lines)
        file.seek(0)
        file.write(lines)
        file.truncate()
    file.close()
    logging.info("Writing file: %s", output_file_name)


if __name__ == "__main__":
    input_file_name = 'cyclemainoperationalparameters.xlsx'
    output_file_name = 'fluka_test.i'
    ut.setup_logging()
    FLUKA_output(input_file_name, output_file_name)
    logging.info("Completed irradiation history production")
Example 5
def main():
    args = parse_args()
    setup_logging(args.logging_config, args.debug)
    run_producer(args)
Example 6
async def main():

    if getattr(sys, "frozen", False):  # Check if program is compiled to exe
        main_folder_path = os.path.dirname(sys.executable)
    else:
        main_folder_path = os.path.dirname(os.path.abspath(__file__))

    config = load_config(os.path.join(main_folder_path, "horizon_config.ini"))
    setup_logging(main_folder_path, level=config['logging_level'])

    banner = (f"Horizon Trade Notifier {version} - "
              "https://discord.gg/Xu8pqDWmgE - https://github.com/JartanFTW")
    print_timestamp(banner)
    logger.log(49, banner)

    update = await check_for_update(version)
    if update:
        print_timestamp("A new update is available!")

    tasks = []
    user = await User.create(config["cookie"])
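    # one trade-checking task is created per trade type enabled in the config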
    if config['completed']['enabled']:
        worker = await TradeWorker.create(
            main_folder_path,
            user,
            config['completed']['webhook'],
            config['completed']['update_interval'],
            config['completed']['theme_name'],
            trade_type="Completed",
            add_unvalued_to_value=config['add_unvalued_to_value'],
            testing=config['testing'],
            webhook_content=config['completed']['webhook_content'])
        tasks.append(asyncio.create_task(worker.check_trade_loop()))
    if config['inbound']['enabled']:
        worker = await TradeWorker.create(
            main_folder_path,
            user,
            config['inbound']['webhook'],
            config['inbound']['update_interval'],
            config['inbound']['theme_name'],
            trade_type="Inbound",
            add_unvalued_to_value=config['add_unvalued_to_value'],
            testing=config['testing'],
            double_check=config['double_check'],
            webhook_content=config['inbound']['webhook_content'])
        tasks.append(asyncio.create_task(worker.check_trade_loop()))
    if config['outbound']['enabled']:
        worker = await TradeWorker.create(
            main_folder_path,
            user,
            config['outbound']['webhook'],
            config['outbound']['update_interval'],
            config['outbound']['theme_name'],
            trade_type="Outbound",
            add_unvalued_to_value=config['add_unvalued_to_value'],
            testing=config['testing'],
            webhook_content=config['outbound']['webhook_content'])
        tasks.append(asyncio.create_task(worker.check_trade_loop()))

    if tasks:
        await asyncio.wait(tasks)
    else:
        print_timestamp(
            "Looks like you don't have any trade types enabled in the config! There is nothing for me to do :("
        )
    await user.client.aclose()
    return
Example 7
import splunk.Intersplunk
import traceback

from passivetotal.libs.whois import WhoisRequest
from utilities import build_headers
from utilities import gen_label
from utilities import get_config
from utilities import setup_logging

logger = setup_logging()

try:
    logger.info("Starting command processing")
    input_events, dummyresults, settings = splunk.Intersplunk.getOrganizedResults()
    keywords, options = splunk.Intersplunk.getKeywordsAndOptions()

    query_value = options.get("query", "")
    logger.info("Query target: %s", query_value)
    logger.debug("Raw options: %s", str(options))

    configuration = get_config("passivetotal", "api-setup")
    username = configuration.get('username', None)
    api_key = configuration.get('apikey', None)

    output_events = list()
    whois = WhoisRequest(username, api_key,
                         headers=build_headers()).get_whois_details(
                             query=query_value, compact_record=True)
    if 'error' in whois:
        raise Exception(
Example 8
PASSWORD = os.environ.get('AGOL_PASSWORD')
# Path to the ArcGIS Pro Project
APRX_PATH = (r'D:\jhth490\projects\mobile_suma'
             r'\mobile_suma_gis\mobile_suma_gis.aprx')
# Name of the map to add the layers to. It could be helpful to only
# have one map in the project
MAP = 'collector_map'
# Path to the highest level folder that holds the layers to use.
QDL_PATH = r'\\dnr\agency\app_data_gis\qdl\core'
AGOL = 'https://www.arcgis.com'
# AGOL username of the owner of the items
OWNER = 'jhth490'
# ID of the AGOL group to share the vtpks and tile layers. This helps
# with searching and deleting items and managing shared content.
GROUP_ID = '4154c84c38204236a0a633665f040976'
LOGGER = utilities.setup_logging(ROOT_DIRECTORY)
# Pre-created blank feature class in web mercator projection (WKID 3857).
SPATIAL_REFERENCE_LAYER = (r"D:\jhth490\projects\mobile_suma\mobile_suma_gis"
                           r"\mobile_suma_gis.gdb\fc_set_spatial_reference")
# Dictionary containing the layers to add to the map. These are relative
# to the QDL_PATH above. Keys are the name of vtpk, values are paths to
# the layer files to include in the vtpk.
VECTOR_LAYERS = {
    'Transportation': [
        r'Transportation\State Lands - Active Roads Group',
        r'Transportation\State Lands - Road Barriers'
    ],
    'Topography':
    [r'Topography\Contours 40, 200, 1000, 2000-foot (USGS DEM 10-meter)'],
    'State Lands Knowledge': [
        (r'Forest Management\State Lands Knowledge'
Example 9
import copy
import datetime
import splunk.Intersplunk
import traceback

from passivetotal.libs.dns import DnsRequest
from utilities import build_headers
from utilities import get_config
from utilities import setup_logging

logger = setup_logging()


def remove_keys(obj, keys=None):
    """Remove a set of keys from a dict."""
    obj = copy.deepcopy(obj)
    for key in keys or []:
        obj.pop(key, None)
    return obj

try:
    logger.info("Starting command processing")
    input_events, dummyresults, settings = splunk.Intersplunk.getOrganizedResults()
    keywords, options = splunk.Intersplunk.getKeywordsAndOptions()

    kwords = dict()
    query_value = options.get("query", "")
    earliest = options.get("earliest", None)
    latest = options.get("latest", None)
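    # the time bounds arrive as epoch-seconds strings from the search options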
    if earliest and earliest.isdigit():
        start = datetime.datetime.fromtimestamp(int(earliest))
Example 10
def main(parameter_list):
    ''' main entry point '''
    start_time = datetime.now()

    config = utils.get_config(parameter_list)

    utils.setup_logging(
        config['LOG_DIRECTORY'],
        config['PROGRAM_NAME'],
        config['FILE_LOGGING_LEVEL'],
        config['CONSOLE_LOGGING_LEVEL']
    )

    utils.setup_output(config['OUTPUT_FOLDER'])

    for key, value in config.items():
        logging.debug('cfg param %s : %s', key, value)

    logging.info('Start time %s', start_time.strftime("%Y-%m-%d %H:%M:%S"))

    data_folder = config['DATA_FOLDER']
    logging.info('Processing files in %s', data_folder)
        
    utc_zone = pytz.timezone('UTC')
    first_epoch = int(config['FIRST_EPOCH'])
    last_epoch = int(config['LAST_EPOCH'])
    filter_expression = config['FILE_FILTER_REGEX']

    # cut out _files_ based on date
    logging.info('Starting to filter records in %s by dates %s - %s',
                 data_folder, first_epoch, last_epoch)
    files_examined, matching_files = extract_filtered_file_list(
        data_folder, first_epoch, last_epoch, filter_expression)
    logging.info('Filtered %s files to list of %s',
                 files_examined, len(matching_files))
    logging.info('Current time %s', datetime.now().strftime('%H:%M:%S'))

    subset_folder = config['SUBSET_FOLDER']
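    # rebuild the subset folder from scratch on every run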
    if os.path.exists(subset_folder):
        shutil.rmtree(subset_folder)
    os.mkdir(subset_folder)

    logging.info('Creating subset of files: %s', subset_folder)
    for file_name in matching_files:
        copy(file_name, subset_folder)

    logging.info('Processing files into records')
    logging.info('Current time %s', datetime.now().strftime('%H:%M:%S'))
    (files_processed, total_lines, jams_skipped,
     single_file_duplicates, combined_lines) = extract_lines(subset_folder)
    logging.info('Processed files: %s; total lines: %s; '
                 'files with duplicate lines: %s; unique lines: %s',
                 files_processed, total_lines,
                 len(single_file_duplicates), len(combined_lines))
    if jams_skipped > 0:
        logging.error('Found lines containing %s "jams" records that were skipped',
                      jams_skipped)

    logging.info('Current time %s', datetime.now().strftime('%H:%M:%S'))

    # cut out _records_ based on date
    filter_expression = config['OBJECT_FILTER_REGEX']
    study_set = extract_waze_objects(combined_lines, first_epoch, last_epoch,
                                     filter_expression)
    logging.info('Processed %s lines, created study set of size: %s',
                 len(combined_lines), len(study_set))

    logging.info('Building study file at %s', datetime.now().strftime("%H:%M:%S"))

    study_file = os.path.join(config['OUTPUT_FOLDER'], config['STUDY_OUTPUT_FILE'])
    build_study_output(study_set, study_file)

    utils.report_runtime(start_time)
    print('\n')
    return
Example 11
            source_file = os.path.join(root, 'structure.cpd')
            dest_file = os.path.join(
                root.replace('original_format', 'final_format'),
                'structure.cpd')
            copyfile(source_file, dest_file)


def write_etree(etree, name):
    os.makedirs('debug_output_xmls', exist_ok=True)
    with open(f"debug_output_xmls/{name}.xml", 'w', encoding='utf-8') as f:
        mods_bytes = ET.tostring(etree,
                                 xml_declaration=True,
                                 encoding="utf-8",
                                 pretty_print=True)
        mods_string = mods_bytes.decode('utf-8')
        f.write(mods_string)


if __name__ == '__main__':
    setup_logging()
    try:
        xlsx = sys.argv[1]
    except IndexError:
        logging.warning(
            'Change to: "python convert_xlsx_to_mods.py $path/to/{filename}.xlsx"'
        )
        sys.exit()
    logging.info(f"starting {xlsx}")
    main(xlsx)
    logging.info(f"finished {xlsx}")
Example 12

def main(alias, cdm_data_dir):
    PullInBinaries(alias, cdm_data_dir)
    MakeStructureFile(alias)
    IsCountsCorrect(alias, cdm_data_dir)
    report_restricted_files(alias)
    report_filetype(alias)
    folder_by_extension(alias)
    make_zips(alias)
    fix_permissions()
    cleanup_leftover_files(alias)


if __name__ == '__main__':
    logging_string = setup_logging()
    try:
        alias = sys.argv[1]
        cdm_data_dir = sys.argv[2]
    except IndexError:
        logging.warning('')
        logging.warning(
            'Change to: "python post_cdmconversion_cleanup.py $aliasname $path/to/U-Drive/Cached_Cdm_files"'
        )
        logging.warning('')
        sys.exit()
    logging.info('starting {}'.format(alias))
    main(alias, cdm_data_dir)
    logging.info('finished {}'.format(alias))

    log_contents = logging_string.getvalue()