Example #1
import copy
import os

import joblib

# read_config, generate_qam_signal, generate_spans_edfas, mux_signal, resample,
# simulate_spm, sumulate_all and save are assumed to be provided by the
# surrounding project; they are not standard-library functions.


def main():
    os.makedirs('data', exist_ok=True)  # idempotent; replaces the bare try/except
    config_all = joblib.load('config_set')
    config_all = config_all[200:300]  # this run handles configs 200-299

    for conf_ith, conf in enumerate(config_all):
        res = read_config(conf)
        span_setting = res['span_setting']
        ch_number = res['ch_number']
        mf = res['mf']

        if mf == 0:
            mf = 'qpsk'
        elif mf == 1:
            mf = '16-qam'
        power = res['power']

        signals = generate_qam_signal(ch_number * [power], ch_number * [35],
                                      mf)

        # place the channels on a 50 GHz grid centred at 193.1 THz
        for i, signal in enumerate(signals):
            signal.center_frequency = i * 50e9 + 193.1e12

        spans, edfas = generate_spans_edfas(span_setting)
        wdm_signal = mux_signal(signals)
        assert len(spans) == len(span_setting)
        center_index = len(signals) // 2
        center_signal = signals[center_index]

        center_signal_to_prop_obj = copy.deepcopy(center_signal)
        center_signal_to_prop = resample(center_signal[:],
                                         center_signal.sps_in_fiber, 4)
        center_signal_to_prop_obj.data_sample_in_fiber = center_signal_to_prop
        center_signal_to_prop_obj.sps_in_fiber = 4
        center_signal_to_prop_obj.set_signal_power(power, 'dbm')
        # propagate the centre channel alone (SPM only) and the full WDM comb
        center_signal_afterprop = simulate_spm(center_signal_to_prop_obj,
                                               spans, edfas)
        wdm_signal_afterprop = sumulate_all(wdm_signal, spans, edfas)
        to_save = dict(wdm_signal_afterprop=wdm_signal_afterprop,
                       center_signal_afterprop=center_signal_afterprop,
                       spans=spans,
                       power=power,
                       mf=mf)
        # the slice above started at 200, so restore the global index in the filename
        save(to_save, f'n:/superchanneldata/{conf_ith + 200}_th')
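
For reference, the loop only relies on read_config(conf) returning a mapping with four keys; a hypothetical entry could look like this (values are illustrative, not taken from the source):

# hypothetical decoded config entry, shape inferred from the keys accessed above
example_res = {
    'span_setting': [80e3] * 5,  # assumed: one fibre-span length per span
    'ch_number': 5,              # number of WDM channels
    'mf': 0,                     # modulation format: 0 -> 'qpsk', 1 -> '16-qam'
    'power': 0,                  # per-channel launch power in dBm
}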
Example #2
import asyncio
import logging
from sys import argv

# Assumed imports: ClientSession comes from aiohttp; Connection and
# read_config are provided by the surrounding project.
from aiohttp import ClientSession


async def main():
    """Log in, refresh vehicle state, print each vehicle, then log out."""
    if "-v" in argv:
        logging.basicConfig(level=logging.INFO)
    elif "-vv" in argv:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.ERROR)

    async with ClientSession(headers={'Connection': 'keep-alive'}) as session:
        connection = Connection(session, **read_config())
        if await connection._login():
            if await connection.update():
                for vehicle in connection.vehicles:
                    print(vehicle)
            await connection._logout()
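
Since main is a coroutine it needs an event loop to run; a minimal entry point (assuming Python 3.7+) is:

if __name__ == "__main__":
    asyncio.run(main())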
Example #3
# -*- coding: utf-8 -*-

import feedparser
import pprint
import time
from utilities import create_crawljob_and_upload, read_config, get_show_information
from db import persist_download, download_exists
from pathlib import Path
from guessit import guessit
from utilities import CURRENT_FOLDER, WATCH_FOLDER, CONFIG_FILE, FTP_CONFIG, DB_FILENAME

config = read_config(path_to_file=CONFIG_FILE).get('RMZ_Shows')


# Checks whether a parsed entry carries every field we need; returns None otherwise.
def filter_relevant_show_info(show_info):
    required = ('title', 'season', 'episode', 'screen_size')
    if all(key in show_info for key in required):
        return tuple(show_info[key] for key in required)
    return None


def filter_for_shows(entries, shows):
    # keep only the shows that appear verbatim among the feed entries
    prefiltered_shows = list(filter(lambda x: x in entries, shows))
    return prefiltered_shows


if __name__ == '__main__':
    d = feedparser.parse('http://rmz.cr/feed')
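    # The example stops right after fetching the feed. The continuation below is
    # a hedged sketch of how the helpers above might be wired together, assuming
    # entry titles are release names guessit can parse; the signatures of
    # download_exists, persist_download and create_crawljob_and_upload are
    # assumptions, not from the source.
    for entry in d.entries:
        info = filter_relevant_show_info(guessit(entry.title))
        if info is None:
            continue
        title, season, episode, screen_size = info
        if title in config and not download_exists(title, season, episode):
            create_crawljob_and_upload(entry.link)        # hypothetical signature
            persist_download(title, season, episode)      # hypothetical signature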
Example #4
## Prediction of trees from a local model
import cv2
import matplotlib.pyplot as plt
import pandas as pd
import glob
import os
import utilities

if __name__ == "__main__":
    config = utilities.read_config()
    model = utilities.read_model(config["model_path"], config)

    # gather candidate .tif images (those with annotations are selected below)
    files = glob.glob("../**/**/*.tif")

    # keep RGB orthophotos only
    excluded = ("hyperspectral", "depth", "training", "false_color")
    files = [x for x in files if not any(tag in x for tag in excluded)]

    # which images have annotations (compare by basename without extension)
    images_with_annotations = glob.glob("../../*/annotations/*")
    images_with_annotations = [
        os.path.splitext(os.path.basename(x))[0] for x in images_with_annotations
    ]

    # filter to RGB images whose basename has a matching annotation file
    files = [
        x for x in files
        if os.path.splitext(os.path.basename(x))[0] in images_with_annotations
    ]
Example #5
def read_config(self):
    self.config = utilities.read_config()
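
This fragment is a method detached from its class; a minimal sketch of a host class it could belong to (the class name and attribute are assumptions):

import utilities

class ConfiguredJob:  # hypothetical host class
    def __init__(self):
        self.config = None

    def read_config(self):
        # cache the parsed configuration on the instance
        self.config = utilities.read_config()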
Example #6
# -*- coding: utf-8 -*-

import feedparser
import pprint
import time
from utilities import create_crawljob_and_upload, read_config, get_show_information
from db import persist_download, persist_download2, download_exists, download_exists2
from pathlib import Path
from guessit import guessit
from utilities import CURRENT_FOLDER, WATCH_FOLDER, CONFIG_FILE, FTP_CONFIG, DB_FILENAME
from bs4 import BeautifulSoup
import requests
import re

config = read_config(path_to_file=CONFIG_FILE).get('RMZ_Movies')


# Checks whether a parsed entry carries every field we need; returns None otherwise.
def filter_relevant_show_info(show_info):
    required = ('title', 'season', 'episode', 'screen_size')
    if all(key in show_info for key in required):
        return tuple(show_info[key] for key in required)
    return None


def filter_for_shows(entries, shows):
    prefiltered_shows = list(filter(lambda x: x in entries, shows))
    return prefiltered_shows
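
Note that filter_for_shows uses exact list membership, so a show name must match an entry string verbatim:

filter_for_shows(['Show.A.S01E01.720p', 'Show B'], ['Show B', 'Show C'])
# -> ['Show B']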
Example #7
import argparse
from repo_url_getter import RepoUrlGetter
from download import RepoDownloader
from deploy import PluginDeployer
import utilities
import json

VERSION = "1.0.0"

args = utilities.read_args()

print()
print("RWP Deployer v" + VERSION)

settings = utilities.read_config('settings')
remote_connect_details = utilities.read_config('remote')

print()
print('Repositories:\n\t' + '\n\t'.join(args.repositories))
print()

url_getter = RepoUrlGetter(settings['github_token'])
repo_urls = url_getter.get_urls(args.repositories)

excluded_filenames = json.loads(settings['excluded_filenames'])
downloader = RepoDownloader(settings['tmp_dir'], excluded_filenames)
downloaded = downloader.download(repo_urls)

if not args.download_only:
    deployer = PluginDeployer(remote_connect_details)
    deployer.deploy(downloaded)
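
utilities.read_args is not shown; judging from the attributes used above, a plausible implementation could look like this (hypothetical, not the project's actual code):

import argparse

def read_args():
    # hypothetical reconstruction based on args.repositories and args.download_only
    parser = argparse.ArgumentParser(description="RWP Deployer")
    parser.add_argument("repositories", nargs="+",
                        help="GitHub repositories to fetch and deploy")
    parser.add_argument("--download-only", action="store_true",
                        help="download the repositories without deploying")
    return parser.parse_args()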
Example #8
from Webscraper import FAZ_Scraper
from utilities import Logger, Decorators, read_config
from pymongo import MongoClient
from pathlib import Path
import argparse
import json
from tqdm import tqdm
from time import gmtime, strftime

log = Logger.log
conf = (read_config('config.yaml') if Path('config.yaml').exists()
        else read_config(Path(__file__).resolve().parent.parent.joinpath('config.yaml')))

faz_dic = conf['faz_dic']
faz_base_parser = conf['faz_base_parser']


scraper = FAZ_Scraper(root_link=faz_dic['root_link'],
                      topic_class=faz_dic['topic_link'],
                      article_class=faz_dic['article_link'],
                      parser=faz_base_parser)


def convert_arg_str_to_bool(arg):
    return arg == "y"

@Decorators.run_time
def run_scraper(write_json, write_mongo, host, port, collection, database):
    log.info(f'Running the web scraper with the following arguments:\nWrite to JSON: {write_json}\nWrite to MongoDB: {write_mongo}\nHost: {host}\nPort: {port}'
             f'\nCollection: {collection}\nDatabase: {database}')
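
# The example is cut off inside run_scraper; below is a hedged sketch of CLI
# wiring that would call it (flag names and defaults are assumptions):
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='FAZ web scraper')
    parser.add_argument('--json', default='y', help='write results to JSON (y/n)')
    parser.add_argument('--mongo', default='n', help='write results to MongoDB (y/n)')
    parser.add_argument('--host', default='localhost')
    parser.add_argument('--port', type=int, default=27017)
    parser.add_argument('--collection', default='articles')
    parser.add_argument('--database', default='faz')
    args = parser.parse_args()
    run_scraper(convert_arg_str_to_bool(args.json),
                convert_arg_str_to_bool(args.mongo),
                args.host, args.port, args.collection, args.database)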