Example No. 1
def projectInit(projDir):
    # Ensure INIs all exist
    projectIniPath = os.path.join(projDir, 'project.ini')
    if not os.path.isfile(projectIniPath):
        raise Exception('\u2018' + projectIniPath + '\u2019 is inaccessible.')
    assetsIniPath = os.path.join(projDir, 'assets.ini')
    if not os.path.isfile(assetsIniPath):
        raise Exception('\u2018' + assetsIniPath + '\u2019 is inaccessible.')
    # Parse all of the INIs
    #print('Project INI...')
    projectIni = ini.parse(projectIniPath)
    #print('Assets INI...')
    assetsIni = ini.parse(assetsIniPath)
    #print('Done.')
    # Ensure schema is supported
    if (int(projectIni['']['version']) > 0
            or int(assetsIni['']['version']) > 0):
        raise Exception('One or more INI schemas in \u2018' + projDir +
                        '\u2019 are unsupported')
    srcDir = projectIni['source']['sourcedir']
    incDir = projectIni['source']['includedir']
    # Get applicable languages
    _langs = projectIni['source']['langs']
    langs = []
    if ',' in _langs:
        langs += _langs.split(',')
    else:
        langs = [_langs]
    return (projectIni, assetsIni, srcDir, incDir, langs)
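Most examples on this page share the same pattern: read the INI text, hand it to ini.parse, and cast the string values. Below is a minimal, self-contained sketch of that pattern; the section and key names are illustrative, not taken from any real project file.

import ini

# Illustrative config text only; the examples on this page read it from disk with open(path).read().
source = """
[particles]
N=100

[time]
tmax=10.0
"""

params = ini.parse(source)
N = int(params['particles']['N'])      # cast values, as the examples do
tmax = float(params['time']['tmax'])
print(N, tmax)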
Example No. 2
 def LoadSensorIniButtonPressed(self):
     # This is executed when the button is pressed
     print('LoadSensorIniButtonPressed')
     global ImgWidth, ImgHeight, ImgFormat, ImgInterface, mipiclk, Sensor_Mclk
     global ImgHdrMode, Sensor_init_reg, IniFileName
     dialog = QtWidgets.QFileDialog(self)
     dialog.setWindowTitle('Open File')
     dialog.setNameFilter('INI files (*.ini)')
     dialog.setDirectory(QtCore.QDir.currentPath())
     dialog.setFileMode(QtWidgets.QFileDialog.ExistingFile)
     if dialog.exec_() == QtWidgets.QDialog.Accepted:
         IniFileName = str(dialog.selectedFiles()[0])
         print(IniFileName)
         ini_config = ini.parse(open(IniFileName).read())
         sensor_type = ini_config["SensorType"]["0"]
         ImgWidth = int(ini_config[sensor_type]["Width"])
         ImgHeight = int(ini_config[sensor_type]["Height"])
         ImgFormat = int(ini_config[sensor_type]["SrOutputFormats"])
         ImgInterface = int(ini_config[sensor_type]["Interface"])
         mipiclk = float(ini_config[sensor_type]["MipiClkRate"])
         Sensor_Mclk = float(ini_config[sensor_type]["MclkRate"])
         ImgHdrMode = int(ini_config[sensor_type]["HDR_Mode"])
         #Sensor_init_reg = ini_config["INI_Register"] #issue: lost some repeat string
     else:
         return None
Example No. 3
def dash_energy(input):
    path = 'data'
    params = ini.parse(open(input).read())
    N = int(params['particles']['N'])  # Number of particles
    tmax = float(params['time']['tmax'])
    realTime = bool(params['diagnostics']['realTime'])

    pd.options.plotting.backend = "plotly"
    countdown = 20

    if os.path.exists(pjoin(path, 'energy.txt')):
        time, energy = np.loadtxt(pjoin(path, 'energy.txt'), unpack=True)
        data = np.stack((time, energy), axis=1)
        df = pd.DataFrame(data, columns=['timestep', 'Energy'])
        fig = df.plot(template='plotly_dark')
    else:
        fig = go.Figure(data=[go.Scatter(x=[], y=[])])
        fig.layout.template = 'plotly_dark'

    app = dash.Dash(__name__,
                    update_title=None)  # remove "Updating..." from title
    app.layout = html.Div([
        html.H1("PPDyn: Energy"),
        dcc.Interval(
            id='interval-component',
            interval=1 * 1000,  # in milliseconds
            n_intervals=0),
        dcc.Graph(id='graph'),
    ])

    # Define callback to update graph
    @app.callback(Output('graph', 'figure'),
                  [Input('interval-component', "n_intervals")])
    def streamFig(value):
        global df
        if os.path.exists(pjoin(path, 'energy.txt')):
            time, energy = np.loadtxt(pjoin(path, 'energy.txt'), unpack=True)
            data = np.stack((time, energy), axis=1)
            df1 = pd.DataFrame(data, columns=['timestep', 'Energy'])
            fig = df1.plot(x='timestep', y='Energy', template='plotly_dark')
        else:
            fig = go.Figure(data=[go.Scatter(x=[], y=[])])
            fig.layout.template = 'plotly_dark'
        #fig.show()
        return (fig)

    # def run():
    #     app.scripts.config.serve_locally = True
    #     app.run_server(port = 8069, dev_tools_ui=True, debug=False,
    #                       dev_tools_hot_reload =True, threaded=False)
    # s = Process(target=run)
    # s.start()

    webbrowser.open('http://127.0.0.1:8069/')
    app.scripts.config.serve_locally = True
    app.run_server(port=8069,
                   dev_tools_ui=True,
                   debug=False,
                   dev_tools_hot_reload=True,
                   threaded=False)
Example No. 4
def eview(input):
    params = ini.parse(open(input).read())
    N = int(params['particles']['N'])  # Number of particles
    tmax = float(params['time']['tmax'])
    realTime = bool(params['diagnostics']['realTime'])
    energy(N)
    plt.show()
    plt.close()
Example No. 5
def load(fpath: str, silent: bool):
    import chkascii
    if chkascii.is_invalid(fpath, False, False):
        return None
    import ini
    f = open(fpath, 'rb')
    cfg = ini.parse(f.read().decode('utf-8'))
    f.close()
    ret = {}
    fac = sdl2.ext.SpriteFactory(sdl2.ext.SOFTWARE)
    ret['tiles'] = fac.from_image(
        name2fpath(cfg['']['tileset']) + '.4tn.il.png')
    import jascpal
    fullpal = jascpal.from_file(
        name2fpath(cfg['']['tileset']) + '.jasc', silent)
    ret['palettes'] = chunks(fullpal, 16)
    ret['count'] = int(cfg['']['count'], 0)
Example No. 6
def main(args):
    argc = len(args)
    if argc < 5:
        raise Exception('Insufficient number of arguments provided:\n%s' %
                        '\n'.join(args) + '\n\nExiting...')
    from os import path, sep
    args[1] = args[1].rstrip().rstrip(sep)
    args[2] = args[2].rstrip().rstrip(sep)
    if not path.isdir(args[1]):
        raise Exception('Dependency source directory is inaccessible or ' +
                        'does not exist: ' + args[1])
    assetDir = path.join(args[1], 'assets', args[4])
    libsDir = path.join(args[1], 'lib' + args[3], args[4])
    dirsToMake = []
    srcDepsToCopy = []
    dstDepsToCopy = []
    try:
        settings = ini.parse(assetDir + '.ini')
        for key in settings:
            filePath = path.join(args[1], 'assets', args[4], key)
            if not path.isfile(filePath):
                raise Exception(
                    'Asset INI section "' + key + '" does ' +
                    'not correspond to an accessible file in the assets ' +
                    'directory for target "' + args[4] + '"')
            copyKey = settings[key]['copy']
            if copyKey == '0':
                continue
            if copyKey != '1':
                raise Exception('Asset INI property "Copy" in section "' +
                                key + '" has an invalid value of "' + copyKey +
                                '"; ' + 'must be either 0 or 1')
            # Otherwise copyKey must be 1
            if 'outputpath' in settings[key]:
                outpathKey = settings[key]['outputpath']
                if not outpathKey.startswith('/'):
                    raise Exception('Asset INI property "OutputPath" in ' +
                                    'section "' + key +
                                    '" has an invalid value of "' + outpathKey +
                                    '"; must begin with a forward slash')
                if outpathKey.endswith('/'):
                    raise Exception('Asset INI property "OutputPath" in ' +
                                    'section "' + key +
                                    '" has an invalid value of "' + outpathKey +
                                    '"; cannot be a directory')
                outpathKey = outpathKey.lstrip('/')
                outpath = outpathKey.replace('/', sep)
                if '/' in outpathKey:
                    dir = path.dirname(outpath)
                    dirsToMake += [path.join(args[2], dir)]
                file = path.join(args[2], outpath)
                dstDepsToCopy += [file]
            else:
                dstDepsToCopy += [path.join(args[2], key)]
            srcDepsToCopy += [filePath]
        from os import listdir
        libs = listdir(libsDir)
        for lib in libs:
            if not path.isfile(path.join(libsDir, lib)):
                continue
            dstDepsToCopy += [path.join(args[2], lib)]
            srcDepsToCopy += [path.join(libsDir, lib)]
        # Perform the copy operation
        from os import makedirs
        for dir in dirsToMake:
            if path.exists(dir):
                continue
            makedirs(dir)
        i = 0
        fileCount = len(srcDepsToCopy)
        from shutil import copy2
        from os import stat
        while i < fileCount:
            src = srcDepsToCopy[i]
            dst = dstDepsToCopy[i]
            if path.exists(dst):
                srcMtime = stat(src).st_mtime_ns
                dstMtime = stat(dst).st_mtime_ns
                if srcMtime <= dstMtime:
                    i += 1
                    continue
            copy2(src, dst)
            i += 1
    except Exception as ex:
        from sys import stderr
        print(ex, file=stderr)
        return -3
Example No. 7
#!/usr/bin/env python3
import fire
import subprocess
import ini
import os.path
from os import remove as removefile

__composer_not_installed__ = "Composer can't start because it is not installed! Install it at https://getcomposer.org/doc/00-intro.md#globally!"
__install_loc_not_set__ = "Flarum installation location is not set! Please set it first using flame locate LOCATION!"
__config_file_name__ = "flameconf.ini"

if not os.path.exists(__config_file_name__):
    open(__config_file_name__, "a").close()

conf_e = ini.parse(open(__config_file_name__, "r").read())

def confsave():
    removefile(__config_file_name__)
    with open(__config_file_name__, "w") as f:
        f.write(ini.stringify(conf_e))
        f.close()

def err(e):
    return "ERROR: " + e


def check_env():
    try:
        t = conf_e["install_loc"]
    except KeyError:
        print(err(__install_loc_not_set__))
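A quick round-trip sketch of the parse/stringify pair that confsave() relies on; the key and values here are hypothetical, and it assumes the parsed object accepts dict-style assignment (the example above only reads from it).

import ini

conf = ini.parse('install_loc=/var/www/flarum\n')  # hypothetical config text
conf['install_loc'] = '/srv/flarum'                # assumed dict-style assignment
print(ini.stringify(conf))                         # serialize back to INI text, as confsave() does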
Example No. 8
        if os.path.exists(pjoin(path, 'energy.txt')):
            time, energy = np.loadtxt(pjoin(path, 'energy.txt'), unpack=True)
            ax.clear()
            ax.plot(time, energy / N)
            ax.set_xlabel("$timestep$")
            ax.set_ylabel("$Energy$")
            ax.set_title("Timestep: %d" % time[-1])

    fig, ax = plt.subplots(figsize=(6, 6))

    ani = animation.FuncAnimation(fig, animate, interval=1000)

    plt.show()


parser = argparse.ArgumentParser(
    description='Plasma Particle Dynamics (PPDyn)')
parser.add_argument('-i',
                    '--input',
                    default='input.ini',
                    type=str,
                    help='Input file name')
args = parser.parse_args()
inputFile = args.input

params = ini.parse(open(inputFile).read())
N = int(params['particles']['N'])  # Number of particles
tmax = float(params['time']['tmax'])

energy(N)
Example No. 9
"""

    expected = {
        '': {
            'key': 'value',
            'key1': 'value1',
        },

        'section1': {
            'key1': 'value1',
            'key2': 'this=works right?',
        },
    }

    try:
        actual = parse(source)

    except INISyntaxError as e:
        print('Syntax Error on line {0}, {1}'.format(e.num, e.line))

    else:
        assert expected == actual, ('Expected {0}, got {1}'
                                    .format(expected, actual))

    syntax_error = """
this should give an error.
    """

    try:
        actual = parse(syntax_error)
Example No. 10
def main(args):
    # UTF-8 is off on Windows by default
    if not usingUTF8():  # This fixes it if it's off, though
        print('Changed the codepage to UTF-8. Please rerun this script.')
        return 0
    for line in STARTUP:
        pprint(line)
    argc = len(args)  # Save CPU
    if argc < 3:
        raise Exception('Insufficient arguments provided')
    if not os.path.isfile(args[1]):
        raise Exception('Provided INI file is inaccessible')
    # The solution config file
    mainIni = ini.parse(args[1])
    if int(mainIni['']['version']) > 0:
        # v0 is the latest, as far as we know
        raise Exception('Future INI schema version found; not supported')
    # Either debug/release (w/ optional architecture), or we're linting
    if re.fullmatch(r'((debug|release)(32|64)?)|lint', args[2]) is None:
        raise Exception('Provided build type is invalid')
    pprint(head='prep')
    # Get the project list, ordered for dependency satisfaction
    projectNames = mainIni['']['order'].lower().split(',')
    i = 0
    projectCt = len(projectNames)  # Save CPU
    taskName = 'build'
    projects = {}
    # Get settings for all the projects
    while i < projectCt:
        project = projectInit(mainIni['projects'][projectNames[i]])
        data = {
            projectNames[i]: {
                'projIni': project[0],
                'assetIni': project[1],
                'srcDir': project[2],
                'incDir': project[3],
                'langs': project[4]
            }
        }
        projects = {**projects, **data}
        i += 1
    i = 0
    pprint(head='start')
    try:
        if args[2] == 'lint':
            taskName = 'lint'  # Used if things go wrong
            while i < projectCt:
                project = projects[projectNames[i]]
                pprint(project['projIni']['']['name'], action='lint')
                # Lint with the project INI and its source directory
                lint(project['projIni'], project['srcDir'])
                i += 1
        else:  # compiling instead
            # Get project source directories for internal dependency inclusion
            localIncPaths = []
            while i < projectCt:
                project = projects[projectNames[i]]
                localIncPaths += [
                    os.path.join(project['srcDir'],
                                 project['projIni']['']['name'],
                                 project['projIni']['source']['sourcedir'])
                ]
                i += 1
            i = 0
            while i < projectCt:
                # Set up all project variables:-
                project = projects[projectNames[i]]
                # Applicable code languages in project
                pLangs = project['langs']
                # 'executable', 'shared', or 'static'
                pFormat = project['projIni']['output']['type']
                # list of libraries to link
                pLibs = LIBS[:]  # Duplicate array
                if 'depends' in project['projIni']['']:
                    # Include all dependencies in library list
                    _libs = []
                    if ',' in project['projIni']['']['depends']:
                        _libs += project['projIni']['']['depends'].split(',')
                    else:
                        _libs += [project['projIni']['']['depends']]
                    if os.name == 'nt':
                        j = 0
                        _libsCt = len(_libs)
                        while j < _libsCt:
                            _libs[j] = _libs[j].replace('sfml-', 'sfml')
                            j += 1
                    pLibs += _libs
                # Full path for output binary, including name
                _name = project['projIni']['output']['name']
                if os.name != 'nt' and pFormat != 'executable':
                    _name = 'lib' + _name
                pOutPath = os.path.join(project['projIni']['output']['path'],
                                        _name)
                # Language-agnostic location for object code
                pObjPath = os.path.join(project['projIni']['output']['path'],
                                        'code', projectNames[i])
                if not os.path.exists(pObjPath):
                    if os.name == 'nt':
                        # Ya gotta keep 'em separated
                        if 'c' in pLangs or 'c++' in pLangs:
                            os.makedirs(os.path.join(pObjPath, 'c'))
                        if 'd' in pLangs:
                            os.makedirs(os.path.join(pObjPath, 'd'))
                    else:
                        os.makedirs(pObjPath)
                # Direct path to source code
                pSrcPath = os.path.join(
                    mainIni['projects'][projectNames[i]].replace('/', os.sep),
                    project['srcDir'].replace('/', os.sep))
                # Paths for C(++) #includes and D imports
                pIncPaths = [
                    os.path.join(
                        mainIni['']['includedir'].replace('/', os.sep),
                        project['incDir'].replace('/', os.sep)),
                    mainIni['']['includedir'].replace('/', os.sep)
                ] + INCDIRS + localIncPaths
                pObjGlob = os.path.join(pObjPath, '**', '*' + OBJEXT)
                libDepsPath = ''
                if os.name == 'nt':
                    # Manually include external dependencies, since Windows
                    # leaves us on our own with that
                    libDepsPath = '.\\deps\\lib'
                    if IS64BIT:
                        libDepsPath += '64'
                    else:
                        libDepsPath += '32'
                    if DEBUG:
                        libDepsPath += '\\debug'
                    else:
                        libDepsPath += '\\release'
                    if os.path.isdir(libDepsPath):
                        allDeps = os.listdir(libDepsPath)
                        for dep in allDeps:
                            if dep.lower().endswith('.dll'):
                                shutil.copy2(
                                    libDepsPath + '\\' + dep,
                                    project['projIni']['output']['path'])
                for lang in pLangs:
                    # Compile all C code
                    if lang == 'c':
                        flags = CFLAGS[:]  # dup() array
                        if os.name == 'nt':
                            # Path must end with a backslash for CL.EXE
                            # Separate C(++) code from D code because of .obj
                            flags += [COUTFLAG + pObjPath + '\\c\\']
                        else:
                            flags += [COUTFLAG, pObjPath]
                        for incDir in pIncPaths:
                            if os.name == 'nt':
                                flags += [CINCFLAG + incDir]
                            else:
                                flags += [CINCFLAG, incDir]
                        if os.name == 'nt':
                            # MT = multithreaded app
                            # MD = multithreaded library
                            # d suffix = debugging
                            if DEBUG:
                                if pFormat == 'executable':
                                    flags += ['/MTd']
                                elif pFormat == 'shared':
                                    flags += ['/MDd']
                            else:
                                if pFormat == 'executable':
                                    flags += ['/MT']
                                elif pFormat == 'shared':
                                    flags += ['/MD']
                        sources = getSources(pSrcPath, ['c'])
                        for source in sources:
                            source = source.replace('.' + os.sep, '')
                            com = [CC] + flags
                            if os.name != 'nt':
                                com += [
                                    COUTFLAG,
                                    os.path.join(
                                        pObjPath,
                                        os.path.basename(source) + OBJEXT)
                                ]
                            com += [source]
                            pprint(source, action='c')
                            try:
                                run(' '.join(com),
                                    shell=True,
                                    check=True,
                                    stdout=PIPE)
                            except CalledProcessError as ex:
                                lines = ex.stdout.decode().splitlines()
                                for line in lines:
                                    print('ERROR: ' + line)
                                raise Exception(
                                    'Compilation unit failed ' +
                                    'with command ' + color('', fg='white') +
                                    color(' '.join(com), fg='white'))
                    # Compile all project C++ code
                    elif lang == 'c++':
                        flags = CPPFLAGS[:]  # Duplicate array
                        if os.name == 'nt':
                            # Path must end with a backslash for CL.EXE
                            # Separate C(++) code from D code because of .obj
                            flags += [COUTFLAG + pObjPath + '\\c\\']
                        for incDir in pIncPaths:
                            if os.name == 'nt':
                                flags += [CINCFLAG + incDir]
                            else:
                                flags += [CINCFLAG, incDir]
                        if os.name == 'nt':
                            # See notes above for flag meanings
                            if DEBUG:
                                if pFormat == 'executable':
                                    flags += ['/MTd']
                                elif pFormat == 'shared':
                                    flags += ['/MDd']
                            else:
                                if pFormat == 'executable':
                                    flags += ['/MT']
                                elif pFormat == 'shared':
                                    flags += ['/MD']
                        sources = getSources(pSrcPath, ['c++'])
                        for source in sources:
                            source = source.replace('.' + os.sep, '')
                            com = [CXX] + flags
                            if os.name != 'nt':
                                com += [
                                    COUTFLAG,
                                    os.path.join(
                                        pObjPath,
                                        os.path.basename(source) + OBJEXT)
                                ]
                            com += [source]
                            pprint(source, action='c++')
                            try:
                                run(' '.join(com),
                                    shell=True,
                                    check=True,
                                    stdout=PIPE)
                            except CalledProcessError as ex:
                                lines = ex.stdout.decode().splitlines()
                                for line in lines:
                                    print('ERROR: ' + line)
                                raise Exception(
                                    'Compilation unit failed ' +
                                    'with command ' + color('', fg='white') +
                                    color(' '.join(com), fg='white'))
                    # Compile all project D code
                    elif lang == 'd':
                        # Separate D code from C(++) because of .obj
                        flags = DFLAGS[:]  # Duplicate array (avoid mutating DFLAGS)
                        if os.name == 'nt':
                            flags += [DOUTFLAG + pObjPath + '\\d\\']
                        else:
                            flags += [DOUTFLAG + pObjPath]
                        if pFormat == 'shared':
                            flags += ['-shared']
                        for incDir in pIncPaths:
                            flags += [DINCFLAG + incDir]
                        sources = getSources(pSrcPath, ['d'])
                        for source in sources:
                            source = source.replace('.' + os.sep, '')
                            # Ignore these, only DMD cares about them
                            # They overwrite each other anyway
                            if source.endswith('package.d'):
                                continue
                            com = [DC, source] + flags
                            pprint(source, action='d')
                            try:
                                run(' '.join(com),
                                    shell=True,
                                    check=True,
                                    stdout=PIPE)
                            except CalledProcessError as ex:
                                lines = ex.stdout.decode().splitlines()
                                for line in lines:
                                    print('ERROR: ' + line)
                                raise Exception(
                                    'Compilation unit failed ' +
                                    'with command ' + color('', fg='white') +
                                    color(' '.join(com), fg='white'))
                # Link the project
                com = [LINK] + glob.glob(pObjGlob, recursive=True)
                # Append a file extension if needed
                if os.name == 'nt':
                    if pFormat == 'executable':
                        pOutPath += '.exe'
                    elif pFormat == 'shared':
                        pOutPath += '.dll'
                        # Windows needs this
                        com += ['/DLL']
                    elif pFormat == 'static':
                        pOutPath += '.lib'
                    # Give the output path to the linker
                    com += [LINKOUTFLAG + pOutPath]
                else:
                    if pFormat == 'shared':
                        pOutPath += '.so'
                        com += ['-shared']
                    elif pFormat == 'static':
                        pOutPath += '.a'
                    com += [LINKOUTFLAG, pOutPath]
                # Add common linker flags
                com += LINKFLAGS
                if libDepsPath != '':
                    # This is for external dependency linkage
                    com += [LINKDIRFLAG + libDepsPath]
                # Add build directory to lib search paths
                com += [LINKDIRFLAG + project['projIni']['output']['path']]
                for libPath in LIBDIRS:
                    # Add common lib search paths
                    com += [LINKDIRFLAG + libPath]
                # Add external dependency libs
                for lib in pLibs:
                    if os.name == 'nt':
                        # On Windows, boost is compiled funny. Check that
                        if lib.startswith('boost'):
                            com += [LINKLIBFLAG + lib + BOOST_SUF + '.lib']
                        else:
                            com += [LINKLIBFLAG + lib + '.lib']
                    else:
                        com += [LINKLIBFLAG + lib]
                pprint(project['projIni']['output']['name'], action='link')
                # Link!
                try:
                    run(' '.join(com), shell=True, check=True, stdout=PIPE)
                except CalledProcessError as ex:
                    lines = ex.stdout.decode().splitlines()
                    for line in lines:
                        print('ERROR: ' + line)
                    raise Exception('Compilation unit failed ' +
                                    'with command ' + color('', fg='white') +
                                    color(' '.join(com), fg='white'))
                i += 1
    except Exception as ex:
        pprint('Exception in ' + taskName + ': ' +
               color('{0}'.format(ex), style='bold'))
        pprint(head='fail')
        return -1
    pprint(head='pass')
    pprint()
    return 0
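For orientation, here is a hypothetical solution INI consistent with the keys this script reads (mainIni['']['version'], mainIni['']['order'], mainIni['']['includedir'], and the [projects] section). Every name and path is a placeholder, and the sketch assumes the string-parsing ini module used by most examples here, which files top-level keys under the '' section (see Example No. 9).

import ini

# Placeholder solution config; keys mirror the lookups in main() above.
solution_ini = """
version=0
order=core,app
includedir=include

[projects]
core=projects/core
app=projects/app
"""

mainIni = ini.parse(solution_ini)
print(mainIni['']['order'].lower().split(','))  # expected: ['core', 'app']
print(mainIni['projects']['core'])              # expected: projects/core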
Example No. 11
		exit()

if __name__ == "__main__":
	os.system("cls")
	start_time=datetime.now()
	print("Started At: {}".format(start_time.strftime("%Y-%m-%d %H:%M:%S")))
	
	if not os.path.isfile('config.ini'):
		path="{}\\results\\".format(os.getcwd())
		config=open("./config.ini","w",encoding='utf-8')
		os.makedirs(path)
		config.write('keywords="کرونا,ویروس‌کرونا,کوید_19,کوید19,کروناویروس,ویروس_کرونا"\ncount=20000\nheadless=no\ntype=csv\nsplit=1000\nresults_path="{}"\nfrom_date=2020-10-01\nto_date=2020-10-15'.format(path))
		config.close()
		print("Config File Generated... Run Again")
		exit(0)
	config = ini.parse(open('./config.ini',encoding='utf-8').read())
	count = int(config['count'])
	bar = Bar('Processing', max=count)
	unique_hash=[]
	if os.path.exists("{}unique-hashes.txt".format(config['results_path'])):
		with open("{}unique-hashes.txt".format(config['results_path']),'r',encoding='utf-8') as unique_hash_file:
			for line in unique_hash_file:
				unique_hash.append(line.replace("\n",""))
				bar.next()
			print("\r\nLoad Old Hashs [{}]".format(len(unique_hash)))
			time.sleep(3)

	keywords_stack=[]
	if "," in config['keywords']:
		keywords_stack=config['keywords'].split(',')
	else:
Example No. 12
def main():
    params = ini.parse(open('input.ini').read())
Example No. 13
def main(argv):
    """ PPDyn main() function """
    parser = argparse.ArgumentParser(description='Plasma Particle Dynamics (PPDyn)')
    parser.add_argument('-i','--input', default='input.ini', type=str, help='Input file name')
    args        = parser.parse_args()
    inputFile   = args.input

    params = ini.parse(open(inputFile).read())
    #========== Input Parameters ===========

    Lx      = float(params['simbox']['Lx'])  # System length in X
    Ly      = float(params['simbox']['Ly'])   # System length in Y
    Lz      = float(params['simbox']['Lz'])   # System length in Z

    N       = int(params['particles']['N'])    # Number of particles

    Vxmax   = float(params['particles']['Vxmax']) # Maximum velocity in X
    Vymax   = float(params['particles']['Vymax']) # Maximum velocity in Y
    Vzmax   = float(params['particles']['Vzmax']) # Maximum velocity in Z

    k       = float(params['screening']['k'])

    Temp    = float(params['particles']['Temp'])

    tmax    = float(params['time']['tmax'])  # Final time
    dt      = float(params['time']['dt']) # time step size
    Nt      = round(tmax/dt) #number of time steps

    #========= Boundary ==========
    btype   = str(params['boundary']['btype']) # Type of boundary

    #========= Diagnostics =======
    dumpPeriod  = int(params['diagnostics']['dumpPeriod'])
    path        = "data/"  # DO NOT CHANGE THE PATH
    os.makedirs(path, exist_ok=True)

    dumpData    = bool(params['diagnostics']['dumpData'])
    f           = h5py.File(path+"particle.hdf5","w")
    if dumpData:
        diagn.attributes(f,tmax,Lx,Ly,Lz,N,dt,dumpPeriod)
        dset = f.create_dataset('energy', (1,), maxshape=(None,), dtype='float64', chunks=(1,))

    vtkData     = bool(params['diagnostics']['vtkData'])
    realTime    = bool(params['diagnostics']['realTime'])
    #========== Options ============
    parallelMode    = bool(params['options']['parallelMode'])
    if parallelMode:
        if btype == 'periodic':
            from pusher_parallel import verlet_periodic as verlet
            from init import initial_periodic as initial
            print("Running in Parallel Mode (Periodic boundary)")
        elif btype == 'reflecting':
            from pusher_parallel import verlet_reflecting as verlet
            from init import initial_reflecting as initial
            print("Running in Parallel Mode (Reflecting boundary)")
    else:
        if btype == 'periodic':
            from pusher_serial import verlet_periodic as verlet
            from init import initial_periodic as initial
            print("Running in Serial Mode (Periodic boundary)")
        elif btype == 'reflecting':
            from pusher_serial import verlet_reflecting as verlet
            from init import initial_reflecting as initial
            print("Running in Serial Mode (Reflecting boundary)")
    #========= Initialize ========
    x,y,z,vx,vy,vz,ux,uy,uz,ax,ay,az,time,data_num = initial(Lx,Ly,Lz,Vxmax,Vymax,Vzmax,N,tmax,Nt,k,dumpPeriod)

    #========= Time Loop =========

    for t in range(len(time)):
        KE = 0.0   # Reset KE
        x,y,z,vx,vy,vz,ux,uy,uz,ax,ay,az,KE = verlet(x,y,z,vx,vy,vz,ux,uy,uz,ax,ay,az,dt,Lx,Ly,Lz,N,KE,k)
        #============  Thermostat =========================
        vx,vy,vz = berendsen(vx,vy,vz,dt,Temp,KE,N,t,tmax)

        #============ Diagnostics Write ===================
        if dumpData:
            if t%dumpPeriod==0:
                diagn.configSpace(f,dset,t,x,y,z,KE)
                print('TimeSteps = %d'%int(t)+' of %d'%Nt+' Energy: %e'%KE)

    if vtkData:
        from vtk_data import vtkwrite
        print('Writing VTK files for Paraview visualization ...')
        vtkwrite()
    return 0
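For reference, a hypothetical input.ini covering the sections and keys this main() reads; every value is a placeholder. One caveat carried over from the examples as written: bool() of any non-empty string is True in Python, so flags read as bool(params['diagnostics']['realTime']) are truthy even when the file says 0 or False.

import ini

# Placeholder input.ini; sections and keys mirror the lookups above.
sample_input = """
[simbox]
Lx=10.0
Ly=10.0
Lz=10.0

[particles]
N=100
Vxmax=1.0
Vymax=1.0
Vzmax=1.0
Temp=1.0

[screening]
k=1.0

[time]
tmax=10.0
dt=0.01

[boundary]
btype=periodic

[diagnostics]
dumpPeriod=10
dumpData=1
vtkData=0
realTime=0

[options]
parallelMode=0
"""

params = ini.parse(sample_input)
Nt = round(float(params['time']['tmax']) / float(params['time']['dt']))  # 1000 steps
print(int(params['particles']['N']), Nt)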
Example No. 14
import ini
from pyrogram import Client
from urllib.parse import urlparse
from pyrogram import InlineKeyboardMarkup, InlineKeyboardButton

config = ini.parse(open('config.ini').read())


class VideoFromChannel:
    def __init__(self, chat_id=None, depth=1):
        self.depth = depth
        self.chat_id = chat_id
        self.user_videos = {}
        print(config["Telegram"]["TOKEN"])
        self.client = Client("Video_parser_bot",
                             api_id=config["Telegram"]["api_id"],
                             api_hash=config["Telegram"]["api_hash"],
                             bot_token=config["Telegram"]["TOKEN"])

    def change_depth(self, new_depth=1):
        self.depth = new_depth
        return self.depth

    def clear_user_data(self):
        self.user_videos = {}

    def pop_user_video(self, file_id=''):
        item = self.user_videos.pop(file_id, None)
        if item:
            return True
        else:
Example No. 15
 def __init__(self):
     super().__init__()
     self._config = ini.parse(open("/etc/wpa_statusd.ini").read())
     self._client = wpa_status.Client(self._config["socket"])
     self._text_cache = ""
Example No. 16
def ppdyn(argv):
    print(
        "===================================================================")
    print("Running PPDyn (Plasma Particle Dynamics)")
    print("Author: Dr. Sayan Adhikari, PostDoc @ UiO, Norway")
    print(
        "::::::: Dr. Rupak Mukherjee, Associate Research Physicist @ PPPL, NJ")
    print("Input: Edit input.ini file to change the parameters for simulation")
    print(
        "===================================================================")
    """ PPDyn main() function """
    # parser = argparse.ArgumentParser(description='Plasma Particle Dynamics (PPDyn)')
    # parser.add_argument('-i','--input', default='input.ini', type=str, help='Input file name')
    # args        = parser.parse_args()
    # inputFile   = args.input

    params = ini.parse(open(argv).read())
    #========== Input Parameters ===========

    Lx = float(params['simbox']['Lx'])  # System length in X
    Ly = float(params['simbox']['Ly'])  # System length in Y
    Lz = float(params['simbox']['Lz'])  # System length in Z

    N = int(params['particles']['N'])  # Number of particles

    Vxmax = float(params['particles']['Vxmax'])  # Maximum velocity in X
    Vymax = float(params['particles']['Vymax'])  # Maximum velocity in Y
    Vzmax = float(params['particles']['Vzmax'])  # Maximum velocity in Z

    k = float(params['screening']['k'])

    Temp = float(params['particles']['Temp'])

    tmax = float(params['time']['tmax'])  # Final time
    dt = float(params['time']['dt'])  # time step size
    Nt = round(tmax / dt)  #number of time steps

    #========= Boundary ==========
    btype = str(params['boundary']['btype'])  # Type of boundary

    #========= Diagnostics =======
    dumpPeriod = int(params['diagnostics']['dumpPeriod'])
    path = "data/"  # DO NOT CHANGE THE PATH
    #========== Data Directory Setup =============
    if os.path.exists(path):
        print(
            "Data directory exists. Current data will be replaced after the run."
        )
    else:
        os.mkdir(path)
    dumpData = bool(params['diagnostics']['dumpData'])
    f = h5py.File(path + "particle.hdf5", "w")

    if dumpData:
        attributes(f, tmax, Lx, Ly, Lz, N, dt, dumpPeriod)
        dset = f.create_dataset('energy', (1, ),
                                maxshape=(None, ),
                                dtype='float64',
                                chunks=(1, ))

    vtkData = bool(params['diagnostics']['vtkData'])
    realTime = bool(params['diagnostics']['realTime'])
    if os.path.exists(pjoin(path, 'energy.txt')):
        os.remove(pjoin(path, 'energy.txt'))
    #========== Options ============
    parallelMode = bool(params['options']['parallelMode'])
    if parallelMode:
        if btype == 'periodic':
            from PPDyn.pusher_parallel import verlet_periodic as verlet
            from PPDyn.init import initial_periodic as initial
            print("Running in Parallel Mode (Periodic boundary)")
        elif btype == 'reflecting':
            from PPDyn.pusher_parallel import verlet_reflecting as verlet
            from PPDyn.init import initial_reflecting as initial
            print("Running in Parallel Mode (Reflecting boundary)")
    else:
        if btype == 'periodic':
            from PPDyn.pusher_serial import verlet_periodic as verlet
            from PPDyn.init import initial_periodic as initial
            print("Running in Serial Mode (Periodic boundary)")
        elif btype == 'reflecting':
            from PPDyn.pusher_serial import verlet_reflecting as verlet
            from PPDyn.init import initial_reflecting as initial
            print("Running in Serial Mode (Reflecting boundary)")
    #========= Initialize ========
    x, y, z, vx, vy, vz, ux, uy, uz, ax, ay, az, time, data_num = initial(
        Lx, Ly, Lz, Vxmax, Vymax, Vzmax, N, tmax, Nt, k, dumpPeriod)

    #========= Time Loop =========

    for t in range(len(time)):
        KE = 0.0  # Reset KE
        x, y, z, vx, vy, vz, ux, uy, uz, ax, ay, az, KE = verlet(
            x, y, z, vx, vy, vz, ux, uy, uz, ax, ay, az, dt, Lx, Ly, Lz, N, KE,
            k)
        #============  Thermostat =========================
        vx, vy, vz = berendsen(vx, vy, vz, dt, Temp, KE, N, t, tmax)

        #============ Diagnostics Write ===================
        if dumpData:
            if t % dumpPeriod == 0:
                configSpace(f, dset, t, x, y, z, KE)
                print('TimeSteps = %d' % int(t) + ' of %d' % Nt +
                      ' Energy: %e' % KE)

    if vtkData:
        from PPDyn.vtk_data import vtkwrite
        print('Writing VTK files for Paraview visualization ...')
        vtkwrite()
    return "All done!!"
Example No. 17
 def readIni(self, filename):
     return ini.parse(open(str(filename), 'r').read())
Example No. 18
def main():
    params = ini.parse(open('input.ini').read())

    # Input parameters
    directory   =   str(params['fileHierarchy']['directory'])
    inDir       =   str(params['fileHierarchy']['inDir'])
    outDir      =   str(params['fileHierarchy']['outDir'])
    imgDir      =   str(params['fileHierarchy']['imgDir'])
    contDir     =   str(params['fileHierarchy']['contDir'])

    fstart      =   int(params['fileSequence']['start'])
    fend        =   int(params['fileSequence']['end'])
    dt          =   int(params['fileSequence']['interval'])

    thresholdDensity = float(params['contourParams']['threshold'])

    show_anim = bool(params['animation']['show'])
    save_anim = bool(params['animation']['save'])
    fps       = float(params['animation']['fps'])

    INDIR =directory+"/"+inDir+"/"
    OUTDIR =directory+"/"+outDir+"/"
    IMGDIR =directory+"/"+outDir+"/"+imgDir+"/"
    CONTDIR =directory+"/"+outDir+"/"+contDir+"/"

    print("===========File Hierarchy===========")
    print("Raw data directory: "+INDIR)
    print("Processed Blob property data directory: "+OUTDIR)
    print("Blob images directory: "+IMGDIR)
    print("Blob contour data directory: "+CONTDIR)



    #========== Blob Data Directory Setup =============
    if os.path.exists(directory):
        if os.path.exists(OUTDIR):
            os.system('rm '+OUTDIR+"*.txt 2>/dev/null")
            if os.path.exists(IMGDIR) and os.path.exists(CONTDIR):
                os.system('rm '+IMGDIR+"* 2>/dev/null")
                os.system('rm '+CONTDIR+"* 2>/dev/null")
            else:
                os.system('mkdir '+IMGDIR)
                os.system('mkdir '+CONTDIR)
        else:
            os.system('mkdir '+OUTDIR)
            os.system('mkdir '+IMGDIR)
            os.system('mkdir '+CONTDIR)
    else:
        os.system('mkdir '+directory)
        os.system('mkdir '+OUTDIR)
        os.system('mkdir '+IMGDIR)
        os.system('mkdir '+CONTDIR)
    ############################################
    data_num = np.arange(start=fstart, stop=fend, step=dt, dtype=int)
    f = ad.file(INDIR+'asdex_phi_%d'%data_num[0]+'.bp')

    blob_size_file = open(OUTDIR+"/blob_size.txt", "w")
    
    Nx = f['numCells'][0]
    Ny = f['numCells'][1]
    Nz = f['numCells'][2]

    Xmin = f['lowerBounds'][0]
    Ymin = f['lowerBounds'][1]
    Zmin = f['lowerBounds'][2]

    Xmax = f['upperBounds'][0]
    Ymax = f['upperBounds'][1]
    Zmax = f['upperBounds'][2]

    dx = (Xmax - Xmin) / Nx
    dy = (Ymax - Ymin) / Ny

    z_slice = 10
    cnum = 100
    cnumout = 30
    color = 'jet'


    ################### INTERPOLATION ###########################

    def interpTestPoint(xWeight,yWeight,dx,dy,probeDensity):
        testDensity00 = probeDensity[0,0] * (dx-xWeight) * (dy-yWeight)
        testDensity01 = probeDensity[0,1] * xWeight * (dy-yWeight)
        testDensity10 = probeDensity[1,0] * (dx-xWeight) * yWeight
        testDensity11 = probeDensity[1,1] * xWeight * yWeight
        testDensity = ( testDensity00 + testDensity01 + testDensity10 + testDensity11 ) / (dx*dy)
        return testDensity

    ################### Shoelace formula to find polygon Area ###########################

    def PolyArea(x,y):
        return 0.5*np.abs(np.dot(x,np.roll(y,1))-np.dot(y,np.roll(x,1)))
    ####################################################################################
    #################### RAY TRACING ALGORITHM #########################################
    ####################################################################################

    # A Python3 program to check if a given point lies inside a given polygon
    # Refer https://www.geeksforgeeks.org/check-if-two-given-line-segments-intersect/
    # for explanation of functions onSegment(), orientation() and doIntersect()

    # Define Infinite (Using INT_MAX caused overflow problems)
    INF = 10000

    class Point:
        def __init__(self, x, y):
            self.x = x
            self.y = y

    # Given three colinear points p, q, r, the function checks if
    # point q lies on line segment 'pr'
    def onSegment(p, q, r):
        if ( (q.x <= max(p.x, r.x)) and (q.x >= min(p.x, r.x)) and
               (q.y <= max(p.y, r.y)) and (q.y >= min(p.y, r.y))):
            return True
        return False

    def orientation(p, q, r):
        # to find the orientation of an ordered triplet (p,q,r)
        # function returns the following values:
        # 0 : Colinear points
        # 1 : Clockwise points
        # 2 : Counterclockwise

        # See https://www.geeksforgeeks.org/orientation-3-ordered-points/amp/
        # for details of below formula.

        val = (float(q.y - p.y) * (r.x - q.x)) - (float(q.x - p.x) * (r.y - q.y))
        if (val > 0):
            # Clockwise orientation
            return 1
        elif (val < 0):
            # Counterclockwise orientation
            return 2
        else:
            # Colinear orientation
            return 0

    # The main function that returns true if
    # the line segment 'p1q1' and 'p2q2' intersect.
    def doIntersect(p1,q1,p2,q2):

        # Find the 4 orientations required for
        # the general and special cases
        o1 = orientation(p1, q1, p2)
        o2 = orientation(p1, q1, q2)
        o3 = orientation(p2, q2, p1)
        o4 = orientation(p2, q2, q1)

        # General case
        if ((o1 != o2) and (o3 != o4)):
            return True

        # Special Cases

        # p1 , q1 and p2 are colinear and p2 lies on segment p1q1
        if ((o1 == 0) and onSegment(p1, p2, q1)):
            return True

        # p1 , q1 and q2 are colinear and q2 lies on segment p1q1
        if ((o2 == 0) and onSegment(p1, q2, q1)):
            return True

        # p2 , q2 and p1 are colinear and p1 lies on segment p2q2
        if ((o3 == 0) and onSegment(p2, p1, q2)):
            return True

        # p2 , q2 and q1 are colinear and q1 lies on segment p2q2
        if ((o4 == 0) and onSegment(p2, q1, q2)):
            return True

        # If none of the cases
        return False

    # Returns true if the point p lies inside the polygon[] with n vertices
    def isInside(polygon, n, p):
        # There must be at least 3 vertices in polygon[]
        if (n < 3):
            return False

        # Create a point for line segment from p to infinite
        extreme = Point(INF, p.y)

        # Count intersections of the above line with sides of polygon
        count = 0
        i = 0

        # To initialize i for the first iteration of do-while loop of C++ type
        next = (i+1)%n
        # Check if the line segment from 'p' to 'extreme' intersects
        # with the line segment from 'polygon[i]' to 'polygon[next]'
        if (doIntersect(polygon[i], polygon[next], p, extreme)):
            # If the point 'p' is colinear with line segment 'i-next',
            # then check if it lies on segment. If it lies, return true,
            # otherwise false
            if (orientation(polygon[i], p, polygon[next]) == 0):
                return onSegment(polygon[i], p, polygon[next])
            count = count + 1
        i = next

        while (i != 0):
            next = (i+1)%n
            # Check if the line segment from 'p' to 'extreme' intersects
            # with the line segment from 'polygon[i]' to 'polygon[next]'
            if (doIntersect(polygon[i], polygon[next], p, extreme)):
                # If the point 'p' is colinear with line segment 'i-next',
                # then check if it lies on segment. If it lies, return true,
                # otherwise false
                if (orientation(polygon[i], p, polygon[next]) == 0):
                    return onSegment(polygon[i], p, polygon[next])
                count = count + 1
            i = next
            if (i == 0):
                break
        # Return true if count is odd, false otherwise
        if (count%2 == 1):
            return True
        else:
            return False

    ####################################################################################
    ####################################################################################
    ####################################################################################

    def func_data(ionDensityData,phiData):
        ionDensityInterp = pg.data.GInterpModal(ionDensityData, 1, 'ms')
        phiInterp = pg.data.GInterpModal(phiData, 1, 'ms')
        interpGrid, ionDensityValues = ionDensityInterp.interpolate()
        interpGrid, phiValues = phiInterp.interpolate()

        #exValues = - np.gradient(phiValues,dx,axis = 0)
        #dexdxValues = np.gradient(exValues,dx,axis = 0)
        eyValues = - np.gradient(phiValues,dy,axis = 1)

        # get cell center coordinates
        CCC = []
        for j in range(0,len(interpGrid)):
            CCC.append((interpGrid[j][1:] + interpGrid[j][:-1])/2)

        x_vals = CCC[0]
        y_vals = CCC[1]
        z_vals = CCC[2]
        X, Y = np.meshgrid(x_vals, y_vals)
        ionDensityGrid = np.transpose(ionDensityValues[:,:,z_slice,0])
        eyGrid = np.transpose(eyValues[:,:,z_slice,0])
        return x_vals,y_vals,X,Y,ionDensityGrid,eyGrid

    def animate(i):
            blob_counter = 0
            ionDensity=INDIR+'asdex_ion_GkM0_%d'%data_num[i]+'.bp'
            phi=INDIR+'asdex_phi_%d'%data_num[i]+'.bp'
            ionDensityData = pg.data.GData(ionDensity)
            phiData = pg.data.GData(phi)

            x_vals,y_vals,X,Y,ionDensityGrid,eyGrid = func_data(ionDensityData,phiData)

            Nx = len(x_vals)
            Ny = len(y_vals)

            ax1.cla()
            ax1.set_title('Time = %d'%i+' $\\mu$s')

            cp1 = ax1.contourf(X, Y, ionDensityGrid, cnum, cmap=color)
            #cp2 = ax1.contour(X, Y, eyGrid, cnum, linewidths=0.1, colors='black', linestyles='solid')
            cp3 = ax1.contour(X, Y, ionDensityGrid, cnumout, linewidths=0.1, colors='black', linestyles='solid')
            #cp3 = ax1.contour(X, Y, ionDensityGrid, cnumout, linewidths=1, cmap=color)
            cp4 = ax1.contour(X, Y, ionDensityGrid, [thresholdDensity], linewidths=1, colors='black', linestyles='solid')
            # #plt.grid()
            # ax1.set_xticks(x_vals , minor=True)
            # ax1.set_yticks(y_vals , minor=True)
            # #ax1.grid(which='both')
            # ax1.grid(which='minor', alpha=0.9, color='k', linestyle='-')

            p = cp4.collections[0].get_paths()
            contour_number = len(p)
            imageCounter = 0
            for j in range(contour_number):
                p_new = cp4.collections[0].get_paths()[j]
                v = p_new.vertices
                x = v[:,0]
                y = v[:,1]
                x_min = np.min(x)
                x_max = np.max(x)
                y_min = np.min(y)
                y_max = np.max(y)
                blobMidX = (x_min + x_max)/2
                blobMidY = (y_min + y_max)/2
                blobLimX = abs(x_max - x_min)
                blobLimY = abs(y_max - y_min)
                if (abs(x[0]-x[len(x)-1]) <= 1e-10) and blobLimX > 2*dx and blobLimY > 2*dy:
                    polygon = []
                    for plgn in range(len(x)):
                        polygon.append(Point(x[plgn],y[plgn]))
                    npoly = len(polygon)
                    numTrial = 100
                    blobConfidence = 0
                    insideTrialPoints = 0
                    for numT in range(numTrial):
                        xT = 0.5*(x_max+x_min) - 0.5*(x_max-x_min)*(random()-0.5)
                        yT = 0.5*(y_max+y_min) - 0.5*(y_max-y_min)*(random()-0.5)
                        #print("Trial point",numT,"with",round(xT,4),round(yT,4),'for contour number %d'%j)
                        trialPoint = Point(xT,yT)
                        if isInside(polygon, npoly, trialPoint):
                            insideTrialPoints = insideTrialPoints + 1
                            #print("Trial point", numT, "is INSIDE for contour number %d"%j)
                            xd = abs(x_vals-xT)
                            yd = abs(y_vals-yT)
                            idx = np.where(xd <= 0.5*dx)
                            idy = np.where(yd <= 0.5*dy)
                            ionDensityFind = np.reshape(ionDensityGrid,Nx*Ny)
                            probeDensity = np.zeros((2,2))
                            for id in range(len(idx[0])):
                                for jd in range(len(idy[0])):
                                    probeDensity[id,jd] = ionDensityFind[(idy[0][jd] * Nx) + (idx[0][id] + 1)]

                            xGrid = np.zeros(2)
                            yGrid = np.zeros(2)
                            for id in range(len(idx[0])):
                                xGrid[id] = x_vals[idx[0][id]]
                            for jd in range(len(idy[0])):
                                yGrid[jd] = y_vals[idy[0][jd]]

                            xWeight = abs(xGrid[0]-xT)
                            yWeight = abs(yGrid[0]-yT)
                            testDensity = interpTestPoint(xWeight,yWeight,dx,dy,probeDensity)
                            if (testDensity >= thresholdDensity):
                                    #print("Interpolated point",numT,"with",round(xInterp,4),round(yInterp,4)," for Contour number %d"%j+" is INSIDE & truly a BLOB! Yeyy...")
                                    blobConfidence = blobConfidence + 1

                            else:
                                    None
                        else:
                            None
                            #print("Trial point", numT, " lies Outside before interpolation")

                    confidence = blobConfidence/insideTrialPoints
                    #print("Confidence = ",confidence*100,"%")
                    if (confidence > 0.80):
                        blob_counter = blob_counter + 1
                        polyArea = PolyArea(x,y)
                        #print(polyArea)
                        # print('File number = %d'%data_num[i]+', contour number %d'%j+' = It is TRULY a blob with confidence',confidence*100,"%")
                        blob_size_file.write('%d'%data_num[i]+'\t%d'%j+'\t%.8f'%blobLimX+'\t%.8f'%blobLimY+'\t%.8f'%blobMidX+'\t%.8f'%blobMidY+'\t%.8f'%polyArea+'\n')
                        if imageCounter == 0:
                            plt.savefig(IMGDIR+"/file_number%d"%data_num[i]+"_blob_snap.png")   # save the figure to file
                        imageCounter = imageCounter + 1
                        #print("blobConfidence=",blobConfidence,"insideTrialPoints=",insideTrialPoints)
                        blob_file = open(CONTDIR+"/file_number%d"%data_num[i]+"_contour_number_%d"%j+".txt", "w")
                        for k in range(len(x)):
                            blob_file.write('%.8f'%x[k]+'\t%.8f'%y[k]+'\n')
                        blob_file.close()
                elif (abs(x[0]-x[len(x)-1]) <= 1e-10):
                    None
                    # print('File number = %d'%data_num[i]+', contour number %d'%j+' = It is a sub-grid-sized closed contour')
                else:
                    None
                    # print('File number = %d'%data_num[i]+', contour number %d'%j+' = It is open line & NOT a blob')
                    #print(x,y)
                    #for k in range(len(x)):
                        #print(round(x[k],7),round(y[k],7))

            if blob_counter == 0:
                None
                # print("No blob found for file number = %d"%data_num[i])
            sleep(0.1)
            pbar.update(pstep)
            #plt.grid(True)
            ax1.set_xlabel("X",fontsize=14)
            ax1.set_ylabel("Y",fontsize=14)
            #ax1.tick_params(axis='both', which='major', labelsize=12)
            del ionDensityData
            del phiData

    if (show_anim == True):
        fig,ax1 = plt.subplots(1,1,figsize=(8,5),dpi=150)
        plt.rcParams["font.size"] = "12"
        plt.rcParams["font.family"] = "Times New Roman"
        #To keep the colorbar static:
        ionDensity=INDIR+'asdex_ion_GkM0_%d'%data_num[0]+'.bp'
        phi=INDIR+'asdex_phi_%d'%data_num[0]+'.bp'
        ionDensityData = pg.data.GData(ionDensity)
        phiData = pg.data.GData(phi)
        x_vals,y_vals,X,Y,ionDensityGrid,eyGrid = func_data(ionDensityData,phiData)
        cp1 = ax1.contourf(X, Y, ionDensityGrid, cnum, cmap=color)
        fig.colorbar(cp1)
        #TColorbar fixing completed:
        pstep = 1#len(data_num)/100
        pbar = tqdm(total=len(data_num))
        ani = animation.FuncAnimation(fig,animate,frames=len(data_num),interval=(1/fps)*1e+3,blit=False,repeat=False)
        ax1.set_xticks(x_vals , minor=True)
        ax1.set_yticks(y_vals , minor=True)
        ax1.grid(which='both')
        ax1.grid(which='minor', alpha=0.2, color='b', linestyle='--')
        #ax1.grid(b=True, which='major', color='b', linestyle='-')
        plt.show()
        if(save_anim == True):
            try:
                Writer = animation.writers['ffmpeg']
                writer = Writer(fps=fps, metadata=dict(artist='Me'), bitrate=1800)
            except RuntimeError:
                print("ffmpeg not available trying ImageMagickWriter")
                writer = animation.ImageMagickWriter(fps=fps)
            ani.save('animation.mp4')
    pbar.close()
    blob_size_file.close()
    return 0
Example No. 19
def pyview(argv):
    path = "data/"
    print('Welcome to PPDyn Visualization Toolkit')
    # parser = argparse.ArgumentParser(description='PPDyn Visualization Toolkit')
    # parser.add_argument('-p','--part', default='particle', type=str, help='data type particle')
    # parser.add_argument('-v','--view', action='store_true', help='Show Animation')
    # parser.add_argument('-s','--save', action='store_false', help='Save Animation')
    # args        = parser.parse_args()
    # data   = args.part
    # show_anim = args.view
    # save_anim = args.save
    inputFile = argv

    data = 'particle'
    params = ini.parse(open(argv).read())
    show_anim = bool(params['animate']['show_anim'])
    save_anim = bool(params['animate']['save_anim'])

    interval = 0.001  #in seconds

    h5 = h5py.File('data/' + data + '.hdf5', 'r')

    Lx = h5.attrs["Lx"]
    Ly = h5.attrs["Ly"]
    Lz = h5.attrs["Lz"]

    dp = h5.attrs["dp"]
    Nt = h5.attrs["Nt"]

    data_num = np.arange(start=0, stop=Nt, step=dp, dtype=int)

    if (show_anim == True):

        def animate(i):
            # file=path+'/data%d'%data_num[i]+'.dat'
            datax = h5["/%d" % data_num[i] + "/position/x"]
            datay = h5["/%d" % data_num[i] + "/position/y"]
            dataz = h5["/%d" % data_num[i] + "/position/z"]
            ax1.cla()
            img1 = ax1.scatter(datax,
                               datay,
                               dataz,
                               marker='o',
                               color='b',
                               alpha=1.0,
                               s=10)
            ax1.set_title('TimeSteps = %d' % i + '\n Phase Space')
            ax1.set_xlabel("$x$")
            ax1.set_ylabel("$y$")
            ax1.set_xlim([-Lx, Lx])
            ax1.set_ylim([-Ly, Ly])
            ax1.set_zlim([-Lz, Lz])

    if (show_anim == True):
        # fig,ax1 = plt.subplots(1,1,1, projection='3d')
        fig = plt.figure(figsize=(6, 6))
        ax1 = plt.axes(projection="3d")
        ani = animation.FuncAnimation(fig,
                                      animate,
                                      frames=len(data_num),
                                      interval=interval * 1e+3,
                                      blit=False)
        # ani.save('phase_space.gif',writer='imagemagick')
        plt.show()
        if (save_anim == True):
            try:
                Writer = animation.writers['ffmpeg']
                writer = Writer(fps=(1 / interval),
                                metadata=dict(artist='Me'),
                                bitrate=1800)
            except RuntimeError:
                print("ffmpeg not available trying ImageMagickWriter")
                writer = animation.ImageMagickWriter(fps=(1 / interval))
            ani.save('mdanimation3d.mp4')

    if show_anim == False and save_anim == False:
        print("You have not opted for showing or saving animation.")

    print("End of animation")