Example No. 1
	def eval_build_items(self,build_assign):
		level = build_assign.level
		hero_name = self.eval_hero_name(build_assign.hero_name)
		item_build = self.eval_item_dec(build_assign.items_dec)
		build = Build(hero_name, level)
		build.give_item_build(item_build)
		return build
Example No. 2
def main():

    # check we're not top level
    if os.path.exists('./application.py'):
        print('You cannot run these tools from the top level directory')
        sys.exit(1)

    # remove appstream
    if os.path.exists('./appstream'):
        shutil.rmtree('./appstream')
    if os.path.exists('./icons'):
        shutil.rmtree('./icons')

    # the status HTML page goes here too
    if not os.path.exists('./screenshots'):
        os.makedirs('./screenshots')

    files_all = glob.glob("./packages/*.rpm")
    files = _do_newest_filtering(files_all)
    files.sort()

    log = LoggerItem()
    job = Build()

    for f in files:
        log.update_key(f)
        try:
            job.build(f)
        except Exception as e:
            log.write(LoggerItem.WARNING, str(e))
    job.write_appstream()
Example No. 3
	def eval_build_value(self,build_assign):
		level = build_assign.level
		hero_name = self.eval_hero_name(build_assign.hero_name)
		item_build = self.eval_var_name(build_assign.items_name)
		if type(item_build).__name__ != "Item_Build":
			raise Exception("Variable: " + items_name + "isn't an item build")
		build = Build(hero_name, level)
		build.give_item_build(item_build)
		return build
Example No. 4
    def exec_build(self):
        if self._module is not None:
            try:
                b = getattr(self._module.plugin, 'Build')(self._config)
            except AttributeError:
                b = Build(self._config)
        else:
            b = Build(self._config)

        b.run()

        print('Successfully built the project.')
Example No. 5
    def exec_build(self, plugin):
        b = Build(self._config)
        b.build_project()

        if plugin:
            try:
                plugin.after_build()
            except AttributeError:
                pass

        print('Successfully built the project.')
Example No. 6
def watch(queue):
    while True:
        debug.set_prefix("build_watcher")
        try:
            project = queue.get()

            prepare_repository(project)
            build = Build(project)
            build.run()

        except ValueError as e:
            debug.exception("Error communicating with API", e)
        except GitError as e:
            debug.exception("Error with Git repository", e)
Example No. 7
def main():

    # remove appstream
    if os.path.exists('./appstream'):
        shutil.rmtree('./appstream')
    if os.path.exists('./icons'):
        shutil.rmtree('./icons')

    files = glob.glob("./packages/*.rpm")
    files.sort()

    job = Build()
    for f in files:
        try:
            job.build(f)
        except Exception as e:
            print('WARNING\t', f, str(e))
Example No. 8
def build(builds, original_text, settings, incremental, fmt):
    """
    Starts a build for this corpus. If it is already running,
    joins it. Messages from the build are received on a queue.
    """

    build = Build(original_text, settings)

    # Start build or listen to existing build
    if build.build_hash not in builds:
        log.info("Starting a new build")
        builds[build.build_hash] = build
        build.make_files()
        t = Thread(target=Build.run, args=[build, fmt])
        t.start()
    else:
        build = builds[build.build_hash]
        log.info("Joining existing build which started at %s" %
                  pretty_epoch_time(build.status_change_time))

    return join_build(build, incremental)
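
A minimal sketch of how the start-or-join pattern above might be driven, assuming a module-level builds registry shared across requests; handle_request, my_settings, and fmt are hypothetical names, not part of the original snippet:

builds = {}

def handle_request(original_text, my_settings, fmt):
    # A second request for the same corpus hashes to the same build and
    # joins it instead of starting a duplicate (assumed usage).
    return build(builds, original_text, my_settings, incremental=True, fmt=fmt)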
Example No. 9
def check_for_updates(data_store):

    url = 'https://www.jetbrains.com/updates/updates.xml'
    response = requests.get(url)
    root = eTree.fromstring(response.content)
    ide_list = vars(data_store)

    products = parse_product_xml(root)
    for ide in ide_list.values():
        product = products.get(ide.name)
        # skip IDEs with no entry in the update feed
        if product is None:
            continue
        for build in product:
            build_info = product[build]
            ide_build = Build()
            ide_build.build_number = build
            ide_build.download_url = build_info.get('download_url', "")
            ide_build.major_version = build_info['major_version']
            ide_build.status = build_info['status']
            if newer_build_available(ide.installed_version, build):
                ide.available_version = build
                print(build)
            ide.builds.append(ide_build)
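
For orientation, the loop above assumes parse_product_xml returns a nested mapping keyed by product name and build number; a hypothetical shape (illustrative values only) might be:

products = {
    "PyCharm": {
        "241.15989.155": {
            "download_url": "https://download.jetbrains.com/...",
            "major_version": "2024.1",
            "status": "release",
        },
    },
}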
Example No. 10
def build_create(repo_name, source, namespace, image_tag, commit_id):
    build = Build()
    build.create(repo_name, source, namespace, image_tag, commit_id)
Example No. 11
from build import Build
from item import Item
import combat
import matplotlib.pyplot as plt 



antimage = Build("Anti-Mage", 25, [4,4,4,3])
print(antimage.get_base_damage())
print(antimage.get_total_damage())
antimage.add_item_by_name("Monkey King Bar")
antimage.add_item_by_name("Demon Edge")
antimage.add_item_by_name("Butterfly")
antimage.add_item_by_name("Crystalys")

print(antimage.get_base_damage())
print(antimage.get_total_damage())


poor_little_lion = Build("Lion", 16, [4,4,4,3,1])
print(combat.calculate_damage(antimage, poor_little_lion))
print(combat.calculate_damage(antimage, antimage))

#stop hitting yourself
print(combat.calculate_average_damage(antimage, antimage))
# should be different from the above version.

print(combat.calculate_average_damage(poor_little_lion, antimage))
D = combat.calculate_hits_to_kill(antimage, poor_little_lion)
print(combat.calculate_hits_to_kill(antimage, antimage))
Example No. 12
	def __init__(self, *args, **kw):
		'''THIS IS A FACADE.
		
		See :class:`generate.build.Build` for details of *args and **kw
		'''
		self.build = Build(*args, **kw)
Example No. 13
class WMGenerator(object):
	def __init__(self, *args, **kw):
		'''THIS IS A FACADE.
		
		See :class:`generate.build.Build` for details of *args and **kw
		'''
		self.build = Build(*args, **kw)
	
	@property
	def unpackaged(self):
		'For legacy reasons: older code expects there to be a WMGenerator.unpackaged attribute'
		return self.build.unpackaged
	@property
	def packaged(self):
		'For legacy reasons: older code expects there to be a WMGenerator.packaged attribute'
		return self.build.packaged

	def run(self, script='unused argument'):
		self.build.add_steps(server_phases.prepare_config())
		self.build.add_steps(customer_phases.resolve_urls())
		self.build.add_steps(server_phases.copy_platform_source())
		if self.build.override_plugins:
			self.build.add_steps(server_phases.override_plugins())
		self.build.add_steps(server_phases.sensible_default_for_toolbar())
		self.build.add_steps(server_phases.copy_common_files())
		self.build.add_steps(server_phases.pre_create_all_js())
		if (set(['ie', 'chrome', 'safari', 'firefox']) & set(self.build.enabled_platforms)):
			self.build.add_steps(legacy_phases.create_all_js())
		self.build.add_steps(server_phases.post_create_all_js())
		self.build.add_steps(server_phases.remove_assets_forge())
		# -- TODO - mutating the build while adding phases is evil and should be reconsidered ----
		# XXX: Temporary server-side migration until we publicly deploy modules as plugins
		customer_tasks.migrate_to_plugins(self.build)
		self.build.add_steps(server_phases.platform_specific_templating(self.build))
		if (set(['ie', 'chrome', 'safari', 'firefox']) & set(self.build.enabled_platforms)):
			self.build.add_steps(legacy_phases.platform_specific_templating(self.build))
		# -- TODO --------------------------------------------------------------------------------
		self.build.add_steps(server_phases.add_plugins())
		self.build.add_steps(server_phases.minification())
		if getattr(self.build, "debug", False):
			self.build.add_steps(server_phases.copy_customer_source())
			self.build.add_steps(customer_phases.validate_user_source())
			self.build.add_steps(customer_phases.copy_user_source_to_template(debug=True))
			self.build.add_steps(customer_phases.include_platform_in_html(debug=True))
			self.build.add_steps(customer_phases.include_config(debug=True))
			self.build.add_steps(server_phases.handle_debug_output())
		else:
			self.build.add_steps(server_phases.platform_specific_build())
			self.build.add_steps(server_phases.handle_template_output())
			if not self.build.template_only:
				# TODO should this branch be handled by a predicate?
				self.build.add_steps(server_phases.copy_customer_source())
				self.build.add_steps(customer_phases.validate_user_source())
				self.build.add_steps(customer_phases.copy_user_source_to_template())
				self.build.add_steps(customer_phases.include_platform_in_html())
				self.build.add_steps(customer_phases.include_name())
				self.build.add_steps(customer_phases.include_uuid())
				self.build.add_steps(customer_phases.include_author())
				self.build.add_steps(customer_phases.include_description())
				self.build.add_steps(customer_phases.include_version())
				self.build.add_steps(customer_phases.include_requirements())
				self.build.add_steps(customer_phases.include_config())
				if (set(['ie', 'chrome', 'safari', 'firefox']) & set(self.build.enabled_platforms)):
					self.build.add_steps(legacy_phases.customer_phase())
				self.build.add_steps(customer_phases.run_plugin_build_steps(self.build))
				self.build.add_steps(customer_phases.make_installers())
			if getattr(self.build, "package", False):
				# TODO should this branch be handled by a predicate?
				self.build.log.info("we will be doing packaging too")
				self.build.add_steps(
					server_phases.copy_lib_files_to_template(self.build.source_dir)
				)
				self.build.add_steps(customer_phases.package(self.build.output_dir))

			self.build.add_steps(server_phases.handle_output())

		orig_dir = os.getcwd()
		temp_d = tempfile.mkdtemp()
		try:
			os.chdir(temp_d)
			self.build.run()
		finally:
			os.chdir(orig_dir)
			shutil.rmtree(temp_d, ignore_errors=True)
		
	def __repr__(self):
		return '<Build ({0})>'.format(', '.join(self.build.enabled_platforms))
Example No. 14
def summary(jobsdir, newerthan, jsonfile):

    # calculate age limit based on retention days,
    # builds older than this will be ignored whether
    # they are found in json or jobsdir.
    age_limit = (datetime.datetime.now() -
                 datetime.timedelta(days=RETENTION_DAYS))

    data = dict(builds={})
    # read data from json input file
    if os.path.exists(jsonfile):
        try:
            with open(jsonfile, 'r') as f:
                data = json.load(f)
        except Exception as e:
            print("Failed to read json file: {jsonfile}".format(
                jsonfile=jsonfile))
            traceback.print_exc()

    # Current production data.json has some extremely long failure detail
    # fields. This commit includes a change to failure.py to ensure
    # that doesn't happen in future. However to deal with the problem
    # on disk, we load and truncate the fields here.
    # At the end of this run, the data file will be rewritten with
    # truncated values, so this fix code will only be needed once.
    if "failures" in data:
        for id, failure in data['failures'].items():
            failure['detail'] = failure['detail'][:1000]

    # create set of build ids so we don't scan builds
    # we already have summary information about
    if "builds" in data:
        build_dict = {
            "{jn}_{bn}".format(jn=b['job_name'], bn=b['build_num']): b
            for b in data['builds'].values()
        }
    else:
        build_dict = {}

    # These dicts store builds and failures read in from
    # the input json file that will also be written
    # to the output json file.
    cached_builds = {}
    cached_failures = {}

    # walk the supplied dir, scan new builds
    parse_failures = 0
    build_files = list(
        enumerate([
            "{}/build.xml".format(root)
            for root, dirs, files in os.walk(jobsdir)
            if "build.xml" in files and ("PM_" in root or "PR_" in root)
        ]))
    for count, build in build_files:
        path_groups_match = re.search(
            ('^(?P<build_folder>.*/(?P<job_name>[^/]+)/'
             'builds/(?P<build_num>[0-9]+))/'), build)
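        # e.g. an illustrative path ".../jobs/PM_foo/builds/42/build.xml"
        # yields job_name="PM_foo" and build_num="42"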
        if path_groups_match:
            if (count % 100 == 0):
                gc.collect()
                total = len(build_files)
                print("{}/{} ({:.2f} %)".format(count, total,
                                                float(count / total) * 100))
            path_groups = path_groups_match.groupdict()
            job_name = path_groups['job_name']
            build_num = path_groups['build_num']
            key = "{job_name}_{build_num}".format(job_name=job_name,
                                                  build_num=build_num)
            if key in build_dict:
                try:
                    # build already cached, don't need to rescan
                    # But we do need to ensure that the cached data is age
                    # checked and added to a dict of cached items to be
                    # written out at the end.
                    b = build_dict[key]
                    if dateutil.parser.parse(b["timestamp"]) > age_limit:
                        cached_builds[b["id"]] = build_dict[key]
                        # ensure all referenced failures are also stored
                        for failure_id in b["failures"]:
                            print("f", end="")
                            f = data["failures"][failure_id]
                            cached_failures[f["id"]] = f
                    print("c", end="")
                    continue
                except Exception as e:
                    # failed to process cache, read the build log
                    # as if it wasn't cached.
                    # ! = cache read failure
                    print("cache failure: " + str(e))
                    print("!", end="")
            try:
                build = Build(build_folder=path_groups['build_folder'],
                              job_name=path_groups['job_name'],
                              build_num=path_groups['build_num'])
                if build.timestamp > age_limit:
                    # if build.failed:
                    # failed check removed, as not all failures are fatal
                    # especially those that relate to re infrastructure
                    # as we attempt to insulate those from affecting the
                    # build result. However measuring their frequency is
                    # still useful

                    # store the log in memory only as long as necessary
                    build.log_lines = build.read_logs()
                    Failure.scan_build(build)
                    build.log_lines = []
                    build_dict[key] = build
                    # . = build read ok
                    print(".", end="")
                    # print("OK: {key}\n".format(key=key))
                else:
                    # o = old
                    print("o", end="")
                    # print("Old Build: {key}\n" .format(key=key))
            except lxml.etree.XMLSyntaxError as e:
                print("\nFAIL: {key} {e}\n".format(key=key, e=e))
                parse_failures += 1
            except Exception as e:
                parse_failures += 1
                print("\nFAIL: {key} {e}\n".format(key=key, e=e))
                if ("can't parse internal" not in str(e)):
                    traceback.print_exc()

    print("\nbuilds: {} failures: {}".format(len(build_dict.keys()),
                                             parse_failures))

    # dump data out to json file
    # remove builds older than RETENTION_DAYS
    # ensure we only dump data newer than RETENTION_DAYS

    with open(jsonfile, "w") as f:

        cache_dict = dict(
            builds={
                id: build
                for id, build in Build.builds.items()
                if build.timestamp > age_limit
            },
            failures={
                id: f
                for id, f in Failure.failures.items()
                if f.build.timestamp > age_limit
            },
            timestamp=datetime.datetime.now(),
            retention_days=RETENTION_DAYS)
        # debug statements for combining previously cached
        # builds and failures with builds and failures
        # detected on this run
        print("\nNew Builds: {lcdb}"
              "\nNew Failures: {lcdf}"
              "\nBuilds carried forward: {lcb}"
              "\nFailures carried forward: {lcf}".format(
                  lcdb=len(cache_dict["builds"]),
                  lcdf=len(cache_dict["failures"]),
                  lcb=len(cached_builds),
                  lcf=len(cached_failures)))

        cache_dict["builds"].update(cached_builds)
        cache_dict["failures"].update(cached_failures)

        # convert objects to dicts for storage, this would be done
        # by serialise() but it's easier to do the integrity
        # check when all the values are of the same type.
        for id, build in cache_dict["builds"].items():
            if type(build) is not dict:
                cache_dict["builds"][id] = build.get_serialisation_dict()
        for id, failure in cache_dict["failures"].items():
            if type(failure) is not dict:
                cache_dict["failures"][id] = failure.get_serialisation_dict()

        def build_integrity_fail(id):
            print("Integrity fail for build: {}".format(id))
            del cache_dict["builds"][id]

        def failure_integrity_fail(id):
            print("Integrity fail for failure: {}".format(id))
            del cache_dict["failures"][id]

        # integrity check
        # it's important the data set is consistent as the
        # UI assumes consistency. It's better to remove a few
        # inconsistent items than have the whole UI die.
        for id, build in cache_dict["builds"].copy().items():
            try:
                if build["id"] != id:
                    build_integrity_fail(id)
                for failure in build["failures"]:
                    if failure not in cache_dict["failures"]:
                        build_integrity_fail(id)
                        break
            except Exception as e:
                print("Build integrity exception: " + str(e))
                build_integrity_fail(id)

        for id, failure in cache_dict["failures"].copy().items():
            try:
                if (failure["id"] != id
                        or failure["build"] not in cache_dict["builds"]):
                    failure_integrity_fail(id)
            except Exception:
                failure_integrity_fail(id)

        cache_string = serialise(cache_dict)
        f.write(cache_string)
Example No. 15
  def __init__(self, src_grid_file_name, dst_grid_file_name, online_flag, src_realdata_file_name):
    src_nc_obj = Loadnc(src_grid_file_name)
    self.src_grid_size, self.src_grid_corners, self.src_grid_rank, self.src_grid_dims, self.src_grid_center_lat, self.src_grid_center_lon, self.src_grid_imask = src_nc_obj.load()
    src_nc_obj.closenc()
    
    # set south pole pnts
    pole_num = 0
    self.pole_south = []
    self.pole_south_indx = []
    for i in range(len(self.src_grid_center_lat)):
      if self.src_grid_imask[i] == 1:
        self.pole_south.append((self.src_grid_center_lon[i], self.src_grid_center_lat[i]))
        self.pole_south_indx.append(i)
        pole_num += 1
      if pole_num == 10:
        break  
    self.pole_south_bnd = min([item[1] for item in self.pole_south])
    # set north pole pnts
    pole_num = 0
    self.pole_north = []
    self.pole_north_indx = []
    j = len(self.src_grid_center_lat)
    while True:
      j -= 1
      if self.src_grid_imask[j] == 1:
        self.pole_north.append((self.src_grid_center_lon[j], self.src_grid_center_lat[j]))
        self.pole_north_indx.append(j)
        pole_num += 1
      if pole_num == 10:
        break
    self.pole_north_bnd = max([item[1] for item in self.pole_north])

    # original grid info
    # used for remap matrix file
    self.original_src_grid_center_lat = copy.deepcopy(self.src_grid_center_lat)
    self.original_src_grid_center_lon = copy.deepcopy(self.src_grid_center_lon)
    self.original_src_grid_imask = copy.deepcopy(self.src_grid_imask)
     
    dst_nc_obj = Loadnc(dst_grid_file_name)
    self.dst_grid_size, self.dst_grid_corners, self.dst_grid_rank, self.dst_grid_dims, self.dst_grid_center_lat, self.dst_grid_center_lon, self.dst_grid_imask = dst_nc_obj.load()
    dst_nc_obj.closenc()
    
    self.stree_base_obj = Build(self.src_grid_size, self.src_grid_corners, self.src_grid_rank, self.src_grid_dims, self.src_grid_center_lat, self.src_grid_center_lon, self.src_grid_imask)
    self.recovery_indx_table, self.stree = self.stree_base_obj.grow()
    
    self.src_grid_name = src_grid_file_name.split('/')[-1].split('.')[0]
    self.dst_grid_name = dst_grid_file_name.split('/')[-1].split('.')[0]
     
    #self.interp_wgt = []
    #self.interp_box_indx = []
    #self.interp_box = []
    self.remap_matrix = []
    self.remap_matrix_indx = []
    
    self.remap_matrix_compact = []
    self.remap_src_indx = []
    self.remap_dst_indx = []
    
    # load real data if online remapping
    # self.src_data = []
    if online_flag:
      src_data_nc_obj = Loadreal(src_realdata_file_name)
      size, self.src_data = src_data_nc_obj.load()
      if size != self.src_grid_size:
        print('Real data size does not match grid size.')
        sys.exit()
      src_data_nc_obj.closenc()
    
    self.dst_data = [] 
Example No. 16
	def test_build_from_config_file_value(self):
		config = Build()
		self.assertEqual("com.piscessera.gen", config.get_pkg_name())
		self.assertEqual("gen_db", config.get_db_name())
Example No. 17
File: make.py Project: btanasoi/fvm
def main():

    parser = OptionParser()
    parser.set_defaults(verbose=0)
    parser.add_option("--build", action="store_true")
    parser.add_option("--test", action="store_true")
    parser.add_option("--update", action="store_true")
    parser.add_option("--submit", action="store_true")
    parser.add_option("--all", action="store_true")
    parser.add_option("-v", "--verbose", action="count")
    parser.add_option("-d", "--debug", action="store_true")
    parser.add_option("--nocolor", action="store_true")
    parser.add_option("--nightly", action="store_true")
    parser.add_option("--clean", action="store_true")
    parser.add_option("--jobs", "-j")
    parser.add_option("-s", type="string")
    (options, args) = parser.parse_args()

    make_path = os.path.abspath(os.path.dirname(os.path.realpath(sys.argv[0])))
    cwd = os.getcwd()


    if options.nightly:
        options.update = options.test = options.submit = True

    if options.all or options.test:
        options.build = True

    cname = ''
    if len(args) == 1:
        cname = args[0]
    if cname == '':
        usage()

    sdir = ''
    if options.s:
        sdir = options.s
    else:
        if os.path.isdir(os.path.join(cwd, 'config')):
            sdir = cwd
        elif os.path.islink(sys.argv[0]):
            sdir = os.path.dirname(sys.argv[0])

    if sdir == '' or not config.read(os.path.join(sdir, 'config'), cname):
        usage()

    build_utils.set_options(options)
    if options.all:
        os.system("/bin/rm -rf %s" % os.path.join(cwd, "build-%s" % cname))

    cmd = config.config('ALL', 'before')
    if cmd and '_MEMOSA_MAKE' not in os.environ:
        cmd = ';'.join(cmd)
        os.environ['_MEMOSA_MAKE'] = '1'
        ret = os.system("/bin/bash -l -c '%s;%s'" %(cmd, ' '.join(sys.argv)))
        sys.exit(ret>>8)

    bld = Build(cname, sdir, make_path)

    # CLEAN
    if options.clean or options.all:
        for p in bld.all_packages:
            p.clean()

    build_utils.fix_path('PATH', bld.bindir, 1, 0)
    build_utils.fix_path('LD_LIBRARY_PATH', bld.libdir, 1, 0)
    build_utils.fix_path('LD_LIBRARY_PATH', os.path.join(bld.blddir, "lib64")   , 1, 0)   
    build_utils.fix_path('C_INCLUDE_PATH', bld.incdir, 1, 0)     
    build_utils.fix_path('CPLUS_INCLUDE_PATH', bld.incdir, 1, 0)         
    
    os.environ['MEMOSA_HOME'] = bld.blddir
    os.environ['MEMOSA_CONFNAME'] = cname
    build_start_time = build_end_time = test_start_time = test_end_time = 0
    oldpypath = os.environ.get('PYTHONPATH', '')
    build_utils.set_python_path(bld.blddir)

    # if no options, default to build
    if not options.build and not options.test and not options.submit \
            and not options.update and not options.clean:
        options.build = True

    if options.build:
        # Remove all test results.  They are now invalid
        os.system("/bin/rm -f %s/*.xml" % bld.logdir)

    # UPDATE
    if options.update:
        update.update(bld, cname, options.nightly)

    # BUILDING
    build_failed = 0

    if options.build and bld.packages == []:
        print "No packages need built."

    if options.build and bld.packages != []:
        build_start_time = time.time()
        open(bld.logdir + '/StartBuildTime', 'w').write(str(build_start_time))
        for p in bld.packages:
            try:
                p.build()
            except build_utils.CompileException:
                build_failed = 1
                break
            except:
                traceback.print_exc()
                build_failed = 1
                break
        build_end_time = time.time()
        open(bld.logdir + '/EndBuildTime', 'w').write(str(build_end_time))

        # write out env.[c]sh
        env_name = build_utils.write_env(bld, cwd, cname)

        if not build_failed:
            print "\nDone with building.\nYou need to source %s to use this build.\n" % env_name

    # make sure we are back in the original directory
    os.chdir(cwd)

    # set package list for testing and submit
    bld.packages = []
    bld.build_pkg_list(False)
    bld.done()

    # TESTING
    if build_failed==0 and options.test and not pbs.start(bld, cname) and not moab.start(bld, cname):
        testing.run_all_tests(bld)

    # SUBMIT
    if options.submit:
        cdash.submit(bld, cname, sys.argv, options.nightly)

    if not options.test:
        build_utils.run_commands('ALL', 'after')

    build_utils.fix_path('LD_LIBRARY_PATH', bld.libdir, 1, 1)
    if oldpypath:
        os.environ['PYTHONPATH'] = oldpypath
    else:
        del os.environ['PYTHONPATH']
    build_utils.fix_path('PATH', bld.bindir, 1, 1)

    sys.exit(build_failed)
Example No. 18
from orm_email import OrmEmail
from interview import Interview
from interview_slot import InterviewSlot


# Get user input to reach functions
user_input = input("Hello! Would you like to:\
                   \n1. Set up your database (WARNING: this will delete all your data!)\
                   \n2. Set up e-mail stuff\
                   \n3. Update new applicants with code and closest school\
                   \n4. Schedule interviews\
                   \n5. Send e-mails to new applicants\
                   \n")

if user_input == "1":
    Build.create_tables()
    Build.upload_data()

elif user_input == "2":
    Connection.set_smtp()

elif user_input == "3":
    Applicant.get_closest_school()
    Applicant.update_appl_code()

elif user_input == "4":
    InterviewSlot.schedule()

elif user_input == "5":
    msg_list = OrmEmail.create_newappl_msg()
    OrmEmail.send(msg_list)
Example No. 19
class Controller:
    def __init__(self):
        self.cfg = config.Config()
        self.view = View(self.LblLeftClickHandler, self.LblRightClickHandler,
                         self.NotesUpdatedHandler, self.ViewToggleHandler)
        self.view_format = ViewFormat.LABEL_ONLY
        self.view.SetViewFormat(self.view_format)
        self.villain = None
        self.build = None
        self.error_text = None
        self.game_time = 0.0
        self.view.AddTask(self.PollAndUpdate, 1000)

    # client api:
    # https://us.battle.net/forums/en/sc2/topic/20748195420
    def PollAndUpdate(self):
        self.error_text = None
        try:
            response = requests.get("http://localhost:6119/game")
            game_info = json.loads(response.text)
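            # Assumed rough shape of the client API response, inferred
            # from the fields read below (illustrative only):
            #   {"displayTime": 123.4,
            #    "players": [{"name": "SomePlayer", "race": "Zerg"}]}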
            self.game_time = game_info['displayTime']
            for player in game_info['players']:
                name = player['name']
                if self.villain is not None and name == self.villain.name:
                    break
                elif name != self.cfg.hero:
                    self.villain = Villain(name, self.cfg.data_dir)
                    self.build = Build(player['race'], self.cfg.data_dir)
                    self.view.SetNotesText(self.villain.GetNotes())
                    break
            else:
                self.error_text = 'No villain found.'
        except Exception as e:
            self.error_text = "Can't connect to API."
            print(e)
        self.UpdateView(self.view_format)

    def UpdateView(self, view_format):
        # Try to change view format.
        if self.error_text is not None:
            if view_format == ViewFormat.TAB:
                self.view_format = view_format
            else:
                self.view_format = ViewFormat.LABEL_ONLY
        elif view_format == ViewFormat.TAB or view_format == ViewFormat.LABEL_ONLY:
            self.view_format = view_format
        elif view_format == ViewFormat.NOTES_VIEW:
            if self.villain is not None:
                self.view_format = view_format
        elif view_format == ViewFormat.BUILD_VIEW:
            if self.build is not None:
                self.view_format = view_format

        # Set label text
        if self.view_format == ViewFormat.TAB:
            self.view.SetLabelText('')
        elif self.error_text is not None:
            self.view.SetLabelText(self.error_text)
        elif self.view_format == ViewFormat.BUILD_VIEW:
            self.view.SetLabelText('{}: {}'.format(self.build.race,
                                                   self.build.name))
        else:
            self.view.SetLabelText(self.villain.name)

        # Other stuff
        if self.view_format == ViewFormat.BUILD_VIEW:
            self.view.SetBuildText(
                past=self.build.GetBuildText(before=self.game_time),
                future=self.build.GetBuildText(earliest=self.game_time))
        self.view.SetViewFormat(self.view_format)

    def Run(self):
        self.view.Run()

    # Make us smaller.
    def LblLeftClickHandler(self, e):
        if self.view_format == ViewFormat.TAB:
            pass
        elif self.view_format == ViewFormat.LABEL_ONLY:
            self.UpdateView(ViewFormat.TAB)
        else:
            self.UpdateView(ViewFormat.LABEL_ONLY)

    # Make us bigger.
    def LblRightClickHandler(self, e):
        if self.view_format == ViewFormat.TAB:
            self.UpdateView(ViewFormat.LABEL_ONLY)
        elif self.view_format == ViewFormat.LABEL_ONLY:
            self.UpdateView(ViewFormat.NOTES_VIEW)

    def NotesUpdatedHandler(self, e):
        self.villain.SaveNotes(self.view.GetVillainNotes())

    def ViewToggleHandler(self):
        if self.view_format == ViewFormat.NOTES_VIEW:
            self.UpdateView(ViewFormat.BUILD_VIEW)
        elif self.view_format == ViewFormat.BUILD_VIEW:
            self.UpdateView(ViewFormat.NOTES_VIEW)
Example No. 20
	def test_build_from_config_file_exist(self):
		config = Build()
		self.assertEqual(True, config.is_config_file_exist())
Example No. 21
            log_data['repository_type'] = repository.identify_repository_type(repository.full_path)

            logging.info("repository: " + log_data['repository'])
            if (log_data['is_head'] == 1):
                logging.info("building branch/revision: " + log_data['branch'] + '/' + log_data['revision'] + ' (HEAD)')
            else:
                logging.info("building branch/revision: " + log_data['branch'] + '/' + log_data['revision'])


            build_dir_name = str(current_time).replace('-', '') + '_bf_' + log_data['branch']

            # the config module ensures that all necessary --run-* options are set
            build_dir = repository.copy_repository(build_dir_name, log_data['branch'], log_data['revision'])

            build = Build(config, repository, build_dir)

            # test if ports for regression tests are available
            if (build.portcheck(log_data['repository_type'], log_data) is True):

                result_configure = build.run_configure(log_data['extra_configure'], build_dir_name, log_data)
                build.add_entry_to_delete_clean(build_dir)
                # FIXME: Orca


                if (result_configure is True):
                    result_make = build.run_make(log_data['extra_make'], log_data)

                    if (result_make is True):
                        install_dir = build.run_make_install(log_data['extra_install'], log_data, log_data['extra_make'])
                        if (install_dir is not False):
Example No. 22
	def generate(self):
		print("Start generate file..")
		build = Build()
		# open the template file
		core_tpl = open("template/DatabaseCore.java.tpl")
		core_new = []
		for line in core_tpl:
			# set package name
			line = line.replace("CONFIG_PKG_NAME", build.get_pkg_name())
			# set db name
			line = line.replace("CONFIG_DB_NAME", build.get_db_name())
			core_new.append(line)
		# close file
		core_tpl.close()
		# "w+" creates the file if it does not exist
		core = open("gen/database/DatabaseCore.java", "w+")
		for line in core_new:
			core.write(line)
		core.close()
		print("End generate new file..")
		print("Prepare to generate database helper file..")

		db = SQLiteDatabase()
		cur_tbl = db.connect(build.get_db_name())
		cur_tbl.execute(db.get_database_structure_sql())
		for row in cur_tbl:
			# generate a domain class for each user table
			if row[0] != "sqlite_sequence" and row[0] != "android_metadata":
				cur_col = db.connect(build.get_db_name())
				cur_col.execute(db.get_table_schema_sql(row[0]))
				sql = cur_col.fetchone()
				p = re.compile(r'"(.*?)"')
				# find value in quote by regex
				m = p.findall(sql[0])
				# create class name
				cls_name = row[0].replace("_", " ")
				cls_name = cls_name.title()
				cls_name = cls_name.replace(" ", "")
				# create domain file
				domain = open("gen/domain/"+cls_name+".java", "w+")
				domain.write("package " + build.get_pkg_name() + ".domain")
				domain.write("\n")
				domain.write("\n")
				domain.write("public class "+cls_name+" {")
				domain.write("\n")
				domain.write("\n")
				# loop for creating column variable
				for tbl in m:
					if tbl != row[0]:
						domain.write("public static final String " + tbl.upper() + " = \" " + tbl + " \";\n")

				# clean sql to normal form
				col_datatype = sql[0].replace("CREATE TABLE", "")
				col_datatype = col_datatype.replace("PRIMARY KEY", "")
				col_datatype = col_datatype.replace("AUTOINCREMENT", "")
				col_datatype = col_datatype.replace("NOT NULL", "")
				col_datatype = col_datatype.replace("UNIQUE", "")
				col_datatype = col_datatype.replace(row[0], "")
				col_datatype = col_datatype.replace("(", "")
				col_datatype = col_datatype.replace(")", "")
				col_datatype = col_datatype.replace("\"", "")
				col_datatype = col_datatype.replace(" ", "")
				col_datatype_list = col_datatype.split(",")
				
				domain.write("\n")
				variable = ""
				get_str = ""
				set_str = ""
				index = 1
				for datatype in col_datatype_list:
					variable += "private "
					get_str += "public "
					set_str += "public "
					# clean datatype
					datatype = datatype.replace(m[index], "")
					# variable
					variable += get_datatype_str(datatype) + " "
					variable += m[index]+";\n"

					method_name = m[index]
					if method_name[0] != "_":
						method_name = m[index].replace("_", " ")
						method_name = method_name.title()
						method_name = method_name.replace(" ", "")

					# get
					get_str += get_datatype_str(datatype) + " " + "get"
					get_str += method_name + " { return this." 
					get_str += m[index] + "; } \n"
					# set
					set_str += "void set" + method_name + "(" + get_datatype_str(datatype)  + " " 
					set_str += m[index] + "){ this." + m[index] + " = " + m[index] + "; }\n"
					index = index + 1

				domain.write(variable)
				domain.write("\n")
				domain.write(get_str)
				domain.write("\n")
				domain.write(set_str)
				domain.write("\n")
				domain.write("}")
						
				
		db.close()
Example No. 23
class Interp(Exception):
  
  def __init__(self, src_grid_file_name, dst_grid_file_name, online_flag, src_realdata_file_name):
    src_nc_obj = Loadnc(src_grid_file_name)
    self.src_grid_size, self.src_grid_corners, self.src_grid_rank, self.src_grid_dims, self.src_grid_center_lat, self.src_grid_center_lon, self.src_grid_imask = src_nc_obj.load()
    src_nc_obj.closenc()
    
    # set south pole pnts
    pole_num = 0
    self.pole_south = []
    self.pole_south_indx = []
    for i in range(len(self.src_grid_center_lat)):
      if self.src_grid_imask[i] == 1:
        self.pole_south.append((self.src_grid_center_lon[i], self.src_grid_center_lat[i]))
        self.pole_south_indx.append(i)
        pole_num += 1
      if pole_num == 10:
        break  
    self.pole_south_bnd = min([item[1] for item in self.pole_south])
    # set north pole pnts
    pole_num = 0
    self.pole_north = []
    self.pole_north_indx = []
    j = len(self.src_grid_center_lat)
    while True:
      j -= 1
      if self.src_grid_imask[j] == 1:
        self.pole_north.append((self.src_grid_center_lon[j], self.src_grid_center_lat[j]))
        self.pole_north_indx.append(j)
        pole_num += 1
      if pole_num == 10:
        break
    self.pole_north_bnd = max([item[1] for item in self.pole_north])

    # original grid info
    # used for remap matrix file
    self.original_src_grid_center_lat = copy.deepcopy(self.src_grid_center_lat)
    self.original_src_grid_center_lon = copy.deepcopy(self.src_grid_center_lon)
    self.original_src_grid_imask = copy.deepcopy(self.src_grid_imask)
     
    dst_nc_obj = Loadnc(dst_grid_file_name)
    self.dst_grid_size, self.dst_grid_corners, self.dst_grid_rank, self.dst_grid_dims, self.dst_grid_center_lat, self.dst_grid_center_lon, self.dst_grid_imask = dst_nc_obj.load()
    dst_nc_obj.closenc()
    
    self.stree_base_obj = Build(self.src_grid_size, self.src_grid_corners, self.src_grid_rank, self.src_grid_dims, self.src_grid_center_lat, self.src_grid_center_lon, self.src_grid_imask)
    self.recovery_indx_table, self.stree = self.stree_base_obj.grow()
    
    self.src_grid_name = src_grid_file_name.split('/')[-1].split('.')[0]
    self.dst_grid_name = dst_grid_file_name.split('/')[-1].split('.')[0]
     
    #self.interp_wgt = []
    #self.interp_box_indx = []
    #self.interp_box = []
    self.remap_matrix = []
    self.remap_matrix_indx = []
    
    self.remap_matrix_compact = []
    self.remap_src_indx = []
    self.remap_dst_indx = []
    
    # load real data if online remapping
    # self.src_data = []
    if online_flag:
      src_data_nc_obj = Loadreal(src_realdata_file_name)
      size, self.src_data = src_data_nc_obj.load()
      if size != self.src_grid_size:
        print('Real data size does not match grid size.')
        sys.exit()
      src_data_nc_obj.closenc()
    
    self.dst_data = [] 
  
  # for mpi use
  def dst_distribute(self, rank, size):
    # load balance, grid size reps load
    load_sum = self.dst_grid_size
    load = load_sum // size
    if load_sum % size:
      if rank == size - 1:
        load = load_sum - load * (size - 1)
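    # e.g. load_sum = 10, size = 4: ranks 0-2 take 2 points each and the
    # last rank takes the remaining 4 (illustrative numbers)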
    # dst_grid_dims dst_grid_size is changed in mpi case, but no place using it, so let it be..
    # self.dst_grid_dims = self.dst_grid_dims
    # self.dst_grid_size = load
    start_indx = (load_sum // size) * rank
    self.dst_grid_center_lat = self.dst_grid_center_lat[start_indx : start_indx + load] 
    self.dst_grid_center_lon = self.dst_grid_center_lon[start_indx : start_indx + load]
    self.dst_grid_imask = self.dst_grid_imask[start_indx : start_indx + load] 
  
  # for mpi use
  def dst_merge(self, rank, comm):
    self.remap_matrix = comm.gather(self.remap_matrix, root = 0)
    self.remap_matrix_indx = comm.gather(self.remap_matrix_indx, root = 0)
    self.dst_grid_center_lat = comm.gather(self.dst_grid_center_lat, root = 0)
    self.dst_grid_center_lon = comm.gather(self.dst_grid_center_lon, root = 0)
    self.dst_grid_imask = comm.gather(self.dst_grid_imask, root = 0)
    if rank == 0:
      self.remap_matrix = [val for item in self.remap_matrix for val in item] 
      self.remap_matrix_indx = [val for item in self.remap_matrix_indx for val in item]
      self.dst_grid_center_lat = [val for item in self.dst_grid_center_lat for val in item]
      self.dst_grid_center_lon = [val for item in self.dst_grid_center_lon for val in item]
      self.dst_grid_imask = [val for item in self.dst_grid_imask for val in item]

  def check_wgt(self, wgt):
    for item in wgt:
      if item > 2 or item < -2:
        print(item)
        print('wgt is invalid')
        sys.exit()
  
  def check_wgtsum(self, wgt):
    lsum = 0.0
    for item in wgt:
      lsum += item
    if abs(lsum - 1.0) > 0.3:
      print(lsum)
      print('sum of local wgts is invalid')
      sys.exit()
  
  # decide if all indx cells are masked out
  def check_all_masks(self, indx, n):
    checksum = 0
    for i in indx:
      if self.src_grid_imask[i] == 0:
        checksum += 1
    if checksum == n:
      return True
    else:
      return False
  
  def indx_recovery(self, indx_lst):
    tmp_indx = [] 
    for i in indx_lst:
      if i >= self.src_grid_size:
        print('recovering ghost index.')
        tmp_indx.append(self.recovery_indx_table[i])
      else:
        tmp_indx.append(i)
    indx_lst = tmp_indx
    return indx_lst

  def indx_lrec_recovery(self, indx_lst):
    tmp_indx = []
    for i in indx_lst:
      if i >= self.src_grid_size:
        print('recovering ghost index.')
        if (i // self.src_grid_dims[1]) == 1:
          offset = 0
        else:
          offset = self.src_grid_dims[0] - 1
        tmp_indx.append((i % self.src_grid_dims[1]) * self.src_grid_dims[0] + offset)
      else:
        tmp_indx.append(i)
    indx_lst = tmp_indx
    return indx_lst
      
  # virtual function to do calc wgts  
  def interp(self):
    pass
     
  def compact_remap_matrix(self):
    i = 0
    k = 0
    for matrix_item in self.remap_matrix:
      if matrix_item:
        j = 0
        for wgt in matrix_item:
          self.remap_matrix_compact.append(wgt)
          self.remap_src_indx.append(self.remap_matrix_indx[i][j])
          self.remap_dst_indx.append(k)
          j += 1
      #else:
      #  self.remap_dst_indx.append(k)
      k += 1
      i += 1  
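    # Worked example (illustrative): remap_matrix = [[0.5, 0.5], [], [1.0]]
    # with remap_matrix_indx = [[2, 3], [], [7]] compacts to
    # remap_matrix_compact = [0.5, 0.5, 1.0], remap_src_indx = [2, 3, 7],
    # remap_dst_indx = [0, 0, 2] (the empty row means dst index 1 is skipped)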
    
  # virtual function to interpolate data 
  def remap(self):
    # init dst_data list as 0.0
    print(max(self.remap_dst_indx))
    for i in range(max(self.remap_dst_indx) + 1):
      self.dst_data.append(0.0)
    # interpolate    
    for i in range(len(self.remap_matrix_compact)):
      self.dst_data[self.remap_dst_indx[i]] += self.remap_matrix_compact[i] * self.src_data[self.remap_src_indx[i]]
    return self.dst_data

  # for mpi use
  # parallelize with rows
  # rank 0 measure load of remapping step
  # only rank 0 needs to exec 
  # [1, 1, 1, 2, 2, 2, 2, 4, 4, 5, 5, 5, 5, 6, 6, 6] -> [0:3], [3:7], [7:9], [9:13], [13:16] -> [0, 3, 7, 9, 13, 16]
  def learn(self, size):
    tmp = list(set(self.remap_dst_indx))
    learn_lst = [0]
    load_sum = len(tmp)
    load = load_sum // size
    if load_sum % size:
      last_load = load_sum - load * (size - 1)
    else:
      last_load = load
    j = 0
    cnt = 1
    rank_cnt = 0
    for i in range(len(self.remap_dst_indx)):
      if self.remap_dst_indx[i] != tmp[j]:
        if cnt == load:
          if rank_cnt == size - 1:
            break
          rank_cnt += 1
          learn_lst.append(i)
          cnt = 0
        cnt += 1
        j += 1
    learn_lst.append(len(self.remap_dst_indx))
    return learn_lst
    
  # for mpi use
  # only rank 0 needs to exec
  # [0, 3, 7, 9, 16] -> [0:3] to rank 0
  #                  -> [3:7] to rank 1
  #                  -> [7:9] to rank 2
  #                  -> [9:16] to rank 3
  def deliver(self, deliver_disp, rank, size, comm):
    if rank == 0:
      for i in range(1, size):
        buf1 = self.remap_dst_indx[deliver_disp[i] : deliver_disp[i + 1]]
        buf2 = self.remap_src_indx[deliver_disp[i] : deliver_disp[i + 1]]
        buf3 = self.remap_matrix_compact[deliver_disp[i] : deliver_disp[i + 1]]
        comm.send(buf1, dest = i, tag = i)
        comm.send(buf2, dest = i, tag = 2 * i + 1)
        comm.send(buf3, dest = i, tag = 3 * i + 1)
      self.remap_dst_indx = self.remap_dst_indx[deliver_disp[0] : deliver_disp[1]]
      self.remap_src_indx = self.remap_src_indx[deliver_disp[0] : deliver_disp[1]]
      self.remap_matrix_compact = self.remap_matrix_compact[deliver_disp[0] : deliver_disp[1]]
    else:
      self.remap_dst_indx = comm.recv(source = 0, tag = rank)
      self.remap_src_indx = comm.recv(source = 0, tag = 2 * rank + 1)
      self.remap_matrix_compact = comm.recv(source = 0, tag = 3 * rank + 1)