def main():
    """Run commands given on the command line, or drop into interactive mode.

    All CLI arguments are joined into one string and split on ';' so a
    single invocation can carry several commands.
    """
    if len(sys.argv) > 1:
        commands = ' '.join(sys.argv[1:]).split(';')
        for command in commands:
            engine.run(command, voice_enable=False)
            # Visual separator between the outputs of consecutive commands.
            print('{0:=>45}'.format(' '))
    else:
        engine.interactive_input()
def main():
    """Prompt the user for connection parameters and start the engine."""
    engine.init()
    # Render the banner in the warning color, then reset.
    print(colors.WARNING)
    banner()
    print(colors.ENDC)
    ip = input(colors.OKBLUE + '[---] IP : ' + colors.ENDC)
    port = int(input(colors.OKBLUE + '[---] PORT : ' + colors.ENDC))
    victim = input(colors.OKBLUE + '[---] IP OF VICTIM : ' + colors.ENDC)
    engine.run(ip, port, victim)
def main():
    """Streamlit entry point: pick a crowd-density model and render results.

    The two menu options shared an identical UI flow and differed only in
    which model key ('A' for dense images, 'B' for non-dense) was passed to
    the engine, so the duplicated branch bodies are factored into
    _render_result().
    """
    menu = ['Dense Image', 'Non-dense Image']
    choice = st.sidebar.selectbox('Options', options=menu)
    model_key = 'A' if choice == 'Dense Image' else 'B'
    _render_result(choice, model_key)


def _render_result(choice, model_key):
    """Upload an image, run the selected model, and show heatmap + count.

    choice    -- selected menu entry; reused as the uploader widget key so
                 each option keeps its own uploader state
    model_key -- 'A' (dense) or 'B' (non-dense), forwarded to engine.run
                 and save_uploadedfile
    """
    st.markdown("""<h2 style='color:#003399'><b>Result</b></h2>""", unsafe_allow_html=True)
    image_file = st.sidebar.file_uploader(' ', type=['jpeg', 'png', 'jpg'], key=choice)
    if image_file is not None:
        col1, col2 = st.beta_columns(2)
        with col1:
            st.image(load_image(image_file), use_column_width='auto', caption='Uploaded image')
            save_uploadedfile(image_file, model_key)
        with col2:
            image_heat = engine.run(image_file, model_key)
            fig = plt.figure(figsize=(3, 2))
            plt.imshow(image_heat, cmap=plt.cm.jet)
            plt.axis('off')
            st.pyplot(fig)
            # The density map sums to the estimated number of people.
            st.write('Predicted number of people:', round(np.sum(image_heat)))
def run(log_level, torrent_path, listening_port, download_dir):
    """Configure logging, load the torrent, and hand it to the engine.

    log_level      -- logging level name (e.g. 'debug'); WARNING when falsy
    torrent_path   -- path to the .torrent file
    listening_port -- port to listen on; also names the log file
    download_dir   -- destination directory; defaults to this file's directory
    """
    if log_level:
        level = getattr(logging, log_level.upper())
    else:
        level = logging.WARNING
    # TODO - directory shouldn't be hardcoded
    logfile = "tmp/{}.log".format(listening_port)
    logging.basicConfig(
        filename=logfile,
        level=level,
        format="%(asctime)s %(levelname)s %(filename)s:%(lineno)d `%(funcName)s` -- %(message)s",
    )
    torrent_data, torrent_info = read_torrent_file(torrent_path)
    if not download_dir:
        download_dir = os.path.dirname(os.path.abspath(__file__))
    port = int(listening_port) if listening_port else None
    torrent = Torrent(torrent_data, torrent_info, download_dir, port)
    engine.run(torrent)
def main():
    """Numerically integrate via engine.run over [-10000, 10000] with step 1e-5."""
    lower_limit = -10000
    upper_limit = 10000
    step = 10 ** (-5)
    # Number of sub-intervals required to cover the full range.
    step_count = math.ceil((upper_limit - lower_limit) / step)
    integral = engine.run(0, step_count, lower_limit, step)
    print("the value of the integral is: " + str(integral))
def main():
    """Transform the R_INV_1 rule, run it through the engine, then undo the transforms."""
    code = ast_example.rule['R_INV_1']
    # Forward transformations before evaluation.
    code = preprocess.add_field_declaration(code)
    code = preprocess.warp_function(code)
    print(engine.run(code))
    # Reverse the transformations applied above.
    code = preprocess.un_warp_function(code)
    code = preprocess.remove_field_declaration(code)
def optFunc(args):
    """Configure a flow from the hyper-parameters in *args*, run it, and
    return the Innovus timing result for the produced checkpoint."""
    flow = MyFlow()
    flow.flow()
    # Synthesis knobs.
    flow.params_syn.extend([
        ("effort", args["syn_effort"]),
        ("is_incremental", args["syn_inc"]),
        ("spatial", args["syn_spa"]),
    ])
    # Placement knobs.
    flow.params_place.extend([
        ("concurrent_macros", args["place_con"]),
        ("incremental", args["place_inc"]),
        ("noPrePlaceOpt", args["place_nop"]),
    ])
    # Routing knobs.
    flow.params_route.extend([
        ("globalDetail", args["route_gd"]),
        ("clockEco", args["route_clock"]),
    ])
    ckpt = engine.run(design, flow)
    return util.getResult(ckpt, "Timing", "Innovus")
def main():
    """Parse the command line, run verification, and print an SVCOMP verdict.

    Returns 0 unconditionally; the verdict goes to stdout.
    """
    args = vars(parse_command_line())
    design_file = args['program']
    output_dir = args['output']
    proof = engine.run(design_file, output_dir)
    # Simple mapping for SVCOMP: anything other than Pass/Error is UNKNOWN.
    result = {'Pass': "TRUE", 'Error': "FALSE"}.get(proof, 'UNKNOWN')
    print("")
    print("Verification Result: " + result)
    print("")
    return 0
def test_validar_funcao_run(self):
    """End-to-end check of engine.run(): queue payload templates on SQS,
    run the engine, and assert the generated pipeline templates exist in S3."""
    self.create_sqs()
    s3 = self.create_bucket()
    self.carga_do_template_da_pipeline_na_tabela_dynamodb()
    bucket = s3_bucket.split('.')[0].replace('https://', '')
    payloads = ['payload-ecs']
    payload_files = [
        'payload_1.yml',
    ]
    for payload in payloads:
        for filename in payload_files:
            # NOTE(review): this path previously contained a literal
            # "(unknown)" placeholder; it should reference the payload file
            # being iterated — confirm against the tests/ directory layout.
            path_filename = f"tests/{payload}/{filename}"
            # Context manager guarantees the template file is closed.
            with open(path_filename) as f_template:
                yml_template = f_template.read()
            json_template = change_yml_to_json(yml_template)
            send_payload = {
                'payload': json_template,
                'requestID': 'xxxx-xxxx-xxxx',
                'account': filename
            }
            sqs_send(filas['processing'], send_payload)
    # Create the pipeline.
    template_name = engine.run()
    print(template_name)
    # Verify the templates were created in the bucket.
    lista_pipelines = [
        'Pipeline-Python-develop-payload_1.yml.json',
        'Pipeline-Python-master-payload_1.yml.json'
    ]
    s3 = boto3.resource('s3')
    my_bucket = s3.Bucket(bucket)
    for object_summary in my_bucket.objects.filter():
        print("=====>", object_summary.key)
        assert object_summary.key in lista_pipelines
def evaluate():
    """Flask endpoint: persist the posted script as an importable module,
    run it against the client's two camera views, push the new position over
    the client's websocket, and return the engine result as JSON.

    On any failure the traceback is returned as HTML-ish text in 'Error'.
    """
    try:
        filename = str(time.time()).replace('.', '-')
        req = request.get_json()
        # Persist the submitted code so it can be imported below.
        with open('scripts/{0}.py'.format(filename), 'w') as f:
            f.write(req['code'])
        f = __import__('scripts.' + filename)
        script = getattr(f, filename)

        def _decode_view(name):
            # Decode a base64-encoded image held in this client's ws state.
            raw = json.loads(ws_messages[request.remote_addr])[name]
            buf = np.frombuffer(base64.b64decode(raw), np.uint8)
            return cv2.imdecode(buf, cv2.IMREAD_ANYCOLOR)

        view1 = _decode_view("view1")
        view2 = _decode_view("view2")
        res = run(script.image_to_speed, int(req['step']), req['position'],
                  script.log, request.remote_addr, view1, view2)
        pos = list(res["position"])
        ws = ws_clients.get(request.remote_addr)
        if ws is not None and not ws.closed:
            msg = """{{ "Event": "update_pos", "position": [{0},{1},{2}] }}""".format(*pos)
            ws.send(msg)
        return jsonify(res)
    except Exception:
        err = traceback.format_exc()
        err = err.replace("\n", "<br>")
        # NOTE(review): as written this replaces a space with a space —
        # possibly an "&nbsp;" lost to HTML rendering upstream; confirm intent.
        err = err.replace(" ", " ")
        return jsonify({'Error': err})
import input_generator
import engine

# Price all NEM regions for each month in the loop (currently December only).
year = 2017
cbc_path = 'C:/Users/user/PycharmProjects/anvil/venv/Lib/site-packages/pulp/solverdir/cbc/win/64'
regions_to_price = ['SA1', 'NSW1', 'QLD1', 'VIC1', 'TAS1']
raw_data = 'E:/anvil_data/raw'
filtered_data = 'E:/anvil_data/filtered'
results = 'E:/anvil_data/results_2'

for month in range(12, 13):
    # Window is inclusive of start_time and exclusive of end_time.
    start_time = '2017/{}/01 00:00:00'.format(str(month).zfill(2))
    if month != 12:
        end_time = '2017/{}/01 00:00:00'.format(str(month + 1).zfill(2))
    else:
        end_time = '2018/01/01 00:00:00'
    inputs = input_generator.actual_inputs_replicator(
        start_time, end_time, raw_data, filtered_data, False)
    nemlite_results, objective_data_frame = engine.run(
        inputs, cbc_path, regions_to_price=regions_to_price,
        save_to=results, feed_back=False)
    nemlite_results.to_csv(
        'E:/anvil_data/no_fs_checks_results_2_{}_{}.csv'.format(
            year, str(month).zfill(2)))
def main():
    """Program entry point: delegate all work to the engine."""
    engine.run()
# Fragment (kept byte-identical): the leading bare `return` is the tail of a
# helper defined above this chunk — presumably size_after_k_meals(start_size, k),
# given the call below; TODO confirm. The rest defines cost_to_eat_size_s
# (meals needed until the eater exceeds size s), a Solver for the "motes"
# problem, and invokes run(__name__). Collapsed formatting left untouched
# because the enclosing definition's header is outside this view.
return 2**k * (start_size - 1) + 1 def cost_to_eat_size_s(start_size, s): for k in count(): s_current = size_after_k_meals(start_size, k) if s_current > s: return k, s_current + s class Solver: def __init__(self, reader): self.reader = reader def solve(self): a, n = self.reader.get_list() motes = sorted(self.reader.get_list()) assert len(motes) == n best_so_far = len(motes) current = len(motes) if a <= 1: return best_so_far for last_kept_i, last_kept_size in enumerate(motes): cost, a = cost_to_eat_size_s(a, last_kept_size) current += cost - 1 best_so_far = min(best_so_far, current) return best_so_far run(__name__)
# Fragment (kept byte-identical): the interior of an interactive menu loop,
# cut at both ends — it begins mid-loop (printing the help menu) and ends with
# an `if inp == "":` whose body lies outside this view. Handles the "road"
# command (optional --pre-reqs flag, then run(...) on the course's .req file)
# and the "course" command (re-prompt for a course number). Not restyled:
# the surrounding loop/function structure is not visible here.
for key, value in help_menu.items(): print("{0:2} {1:30} {2}".format("", key, value)) continue if inp_arr[0] == "road": if len(inp_arr) >= 2: if len(inp_arr) == 2 and inp_arr[1] == "--pre-reqs": show_pre_reqs = True else: print_failure("Invalid usage of " + bold(message("road"))) continue else: show_pre_reqs = False run(classes, course + ".req", show_pre_reqs=show_pre_reqs) continue if inp_arr[0] == "course": if len(inp_arr) != 1: print_failure("Invalid usage of " + bold(message("course"))) continue print_message("** You currently are registered as Course " + course + " **" + "\n") inp = input( "Enter a new course number, \"options\", or press enter to return to menu: " ) while True: if inp == "":
# Fragment (kept byte-identical): Python 2 code (print statement) that begins
# inside a `try:` whose header is outside this view — the env-var reads feed
# the visible `except KeyError:` which exits if any variable is unset. It then
# loads an NLU interpreter, builds a Twitter API client, registers intent
# handlers, and rebinds `engine` to a MainEngine instance before running it.
# Not restyled: the missing try header makes a structural rewrite unsafe.
model_dir = os.environ["MODEL_DIR"] twitter_consumer_key = os.environ["TWITTER_CONSUMER_KEY"] twitter_consumer_secret = os.environ["TWITTER_CONSUMER_SECRET"] twitter_access_token_key = os.environ["TWITTER_ACCESS_TOKEN_KEY"] twitter_access_token_secret = os.environ["TWITTER_ACCESS_TOKEN_SECRET"] eliteprospect_key = os.environ["EP_API_KEY"] except KeyError: print "Please set all the env variables" sys.exit(1) # where `model_directory points to the folder the mo interpreter = Interpreter.load(model_dir) api = twitter.Api(consumer_key=twitter_consumer_key, consumer_secret=twitter_consumer_secret, access_token_key=twitter_access_token_key, access_token_secret=twitter_access_token_secret) logging.basicConfig(level=logging.INFO) intents = {} intents["whoIs"] = eliteprospect.WhoIsTask(eliteprospect_key) intents["teamRoster"] = eliteprospect.TeamRosterTask(eliteprospect_key) intents["affirm"] = tasks.YesTask() intents["greeting"] = tasks.HelloTask() client = twitter_client.TwitterClient(api) parser = parser.Parser(interpreter) engine = engine.MainEngine(client, parser, intents) engine.run()
from engine import run

# Backtest configuration for the buy-and-hold strategy.
# FIX: the original defined `basConfig` but passed `baseConfig` to run(),
# which raised NameError at runtime; the name is now consistent.
base_config = {
    "strategy_file": "./buy_and_hold.py",
    "start_date": "2016-06-01",
    "end_date": "2016-12-01",
    "stock_starting_cash": 100000,
    "benchmark": "000300.XSHG",
}

run(base_config)
from engine import run

# Directory holding the analytics data handed to the engine.
PATH = "data_analytics"

run(PATH)
def get(self, key):
    """Look up *key* and return the corresponding engine value.

    FIX: the original built the whole dict literal eagerly, so
    engine.run() (and gettargettemperature()) executed on every call —
    even when the caller only asked for the other key, or for a missing
    one. Dispatching through callables evaluates only what was requested.

    Returns the engine value for 'currenttarget' or 'run', otherwise a
    ({'result': ...}, 404) tuple for unknown keys — same results as before.
    """
    dispatch = {
        "currenttarget": engine.gettargettemperature,
        'run': engine.run,
    }
    handler = dispatch.get(key)
    if handler is None:
        return ({"result": "key " + key + " not found"}, 404)
    return handler()
#######
# AS5
# Name: Zachary Carlson
# Partner: Brittany McGarr
#######

# Create, initialize, and run the 381 engine.
import engine

# NOTE: rebinds the module name `engine` to the Engine instance.
engine = engine.Engine()
engine.init()
engine.run()
import engine
import input_generator  # FIX: used below but missing from the visible import block (NameError).
import dashboards as db
import pandas as pd
import data_fetch_methods

# The time window variables, these define the times for which the program
# will run the dispatch algo.
start_time = '2017/04/01 12:05:00'  # inclusive
end_time = '2017/04/01 13:10:00'  # exclusive
ram_disk_path = 'C:/Users/user/PycharmProjects/anvil/venv/Lib/site-packages/pulp/solverdir/cbc/win/64'
regions_to_price = ['SA1', 'NSW1', 'QLD1', 'VIC1', 'TAS1']
raw_data = 'E:/anvil_data/raw'
filtered_data = 'E:/anvil_data/filtered_small_2'

inputs = input_generator.actual_inputs_replicator(
    start_time, end_time, raw_data, filtered_data, True)

nemlite_results, objective_data_frame = engine.run(
    inputs, start_time, end_time, cbc_path=ram_disk_path,
    regions_to_price=regions_to_price,
    save_to='E:/anvil_data/results_2')
nemlite_results.to_csv('E:/anvil_data/test.csv')
#nemlite_results = pd.read_csv('C:/Users/user/anvil_data/test_new_FSO_min_energy.csv')

# Fetch the actual dispatch prices for comparison, excluding interventions.
actual_prices = data_fetch_methods.method_map['DISPATCHPRICE'](
    start_time, end_time, 'DISPATCHPRICE', raw_data,
    filter_cols=('INTERVENTION',), filter_values=(['0'],))
actual_prices['SETTLEMENTDATE'] = actual_prices['SETTLEMENTDATE'].apply(
    lambda dt: dt.strftime('%Y/%m/%d %H:%M:%S'))
actual_prices['RRP'] = pd.to_numeric(actual_prices['RRP'])

db.construct_pdf(nemlite_results, actual_prices, save_as='test.pdf')
import sys

# Keep the interpreter from emitting .pyc files.
sys.dont_write_bytecode = True

import config
import engine

# TODO Command Line Parser
design_file = sys.argv[1]

# Apply configuration for the given design, then run verification.
config.config(design_file)
proof = engine.run(design_file)
# Fragment (kept byte-identical): start of an evolutionary-run analysis
# script. It runs the genetic engine for 2000 generations, then begins
# collecting per-generation average/best scores and species counts — but the
# chunk is cut mid-loop (`if individual.species_id in species_count:` has no
# body here), so it is not restyled.
from statistics import mean from matplotlib import pyplot as plt from engine import run from evaluation import evaluate_sum, evaluate_multiplication from utils import batch evaluate = evaluate_multiplication pop_size = 1000 population_history = run(pop_size=pop_size, gen_nbr=2000, mutation_probability=0.01, evaluate=evaluate) # with open(f"saved_{int(time.time())}", "wb") as f: # pickle.dump(population_history, f) avg_list = [] best_list = [] species_count_history = [] species_repr = {} for population in population_history: species_count = {} # print(population[0]) scores = [evaluate(individual) for individual in population] avg_list.append(round(mean(scores), 2)) best_list.append(round(min(scores), 2)) for individual in population: if individual.species_id in species_count:
# Fragment (kept byte-identical): the body of a flow-building method (uses
# `self.ops` / `self.params_fp`, but the enclosing `def` is outside this
# view) that queues synthesis, floorplan, PDN, place, CTS and route
# operations, followed by a __main__ guard that builds the "gcd" design and
# runs it through the flow. Not restyled: the method header is not visible.
op_synth = [] op_synth.append(("YosysSynth", "to_synth", "Timing")) op_synth.append(("GenusSynth", "to_synth", "Timing")) self.ops.append(op_synth) op_floorplan = [("InnovusFloorplan", "to_floorplan")] self.ops.append(op_floorplan) self.params_fp.append(("r", "1.0 0.7 0.0 0.0 0.0 0.0")) op_pdn = ["InnovusPDN", "to_pdn"] self.ops.append(op_pdn) op_place = ["InnovusPlace", "to_place"] self.ops.append(op_place) op_cts = ["InnovusCTS", "to_cts"] self.ops.append(op_cts) op_route = ["InnovusRoute", "to_route"] self.ops.append(op_route) if __name__ == "__main__": design = Design("gcd") my_flow = MyFlow() my_flow.flow() res = engine.run(design, my_flow)
def run(self):
    """Execute the engine with this instance's args/id and store the result."""
    self.result = engine.run(self.args, self.id)
import sys

# Avoid writing Python bytecode files during this run.
sys.dont_write_bytecode = True

import config
import engine

# TODO Command Line Parser
design_file = sys.argv[1]

# Set configurations for this design and start the main procedure.
config.config(design_file)
proof = engine.run(design_file)