def test_col(self):
    """Selecting the timestamp column prints the expected first value."""
    args = '--col=timestamp --file={0} --limit=1'.format(self.test_path)
    with StdOut() as captured:
        main(*args.split(' '))
    self.assertEqual(captured[0], '2015-06-13T07:20:24.110235Z')
def test_offset(self):
    """--offset=1 skips the first row and yields exactly one value."""
    args = ("--col=request_processing_time --file={0}"
            " --limit=1 --offset=1").format(self.test_path)
    with StdOut() as captured:
        main(*args.split(" "))
    self.assertEqual(len(captured), 1)
    self.assertEqual(captured[0], "0.000075")
def test_order_reverse(self):
    """--order-reverse sorts descending, so the largest value comes first."""
    args = ('--col=request_processing_time --file={0} --limit=1'
            ' --order-reverse=request_processing_time').format(self.test_path)
    with StdOut() as captured:
        main(*args.split(' '))
    self.assertEqual(captured[0], '0.000089')
def test_limit_lines(self):
    """--limit=1 restricts the output to a single line."""
    args = '--col=request_processing_time --file={0} --limit=1'.format(
        self.test_path)
    with StdOut() as captured:
        main(*args.split(' '))
    self.assertEqual(len(captured), 1)
def main():
    """Sync locally-parsed events into a Google Calendar.

    Flow: set up logging and config, authenticate against the Calendar API,
    fetch current remote events, parse the website for local events, then
    reconcile the two sets (create, or delete-and-recreate on change).
    Relies on module-level state: savedLocalEvents, savedRemoteEvents,
    CALENDAR_ID, logger — TODO confirm these are populated by getEvents /
    parser.main as this code assumes.
    """
    ''' Logging housekeeping '''
    logManager.main(logsPath)
    ''' Load config '''
    loadConfig()
    ''' Set LoggingLevel set in settings.conf '''
    logger.setLevel(LOGGING_LEVEL)
    logger.info('Logger level set to %s', LOGGING_LEVEL)
    ''' Grab credentials '''
    creds = getCredentials()
    service = build('calendar', 'v3', credentials=creds)
    ''' Grab events from calendar '''
    getEvents(service, 100)
    ''' Parse website '''
    parser.main()
    ''' Process results from parsing '''
    for (eventTitle, eventInfo) in savedLocalEvents.items():
        eventDateTime, eventURL = eventInfo
        if eventURL not in savedRemoteEvents:
            ''' We might already know the event, see if we should update '''
            ''' See if we match any of our existing event names '''
            for (remoteURL, remoteInfo) in savedRemoteEvents.items():
                remoteTitle, remoteStart, remoteId = remoteInfo
                ''' we're not support checking matching time yet '''
                if (remoteTitle == eventTitle ): #or (remoteStart == eventDateTime):
                    print(bcolors.OKGREEN + 'Update the following event:')
                    print(remoteTitle + ', ' + remoteStart + bcolors.ENDC)
                    logger.info('Update the following event:')
                    logger.info(remoteTitle + ', ' + remoteStart)
                    ''' Lets delete the old event before creating new '''
                    print('Deleting old event...')
                    logger.info('Deleting old event...')
                    service.events().delete(calendarId=CALENDAR_ID,
                                            eventId=remoteId).execute()
                    # Only the first title match is deleted.
                    break
            createEvent(service, eventDateTime, eventTitle, eventURL)
        else:
            ''' We have the URL of the event '''
            ''' but we may need to update '''
            if 'T' in eventDateTime:
                # 'T' in the ISO datetime means a clock time (not just a
                # date) is present on the local side.
                ''' Check if there is a time added to the Events Page '''
                remoteTitle, remoteStart, remoteId = savedRemoteEvents[eventURL]
                if 'T' not in remoteStart:
                    # Remote copy is date-only: replace it with the timed one.
                    print(bcolors.OKGREEN + 'Update the time of following event: ')
                    print(remoteTitle + ', ' + remoteStart + bcolors.ENDC)
                    logger.info('Update the time of the following event: ')
                    logger.info(remoteTitle + ', ' + remoteStart)
                    ''' Lets delete the old event before creating new '''
                    print('Deleting old event...')
                    logger.info('Deleting old event...')
                    service.events().delete(calendarId=CALENDAR_ID,
                                            eventId=remoteId).execute()
                    createEvent(service, eventDateTime, eventTitle, eventURL)
    # Refresh the remote cache, then notify by email.
    getEvents(service, 100)
    sendEmail()
def test_integrate_relation_error(tmp_path, monkeypatch):
    """A syntactically invalid relation ('a :- a a') yields an ERROR marker."""
    filename = 'a'
    text = 'a :- a a'
    (tmp_path / filename).write_text(text)
    monkeypatch.chdir(tmp_path)
    parser.main(['--relation', f'(unknown)'])
    # Read via a context manager so the handle is closed deterministically
    # (the original open(...).read() leaked the file object).
    with open(f'(unknown).out', 'r') as out_file:
        res = out_file.read()
    assert res.split(' ', 1)[0] == 'ERROR'
def test_integrate_module(tmp_path, monkeypatch):
    """A module declaration parses into the expected Module AST string."""
    filename = 'a'
    text = 'module a.'
    (tmp_path / filename).write_text(text)
    monkeypatch.chdir(tmp_path)
    parser.main(['--module', f'(unknown)'])
    # Close the output file deterministically (the original leaked the handle).
    with open(f'(unknown).out', 'r') as out_file:
        res = out_file.read()
    assert res == 'Module (ID (a))'
def test_integrate_type(tmp_path, monkeypatch):
    """A typedef with an arrow type parses into the expected AST string."""
    filename = 'a'
    text = 'type a a->a.'
    (tmp_path / filename).write_text(text)
    monkeypatch.chdir(tmp_path)
    parser.main(['--type', f'(unknown)'])
    # Close the output file deterministically (the original leaked the handle).
    with open(f'(unknown).out', 'r') as out_file:
        res = out_file.read()
    assert res == 'Typedef (ID (a)) (Arrow (Type (Atom (ID (a)))) (Type (Atom (ID (a)))))'
def test_integrate_atom(tmp_path, monkeypatch):
    """An atom application parses into the expected AST string."""
    filename = 'a'
    text = 'a a'
    (tmp_path / filename).write_text(text)
    monkeypatch.chdir(tmp_path)
    parser.main(['--atom', f'(unknown)'])
    # Close the output file deterministically (the original leaked the handle).
    with open(f'(unknown).out', 'r') as out_file:
        res = out_file.read()
    assert res == 'Atom (ID (a)) (Atom (ID (a)))'
def test_integrate_prog_2(tmp_path, monkeypatch):
    """A whole program with one relation parses into a Program AST string."""
    filename = 'a'
    text = 'a a.'
    (tmp_path / filename).write_text(text)
    monkeypatch.chdir(tmp_path)
    parser.main([f'(unknown)'])
    # Close the output file deterministically (the original leaked the handle).
    with open(f'(unknown).out', 'r') as out_file:
        res = out_file.read()
    assert res == 'Program (\nRelation (Atom (ID (a)) (Atom (ID (a))))\n)'
def test_order_reverse(self):
    """With --order-reverse the maximum processing time is printed first."""
    args = (
        "--col=request_processing_time --file={0} --limit=1"
        " --order-reverse=request_processing_time"
    ).format(self.test_path)
    with StdOut() as captured:
        main(*args.split(" "))
    self.assertEqual(captured[0], "0.000089")
def main():
    """This function runs the user interface, prompting the user and calling
    the necessary functions to resolve their queries"""
    # NOTE: Python 2 code (print statements, raw_input).
    print "Hello, welcome to the recipe transformer!"
    print "To get started, please enter the URL of the recipe you would like to transform. At any point, you may enter \"back\" to return to the previous menu."
    print "You may also enter \"done\" at any point to terminate the program."
    # `menu` is a state index into `prompt`: 0 = URL entry,
    # 1 = transformation entry, 2 = "apply another?" confirmation.
    menu = 0
    prompt = ["Enter AllRecipes.com URL: ", "Enter transformation (enter \"list\" for a list of posible transformations): ", "Would you like to apply another transformation to this new recipe? "]
    URL = ""
    # True when the user chains a further transformation onto the recipe
    # produced by the previous one.
    again = False
    transformations = ['pescatarian', 'vegetarian', 'east asian', 'italian', 'easy', 'low sodium', 'low carb']
    print "Hello world"
    while(True):
        command = raw_input(prompt[menu]).lower()
        if command == "done":
            return
        if command == "back":
            menu -= 1
            if menu < 0:
                menu = 0
            continue
        if menu == 0:
            # Only URLs with the exact AllRecipes prefix are accepted.
            if command[:29] == "http://allrecipes.com/recipe/":
                URL = command
                menu += 1
                continue
            print "Invalid URL"
        if menu == 1:
            if command == "list":
                print ", ".join(transformations)
                continue
            if command in transformations:
                if again:
                    # Re-transform the previously produced recipe in place.
                    transformed_recipe.url = ''
                    transformed_recipe.transformation = command
                    recipes = parser.main(transformed_recipe)
                else:
                    recipe = recipe_classes.Recipe(URL, "recipe", command)
                    recipes = parser.main(recipe)
                # parser.main appears to return [original, transformed] —
                # TODO confirm against parser's implementation.
                original_recipe = recipes[0]
                transformed_recipe = recipes[1]
                menu += 1
                continue
            print "Invalid transformation"
        if menu == 2:
            if command == "yes":
                again = True
                menu = 1
                continue
            if command == "no":
                again = False
                menu = 0
                continue
            print "Invalid response"
def test_integrate_arg_error_2(tmp_path, monkeypatch, capsys):
    """An unrecognized flag makes the parser print 'Invalid args' to stdout."""
    filename = 'a'
    text = 'kek'
    (tmp_path / filename).write_text(text)
    monkeypatch.chdir(tmp_path)
    parser.main(['kek', f'(unknown)'])
    out, err = capsys.readouterr()
    assert err == ''
    assert out == 'Invalid args\n'
def index():
    """Accept an uploaded PDF, run the parser on it, and redirect to the CSV.

    Non-POST requests (and rejected uploads) fall through to the index page.
    """
    if request.method == 'POST':
        uploaded = request.files['file']
        if uploaded and allowed_files(uploaded.filename):
            safe_name = secure_filename(uploaded.filename)
            uploaded.save(os.path.join(app.config['UPLOAD_FOLDER'], safe_name))
            parser.main(pdf=safe_name)
            # Filenames like "user@host.domain.pdf" keep both leading parts;
            # everything else keeps only the stem before the first dot.
            pieces = safe_name.split(".")
            if "@" in safe_name and safe_name.count(".") == 2:
                csv_name = pieces[0] + "." + pieces[1] + ".csv"
            else:
                csv_name = pieces[0] + ".csv"
            return redirect(url_for('download', filename=csv_name))
    return render_template("index.html")
def test_parser(self):
    """Run the parser over every file in the sentences directory.

    Not a true assertion-based test: it exists so parser output for each
    sample text file can be reviewed quickly.
    """
    pattern = os.getcwd() + '/sentences/*.txt'
    # Reverse the glob results so the files run in sequential order.
    for sentence_file in reversed(glob.glob(pattern)):
        fake_argv = ["parser.py", sentence_file]
        with patch.object(sys, 'argv', fake_argv):
            # Run main and eyeball its output.
            main()
def test_forbidden(self):
    """Every forbidden_*.txt request is answered with 403 Forbidden.

    The original repeated the same three-line block per fixture; the loop
    keeps the assertions identical while removing the copy-paste.
    """
    expected = ["HTCPCP-TEA/1.0 403 Forbidden", "\r\n", "\r\n"]
    for name in ("forbidden_1", "forbidden_2", "forbidden_3"):
        request = self.read_file("tests/{0}.txt".format(name))
        output = parser.main(request)
        self.assertEqual(output, expected)
def handle_connection(socket):
    """
    Handles every connection seen by the server.
    Converts bytes into parser-expected format, then forwards to parser,
    sending response back over the socket.
    socket - the socket corresponding to the client.
    """
    data = socket.recv(2048)
    buffer = data.decode("UTF-8")
    # Split the request into lines, keeping the trailing "\n" on each one
    # (the parser opened files as rb, so lines are handed over as bytes).
    # str.split is a single O(n) pass; the original peeled one character at
    # a time off the front of the buffer with buffer[1:], which is O(n^2).
    pieces = buffer.split("\n")
    request = [bytes(piece + "\n", "UTF-8") for piece in pieces[:-1]]
    # The final element is whatever followed the last newline — possibly the
    # empty string — which the original loop also appended before breaking.
    request.append(bytes(pieces[-1], "UTF-8"))
    response = parser.main(request)
    for line in response:
        socket.send(bytes(line, "UTF-8"))
    socket.close()
    return
def test_coffee(self):
    """Brewing coffee returns 200 OK with the coffee-pot content type."""
    output = parser.main(self.read_file("tests/coffee.txt"))
    expected = [
        "HTCPCP-TEA/1.0 200 OK",
        "\r\n",
        "Content-Type: message/coffee-pot-command\r\n",
        "\r\n",
    ]
    self.assertEqual(output, expected)
    # The request creates pot-0 on disk; clean it up afterwards.
    os.remove("./pot-0")
def test_stop(self):
    """Stopping the pot returns 200 OK with the teapot content type."""
    output = parser.main(self.read_file("tests/stop.txt"))
    expected = [
        "HTCPCP-TEA/1.0 200 OK",
        "\r\n",
        "Content-Type: message/teapot\r\n",
        "\r\n",
    ]
    self.assertEqual(output, expected)
    # Remove the on-disk artifacts the request created.
    os.remove("./pot-0/peppermint")
    os.rmdir("./pot-0")
def get_func(message):
    """Dispatch a vehicle-category button press to the matching listing URL.

    message - incoming bot message; its .text selects the listing section.
    Unrecognized texts run no search; in all cases get_start(message) is
    called afterwards to re-show the menu (same as the original flow).
    """
    # Table-driven dispatch replaces four independent if-statements.
    routes = {
        'Легковые авто': 'search/all/',
        'Коммерческие авто': 'commercialsearch/all/',
        'Спецтехника': 'specsearch/all/',
        'Мотоциклы': 'motosearch/all/',
    }
    url = routes.get(message.text)
    if url is not None:
        main(url)
    get_start(message)
def test_index(self):
    """The index request yields a 300 Multiple Options listing of teas."""
    response = parser.main(self.read_file("tests/index.txt"))
    self.assertEqual(response, [
        "HTCPCP-TEA/1.0 300 Multiple Options\r\n",
        "Alternates: ",
        "{\"peppermint\" {type message/teapot}}",
        ",\r\n",
        "{\"black\" {type message/teapot}}",
        ",\r\n",
        "{\"green\" {type message/teapot}}",
        ",\r\n",
        "{\"earl-grey\" {type message/teapot}}",
        "\r\n",
        "\r\n",
    ])
def test_php(self):
    """PHP pages render print_r-style dumps of the GET and POST arrays.

    Each case is (fixture path, GET param lines, POST param lines); the
    expected response is assembled by expected_dump.  php_get_404 has no
    dump and is marked with None.  The original spelled out seven nearly
    identical copy-pasted blocks; the assertions are unchanged.
    """
    def expected_dump(get_lines, post_lines):
        # 200 header plus the two dumped arrays ($_GET first, then $_POST).
        return (["HTCPCP-TEA/1.0 200 OK", "\r\n",
                 "Content-Type: text/plain", "\r\n",
                 "<pre>Array\r\n", "(\r\n"]
                + get_lines
                + [")\r\n", "</pre><pre>Array\r\n", "(\r\n"]
                + post_lines
                + [")\r\n", "</pre>\r\n", "\r\n"])

    one = " [testvar] => test\r\n"
    two = " [testvar2] => test\r\n"
    cases = [
        ("tests/php_get.txt", [], []),
        ("tests/php_get_param.txt", [one], []),
        ("tests/php_get_params.txt", [one, two], []),
        ("tests/php_get_404.txt", None, None),
        ("tests/php_post.txt", [], []),
        ("tests/php_post_param.txt", [], [one]),
        ("tests/php_post_params.txt", [], [one, two]),
    ]
    for path, get_lines, post_lines in cases:
        output = parser.main(self.read_file(path))
        if get_lines is None:
            # A missing PHP file is a plain 404.
            self.assertEqual(
                output, ["HTCPCP-TEA/1.0 404 Not Found", "\r\n", "\r\n"])
        else:
            self.assertEqual(output, expected_dump(get_lines, post_lines))
def main():
    """Run the MILP experiment across every utilisation level.

    Command line: argv[1] = number of cores, argv[2] = number of tasks.
    For each utilisation from 90 down to 10 (step 10), parse the matching
    task-set CSV, run the MILP formulation per job set, and log the status.
    """
    cores = int(sys.argv[1])
    tasks = int(sys.argv[2])
    for utilisation in range(90, 0, -10):
        path = "TaskSets/{0}Cores{1}Tasks{2}.csv".format(cores, tasks, utilisation)
        result_path = "MILPresults{0}Cores{1}Tasks".format(cores, tasks)
        for jobs, release_times, deadlines, execution_times, processors, M \
                in parser.main(cores, path):
            status = milpForm.runExperiment(jobs, release_times, deadlines,
                                            execution_times, processors, M)
            lg.log_results(result_path, [utilisation, status])
def start(input_file, output_file, version):
    """Solve every problem parsed from input_file, appending results to output_file.

    input_file  - path handed to parser.main, which yields problems one by one.
    output_file - destination file; a fixed two-line header is written first.
    version     - algorithm variant forwarded to algorithm.solve_problem.

    Fix: the original body began with a bare `output_file` expression — a
    no-op statement — which has been removed.
    """
    with open(output_file, "w") as output:
        output.write("zaragoza\n")
        output.write("t2ri0va94ush0tdu9gpuusq64r\n")
    counter = 1
    for problem in parser.main(input_file):
        try:
            start_time = time.time()
            print("Problem {}".format(counter), end="\r")
            sys.stdout.flush()
            # Re-open in append mode per problem so each result is committed
            # as soon as its block completes.
            with open(output_file, "a") as output:
                resulting_string = algorithm.solve_problem(
                    counter, version, problem)
                # An all-whitespace result means no solution was found.
                if not resulting_string.replace(" ", ""):
                    print("Problem {} not completed due to no result found".
                          format(counter))
                    counter += 1
                    continue
                output.write("{}: {}\n".format(counter, resulting_string))
                # NOTE(review): coverage is computed while output is still
                # open for append — presumably the write above is flushed in
                # time; confirm coverage_calculator sees the new line.
                problem_coverage_results = coverage_calculator.go(
                    input_file, output_file, counter)
                if problem_coverage_results:
                    # Coverage >= 30% counts as a pass (green), else red.
                    result = "32mPASSED" if problem_coverage_results[
                        0] >= 30 else "31mFAILED"
                    print(
                        "\033[{}\033[0m Problem {}: Coverage: {}%, Score: {} - {} seconds"
                        .format(result, counter, problem_coverage_results[0],
                                problem_coverage_results[1],
                                round(time.time() - start_time, 2)))
                else:
                    print("\033[91mERROR\033[0m Problem {} - {} seconds".format(
                        counter, round(time.time() - start_time, 2)))
                counter += 1
        except KeyboardInterrupt:
            print("Problem {} not completed due to Keyboard Interrupt".format(
                counter))
            sys.stdout.flush()
            counter += 1
def render(self):
    """Parse the input text and display the resulting output.png in the UI."""
    input_string = self.input_text.get(1.0, tk.END)
    from parser import main
    main(input_string)
    image = tk.PhotoImage(file="output.png")
    self.output_image_label.config(image=image)
    # Fix: keep a reference on the widget. Tkinter does not hold one itself,
    # so without this the PhotoImage is garbage-collected when render()
    # returns and the label displays nothing.
    self.output_image_label.image = image
# Batch entry point: run the project's parser over the raw order dump.
import parser

if __name__ == '__main__':
    # Arguments look like (input json, location output, pid output) —
    # presumably; verify against parser.main's signature.
    parser.main(
        '/opt/proj/foodnemo/raw/order.json',
        '/opt/proj/foodnemo/raw/order.loc',
        '/opt/proj/foodnemo/raw/order.pid',
    )
def test_col(self):
    """The first line printed for --col=timestamp is the known timestamp."""
    cli = "--col=timestamp --file={0} --limit=1".format(self.test_path)
    with StdOut() as lines:
        main(*cli.split(" "))
    self.assertEqual(lines[0], "2015-06-13T07:20:24.110235Z")
def update_parser():
    """Re-run the parser over last week's data and report success.

    Returns a fixed success string for the caller to display — note this is
    returned unconditionally; main() is assumed to raise on failure.
    """
    print("Parsing Last Week's Data...")
    main()
    return 'Update Success!'
def main():
    """Run the full pipeline in order: PDF analysis, parsing, then merging."""
    official_pdf_analyzer.main()
    parser.main()
    merger.main()
def api():
    """Resolve a submitted URL into downloadable video/picture metadata.

    Form fields: url, hash (session anti-abuse token), captcha_code.
    Three paths: a single tumblr post (scraped inline), a whole tumblr blog
    (queued to a background crawler, results served from the DB), and any
    other site (delegated to parser.main).  Always returns JSON.
    NOTE: Python 2 code (print statements, `except Exception, e`).
    """
    url = request.form.get('url')
    hash_ = request.form.get('hash')
    captcha_code = request.form.get('captcha_code')
    if captcha_code is not None:
        print 'input code is :', captcha_code
        print 'session code is :', session.get('CAPTCHA')
        if captcha_code.upper() == session.get('CAPTCHA'):
            return jsonify({'captcha': 'pass'})
    # Reject requests whose hash does not match the session token, that have
    # no hash at all, or whose user agent looks like a python script.
    if hash_ != session.get('hash'):
        return jsonify({'captcha': 'ok'})
    if hash_ is None:
        return jsonify({'captcha': 'ok'})
    if request.headers['User-Agent'] is None or 'python' in request.headers[
            'User-Agent'].lower():
        return jsonify({'captcha': 'ok'})
    else:
        retdata = {}
        # Single tumblr post: scrape the page directly.
        if 'tumblr.com/post' in url:
            try:
                video = ''
                cont = requests.get(url).content
                pictures = IMAGEREGEX.findall(cont)
                vid = VIDEOREGEX.findall(cont)
                poster = POSTERREGEX.findall(cont)
                isvideo = 0
                if vid:
                    video = vhead % vid[0]
                    poster = poster[0]
                    isvideo = 1
                    # flash('解析成功')
                    retdata['status'] = 'ok'
                    retdata['total'] = 1
                    retdata['pages'] = 1
                    retdata['video'] = [{
                        'url': video,
                        'desc': '',
                        'thumb': poster
                    }]
                    return jsonify(retdata)
                else:
                    # flash('解析失败')
                    retdata['status'] = 'fail'
                    retdata['message'] = '解析失败,请联系站长解决'
                    return jsonify(retdata)
            except Exception, e:
                print e
                # flash('解析失败')
                retdata['status'] = 'fail'
                retdata['message'] = '解析失败,请联系站长解决'
                return jsonify(retdata)
        # Whole tumblr blog: queue the crawler and serve cached results.
        if 'tumblr.com' in url:
            id = re.findall('://(.*?)\.', url)[0]
            if check(id):
                is_exists = ID.query.filter_by(id=id).first()
                if is_exists is None:
                    # First request for this blog: record it, kick off the
                    # crawler, and ask the caller to retry shortly.
                    now = datetime.now()
                    inserttime = now.strftime('%Y%m%d %H:%M:%S')
                    a = ID(id=id, updateTime=inserttime, parseTimes=1)
                    db.session.add(a)
                    db.session.commit()
                    retdata['status'] = 'fail'
                    retdata['message'] = '正在解析,请稍等15s再试!'
                    subprocess.Popen('python {clawer} {id}'.format(
                        clawer=clawer, id=id), shell=True)
                    return jsonify(retdata)
                else:
                    # Known blog: bump its stats, refresh in the background,
                    # and return the 50 most recent cached videos.
                    now = datetime.now()
                    is_exists.updateTime = now.strftime('%Y%m%d %H:%M:%S')
                    is_exists.parseTimes += 1
                    db.session.add(is_exists)
                    db.session.commit()
                    subprocess.Popen('python {clawer} {id}'.format(
                        clawer=clawer, id=id), shell=True)
                    retdata['status'] = 'ok'
                    retdata['total'] = 50
                    retdata['pages'] = 2
                    retdata[
                        'html'] = '<a href="/download?id={}&type=video" class="btn btn-primary" role="button" title="导出视频">导出视频 <span class="glyphicon glyphicon-film"></span></a>'.format(
                            id)
                    retdata[
                        'html'] += ' | <a href="/download?id={}&type=picture" class="btn btn-primary" role="button" title="导出图片">导出图片 <span class="glyphicon glyphicon-picture"></span></a>'.format(
                            id)
                    videos = Context.query.filter_by(
                        id=id, isvideo=1).order_by(
                            Context.posttime.desc()).limit(50).all()
                    for video in videos:
                        retdata.setdefault('video', []).append({
                            'url': video.urls,
                            'desc': video.description,
                            'thumb': video.poster
                        })
                    return jsonify(retdata)
            else:
                # flash('解析失败')
                retdata['status'] = 'fail'
                retdata['message'] = '解析失败,请联系站长解决'
                return jsonify(retdata)
        # Any other site (e.g. 2mm): delegate to the generic parser.
        else:
            try:
                video, title, picture = parser.main(url)
                retdata['status'] = 'ok'
                retdata['total'] = 1
                retdata['pages'] = 1
                retdata['video'] = [{
                    'url': video,
                    'desc': title,
                    'thumb': picture
                }]
                return jsonify(retdata)
            except Exception, e:
                print e
                retdata['status'] = 'fail'
                retdata['message'] = '解析网站不存在'
                return jsonify(retdata)
retdata['status'] = 'ok' retdata['total'] = 50 retdata['pages'] = 2 retdata['html'] = '<a href="/download?id={}&type=video" class="btn btn-primary" role="button" title="导出视频">导出视频 <span class="glyphicon glyphicon-film"></span></a>'.format( id) retdata['html'] += ' | <a href="/download?id={}&type=picture" class="btn btn-primary" role="button" title="导出图片">导出图片 <span class="glyphicon glyphicon-picture"></span></a>'.format( id) videos = Context.query.filter_by( uid=id, isvideo=1).order_by(Context.posttime.desc()).limit(50).all() for video in videos: retdata.setdefault('video', []).append( {'url': video.urls, 'desc': video.description, 'thumb': video.poster}) return jsonify(retdata) else: try: video, title, picture = parser.main(url) retdata['status'] = 'ok' retdata['total'] = 1 retdata['pages'] = 1 retdata['video'] = [ {'url': video, 'desc': title, 'thumb': picture}] return jsonify(retdata) except Exception, e: print e retdata['status'] = 'fail' retdata['message'] = '解析网站不存在' return jsonify(retdata) @app.route('/download') def download():
def main():
    """This function runs the user interface, prompting the user and calling
    the necessary functions to resolve their queries"""
    # NOTE: Python 2 code (print statements, raw_input).
    print "Hello, welcome to the recipe transformer!"
    print "To get started, please enter the URL of the recipe you would like to transform. At any point, you may enter \"back\" to return to the previous menu."
    print "You may also enter \"done\" at any point to terminate the program."
    # `menu` is a state index into `prompt`: 0 = URL entry,
    # 1 = transformation entry, 2 = "apply another?" confirmation.
    menu = 0
    prompt = [
        "Enter AllRecipes.com URL: ",
        "Enter transformation (enter \"list\" for a list of posible transformations): ",
        "Would you like to apply another transformation to this new recipe? "
    ]
    URL = ""
    # True when the user chains a further transformation onto the recipe
    # produced by the previous one.
    again = False
    transformations = [
        'pescatarian', 'vegetarian', 'east asian', 'italian', 'easy',
        'low sodium', 'low carb'
    ]
    print "Hello world"
    while (True):
        command = raw_input(prompt[menu]).lower()
        if command == "done":
            return
        if command == "back":
            menu -= 1
            if menu < 0:
                menu = 0
            continue
        if menu == 0:
            # Only URLs with the exact AllRecipes prefix are accepted.
            if command[:29] == "http://allrecipes.com/recipe/":
                URL = command
                menu += 1
                continue
            print "Invalid URL"
        if menu == 1:
            if command == "list":
                print ", ".join(transformations)
                continue
            if command in transformations:
                if again:
                    # Re-transform the previously produced recipe in place.
                    transformed_recipe.url = ''
                    transformed_recipe.transformation = command
                    recipes = parser.main(transformed_recipe)
                else:
                    recipe = recipe_classes.Recipe(URL, "recipe", command)
                    recipes = parser.main(recipe)
                # parser.main appears to return [original, transformed] —
                # TODO confirm against parser's implementation.
                original_recipe = recipes[0]
                transformed_recipe = recipes[1]
                menu += 1
                continue
            print "Invalid transformation"
        if menu == 2:
            if command == "yes":
                again = True
                menu = 1
                continue
            if command == "no":
                again = False
                menu = 0
                continue
            print "Invalid response"
def main(argv):
    """Forward the command-line arguments straight to the parser module.

    argv - argument list handed through unchanged to parser.main.
    Fix: the dead `pass` statement after the call has been removed.
    """
    parser.main(argv)
def main():
    """Run the two pipeline stages in order: et.main() then p.main()."""
    et.main()
    p.main()
# Christoph Burschka, 2012 from converters import * from parser import main sig_graph = {'E':('relation',2)} sig_ord = {'≤':('relation',2)} print(main('((.fa. x E(x,x) && .fa. x .fa. y (E(x,y) <-> E(y,x)) ) && .fa. x .fa. y .fa. z ((E(x,y) && E(y,z))->E(x,z)))', sig_graph)) formula=main('(((.fa. x ≤(x,x) && .fa. x .fa. y ((≤(x,y) && ≤(y,x)) -> x=y) ) && .fa. x .fa. y .fa. z ((≤(x,y) && ≤(y,z))->≤(x,z))) && .fa. x .fa. y (≤(x,y) || ≤(y,x)))', sig_ord) print(formula) print(prenex(formula))
# Christoph Burschka, 2012 from converters import * from parser import main sig_graph = {'E': ('relation', 2)} sig_ord = {'≤': ('relation', 2)} print( main( '((.fa. x E(x,x) && .fa. x .fa. y (E(x,y) <-> E(y,x)) ) && .fa. x .fa. y .fa. z ((E(x,y) && E(y,z))->E(x,z)))', sig_graph)) formula = main( '(((.fa. x ≤(x,x) && .fa. x .fa. y ((≤(x,y) && ≤(y,x)) -> x=y) ) && .fa. x .fa. y .fa. z ((≤(x,y) && ≤(y,z))->≤(x,z))) && .fa. x .fa. y (≤(x,y) || ≤(y,x)))', sig_ord) print(formula) print(prenex(formula))
        # NOTE(review): this `return` is the tail of a method whose `def`
        # lies outside this chunk — it reads from the self.objects registry.
        return self.objects[address]

    def interpret(self, trace):
        # Replay every call recorded in the trace.
        # NOTE(review): delegates to self.interpret_call, which is not
        # defined in this chunk (handle_call below is) — confirm the name.
        for call in trace.calls:
            self.interpret_call(call)

    def handle_call(self, call):
        # Log the raw call, translate its arguments, then re-invoke it on
        # the interpreted object (or the global namespace for free calls).
        sys.stderr.write("%s\n" % call)
        args = [self.interpret_arg(arg) for name, arg in call.args]
        if call.klass:
            # Method call: the first translated argument is the receiver.
            obj = args[0]
            args = args[1:]
        else:
            obj = self.globl
        method = getattr(obj, call.method)
        ret = method(*args)
        # Register returned pointers so later calls can refer to the same
        # object by address.
        if call.ret and isinstance(call.ret, model.Pointer):
            self.register_object(call.ret.address, ret)

    def interpret_arg(self, node):
        # Arguments are AST nodes; a fresh Translator visits each one.
        translator = Translator(self)
        return translator.visit(node)


if __name__ == '__main__':
    # parser.main drives the trace with this Interpreter class.
    parser.main(Interpreter)
import sys

import parser

# Assembler driver: translate the file named on the command line into a
# .hack file with the same stem.
in_filename = sys.argv[1]
out_filename = sys.argv[1].rpartition('.')[0] + '.hack'

# Fix: use context managers so both handles are closed even if parsing or
# writing fails (the original left both files open).  The two files stay
# open together, preserving the original's ordering in case parser.main
# returns a lazy iterable over the input.
with open(in_filename) as in_file, open(out_filename, 'w') as out_file:
    compiled = parser.main(in_file)
    out_file.writelines(compiled)
def main():
    """Configure and launch an Armagetron dedicated server plus this script.

    Reads options from the command line and config.yaml, locates the server
    installation under a prefix (asking interactively when needed), writes
    the configuration back when requested, starts the server in a daemon
    thread, rewires stdin/stdout/stderr to talk to it, then loops running
    the command parser — restarting it after crashes.

    Fix: `options.configir = ...` was a typo; it assigned a dead attribute
    instead of falling back configdir to /etc/armagetronad-dedicated.
    """
    # SETTINGS ##############################################
    userdatadir="./server/data"
    userconfigdir="./server/config"
    # COMMAND LINE OPTIONS ##################################
    oparser=OptionParser()
    #parser.add_option("-v", "--vardir", dest="vardir", default=None, help="Path to the var directory (server)")
    oparser.add_option("-d", "--datadir", dest="datadir", default=None, help="Path to the data directory (server)")
    oparser.add_option("-c", "--configdir", dest="configdir", default=None, help="Path to the config directory (server)")
    oparser.add_option("-e", "--executable", dest="server", default=None, help="Path of the server executable", metavar="EXECUTABLE")
    oparser.add_option("-p", "--prefix", dest="prefix", default=None, help="The prefix the server was installed to.")
    oparser.add_option("-n", "--name", dest="servername", default=None, help="The name of the server", metavar="SERVERNAME")
    oparser.add_option("--debug",dest="debug", default=False, action="store_true", help="Run in debug mode")
    oparser.add_option("--disable", dest="disabledCommands", action="append", help="Disable COMMAND.", metavar="COMMAND", default=[])
    oparser.add_option("--default", dest="save", action="store_true", default=False, help="Set this configuration as default")
    oparser.add_option("-D","--disableExt", dest="disabledExtensions", default=[], action="append", help="Dsiable the extension with the name EXTENSION.", metavar="EXTENSION")
    oparser.add_option("--list-extensions", dest="list_extensions", default=False, action="store_true", help="List all available extensions.")
    options=oparser.parse_args()[0]
    options.vardir="server/var"
    optionsdict=dict()
    # Option names persisted to config.yaml.
    save_options=["vardir","configdir","server","datadir", "servername", "prefix"]
    # START #################################################
    # Get available extensions
    if options.list_extensions:
        print("Extensions:")
        print("\n".join(extensions.getExtensions()))
    # Work relative to the script's own directory, inside a run/ sandbox.
    os.chdir(os.path.dirname(sys.argv[0]) )
    if not os.path.exists("run"):
        os.mkdir("run")
    os.chdir("run")
    asked=False
    # Read prefix
    def read_prefix():
        # Interactively ask for the installation prefix, offering the first
        # existing standard location as the default.
        # NOTE(review): `global asked` targets the module-level name, not
        # main()'s local `asked` — confirm which one is intended.
        global asked
        default=""
        test_prefixes=["/usr","/usr/local"]
        for test_prefix in test_prefixes:
            if os.path.exists(os.path.join(test_prefix,"bin/armagetronad-dedicated")):
                default="["+test_prefix+"]"
                break
        while options.prefix==None or not os.path.exists(options.prefix):
            options.prefix=input("Prefix the server was installed to "+default+": ")
            if options.prefix.strip()=="":
                options.prefix=default[1:-1]
            if not os.path.exists(options.prefix):
                print("Error: Prefix doesn't exist.")
    # Read config files ++++++++++++++++++++++++++++++++
    if os.path.exists("config.yaml"):
        # Saved values only fill in options not given on the command line.
        optionsdict2=yaml.load(open("config.yaml","r") )
        for key,value in optionsdict2.items():
            try:
                if getattr(options, key)==None:
                    setattr(options, key, value)
            except:
                pass
    else:
        read_prefix()
        asked=True
    # If the prefix is unusable but some explicit paths exist, re-ask and
    # recompute the derived paths from scratch.
    if ( not options.prefix or not os.path.exists(options.prefix)) and not all((not os.path.exists(i) for i in (options.datadir, options.configdir, options.server))):
        read_prefix()
        asked=True
        options.server=None
        options.datadir=None
        options.configdir=None
    # Derive unset paths from the prefix, falling back to Debian-style
    # games/ locations when the plain ones do not exist.
    if not options.server:
        options.server=os.path.join(options.prefix, "bin/armagetronad-dedicated")
    if not options.datadir:
        options.datadir=os.path.join(options.prefix, "share/armagetronad-dedicated")
    if not options.configdir:
        options.configdir=os.path.join(options.prefix, "etc/armagetronad-dedicated")
    if not os.path.exists(options.server):
        options.server=os.path.join(options.prefix,"games/armagetronad-dedicated")
    if not os.path.exists(options.configdir):
        options.configdir=os.path.join(options.prefix, "etc/games/armagetronad-dedicated")
    if not os.path.exists(options.configdir):
        # BUG FIX: was `options.configir` — the typo assigned a new, unused
        # attribute and left configdir pointing at a missing path.
        options.configdir="/etc/armagetronad-dedicated"
    if not os.path.exists(options.datadir):
        options.datadir=os.path.join(options.prefix, "share/games/armagetronad-dedicated")
    Global.datadir=options.datadir
    Global.configdir=options.configdir
    Global.debug=options.debug
    if options.servername==None:
        options.servername=input("Please enter a name for your server: ")
        asked=True
    # Write config files +++++++++++++++++++++++++++++++
    for save_option in save_options:
        optionsdict[save_option]=getattr(options, save_option)
    if options.save or not os.path.exists("config.yaml") or asked:
        yaml.dump(optionsdict, open("config.yaml","w"), default_flow_style=False )
    if not os.path.exists(userconfigdir):
        os.makedirs(userconfigdir)
    if not os.path.exists(userdatadir):
        os.makedirs(userdatadir)
    if not os.path.exists(options.vardir):
        os.makedirs(options.vardir)
    # Truncate the ladderlog so only fresh events are read below.
    open(os.path.join(options.vardir,"ladderlog.txt"),"w" ).close()
    print("[START] Starting server. Serverlog can be found in run/server.log")
    args=["--vardir",options.vardir, "--datadir",options.datadir, "--configdir",options.configdir, "--userdatadir",userdatadir, "--userconfigdir",userconfigdir]
    print("[START] Executable: "+options.server)
    t=Thread(None, target=runServerForever,args=([options.server]+args,options.debug) )
    t.daemon=True
    t.start()
    # Wait for the server process handle (module-level `p`) to appear.
    while(p==None):
        time.sleep(1)  # Give the server some time to start up
    atexit.register(exit)
    # Redirect stdout into the server process, read the ladderlog as stdin,
    # and send stderr to the real console (flushed).
    sys.stdout=OutputToProcess()
    if os.path.exists("debug.log"):
        os.remove("debug.log")
    sys.stdin=WatchFile(open(os.path.join(options.vardir,"ladderlog.txt"), encoding="latin-1" ) )
    sys.stdin.skipUnreadLines()
    sys.stderr=FlushFile(sys.__stdout__)
    t2=Thread(None, read_stdin)
    t2.daemon=True
    t2.start()
    sys.stderr.write("Reading commands from stdin.\n")
    Global.server_name=options.servername
    extensions.loadExtensions()
    sys.stderr.write("[START] Starting script.\n")
    sys.stderr.write("[START] Press ctrl+c or type /quit to exit.\n")
    sys.stderr.write("\n")
    sys.stderr.flush()
    reloaded=False
    # Main parser loop: run until quit; reload or restart on failure.
    while True:
        try:
            parser.main(debug=options.debug, disabledCommands=options.disabledCommands, reloaded=reloaded)
        except KeyboardInterrupt:
            break
        except SystemExit:
            break
        except Global.ReloadException:
            # Hot-reload the script modules and re-enter the parser.
            import tools
            tools.reload_script_modules()
            reloaded=True
            continue
        except Exception:
            # Any other crash: report it, shut the parser down quietly,
            # then restart after a short delay.
            sys.stderr.write("#####################################################################\n")
            sys.stderr.write("################## SCRIPT CRASHED ###################################\n")
            sys.stderr.write("#####################################################################\n")
            traceback.print_exc(file=sys.stderr)
            sys.stderr.write("#####################################################################\n")
            sys.stderr.flush()
            parser.exit(False, quiet=True)
            try:
                sys.stderr.write("Restarting in 3 seconds ... \n")
                sys.stderr.write("\n")
                time.sleep(3)
                reloaded=True
            except KeyboardInterrupt:
                break
            except:
                continue
            continue
        break
    exit()
# Driver script: scrape then parse one day at a time across the date range
# [a, b].  NOTE: Python 2 code (print statements).
from datetime import date
from dateutil.rrule import rrule, DAILY
import scraper
import parser

# Inclusive date range to process.
a = date(2013, 3, 4)
b = date(2013, 3, 5)

if __name__ == '__main__':
    for dt in rrule(DAILY, dtstart=a, until=b):
        current_date = dt.strftime("%Y/%m/%d")
        print 'Scraping ' + current_date
        # The scraper takes day-first dates; the parser takes Y/m/d —
        # presumably matching each tool's URL/file layout; confirm there.
        scraper.main(dt.strftime("%d/%m/%Y"))
        print 'Parsing ' + current_date
        parser.main(dt.strftime("%Y/%m/%d"))