def send_email(sender_email, password, student_email, student_code):
    """Email a student's certificate and declaration files.

    Builds a multipart message (the subject/body come from module-level
    globals), attaches both documents, and sends it through Gmail's SSL
    SMTP endpoint.  The sender address is also listed as a recipient so
    the sender keeps a blind copy.  Nothing is sent when either file is
    missing or invalid.

    Args:
        sender_email: Gmail address used to authenticate and send.
        password: Password (or app password) for the sender account.
        student_email: Recipient address.
        student_code: Code used to locate the student's files on disk.
    """
    folder = util.get_path(student_code)
    cert_name = util.get_certificate(student_code)
    decl_name = util.get_declaration(student_code)
    have_cert = util.is_valid_file(folder, cert_name)
    have_decl = util.is_valid_file(folder, decl_name)
    if not (have_cert and have_decl):
        return
    message = MIMEMultipart()
    message["From"] = sender_email
    message["To"] = student_email
    message["Subject"] = subject
    message.attach(MIMEText(body, "plain"))
    # Attach the certificate first, then the declaration (same order as before).
    for attachment_name in (cert_name, decl_name):
        message.attach(add_file(folder, attachment_name))
    text = message.as_string()
    print('Sending from %s to %s' % (sender_email, student_email))
    sleep(5)  # pause between sends — presumably to avoid Gmail rate limits; confirm
    context = ssl.create_default_context()
    with smtplib.SMTP_SSL("smtp.gmail.com", 465, context=context) as server:
        server.login(sender_email, password)
        # sender_email is included in the recipient list as a Bcc copy.
        server.sendmail(sender_email, [student_email, sender_email], text)
    return
def test_files(student_name, student_code):
    """Check that a student's certificate and declaration files are valid.

    Prints a per-student progress header followed by an OK or failure
    line.  Previously the function printed nothing at all on failure,
    making a failed check indistinguishable from a skipped one.

    Args:
        student_name: Display name used in the progress output.
        student_code: Numeric code used to locate the student's files.

    Returns:
        True when both files are valid, False otherwise.  (The original
        returned None; callers that ignore the return value are
        unaffected.)
    """
    path = util.get_path(student_code)
    filename_certificate = util.get_certificate(student_code)
    filename_declaration = util.get_declaration(student_code)
    print('\n-----\nProcessing %d (%s)...' % (student_code, student_name))
    ok = (util.is_valid_file(path, filename_certificate)
          and util.is_valid_file(path, filename_declaration))
    if ok:
        print(f"{bcolors.OKBLUE}OK files.{bcolors.ENDC}")
    else:
        # Explicit failure message so batch runs are auditable.
        print('Missing or invalid files.')
    return ok
def parse_args():
    """Build and parse the command line for the growing-season stats tool.

    Returns:
        The parsed argparse namespace; a relative ``--ini`` path is
        normalized to an absolute one when the file exists.
    """
    parser = argparse.ArgumentParser(
        description='Compute Growing Season Statistics',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        '-i', '--ini', metavar='PATH',
        type=lambda x: util.is_valid_file(parser, x), help='Input file')
    parser.add_argument(
        '--start', metavar='DATE', default=None, type=util.valid_date,
        help='Start date (format YYYY-MM-DD)')
    parser.add_argument(
        '--end', metavar='DATE', default=None, type=util.valid_date,
        help='End date (format YYYY-MM-DD)')
    parser.add_argument(
        '-c', '--crops', default='', type=str,
        help='Comma separate list or range of crops to compare')
    parser.add_argument(
        '--debug', dest='loglevel', action='store_const',
        const=logging.DEBUG, default=logging.INFO,
        help='Debug level logging')
    args = parser.parse_args()
    # Normalize a relative INI path to an absolute one.
    if args.ini:
        ini_path = os.path.abspath(args.ini)
        if os.path.isfile(ini_path):
            args.ini = ini_path
    return args
def arg_parse():
    """Parse command-line arguments for the annual stat shapefiles tool.

    Bug fix: ``--overwrite`` previously combined ``default=True`` with
    ``action='store_true'``, so the flag was a no-op (the value could
    never be anything but True).  The default is now False, consistent
    with the other scripts in this project; pass ``-o`` to enable
    overwriting.

    Returns:
        The parsed argparse namespace.
    """
    parser = argparse.ArgumentParser(
        description='ET-Demands Annual Stat Shapefiles',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-i', '--ini', metavar='PATH',
                        type=lambda x: util.is_valid_file(parser, x),
                        help='Input file')
    parser.add_argument('-o', '--overwrite', default=False,
                        action='store_true',
                        help='Overwrite existing file')
    parser.add_argument('--clean', default=False, action='store_true',
                        help='Remove temporary datasets')
    parser.add_argument('-y', '--year', default='', type=str,
                        help='Years, comma separate list and/or range')
    # parser.add_argument(
    #     '--growing_season', default=False, action='store_true',
    #     help='Run statistics on April through October dataset')
    parser.add_argument('--debug', default=logging.INFO, const=logging.DEBUG,
                        help='Debug level logging', action="store_const",
                        dest="loglevel")
    args = parser.parse_args()
    return args
def arg_parse():
    """Parse command-line arguments for the effective PPT tool.

    Bug fix: the default ``--time_agg`` value ('monthly') was missing
    from ``choices``, so explicitly passing ``-t monthly`` raised an
    argparse error even though the help text documents it as a valid
    option and it is the default.

    Returns:
        The parsed argparse namespace.
    """
    parser = argparse.ArgumentParser(
        description='ET Demands Effective PPT',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-i', '--ini', metavar='PATH',
                        type=lambda x: util.is_valid_file(parser, x),
                        help='Input file')
    parser.add_argument(
        '-t', '--time_agg', default='monthly',
        choices=['monthly', 'annual', 'wateryear'], type=str,
        help='Data output options. monthly, annual, or wateryear.')
    parser.add_argument(
        '-y', '--year', default='', type=str,
        help='Years to include, single year (YYYY) or range (YYYY-YYYY)')
    parser.add_argument('--debug', default=logging.INFO, const=logging.DEBUG,
                        help='Debug level logging', action="store_const",
                        dest="loglevel")
    args = parser.parse_args()
    return args
def parse_args():
    """Parse command-line arguments for updating MeanCuttings.txt.

    Returns:
        The parsed argparse namespace; a relative ``--ini`` path is
        normalized to an absolute one when the file exists.
    """
    parser = argparse.ArgumentParser(
        description='Update MeanCuttings.txt',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        '-i', '--ini', metavar='PATH',
        type=lambda x: util.is_valid_file(parser, x), help='Input file')
    parser.add_argument(
        '--start_yr', metavar='YEAR', default=None, type=int,
        help='Start Year (format YYYY)')
    parser.add_argument(
        '--end_yr', metavar='YEAR', default=None, type=int,
        help='End Year (YYYY)')
    parser.add_argument(
        '--debug', dest='loglevel', action='store_const',
        const=logging.DEBUG, default=logging.INFO,
        help='Debug level logging')
    args = parser.parse_args()
    # Normalize a relative INI path to an absolute one.
    if args.ini:
        ini_path = os.path.abspath(args.ini)
        if os.path.isfile(ini_path):
            args.ini = ini_path
    return args
def arg_parse():
    """Parse command-line arguments for the annual stat shapefiles tool.

    Bug fix: ``--overwrite`` previously combined ``default=True`` with
    ``action='store_true'``, making the flag a no-op (always True).
    The default is now False; pass ``-o`` to enable overwriting.

    Returns:
        The parsed argparse namespace.
    """
    parser = argparse.ArgumentParser(
        description='ET-Demands Annual Stat Shapefiles',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        '-i', '--ini', metavar='PATH',
        type=lambda x: util.is_valid_file(parser, x),
        help='Input file')
    parser.add_argument(
        '-o', '--overwrite', default=False, action='store_true',
        help='Overwrite existing file')
    parser.add_argument(
        '--clean', default=False, action='store_true',
        help='Remove temporary datasets')
    parser.add_argument(
        '-y', '--year', type=int,
        help='Year of interest (single year)')
    parser.add_argument(
        '-gs', '--growing_season', default=False, action='store_true',
        help='Growing Season Flag, Include only April-October Data')
    parser.add_argument(
        '--debug', default=logging.INFO, const=logging.DEBUG,
        help='Debug level logging', action="store_const", dest="loglevel")
    args = parser.parse_args()
    return args
def parse_args():
    """Parse command-line arguments for the crop daily timeseries plotter.

    Bug fix: the ``--no_save`` and ``--show`` help strings were crossed
    ("save ... in browser" / "Show ... to disk"); they now read
    "save ... to disk" / "Show ... in browser".

    Returns:
        The parsed argparse namespace; a relative ``--ini`` path is
        normalized to an absolute one when the file exists.
    """
    parser = argparse.ArgumentParser(
        description='Plot Crop Daily Timeseries',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-i', '--ini', metavar='PATH',
                        type=lambda x: util.is_valid_file(parser, x),
                        help='Input file')
    parser.add_argument('--size', default=(1000, 300), type=int, nargs=2,
                        metavar=('WIDTH', 'HEIGHT'),
                        help='Figure size in pixels')
    # NOTE: store_false means args.no_save is True unless the flag is
    # given, i.e. figures are saved by default.
    parser.add_argument('--no_save', default=True, action='store_false',
                        help='Don\'t save timeseries figures to disk')
    parser.add_argument('--show', default=False, action='store_true',
                        help='Show timeseries figures in browser')
    parser.add_argument('--start', default=None, type=util.valid_date,
                        help='Start date (format YYYY-MM-DD)', metavar='DATE')
    parser.add_argument('--end', default=None, type=util.valid_date,
                        help='End date (format YYYY-MM-DD)', metavar='DATE')
    parser.add_argument(
        '-c', '--crops', default='', type=str,
        help='Comma separate list or range of crops to compare')
    parser.add_argument('-o', '--overwrite', default=None,
                        action="store_true",
                        help='Force overwrite of existing files')
    parser.add_argument('--debug', default=logging.INFO, const=logging.DEBUG,
                        help='Debug level logging', action="store_const",
                        dest="loglevel")
    args = parser.parse_args()
    # Convert relative paths to absolute paths
    if args.ini and os.path.isfile(os.path.abspath(args.ini)):
        args.ini = os.path.abspath(args.ini)
    return args
def arg_parse():
    """Parse command-line arguments for spatial crop parameter generation.

    Returns:
        The parsed argparse namespace.
    """
    zone_choices = ('huc8', 'huc10', 'county')
    parser = argparse.ArgumentParser(
        description='ET-Demands Spatial Crop Parameters',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        '-i', '--ini', metavar='PATH',
        type=lambda x: util.is_valid_file(parser, x), help='Input file')
    parser.add_argument(
        '--zone', metavar='', default='county', type=str,
        choices=zone_choices,
        help='Zone type [{}]'.format(', '.join(zone_choices)))
    parser.add_argument(
        '--area', default=10, type=float,
        help='Crop area threshold [acres]')
    parser.add_argument(
        '--dairy', default=5, type=int,
        help='Number of dairy hay cuttings')
    parser.add_argument(
        '--beef', default=4, type=int,
        help='Number of beef hay cuttings')
    parser.add_argument(
        '--empty', default=False, action='store_true',
        help='Remove empty features')
    parser.add_argument(
        '-c', '--crops', default='', type=str,
        help='Comma separate list or range of crops to compare')
    parser.add_argument(
        '-o', '--overwrite', default=False, action='store_true',
        help='Overwrite existing file')
    parser.add_argument(
        '--clean', default=False, action='store_true',
        help='Remove temporary datasets')
    parser.add_argument(
        '--debug', dest='loglevel', action='store_const',
        const=logging.DEBUG, default=logging.INFO,
        help='Debug level logging')
    return parser.parse_args()
def write_header(self, file_name):
    """Create the results file and write the report header into it.

    Writes tool/version, generation date, target, the resolved IP (only
    when the target is not a local file), and processing statistics
    pulled from the ``URLHarvestResults.target`` list.

    Args:
        file_name: Path of the results file to create.

    Returns:
        The open file object (the caller is responsible for closing it),
        or None when the file could not be created.
    """
    try:
        results_file = open(file_name, 'w')
    except FileNotFoundError:
        util.print_error('Could not create results file: ' + file_name)
        return None
    results_file.writelines(
        'URLHarvest v1.0' + ' by JadeScout\nDate generated: '
        + str(datetime.now()) + '\nTarget: '
        + URLHarvestResults.target[0] + '\n')
    # Only network targets have a resolved IP address to report.
    if not util.is_valid_file(URLHarvestResults.target[0]):
        results_file.writelines(
            'IP Address: ' + URLHarvestResults.target[1] + '\n')
    results_file.writelines(
        'URLs Processed: '
        + URLHarvestResults.target[URLHarvestResults.urls_processed]
        + '\nExtraction Time: '
        + URLHarvestResults.target[URLHarvestResults.extraction_time]
        + ' seconds\n\n')
    return results_file
def arg_parse():
    """Parse command-line arguments for the annual stat shapefiles tool.

    Returns:
        The parsed argparse namespace.
    """
    parser = argparse.ArgumentParser(
        description='ET-Demands Annual Stat Shapefiles',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        '-i', '--ini', metavar='PATH',
        type=lambda x: util.is_valid_file(parser, x), help='Input file')
    parser.add_argument(
        '-t', '--time_filter', default='annual', type=str,
        choices=['annual', 'growing_season', 'doy', 'wateryear'],
        help='Data coverage options. If "doy", -start_doy and'
             ' -end_doy required.')
    parser.add_argument(
        '-s', '--start_doy', type=int,
        help='Starting julian doy (inclusive)')
    parser.add_argument(
        '-e', '--end_doy', type=int,
        help='Ending julian doy (inclusive)')
    parser.add_argument(
        '-y', '--year', default='', type=str,
        help='Years to include, single year (YYYY) or range (YYYY-YYYY)')
    parser.add_argument(
        '--debug', dest='loglevel', action='store_const',
        const=logging.DEBUG, default=logging.INFO,
        help='Debug level logging')
    return parser.parse_args()
def parse_args():
    """Parse command-line arguments for the crop summary map plotter.

    Returns:
        The parsed argparse namespace; a relative ``--ini`` path is
        normalized to an absolute one when the file exists.
    """
    parser = argparse.ArgumentParser(
        description='Plot Crop Summary Maps',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        '-i', '--ini', metavar='PATH',
        type=lambda x: util.is_valid_file(parser, x), help='Input file')
    parser.add_argument(
        '--size', metavar=('WIDTH', 'HEIGHT'), default=(12, 12),
        type=float, nargs=2, help='Figure size in inches')
    parser.add_argument(
        '--dpi', metavar='PIXELS', default=300, type=int,
        help='Figure dots per square inch')
    # store_false: args.no_save stays True unless the flag is given.
    parser.add_argument(
        '--no_save', default=True, action='store_false',
        help='Don\'t save maps to disk')
    parser.add_argument(
        '--show', default=False, action='store_true',
        help='Display maps as they are generated')
    parser.add_argument(
        '--label', default=False, action='store_true',
        help='Label maps with zone values')
    parser.add_argument(
        '--start', metavar='DATE', default=None, type=util.valid_date,
        help='Start date (format YYYY-MM-DD)')
    parser.add_argument(
        '--end', metavar='DATE', default=None, type=util.valid_date,
        help='End date (format YYYY-MM-DD)')
    parser.add_argument(
        '-c', '--crops', default='', type=str,
        help='Comma separate list or range of crops to compare')
    parser.add_argument(
        '--simp', default=None, type=float,
        help='Shapely simplify tolerance (units same as ET Cell)')
    parser.add_argument(
        '--area', default=None, type=float,
        help='Crop area threshold [acres]')
    parser.add_argument(
        '--debug', dest='loglevel', action='store_const',
        const=logging.DEBUG, default=logging.INFO,
        help='Debug level logging')
    args = parser.parse_args()
    # Normalize a relative INI path to an absolute one.
    if args.ini:
        ini_path = os.path.abspath(args.ini)
        if os.path.isfile(ini_path):
            args.ini = ini_path
    return args
time.gmtime(elapsed.total_seconds())) ms = elapsed.microseconds / 1000 log.info("Retrieving semantics took %s %s milliseconds.", time_str, ms) if result == util.EXIT_SUCCESS: for pipe_name, pipe_val in package.get_pipes().items(): print_z3_data(pipe_name, pipe_val) return result if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument("-i", "--p4_input", dest="p4_input", default=None, type=lambda x: util.is_valid_file(parser, x), help="The main input p4 file. This can either be a P4" " program or the Python ToZ3 IR.") parser.add_argument("-o", "--out_dir", dest="out_dir", default=OUT_DIR, help="Where intermediate output is stored.") parser.add_argument("-l", "--log_file", dest="log_file", default="semantics.log", help="Specifies name of the log file.") parser.add_argument( "-ll", "--log_level",
def start_extractor():
    """CLI entry point: parse arguments, load HTML, and extract URLs.

    The positional target may be a local HTML file or a URL.  Extracted
    ``<a>``, ``<area>`` and ``<link>`` URLs are optionally filtered by
    keystring, printed to the console, and optionally written to a
    results file.

    Bug fixes relative to the original:
      * the "Connecting to ..." message read ``results.target[1]``
        *before* it was assigned, printing a stale placeholder — the IP
        is now resolved first;
      * a stray debug ``print`` of the trimmed hostname was removed;
      * the HTML file/connection handle is now closed after reading.
    """
    parser = argparse.ArgumentParser(
        description=
        'URLHarvest is a simple Web scraper that extracts URLs from webpages.')
    parser.add_argument('-a', '--all', help='print all URLs found',
                        action='store_true')
    parser.add_argument(
        '-k', '--keystring',
        help=
        'print only the URL entries whose URLs or attributes contain KEYSTRING'
    )
    parser.add_argument(
        '-l', '--limit',
        help='maximum number of URL entries to print. default = 200',
        default=200, type=int)
    parser.add_argument('-p', '--print', help='print results to .txt file',
                        action='store_true')
    parser.add_argument('target', help='target URL or path to target file')
    args = parser.parse_args()
    # Without a keystring filter, default to printing everything.
    if not args.keystring:
        args.all = True

    # Initialize the results lists
    results = core.URLHarvestResults()
    results.target[0] = args.target
    if util.is_valid_file(args.target):
        try:
            with open(args.target, 'r') as html:
                content = html.read()
        except FileNotFoundError:
            util.print_error('File not found')
            sys.exit(1)
    else:
        if not util.is_valid_url(args.target):
            sys.exit(1)
        # Resolve and store the target's IP address *before* printing it.
        host = args.target[args.target.find('.', 1) + 1:]
        results.target[1] = socket.gethostbyname(host)
        util.print_info('Connecting to ' + args.target + ' ('
                        + results.target[1] + ')')
        html = urlopen(args.target)
        try:
            content = html.read()
        finally:
            html.close()

    soup = BeautifulSoup(content, 'lxml')
    extractor = core.Extractor(soup, results.url_list)
    start_time = time.time()
    extractor.extract('a')
    extractor.extract('area')
    extractor.extract('link')
    end_time = time.time()

    if args.keystring:
        sorter = core.URLListSorter(results.url_list)
        sorter.keyword_sort(args.keystring)

    console_output_handler = core.ConsoleOutputHandler(args, results.url_list)
    console_output_handler.print()
    core.report_tool_performance(results.url_list, start_time, end_time)
    if args.print:
        file_output_handler = core.FileOutputHandler(args.target,
                                                     results.url_list)
        file_output_handler.create_file()