def put(self):
    """
    Add or edit a job template.

    The whole request JSON is stored as the template body; the record is
    keyed by the 'template' field. Returns a status/message dict.
    """
    payload = request.json
    tpl_name = payload['template']
    existing = FcTemplate.query.filter_by(name=tpl_name).one_or_none()
    if existing is None:
        # no such template yet — create it
        existing = FcTemplate(name=tpl_name, template=payload)
        action = 'added'
    else:
        # overwrite the stored template body
        existing.template = payload
        action = 'changed'
    try:
        db.session.add(existing)
        db.session.commit()
    except exc.IntegrityError:
        db.session().rollback()
        abort(500, 'Couldn\'t add template.')
    return {
        'status': True,
        'message': 'Template ' + tpl_name + ' ' + action
    }
def post(self, id):
    """
    Runs a batch by starting the first job that is ready.

    Ensures every job in the batch has at least one host assigned, then
    flips the lowest-queue-position job with status 0 to 'running'.

    Fix: corrected the misspelled response body 'Sarted running'.
    """
    batch = FcBatch.query.filter_by(id=id).one_or_none()
    # only the creator may operate the batch unless the role allows all jobs
    if batch and not current_user.role.OPERATE_ALL_JOBS and batch.creator_id != current_user.id:
        abort(401, 'Unauthorized to operate this batch.')
    # check hosts: jobs without any host get every known host assigned
    hosts = [a[0] for a in db.session.query(Host.id).all()]
    for job in FcJob.query.filter_by(batch_id=id).all():
        if job.host_count == 0:
            for hostId in hosts:
                db.session.add(FcHostActivity(boinc_host_id=hostId, job_id=job.id))
    # first job in queue order that is still in the initial state (0)
    starter = FcJob.query.filter_by(batch_id=id).filter_by(status=0) \
        .order_by(FcJob.queue_position).first()
    if not starter:
        return ('Nothing to do', 200)
    starter.status = status_to_code['running']
    try:
        db.session.commit()
    except exc.IntegrityError:
        db.session().rollback()
        abort(500, 'Couldn\'t run batch, try manually starting the first job.')
    return ('Started running', 200)
def getFilesFromFolder(folder, DBmodel, processFunction):
    """
    Syncs files in *folder* with DB records of *DBmodel*.

    For every regular file with no matching (path, mtime) record, a new
    record is created, passed through *processFunction*, and committed.
    Returns all non-deleted records for the folder's files.
    """
    if not os.path.exists(folder):
        abort(500, 'Directory ' + folder + ' does not exist.')
    result = []
    for entry in os.listdir(folder):
        entryPath = os.path.join(folder, entry)
        if not os.path.isfile(entryPath):
            continue
        # mtime string is part of the lookup key, so edits create new records
        mtime = datetime.datetime.fromtimestamp(
            os.path.getmtime(entryPath)).strftime('%Y-%m-%d %H:%M:%S')
        record = DBmodel.query.filter_by(
            modification_time=mtime, path=entry).first()
        if not record:
            record = DBmodel(modification_time=mtime, path=entry, name=entry)
            record = processFunction(record)
            try:
                db.session.add(record)
                db.session.commit()
            except exc.IntegrityError:
                db.session().rollback()
                abort(500, 'Can\'t create record in DB with ' + entry + '.')
        if not record.deleted:
            result.append(record)
    return result
def post(self):
    """
    Uploads a dictionary file to the server.

    Expects a multipart form with a 'file' part, computes the hashcat
    keyspace of the uploaded dictionary and stores an FcDictionary record.
    (Original docstring was Czech: "Nahrava slovník na server".)

    Fixes: removed the unreachable `return redirect(request.url)` after
    `abort()` (abort raises, so it never executed); corrected the
    misspelled success message 'successfuly'.
    """
    # check if the post request has the file part
    if 'file' not in request.files:
        abort(500, 'No file part')
    file = request.files['file']
    # if user does not select a file, the browser may submit an
    # empty part without a filename
    if file.filename == '':
        abort(500, 'No selected file')
    uploadedFile = fileUpload(file, DICTIONARY_DIR, ALLOWED_EXTENSIONS)
    if not uploadedFile:
        abort(500, 'Wrong file format')
    # ask hashcat for the keyspace of a straight (-a 0) dictionary attack
    hc_keyspace = int(shellExec(
        HASHCAT_PATH + ' --keyspace -a 0 ' +
        os.path.join(DICTIONARY_DIR, uploadedFile['path']),
        cwd=HASHCAT_DIR))
    dictionary = FcDictionary(name=uploadedFile['filename'],
                              path=uploadedFile['path'],
                              keyspace=hc_keyspace)
    try:
        db.session.add(dictionary)
        db.session.commit()
    except exc.IntegrityError:
        db.session().rollback()
        abort(500, 'Dictionary with name ' + uploadedFile['filename'] + ' already exists.')
    return {
        'message': 'Dictionary ' + uploadedFile['filename'] + ' successfully uploaded.',
        'status': True
    }
def post(self):
    """
    Creates dictionaries from files already present on the server.

    For each file a timestamped symlink is created in DICTIONARY_DIR,
    the hashcat keyspace is computed and an FcDictionary row is stored.
    (Original docstring was Czech: "Spravi slovník so suboru".)

    Fix: the duplicate-name error message used `file['filename']`, but
    the parsed file dicts only carry 'name' and 'path' keys — reporting
    the error therefore raised KeyError. Changed to `file['name']`.
    """
    args = dictionaryFromFile_parser.parse_args(request)
    files = args.get('files')
    for file in files:
        if not allowed_file(file['name'], ALLOWED_EXTENSIONS):
            abort(500, 'Wrong file format ' + file['name'])
        # unique target name: stem + unix timestamp + original suffix
        newName = Path(file['name']).stem + '_' + str(int(time.time())) + Path(file['name']).suffix
        newPath = os.path.join(DICTIONARY_DIR, newName)
        os.symlink(file['path'], newPath)
        # hashcat keyspace for a straight (-a 0) attack with this dictionary
        hc_keyspace = int(shellExec(
            HASHCAT_PATH + ' --keyspace -a 0 ' + newPath, cwd=HASHCAT_DIR))
        dictionary = FcDictionary(name=file['name'], path=newName, keyspace=hc_keyspace)
        try:
            db.session.add(dictionary)
            db.session.commit()
        except exc.IntegrityError:
            db.session().rollback()
            abort(500, 'Dictionary with name ' + file['name'] + ' already exists.')
    return {
        'message': 'Dictionaries successfully uploaded.',
        'status': True
    }
def unpack(package):
    """
    Unpacks system data from exported package and saves to DB.

    Reads a msgpack-encoded export, verifies that every referenced
    dependency (dictionaries, rules, markov, pcfg, ...) is available,
    then recreates each exported job with its hashes, masks and
    dependency links, owned by the current user.

    Raises:
        ImportDependencyMissing: when any dependency is not present.
        exc.IntegrityError: re-raised after rollback if the commit fails.
    """
    contents = msgpack.unpack(package)
    # load deps and check for missing; `deps` maps export indices to
    # local DB objects (presumably — schema defined by dependency_check)
    deps, missing = dependency_check(contents['deps'])
    if len(missing) > 0:
        raise ImportDependencyMissing(missing)
    # start creating jobs
    for job in contents['jobs']:
        # transform directly stored objects back into model instances
        job['hashes'] = list(
            map(lambda h: recreate_hash(h, job['hash_type']), job['hashes']))
        if job.get('masks'):
            job['masks'] = list(map(recreate_mask, job['masks']))
        newjob = FcJob()
        # copy all plain exportable columns 1:1 (missing keys become None)
        for field in JOB_EXPORTABLE_COLUMNS:
            setattr(newjob, field, job.get(field))
        # manual labor now
        #
        # !!! If you added new DEPS to export, make sure to unpack them like these !!!
        #
        # single-valued deps are stored as 1-element index lists, hence [0]
        dep_rule = job.get('rulesFile')
        if dep_rule:
            newjob.rulesFile = deps[dep_rule[0]]
        dep_markov = job.get('markov')
        if dep_markov:
            newjob.markov = deps[dep_markov[0]]
        dep_left_dictionaries = job.get('left_dictionaries')
        if dep_left_dictionaries:
            for index in dep_left_dictionaries:
                jd = FcJobDictionary(is_left=1, dictionary=deps[index])
                newjob.left_dictionaries.append(jd)
        dep_right_dictionaries = job.get('right_dictionaries')
        if dep_right_dictionaries:
            for index in dep_right_dictionaries:
                jd = FcJobDictionary(is_left=0, dictionary=deps[index])
                newjob.right_dictionaries.append(jd)
        dep_pcfg = job.get('pcfg')
        if dep_pcfg:
            newjob.pcfg = deps[dep_pcfg[0]]
        # adding values for useless non-null fields (schema requires them)
        newjob.indexes_verified = 0
        newjob.current_index_2 = 0
        # add owner: importer gets full rights on the recreated job
        newjob.permission_records.append(
            FcUserPermission(user_id=current_user.id, view=1, modify=1,
                            operate=1, owner=1))
        # save
        db.session.add(newjob)
    # end for loop over jobs — single commit so the import is all-or-nothing
    try:
        db.session.commit()
    except exc.IntegrityError as e:
        db.session().rollback()
        raise e
def put(self, id):
    """
    Assigns jobs to a bin. Add jobs via include array, remove via exclude.

    Fix: the original dereferenced `bin.jobs` before checking whether the
    bin exists, so a missing bin raised AttributeError instead of the
    intended 404. The existence check now comes first.
    """
    data = request.json
    include = data.get('include', [])
    exclude = data.get('exclude', [])
    bin = FcBin.query.get(id)
    if bin is None:
        abort(404, 'No such bin')
    # materialise the dynamic relationship so we can manipulate it as a list
    bin.jobs = bin.jobs.all()
    # exclude if any
    bin.jobs = [job for job in bin.jobs if job.id not in exclude]
    # include if any
    jobs = FcJob.query.filter(FcJob.id.in_(include)).all()
    # remove first to prevent duplicates
    bin.jobs = [job for job in bin.jobs if job.id not in include]
    bin.jobs.extend(jobs)
    try:
        db.session.commit()
    except exc.IntegrityError:
        db.session().rollback()
        abort(500, 'Couldn\'t assign to bin.')
    return ('included {}, excluded {}'.format(include, exclude), 200)
def post(self):
    """
    Creates a PCFG from an existing dictionary.

    Builds the PCFG folder and grammar from the referenced dictionary,
    stores an FcPcfg record, then generates the grammar binary.
    """
    args = makePcfgFromDictionary_parser.parse_args(request)
    # `dictionary` avoids shadowing the builtin `dict`
    dictionary = FcDictionary.query.filter(
        FcDictionary.id == args['dictionary_id']).first()
    if not dictionary:
        abort(500, 'Can not find selected dictionary.')
    makePcfgFolder(dictionary.name)
    moveGrammarToPcfgDir(dictionary.name)
    pcfg_keyspace = calculateKeyspace(dictionary.name)
    pcfg = FcPcfg(
        name=extractNameFromZipfile(dictionary.name),
        path=extractNameFromZipfile(dictionary.name),
        keyspace=int(pcfg_keyspace))
    try:
        db.session.add(pcfg)
        db.session.commit()
    except exc.IntegrityError:
        db.session().rollback()
        abort(500, 'PCFG with name ' + extractNameFromZipfile(dictionary.name) + ' already exists.')
    createPcfgGrammarBin(dictionary.name)
    return {
        'message': 'PCFG ' + dictionary.name + ' successfully uploaded.',
        'status': True
    }
def post(self):
    """
    Builds an .hcstat2 (markov statistics) file from a dictionary.

    Generates the raw statistics with hcstat2gen, compresses them with
    raw-format xz, removes the temporary file and stores an FcHcstat row.
    """
    args = makeMarkovFromDictionary_parser.parse_args(request)
    sourceDict = FcDictionary.query.filter(
        FcDictionary.id == args['dictionary_id']).first()
    if not sourceDict:
        abort(500, 'Can not find selected dictionary.')
    filename = secure_filename(args['name'])
    path = os.path.join(HCSTATS_DIR, filename) + '.hcstat2'
    # generate the raw hcstat2 data into a temporary file
    shellExec(HASHCAT_UTILS_PATH + '/hcstat2gen.' + EXE_OR_BIN + ' ' + path +
              '_tmp < ' + os.path.join(DICTIONARY_DIR, sourceDict.name))
    # compress with raw xz stream, as hashcat expects
    shellExec('xz --compress --format=raw --stdout -9e ' + path + '_tmp > ' + path)
    # drop the uncompressed temporary
    os.remove(path + '_tmp')
    hcstats = FcHcstat(name=filename + '.hcstat2', path=path)
    try:
        db.session.add(hcstats)
        db.session.commit()
    except exc.IntegrityError:
        db.session().rollback()
        abort(500, 'HcStats with name ' + filename + ' already exists.')
    return {
        'status': True,
        'message': 'HcStat file with name ' + filename + ' created.'
    }
def put(self, id):
    """
    Assigns specified permissions on specified job to the user.

    Creates a zeroed permission record if the user has none for this job,
    then applies the view/modify/operate flags from the request.

    Fix: narrowed the bare `except:` (which also swallowed SystemExit and
    KeyboardInterrupt) to `except Exception`.
    """
    args = job_permissions_arguments.parse_args(request)
    user_id = args.get('user_id')
    record = FcUserPermission.query.filter_by(job_id=id).filter_by(
        user_id=user_id).one_or_none()
    if not record:
        record = FcUserPermission(job_id=id, user_id=user_id,
                                  view=0, modify=0, operate=0, owner=0)
        db.session.add(record)
    for perm in 'view', 'modify', 'operate':
        old = getattr(record, perm)
        new = args.get(perm)
        # absent/falsy argument keeps the old value.
        # NOTE(review): a falsy value can therefore never revoke a
        # permission through this endpoint — confirm this is intended.
        setattr(record, perm, old if not new else new)
    try:
        db.session.commit()
    except Exception:
        db.session().rollback()
        abort(500, 'Permission management failed.')
    return 'Permissions updated.', 200
def delete(self, id):
    """
    Deletes a role.

    Responds 400 when the role is still referenced by a user
    (foreign-key violation on delete).
    """
    target = FcRole.query.filter_by(id=id).one()
    try:
        db.session.delete(target)
        db.session.commit()
    except exc.IntegrityError:
        db.session().rollback()
        abort(400, 'Role is in use.')
    return {'status': True, 'message': 'Role deleted.'}
def delete(self, id):
    """
    Deletes a template by id.

    Uses a bulk query delete; deleting a non-existent id is a no-op.
    """
    try:
        FcTemplate.query.filter_by(id=id).delete()
        db.session.commit()
    except exc.IntegrityError:
        db.session().rollback()
        abort(500, 'Couldn\'t delete template.')
    return ('Deleted', 200)
def post(self):
    """
    Add a named job bin.

    Reads 'name' from the request JSON and creates an FcBin record.
    """
    # `newBin` avoids shadowing the builtin `bin`
    newBin = FcBin(name=request.json['name'])
    try:
        db.session.add(newBin)
        db.session.commit()
    except exc.IntegrityError:
        db.session().rollback()
        abort(500, 'Couldn\'t add bin.')
    return ('Created', 201)
def delete(self, id):
    """
    Deletes (unlinks) a batch.

    Only the batch creator may unlink it unless the caller's role grants
    EDIT_ALL_JOBS.
    """
    batch = FcBatch.query.filter_by(id=id).one_or_none()
    if batch and not current_user.role.EDIT_ALL_JOBS and batch.creator_id != current_user.id:
        abort(401, 'Unauthorized to unlink this batch.')
    try:
        FcBatch.query.filter_by(id=id).delete()
        db.session.commit()
    except exc.IntegrityError:
        db.session().rollback()
        abort(500, 'Couldn\'t delete batch.')
    return ('Deleted', 200)
def delete(self, id):
    """
    Deletes a role.

    Requires the MANAGE_USERS privilege; responds 400 when the role is
    still in use (foreign-key violation on delete).
    """
    if not current_user.role.MANAGE_USERS:
        abort(401, 'Unauthorized to change roles')
    target = FcRole.query.filter_by(id=id).one()
    try:
        db.session.delete(target)
        db.session.commit()
    except exc.IntegrityError:
        db.session().rollback()
        abort(400, 'Role is in use.')
    return {'status': True, 'message': 'Role deleted.'}
def post(self):
    """
    Uploads a password-protected file and extracts its hash.

    Expects a multipart form with a 'file' part; stores the file with a
    timestamped name, extracts the hash and hash type, and records an
    FcEncryptedFile row. (Original docstring was Czech: "Nahrava
    zaheslovaný subor na server".)

    Fixes: removed the unreachable `return redirect(request.url)` after
    `abort()`; renamed the builtin-shadowing local `hash`; corrected the
    success message typo 'hash form file' -> 'hash from file'.
    """
    # check if the post request has the file part
    if 'file' not in request.files:
        abort(500, 'No file part')
    file = request.files['file']
    # if user does not select a file, the browser may submit an
    # empty part without a filename
    if file.filename == '':
        abort(500, 'No selected file')
    uploadedFile = fileUpload(file, PROTECTEDFILES_DIR, ALLOWED_EXTENSIONS,
                              withTimestamp=True)
    if not uploadedFile:
        abort(500, 'We only support ' +
              ', '.join(str(x) for x in ALLOWED_EXTENSIONS) + '.')
    extracted = getHashFromFile(filename=uploadedFile['filename'],
                                path=uploadedFile['path'])
    encFile = FcEncryptedFile(name=uploadedFile['filename'],
                              path=uploadedFile['path'],
                              hash=extracted['hash'],
                              hash_type=extracted['hash_type'])
    try:
        db.session.add(encFile)
        db.session.commit()
    except exc.IntegrityError:
        db.session().rollback()
        abort(500, 'File with name ' + uploadedFile['filename'] + ' already exists.')
    return {
        'message': 'Successfully extracted hash from file.',
        'status': True,
        'hash': extracted['hash'],
        'hash_type': extracted['hash_type'],
        'hash_type_name': encFile.hash_type_name,
        'file_id': encFile.id
    }
def post(self):
    """
    Uploads a rule file on the server.

    Stores the file with a '.rule' suffix, counts the effective rules
    (non-blank, non-comment lines) and records an FcRule row.

    Fixes: the regex was a non-raw string with invalid escape sequences
    (`'\\s'`, `'\\#'`) — now a raw string; the inner loop variable `file`
    shadowed the uploaded file object — renamed; removed the unreachable
    `return redirect(request.url)` after `abort()`.
    """
    # check if the post request has the file part
    if 'file' not in request.files:
        abort(500, 'No file part')
    file = request.files['file']
    # if user does not select a file, the browser may submit an
    # empty part without a filename
    if file.filename == '':
        abort(500, 'No selected file')
    uploadedFile = fileUpload(file, RULE_DIR, ALLOWED_EXTENSIONS, suffix='.rule')
    if not uploadedFile:
        abort(500, 'Wrong file format')
    # count effective rules: skip blank lines and '#' comments
    rule_count = 0
    with open(os.path.join(RULE_DIR, uploadedFile['path']),
              encoding='latin-1') as ruleFile:
        for line in ruleFile:
            if re.match(r'^\s*(\#.*)?$', line) is None:
                rule_count += 1
    rule = FcRule(name=uploadedFile['filename'],
                  path=uploadedFile['path'],
                  count=rule_count)
    try:
        db.session.add(rule)
        db.session.commit()
    except exc.IntegrityError:
        db.session().rollback()
        abort(500, 'Rule with name ' + uploadedFile['filename'] + ' already exists.')
    return {
        'message': 'File ' + uploadedFile['filename'] + ' successfully uploaded.',
        'status': True
    }
def post(self):
    """
    Uploads a mask-set (.hcmask) file to the server.

    Validates every mask line before storing, then writes the normalised
    content with a timestamped name and records an FcMasksSet row.
    (Original docstring was Czech: "Nahrava mask subor na server".)

    Fixes: removed the unreachable `return redirect(request.url)` after
    `abort()`; corrected the success message typo 'successfuly'.
    """
    # check if the post request has the file part
    if 'file' not in request.files:
        abort(500, 'No file part')
    file = request.files['file']
    # if user does not select a file, the browser may submit an
    # empty part without a filename
    if file.filename == '':
        abort(500, 'No selected file')
    # validate every mask line before anything is written
    content = file.read().splitlines()
    for line in content:
        check_mask_syntax(line.decode("utf-8"))
    uploadedFile = fileUpload(
        file, MASKS_DIR, ALLOWED_EXTENSIONS,
        "".join(x.decode("utf-8") + "\n" for x in content),
        suffix='.hcmask', withTimestamp=True)
    if not uploadedFile:
        abort(500, 'Wrong file format')
    maskSet = FcMasksSet(name=uploadedFile['filename'], path=uploadedFile['path'])
    try:
        db.session.add(maskSet)
        db.session.commit()
    except exc.IntegrityError:
        db.session().rollback()
        abort(500, 'Masks set with name ' + uploadedFile['filename'] + ' already exists.')
    return {
        'message': 'File ' + uploadedFile['filename'] + ' successfully uploaded.',
        'status': True
    }
def post(self):
    """
    Saves a new server-utilisation sample into the fc_server_usage table.

    Fix: narrowed the bare `except:` (which also swallowed SystemExit and
    KeyboardInterrupt) to `except Exception`.
    """
    args = serverUsage_argument.parse_args(request)
    serverUsage = FcServerUsage(cpu=args['cpu'], ram=args['ram'],
                                net_recv=args['net_recv'],
                                net_sent=args['net_sent'],
                                hdd_read=args['hdd_read'],
                                hdd_write=args['hdd_write'])
    try:
        db.session.add(serverUsage)
        db.session.commit()
    except Exception:
        db.session().rollback()
        abort(500, 'An error occurred while saving new statistics data')
    return {'message': 'Usage data saved', 'status': True}
def post(self, id):
    """
    Interrupts a batch by stopping the jobs that are running.

    Every job of the batch with status >= 10 is moved to 'finishing';
    only the creator may operate the batch unless the caller's role
    grants OPERATE_ALL_JOBS.
    """
    batch = FcBatch.query.filter_by(id=id).one_or_none()
    if batch and not current_user.role.OPERATE_ALL_JOBS and batch.creator_id != current_user.id:
        abort(401, 'Unauthorized to operate this batch.')
    running = FcJob.query.filter_by(batch_id=id).filter(FcJob.status >= 10).all()
    if not running:
        return ('Nothing to do', 200)
    for job in running:
        job.status = status_to_code['finishing']
    try:
        db.session.commit()
    except exc.IntegrityError:
        db.session().rollback()
        abort(500, 'Couldn\'t interrupt batch, try manually purging the running job.')
    return ('Interrupted', 200)
def put(self):
    """
    Add or change a batch with job queue. Provide ID to update existing.

    Request JSON: {'id': optional, 'name': str, 'jobs': [job ids]}.
    The order of the 'jobs' array defines the queue positions.
    Returns the new batch id with 201, or ('Updated', 200) on edit.
    """
    data = request.json
    # sort out args
    id = data.get('id', None)
    name = data['name']
    jobs = data['jobs']
    is_new = False
    if id:
        query = FcBatch.query
        # users without global job rights may only touch their own batches
        if not current_user.role.OPERATE_ALL_JOBS or not current_user.role.VIEW_ALL_JOBS:
            query = query.filter_by(creator_id=current_user.id)
        batch = query.filter_by(id=id).one_or_none()
    # `not id` short-circuits, so `batch` is only read when it was bound above;
    # an unknown/unauthorized id silently falls through to creating a new batch
    if not id or not batch:
        # new batch
        batch = FcBatch()
        is_new = True
    batch.name = name
    batch.jobs = FcJob.query.filter(FcJob.id.in_(jobs)).all()
    if is_new:
        batch.creator_id = current_user.id
    # queue order: position equals the index in the submitted jobs array
    for index, job_id in enumerate(jobs):
        job = FcJob.query.filter_by(id=job_id).one_or_none()
        if job:
            job.queue_position = index
    # save batch
    try:
        if is_new:
            db.session.add(batch)
        db.session.commit()
    except exc.IntegrityError as e:
        db.session().rollback()
        abort(500, 'Couldn\'t create batch.')
    return (batch.id, 201) if not id else ('Updated', 200)
def post(self):
    """
    Uploads a charset (.hcchr) file on the server.

    Stores the file and records an FcCharset row whose keyspace is the
    charset's size in bytes.

    Fix: removed the unreachable `return redirect(request.url)` after
    `abort()` (abort raises, so it never executed).
    """
    # check if the post request has the file part
    if 'file' not in request.files:
        abort(500, 'No file part')
    file = request.files['file']
    # if user does not select a file, the browser may submit an
    # empty part without a filename
    if file.filename == '':
        abort(500, 'No selected file')
    uploadedFile = fileUpload(file, CHARSET_DIR, ALLOWED_EXTENSIONS, suffix='.hcchr')
    if not uploadedFile:
        abort(500, 'Wrong file format')
    # keyspace of a charset is its size in bytes
    size = os.path.getsize(os.path.join(CHARSET_DIR, uploadedFile['path']))
    charset = FcCharset(name=uploadedFile['stem'],
                        path=uploadedFile['path'],
                        keyspace=size)
    try:
        db.session.add(charset)
        db.session.commit()
    except exc.IntegrityError:
        db.session().rollback()
        abort(500, 'Charset with name ' + uploadedFile['filename'] + ' already exists.')
    return {
        'message': 'File ' + uploadedFile['filename'] + ' successfully uploaded.',
        'status': True
    }
def post(self):
    """
    Uploads a PCFG archive on the server.

    Unzips the grammar into the PCFG folder, computes its keyspace,
    records an FcPcfg row and builds the grammar binary.

    Fix: removed the unreachable `return redirect(request.url)` after
    `abort()` (abort raises, so it never executed).
    """
    # check if the post request has the file part
    if 'file' not in request.files:
        abort(500, 'No file part')
    file = request.files['file']
    # if user does not select a file, the browser may submit an
    # empty part without a filename
    if file.filename == '':
        abort(500, 'No selected file')
    uploadedFile = fileUpload(file, PCFG_DIR, ALLOWED_EXTENSIONS)
    if not uploadedFile:
        abort(500, 'Wrong file format')
    unzipGrammarToPcfgFolder(uploadedFile['filename'])
    pcfg_keyspace = calculateKeyspace(uploadedFile['filename'])
    pcfg = FcPcfg(
        name=extractNameFromZipfile(uploadedFile['filename']),
        path=uploadedFile['path'],
        keyspace=int(pcfg_keyspace))
    try:
        db.session.add(pcfg)
        db.session.commit()
    except exc.IntegrityError:
        db.session().rollback()
        abort(500, 'PCFG with name ' + uploadedFile['filename'] + ' already exists.')
    createPcfgGrammarBin(uploadedFile['filename'])
    return {
        'message': 'PCFG ' + uploadedFile['filename'] + ' successfully uploaded.',
        'status': True
    }
def post(self):
    """
    Uploads an .hcstat2 (markov statistics) file to the server.

    Stores the file with a timestamped name and records an FcHcstat row.
    (Original docstring was Czech: "Nahrava hcstats na server".)

    Fixes: removed the unreachable `return redirect(request.url)` after
    `abort()`; corrected the success message typo 'successfuly'.
    """
    # check if the post request has the file part
    if 'file' not in request.files:
        abort(500, 'No file part')
    file = request.files['file']
    # if user does not select a file, the browser may submit an
    # empty part without a filename
    if file.filename == '':
        abort(500, 'No selected file')
    uploadedFile = fileUpload(file, HCSTATS_DIR, ALLOWED_EXTENSIONS,
                              suffix='.hcstat2', withTimestamp=True)
    if not uploadedFile:
        abort(500, 'Wrong file format')
    hcstats = FcHcstat(name=uploadedFile['filename'], path=uploadedFile['path'])
    try:
        db.session.add(hcstats)
        db.session.commit()
    except exc.IntegrityError:
        db.session().rollback()
        abort(500, 'HcStats with name ' + uploadedFile['filename'] + ' already exists.')
    return {
        'message': 'File ' + uploadedFile['filename'] + ' successfully uploaded.',
        'status': True
    }
def put(self, id):
    """
    Changes a created job.

    Updates name, comment, workunit time and start/end times; for PRINCE
    jobs recomputes the keyspace when any keyspace-affecting option
    changed, validating against the DB bigint limit.

    Fixes: the `job.time_start = ...` assignment ended with a stray
    trailing comma, which wrapped the datetime in a 1-tuple instead of
    storing the datetime itself; the keyspace error message claimed the
    limit is 2^64 while the check is against 2^63 - 1 (signed bigint).
    """
    if not current_user.role.EDIT_ALL_JOBS and not can_edit_job(id):
        abort(401, 'Unauthorized access to job.')
    args = editJob_argument.parse_args(request)
    job = FcJob.query.filter(FcJob.id == id).one()
    # normalise empty time strings to None (means "not set")
    if args['time_start'] == '':
        args['time_start'] = None
    if args['time_end'] == '':
        args['time_end'] = None
    dicts = job.left_dictionaries
    if dicts:
        # mirror the job's current left dictionaries back into args
        args['left_dictionaries'] = []
        for dct in dicts:
            d = {}
            d['name'] = dct.dictionary.name
            args['left_dictionaries'].append(d)
    rule_file = job.rulesFile
    job.name = args['name']
    job.comment = args['comment']
    job.seconds_per_workunit = args['seconds_per_job']
    settings = FcSetting.query.first()
    if job.seconds_per_workunit < settings.t_pmin:
        abort(
            400,
            'Value for \"Time for workunit\" is smaller than absolute minimum seconds per workunit ({}).'
            .format(settings.t_pmin))
    # BUG FIX: no trailing comma here — the original stored a 1-tuple
    job.time_start = None if not args[
        'time_start'] else datetime.datetime.strptime(
            args['time_start'], '%Y-%m-%dT%H:%M')
    job.time_end = None if not args[
        'time_end'] else datetime.datetime.strptime(
            args['time_end'], '%Y-%m-%dT%H:%M')
    if job.attack_mode == attack_modes['prince']:
        # any of these options changes the PRINCE keyspace
        opts_changing_keyspace = [
            "case_permute", "check_duplicates", "min_password_len",
            "max_password_len", "min_elem_in_chain", "max_elem_in_chain"
        ]
        if any(
                getattr(job, option) != args[option]
                for option in opts_changing_keyspace):
            # Recompute new keyspace/hc_keyspace
            new_hc_keyspace = compute_prince_keyspace(args)
            if new_hc_keyspace == -1:
                abort(
                    400, 'Unable to compute new job keyspace.\nJob \"' +
                    job.name + '\" was not edited.')
        else:
            new_hc_keyspace = job.hc_keyspace
        # rules multiply the keyspace: random rules or an attached rule file
        random_rules_count = 0
        if args['generate_random_rules']:
            random_rules_count = int(args['generate_random_rules'])
        ruleFileMultiplier = random_rules_count
        if rule_file:
            ruleFileMultiplier = rule_file.count
        new_keyspace = new_hc_keyspace
        if ruleFileMultiplier != 0:
            new_keyspace = new_hc_keyspace * ruleFileMultiplier
        if new_keyspace > (2**63) - 1:  # due db's signed bigint range
            abort(
                400,
                'Job keyspace is higher than maximal allowed value 2^63.')
        job.hc_keyspace = new_hc_keyspace
        job.keyspace = new_keyspace
        # Prince settings
        job.case_permute = args['case_permute']
        job.check_duplicates = args['check_duplicates']
        job.min_password_len = args['min_password_len']
        job.max_password_len = args['max_password_len']
        job.min_elem_in_chain = args['min_elem_in_chain']
        job.max_elem_in_chain = args['max_elem_in_chain']
        job.generate_random_rules = args['generate_random_rules']
    try:
        db.session.commit()
        return {
            'status': True,
            'message': 'Job \"' + job.name + '\" edited.'
        }
    except Exception:
        db.session().rollback()
        abort(
            400,
            'Unable to edit this job. Please check if new settings are correct.'
        )