def main():
    """Initialise the logger, the Telegram bot and the shelve storage,
    then process mail and Telegram messages.

    Side effects: configures the 'tm_monitor' logger, assigns the
    module-level globals ``bot`` and ``db``.
    """
    global bot, db
    logger = logging.getLogger('tm_monitor')
    logger.setLevel(logging.INFO)
    formatter = logging.Formatter('[%(asctime)s] %(levelname)-8s %(filename)s[LINE:%(lineno)d]# %(message)s')
    handler = DiffFileHandler()
    handler.setLevel(logging.INFO)
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    utils.log = logger
    token = get_options('main', 'token')
    if not token:
        log.critical('В файле настроек токен бота не найден')
        return
    bot = TeleBot(token)
    db = shelve.open('data')
    try:
        telegram_processing()
        mail_processing()
    finally:
        # ROBUSTNESS FIX: close the shelve even when processing raises;
        # the original left it open on any exception.
        db.close()
def run(self, target=None, tid=None):
    """Count lines of code in *target* with ``cloc`` and store the total
    on the CobraTaskInfo row identified by *tid*.

    Exits the process when either parameter is missing.
    """
    if target is None:
        log.critical("Please set --target param")
        sys.exit()
    if tid is None:
        log.critical("Please set --tid param")
        sys.exit()
    # Statistic Code
    p = subprocess.Popen(['cloc', target], stdout=subprocess.PIPE)
    (output, err) = p.communicate()
    rs = output.split("\n")
    for r in rs:
        r_e = r.split()
        # cloc summary row: "SUM: files blank comment code" (5 fields).
        # BUG FIX: guard was ``len(r_e) > 3`` although index 4 is read
        # below, which could raise IndexError on a 4-field line.
        if len(r_e) >= 5 and r_e[0] == 'SUM:':
            t = CobraTaskInfo.query.filter_by(id=tid).first()
            if t is not None:
                t.code_number = r_e[4]
                try:
                    db.session.add(t)
                    db.session.commit()
                    log.info("Statistic code number done")
                except Exception as e:
                    # BUG FIX: ``e.message`` is deprecated/absent in Py3.
                    log.error("Statistic code number failed" + str(e))
def run(self, target=None, tid=None):
    """Count lines of code in *target* with ``cloc`` and store the total
    on the CobraTaskInfo row identified by *tid*.

    Exits the process when either parameter is missing.
    """
    if target is None:
        log.critical("Please set --target param")
        sys.exit()
    if tid is None:
        log.critical("Please set --tid param")
        sys.exit()
    # Statistic Code
    p = subprocess.Popen(['cloc', target], stdout=subprocess.PIPE)
    (output, err) = p.communicate()
    rs = output.split("\n")
    for r in rs:
        r_e = r.split()
        # cloc summary row: "SUM: files blank comment code" (5 fields).
        # BUG FIX: guard was ``len(r_e) > 3`` although index 4 is read
        # below, which could raise IndexError on a 4-field line.
        if len(r_e) >= 5 and r_e[0] == 'SUM:':
            t = CobraTaskInfo.query.filter_by(id=tid).first()
            if t is not None:
                t.code_number = r_e[4]
                try:
                    db.session.add(t)
                    db.session.commit()
                    log.info("Statistic code number done")
                except Exception as e:
                    # BUG FIX: ``e.message`` is deprecated/absent in Py3.
                    log.error("Statistic code number failed" + str(e))
def load_page_by_url(self, full_url, sleep_time=10):
    """Fetch *full_url* with the instance session, waiting for connectivity.

    While offline it retries: five short waits of *sleep_time* seconds,
    then five long waits (``sleep_time * 180``), then terminates the
    process.  Connection errors return ``None``; a timeout triggers one
    recursive retry.
    """
    count_retry = 1
    try:
        while not utils.is_connected():
            # IDIOM FIX: the original used ``count_retry in range(6)`` /
            # ``in range(11)`` — membership tests standing in for plain
            # integer comparisons.
            if count_retry <= 5:
                log.warn("NO INTERNET, Short retry [{0}/5], Next try -> {1} sec".format(count_retry, sleep_time))
                time.sleep(sleep_time)
            elif count_retry <= 10:
                long_sleep_time = sleep_time * 180
                log.warn(
                    "NO INTERNET, Long retry [{0}/5], Next try -> {1} sec".format(count_retry - 5, long_sleep_time))
                time.sleep(long_sleep_time)
            else:
                log.critical("OOPS!! Error. Make sure you are connected to Internet and restart script.")
                sys.exit(0)
            count_retry += 1
        return self.session.get(full_url, allow_redirects=True, timeout=20,
                                headers={'User-Agent': self.user_agent})
    except requests.ConnectionError as e:
        log.warn(e)
        return
    except requests.Timeout as e:
        log.warn(e)
        # BUG FIX: the retry previously dropped a caller-supplied
        # *sleep_time* and fell back to the default.
        return self.load_page_by_url(full_url, sleep_time)
def create_zip(archive, files):
    '''Create a zip file containing the files being backed up.

    Each path in *files* is stripped and added when it exists; a hash of
    every added file is recorded via ``add_file_hash``.  Corrupt or
    oversized archives are logged as critical.
    '''
    import zipfile
    from utils.misc import add_file_hash
    try:
        # zipfile always follows links
        with zipfile.ZipFile(archive, 'w') as zipf:
            # NOTE(review): on Python 3 ``ZipFile.comment`` must be bytes;
            # confirm the target interpreter before changing this literal.
            zipf.comment = 'Created by s3-backup'
            for f in files:
                f = f.strip()
                if os.path.exists(f):
                    zipf.write(f)
                    add_file_hash(archive, f)
                    log.debug('Added %s.' % f)
                else:
                    log.error('%s does not exist.' % f)
            # IDIOM FIX: compared with ``!= None``; identity test is correct.
            if zipf.testzip() is not None:
                log.error('An error occured creating the zip archive.')
    except zipfile.BadZipfile:
        # I assume this only happens on reads? Just in case...
        log.critical('The zip file is corrupt.')
    except zipfile.LargeZipFile:
        log.critical('The zip file is greater than 2 GB.'
                     ' Enable zip64 functionality.')
def do_backup(schedule, follow_links):
    '''Create, optionally encrypt, and upload a backup for *schedule*.

    *schedule* selects the daily/weekly/monthly file list from config;
    *follow_links* is forwarded to the archiver.  Exits with status 1
    when the backup list cannot be read.
    '''
    from shutil import rmtree
    import utils.filesystem
    if schedule == 'daily':
        backup_list = config.daily_backup_list
    elif schedule == 'weekly':
        backup_list = config.weekly_backup_list
    else:
        backup_list = config.monthly_backup_list
    try:
        files = utils.filesystem.read_file_list(backup_list)
        archive_path, tar_type = create_archive(files, follow_links)
        if config.enc_backup == True:
            # We don't add the enc extension to the key - the metadata
            # will tell us whether the archive is encrypted.
            enc_file = utils.encrypt.encrypt_file(config.enc_key,
                                                  archive_path,
                                                  config.enc_piece_size)
            send_backup(enc_file, tar_type, schedule)
            # Delete the plaintext local version
            os.remove(archive_path)
        else:
            # Not encrypting
            send_backup(archive_path, tar_type, schedule)
        if config.delete_archive_when_finished == True:
            log.debug('Deleting archive.')
            rmtree(config.dest_location)
    except IOError:
        # BUG FIX: the original did ``raise log.critical(...)``, which
        # raises ``None`` (a TypeError) and made ``sys.exit(1)``
        # unreachable.  Log, then exit.
        log.critical('Cannot open file: %s' % backup_list)
        sys.exit(1)
def main():
    """Entry point: dispatch the CLI command parsed by docopt."""
    cli = docopt(__doc__, version='0.1')
    if cli['setup']:
        # Given that the wizard is always run by a human, and that log messages
        # would interfere with the wizard output, we disable logging for it.
        # BUG FIX: ``logging.disable`` takes a level; the original passed
        # the ``logging`` module object itself.
        logging.disable(logging.CRITICAL)
        wizard = Wizard()
        wizard.run()
        logging.disable(logging.NOTSET)
        exit(os.EX_OK)
    config = load_config(cli['<roster>'])
    modify_logger(cli, config)
    # Normalise every time-like CLI argument to the roster's timezone.
    for key in ('--start', '--end', '--at', '<start>', '<end>', '<fuzzy>'):
        if cli[key] is not None:
            cli[key] = dtfy(cli[key], tz=config['roster.time_zone'])
    roster = get_roster(config)
    if cli['current'] is True:
        current(roster, cli, config)
    elif cli['query'] is True:
        query(roster, cli, config)
    elif cli['report'] is True:
        report(roster, cli, config)
    elif cli['update']:
        roster.update_cache()
    elif cli['runway'] is True:
        runway(roster, cli, config)
    elif cli['status'] is True:
        status(roster, cli, config)
    else:
        log.critical('Something is odd, you should never hit this point...')
        exit(os.EX_SOFTWARE)
def create_archive(files, follow_links):
    """Creates an archive of the given files and stores them in the
    location specified by config.destination. Returns the full path of
    the archive."""
    from time import strftime
    try:
        if not os.path.exists(config.dest_location):
            os.makedirs(config.dest_location)
    except OSError:
        # TODO: Fallback to a tmp directory before failing
        log.critical('Cannot create directory %s' % config.dest_location)
        sys.exit(1)
    if config.compression_method == 'zip':
        archive_type = '.zip'
    else:
        archive_type = '.tar'
        mode = 'w:'
        # For tar archives, append the compression suffix (e.g. '.tar.gz')
        # and extend the tarfile mode (e.g. 'w:gz').
        # NOTE(review): reconstructed nesting — the 'none' check is assumed
        # to apply only to tar archives; confirm against project history.
        if config.compression_method != 'none':
            archive_type = archive_type + '.' + config.compression_method
            mode += config.compression_method
    # Archive name is date-stamped: bakYYYYMMDD<ext>
    archive_name = ('bak' + strftime('%Y%m%d') + archive_type)
    archive_name = os.path.join(config.dest_location, archive_name)
    if config.compression_method == 'zip':
        # zipfile always follows links
        create_zip(archive_name, files)
    else:
        create_tar(archive_name, files, mode, follow_links)
    return archive_name, archive_type
def functions(self):
    """Parse function definitions out of ``self.file_path`` using grep.

    Returns a dict ``{name: {'start': line, 'end': line}}`` or ``False``
    when grep finds nothing.
    """
    # parse functions
    # `grep` (`ggrep` on Mac)
    grep = '/bin/grep'
    if 'darwin' == sys.platform:
        # On macOS look for GNU grep installed via Homebrew.
        ggrep = ''
        for root, dir_names, file_names in os.walk('/usr/local/Cellar/grep'):
            for filename in file_names:
                if 'ggrep' == filename or 'grep' == filename:
                    ggrep = os.path.join(root, filename)
        if ggrep == '':
            log.critical("brew install ggrep pleases!")
            sys.exit(0)
        else:
            grep = ggrep
    regex = r'(?:function\s+)(\w+)\s*\('
    param = [grep, "-n", "-r", "-P"] + [regex, self.file_path]
    p = subprocess.Popen(param, stdout=subprocess.PIPE)
    result = p.communicate()
    if len(result[0]):
        functions = {}
        # log.debug(result[0])
        lines = str(result[0]).strip().split("\n")
        prev_function_name = ''
        for index, line in enumerate(lines):
            line = line.strip()
            if line == '':
                log.info('Empty')
                continue
            # grep output format: "<line_no>:<matched source line>"
            function = line.split(':')
            # NOTE(review): the '#' entry can never match a 2-char slice —
            # single-char comment prefixes slip through; confirm intent.
            if len(function) >= 2 and function[1].strip()[:2] not in [
                    '//', '#', '/*']:
                function_name = re.findall(regex, function[1].strip())
                if len(function_name) == 1:
                    function_name = function_name[0]
                    log.info(
                        'Function name: {0} - {1} - Prev Func: {2}'.format(
                            index, function_name, prev_function_name))
                    # Close the previous function at this function's start.
                    if index > 0 and prev_function_name in functions:
                        functions[prev_function_name]['end'] = function[0]
                    prev_function_name = function_name
                    functions[function_name] = {
                        'start': function[0],
                        'end': None  # next function's start
                    }
                else:
                    log.info("Can't find function name: {0}".format(line))
            else:
                print("没有分隔符(:)或改行为注释 {0}".format(function[1]))
        # Any function still open ends at the last line of the file.
        end = sum(1 for l in open(self.file_path))
        for name, value in functions.items():
            if value['end'] is None:
                functions[name]['end'] = end
        return functions
    else:
        return False
def test_recvfrom(self):
    """The reactor must survive a recvfrom() that raises socket.error."""
    self.r.start()
    r2 = ThreadedReactor()
    r2.listen_udp(tc.SERVER_ADDR[1], lambda x,y:None)
    log.critical('TESTING: IGNORE CRITICAL MESSAGE')
    r2.sendto('z', tc.CLIENT_ADDR)
    # self.r will call recvfrom (which raises socket.error)
    time.sleep(tc.TASK_INTERVAL)
    ok_(not self.callback_fired)
    self.r.stop()
def query(roster, cli, config):
    '''Print a roster query result.'''
    # --at serves as both endpoints when no explicit range was given.
    start = cli['--start'] or cli['--at']
    end = cli['--end'] or cli['--at']
    if end < start:
        msg = 'Tried to query roster for a negative timespan ({} to {})'
        log.critical(msg.format(start, end))
        exit(os.EX_DATAERR)
    # One tab-separated line per shift (Python 2 print statement).
    for shift in roster.query(start, end):
        print '\t'.join(shift.as_string_tuple)
def test_recvfrom(self):
    """The reactor must survive a recvfrom() that raises socket.error."""
    self.r.start()
    r2 = ThreadedReactor()
    r2.listen_udp(tc.SERVER_ADDR[1], lambda x, y: None)
    log.critical('TESTING: IGNORE CRITICAL MESSAGE')
    r2.sendto('z', tc.CLIENT_ADDR)
    # self.r will call recvfrom (which raises socket.error)
    time.sleep(tc.TASK_INTERVAL)
    ok_(not self.callback_fired)
    self.r.stop()
def functions(self):
    """Parse function definitions out of ``self.file_path`` using grep.

    Returns a list of ``{'function', 'start', 'end'}`` dicts, or
    ``False`` when grep finds nothing.
    """
    # parse functions
    # `grep` (`ggrep` on Mac)
    grep = '/bin/grep'
    # `find` (`gfind` on Mac)
    find = '/bin/find'
    if 'darwin' == sys.platform:
        # On macOS look for GNU grep installed via Homebrew.
        ggrep = ''
        for root, dir_names, file_names in os.walk('/usr/local/Cellar/grep'):
            for filename in file_names:
                if 'ggrep' == filename or 'grep' == filename:
                    ggrep = os.path.join(root, filename)
        if ggrep == '':
            log.critical("brew install ggrep pleases!")
            sys.exit(0)
        else:
            grep = ggrep
    regex = r'(?:function\s+)(\w+)\s*'
    param = [grep, "-n", "-r", "-P"] + [regex, self.file_path]
    p = subprocess.Popen(param, stdout=subprocess.PIPE)
    result = p.communicate()
    if len(result[0]):
        functions = []
        # log.debug(result[0])
        lines = str(result[0]).strip().split("\n")
        for index, line in enumerate(lines):
            line = line.strip()
            if line == '':
                log.info('Empty')
                continue
            # grep output format: "<line_no>:<matched source line>"
            function = line.split(':')
            if len(function) >= 2:
                function_name = re.findall(regex, function[1].strip())
                if len(function_name) == 1:
                    function_name = function_name[0]
                    # NOTE(review): indexing ``functions`` by the line
                    # index assumes no line was skipped above — a skipped
                    # (empty/unnamed) line would desynchronise the two and
                    # can raise IndexError; confirm with real grep output.
                    if index > 0:
                        functions[index - 1]['end'] = function[0]
                    if index == len(lines) - 1:
                        end = sum(1 for l in open(self.file_path))
                        log.info('File lines: {0}'.format(end))
                    else:
                        end = None
                    functions.append({
                        'function': function_name,
                        'start': function[0],
                        'end': end  # next function's start
                    })
                else:
                    log.info("Can't find function name: {0}".format(line))
        return functions
    else:
        return False
def main():
    """Read the ``debug`` flag from ./config and start the web manager."""
    try:
        config = ConfigParser.ConfigParser()
        config.read('config')
        # BUG FIX: ``bool(config.get(...))`` is True for ANY non-empty
        # string, including "False"; getboolean parses the value properly.
        debug = config.getboolean('cobra', 'debug')
    except ConfigParser.Error:
        debug = True
        log.critical("/config File Not Found, copy config.example to config please!")
    web.debug = debug
    manager.run()
def main():
    """Read the ``debug`` flag from ./config and start the web manager."""
    try:
        config = ConfigParser.ConfigParser()
        config.read('config')
        # BUG FIX: ``bool(config.get(...))`` is True for ANY non-empty
        # string, including "False"; getboolean parses the value properly.
        debug = config.getboolean('cobra', 'debug')
    except ConfigParser.Error:
        debug = True
        log.critical(
            "/config File Not Found, copy config.example to config please!")
    web.debug = debug
    manager.run()
def report(roster, cli, config):
    '''Print a human-friendly report about a time-slice of the roster.'''
    time_zone = config['roster.time_zone']
    # We use datetimes even if the ultimate goal is operate at date level as
    # we need to preserve the timezone information all along
    fuzzy = cli['<fuzzy>']
    # Fuzzy should be interpreted as always indicating a month.
    if fuzzy:
        try:
            start = fuzzy.replace(day=1)
            end = start + relativedelta(months=1, days=-1)
        except Exception as e:
            log.critical('Cannot parse <fuzzy> parameter "{}"'.format(fuzzy))
            # NOTE(review): ``e.message`` only exists on Python 2.
            log.exception(e.message)
            raise
    # A range can be whatever
    elif cli['<start>']:
        start = cli['<start>']
        end = cli['<end>']
        if start > end:
            msg = 'Tried to generate a report for negative timespan ({} to {})'
            log.critical(msg.format(start, end))
            exit(os.EX_DATAERR)
    else:
        # Default slice: the previous calendar month, timezone-aware.
        now = datetime.datetime.now(tz=pytz.timezone(time_zone))
        start = now.replace(day=1) + relativedelta(months=-1)
        end = start + relativedelta(months=1, days=-1)
    data = roster.report(start, end)
    # Tally per-person counts, split into weekday and weekend shifts.
    weekdays = defaultdict(int)
    weekends = defaultdict(int)
    for day, people in data:
        target = weekdays if day.weekday() < 5 else weekends
        for person in people:
            target[person] += 1
    print('\n O N - C A L L R O S T E R')
    print('=====================================================')
    print(' {} - {}\n\n'.format(start.strftime('%d %b %Y'),
                                end.strftime('%d %b %Y')))
    for row in data:
        print(' {:<20}{}'.format(row[0].strftime('%d %b %Y, %a'),
                                 ', '.join(row[1])))
    print('\n\n SUMMARY')
    print('-----------------------------------------------------')
    print(' Name Weekdays Weekends Total')
    print('-----------------------------------------------------')
    # NOTE: Python 2 — ``dict.keys()`` returns lists, so ``+`` concatenates.
    names = sorted(list(set(weekends.keys() + weekdays.keys())))
    template = ' {:<26}{:>3}{:>10}{:>8}'
    for name in names:
        wd = weekdays[name]
        we = weekends[name]
        # ``or '-'`` renders a dash instead of a zero count.
        print(template.format(name, wd or '-', we or '-', wd + we))
    print('-----------------------------------------------------\n')
def allowed_file(filename):
    """Return True when *filename* has an extension allowed for upload.

    Allowed extensions come from the pipe-separated
    ``[upload] extensions`` option in ./config.

    :param filename: name of the uploaded file
    :return: bool
    """
    config_extension = config.Config('upload', 'extensions').value
    if config_extension == '':
        # BUG FIX: the message named the wrong option
        # (upload->directory, but upload->extensions is read above).
        log.critical('Please set config file upload->extensions')
        sys.exit(0)
    allowed_extensions = config_extension.split('|')
    return '.' in filename and filename.rsplit('.', 1)[1] in allowed_extensions
def run(self, target=None, tid=None, pid=None):
    """Mark task *tid* as started and launch a static analysis of the
    *target* directory for project *pid*.

    Exits the process when parameters are missing, the task does not
    exist, was already scanned, or *target* is not a directory.
    """
    if target is None:
        log.critical("Please set --target param")
        sys.exit()
    if tid is not None:
        task_id = tid
        # Start Time For Task
        t = CobraTaskInfo.query.filter_by(id=tid).first()
        if t is None:
            log.critical("Task id doesn't exists.")
            sys.exit()
        if t.status not in [0, 1]:
            log.critical("Task Already Scan.")
            sys.exit()
        t.status = 1
        t.time_start = int(time.time())
        t.updated_at = time.strftime('%Y-%m-%d %X', time.localtime())
        try:
            db.session.add(t)
            db.session.commit()
        except Exception as e:
            # BUG FIX: ``e.message`` is deprecated/absent in Python 3.
            log.error("Set start time failed" + str(e))
    else:
        task_id = None
    if os.path.isdir(target) is not True:
        log.critical('Target is not directory')
        sys.exit()
    from engine import static
    static.Static(target, task_id=task_id, project_id=pid).analyse()
def export_xls_brief(raw):
    """Export brief call statistics (without internal extensions) to Excel.

    :param raw: mapping of city number -> {'inc': stats, 'out': stats};
        each stats dict carries duration/count/billsec/answer entries
        (inferred from the reads below — confirm against the caller)
    :return: True on success, None on configuration or save errors
    """
    wb = xlwt.Workbook()
    ws = wb.add_sheet('Краткий список')
    # Header row: city number, then merged "incoming"/"outgoing" groups.
    ws.write(0, 0, 'Гор. номер')
    ws.write_merge(0, 0, 1, 4, 'Вх.')
    ws.write_merge(0, 0, 5, 8, 'Исх.')
    path = get_options('main', 'xls_path_brief', True)
    if not path:
        log.critical('Ошибка чтения конфигурационного файла, см. ошибки выше')
        return
    line = 1
    for kc in sorted(raw):
        ws.write(line, 0, kc)
        ws.write(line, 1, format_time(raw[kc]['inc']['duration']))
        ws.write(line, 2, raw[kc]['inc']['count'])
        ws.write(line, 3, format_time(raw[kc]['inc']['billsec']))
        ws.write(line, 4, raw[kc]['inc']['answer'])
        ws.write(line, 5, format_time(raw[kc]['out']['duration']))
        ws.write(line, 6, raw[kc]['out']['count'])
        ws.write(line, 7, format_time(raw[kc]['out']['billsec']))
        ws.write(line, 8, raw[kc]['out']['answer'])
        line += 1
    try:
        wb.save(path)
    except PermissionError as e:
        log.error('Недостаточно прав для сохранения файла: %s' % e.filename)
        return
    except FileNotFoundError as e:
        log.error('Неверный путь или имя файла: %s' % e.filename)
        return
    return True
def decrypt(archive):
    '''Decrypt *archive* and delete the encrypted original.

    Returns the path of the decrypted file — the encrypted filename with
    a '.d' extension appended.  Exits with status 1 on failure.
    '''
    from utils.encrypt import decrypt_file
    from os import remove
    target = archive + '.d'
    succeeded = decrypt_file(config.enc_key, archive, target,
                             config.enc_piece_size)
    # Guard clause: bail out before touching the filesystem on failure.
    if not succeeded:
        log.critical('Failed to decrypt the archive %s.' % archive)
        exit(1)
    log.debug('Encrypted file: %s' % target)
    remove(archive)
    return target
def run(self, target=None, tid=None, pid=None):
    """Mark task *tid* as started, detect the target type and analyse it.

    Supported targets: local directory, compressed file, single file,
    git repository; SVN only yields a warning.
    """
    if target is None:
        log.critical("Please set --target param")
        sys.exit()
    if tid is not None:
        task_id = tid
        # Start Time For Task
        t = CobraTaskInfo.query.filter_by(id=tid).first()
        if t is None:
            log.critical("Task id doesn't exists.")
            sys.exit()
        if t.status not in [0, 1]:
            log.critical("Task Already Scan.")
            sys.exit()
        t.status = 1
        t.time_start = int(time.time())
        t.updated_at = time.strftime('%Y-%m-%d %X', time.localtime())
        try:
            db.session.add(t)
            db.session.commit()
        except Exception as e:
            # BUG FIX: ``e.message`` is deprecated/absent in Python 3.
            log.error("Set start time failed" + str(e))
    else:
        task_id = None
    target_type = self.parse_target(target)
    if target_type is False:
        # NOTE(review): execution falls through after this error —
        # confirm whether a sys.exit() is intended here.
        log.error("""
        Git Repository: must .git end
        SVN Repository: can http:// or https://
        Directory: must be local directory
        File: must be single file or tar.gz/zip/rar compress file
        """)
    from engine import static
    s = static.Static(target, task_id=task_id, project_id=pid)
    # BUG FIX: the original compared strings with ``is`` (identity);
    # use ``==`` for value equality.
    if target_type == 'directory':
        s.analyse()
    elif target_type == 'compress':
        from utils.decompress import Decompress
        # load an compressed file. only tar.gz, rar, zip supported.
        dc = Decompress(target)
        # decompress it. And there will create a directory named "222_test.tar".
        dc.decompress()
        s.analyse()
    elif target_type == 'file':
        s.analyse()
    elif target_type == 'git':
        from pickup.GitTools import Git
        g = Git(target, branch='master')
        g.get_repo()
        if g.clone() is True:
            s.analyse()
        else:
            log.critical("Git clone failed")
    elif target_type == 'svn':
        log.warning("Not Support SVN Repository")
def _parse_contact(message): """ Обработка контактов, добавление нового контакта в список авторизованных :param message: сообщение с контактной информацией :return: bool, True - сообщение обработано, False - ошибка обработки """ # Проверяем, чтобы пользователь послал свои контактные данные if message.contact.user_id == message.from_user.id: host, domain, password = _conn_data() if not all([host, domain, password]): log.critical('В файле настроек параметры host, domain или password не найдены') return phone = str(message.contact.phone_number) # Проверяем существование почтового ящика, должен быть настроен заранее на почтовом сервере box = imaplib.IMAP4(host) data = box.login('%s@%s' % (phone, domain), password) box.logout() if data[0] == 'OK': phones_list = db.get('phones_list', []) if phone not in phones_list: phones_list.append(phone) db['phones_list'] = phones_list if phone not in db: db[phone] = message.chat.id db[str(message.from_user.id)] = db[phone] keyboard_hider = types.ReplyKeyboardRemove() bot.send_message(message.chat.id, 'Хорошо, я тебя запомнил', reply_markup=keyboard_hider) else: bot.send_message(message.chat.id, 'Виделись уже :)') else: bot.send_message(message.chat.id, 'Я тебя не узнаю!') else: bot.send_message(message.chat.id, 'Я сказал свой!') return True
def load_config(string_):
    '''Load configuration from file.

    Tries ``<string_>.config`` first (oauth.directory = dirname of
    *string_*), then *string_* itself (oauth.directory = CWD).  Exits
    with EX_DATAERR when neither can be opened.
    '''
    candidates = (
        ('{}.config'.format(string_), os.path.dirname(string_)),
        (string_, os.getcwd()),
    )
    for path, directory in candidates:
        try:
            with open(path) as file_:
                config = json.load(file_)
            config['oauth.directory'] = directory
            return config
        except IOError:
            continue
    # The following will always be logged on screen, obviously...
    log.critical('Could not open configuration for "{}"'.format(string_))
    exit(os.EX_DATAERR)
def export_xls(raw):
    """Export the phone-number list to an Excel workbook.

    :param raw: mapping of city number -> {'inc': iterable, 'out': iterable}
    :return: True on success, None on configuration or save errors
    """
    wb = xlwt.Workbook()
    ws = wb.add_sheet('Список номеров')
    # Header row.
    for col, title in enumerate(('Гор. номер', 'Вх.', 'Исх.'), start=1):
        ws.write(0, col, title)
    path = get_options('main', 'xls_path', True)
    if not path:
        log.critical('Ошибка чтения конфигурационного файла, см. ошибки выше')
        return
    # One row per city number, numbers sorted for a stable layout.
    for row, number in enumerate(sorted(raw), start=1):
        ws.write(row, 1, number)
        ws.write(row, 2, ', '.join(raw[number]['inc']))
        ws.write(row, 3, ', '.join(raw[number]['out']))
    try:
        wb.save(path)
    except PermissionError as e:
        log.error('Недостаточно прав для сохранения файла: %s' % e.filename)
        return
    except FileNotFoundError as e:
        log.error('Неверный путь или имя файла: %s' % e.filename)
        return
    return True
def run(self):
    """Main loop activated by calling self.start()"""
    # NOTE: Python 2 syntax (``except X, e`` and backquote repr).
    last_task_run = time.time()
    stop_flag = self.stop_flag
    while not stop_flag:
        timeout_raised = False
        try:
            # Block (up to the socket timeout) waiting for a datagram.
            data, addr = self.s.recvfrom(BUFFER_SIZE)
        except (AttributeError):
            # self.s does not exist yet: listen_udp was never called.
            log.warning('udp_listen has not been called')
            time.sleep(self.task_interval)
            #TODO2: try using Event and wait
            timeout_raised = True
        except (socket.timeout):
            timeout_raised = True
        except (socket.error), e:
            log.critical(
                'Got socket.error when receiving (more info follows)')
            log.exception('See critical log above')
        else:
            # Drop datagrams from flooding peers when the barrier is on.
            ip_is_blocked = self.floodbarrier_active and \
                self.floodbarrier.ip_blocked(addr[0])
            if ip_is_blocked:
                log.warning('%s blocked' % `addr`)
            else:
                self.datagram_received_f(data, addr)
        # Periodically drain the scheduled-task queue under the lock.
        if timeout_raised or \
                time.time() - last_task_run > self.task_interval:
            #with self._lock:
            self._lock.acquire()
            try:
                while True:
                    task = self.tasks.consume_task()
                    if task is None:
                        break
                    task.fire_callbacks()
                # Re-read the flag under the lock so stop() takes effect.
                stop_flag = self.stop_flag
            finally:
                self._lock.release()
def create_tar(archive, files, mode, follow_links):
    '''Create a tar archive of the files being backed up.

    *mode* is the tarfile open mode (e.g. ``w:gz``); *follow_links*
    dereferences symlinks.  Exits with status 1 on tarfile errors.
    '''
    import tarfile
    from utils.misc import add_file_hash
    try:
        with tarfile.open(archive, mode, dereference=follow_links) as tar:
            for f in files:
                f = f.strip()
                if os.path.exists(f):
                    tar.add(f)
                    # IDIOM FIX: dropped the stray trailing semicolon.
                    add_file_hash(archive, f)
                    log.debug('Added %s.' % f)
                else:
                    log.error('%s does not exist.' % f)
    # CompressionError is a TarError subclass, so it must stay first.
    except tarfile.CompressionError:
        log.critical('There was an error compressing the backup archive. '
                     'Please try again.')
        sys.exit(1)
    except tarfile.TarError:
        log.critical('There was an error creating the backup archive. '
                     'Please try again.')
        sys.exit(1)
def openDB(name, dbname, dbtype="QSQLITE", host="localhost", port=-1, username="", password=""):
    """Open (or reuse) a named Qt SQL database connection.

    :param name: Qt connection name used for lookup/registration
    :param dbname: database name (file path for SQLite)
    :param dbtype: Qt driver name, e.g. "QSQLITE"
    :return: an open QSqlDatabase, or None when opening fails
    """
    # open database
    db = QSqlDatabase.database(name)
    if not db.isValid():
        if dbtype == "QSQLITE":
            # ensure sqlite db file exist
            dbFile = os.path.realpath(dbname)
            dirName = os.path.dirname(dbFile)
            if not os.path.exists(dirName):
                os.makedirs(dirName)
        db = QSqlDatabase.addDatabase(dbtype, name)
        db.setDatabaseName(dbname)
        db.setHostName(host)
        db.setPort(port)
        db.setUserName(username)
        db.setPassword(password)
        if not db.open():
            log.critical("Open Database Error: " + db.lastError().text())
            return None
    return db
def sendto(self, data, addr):
    """Send data to addr using the UDP port used by listen_udp."""
    # Serialise all sends on the reactor lock.
    with self._lock:
        try:
            num_sent = self.s.sendto(data, addr)
            # A short send is not an exception — report it loudly.
            if num_sent != len(data):
                log.critical(
                    'Just %d bytes sent out of %d (Data follows)' % (
                        num_sent, len(data)))
                log.critical('Data: %s' % data)
        except (socket.error):
            log.critical(
                'Got socket.error when sending (more info follows)')
            log.critical('Sending data to %r\n%r' % (addr, data))
            log.exception('See critical log above')
def export_xls(raw):
    """Export the phone-book structure to Excel.

    :param raw: {string: {string: [[string]]}}, phone-book structure
        {organisation: {department: [[employee_fields]]}}
    :return: bool, True when exported, None when an export error occurred
    """
    wb = xlwt.Workbook()
    ws = wb.add_sheet('Телефонная книга')
    # Header row
    ws.write_merge(0, 0, 0, 1, '', ts.sh0)
    ws.write(0, 2, 'ФИО', ts.sh1)
    ws.write(0, 3, 'Должность', ts.sh2)
    ws.write(0, 4, 'Вн. тел.', ts.sh2)
    ws.write(0, 5, 'Гор. тел.', ts.sh2)
    ws.write(0, 6, 'Почта', ts.sh3)
    line = 1
    item_len = [0, 0, 0, 0, 0, 0]  # accumulator for column widths
    item_style = [ts.ss1, ts.ss2, ts.ss2, ts.ss2, ts.ss3]  # employee field styles
    for kc in sorted(raw):
        # Organisation name (outline level 0)
        ws.write_merge(line, line, 0, 6, kc, ts.so0)
        ws.row(line).level = 0
        line += 1
        ws.row(line).level = 1
        for kd in sorted(raw[kc]):
            # Department name (outline level 1)
            ws.write(line, 0, '', ts.sd0)
            ws.write_merge(line, line, 1, 6, kd, ts.sd1)
            ws.row(line).level = 1
            line += 1
            ws.row(line).level = 2
            for item in sorted(raw[kc][kd]):
                ws.write_merge(line, line, 0, 1, '', ts.ss0)
                for k, field in enumerate(item):
                    # Employee fields: name, position, ext. phone,
                    # city phone, e-mail
                    ws.write(line, k + 2, field, item_style[k])
                    # Track the widest value seen per column
                    item_len[k] = max(item_len[k], len(field))
                line += 1
                ws.row(line).level = 2
    # Underline the table
    ws.write_merge(line, line, 0, 6, '', ts.sb0)
    ws.row(line).level = 0
    # Fixed widths for the organisation and department columns
    ws.col(0).width = int(36.5 * 17)
    ws.col(1).width = int(36.5 * 17)
    # Remaining columns sized from the longest value observed
    for k, v in enumerate(item_len):
        ws.col(k + 2).width = int(36.5 * 7.3 * v)
    # Freeze the header row
    ws.panes_frozen = True
    ws.horz_split_pos = 1
    path = get_options('main', 'xls_path', True)
    if not path:
        log.critical('Ошибка чтения конфигурационного файла, см. ошибки выше')
        return
    try:
        wb.save(path)
    except PermissionError as e:
        log.error('Недостаточно прав для сохранения файла: %s' % e.filename)
        return
    except FileNotFoundError as e:
        log.error('Неверный путь или имя файла: %s' % e.filename)
        return
    return True
def run(self):
    """Create the database schema and seed it with base data
    (auth key, languages, admin user, vulnerability types)."""
    # create database structure
    log.debug("Start create database structure...")
    try:
        db.create_all()
    except exc.SQLAlchemyError as e:
        log.critical("MySQL database error: {0}\nFAQ: {1}".format(
            e, 'https://github.com/wufeifei/cobra/wiki/Error#mysql'))
        sys.exit(0)
    log.debug("Create Structure Success.")
    # insert base data
    from app.models import CobraAuth, CobraLanguages, CobraAdminUser, CobraVuls
    # table `auth`
    log.debug('Insert api key...')
    auth = CobraAuth('manual', common.md5('CobraAuthKey'), 1)
    db.session.add(auth)
    # table `languages`
    log.debug('Insert language...')
    # language name -> pipe-separated list of file extensions
    languages = {
        "php": ".php|.php3|.php4|.php5",
        "jsp": ".jsp",
        "java": ".java",
        "html": ".html|.htm|.phps|.phtml",
        "js": ".js",
        "backup": ".zip|.bak|.tar|.tar.gz|.rar",
        "xml": ".xml",
        "image": ".jpg|.png|.bmp|.gif|.ico|.cur",
        "font": ".eot|.otf|.svg|.ttf|.woff",
        "css": ".css|.less|.scss|.styl",
        "exe": ".exe",
        "shell": ".sh",
        "log": ".log",
        "text": ".txt|.text",
        "flash": ".swf",
        "yml": ".yml",
        "cert": ".p12|.crt|.key|.pfx|.csr",
        "psd": ".psd",
        "iml": ".iml",
        "spf": ".spf",
        "markdown": ".md",
        "office": ".doc|.docx|.wps|.rtf|.csv|.xls|.ppt",
        "bat": ".bat",
        "PSD": ".psd",
        "Thumb": ".db",
    }
    # NOTE: Python 2 dict API (``iteritems``).
    for language, extensions in languages.iteritems():
        a_language = CobraLanguages(language, extensions)
        db.session.add(a_language)
    # table `user`
    log.debug('Insert admin user...')
    # Credentials are sanitised placeholders in this source.
    username = '******'
    password = '******'
    role = 1  # 1: super admin, 2: admin, 3: rules admin
    a_user = CobraAdminUser(username, password, role)
    db.session.add(a_user)
    # table `vuls`
    log.debug('Insert vuls...')
    vuls = [
        'SQL Injection', 'LFI/RFI', 'Header Injection', 'XSS', 'CSRF',
        'Logic Bug', 'Command Execute', 'Code Execute',
        'Information Disclosure', 'Data Exposure', 'Xpath Injection',
        'LDAP Injection', 'XML/XXE Injection', 'Unserialize',
        'Variables Override', 'URL Redirect', 'Weak Function',
        'Buffer Overflow', 'Deprecated Function', 'Stack Trace',
        'Resource Executable', 'SSRF', 'Misconfiguration', 'Components'
    ]
    for vul in vuls:
        a_vul = CobraVuls(vul, 'Vul Description', 'Vul Repair')
        db.session.add(a_vul)
    # commit
    db.session.commit()
    log.debug('All Done.')
def mail_processing():
    """Process incoming mail for every registered phone number and
    forward matching message bodies to the corresponding Telegram chat.
    """
    phones_list = db.get('phones_list', [])
    if not phones_list:
        return
    host, domain, password = _conn_data()
    if not all([host, domain, password]):
        log.critical('В файле настроек параметры host, domain или password не найдены')
        return
    xfrom, content_type = get_options('main', ['xfrom', 'content-type'])
    if not all([xfrom, content_type]):
        log.critical('В файле настроек параметры xfrom или content-type не найдены')
        return
    # Walk over the registered phone numbers
    for phone in phones_list:
        box = imaplib.IMAP4(host)
        result = box.login('%s@%s' % (phone, domain), password)
        if result[0] == 'OK':
            box.select()
            chat_id = db.get(phone, None)
            typ, data = box.search(None, 'ALL')
            if typ == 'OK':
                for num in data[0].split():
                    try:
                        typ, data = box.fetch(num, '(RFC822)')
                        mail = email.message_from_bytes(data[0][1])
                        # Mark the message deleted regardless of filtering.
                        box.store(num, '+FLAGS', '\\Deleted')
                        # Only forward mail from the expected sender with
                        # the expected content type.
                        if mail.get('From') != xfrom or mail.get_content_type() != content_type:
                            continue
                        msg_body = mail.get_payload(decode=True)
                        # Charset is taken from the Content-Type header
                        # (text after '=').
                        charset = re.search(r'=(.*)', mail.get('Content-Type')).group(1)
                        msg_body = msg_body.decode(charset)
                        bot.send_message(chat_id, msg_body)
                    except Exception as e:
                        log.critical(e)
                box.expunge()
            else:
                log.error('Не удалось получить список писем с сервера')
            box.close()
        else:
            log.error('Не удалось подключиться к почтовому ящику: %s@%s' % (phone, domain))
        box.logout()
def test_sendto_too_large_data_string(self):
    """sendto() with an oversized datagram must log, not raise."""
    log.critical('TESTING: IGNORE CRITICAL MESSAGE')
    self.r.sendto('z'*12345, tc.NO_ADDR)
def test_sendto(self):
    """sendto() to an unreachable address must log, not raise."""
    log.critical('TESTING: IGNORE CRITICAL MESSAGE')
    self.r.sendto('z', tc.NO_ADDR)
import kadtracker


def peers_found(peers):
    # Callback: print every peer reported by a get_peers lookup.
    print 'Peers found:'
    for peer in peers:
        print peer
    print '-' * 20


def lookup_done():
    # Callback: a lookup finished.
    print 'Lookup DONE'


# Expects: server_dht.py <dht_ip> <dht_port> <logs_path>
if len(sys.argv) == 4 and sys.argv[0] == 'server_dht.py':
    log.critical('argv %r' % sys.argv)
    RUN_DHT = True
    my_addr = (sys.argv[1], int(sys.argv[2]))  #('192.16.125.242', 7000)
    logs_path = sys.argv[3]
    dht = kadtracker.KadTracker(my_addr, logs_path)
else:
    RUN_DHT = False
    print 'usage: python server_dht.py dht_ip dht_port path'

try:
    print 'Type Control-C to exit.'
    # Fire a random get_peers lookup every 10 minutes.
    while (RUN_DHT):
        time.sleep(10 * 60)
        info_hash = identifier.RandomId()
        dht.get_peers(info_hash, peers_found)
except (KeyboardInterrupt):
    # NOTE(review): the handler body is truncated in this view of the
    # source; the remainder lies outside this chunk.
def export_xls_full(raw):
    """Export full call statistics (including internal extensions) to Excel.

    :param raw: mapping of city number -> {'inc': stats, 'out': stats};
        each stats dict carries duration/count/billsec/answer totals plus
        a per-extension 'users' mapping (inferred from the reads below)
    :return: True on success, None on configuration or save errors
    """
    wb = xlwt.Workbook()
    ws = wb.add_sheet('Подробный список')
    # Header row: city number, then merged "incoming"/"outgoing" groups.
    ws.write(0, 0, 'Гор. номер')
    ws.write_merge(0, 0, 1, 5, 'Вх.')
    ws.write_merge(0, 0, 6, 10, 'Исх.')
    path = get_options('main', 'xls_path_full', True)
    if not path:
        log.critical('Ошибка чтения конфигурационного файла, см. ошибки выше')
        return
    line = 1
    for kc in sorted(raw):
        ws.write(line, 0, kc)
        ki = raw[kc]['inc']
        ko = raw[kc]['out']
        kiu = ki['users']
        kou = ko['users']
        # Aggregate row for the city number.
        ws.write(line, 2, format_time(ki['duration']))
        ws.write(line, 3, ki['count'])
        ws.write(line, 4, format_time(ki['billsec']))
        ws.write(line, 5, ki['answer'])
        ws.write(line, 7, format_time(ko['duration']))
        ws.write(line, 8, ko['count'])
        ws.write(line, 9, format_time(ko['billsec']))
        ws.write(line, 10, ko['answer'])
        # Per-extension incoming detail rows.
        inc_line = 0
        for inc in sorted(kiu):
            inc_line += 1
            ws.write(line + inc_line, 1, inc)
            ws.write(line + inc_line, 4, format_time(kiu[inc]['billsec']))
            ws.write(line + inc_line, 5, kiu[inc]['answer'])
        # Per-extension outgoing detail rows.
        out_line = 0
        for out in sorted(kou):
            out_line += 1
            ws.write(line + out_line, 6, out)
            ws.write(line + out_line, 7, format_time(kou[out]['duration']))
            ws.write(line + out_line, 8, kou[out]['count'])
            ws.write(line + out_line, 9, format_time(kou[out]['billsec']))
            ws.write(line + out_line, 10, kou[out]['answer'])
        # Advance past the longer of the two detail blocks.
        line += max([inc_line, out_line]) + 1
    try:
        wb.save(path)
    except PermissionError as e:
        log.error('Недостаточно прав для сохранения файла: %s' % e.filename)
        return
    except FileNotFoundError as e:
        log.error('Неверный путь или имя файла: %s' % e.filename)
        return
    return True
# NOTE(review): this method arrived with its internal line breaks collapsed;
# the code is preserved verbatim below.  Purpose: static-analysis scan — it
# validates self.directory, collects files and logs per-extension counts,
# loads CobraLanguages/CobraRules, locates grep (ggrep under /usr/local/Cellar
# on macOS), runs grep per rule over the project, skips whitelisted paths,
# .min.js files and commented-out lines, inserts CobraResults rows, and
# finally marks the CobraTaskInfo row finished (status=2, timings).
# NOTE(review): `match_result.group(0) is not ""` compares identity with a
# string literal — should be `!=`; `e.message` is Python-2-only; the final
# "Set start time failed" message actually reports failure to set the END time.
def analyse(self): if self.directory is None: log.critical("Please set directory") sys.exit() log.info('Start code static analyse...') d = directory.Directory(self.directory) files = d.collect_files() log.info('Scan Files: {0}, Total Time: {1}s'.format( files['file_nums'], files['collect_time'])) ext_language = { # Image '.jpg': 'image', '.png': 'image', '.bmp': 'image', '.gif': 'image', '.ico': 'image', '.cur': 'image', # Font '.eot': 'font', '.otf': 'font', '.svg': 'font', '.ttf': 'font', '.woff': 'font', # CSS '.css': 'css', '.less': 'css', '.scss': 'css', '.styl': 'css', # Media '.mp3': 'media', '.swf': 'media', # Execute '.exe': 'execute', '.sh': 'execute', '.dll': 'execute', '.so': 'execute', '.bat': 'execute', '.pl': 'execute', # Edit '.swp': 'tmp', # Cert '.crt': 'cert', # Text '.txt': 'text', '.csv': 'text', '.md': 'markdown', # Backup '.zip': 'backup', '.bak': 'backup', '.tar': 'backup', '.rar': 'backup', '.tar.gz': 'backup', '.db': 'backup', # Config '.xml': 'config', '.yml': 'config', '.spf': 'config', '.iml': 'config', '.manifest': 'config', # Source '.psd': 'source', '.as': 'source', # Log '.log': 'log', # Template '.template': 'template', '.tpl': 'template', } for ext in files: if ext in ext_language: log.info('{0} - {1}'.format(ext, files[ext])) continue else: log.info(ext) languages = CobraLanguages.query.all() rules = CobraRules.query.filter_by(status=1).all() extensions = None for rule in rules: for language in languages: if language.id == rule.language: extensions = language.extensions.split('|') if extensions is None: log.warning("Rule Language Error") # grep name is ggrep on mac grep = '/bin/grep' if 'darwin' == sys.platform: log.info('In Mac OS X System') for root, dir_names, file_names in os.walk( '/usr/local/Cellar/grep'): for filename in file_names: if 'ggrep' == filename: grep = os.path.join(root, filename) filters = [] for e in extensions: filters.append('--include=*' + e) # White list white_list = [] ws = 
CobraWhiteList.query.filter_by(project_id=self.project_id, rule_id=rule.id, status=1).all() if ws is not None: for w in ws: white_list.append(w.path) try: log.info('Scan rule id: {0}'.format(rule.id)) # -n Show Line number / -r Recursive / -P Perl regular expression p = subprocess.Popen([grep, "-n", "-r", "-P"] + filters + [rule.regex, self.directory], stdout=subprocess.PIPE) result = p.communicate() # Exists result if len(result[0]): log.info('Found:') per_line = str(result[0]).split("\n") log.debug(per_line) for r in range(0, len(per_line) - 1): try: rr = str(per_line[r]).replace(self.directory, '').split(':', 1) code = str(rr[1]).split(':', 1) if self.task_id is None: self.task_id = 0 rule_id = rule.id current_time = datetime.now().strftime( '%Y-%m-%d %H:%M:%S') m_file = rr[0].strip() m_line = code[0] m_code = str(code[1].strip()) params = [ self.task_id, rule_id, m_file, m_line, m_code, current_time, current_time ] try: if m_file in white_list or ".min.js" in m_file: log.debug("In White list or min.js") else: # # // /* * match_result = re.match( "(#)?(//)?(\*)?(/\*)?", m_code) if match_result.group( 0 ) is not None and match_result.group( 0) is not "": log.debug("In Annotation") else: log.debug('In Insert') if rule.regex == "": # Didn't filter line when regex is empty r_content = CobraResults.query.filter_by( task_id=self.task_id, rule_id=rule_id, file=m_file).first() m_line = 0 else: r_content = CobraResults.query.filter_by( task_id=self.task_id, rule_id=rule_id, file=m_file, line=m_line).first() if r_content is not None: log.warning("Exists Result") else: results = CobraResults( self.task_id, rule_id, m_file, m_line, m_code, current_time, current_time) db.session.add(results) db.session.commit() log.info('Insert Results Success') except Exception as e: log.error('Insert Results Failed' + str(e.message)) log.debug(params) except Exception as e: log.critical('Error parsing result: ' + str(e.message)) else: log.info('Not Found') except Exception as e: 
log.critical('Error calling grep: ' + str(e)) # Set End Time For Task t = CobraTaskInfo.query.filter_by(id=self.task_id).first() t.status = 2 t.file_count = files['file_nums'] t.time_end = int(time.time()) t.time_consume = t.time_end - t.time_start t.updated_at = time.strftime('%Y-%m-%d %X', time.localtime()) try: db.session.add(t) db.session.commit() except Exception as e: log.critical("Set start time failed:" + e.message) log.info("Scan Done")
def test_sendto_too_large_data_string(self):
    """Sending an oversized datagram must be handled without raising.

    The send path is expected to emit a critical log entry, hence the
    advance warning message.
    """
    log.critical('TESTING: IGNORE CRITICAL MESSAGE')
    oversized_payload = 12345 * 'z'
    self.r.sendto(oversized_payload, tc.NO_ADDR)
def test_sendto_socket_error(self):
    """A socket error on sendto (port 0 destination) must not propagate.

    The send path is expected to emit a critical log entry, hence the
    advance warning message.
    """
    log.critical('TESTING: IGNORE CRITICAL MESSAGE')
    invalid_addr = (tc.NO_ADDR[0], 0)
    self.client_r.sendto('z', invalid_addr)
# NOTE(review): this fragment arrived with its internal line breaks collapsed;
# the code is preserved verbatim below.  It is a near-duplicate of the DHT
# demo script above: sets up logging, starts a kadtracker node from argv, and
# loops requesting peers for a random info-hash every 10 minutes.
# NOTE(review): `logging_conf.setup(logs_path, logs_level)` runs before
# `logs_path` is assigned from argv in this fragment — either the assignment
# lives outside the visible chunk or this is an ordering bug; confirm.
# NOTE(review): the trailing `except (KeyboardInterrupt):` lost its body —
# the fragment is truncated and cannot run as-is.
logging_conf.setup(logs_path, logs_level) import identifier import kadtracker def peers_found(peers): print 'Peers found:' for peer in peers: print peer print '-'*20 def lookup_done(): print 'Lookup DONE' if len(sys.argv) == 4 and sys.argv[0] == 'server_dht.py': log.critical('argv %r' % sys.argv) RUN_DHT = True my_addr = (sys.argv[1], int(sys.argv[2])) #('192.16.125.242', 7000) logs_path = sys.argv[3] dht = kadtracker.KadTracker(my_addr, logs_path) else: RUN_DHT = False print 'usage: python server_dht.py dht_ip dht_port path' try: print 'Type Control-C to exit.' while (RUN_DHT): time.sleep(10 * 60) info_hash = identifier.RandomId() dht.get_peers(info_hash, peers_found) except (KeyboardInterrupt):
# NOTE(review): this function arrived with its internal line breaks collapsed
# (a triple-quoted help string even spans the original physical lines); the
# code is preserved verbatim below.  Purpose: implements the "browse-files"
# subcommand — downloads a tar archive from S3, opens an interactive shell
# where the user marks/unmarks members for restoration (show/page/restore/
# cancel/quit/finish), then extracts the marked TarInfo objects to args.root.
# NOTE(review): `do_restore(root, archive, restore_list)` never uses its
# `archive` parameter — it extracts via the enclosing `tar` variable instead;
# works only by accident of closure.  Confirm and fix the parameter usage.
# NOTE(review): on IOError from tarfile.open the code logs critical but falls
# through and still uses `tar`, which is then unbound (NameError).
# NOTE(review): input is `.lower()`-ed although the help text claims "All
# commands are case-sensitive" — the claim and the code disagree.
# NOTE(review): `except: raise` around the extraction loop is a no-op.
def run_browse_files(args): '''Execute the subcommand "browse-files". Raises any errors.''' def do_browse_shell(tarinfo_list): '''Allows the user to browse the archive and select the files to restore.''' def show_list(ti_list): '''Prints a list of TarInfo objects''' print(header_format.format('#', 'Mark', 'Name', 'Size', 'Last Modified')) cnt = 0 for ti in ti_list: cnt += 1 if ti in restore_list: marked = '*' else: marked = ' ' print(output_format.format(cnt, marked, ti.name, ti.size, datetime.fromtimestamp(ti.mtime))) def page_list(ti_list): '''Pages a list of TarInfo objects''' go = True last = 0 while go: print(header_format.format('#', 'Mark', 'Name', 'Size', 'Last Modified')) for i in range(30): try: ti = ti_list[last] except IndexError: go = False break if ti in restore_list: marked = '*' else: marked = ' ' print(output_format.format(last + 1, marked, ti.name, ti.size, datetime.fromtimestamp(ti.mtime))) last += 1 inp2 = raw_input('\nq to quit, Enter to continue: ') if inp2 == 'q': go = False # BEGIN do_browse_shell main code help_text = \ '''\n"restore [number]" marks an object for restoration. "cancel [number]" cancels a marked restoration. Previous commands: where "number" is the number printed on the listing. "show" prints all files in the archive. "show restore" shows all files currently marked for restoration. "page" prints the archive's files thirty at a time. "page restore" prints the files marked for restoration, thirty at a time. "quit" quits without restoring. "finish" quits and restores the files. "h" shows this message. 
All commands are case-sensitive.\n\n''' from datetime import datetime # file number, status, name, size, date modified header_format = '{:3}{:8}{:43}{:12}{}' output_format = '{:<3}{}{:50}{:<6}{:%Y-%m-%d %H:%M:%S}' print(help_text) go = True restore_list = [] while go: inp = raw_input('\n-> ').lower() if inp == 'h': print(help_text) # Quit, no restore elif inp == 'quit': go = False restore_list = None # Quit and restore elif inp == 'finish': go = False # Mark a file for restoration elif inp.startswith('restore'): try: num = inp.split(' ', 1)[1] if num.isdigit(): if tarinfo_list[int(num) -1] not in restore_list: restore_list.append(tarinfo_list[int(num) - 1]) else: print('That file is already marked.') else: print('I do not know what file you are \ referring to.\n') except IndexError: print('There is no file with that index.\n') # Cancel a marked file elif inp.startswith('cancel'): try: num = inp.split(' ', 1)[1] if num.isdigit(): if tarinfo_list[int(num) - 1] in restore_list: restore_list.remove(tarinfo_list[int(num) - 1]) else: print('I do not know what file you are \ referring to.\n') except IndexError: print('There is no file with that index.\n') # Prints the list of objects in the archive or restore list elif inp.startswith('show'): try: show_restore = inp.split(' ', 1)[1] == 'restore' except IndexError: show_restore = False if show_restore: show_list(restore_list) else: show_list(tarinfo_list) # Page the file list. 30 lines per page. elif inp.startswith('page'): try: page_restore = inp.split(' ')[1] == 'restore' except IndexError: page_restore = False if page_restore: page_list(restore_list) else: page_list(tarinfo_list) # Unrecognized command given else: print('I do not understand that command.\n\n') return restore_list # END do_browse_files def do_restore(root, archive, restore_list): '''Restores the list of TarInfo objects into the filesystem.''' import tarfile try: for ti in restore_list: log.info('Extracting %s to %s.' 
% (ti, root)) tar.extract(ti, root) except: raise # BEGIN run_browse_files main code import tarfile from os.path import exists, join from sys import platform bucket = s3connect() # TODO: Instead of printing archive filenames, I want to get the date # and list the dates backups were made. Probably allow selecting one # here and then browsing it if args.archives: print('Implement browse archives') exit(0) archive = handle_download(bucket, args.schedule, args.date, config.dest_location) log.debug('Archive: %s' % archive) try: tar = tarfile.open(archive) except IOError: log.critical('The archive %s does not exist.' % archive) lst = tar.getmembers() lst_to_extract = do_browse_shell(lst) do_restore(args.root, tar, lst_to_extract)
# NOTE(review): this method arrived with its internal line breaks collapsed;
# the code is preserved verbatim below.  Purpose: first-run installer — it
# creates the database schema via db.create_all() (exiting on SQLAlchemy
# errors with a FAQ link), then seeds base data: an API auth key, the
# language/extension table, an admin user, and the vulnerability-type list,
# committing everything in one transaction at the end.
# NOTE(review): `languages.iteritems()` is Python-2-only (use .items() on
# Python 3); username/password are redacted placeholders ('******') in this
# copy — real credentials must come from configuration, not source.
def run(self): # create database structure log.debug("Start create database structure...") try: db.create_all() except exc.SQLAlchemyError as e: log.critical("MySQL database error: {0}\nFAQ: {1}".format(e, 'https://github.com/wufeifei/cobra/wiki/Error#mysql')) sys.exit(0) log.debug("Create Structure Success.") # insert base data from app.models import CobraAuth, CobraLanguages, CobraAdminUser, CobraVuls # table `auth` log.debug('Insert api key...') auth = CobraAuth('manual', common.md5('CobraAuthKey'), 1) db.session.add(auth) # table `languages` log.debug('Insert language...') languages = { "php": ".php|.php3|.php4|.php5", "jsp": ".jsp", "java": ".java", "html": ".html|.htm|.phps|.phtml", "js": ".js", "backup": ".zip|.bak|.tar|.tar.gz|.rar", "xml": ".xml", "image": ".jpg|.png|.bmp|.gif|.ico|.cur", "font": ".eot|.otf|.svg|.ttf|.woff", "css": ".css|.less|.scss|.styl", "exe": ".exe", "shell": ".sh", "log": ".log", "text": ".txt|.text", "flash": ".swf", "yml": ".yml", "cert": ".p12|.crt|.key|.pfx|.csr", "psd": ".psd", "iml": ".iml", "spf": ".spf", "markdown": ".md", "office": ".doc|.docx|.wps|.rtf|.csv|.xls|.ppt", "bat": ".bat", "PSD": ".psd", "Thumb": ".db", } for language, extensions in languages.iteritems(): a_language = CobraLanguages(language, extensions) db.session.add(a_language) # table `user` log.debug('Insert admin user...') username = '******' password = '******' role = 1 # 1: super admin, 2: admin, 3: rules admin a_user = CobraAdminUser(username, password, role) db.session.add(a_user) # table `vuls` log.debug('Insert vuls...') vuls = [ 'SQL Injection', 'LFI/RFI', 'Header Injection', 'XSS', 'CSRF', 'Logic Bug', 'Command Execute', 'Code Execute', 'Information Disclosure', 'Data Exposure', 'Xpath Injection', 'LDAP Injection', 'XML/XXE Injection', 'Unserialize', 'Variables Override', 'URL Redirect', 'Weak Function', 'Buffer Overflow', 'Deprecated Function', 'Stack Trace', 'Resource Executable', 'SSRF', 'Misconfiguration', 'Components' ] for vul in vuls: a_vul = 
CobraVuls(vul, 'Vul Description', 'Vul Repair') db.session.add(a_vul) # commit db.session.commit() log.debug('All Done.')
# NOTE(review): this method arrived with its internal line breaks collapsed
# (one split even falls inside a string literal); the code is preserved
# verbatim below.  Purpose: scanner v2 — like the earlier analyse() but it
# resolves both grep (ggrep) and find (gfind) under /usr/local/Cellar on
# macOS and exits if they are missing; rules with an empty regex use `find`
# to list files by extension, non-empty regexes use `grep -n -r -P` with
# VCS directories excluded; hits not in the whitelist/.min.js/comments are
# inserted as CobraResults, then the CobraTaskInfo row is marked finished.
# NOTE(review): `match_result.group(0) is not ""` uses identity comparison
# with a literal — should be `!=`; `e.message` is Python-2-only; a missing
# rule language now kills the whole process (sys.exit) where v1 only warned;
# the final "Set start time failed" message actually concerns the END time.
def analyse(self): if self.directory is None: log.critical("Please set directory") sys.exit() log.info('Start code static analyse...') d = directory.Directory(self.directory) files = d.collect_files(self.task_id) log.info('Scan Files: {0}, Total Time: {1}s'.format(files['file_nums'], files['collect_time'])) ext_language = { # Image '.jpg': 'image', '.png': 'image', '.bmp': 'image', '.gif': 'image', '.ico': 'image', '.cur': 'image', # Font '.eot': 'font', '.otf': 'font', '.svg': 'font', '.ttf': 'font', '.woff': 'font', # CSS '.css': 'css', '.less': 'css', '.scss': 'css', '.styl': 'css', # Media '.mp3': 'media', '.swf': 'media', # Execute '.exe': 'execute', '.sh': 'execute', '.dll': 'execute', '.so': 'execute', '.bat': 'execute', '.pl': 'execute', # Edit '.swp': 'tmp', # Cert '.crt': 'cert', # Text '.txt': 'text', '.csv': 'text', '.md': 'markdown', # Backup '.zip': 'backup', '.bak': 'backup', '.tar': 'backup', '.rar': 'backup', '.tar.gz': 'backup', '.db': 'backup', # Config '.xml': 'config', '.yml': 'config', '.spf': 'config', '.iml': 'config', '.manifest': 'config', # Source '.psd': 'source', '.as': 'source', # Log '.log': 'log', # Template '.template': 'template', '.tpl': 'template', } for ext in files: if ext in ext_language: log.info('{0} - {1}'.format(ext, files[ext])) continue else: log.info(ext) languages = CobraLanguages.query.all() rules = CobraRules.query.filter_by(status=1).all() extensions = None # `grep` (`ggrep` on Mac) grep = '/bin/grep' # `find` (`gfind` on Mac) find = '/bin/find' if 'darwin' == sys.platform: ggrep = '' gfind = '' for root, dir_names, file_names in os.walk('/usr/local/Cellar/grep'): for filename in file_names: if 'ggrep' == filename: ggrep = os.path.join(root, filename) for root, dir_names, file_names in os.walk('/usr/local/Cellar/findutils'): for filename in file_names: if 'gfind' == filename: gfind = os.path.join(root, filename) if ggrep == '': log.critical("brew install ggrep pleases!") sys.exit(0) else: grep = ggrep if gfind == 
'': log.critical("brew install findutils pleases!") sys.exit(0) else: find = gfind for rule in rules: log.info('Scan rule id: {0} {1} {2}'.format(self.project_id, rule.id, rule.description)) # Filters for language in languages: if language.id == rule.language: extensions = language.extensions.split('|') if extensions is None: log.critical("Rule Language Error") sys.exit(0) # White list white_list = [] ws = CobraWhiteList.query.filter_by(project_id=self.project_id, rule_id=rule.id, status=1).all() if ws is not None: for w in ws: white_list.append(w.path) try: if rule.regex.strip() == "": filters = [] for index, e in enumerate(extensions): if index > 1: filters.append('-o') filters.append('-name') filters.append('*' + e) # Find Special Ext Files param = [find, self.directory, "-type", "f"] + filters else: filters = [] for e in extensions: filters.append('--include=*' + e) # Explode SVN Dir filters.append('--exclude-dir=.svn') filters.append('--exclude-dir=.cvs') filters.append('--exclude-dir=.hg') filters.append('--exclude-dir=.git') filters.append('--exclude-dir=.bzr') filters.append('--exclude=*.svn-base') # -n Show Line number / -r Recursive / -P Perl regular expression param = [grep, "-n", "-r", "-P"] + filters + [rule.regex, self.directory] # log.info(' '.join(param)) p = subprocess.Popen(param, stdout=subprocess.PIPE) result = p.communicate() # Exists result if len(result[0]): lines = str(result[0]).split("\n") for line in lines: line = line.strip() if line == '': continue if rule.regex.strip() == '': # Find file_path = line.strip().replace(self.directory, '') log.debug('File: {0}'.format(file_path)) vul = CobraResults(self.task_id, rule.id, file_path, 0, '') db.session.add(vul) else: # Grep line_split = line.replace(self.directory, '').split(':', 1) file_path = line_split[0].strip() code_content = line_split[1].split(':', 1)[1].strip() line_number = line_split[1].split(':', 1)[0].strip() if file_path in white_list or ".min.js" in file_path: log.info("In white 
list or min.js") else: # Annotation # # // /* * match_result = re.match("(#)?(//)?(\*)?(/\*)?", code_content) if match_result.group(0) is not None and match_result.group(0) is not "": log.info("In Annotation") else: log.info('In Insert') exist_result = CobraResults.query.filter_by(task_id=self.task_id, rule_id=rule.id, file=file_path, line=line_number).first() if exist_result is not None: log.warning("Exists Result") else: log.debug('File: {0}:{1} {2}'.format(file_path, line_number, code_content)) vul = CobraResults(self.task_id, rule.id, file_path, line_number, code_content) db.session.add(vul) log.info('Insert Results Success') db.session.commit() else: log.info('Not Found') except Exception as e: log.critical('Error calling grep: ' + str(e)) # Set End Time For Task t = CobraTaskInfo.query.filter_by(id=self.task_id).first() t.status = 2 t.file_count = files['file_nums'] t.time_end = int(time.time()) t.time_consume = t.time_end - t.time_start t.updated_at = time.strftime('%Y-%m-%d %X', time.localtime()) try: db.session.add(t) db.session.commit() except Exception as e: log.critical("Set start time failed:" + e.message) log.info("Scan Done")
# NOTE(review): this method arrived with its internal line breaks collapsed;
# the code is preserved verbatim below.  Purpose: scanner v3 — like v2 (find/
# grep split, macOS ggrep/gfind resolution, whitelist and comment filtering)
# but rules now carry regex_location/regex_repair, and for PHP files with a
# repair regex a parse.Parse instance decides whether the hit's parameter is
# controllable and whether it was already repaired before a CobraResults row
# is inserted; exceptions print a traceback before the critical log.
# NOTE(review): `found_vul` is only assigned inside some branches and is
# never reset at the top of the per-line loop — it appears a stale True from
# a previous line could leak into the `if found_vul:` check; confirm.
# NOTE(review): `is not ""` identity comparison should be `!=`; `e.message`
# is Python-2-only; `print(traceback.print_exc())` prints a spurious None.
def analyse(self): if self.directory is None: log.critical("Please set directory") sys.exit() log.info('Start code static analyse...') d = directory.Directory(self.directory) files = d.collect_files(self.task_id) log.info('Scan Files: {0}, Total Time: {1}s'.format( files['file_nums'], files['collect_time'])) ext_language = { # Image '.jpg': 'image', '.png': 'image', '.bmp': 'image', '.gif': 'image', '.ico': 'image', '.cur': 'image', # Font '.eot': 'font', '.otf': 'font', '.svg': 'font', '.ttf': 'font', '.woff': 'font', # CSS '.css': 'css', '.less': 'css', '.scss': 'css', '.styl': 'css', # Media '.mp3': 'media', '.swf': 'media', # Execute '.exe': 'execute', '.sh': 'execute', '.dll': 'execute', '.so': 'execute', '.bat': 'execute', '.pl': 'execute', # Edit '.swp': 'tmp', # Cert '.crt': 'cert', # Text '.txt': 'text', '.csv': 'text', '.md': 'markdown', # Backup '.zip': 'backup', '.bak': 'backup', '.tar': 'backup', '.rar': 'backup', '.tar.gz': 'backup', '.db': 'backup', # Config '.xml': 'config', '.yml': 'config', '.spf': 'config', '.iml': 'config', '.manifest': 'config', # Source '.psd': 'source', '.as': 'source', # Log '.log': 'log', # Template '.template': 'template', '.tpl': 'template', } for ext in files: if ext in ext_language: log.info('{0} - {1}'.format(ext, files[ext])) continue else: log.info(ext) languages = CobraLanguages.query.all() rules = CobraRules.query.filter_by(status=1).all() extensions = None # `grep` (`ggrep` on Mac) grep = '/bin/grep' # `find` (`gfind` on Mac) find = '/bin/find' if 'darwin' == sys.platform: ggrep = '' gfind = '' for root, dir_names, file_names in os.walk( '/usr/local/Cellar/grep'): for filename in file_names: if 'ggrep' == filename or 'grep' == filename: ggrep = os.path.join(root, filename) for root, dir_names, file_names in os.walk( '/usr/local/Cellar/findutils'): for filename in file_names: if 'gfind' == filename: gfind = os.path.join(root, filename) if ggrep == '': log.critical("brew install ggrep pleases!") sys.exit(0) else: 
grep = ggrep if gfind == '': log.critical("brew install findutils pleases!") sys.exit(0) else: find = gfind for rule in rules: log.info('Scan rule id: {0} {1} {2}'.format( self.project_id, rule.id, rule.description)) # Filters for language in languages: if language.id == rule.language: extensions = language.extensions.split('|') if extensions is None: log.critical("Rule Language Error") sys.exit(0) # White list white_list = [] ws = CobraWhiteList.query.filter_by(project_id=self.project_id, rule_id=rule.id, status=1).all() if ws is not None: for w in ws: white_list.append(w.path) try: if rule.regex_location.strip() == "": filters = [] for index, e in enumerate(extensions): if index > 1: filters.append('-o') filters.append('-name') filters.append('*' + e) # Find Special Ext Files param = [find, self.directory, "-type", "f"] + filters else: filters = [] for e in extensions: filters.append('--include=*' + e) # Explode SVN Dir filters.append('--exclude-dir=.svn') filters.append('--exclude-dir=.cvs') filters.append('--exclude-dir=.hg') filters.append('--exclude-dir=.git') filters.append('--exclude-dir=.bzr') filters.append('--exclude=*.svn-base') # -n Show Line number / -r Recursive / -P Perl regular expression param = [grep, "-n", "-r", "-P"] + filters + [ rule.regex_location, self.directory ] # log.info(' '.join(param)) p = subprocess.Popen(param, stdout=subprocess.PIPE) result = p.communicate() # Exists result if len(result[0]): lines = str(result[0]).strip().split("\n") for line in lines: line = line.strip() if line == '': continue if rule.regex_location.strip() == '': # Find file_path = line.strip().replace( self.directory, '') log.debug('File: {0}'.format(file_path)) vul = CobraResults(self.task_id, rule.id, file_path, 0, '') db.session.add(vul) else: # Grep line_split = line.split(':', 1) file_path = line_split[0].strip() code_content = line_split[1].split(':', 1)[1].strip() line_number = line_split[1].split(':', 1)[0].strip() if file_path in white_list or 
".min.js" in file_path: log.info("In white list or min.js") else: # annotation # # // /* * match_result = re.match( "(#)?(//)?(\*)?(/\*)?", code_content) if match_result.group( 0) is not None and match_result.group( 0) is not "": log.info("In Annotation") else: # parse file function structure if file_path[ -3:] == 'php' and rule.regex_repair.strip( ) != '': parse_instance = parse.Parse( rule.regex_location, file_path, line_number, code_content) if parse_instance.is_controllable_param( ): if parse_instance.is_repair( rule.regex_repair, rule.block_repair): log.info("Static: repaired") continue else: found_vul = True else: log.info( "Static: uncontrollable param") continue else: found_vul = True file_path = file_path.replace( self.directory, '') if found_vul: log.info('In Insert') exist_result = CobraResults.query.filter_by( task_id=self.task_id, rule_id=rule.id, file=file_path, line=line_number).first() if exist_result is not None: log.warning("Exists Result") else: log.debug( 'File: {0}:{1} {2}'.format( file_path, line_number, code_content)) vul = CobraResults( self.task_id, rule.id, file_path, line_number, code_content) db.session.add(vul) log.info('Insert Results Success') db.session.commit() else: log.info('Not Found') except Exception as e: print(traceback.print_exc()) log.critical('Error calling grep: ' + str(e)) # Set End Time For Task t = CobraTaskInfo.query.filter_by(id=self.task_id).first() t.status = 2 t.file_count = files['file_nums'] t.time_end = int(time.time()) t.time_consume = t.time_end - t.time_start t.updated_at = time.strftime('%Y-%m-%d %X', time.localtime()) try: db.session.add(t) db.session.commit() except Exception as e: log.critical("Set start time failed:" + e.message) log.info("Scan Done")