def push(url, api_key, file_path):
    """Upload a local config file to the server.

    Reads `file_path`, wraps its content together with the file mtime and a
    SHA-1 checksum into a JSON payload, POSTs it to `url` and prints the
    server's verdict.

    :param url: push endpoint URL.
    :param api_key: client API key included in the payload.
    :param file_path: path of the local file to upload.
    :return: False when the file cannot be stat'ed, otherwise None.
    """
    try:
        mtime = os.stat(file_path).st_mtime
    except OSError:
        # Was a bare `except:` — only stat()/filesystem failures are
        # expected here; a bare except also swallowed KeyboardInterrupt.
        log('Push abort, could not stat `{}`'.format(file_path))
        return False
    with open(file_path, 'r') as fh:
        buf = fh.read()
    sha1_checksum = sha1(buf.encode('UTF-8')).hexdigest()
    data = {
        'api_key': api_key,
        'file_path': file_path,
        'mtime': int(mtime),
        'content': buf,
        'sha1_checksum': sha1_checksum,
        'case_sensitive': lib.lib.is_case_sensitive(),
    }
    req = requests.post(url, data=json.dumps(data))
    resp_body = lib.lib.get_response_body(req)
    payload = json.loads(resp_body)
    lib.lib.check_for_error(payload)
    print(lib.lib.check_local_file(payload))
def _start_streaming(self):
    """Start the background streaming process and wait for its first output.

    Kills any stale instance of the streaming command, cleans temporary
    files, spawns a fresh process with stdin redirected to /dev/null, then
    waits (up to 20 s) for the first snapshot file to appear. Failures are
    logged rather than raised.
    """
    log("Streaming process - clean up")
    kill_all([self.streaming_command])
    log("Streaming process - starting")
    try:
        self._clean_temp_files()
        # Close the devnull handle once the child has inherited it;
        # the original leaked the descriptor on every (re)start.
        with open(os.devnull, 'wb') as devnull:
            self.streaming_process_pid = Popen(
                self.streaming_command.split(' '), stdin=devnull).pid
        self.streaming_process_name = get_process_name(
            self.streaming_process_pid)
        attach_sub_process(self.streaming_process_pid)
        log("Streaming process - started")
        # Wait till a new file is generated
        timeout = 20  # 20 seconds timeout for the process startup
        while not self._get_oldest_snapshot() and timeout > 0:
            time.sleep(0.05)
            timeout -= 0.05
        if timeout <= 0:
            raise RuntimeError(
                "Timeout while waiting for streaming process to start")
        log("Streaming process - verified")
    except Exception as e:
        log('Failed to run RTSP client: %s' % repr(e))
def _verify_streaming(self, force_restart=False):
    """
    Check if the background streaming process is running and if not - start it.

    :param force_restart: if True - kill the old streaming process and start new one.
    """
    if self.streaming_process_pid:
        background_process_running = \
            check_background_process(self.streaming_process_pid,
                                     self.streaming_process_name)
        if not force_restart and background_process_running:
            return
        if force_restart and background_process_running:
            log('Killing background process, force_restart=%s' % force_restart)
            kill(self.streaming_process_pid)
            self.streaming_process_pid = None
            self.streaming_process_name = None
    try:
        log('Starting background process')
        import os
        # Close both handles after the child is spawned; the original
        # leaked them on every restart.
        # NOTE(review): the /tmp/ffm.log handle was opened but never passed
        # to Popen — kept only for its truncate side effect; confirm whether
        # it should be wired to the child's stdout/stderr.
        with open(os.devnull, 'wb') as devnull, open('/tmp/ffm.log', 'wb'):
            streaming_pid = Popen(self.streaming_command.split(' '),
                                  stdin=devnull).pid
        register_sub_process(streaming_pid)
        self.streaming_process_pid = streaming_pid
        self.streaming_process_name = get_pid_process_name(streaming_pid)
        log('Started background process')
        # Wait till the streaming process starts working
        self.snapshot(10, 1, ignore_errors=True)
        log('Started background verified')
    except Exception as e:
        log('Failed to run RTSP client: %s' % repr(e))
def _validate_image(self, img): # Check if the image is defective. We get such images where a given line is repeated till # the end of the image. This triggers false alarm. So we compare the last two lines of the # array and if they are the same - this is defective image. height = img.shape[0] result = not (img[height - 2] == img[height - 3]).all() if not result: log("Detected image with defect") return result
def run(self):
    """Start the motion detection loop."""
    while True:
        # A single frame-check failure must not kill the loop: log it and
        # carry on with motion treated as unknown (None).
        motion = None
        try:
            motion = self.check_next_frame()
        except Exception as exc:
            log('Got exception: %s' % repr(exc))
        # Report unless motion is definitively False; always report in debug.
        debug_enabled = 'debug' in config and config['debug']
        if motion is not False or debug_enabled:
            log('Camera "%s", frame %s, motion: %s'
                % (config['identifier'], self.frame_count, motion))
def snapshot_img(self, timeout=config['motion']['snapshot_timeout']):
    """
    Get snapshot from the camera.

    :param timeout: timeout for attempting to retrieve an image
    :return: cv2 image object representing the snapshot, or None when no
             snapshot could be retrieved within the timeout
    """
    start = time.time()
    img_fd = self._get_snapshot(timeout=timeout)
    if not img_fd:
        log("Time: %s, FAIL" % round(time.time() - start, 2))
        return None
    # numpy.fromstring is deprecated (and rejects binary input in recent
    # NumPy); frombuffer is the supported equivalent for raw bytes.
    img_array = numpy.frombuffer(img_fd.getvalue(), dtype=numpy.uint8)
    result = cv2.imdecode(img_array, flags=cv2.IMREAD_UNCHANGED)
    return result
def run(self):
    """Start the motion detection loop."""
    interval = int(config['motion']['interval'])
    while True:
        last_snapshot = datetime.now()
        # A single frame-check failure must not kill the loop: log it and
        # carry on with motion treated as unknown (None).
        motion = None
        try:
            motion = self.check_next_frame()
        except Exception as exc:
            log('Got exception: %s' % repr(exc))
        # Report unless motion is definitively False; always report in debug.
        # A confirmed detection is highlighted in green on the terminal.
        debug_enabled = 'debug' in config and config['debug']
        if motion is not False or debug_enabled:
            log('Camera "%s", frame %s, motion: %s'
                % (config['identifier'], self.frame_count,
                   '\033[92mTrue\033[0m' if motion is True else motion))
        # Pace the loop so iterations are at least `interval` seconds apart.
        elapsed = (datetime.now() - last_snapshot).total_seconds()
        if elapsed < interval:
            sleep(interval - (datetime.now() - last_snapshot).total_seconds())
def _check_lock(lock_file): """ Check if the lock is active. If so - return the PID of the process that has locked the resource. If the lock is not valid - return None. :param resource: Name of the resource to lock. :param dir_name: Base directory to store lock files. :return: PID of the process holding the lock iff the lock is active, None otherwise. """ # Check if the process that locked the file is still running try: if os.path.exists(lock_file): with open(lock_file, 'r') as lock_handle: data = json.load(lock_handle) if check_background_process(data['pid'], data['name']): return data['pid'] except Exception as exc: log('Unexpected exception, unlocking: %s' % repr(exc)) return None
def check():
    """Reconcile camera processes and the alarm flag with desired state."""
    global last_alarm_state
    # Arm/disarm each camera whose running state differs from the config.
    for cam in config['cameras']:
        should_run = state[cam]['active']
        running = is_locked(cam)
        if running == should_run:
            continue
        if should_run:
            log('arming %s' % cam)
            Popen(config[cam]['command'], shell=True)
        else:
            log('disarming %s' % cam)
            force_unlock(cam)
        # Give the process time to take/release its lock before moving on.
        sleep(5)
    # Log alarm transitions exactly once per state change.
    if state['alarm'] != last_alarm_state:
        last_alarm_state = state['alarm']
        if last_alarm_state:
            log('Activating alarm')
        else:
            log('Deactivating alarm')
# NOTE(review): script fragment — `parser` (and its `-area` argument),
# `device_info`, `generate_switch_route`, `switch_route`, `net_miko_key`
# and `log` are defined outside this view.
parser.add_argument('-action', required=True, type=str, choices=['bypass', 'gofirewall'])
parser.add_argument('-log_level', required=False, type=str, choices=['info', 'debug'], default='info')
parser.add_argument('-demo', action='store_true')
args = parser.parse_args()
opt_log_level = args.log_level
action = args.action
area = args.area
demo = args.demo
logger = log(opt_log_level, 'FirewallBypass')
logger.info(args)
# Route changes for the area's own switches.
the_sunny_boy = generate_switch_route(device_info[area], action)
# Derive the site prefix (DC/DR, case-insensitive) from the area name.
# NOTE(review): .group() raises AttributeError when the area does not start
# with DC/DR — presumably guaranteed by the -area choices; confirm.
dc = re.search(r'^D[CR]', area, re.I).group()
dc_area = re.split(r'_', area)
# Route changes for the corresponding core ("COR") switches.
the_cool_boy = generate_switch_route(
    device_info['{}_COR_{}'.format(dc, dc_area[1])], action)
# Demo mode only prints what would be done; real mode fans out over threads.
if not demo:
    with concurrent.futures.ThreadPoolExecutor() as t_pool:
        futures = {}
        # for key, value in the_sunny_boy.items():
        #     after_value = {key: value for key, value in value.items() if key in net_miko_key}
        #     command_set = value['commands']
        #     future = t_pool.submit(switch_route, after_value, command_set)
        #     futures[future] = key
        # for key, value in the_cool_boy.items():
# NOTE(review): `argparse`, `requests`, `Pri_GTM`, `auth` and `Headers` are
# presumably provided by the `etc.config` star import — confirm.
from etc.config import *
from lib.lib import GenURL
from lib.lib import log

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('-wideip', required=True, type=str,
                        help="输入域名,多个之间使用英文逗号分隔,例如:ibs.bjrcb.com,asgw.bjrcb.com")
    parser.add_argument('-new_ip', required=True, type=str, help="输入要被替换的新 IP 地址")
    parser.add_argument('-log_level', required=False, type=str, choices=['info', 'debug'], default='info')
    args = parser.parse_args()
    opt_wideip = args.wideip
    opt_new_ip = args.new_ip
    opt_log_level = args.log_level
    logger = log(opt_log_level, 'GTM_Repl_Pool')
    logger.info("接收到的参数: {}".format(args))
    # Wide-IPs arrive as one comma-separated argument.
    domain_names = opt_wideip.split(",")
    logger.info("解析到的域名: {}".format(domain_names))
    req_sess = requests.session()
    class_url = GenURL()
    class_url.set_record_type('a')
    GTMs = Pri_GTM
    # Query every primary GTM for each requested domain.
    for domain_name in domain_names:
        for GTM in GTMs:
            class_url.set_host(GTM)
            URL = class_url.get_all_dn_url('/mgmt/tm/gtm/wideip/')
            # verify=False: device certs are self-signed; (3, 5) = (connect, read) timeouts.
            r = req_sess.get(URL, auth=auth, headers=Headers, verify=False, timeout=(3, 5))
            dn_dict = r.json()
            # NOTE(review): fragment truncated — the `if` body continues outside this view.
            if "items" in dn_dict:
def _restart_streaming(self):
    """Kill the current streaming process and start a fresh one.

    Clears the stored PID before delegating to _start_streaming(), which
    performs its own cleanup, spawn and startup verification.
    """
    log("Streaming process - killing")
    kill(self.streaming_process_pid)
    self.streaming_process_pid = None
    self._start_streaming()
# NOTE(review): fragment — the line below is the tail of a
# parser.add_argument() call whose start is outside this view; `parser`,
# `log` and later use of `step` are also defined outside it.
choices=['cw', 'cmcc', 'ct', 'cu', 'dc', 'dr', 'hk-cmcc', 'hk-cw'])
parser.add_argument('-log_level', required=False, type=str, choices=['info', 'debug'], default='debug')
parser.add_argument('-demo', action='store_true', help='是否进入演示模式,默认为否')
args = parser.parse_args()
opt_src_ip = args.src_ip
opt_dst_dn = args.dst_dn
opt_record_type = args.record_type
opt_dst_location = args.dst_location
opt_log_level = args.log_level
opt_demo = args.demo
logger = log(opt_log_level, 'GTM_Topo')
logger.info("接收到的参数: {}".format(args))
# Source IPs and destination domain names arrive comma-separated.
src_ip_list = opt_src_ip.split(",")
dst_dn_list = opt_dst_dn.split(",")
logger.info('{}'.format(src_ip_list))
logger.info('{}'.format(dst_dn_list))
req_sess = requests.session()
# Map the CLI record-type selector onto the DNS record types to process.
record_types = []
if args.record_type == 'v4':
    record_types = ['a']
elif args.record_type == 'v6':
    record_types = ['aaaa']
elif args.record_type == 'all':
    record_types = ['a', 'aaaa']
step = 10
def sync(args, config):
    """Poll the server and reconcile each remote configuration with disk.

    Fetches files that are missing or older locally, pushes files that are
    newer locally, and skips disabled configurations. Exits the process on
    fatal conditions (disabled host, inconsistent server response).

    :param args: parsed CLI arguments (part of the command interface).
    :param config: client configuration used to build URLs and the API key.
    """
    path = '/clients/poll/'
    url = lib.lib.get_config_url(config, path)
    api_key = lib.lib.get_config_api_key(config)
    req = requests.get(url, params={'api_key': api_key})
    payload = lib.lib.get_response_body(req)
    payload = json.loads(payload)
    lib.lib.check_for_error(payload)
    if payload.get('is_disabled', False) or payload.get('is_blacklisted', False):
        log('Exiting: this host has been disabled.')
        sys.exit(1)
    conf_count = payload.get('configuration_count', 0)
    if conf_count == 0:
        log('Exiting: there are no configurations to update.')
        sys.exit(0)
    configs = payload.get('configurations', [])
    if len(configs) != conf_count:
        log('Exiting: configuration count does not match the number of returned configurations.')
        sys.exit(1)
    for configuration in configs:
        file_path = configuration.get('file_path')
        remote_file = lib.lib.RemoteConfigFile(file_path, config)
        remote_file.resolve()
        local_file = lib.lib.LocalConfigFile(file_path, config)
        local_file.resolve()
        print('Remote config: {}'.format(remote_file.config))
        print('Local config: {}'.format(local_file.config))
        log(lib.lib.check_local_file(configuration))
        if remote_file.config.get('is_disabled'):  # was `== True`
            log(' D `- {} is disabled, skipping...'.format(configuration.get('file_path')))
            continue
        if local_file.is_file_not_found:  # was `== True`
            # NOTE(review): only logs — presumably a missing local file
            # compares as older so the `<` branch below does the actual
            # fetch; confirm LocalConfigFile's comparison semantics.
            log(' E `- {} could not be found, fetching...'.format(file_path))
        if local_file < remote_file:
            log(' F `- {} is older than the server, fetching...'.format(file_path))
            fetch_url = lib.lib.get_config_url(config, '/clients/fetch/')
            fetch(fetch_url, api_key, file_path, configuration.get('mtime', 0))
        elif local_file == remote_file:
            log(' I `- {} is same age as the server, ignoring...'.format(file_path))
        elif local_file > remote_file:
            log(' P `- {} is newer than the server, pushing...'.format(file_path))
            # Was `push_url = fetch_url = ...`: the stray fetch_url alias
            # served no purpose and shadowed the fetch branch's variable.
            push_url = lib.lib.get_config_url(config, '/clients/push/')
            push(push_url, api_key, file_path)
# NOTE(review): fragment — the line below ends a parser.add_argument() call
# whose start (and the -wideip/-record_type/-action/-gtm_id/-skip_sync
# arguments, plus Disabled_JSON/Enabled_JSON) is outside this view.
default='debug')
parser.add_argument('-skip_clear', action='store_true', help="是否清除连接表,默认为是")
parser.add_argument('-demo', action='store_true', help='是否进入演示模式,默认为否')
args = parser.parse_args()
opt_wideip = args.wideip
opt_record_type = args.record_type
opt_action = args.action
opt_gtm_id = args.gtm_id
opt_skip_sync = args.skip_sync
opt_log_level = args.log_level
opt_skip_clear = args.skip_clear
opt_demo = args.demo
logger = log(opt_log_level, 'GTM_Oper_VS')
domain_names = opt_wideip.split(",")
# Select the JSON payload matching the requested action (disable vs enable).
action = Disabled_JSON if opt_action == "deactive" else Enabled_JSON
logger.info("接收到的参数: {}".format(args))
logger.info("解析到的域名: {}".format(domain_names))
logger.info(("跳过同步: {} ; 跳过清除连接表: {} ".format(opt_skip_sync, opt_skip_clear)))
logger.info(action)
# Map the CLI record-type selector onto the DNS record types to process.
record_types = []
if args.record_type == 'v4':
    record_types = ['a']
elif args.record_type == 'v6':
    record_types = ['aaaa']
# NOTE(review): fragment truncated — the `elif` body continues outside this view.
elif args.record_type == 'all':
# check_req_sess_res(r.status_code) version_info = r.json() if isinstance(version_info, dict): for i in version_info['items']: if 'active' in i: if i['active']: f5_version = i['version'] else: logger.error('api 接口错误') info_dict[host]['F5_Version'] = f5_version if __name__ == "__main__": start_time = time.time() lock = threading.Lock() logger = log('debug', 'GTM_Conn_Test') class_url = GenURL() req_sess = requests.session() info_dict = Vividict() all_mgtip(associate_f5_with_mgtip) logger.debug('mgt_dict: {}'.format(info_dict)) threads = [] for mgtip in info_dict.keys(): threads.append( threading.Thread(target=f5_get_version, args=(mgtip, ), name=f"f5_get_version_{mgtip}")) for thread in threads: thread.start() for thread in threads: thread.join()
# NOTE(review): fragment — the enclosing function (an SSH shell session;
# `client_shell`, `client`, `logger` and the opening `try:` are defined
# there) starts outside this view; indentation below is reconstructed.
        r = client_shell.recv(9999).decode('gbk')
        logger.info(r)
    finally:
        # Always release the SSH connection, even when receiving fails.
        client.close()
    return r


if __name__ == "__main__":
    start_time = time.time()
    args = parser()
    opt_log_level = args.log_level
    opt_zh, opt_wd = args.zh, args.wd
    opt_demo = args.demo
    opt_link = args.link
    opt_all_og = args.all_og
    logger = log(opt_log_level, 'BatchDevice')
    logger.info(args)
    # Mode selection: -zh and -wd appear mutually exclusive; -all_og selects
    # every organisation. NOTE(review): list_organ stays unset in 'all' mode
    # and when neither option is given — confirm downstream handling.
    if opt_zh is not None and opt_wd is None:
        df_mode = 'zh_mode'
        list_organ = list(opt_zh.split(','))
    elif opt_zh is None and opt_wd is not None:
        df_mode = 'wd_mode'
        list_organ = list(opt_wd.split(','))
    elif opt_all_og:
        df_mode = 'all'
    pd_df = pd_open_file(fpath)
    logger.info("打开文件耗时: {}".format(time.time() - start_time))
    # NOTE(review): fragment truncated — the generate_ips(...) call continues
    # outside this view.
    prd_ips, oa_ips, prd_names, oa_names = generate_ips(