def backup_data(self, repos):
    """TAR remote repos."""
    log.print_log("Backing up remote repos on host: %s"
                  % self.config.get('backup', 'ssh_hostname'))
    self._copy_script_files_to_remote()
    self.run_backup_cmd("--backup")
def log_message(msg, error_flag=False):
    if logger is None:
        return
    if error_flag:
        log.print_log(logger, msg, error_flag=True)
    else:
        log.print_log(logger, msg)
def restore_data(self, repos):
    """Restore Gerrit."""
    log.print_log("Restoring remote repos on host: %s"
                  % self.config.get('backup', 'ssh_hostname'))
    self._copy_script_files_to_remote()
    self.run_backup_cmd("--restore")
def upload_file(self, s3_key, file_path):
    """Upload a file to S3."""
    if not os.path.exists(file_path):
        raise RuntimeError("ERROR: File not found: %s" % file_path)
    log.print_log("Uploading file: %s" % file_path)
    conn = self.get_connection()
    s3_bucket = conn.get_bucket(self.bucket)
    file_size = os.stat(file_path).st_size
    file_human_size = utils.human_size(file_size)
    log.print_log("Uploading to S3 key: %s (%s)" % (s3_key, file_human_size))
    key = s3_bucket.new_key(s3_key)
    if self.content_type:
        key.set_metadata('Content-Type', self.content_type)
    if self.encrypt_files:
        key.set_metadata('x-amz-meta-s3tools-gpgenc', 'gpg')  # FYI: for s3cmd
    bytes_written = key.set_contents_from_filename(file_path, encrypt_key=True)
    if bytes_written != file_size:
        msg = ("ERROR: Mismatch in bytes synced to S3 bucket and local file: "
               "{0} != {1}".format(bytes_written, file_size))
        raise RuntimeError(msg)
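
# --- Hedged sketch (illustrative, not the repo's actual code) ---
# upload_file() relies on a get_connection() helper on this class. The
# Bucket/Key calls above match the boto (v2) API, so a minimal version
# could look like this; credentials are resolved by boto's own config
# chain (env vars, ~/.boto, etc.).
import boto

def get_connection(self):
    """Open an S3 connection via boto's default credential chain."""
    return boto.connect_s3()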
def get_versions(self, repos):
    """Get Versions Remote."""
    log.print_log("Getting Versions of remote repos on host: %s"
                  % self.config.get('backup', 'ssh_hostname'))
    self._copy_script_files_to_remote()
    self.run_backup_cmd("--get-versions")
def main():
    """Main function."""
    log.print_log("Backing up / Restoring Gerrit Repos")
    start_time = datetime.now().replace(microsecond=0)
    args = _parse_args()
    if _test_args(args) != 0:
        return
    config_file_path = args.config_file[0]
    if not os.path.isabs(config_file_path):
        config_file_path = os.path.join(os.getcwd(), config_file_path)
    if not os.path.exists(config_file_path):
        raise RuntimeError("ERROR: File not found: %s" % config_file_path)
    config = utils.load_config(config_file_path)
    _set_config(args, config, config_file_path)
    utils.set_common_args(config, args)
    process(config, args)
    end_time = datetime.now().replace(microsecond=0)
    log.print_log("Processing Time: %s" % str(end_time - start_time))
def wait_until_jobs_finished(self, jobName):
    """Wait until the job is finished."""
    data = self.get_job_info(jobName)
    curJobNum = data['number']
    timeStamp = datetime.fromtimestamp(
        data['timestamp'] / 1e3).replace(microsecond=0)
    log.print_log("Job Started At: {0:s}".format(
        timeStamp.strftime('%Y-%m-%d %H:%M:%S')))
    while data['building'] and data['number'] == curJobNum:
        duration = datetime.now().replace(microsecond=0) - timeStamp
        estimatedPercentage = ((duration.total_seconds() * 1000)
                               / data['estimatedDuration']) * 100
        log.print_log("Building: {0:d} {1:s} {2:.2f}%".format(
            data['number'], str(duration), estimatedPercentage))
        time.sleep(5)
        data = self.get_job_info(jobName)
    log.print_log("Job Result: %s" % data['result'])
    if data['result'] == 'FAILURE':
        log.failed("Jenkins job '%s' %s" % (jobName, data['result']))
        log.info("Fix Jenkins job before trying again.")
        return 1
    return 0
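
# --- Hedged sketch (illustrative, not the repo's actual code) ---
# wait_until_jobs_finished() relies on a get_job_info() helper defined
# elsewhere in this class. The fields it reads (number, timestamp,
# building, estimatedDuration, result) match the Jenkins JSON API for a
# build, so a minimal version could look like this; self.jenkins_url is
# an assumed attribute name, and json/urllib2 imports are assumed.
def get_job_info(self, jobName):
    """Fetch last-build info for a job from the Jenkins JSON API."""
    url = "%s/job/%s/lastBuild/api/json" % (self.jenkins_url, jobName)
    return json.load(urllib2.urlopen(url))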
def diskusage(self, repos):
    """Disk Usage Remote."""
    log.print_log("Disk Usage of remote repos on host: %s"
                  % self.config.get('backup', 'ssh_hostname'))
    self._copy_script_files_to_remote()
    self.run_backup_cmd("--diskusage")
def check_md5(md5, filename):
    checked = get_md5(filename)
    log.print_log(logger, 'expected md5: {0}, computed md5: {1}, file: {2}'
                  .format(md5, checked, filename))
    res = (md5 == checked)
    if not res:
        raise ValueError("md5 doesn't match")
    return res
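
# --- Hedged sketch (illustrative, not the repo's actual code) ---
# check_md5() relies on a get_md5() helper defined elsewhere. A minimal
# chunked implementation using the standard hashlib module:
import hashlib

def get_md5(filename, chunk_size=8192):
    """Return the hex MD5 digest of a file, read in chunks."""
    digest = hashlib.md5()
    with open(filename, 'rb') as handle:
        for chunk in iter(lambda: handle.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()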
def send_pkts(self):
    while True:
        if self.window.completed_transmission():
            print_log("Finished sending all the packets.")
            break
        # If the window is full, wait until there is space to send packets.
        if self.window.get_ws() == self.window.get_max_ws():
            continue
        if self.pkt_bucket.get_size() < self.window.get_next_pkt():
            continue
        curr_pkt = self.pkt_bucket.next_pkt(self.window.get_next_pkt())
        if curr_pkt is None:
            continue
        seq_num = curr_pkt.get_seq_num()
        # Sending Sn; timer started.
        print_log("Sending " + str(seq_num) + "; Timer started")
        final_pkt = self.format_pkt(seq_num, curr_pkt.get_payload(),
                                    self.window.get_next_pkt())
        self.sock.sendto(final_pkt, ("127.0.0.1", self.port))
        self.window.reduceWindow(seq_num)
def router():
    table, peers, listen_socket, port, send_sock = init()  # Set up sockets and state
    t = time.time()  # Time of the last periodic send
    log.print_log(port, table)  # Print the initial routing table (just neighbors)
    while True:
        listen_socket.setblocking(0)
        # Poll for incoming data; give up after 5 ms and continue the loop.
        ready = select.select([listen_socket], [], [], .005)
        if ready[0]:
            data = listen_socket.recv(1024)  # Receive data
            try:
                # "Unpickle" the received byte stream back into a table (list).
                data = cPickle.loads(data)
                # Merge the received table into ours via the receive module.
                updated, peers, table = receive.update(data, table, peers, port)
            except cPickle.UnpicklingError:
                continue  # Ignore malformed data
            if updated:
                log.print_log(port, table)  # If the table was updated, print it
        cur_t = time.time()
        if (cur_t - t) > 5:  # If more than 5 seconds have gone by...
            t = cur_t  # ...reset the timer and broadcast our table to peers.
            send.send(peers, table, send_sock, port)
def list_bucket_keys(self):
    """List S3 buckets."""
    conn = self.get_connection()
    s3_bucket = conn.get_bucket(self.bucket)
    log.print_log("Listing S3 Bucket Files: %s" % s3_bucket.name)
    for key in s3_bucket.list(delimiter="/"):
        log.print_log(key.name)
def output_repo_list_to_file(self, repos, repo_list_file):
    """Write Repo List to file."""
    log.print_log("Repo Count: %d" % len(repos))
    repos = sorted(repos)
    with open(repo_list_file, 'w') as output_file:
        output_file.write('\n'.join(repos))
    return repo_list_file
def make_wav_file():
    log.print_log("make_wav_file start.")
    try:
        m4a_file = AudioSegment.from_file("audio.m4a", format="m4a")
        m4a_file.export("audio.wav", format="wav")
        log.print_log("make_wav_file Success.")
    except Exception as err:
        # Don't swallow conversion errors silently.
        log.print_log("make_wav_file failed: %s" % err)
def connect(self):
    # Handle socket-creation failure gracefully.
    try:
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    except Exception:
        print_log("Failed to create the UDP socket")
        return False
    return True
def save_m4a_file(content):
    log.print_log("save_m4a_file start.")
    try:
        m4a_path = os.path.join(os.getcwd(), "audio.m4a")
        with open(m4a_path, "wb") as fd:
            for chunk in content.iter_content():
                fd.write(chunk)
        # Only report success if the file was actually written.
        log.print_log("save_m4a_file Success.")
    except Exception as err:
        log.print_log("save_m4a_file failed: %s" % err)
def send(self):
    request_handler = RequestHandler(self.sock, self.port, self.window,
                                     self.pkt_bucket)
    response_handler = ResponseHandler(self.sock, self.nPkts, self.timeout,
                                       self.window)
    print_log("Generating packets.\nNote: This step may take more time "
              "depending on the segment size and the number of packets "
              "to generate.")
    self.pkt_bucket.create_pkts()
    print_log("\nFinished generating packets\n")
    request_handler.start()
    response_handler.start()
def line_not_to_db(self, line, upload_time, client_time, error):
    """Log a line that could not be written to the DB."""
    try:
        log.print_log(line.encode("gb2312", 'ignore'))
        log.error(error)
    except Exception:
        return
def to_googleSpeechApi():
    log.print_log("to_googleSpeechApi start.")
    try:
        r = speech_recognition.Recognizer()
        with speech_recognition.AudioFile("audio.wav") as src:
            audio = r.record(src)
        text = r.recognize_google(audio, key=api_key, language="ja-JP")
        log.print_log("to_googleSpeechApi Success.")
        return text
    except Exception as err:
        # Don't swallow recognition errors silently.
        log.print_log("to_googleSpeechApi failed: %s" % err)
        return None
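
# Hedged usage sketch: the three audio helpers above chain together.
# `response` is assumed to be a requests.Response streaming the m4a audio.
#
#     save_m4a_file(response)       # write audio.m4a from the HTTP response
#     make_wav_file()               # convert audio.m4a -> audio.wav
#     text = to_googleSpeechApi()   # transcribe audio.wav via Google Speech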
def format_pkt(self, seq_num, payload, pkt_num):
    header = int('0101010101010101', 2)
    max_seq_num = self.window.get_max_seq_num()
    checksum = computeChecksum(payload)
    # Inject corruption (never for the first packet).
    if pkt_num != 0 and inject_error(BIT_ERROR_PROBABILITY):
        print_log("Injecting bit error for segment " + str(seq_num))
        checksum = 0
    return pack('IHHH' + str(len(payload)) + 's',
                seq_num, checksum, max_seq_num, header, payload)
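
# --- Hedged sketch (illustrative, not the repo's actual code) ---
# format_pkt() calls a computeChecksum() helper defined elsewhere. One
# common choice for a 16-bit payload checksum is the RFC 1071
# ones'-complement Internet checksum; this sketch assumes that scheme,
# and its result fits the 'H' (unsigned short) slot in pack() above.
def compute_checksum_sketch(payload):
    """16-bit ones'-complement checksum of a byte string."""
    data = bytearray(payload)
    if len(data) % 2:
        data.append(0)  # pad to an even number of bytes
    total = 0
    for i in range(0, len(data), 2):
        total += (data[i] << 8) | data[i + 1]
        total = (total & 0xFFFF) + (total >> 16)  # fold carry back in
    return ~total & 0xFFFF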
def get_versions(self, repo):
    """Get version."""
    # TODO: Implement in all backup classes
    for backup_class in self.backup_classes:
        backup_path = backup_class.get_backup_repo_path(repo)
        versions = backup_class.get_all_versions(backup_path)
        for version in versions:
            log.print_log("- %s" % version)
    return 0
def get_repos(self):
    """Get Gerrit Repos via API."""
    log.print_log("Getting Gerrit Repos via API")
    api_url = self.url + '/projects/?t'
    if self.verbose:
        extra_options = ''
        if not self.verify_ssl:
            extra_options += 'k'
        log.verbose("curl -L%s %s" % (extra_options, api_url))
    try:
        context = ssl.create_default_context()
        if not self.verify_ssl:
            context = ssl._create_unverified_context()
        response = urllib2.urlopen(api_url, context=context)
    except urllib2.HTTPError as err:
        raise RuntimeError("ERROR: (Gerrit API) HTTPError = %s (%s)"
                           % (str(err.code), err.reason))
    except urllib2.URLError as err:
        raise RuntimeError("ERROR: (Gerrit API) URLError = %s"
                           % str(err.reason))
    except httplib.HTTPException as err:
        raise RuntimeError("ERROR: (Gerrit API) HTTPException = %s" % str(err))
    except Exception:
        import traceback
        raise RuntimeError('ERROR: (Gerrit API) ' + traceback.format_exc())
    if response.getcode() != 200:
        raise RuntimeError(
            "ERROR: (Gerrit API) Did not get 200 response from: %s" % api_url)
    # Gerrit prepends a magic prefix to JSON responses to prevent XSSI.
    magic_prefix = ")]}'"
    response_body = response.read()
    if response_body.startswith(magic_prefix):
        response_body = response_body[len(magic_prefix):]
    data = json.loads(response_body)
    repos = []
    for repo in data:
        repos.append(repo)
    return repos
def backup_repo_list(self):
    """Backing up Repo List File."""
    log.print_log("Backing up Repo List File")
    repo_list_filename = self.config.get('script', 'repo_list_filename')
    script_path = self._get_script_path()
    repo_list_file = os.path.join(script_path, repo_list_filename)
    if not self.dry_run:
        for backup_class in self.backup_classes:
            backup_path = backup_class.get_backup_repo_list_path()
            backup_class.upload_file(backup_path, repo_list_file)
def set_weather_info(weather_json, today, tomorrow):
    log.print_log("set_weather_info start.")
    weather_MAP = {
        "晴": chr(0x1000A9),  # sunny
        "曇": chr(0x1000AC),  # cloudy
        "雨": chr(0x1000AA),  # rain
        "雪": chr(0x1000AB),  # snow
    }
    try:
        weather_today = weather_json["forecasts"][today]["telop"]
        weather_tomorrow = weather_json["forecasts"][tomorrow]["telop"]
        location = weather_json["location"]["prefecture"]  # e.g. 東京都
        time = weather_json["description"]["publicTime"]
        date = datetime.strptime(time.replace("+0900", ""),
                                 "%Y-%m-%dT%H:%M:%S")
        public_time = date.strftime("%Y/%m/%d %H:%M")  # e.g. 2020/03/10 22:32
        max_temperature = None
        min_temperature = None
        if weather_json["forecasts"][today]["temperature"]["max"] is not None:
            max_temperature = \
                weather_json["forecasts"][today]["temperature"]["max"]["celsius"]
        if weather_json["forecasts"][today]["temperature"]["min"] is not None:
            min_temperature = \
                weather_json["forecasts"][today]["temperature"]["min"]["celsius"]
        text = weather_json["description"]["text"]
    except TypeError:
        # Temperature data is None, etc.; bail out instead of using
        # undefined values below.
        log.print_log("set_weather_info failed: incomplete forecast data.")
        return None
    msg = "【{0}】\n{1} 現在の天気予報です。\n".format(location, public_time)
    msg += "【今日】\n" + weather_today
    # A compound telop (e.g. 晴のち雨) gets two icons, a simple one gets one.
    if len(weather_today) > 2:
        msg += weather_MAP[weather_today[0]] + weather_MAP[weather_today[-1]] + "\n"
    else:
        msg += weather_MAP[weather_today[0]] + "\n"
    if max_temperature is not None:
        msg += "最高 " + max_temperature + "\n"
    if min_temperature is not None:
        msg += "最低 " + min_temperature + "\n"
    msg += "【明日】\n" + weather_tomorrow
    if len(weather_tomorrow) > 2:
        msg += weather_MAP[weather_tomorrow[0]] + weather_MAP[weather_tomorrow[-1]] + "\n"
    else:
        msg += weather_MAP[weather_tomorrow[0]] + "\n"
    # msg += "【概況】\n" + text.replace('\n', '')  # alternative: strip newlines
    msg += "【概況】\n" + text
    log.print_log("set_weather_info Success.")
    return msg
def trigger_retransmission(self):
    with LOCK:
        self.retransmit = True
        self.next_seq_num = self.expected_ack
        # Build the string of pkt nums which we are going to retransmit.
        pkt_nums = ",".join(str(k) for k in self.transmissionWindow)
        print_log("Timer expired; Will be resending the pkts with seq num "
                  + pkt_nums)
        self.next_pkt = self.next_pkt - len(self.transmissionWindow)
        self.transmissionWindow.clear()
def run_shell_cmd(cmd, path='.', verbose=False):
    """Run Shell Command."""
    if verbose:
        log.verbose(cmd)
    # Capture output so it can be echoed line by line; without PIPE,
    # communicate() returns None for both streams.
    process = sp.Popen(cmd, shell=True, cwd=path,
                       stdout=sp.PIPE, stderr=sp.PIPE)
    stdout, stderr = process.communicate()
    exit_code = process.returncode
    for output in [stdout, stderr]:
        if output:
            for line in output.splitlines():
                log.print_log(line.strip())
    return exit_code
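
# Hedged usage sketch for run_shell_cmd (paths and command illustrative):
#
#     exit_code = run_shell_cmd("git fsck", path="/tmp/repo.git", verbose=True)
#     if exit_code != 0:
#         raise RuntimeError("ERROR: git fsck failed")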
def copy_files_to_remote(self, files, remote_path):
    """Copy Files to Remote."""
    log.print_log("Copy files to remote: %s" % self.hostname)
    self.run_command("mkdir -p %s" % remote_path)
    for local_file_path in files:
        remote_file_path = os.path.join(remote_path,
                                        os.path.basename(local_file_path))
        if not os.path.exists(local_file_path):
            raise RuntimeError("ERROR: File not found: %s" % local_file_path)
        self.scp_copy_file(local_file_path, remote_file_path)
def download_file(self, s3_key, to_file, file_ext=''):
    """Download a file from S3."""
    file_ext = file_ext.lower()
    s3_key = s3_key + file_ext
    key = self.get_key(s3_key)
    file_extension = os.path.splitext(to_file)[1]
    if file_extension.lower() != file_ext and self.encrypt_files:
        to_file += '.gpg'
    log.print_log("  Downloading To: %s" % to_file)
    key.get_contents_to_filename(to_file)
    if not os.path.exists(to_file):
        raise RuntimeError("ERROR: File not downloaded: %s" % to_file)
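
# --- Hedged sketch (illustrative, not the repo's actual code) ---
# download_file() relies on a get_key() helper on this class. With the
# boto (v2) API that upload_file() above matches, it could look like:
def get_key(self, s3_key):
    """Return the boto Key for s3_key, failing loudly if it is missing."""
    conn = self.get_connection()
    s3_bucket = conn.get_bucket(self.bucket)
    key = s3_bucket.get_key(s3_key)
    if key is None:
        raise RuntimeError("ERROR: S3 key not found: %s" % s3_key)
    return key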
def has_error(self):
    if self.errorcode == "0":
        log.print_log(self.logger, "*** API SUCCESS ***")
        return False
    log.print_log(self.logger, "*** !! API ERROR !! ***")
    log.print_log(self.logger, self.errorcode)
    log.print_log(self.logger, self.errormessage)
    return True
def findEpisodeFileAtPath(self, path, fileName):
    log.print_log(["try to find downloaded file", fileName, path])
    log.print_break("-")
    for fname in os.listdir(path):
        log.print_log(["file in path", fname])
        root, ext = os.path.splitext(fname)
        log.print_log(['file root', root])
        log.print_log(['file extension', ext])
        if ext in (".mkv", ".avi", ".mpg", ".mpeg") and root == fileName:
            log.print_log(["file found", fileName])
            log.print_break("-")
            return os.path.join(path, fname)
    log.print_break("-")
    return None
def resend_pkts(self, start_seq_num):
    pkt_num = start_seq_num
    while pkt_num < self.window.get_max_seq_num():
        pkt = self.pkt_bucket.next_pkt(self.window.get_next_pkt())
        if pkt is None:
            break
        seq_num = pkt.get_seq_num()
        print_log("Timer expired; Resending " + str(seq_num)
                  + "; Timer started")
        final_pkt = self.format_pkt(seq_num, pkt.get_payload(),
                                    self.window.get_next_pkt())
        self.sock.sendto(final_pkt, ("127.0.0.1", self.port))
        pkt_num += 1
        self.window.reduceWindow(seq_num)
    self.window.reset_retransmission()
def getGoogleNews(word):
    log.print_log("getGoogleNews start.")
    headers = {"User-Agent": "hoge"}
    url = f"https://news.google.com/search?q={word}&hl=ja&gl=JP&ceid=JP%3Aja"
    res = requests.get(url, timeout=1, headers=headers)
    soup = BeautifulSoup(res.text, "html.parser")
    lists = soup.find_all("h3")
    topics = []
    for i in range(5):
        title = lists[i].find("a").text  # Article title
        link = "https://news.google.com" + lists[i].find("a").get("href")  # Article link
        topics.append(title)
        topics.append(link)
    result = "\n".join(topics)  # One entry per line
    log.print_log("getGoogleNews OK.")
    return result
def recv_pkts(self):
    while not self.shutdown_flag.is_set():
        if self.window.get_num_received_acks() == self.nPkts:
            self.window.markTransmissionFinished()
            print_log("Finished receiving all the acks")
            return
        # Wait up to self.timeout seconds for an ACK to arrive.
        data = select.select([self.sock], [], [], self.timeout)
        if not data[0]:
            continue
        pkt, addr = self.sock.recvfrom(8)
        response = unpack('IHH', pkt)
        ack_num = int(response[0])
        print_log("Received ACK: " + str(ack_num))
        if not self.window.ignore_ack(ack_num):
            continue
        # Inject ack loss.
        if inject_error(ACK_LOSS_PROBABILITY):
            print_log("Injecting ack loss for ack " + str(ack_num))
            continue
        # Grow the window after receiving the ack.
        self.window.recv_ack(ack_num)
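
# --- Hedged sketch (illustrative, not the repo's actual code) ---
# Both the sender and the receiver above call an inject_error(probability)
# helper to simulate bit errors and ack loss. A minimal version using the
# standard random module:
import random

def inject_error(probability):
    """Return True with the given probability (0.0 to 1.0)."""
    return random.random() < probability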
def copy_to_remote(self, local_path, remote_path, exclude_files=None,
                   exclude_folders=None):
    """Copy Folders to Remote."""
    # Avoid mutable default arguments.
    exclude_files = exclude_files or []
    exclude_folders = exclude_folders or []
    log.print_log("Copy path to remote: %s" % self.hostname)
    created_remote_directories = []
    for root, dirnames, filenames in os.walk(local_path, topdown=True):
        dirnames[:] = [d for d in dirnames if d not in exclude_folders]
        for filename in filenames:
            if any(fnmatch.fnmatch(filename, pattern)
                   for pattern in exclude_files):
                continue
            remote_file_path = os.path.join(root, filename)
            remote_file_path = remote_file_path.replace(local_path, '')
            remote_file_path = (remote_path.rstrip('/') + '/'
                                + remote_file_path.lstrip('/'))
            remote_file_dir = os.path.dirname(remote_file_path)
            if remote_file_dir not in created_remote_directories:
                if self.verbose:
                    log.verbose("Creating remote path: %s" % remote_file_dir)
                self.run_command("mkdir -p %s" % remote_file_dir)
                created_remote_directories.append(remote_file_dir)
            local_filename = os.path.join(root, filename)
            self.scp_copy_file(local_filename, remote_file_path)
def __init__(self, argv):
    argvlen = len(argv)
    _argvlen = 3
    try:
        if argvlen < _argvlen:
            message = ('usage: python %s start(yyyymmdd) end(yyyymmdd) '
                       'type(optional)' % argv[0])
            raise ValueError(message)
        elif argvlen == _argvlen:
            self.start = argv[1]
            self.end = argv[2]
            self.types = None
        else:
            self.start = argv[1]
            self.end = argv[2]
            self.types = argv[3]
            is_defined_p_api_data_type(self.types)
    except Exception as e:
        log.except_log(self.logger, e)
        sys.exit(1)
    message = 'p_api_params: start:{0},end:{1},types:{2}'.format(
        self.start, self.end, self.types)
    log.print_log(self.logger, message)
def get_metrics(obj, ip, port, username, passwd, instance_name):
    base.clear_log_messages()
    log_messages = base.get_log_messages()
    msg = "function: get_metrics |input:- ip:"
    msg += "%s,port:%s,user:%s,password:%s" % (ip, port, username, passwd)
    log.print_log(obj, msg)

    # Get data from the server.
    if username == 'n/s':
        info = base.get_node_info(ip, port)
        node_latency = base.get_node_latency(ip, port)
        namespaces = base.get_namespaces(ip, port)
    else:
        info = base.get_node_info(ip, port, user=username, password=passwd)
        node_latency = base.get_node_latency(ip, port, user=username,
                                             password=passwd)
        namespaces = base.get_namespaces(ip, port, user=username,
                                         password=passwd)

    # Stats are kept outside the `if` below so the node alerts still work.
    stats = base.get_node_statistics(info)

    # Publish node metrics.
    if info != {}:
        base.extract_tps_parameter_from_statistics(stats)
        read_tps = base.get_read_tps()
        write_tps = base.get_write_tps()
        free_disk_stats = str(base.get_free_disk_stats(stats))
        free_memory_stats = str(base.get_free_memory_stats(stats))
        total_disk_stats = str(base.get_total_disk_stats(stats))
        total_memory_stats = str(base.get_total_memory_stats(stats))
        for key, value in stats.iteritems():
            obj.gauge('aerospike.node.' + str(key), str(value),
                      tags=['node:' + str(ip) + ':' + str(port),
                            'name:' + str(instance_name)])
        no_of_nodes = base.get_no_of_nodes(stats)
        if node_latency != {}:
            for key, value in node_latency.iteritems():
                data_list = node_latency[key]['data']
                for item in data_list:
                    for k, v in item.iteritems():
                        metric_name = 'aerospike.node.' + str(key)
                        tags_pct = ['latency_type:' + str(k),
                                    'value_type:percentage',
                                    'node:' + str(ip) + ':' + str(port),
                                    'name:' + str(instance_name)]
                        tags_value = ['latency_type:' + str(k),
                                      'value_type:value',
                                      'node:' + str(ip) + ':' + str(port),
                                      'name:' + str(instance_name)]
                        obj.gauge(metric_name, v['value'], tags_value)
                        obj.gauge(metric_name, v['pct'], tags_pct)
        if free_disk_stats != 'n/s':
            obj.gauge('aerospike.disk_usage_free', free_disk_stats,
                      tags=['node:' + str(ip) + ':' + str(port),
                            'name:' + str(instance_name)])
        if total_disk_stats != 'n/s':
            obj.gauge('aerospike.disk_usage_total', total_disk_stats,
                      tags=['node:' + str(ip) + ':' + str(port),
                            'name:' + str(instance_name)])
        if free_memory_stats != 'n/s':
            obj.gauge('aerospike.memory_usage_free', free_memory_stats,
                      tags=['node:' + str(ip) + ':' + str(port),
                            'name:' + str(instance_name)])
        if total_memory_stats != 'n/s':
            obj.gauge('aerospike.memory_usage_total', total_memory_stats,
                      tags=['node:' + str(ip) + ':' + str(port),
                            'name:' + str(instance_name)])
        if no_of_nodes is not None:
            obj.gauge('aerospike.cluster_size', no_of_nodes,
                      tags=['node:' + str(ip) + ':' + str(port),
                            'name:' + str(instance_name)])
        else:
            log.print_log(obj, 'Number of Nodes are None!', error_flag=True)
        if read_tps['y'] is not None:
            obj.gauge('aerospike.successful_read_tps', read_tps['y'],
                      tags=['node:' + str(ip) + ':' + str(port),
                            'name:' + str(instance_name)])
        if read_tps['secondary'] is not None:
            obj.gauge('aerospike.total_read_tps', read_tps['secondary'],
                      tags=['node:' + str(ip) + ':' + str(port),
                            'name:' + str(instance_name)])
        if write_tps['y'] is not None:
            obj.gauge('aerospike.successful_write_tps', write_tps['y'],
                      tags=['node:' + str(ip) + ':' + str(port),
                            'name:' + str(instance_name)])
        if write_tps['secondary'] is not None:
            obj.gauge('aerospike.total_write_tps', write_tps['secondary'],
                      tags=['node:' + str(ip) + ':' + str(port),
                            'name:' + str(instance_name)])

    # Publish namespace metrics and alerts.
    if namespaces not in ERROR_CODES:
        base.init_namespace_list(namespaces)
        for ns in namespaces:
            if username == 'n/s':
                ns_stats = base.get_namespace_statistics(ip, port, ns)
            else:
                ns_stats = base.get_namespace_statistics(
                    ip, port, ns, user=username, password=passwd)
            if ns_stats not in ERROR_CODES:
                for key, value in ns_stats.iteritems():
                    metric_name = ('aerospike.namespace.' + str(ns)
                                   + '.' + str(key))
                    obj.gauge(metric_name, str(value),
                              tags=['node:' + str(ip) + ':' + str(port),
                                    'namespace:' + str(ns),
                                    'name:' + str(instance_name)])
            ns_alerts = base.get_namespace_alerts(
                ns, ns_stats, str(ip) + ':' + str(port))
            for alert in ns_alerts:
                if alert is not None:
                    obj.event({
                        'timestamp': int(time.time()),
                        'event_type': 'Namespace_Alert',
                        'msg_title': str(alert['msg_title']),
                        'msg_text': str(alert['msg_text']),
                        'alert_type': str(alert['alert_type'])
                    })

    # Publish node alerts.
    node_alerts = base.get_node_alerts(stats, str(ip) + ':' + str(port))
    for alert in node_alerts:
        if alert is not None:
            obj.event({
                'timestamp': int(time.time()),
                'event_type': 'Node_Alert',
                'msg_title': str(alert['msg_title']),
                'msg_text': str(alert['msg_text']),
                'alert_type': str(alert['alert_type'])
            })

    log.print_log(obj, 'function: get_metrics |output:- No return Values')
    log_messages = base.get_log_messages()
    for message in log_messages:
        log.print_log(obj, message['message'], message['error_flag'])
    return namespaces