def mongod_start(mongod, replset_name='', auth=None):
    [host, port, path] = mongod
    if _alive(mongod, auth):
        logging.info('%s already alive: we do nothing!' % mongod)
        return

    cmd = 'cd %s && numactl --interleave=all ./bin/mongod -f ./conf/mongod.conf --port %d --fork ' % (path, port)
    if replset_name:
        cmd += '--replSet %s ' % replset_name
    if auth:
        common.system('echo "%s" > ./mongodb-base/conf/mongokey && chmod 700 ./mongodb-base/conf/mongokey' % auth['key'], logging.debug)
        cmd += '--keyFile=%s/conf/mongokey ' % path

    _copy_files(mongod)
    r = _remote_run(conf.USER, host, cmd)
    logging.debug(r)
    if r.find('forked process') == -1:
        raise Exception('%s mongod start Fail' % mongod)
    if not _alive(mongod, auth):
        # return early so a failed start is not also reported as a success
        logging.warning('%s start Fail' % mongod)
        return
    logging.info('%s start Success' % mongod)
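# Usage sketch (not part of the original module): start every member of a
# hypothetical three-node replica set. The member list, the replica-set name
# 'rs0' and the auth key below are illustrative assumptions; mongod_start()
# expects each member as [host, port, path] and conf.USER to be configured.
def _example_start_replset():
    members = [
        ['10.0.0.1', 27017, '/data/mongodb-27017'],
        ['10.0.0.2', 27017, '/data/mongodb-27017'],
        ['10.0.0.3', 27017, '/data/mongodb-27017'],
    ]
    auth = {'key': 'example-keyfile-content'}  # assumed key material
    for mongod in members:
        mongod_start(mongod, replset_name='rs0', auth=auth)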
def cmd2(cmd):
    for line in cmd.split('\n'):
        line = line.strip()
        if not line:
            continue
        print line
        print common.system(line)
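# Usage sketch (assumed call site): cmd2() runs each non-empty line of a
# multi-line string as a separate shell command and prints its output.
cmd2('''
    uname -a
    df -h
''')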
def _copy_files(mongod):
    [host, port, path] = mongod
    cmd = 'mkdir -p %s ' % path
    _remote_run(conf.USER, host, cmd)
    cmd = 'rsync -avP ./mongodb-base/ %s@%s:%s 1>/dev/null 2>/dev/null' % (conf.USER, host, path)
    common.system(cmd, logging.debug)
def _copy_files(self):
    cmd = 'echo "%(key)s" > ./mongodb-base/conf/mongokey && chmod 700 ./mongodb-base/conf/mongokey' % self.args
    common.system(cmd, logging.debug)
    cmd = 'mkdir -p %(path)s ' % self.args
    self._remote_run(cmd)
    cmd = 'rsync -avP ./mongodb-base/ %(ssh_user)s@%(host)s:%(path)s 1>/dev/null 2>/dev/null' % self.args
    common.system(cmd, logging.debug)
def main():
    """Kick off the benchmark: reset stat.log and start the worker threads."""
    common.system('rm stat.log')
    fout = file('stat.log', 'a+')
    print >> fout, 'benchmark start!!!!!!!!!!!!!!!!!!!!!!!!'
    fout.close()
    logging.debug(PWD)
    LoadThread().start()
    IoStatThread().start()
    StatThread().start()
def configserver_start(configserver, auth):
    [host, port, path] = configserver
    if _alive(configserver, auth):
        logging.info('%s already alive: we do nothing!' % configserver)
        return

    cmd = 'cd %s ; ./bin/mongod --configsvr --dbpath ./db --logpath ./log/mongod.log --port %d --fork ' % (path, port)
    if auth:
        common.system('echo "%s" > ./mongodb-base/conf/mongokey && chmod 700 ./mongodb-base/conf/mongokey' % auth['key'], logging.debug)
        cmd += '--keyFile=%s/conf/mongokey ' % path

    _copy_files(configserver)
    print _remote_run(conf.USER, host, cmd)
def mongos_start(mongos, configdb, auth):
    [host, port, path] = mongos
    if _alive(mongos, auth):
        logging.info('%s already alive: we do nothing!' % mongos)
        return

    cmd = 'cd %s ; numactl --interleave=all ./bin/mongos --configdb %s --logpath ./log/mongod.log --port %d --fork ' % (path, configdb, port)
    if auth:
        common.system('echo "%s" > ./mongodb-base/conf/mongokey && chmod 700 ./mongodb-base/conf/mongokey' % auth['key'], logging.debug)
        cmd += '--keyFile=%s/conf/mongokey ' % path

    _copy_files(mongos)
    print _remote_run(conf.USER, host, cmd)
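# Usage sketch (illustrative hosts and paths, not from the original config):
# bring up three config servers and one mongos pointing at them. configdb is
# the comma-separated host:port list that mongos expects; the auth key is an
# assumption for the example only.
def _example_start_sharding_frontend():
    auth = {'key': 'example-keyfile-content'}
    configservers = [
        ['10.0.0.1', 40000, '/data/mongo-configsvr'],
        ['10.0.0.2', 40000, '/data/mongo-configsvr'],
        ['10.0.0.3', 40000, '/data/mongo-configsvr'],
    ]
    for cs in configservers:
        configserver_start(cs, auth)
    configdb = ','.join('%s:%d' % (host, port) for host, port, _path in configservers)
    mongos = ['10.0.0.1', 30000, '/data/mongo-mongos']
    mongos_start(mongos, configdb, auth)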
def _mongoexport(host, user, passwd, db_name, collection_name, data_file, query='', callback=None):
    (host, port) = host.split(':')
    port = int(port)
    mongo_path = conf.MONGO_PATH
    cmd = '''%(mongo_path)s/mongoexport -u %(user)s -p %(passwd)s
             --host %(host)s --port %(port)d
             --db %(db_name)s --collection %(collection_name)s
             %(query)s >> %(data_file)s ''' % locals()
    cmd = re.sub('\n *', ' ', cmd)
    r = common.system(cmd, logging.debug)
    logging.debug(r)

    # invoke callback with the number of exported records
    m = re.search(r'exported (\d+) records', r, re.DOTALL)
    if not m:
        info = 'error on export, ' + r
        logging.error(info)
        raise MException(info)
    records_export = int(m.group(1))
    if callback:
        callback(records_export)
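# Call sketch (hypothetical cluster, namespace and credentials): export
# documents whose shard key 'uid' lies in [1000, 2000) from test.users into a
# local file. The mongoexport output is appended to data_file.
_mongoexport(
    host='10.0.0.1:30000',          # assumed mongos address, split into host/port inside
    user='admin', passwd='secret',  # assumed credentials
    db_name='test', collection_name='users',
    data_file='./data/users.1000-2000.json',
    query="-q '{ $query: {uid: {$gte: 1000,$lt: 2000}}}'")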
def _alive(self):
    cmd = 'mongostat --host %(host)s --port %(port)s -u __system -p %(key)s -n1 ' % self.args
    r = common.system(cmd, logging.debug)
    alive = r.find('insert') >= 0
    logging.debug('%s alive = %s' % (self, alive))
    return alive
def _runjs(self, js):
    logging.debug('_run_js: \n' + js.replace(' ', '').replace('\n', ' '))
    filename = TmpFile().content_to_tmpfile(js)
    cmd = './mongodb-base/bin/mongo %(host)s:%(port)s/admin -u __system -p %(key)s' % self.args
    cmd += ' ' + filename
    r = common.system(cmd, logging.debug)
    if r.find('command failed') >= 0 or r.find('uncaught exception') >= 0:
        raise Exception('run js error: \n' + r)
    logging.debug(r)
    return r
def _run_js(host, port, js, auth=None):
    logging.info('run_js: \n' + js.replace(' ', '').replace('\n', ' '))
    filename = TmpFile().content_to_tmpfile(js)
    cmd = './mongodb-base/bin/mongo %s:%d/admin ' % (host, port)
    if auth:
        cmd += '-u %s -p %s ' % ('__system', auth['key'])
    cmd += filename
    rst = common.system(cmd, logging.info)
    if rst.find('command failed') >= 0 or rst.find('uncaught exception') >= 0:
        raise Exception('run js error: \n' + rst)
    logging.info(rst)
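# Usage sketch (illustrative host, port and key): run a small piece of shell
# JS against the admin database, for example to initiate a replica set. The
# JS text and the auth dict are assumptions for the example only.
js = '''
    rs.initiate();
    printjson(rs.status());
'''
_run_js('10.0.0.1', 27017, js, auth={'key': 'example-keyfile-content'})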
def do_pre_check(self):
    mongo_path = conf.MONGO_PATH
    cmd = '''%(mongo_path)s/mongoimport --version ''' % locals()
    r = common.system(cmd, logging.debug)

    self.orig_src_balancer_disabled = self._is_balancer_disabled(self.src_conn)
    self.orig_dest_balancer_disabled = self._is_balancer_disabled(self.dest_conn)

    # check that the balancer is stopped (disable it first if requested)
    if self.args.stop_balancer:
        if not self.orig_src_balancer_disabled:
            logging.info("disable balancer for %s !!! " % str(self.src_conn))
            self._disable_balancer(self.src_conn)
        if not self.orig_dest_balancer_disabled:
            logging.info("disable balancer for %s !!! " % str(self.dest_conn))
            self._disable_balancer(self.dest_conn)
    self._check_balancer_disabled(self.src_conn)
    self._check_balancer_disabled(self.dest_conn)

    # check record counts
    src_cnt = self._src_data_conn().count()
    dest_cnt = self._dest_data_conn().count()
    self.pre_src_count = src_cnt
    self.pre_dest_count = dest_cnt
    if dest_cnt:
        if not self.args.force:
            raise MException("in do_pre_check: dest already has %d records " % dest_cnt)
        logging.warn("before migrate, dest collection already has records: %d" % dest_cnt)
    logging.info("in pre_check [src_count: %d] [dest_cnt: %d]" % (src_cnt, dest_cnt))

    # check replset
    logging.info("in pre_check src_replsets : %s " % (str(self._get_src_replsets())))

    # check shard info
    self._print_src_shard_info()
    self._print_dest_shard_info()

    if os.path.exists(self.data_file):
        if not self.args.force:
            raise MException("in do_export data file: %s already exist" % self.data_file)
        logging.warn("data file: %s already exist, we clean it" % self.data_file)
        shutil.rmtree(self.data_file)
    os.makedirs(self.data_file)  # data_file is used as a directory

    # save info
    self.src_chunks = self._get_src_chunks()
def _alive(mongod, auth=None):
    #logging.debug("alive %s %s" % (mongod, auth))
    [host, port, path] = mongod
    cmd = 'mongostat --host %s --port %s -n1 ' % (host, port)
    if auth:
        cmd += '-u %s -p %s ' % ('__system', auth['key'])
    r = common.system(cmd, logging.debug)
    alive = r.find('insert') >= 0
    logging.info("%s alive = %s" % (mongod, alive))
    return alive
def do_export_parallel(self):
    host, user, passwd = self.src
    db_name, collection_name = self.src_ns.split('.')

    shardkey = self.src_chunks[0]['min'].keys()[0]
    shardkey_type = type(self.src_chunks[0]['max'][shardkey])

    wm = WorkerManager(self.args.worker)
    for c in self.src_chunks:
        r1 = str(c['min'][shardkey]).replace('()', '')
        r2 = str(c['max'][shardkey]).replace('()', '')

        # compute a file name for each chunk
        this_data_file = '%s-%s' % (r1, r2)
        c['data_file'] = self.data_file + '/' + this_data_file
        logging.debug("add export task %s -- %s @ %s -> %s " % (c['min'], c['max'], c['shard'], c['data_file']))

        cond = []
        if r1 != 'MinKey':
            if shardkey_type in [int, long]:
                cond.append('$gte: %s' % r1)
            elif shardkey_type in [str, unicode]:
                cond.append('$gte: "%s"' % r1)
            else:
                raise MException("unknown shardkey_type: %s" % shardkey_type)
        if r2 != 'MaxKey':
            if shardkey_type in [int, long]:
                cond.append('$lt: %s' % r2)
            elif shardkey_type in [str, unicode]:
                cond.append('$lt: "%s"' % r2)
        cond = ','.join(cond)
        query = "-q '{ $query: {%(shardkey)s: {%(cond)s}}}'" % locals()

        wm.add_job(_mongoexport, host, user, passwd, db_name, collection_name, c['data_file'], query, self.export_callback)

    wm.start()
    wm.wait_for_complete()

    wc = common.system('wc -l %s/* | tail -1' % self.data_file, logging.debug)
    wc_records_export = int(wc.split()[0])
    logging.debug("wc_records_export: %d" % wc_records_export)
    if wc_records_export != self.records_export:
        info = "wc_records_export != self.records_export"
        raise MException(info)
def _mongoimport(host, user, passwd, db_name, collection_name, data_file, callback=None, reimport_data_file=None):
    (host, port) = host.split(':')
    port = int(port)
    mongo_path = conf.MONGO_PATH
    cmd = '''%(mongo_path)s/mongoimport -u %(user)s -p %(passwd)s
             --host %(host)s --port %(port)d
             --db %(db_name)s --collection %(collection_name)s
             --file %(data_file)s ''' % locals()
    cmd = re.sub('\n *', ' ', cmd)
    r = common.system(cmd, logging.debug)
    logging.debug(r)

    # invoke callback with the number of imported records
    m = re.search(r'imported (\d+) objects', r, re.DOTALL)
    if not m:
        info = "error on import: " + r
        logging.error(info)
        raise MException(info)
    logging.debug(m.group(0))
    records_import = int(m.group(1))
    if callback:
        callback(records_import)

    if reimport_data_file:
        reimport_fd = file(reimport_data_file, 'a')
        # write failed records to the reimport records file
        for line in r.split('\n'):
            pos = line.find('error_on_import:')
            if pos != -1:
                pos += len('error_on_import:')
                line = line[pos:]
                print >> reimport_fd, line
        reimport_fd.close()
def do_export(self):
    host, user, passwd = self.src
    if os.path.exists(self.data_file):
        if not self.args.force:
            raise MException("in do_export data file: %s already exist" % self.data_file)
        logging.warn("data file: %s already exist, we clean it" % self.data_file)
        shutil.rmtree(self.data_file)

    logging.info("time estimate for export : %d min (5000q/s)" % (self.pre_src_count / 5000 / 60))
    db_name, collection_name = self.src_ns.split('.')
    _mongoexport(host, user, passwd, db_name, collection_name, self.data_file)

    size = os.path.getsize(self.data_file)
    wc = common.system('wc -l %s ' % self.data_file, logging.debug)
    self.records_export = int(wc.split()[0])
    logging.info("export done [data_file:%s] [size:%d] [records:%d]" % (self.data_file, size, self.records_export))
    logging.info("time estimate for import: %d min (5000/s)" % (self.records_export / 5000 / 60))
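# Worked example of the estimates logged above (illustrative numbers only):
# with pre_src_count = 30,000,000 records at the assumed rate of 5000
# records/s, 30000000 / 5000 / 60 = 100, i.e. roughly 100 minutes for export.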
def _runjs(self, js, need_primary):
    logging.debug('_run_js: \n' + js.replace(' ', '').replace('\n', ' '))
    filename = TmpFile().content_to_tmpfile(js)
    if need_primary:
        primary = self._get_primary()
        host, port = primary.split(':')
    else:
        primary = self.args['mongod'][0]
        host = socket.gethostbyname(primary[0])
        port = primary[1]
    key = self.args['key']
    cmd = './mongodb-base/bin/mongo --quiet %(host)s:%(port)s/admin -u __system -p %(key)s' % locals()
    cmd += ' ' + filename
    r = common.system(cmd, logging.debug)
    if r.find('command failed') >= 0 or r.find('uncaught exception') >= 0:
        raise Exception('run js error: \n' + r)
    logging.debug(r)
    return r
from collections import defaultdict
from argparse import RawTextHelpFormatter
from pcl import common
from pcl import crontab
from string import Template

# we have to do this here, so that lib/monitor.py can use conf.xxx
# import config in conf/REDIS_DEPLOY_CONFIG.py
if 'REDIS_DEPLOY_CONFIG' not in os.environ:
    logging.error('please export REDIS_DEPLOY_CONFIG=conf && . ./bin/active')
    exit(1)
config_name = os.environ['REDIS_DEPLOY_CONFIG']
conf = __import__(config_name, globals(), locals(), [], 0)  # import config module

common.system('mkdir -p data tmp', None)


def my_json_encode(j):
    return json.dumps(j, cls=common.MyEncoder)


def strstr(s1, s2):
    return s1.find(s2) != -1


def lets_sleep(SLEEP_TIME=0.1):
    time.sleep(SLEEP_TIME)


def TT(template, args):  # TODO: modify all
    return Template(template).substitute(args)


def system_with_timeout(cmd, log_fun=logging.info, timeout=60*60*24*30):
    if log_fun:
        log_fun(cmd)
def _remote_run(self, raw_cmd):
    if raw_cmd.find('"') >= 0:
        raise Exception('bad cmd: ' + raw_cmd)
    cmd = 'ssh -n -f %s@%s "%s"' % (self.args['ssh_user'], self.args['host'], raw_cmd)
    return common.system(cmd, logging.debug)
def _remote_run(user, host, raw_cmd):
    if raw_cmd.find('"') >= 0:
        error('bad cmd: ' + raw_cmd)
        return
    cmd = 'ssh -n -f %s@%s "%s"' % (user, host, raw_cmd)
    return common.system(cmd, logging.info)
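# Usage sketch (hypothetical user and host): the helper simply wraps the
# command in `ssh -n -f user@host "..."`, so the call below runs
#   ssh -n -f deploy@10.0.0.1 "mkdir -p /data/mongodb-27017"
# on the remote machine.
_remote_run('deploy', '10.0.0.1', 'mkdir -p /data/mongodb-27017')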
def cmd(cmd):
    print cmd
    #print cmd.replace('\n', '')
    return common.system(cmd.replace('\n', ' '))
def _init():
    #common.system('rm -rf ./mongodb-base', logging.debug)
    common.system('mkdir -p ./mongodb-base/bin', logging.debug)
    common.system('mkdir -p ./mongodb-base/conf', logging.debug)
    common.system('mkdir -p ./mongodb-base/log', logging.debug)
    common.system('mkdir -p ./mongodb-base/db', logging.debug)
    common.system('cp -u %s/bin/mongo ./mongodb-base/bin' % conf.MONGO_DB_PATH, logging.debug)
    common.system('cp -u %s/bin/mongostat ./mongodb-base/bin' % conf.MONGO_DB_PATH, logging.debug)
    common.system('cp -u %s/bin/mongod ./mongodb-base/bin' % conf.MONGO_DB_PATH, logging.debug)
    common.system('cp -u %s/bin/mongos ./mongodb-base/bin' % conf.MONGO_DB_PATH, logging.debug)
    common.system('cp conf/mongod.conf ./mongodb-base/conf', logging.debug)