def get_product_md5(p_name, conn, cursor, log_file, proc_id='Unknown', pid=False):
    """Fetch the md5 (and optionally the id) of product *p_name* from the DB.

    Returns (err_code, md5) or, when pid is True, (err_code, md5, id).
    err_code is 0 on success, 501 when no row matched the product name,
    otherwise the error code reported by submit_query.
    """
    CMD = 'get-prod-md5'
    query = db_query.get_md5_product(p_name)
    err = submit_query(query, cursor, conn=conn)
    check_error(proc_id, err['code'], CMD, log_file, arg_err=err['msg'])
    if err['code'] != 0:
        # query failed: propagate the DB error code, no values
        if pid:
            return err['code'], None, None
        else:
            return err['code'], None
    res = cursor.fetchone()
    if res is None:
        # no product with that name in the DB
        if pid:
            return 501, None, None
        else:
            return 501, None
    else:
        try:
            # dict-style row (e.g. a dict cursor factory) — keyed access
            if pid:
                return 0, res['md5'], res['id']
            else:
                return 0, res['md5']
        except:  # NOTE(review): bare except — also hides unrelated errors; tuple-row fallback
            if pid:
                return 0, res[0], res[1]
            else:
                return 0, res[0]
def get_available_media_list(args, log_file, exit_on_error=True, arch=None, proc_id='Unknown', check_usage=False):
    """Return the media ids available for the configured policy.

    When *arch* is given, only media confirmed available in that archive are
    returned (arch == 'vault' flips the query to unavailable media).
    Returns an empty list when the web-service response is not OK.
    """
    CMD = 'get-available-media-list'
    endpoint = '/media/fsmedlist/state'
    media_ids = []
    params = {'available': 'true'}
    if arch == 'vault':
        # vaulted tapes are reported under available=false
        params['available'] = 'false'
    if args.policy == 'blank':
        params['blank'] = 'true'
    else:
        params['policy'] = args.policy
    rc, raw_rsp = do_webservices_cmd(endpoint, args, params)
    check_error(proc_id, rc, CMD, log_file, arg_err=raw_rsp, exit_on_error=exit_on_error)
    parsed = json_load(raw_rsp)
    rc = check_ws_rsp_status(parsed, log_file, CMD, exit_on_error, proc_id=proc_id)
    if rc == 0:
        medias = parsed['classes'][0]['available']['medias']
        if arch is None:
            media_ids = [m['mediaId'] for m in medias]
        else:
            media_ids = [
                m['mediaId'] for m in medias
                if check_if_media_available(m['mediaId'], arch, args, check_usage=check_usage)
            ]
    return media_ids
def move_tape(media_lst, args, log_file, destination='vault', proc_id='Unknown'):
    '''
    this will move the tapes in media_lst to an archive destination
    by default the operation vault the tapes

    Returns a dict: {'code': 0, 'msg': ...} on success,
    {'code': 313, 'msg': ..., 'errors': [...]} when some media failed,
    or {'code': err_code} when the web-service call itself failed.
    '''
    CMD = 'move-tape'
    ws_cmd = '/media/vsmove'
    err_code, ws_rsp = do_webservices_cmd(ws_cmd, args, params={'media': media_lst, 'archive': destination})
    check_error(proc_id, err_code, CMD, log_file, arg_err=ws_rsp)
    if err_code != 0:
        return {'code': err_code}
    ws_rsp = json_load(ws_rsp)
    # check the outcome of the vsmove operation
    check_for_err = True
    if 'vsmoveSuccess' in ws_rsp['vsmoveOutput'].keys():
        check_for_err = not int(ws_rsp['vsmoveOutput']['vsmoveSuccess']['vsmoveSuccessCompleted']) == len(media_lst)
        if not check_for_err:
            # fix: this success return was previously unconditional, which made
            # the per-media error inspection below unreachable dead code
            return {'code': 0, 'msg': 'All {0} media(s) are moving to {1}'.format(len(media_lst), destination)}
    if check_for_err:
        error = 0
        msgs = []
        for err_tmp in ws_rsp['vsmoveOutput']['mediaErrors']:
            if err_tmp['mediaErrorText'] != 'no err':
                error += 1
                msgs.append({'media': err_tmp['mediaErrorMedium'],
                             'error': err_tmp['mediaErrorText']})
        msg = '{0} out of {2} error(s) moving media(s) to {1}'.format(error, destination, len(media_lst))
        return {'code': 313, 'msg': msg, 'errors': msgs}
def get_product_md5_and_path(p_name, conn, cursor, log_file, proc_id='Unknown', p_id=None):
    """Fetch md5 and path (and optionally id) of product *p_name*.

    Returns (err_code, info) where info is {'md5': ..., 'path': ...} plus an
    'id' key when p_id is set.  err_code: 0 on success, 501 when no row
    matched, otherwise the submit_query error code (info values stay None).
    """
    CMD = 'get-prod-md5-path'
    err = submit_query(db_query.get_prod_path_md5(p_name), cursor, conn=conn)
    check_error(proc_id, err['code'], CMD, log_file, arg_err=err['msg'])
    info = {'md5': None, 'path': None}
    if err['code'] != 0:
        return err['code'], info
    row = cursor.fetchone()
    if not row:
        return 501, info
    if p_id:
        info['id'] = None
    try:
        # dict-style row: copy by key
        for key in info.keys():
            info[key] = row[key]
    except:
        # tuple-style row: copy by position
        for pos, key in enumerate(info.keys()):
            info[key] = row[pos]
    return 0, info
def get_file_sample_from_file(media, sample_size, log_file, fsmedinfofile, exit_on_error=False, proc_id='Unknown', info=None):
    """Read a previously-saved fsmedinfo JSON file and return a random sample
    of the file paths it lists for *media*.

    sample_size is a fraction: n_sel = int(sample_size * n_files).
    Returns (products, err_code) or, when *info* is given, a third dict with
    the requested media-level keys.  err_code 517 means the file could not be
    parsed.
    """
    CMD = 'fs-get-file-sample'
    with open(fsmedinfofile, 'r') as f:
        try:
            ws_rsp = json_f_load(f)
        except:  # NOTE(review): bare except — any parse/IO error maps to 517
            msg = 'Media {0} - file {1}: unable to parse the fsmedinfo out'.format(media, fsmedinfofile)
            check_error(proc_id, 500, CMD, log_file, arg_err=msg, exit_on_error=exit_on_error)
            if info is None:
                return [], 517
            else:
                return [], 517, {}
    products = [fix_stored_name(prod['path']) for prod in ws_rsp['medias'][0]['files']]
    n_file = len(products)
    n_sel = int(sample_size*n_file)
    msg = 'Media {0}: {2} out of {1} files selected'.format(media, n_file, n_sel)
    check_error(proc_id, 0, CMD, log_file, msg=msg, log_msg=True)
    # random subsample of the listed files
    products = sample(products, n_sel)
    if info is not None:
        # pass back requested media-level attributes that exist in the response
        info_out = {k: ws_rsp['medias'][0][k] for k in info if k in ws_rsp['medias'][0].keys()}
        return products, 0, info_out
    return products, 0
def get_prod_info(media, args, cursor, query, sample_size, log_file, max_attempt=3, exit_on_error=False, conn=None, proc_id='Unknown'):
    """Sample files from *media* and look up their product md5/path in the DB.

    *query* is a callable building the SQL from a quoted name list.
    Returns (prod_info, n_prod, products) where prod_info maps the sampled
    file path to {'md5': ..., 'path': ...}.  On any failure returns
    (None, 0, None).
    """
    CMD = 'get-prod_info'
    products, err = get_file_sample(media, args, sample_size, log_file,
                                    max_attempt=max_attempt,
                                    exit_on_error=exit_on_error, proc_id=proc_id)
    n_sel = len(products)
    if err != 0:
        return None, 0, None
    # map product name -> full path, and build the SQL IN-list literal
    product_names = {get_prod_name(prod): prod for prod in products}
    prod_names = "('" + "','".join(list(product_names.keys())) + "')"
    err = submit_query(query(prod_names), cursor, conn=conn)
    check_error(proc_id, err['code'], CMD, log_file, arg_err=err['msg'])
    if err['code'] != 0:
        # fix: this path previously fell off the end of the function and
        # returned a bare None, breaking callers that unpack three values
        return None, 0, None
    prod_info = {product_names[prod[0]]: {'md5': prod[2], 'path': prod[1]} for prod in cursor}
    products = list(prod_info.keys())
    n_prod = len(products)
    msg = 'Media {0}: {1} out of {2} of selected files were products'.format(media, n_prod, n_sel)
    check_error(proc_id, 0, CMD, log_file, msg=msg, log_msg=True)
    return prod_info, n_prod, products
def get_md5_path_from_db(f_lst, conn, cursor, log_file, proc_id='Unknown', hsm_root=False, dms_root=False, product_id=False, cursor_factory=False):
    """Look up product_file / md5 / id / name for a list of product names.

    Returns (err_code, out) where out maps each key to a column-aligned list
    (one entry per row).  With cursor_factory the rows are read by key,
    otherwise by position.
    """
    CMD = 'get-prod-md5-path'
    f_lst_str = convert_list_to_db_str(f_lst)
    query = db_query.get_prod_path_md5_products(p_name_lst=f_lst_str, hsm_root=hsm_root, dms_root=dms_root)
    err = submit_query(query, cursor, conn=conn)
    check_error(proc_id, err['code'], CMD, log_file, arg_err=err['msg'])
    keys = ['product_file', 'md5', 'id', 'name']
    # fix: 'id' was appended a second time when product_id was set, which
    # duplicated the key and mis-aligned the positional column mapping below
    out = {k: [] for k in keys}
    if err['code'] == 0:
        if cursor_factory:
            # dict-style rows: read columns by name
            for res in cursor:
                for k in keys:
                    out[k].append(res[k])
        else:
            # tuple-style rows: read columns by position, in keys order
            for res in cursor:
                for i, k in enumerate(keys):
                    out[k].append(res[i])
    return err['code'], out
def get_full_media_list(args, log_file, policy, exit_on_error=True, proc_id='Unknown'): CMD = 'get-full-media-list' ws_cmd = '/media/fsmedlist/state' param = { 'protect' :'true', 'unformatted' : 'true' } err, ws_rsp = do_webservices_cmd(ws_cmd, args, param) check_error(proc_id, err, CMD, log_file, arg_err=ws_rsp, exit_on_error=exit_on_error) ws_rsp = json_load(ws_rsp) err=check_ws_rsp_status(ws_rsp, log_file, CMD, exit_on_error, proc_id=proc_id) tape_report = { p: { 'suspect' : [], 'error' : [], 'available' : [], 'unavailable' : [], 'vault' : [], 'tot' : 0 } for p in policy } tape_report['blank'] = { 'write-unprotect': 0, 'tot' : 0 } '''
def check_ws_rsp_status(ws_rsp, log_file, CMD, exit_on_error, proc_id='Unknown', retrieval=False):
    """Check the status section of a StorNext web-service JSON response.

    Returns 0 when any status reports commandStatus 'completed' or a status
    number of 355/390 (treated as benign), otherwise 201.  The outcome is
    also reported through check_error.  *retrieval* is kept for interface
    compatibility but is currently unused.
    """
    err = 201
    err_msg = ws_rsp['statuses'][0]['statusText']
    for status in ws_rsp['statuses']:
        if status['commandStatus'] == 'completed' or status['statusNumber'] in [355, 390]:
            err = 0
            err_msg = 'command completed'
            break
    # fix: removed a dead commented-out block (an old if/elif version kept as a
    # triple-quoted string, containing syntactically invalid 'elif if' code)
    check_error(proc_id, err, CMD, log_file, arg_err=err_msg, exit_on_error=exit_on_error)
    return err
def retrieve_file_from_tape(f_lst, log_file, args, proc_id='Unknown'):
    """Trigger an fsretrieve for the files in *f_lst*.

    Returns 0 on success, otherwise the web-service or response-status error
    code.
    """
    CMD = 'retrieve-file-from-tape'
    rc, raw_rsp = do_webservices_cmd('/file/fsretrieve', args, params={'file': f_lst})
    check_error(proc_id, rc, CMD, log_file, arg_err=raw_rsp)
    if rc != 0:
        return rc
    parsed = json_load(raw_rsp)
    return check_ws_rsp_status(parsed, log_file, CMD, exit_on_error=False, proc_id=proc_id)
def check_available_media(tape_lst, args, proc_id, log_file):
    """Return the subset of *tape_lst* currently in the archive args.arch.

    Exits (via check_error with exit_on_error=True) when the web-service call
    fails, the response cannot be parsed, or no tape is available.
    """
    n_tape = len(tape_lst)
    CMD = 'check-tapes-availability'
    cmd = '/media/fsmedinfo/'
    arch = args.arch
    av_tape = []  # fix: was unbound if parsing failed and check_error returned
    err_code, ws_rsp = do_webservices_cmd(cmd, args, params={'media': tape_lst})
    if err_code != 0:
        check_error(proc_id, 201, CMD, log_file, arg_err=ws_rsp, exit_on_error=True)
    else:
        try:
            ws_rsp = json_load(ws_rsp)
            av_tape = [a['mediaId'] for a in ws_rsp['medias'] if a['currentArchive'] == arch]
        except:  # NOTE(review): bare except kept — any parse error maps to 202
            check_error(proc_id, 202, CMD, log_file, exit_on_error=True)
    n_av_tape = len(av_tape)
    if n_av_tape == 0:
        check_error(proc_id, 302, CMD, log_file, exit_on_error=True)
    else:
        if n_tape == n_av_tape:
            # fix: dropped a no-op .format() call on a placeholder-free string
            msg = 'all identified tapes are available'
        else:
            msg = '{0} over {2} available tapes were identified: {1}'.format(n_av_tape, ','.join(av_tape), n_tape)
        check_error(proc_id, 0, CMD, log_file, log_msg=True, msg=msg)
    return av_tape
def sort_prod_on_tape(prod_list, args, media, log_file, arch=None, exit_on_error=False, proc_id='Unknown', get_prod=False):
    '''
    Sort the products on the base of starting block
    :param: prod_list, list with full path of product in stornext

    Queries /file/fsfiletapeloc per product and sorts by startBlock.
    Returns (sorted_products, 0), or ([], 310) if arch is given and a product
    turns out to live in a different library.
    '''
    CMD = 'sort-products'
    ws_cmd = '/file/fsfiletapeloc'
    block_id = []
    block_id_app = block_id.append  # hoisted bound method for the loop
    for prod in prod_list:
        prod_fixed = fix_stored_name(prod)
        err, ws_rsp = do_webservices_cmd(ws_cmd, args, {'file': prod_fixed})
        check_error(proc_id, err, CMD, log_file, arg_err=ws_rsp, exit_on_error=exit_on_error)
        if err != 0:
            # NOTE(review): skipping here leaves block_id shorter than
            # prod_list, so the zip() below silently drops trailing products
            continue
        ws_rsp = json_load(ws_rsp)
        if ws_rsp['statuses'][0]["statusText"] == "Command Successful.":
            block_id_app(ws_rsp["fileInfo"]["startBlock"])
            if arch is not None:
                if ws_rsp["fileInfo"]["libraryId"] != arch:
                    err_msg = 'media {0} not in {1} but in {2}'.format(media, arch, ws_rsp["fileInfo"]["libraryId"])
                    check_error(proc_id, 309, CMD, log_file, arg_err=err_msg, exit_on_error=exit_on_error)
                    return [], 310
        else:
            # unknown location: keep list alignment with a None block id
            # (None sorts before ints in Python 2)
            block_id_app(None)
            err_msg = 'media: ' + media
            err_msg += prod + ws_rsp['statuses'][0]["statusText"]
            check_error(proc_id, 304, CMD, log_file, arg_err=err_msg, exit_on_error=exit_on_error)
    # sort products by their start block on tape
    product_sorted = [p for _, p in sorted(zip(block_id, prod_list))]
    if not get_prod:
        pTmp = [fix_stored_name(p) for p in product_sorted]
        product_sorted = pTmp
    '''
    block_id_sorted = deepcopy(block_id)
    block_id_sorted.sort()
    ind_block = [block_id.index(id) for id in block_id_sorted if id is not None]
    if get_prod:
        product_sorted = [get_prod_name(prod_list[id]) for id in ind_block]
    else:
        product_sorted = [fix_stored_name(prod_list[id]) for id in ind_block]
    '''
    err_msg = 'Media {0}: {1} out of {2} products were sorted'.format(media, len(product_sorted), len(prod_list))
    check_error(proc_id, 0, CMD, log_file, log_msg=True, msg=err_msg)
    return product_sorted, 0
def get_import_date(args, media, log_file, exit_on_error=False, proc_id='Unknown'):
    """Return the importDate reported by fsmedinfo for *media*.

    Returns None when the web-service call or its status check fails.
    """
    CMD = 'get-tape-importdate'
    rc, raw_rsp = do_webservices_cmd('/media/fsmedinfo', args, {'media': media})
    check_error(proc_id, rc, CMD, log_file, arg_err=raw_rsp, exit_on_error=exit_on_error)
    if rc != 0:
        return None
    parsed = json_load(raw_rsp)
    rc = check_ws_rsp_status(parsed, log_file, CMD, exit_on_error, proc_id=proc_id)
    if rc != 0:
        return None
    return parsed['medias'][0]["importDate"]
def update_tape_status(media_lst, args, status, log_file, proc_id='Unknown'):
    '''
    this will change the status of a list of tapes

    *status* must be one of the fschmedstate states below; returns
    {'code': 0} on success, {'code': 318} for an invalid status, otherwise
    {'code': err_code} from the web-service call.
    '''
    CMD = 'update-tape-status'
    suitable_status = ['unsusp', 'protect', 'unprotect', 'avail', 'unavail', 'unmark']
    if status not in suitable_status:
        check_error(proc_id, 318, CMD, log_file, arg_err=status)
        # fix: previously returned {'code': err_code} with err_code unbound,
        # raising NameError instead of reporting the invalid status
        return {'code': 318}
    ws_cmd = '/media/fschmedstate'
    err_code, ws_rsp = do_webservices_cmd(ws_cmd, args, params={'media': media_lst, 'state': status})
    check_error(proc_id, err_code, CMD, log_file, arg_err=ws_rsp)
    return {'code': err_code}
def sort_prod_on_tape_db(file_list, args, media, log_file, conn, cursor, arch=None, exit_on_error=False, proc_id='Unknown', get_prod=False, cursor_factory=False):
    '''
    Sort the products on the base of starting block
    :param: prod_list, list with full path of product in stornext

    DB-backed variant of sort_prod_on_tape: resolves product paths through
    get_md5_path_from_db first, then sorts them by startBlock from
    /file/fsfiletapeloc.  Returns (sorted_products, err), or ([], 310) when a
    product is found in a library other than *arch*.
    '''
    CMD = 'sort-products'
    ws_cmd = '/file/fsfiletapeloc'
    block_id = []
    block_id_app = block_id.append  # hoisted bound method for the loop
    prod_lst = [get_prod_name(f) for f in file_list]
    # --- retrieve prod path and md5
    err, out = get_md5_path_from_db(prod_lst, conn, cursor, log_file, proc_id=proc_id, hsm_root=True, cursor_factory=cursor_factory)
    for prod_fixed in out['product_file']:
        err, ws_rsp = do_webservices_cmd(ws_cmd, args, {'file': prod_fixed})
        check_error(proc_id, err, CMD, log_file, arg_err=ws_rsp, exit_on_error=exit_on_error)
        if err != 0:
            # NOTE(review): skipping leaves block_id shorter than
            # out['product_file'], so zip() below drops trailing products
            continue
        ws_rsp = json_load(ws_rsp)
        if ws_rsp['statuses'][0]["statusText"] == "Command Successful.":
            block_id_app(ws_rsp["fileInfo"]["startBlock"])
            if arch is not None:
                if ws_rsp["fileInfo"]["libraryId"] != arch:
                    err_msg = 'media {0} not in {1} but in {2}'.format(media, arch, ws_rsp["fileInfo"]["libraryId"])
                    check_error(proc_id, 309, CMD, log_file, arg_err=err_msg, exit_on_error=exit_on_error)
                    return [], 310
        else:
            # unknown tape location: keep alignment with a None block id
            block_id_app(None)
            print prod_fixed
            err_msg = 'media: {} {} '.format(media, prod_fixed)
            err_msg += ws_rsp['statuses'][0]["statusText"]
            print err_msg
            check_error(proc_id, 304, CMD, log_file, arg_err=err_msg, exit_on_error=exit_on_error)
    # sort products by their start block on tape
    product_sorted = [p for _, p in sorted(zip(block_id, out['product_file']))]
    if not get_prod:
        pTmp = [fix_stored_name(p) for p in product_sorted]
        product_sorted = pTmp
    err_msg = 'Media {0}: {1} out of {2} products were sorted'.format(media, len(product_sorted), len(out['product_file']))
    check_error(proc_id, 0, CMD, log_file, log_msg=True, msg=err_msg)
    # NOTE(review): err here is whatever the last loop iteration (or the DB
    # lookup) left it as — confirm this is the intended return code
    return product_sorted, err
def get_md5_file(file_lst, args, log_file, proc_id='Unknown'):
    """Query fsfileinfo with checksums for *file_lst* and collect the md5s.

    Returns (0, md5_lst) where each entry is {'i': index in file_lst,
    'md5': checksum}; files still on DISK are reported (204) and skipped.
    Returns (202, []) when the web-service call/parse fails.
    """
    CMD = 'get-file-info'
    cmd = '/file/fsfileinfo/'
    # fix: the WS command was previously issued twice and the first
    # response discarded — one call is enough
    try:
        ws_rsp = do_webservices_cmd(cmd, args, {'file': file_lst, 'checksum': 'true'})
        if isinstance(ws_rsp, tuple):
            ws_rsp = ws_rsp[1]
        ws_rsp = json_load(ws_rsp)
    except:  # NOTE(review): bare except kept — any failure maps to 202
        check_error(proc_id, 202, CMD, log_file)
        return 202, []
    # report files the command could not describe (fewer infos than requests)
    if len(ws_rsp['fileInfos']) < len(file_lst):
        for m in ws_rsp['statuses'][:-1]:
            check_error(proc_id, 203, CMD, log_file, arg_err=m['statusText'])
    md5_lst = []
    for f in ws_rsp['fileInfos']:
        if f['location'] == 'DISK':
            # no tape checksum available for files still on disk
            check_error(proc_id, 204, CMD, log_file, arg_err=f['fileName'])
        else:
            md5_lst.append({'i': file_lst.index(f['fileName']),
                            'md5': f['checksums'][0]['checksumValue']})
    return 0, md5_lst
def process_args_ingestion(log_file):
    """Parse the command-line arguments for product ingestion.

    Exits through check_error (code 1031) when --product-id is missing.
    Returns (args, 0).
    """
    # fix: CMD was referenced below but never defined (NameError on the
    # error path).  NOTE(review): proc_id is presumed to be a module-level
    # global — confirm against the rest of the file.
    CMD = 'parse-arg'
    parser = argparse.ArgumentParser()
    parser.add_argument("--product-id", action='store', dest='product_id', metavar='[product id]', help=" product id to be ingested")
    parser.add_argument("--format", action='store', dest='format', metavar='[metadata format]', help=" format of metadata")
    parser.add_argument("--pattern", action='store', dest='pattern', metavar='[metadata pattern]', help=" pattern metadata source")
    args = parser.parse_args()
    if args.product_id is None:
        check_error(proc_id, 1031, CMD, log_file, exit_on_error=True)
    return args, 0
def retrieve_tape_lst(f_lst, log_file, proc_id, args, f_sample=40):
    """Group the files in *f_lst* by the tape (mediaId) that holds them.

    Queries fsfileinfo in chunks of *f_sample* files, preferring the copy-1
    media of each file.  Returns (file_by_tape, tape_lst) where file_by_tape
    maps mediaId -> list of file names.  Exits (code 301) when no tape is
    identified.
    """
    CMD = 'retrieve-tape-list'
    cmd = '/file/fsfileinfo'
    n_files = len(f_lst)
    file_by_tape = {}
    # query the web service in chunks of f_sample files
    n_truncks = get_truncks_number(n_files, f_sample)
    for i in range(n_truncks):
        i0 = i*f_sample
        # slice upper bound is clamped by Python slicing, so n_files+1 is safe
        i1 = min((i+1)*f_sample, n_files+1)
        err_code, ws_rsp = do_webservices_cmd(cmd, args, params={'file': f_lst[i0:i1]})
        if err_code != 0:
            check_error(proc_id, 201, CMD, log_file, arg_err=ws_rsp)
            continue
        else:
            try:
                ws_rsp = json_load(ws_rsp)
                for item in ws_rsp['fileInfos']:
                    # prefer the copy-1 media; otherwise fall back to the
                    # second listed copy
                    if int(item['medias'][0]['copy']) == 1:
                        media_id = item['medias'][0]['mediaId']
                    else:
                        media_id = item['medias'][1]['mediaId']
                    if media_id not in file_by_tape.keys():
                        file_by_tape[media_id] = []
                    file_by_tape[media_id].append(item['fileName'])
                '''
                tape_lst = tape_lst | { a['medias'][0]['mediaId'] if int(a['medias'][0]['copy'])==1 else a['medias'][1]['mediaId'] for a in ws_rsp['fileInfos'] }
                '''
            except:  # NOTE(review): bare except — parse errors map to 202
                check_error(proc_id, 202, CMD, log_file)
                continue
    tape_lst = list(file_by_tape.keys())
    n_tape = len(tape_lst)
    if n_tape == 0:
        check_error(proc_id, 301, CMD, log_file, exit_on_error=True)
    else:
        msg = '{0} tapes were identified: {1}'.format(len(tape_lst), ','.join(tape_lst))
        check_error(proc_id, 0, CMD, log_file, log_msg=True, msg=msg)
    return file_by_tape, tape_lst
def write_fsmedinfo_on_file(media, args, log_file, proc_id, f_name=None, max_attempt=3, exit_on_error=False, format='text'):
    """Fetch the verbose fsmedinfo output for *media* and write it to f_name.

    Retries the web-service call up to max_attempt times.  Returns 0 on
    success, 518 when the file cannot be written, 308 when all attempts to
    get the fsmedinfo output failed.
    """
    CMD = 'ws-get-fsmedinfo-out'
    ws_cmd = '/media/fsmedinfo'
    # --- query prod list from stornext, with retries
    attempt = 0
    check = True
    while check and attempt < max_attempt:
        attempt += 1
        err, ws_rsp = do_webservices_cmd(ws_cmd, args, {'media': media, 'verbose': 'true', 'format': format})
        check_error(proc_id, err, CMD, log_file, arg_err=ws_rsp, exit_on_error=exit_on_error)
        if err != 0:
            continue
        # first line of the text response carries the status code after ':'
        err = int(ws_rsp.split('\n')[0].split(':')[-1].strip())
        if err == 0:
            check = False
            try:
                # fix: the explicit f.close() inside the with-block was
                # redundant — the context manager closes the file
                with open(f_name, 'w') as f:
                    f.write(ws_rsp)
                err_msg = 'Media {0}: fsmedinfo succefully written in {1}'.format(media, f_name)
                err_code = 0
            except:  # NOTE(review): bare except — any write error maps to 518
                err_code = 518
                err_msg = ''
            check_error(proc_id, err_code, CMD, log_file, arg_err=err_msg, exit_on_error=exit_on_error)
            return err_code
    if attempt == max_attempt and check:
        err_msg = 'Media {0}: failed to get fsmedinfo {1} attempts'.format(media, attempt)
        err_code = 308
    else:
        err_code = 0
        err_msg = ''
    check_error(proc_id, err_code, CMD, log_file, arg_err=err_msg, exit_on_error=exit_on_error)
    return err_code
def main():
    """Entry point for the product-ingestion script.

    NOTE(review): reconstructed from whitespace-mangled source; the function
    appears truncated at the end of this copy (no terminating cleanup/return
    is visible) — confirm against the original file.
    """
    log_dir = str(os_getenv('SYSTEM_LOG'))
    log_dir += '/DL197'
    if not isdir(log_dir):
        makedirs(log_dir)
    # log file
    log_global = os_join(log_dir, 'global_ingestion.log')
    log_zip_chk = os_join(log_dir, 'check_zip_content.log')
    args = process_args(log_global, default_arg)
    product_id = args.product_id
    # NOTE(review): proc_id is augmented before any local assignment — unless
    # a `global proc_id` declaration was lost in formatting, this raises
    # UnboundLocalError; confirm.
    proc_id += '_' + str(product_id)
    if args.format is None:
        metadata_fromat = 'xml'
    else:
        metadata_fromat = args.format
    if args.pattern is None:
        metadata_pattern = '/*/*.metadata'
    else:
        metadata_pattern = args.pattern
    #--- check processing directories
    pid = str(getpid())
    processing_dir = '{}/{}'.format(os_getenv('PROCESSING_DIR'), pid)
    # NOTE(review): the line below overrides PROCESSING_DIR with /tmp — looks
    # like a leftover debug override; confirm which one is intended.
    processing_dir = '{}/{}'.format('/tmp', getpid())
    if not isdir(processing_dir):
        try:
            makedirs(processing_dir)
        except:
            msg = 'Unable to create the directory {}'.format(processing_dir)
            check_error(proc_id, 500, 'create-processing-dir', log_global, exit_on_error=True, arg_err=msg)
    dir_lst = [pid, 'testzipdir', 'testzipdir2']
    for d in dir_lst:
        dTmp = '{}/{}'.format(processing_dir, d)
        if not isdir(dTmp):
            try:
                makedirs(dTmp)
            except:
                msg = 'Unable to create the directory {}'.format(dTmp)
                check_error(proc_id, 500, 'create-processing-dir', log_global, exit_on_error=True, arg_err=msg)
    #--- go to local working directory
    chdir(processing_dir)
    #-- db connection
    conn, cursor, err = db_connect()
    check_error(proc_id, err['code'], 'db-connect', log_global, exit_on_error=True)
    #--- Getting the product status
    query = db_query.get_product_status(product_id)
    err = submit_query(query, cursor, conn=conn)
    check_error(proc_id, err['code'], 'get-product-status', log_global, exit_on_error=True, arg_err=err['msg'])
    check_query_res(cursor, 'get-product-status', log_global, conn=conn, exit_on_error=True)
    product_status = cursor.fetchone()[0]
    # check if this is a new attempt to ingest a previously ARCHIVED product
    print('PRODUCT_STATUS : ' + product_status)
    if product_status != 'NEW':
        conn.close()
        check_error(proc_id, 800, 'get-product-status', log_global,
                    exit_on_error=True, arg_err=product_id)
    # update the product status to ACTIVE
    query = db_query.update_product_status(product_id, 'ACTIVE')
    err = submit_query(query, cursor, conn=conn, commit=True)
    check_error(proc_id, err['code'], 'upd-product-status', log_global, exit_on_error=True, arg_err=err['msg'])
    # retrieve the ingestion parameters
    query = db_query.get_product_info(product_id)
    err = submit_query(query, cursor, conn=conn)
    check_error(proc_id, err['code'], 'get-product-info', log_global, exit_on_error=True, arg_err=err['msg'])
    check_query_res(cursor, 'get-product-info', log_global, conn=conn, exit_on_error=True)
    dTmp = cursor.fetchone()
    product_name = dTmp[0]
    product_type = dTmp[1]
    print 'Product Name: {}'.format(product_name)
    print 'Product Type: {}'.format(product_type)
    query = db_query.get_initial_path(product_id)
    err = submit_query(query, cursor, conn=conn)
    check_error(proc_id, err['code'], 'get-product-info', log_global, exit_on_error=True, arg_err=err['msg'])
    check_query_res(cursor, 'get-product-info', log_global, conn=conn, exit_on_error=True)
    query = db_query.get_duplicated_prod(product_id)
    err = submit_query(query, cursor, conn=conn)
    check_error(proc_id, err['code'], 'get-product-info', log_global, exit_on_error=True, arg_err=err['msg'])
def process_args_hsm_run_job(log_file, default_arg):
    """Parse the command-line arguments for the hsm run-job script.

    Validates batch id presence, MDC ip format and the user/password
    authentication string; exits through check_error on any failure.
    Returns the parsed args namespace.
    """
    # fix: CMD was referenced in the ip/authentication checks below but never
    # defined (NameError).  NOTE(review): proc_id is presumed to be a
    # module-level global — confirm against the rest of the file.
    CMD = 'parse-arg'
    parser = argparse.ArgumentParser()
    parser.add_argument("--verbose", action='store_true', dest='verbose', help=" Be verbose in output.")
    parser.add_argument("--batch-id", action='store', dest='batch_id', metavar='[batch id]', help=" batch id")
    parser.add_argument(
        "--authenticate", action='store', dest='authenticate',
        default=default_arg['auth'], metavar='[authentication]',
        help=" Authenticate all WS calls, value is 'username/password'.")
    parser.add_argument("--protocol", action='store', dest='protocol', metavar='[PROTOCOL]',
                        choices=['http', 'https'], default=default_arg['protocol'],
                        help=" The protocol to use.")
    parser.add_argument("--format", action='store', dest='format',
                        choices=['text', 'json', 'xml'], default=default_arg['format'],
                        help=" The format for response.")
    parser.add_argument("--ip", action='store', dest='ip', default=default_arg['ip'],
                        help=" The ip of the MDC.")
    parser.add_argument("--policy", action='store', dest='policy', default=default_arg['policy'],
                        help=" The policy for the media")
    args = parser.parse_args()
    if args.batch_id is None:
        check_error(proc_id, 103, CMD, log_file, exit_on_error=True)
    #--- check the ip, standard 4 numbers '.' separated
    m = re.compile(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$')
    if not m.match(args.ip):
        check_error(proc_id, 100, CMD, log_file, arg_err=args.ip, exit_on_error=True)
    #--- check authentication
    if '/' not in args.authenticate:
        check_error(proc_id, 101, CMD, log_file, arg_err=args.authenticate, exit_on_error=True)
    return args
def check_query_res(cursor, cmd, log_file, conn=None, proc_id='Unknown', exit_on_error=False):
    """Report error 45 when the last query returned no rows.

    Closes *conn* first when a connection is given and the error is not
    fatal (exit_on_error False).  Does nothing when rows were returned.
    """
    if cursor.rowcount != 0:
        return
    if conn and not exit_on_error:
        conn.close()
    check_error(proc_id, 45, cmd, log_file, exit_on_error=exit_on_error)
def get_file_sample(media, args, sample_size, log_file, max_attempt=3, exit_on_error=False, proc_id='Unknown', info=None, perc=True):
    """Query StorNext fsmedinfo for *media* and return a (sampled) file list.

    With perc=True, sample_size < 100 is treated as a scale factor on the
    file count (NOTE(review): int(sample_size*n_file) suggests a fraction —
    confirm the expected units); sample_size >= 100 means 'all files'.  With
    perc=False, sample_size is an absolute cap.  Retries up to max_attempt
    times.  Returns (products, err_code) — err_code 308 when all attempts
    failed, 311-reported parse failures also end the retries — plus, when
    *info* is given, a dict of the requested media-level keys.
    """
    CMD = 'ws-get-file-sample'
    ws_cmd = '/media/fsmedinfo'
    # --- query prod list from stornext, with retries
    attempt = 0
    check = True
    products = []
    while check and attempt < max_attempt:
        attempt += 1
        err, ws_rsp = do_webservices_cmd(ws_cmd, args, {'media': media, 'verbose': 'true'})
        check_error(proc_id, err, CMD, log_file, arg_err=ws_rsp, exit_on_error=exit_on_error)
        if err != 0:
            continue
        try:
            ws_rsp = json_load(ws_rsp)
        except:  # NOTE(review): bare except — unparsable response aborts retries
            check_error(proc_id, 311, CMD, log_file, arg_err=media, exit_on_error=exit_on_error)
            attempt = max_attempt
            break
        err = check_ws_rsp_status(ws_rsp, log_file, CMD, exit_on_error, proc_id=proc_id)
        if err != 0:
            continue
        products = [fix_stored_name(prod['path']) for prod in ws_rsp['medias'][0]['files']]
        n_file = len(products)
        # decide how many files to keep
        if perc:
            if sample_size < 100:
                n_sel = min(int(sample_size*n_file), n_file)
            else:
                n_sel = n_file
        else:
            n_sel = min(n_file, sample_size)
        if n_sel < n_file:
            msg = 'Media {0}: {2} out of {1} files selected'.format(media, n_file, n_sel)
            check_error(proc_id, err, CMD, log_file, msg=msg, log_msg=True)
        products = sample(products, n_sel)
        check = False
    if attempt == max_attempt and check:
        err_msg = 'Media {0}: failed to get product list after {1} attempts'.format(media, attempt)
        err_code = 308
    else:
        err_code = 0
        err_msg = ''
    check_error(proc_id, err_code, CMD, log_file, arg_err=err_msg, exit_on_error=exit_on_error)
    if info is not None:
        if err_code == 0:
            # pass back requested media-level attributes present in the response
            return products, err_code, {k: ws_rsp['medias'][0][k] for k in info if k in ws_rsp['medias'][0].keys()}
        else:
            return products, err_code, {}
    else:
        return products, err_code
def update_prod_info(media, args, cursor, conn, log_file, max_attempt=3, exit_on_error=False, proc_id='Unknown', info=None):
    """Verify all products on *media* are in tape status, then update them.

    Returns the submit_query result dict (err['code'] == 0 on success,
    314 when some products are not yet in tape status), or — when *info*
    is given — a (err, info_out) pair.
    """
    # err_msg is a template: the media prefix plus a '{}' slot filled per error
    err_msg = 'Media {0}: '.format(media)
    err_msg += '{}'
    CMD = 'update-prod-info-list'
    print '{0} - Media {1}: start query hsm'.format(get_cur_time(), media)
    # sample_size=100 means 'all files' (see get_file_sample with perc=True)
    if info is None:
        products, err = get_file_sample(media, args, 100, log_file, max_attempt=max_attempt, exit_on_error=exit_on_error, proc_id=proc_id)
    else:
        products, err, info_out = get_file_sample(media, args, 100, log_file, max_attempt=max_attempt, exit_on_error=exit_on_error, proc_id=proc_id, info=info)
    print '{0} - Media {1}: end query hsm'.format(get_cur_time(), media)
    if err != 0:
        if info is None:
            return {'code': err}
        else:
            return {'code': err}, {}
    # build the SQL IN-list literal of product names
    product_names = [get_prod_name(prod) for prod in products]
    prod_names = "('" + "','".join(product_names) + "')"
    CMD = 'update-prod-info-check'
    # print '{0} - Media {1}: start status check'.format(get_cur_time(), media)
    query = db_query().count_prod_not_in_tape(prod_names)
    err = submit_query(query, cursor, conn=conn)
    check_error(proc_id, err['code'], CMD, log_file, arg_err=err_msg.format(err['msg']))
    print '{0} - Media {1}: end status check'.format(get_cur_time(), media)
    if err['code'] != 0:
        if info:
            return err, {}
        else:
            return err
    n_not_tape = cursor.fetchone()[0]
    print '{0} - Media {1}: {2} files do not have tape status'.format(get_cur_time(), media, n_not_tape)
    if n_not_tape > 0:
        # some products are not in tape status: report each offender and abort
        err = {'code': 314, 'msg': '{} out of {} products are not in tape status'.format(n_not_tape, len(product_names))}
        check_error(proc_id, err['code'], CMD, log_file, arg_err=err_msg.format(err['msg']))
        query = db_query().get_prod_not_in_tape(prod_names)
        err_0 = submit_query(query, cursor, conn=conn)
        check_error(proc_id, err_0['code'], CMD, log_file, arg_err=err_msg.format(err['msg']))
        for c in cursor:
            msg = '{0} - Media {1}: {2}, {3}, {4}'.format(' '*19, media, c[0], c[1], c[2])
            print msg
            check_error(proc_id, 314, CMD, log_file, arg_err=msg)
        if info:
            return err, {}
        else:
            return err
    CMD = 'update-prod-info'
    print '{0} - Media {1}: start status update'.format(get_cur_time(), media)
    query = db_query().update_prod_status(prod_names)
    err = submit_query(query, cursor, conn=conn, commit=True)
    check_error(proc_id, err['code'], CMD, log_file, arg_err=err_msg.format(err['msg']))
    print '{0} - Media {1}: end status update'.format(get_cur_time(), media)
    if info is None:
        return err
    else:
        return err, info_out