def _get_md5sum(cls, file_path, block_size=256 * 128):
    """
    Compute the md5 checksum of a file, reading it chunk by chunk.

    :param file_path: The full file path
    :type file_path: str
    :param block_size: size of each chunk read from the file
        (mandatory for huge files, which must not be loaded at once)
    :type block_size: int
    :rtype: tuple
    :return: status and output log (The md5 hash)
    """
    try:
        digest = hashlib.md5()  # pylint: disable=E1101
        with open(file_path, 'rb') as stream:
            chunk = stream.read(block_size)
            while chunk:
                digest.update(chunk)
                chunk = stream.read(block_size)
        hex_value = digest.hexdigest()
    except Exception as ex:  # pylint: disable=W0703
        error_msg = "GET_MD5: Fail, %s" % str(ex)
        LOGGER.error(error_msg)
        return Global.FAILURE, error_msg
    LOGGER.info("MD5 HASH: " + hex_value)
    return Global.SUCCESS, hex_value
def get_result(self, cmd_id):
    """
    Get result of deferred command.

    :type cmd_id: int
    :param cmd_id: ID of the command
    :rtype: tuple
    :return: the status and the result of the command
    :raise DeviceException: on socket or communication failure
    """
    request = json.dumps(["join", [cmd_id], {}])
    LOGGER_FWK.debug("Joining command server %d" % cmd_id)
    # Bug fix: 'conn' must exist before the try block; otherwise the
    # 'finally' clause raises NameError (masking the original error)
    # when the connection factory or connect() itself fails.
    conn = None
    try:
        conn = self._proto_factory.create()
        conn.connect()
        conn.send(request)
        status, result = json.loads(conn.receive())
        self._check_cmd_status(status, result)
    except socket.error as E:
        err, err_msg = E
        msg = "Socket error (%d): %s" % (err, err_msg)
        LOGGER_FWK.error(msg)
        raise DeviceException(DeviceException.OPERATION_FAILED, msg)
    # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
    # are no longer swallowed and re-raised as DeviceException.
    except Exception:
        msg = "Failed to communicate with embedded command server"
        LOGGER_FWK.error(msg)
        raise DeviceException(DeviceException.OPERATION_FAILED, msg)
    finally:
        if conn is not None:
            conn.disconnect()
    return status, result
def zip_tcr_campaign_data(original_folder, dest_folder, folders_and_files):
    """
    Build the TCR campaign zip archive.

    This archive contains: dut log, acs logs.

    :type original_folder: str
    :param original_folder: folder containing the campaign results
    :type dest_folder: str
    :param dest_folder: destination base name (".zip" is appended)
    :type folders_and_files: dict
    :param folders_and_files: names of the log folders/files to archive
    :rtype: tuple
    :return: status and absolute path of the zip file ("" on failure)
    """
    import zipfile
    try:
        acs_logfile_name = folders_and_files["acs_logfile_name"]
        tcr_live_reporting_logfile = folders_and_files["TCR_LIVE_REPORTING"]
        aplog_folder = folders_and_files["AP_LOGS"]
        # Bug fix: the BP_LOGS and LOGCAT_LOGS values were assigned to
        # swapped variables. Runtime behavior was unaffected (both folders
        # end up in the same archive loop below) but the names misled.
        bplog_folder = folders_and_files["BP_LOGS"]
        logcat_folder = folders_and_files["LOGCAT_LOGS"]
        dbglog_folder = folders_and_files["DEBUG_LOGS"]
        pti_folder = folders_and_files["PTI_LOGS"]
        serial_folder = folders_and_files["SERIAL_LOGS"]
        root_dut_name = folders_and_files["ROOT_DUT_NAME"]
        report_style_filename = folders_and_files["REPORT_STYLE_FILENAME"]

        filename = "{0}.zip".format(dest_folder)
        zip_file = zipfile.ZipFile(filename, 'w',
                                   compression=zipfile.ZIP_DEFLATED,
                                   allowZip64=True)
        try:
            LOGGER.info('Create TCR campaign zip file: {0}'.format(filename))
            # ACS log file + xml report
            ZipFolderUtilities.add_file_to_zip(
                zip_file,
                os.path.basename("{0}.log".format(acs_logfile_name)),
                original_folder)
            ZipFolderUtilities.add_file_to_zip(
                zip_file,
                os.path.basename("{0}.xml".format(acs_logfile_name)),
                original_folder)
            # Live reporting log is optional
            if os.path.exists(tcr_live_reporting_logfile):
                ZipFolderUtilities.add_file_to_zip(zip_file,
                                                   tcr_live_reporting_logfile,
                                                   original_folder)
            ZipFolderUtilities.add_file_to_zip(zip_file,
                                               report_style_filename,
                                               original_folder)
            # Archive every known DUT log folder of each device sub-folder
            for f in [f for f in get_subdirectories(original_folder)
                      if f.startswith(root_dut_name)]:
                for local_path in ["{0}{2}{1}".format(f, folder, os.path.sep)
                                   for folder in (aplog_folder, bplog_folder,
                                                  logcat_folder, dbglog_folder,
                                                  pti_folder, serial_folder)]:
                    ZipFolderUtilities.add_folder_to_zip(
                        zip_file,
                        os.path.join(original_folder, local_path),
                        local_path)
        finally:
            # Bug fix: the zip handle leaked when an IOError occurred while
            # adding entries; always close it.
            zip_file.close()
        status = Global.SUCCESS
        out_file = os.path.abspath(filename)
    except IOError as error:
        LOGGER.error('Cannot create zip file: {0} - {1}'.format(
            filename, error))
        status = Global.FAILURE
        out_file = ""
    return status, out_file
def create_md5sum_file(self, file_path):
    """
    Create md5sum file of input file.

    :param file_path: The full file path
    :type file_path: str
    :rtype: tuple
    :return: status and output log (the md5sum file path or an error message)
    """
    try:
        # Bug fix: the status returned by _get_md5sum was ignored, so on
        # failure the error MESSAGE would have been written to the .md5sum
        # file as if it were a hash. Fail early instead.
        status, md5hash = self._get_md5sum(file_path)
        if status != Global.SUCCESS:
            error_msg = "CREATE_MD5_FILE: Fail, " + str(md5hash)
            LOGGER.error(error_msg)
            return Global.FAILURE, error_msg
        md5sum_file_name = file_path + ".md5sum"
        file_name = os.path.basename(file_path)
        # Bug fix: use a context manager so the handle is not leaked
        # when one of the writes fails.
        with open(md5sum_file_name, 'w') as md5sum_file:
            # Standard md5sum format: "<hash> <filename>"
            md5sum_file.write(md5hash)
            md5sum_file.write(' ')
            md5sum_file.write(file_name)
            md5sum_file.write('\r\n')
    except Exception as ex:  # pylint: disable=W0703
        error_msg = "CREATE_MD5_FILE: Fail, " + str(ex)
        LOGGER.error(error_msg)
        return Global.FAILURE, error_msg
    LOGGER.info("MD5SUM FILE: %s" % md5sum_file_name)
    return Global.SUCCESS, md5sum_file_name
def __zip_analysis(self, zip_status, zip_output):
    """
    Analyse the archive creation result and, on success, create the
    associated md5sum file.

    :type zip_status: int
    :param zip_status: status returned by the zip operation
    :type zip_output: str
    :param zip_output: path to the created archive
    :rtype: tuple
    :return: status and output message
    """
    status = Global.FAILURE
    if zip_status != Global.SUCCESS:
        LOGGER.error(
            "Archiving file FAIL, please check log file for more details")
    else:
        LOGGER.info("Archiving file OK, zip file size is {}".format(
            self._get_file_size(zip_output)))
        # Create md5sum file and retrieve md5sum file path
        LOGGER.info(
            "Create md5sum file from previously created archive ...")
        status, output = self.create_md5sum_file(zip_output)
        if status != Global.SUCCESS:
            LOGGER.error(
                "Md5sum file creation FAIL, please check log file for more details"
            )
            status = Global.FAILURE
        else:
            LOGGER.info("Md5sum file creation OK")
            self.md5_acs_report_tcr = output
    if status == Global.SUCCESS:
        msg = "File are ready to upload!"
    else:
        msg = "File are not ready to upload!"
    return status, msg
def load_json_info(self, json_path):
    """
    Load all public class attributes from a json file.

    Keys starting with "_" (protected/private) are filtered out so they
    cannot clobber internal attributes of this instance.

    :type json_path: str
    :param json_path: path to a json file
    :rtype: None or bool
    :return: None when the json file cannot be loaded, True otherwise
    """

    def remove_protected_and_private_attrib(obj):
        # Keep only public keys. Bug fix: items() instead of the
        # Python2-only iteritems(), keeping Py2 compatibility.
        return dict((key, value) for key, value in obj.items()
                    if not key.startswith("_"))

    if os.path.exists(json_path):
        with open(json_path, 'r') as json_file:
            try:
                self.__dict__.update(
                    json.load(
                        json_file,
                        object_hook=remove_protected_and_private_attrib))
            except Exception as ex:
                # Bug fix: log the path (json_path), not the open file
                # object, so the message shows a usable file name.
                LOGGER.error("Cannot load the json file %s (%s)"
                             % (json_path, str(ex)))
                return None
    return True
def create_url_shortcut(self, campaign_url):
    """
    Create an html shortcut file that redirects to the given url.

    :rtype: tuple
    :return: Status and output log
    """
    try:
        if os.path.exists(Folders.REPORTS):
            output_path = os.path.join(Folders.REPORTS,
                                       self._report_name + ".html")
            if not os.path.isfile(output_path):
                LOGGER_FWK.info(
                    "CREATE_URL_SHORTCUT: Creating url shortcut to campaign result"
                )
                # Minimal html page performing an immediate redirect
                parts = [
                    "<html>\n",
                    "<head>\n",
                    "<meta http-equiv=\"refresh\" content=\"0; URL=%s\">\n" % campaign_url,
                    "</head>\n",
                    "<body></body>\n",
                    "</html>",
                ]
                with open(output_path, "w") as fd:
                    fd.write("".join(parts))
    except Exception as ex:  # pylint: disable=W0703
        error_msg = "CREATE_URL_SHORTCUT: Fail, " + str(ex)
        LOGGER_FWK.error(error_msg)
        return Global.FAILURE, error_msg
    msg = "CREATE_URL_SHORTCUT: Created link to %s" % str(campaign_url)
    return Global.SUCCESS, msg
def _parse_bench_node(self, node):
    """
    Parses XML `Phone` node(s) from Bench Catalog file and maps it into
    a python structure (dict)

    :return: A dict mapping XML configuration
    :rtype: AttributeDict

    :raise AcsConfigException.INVALID_BENCH_CONFIG: If a (or more) deprecated parameter(s)
        is/are found in a Phone node
    """
    LOGGER_FWK.info(
        'Loading optional device model parameters from CLI and/or BenchConfig '
        'for {0} ({1})...'.format(self._device_name,
                                  self._device_model_name))
    buf_params = AttributeDict()
    # Storing value to avoid recomputing each call
    device_model_name = self.device_model_name
    if device_model_name:
        buf_params["Name"] = device_model_name
    if self.device_conf_filename:
        buf_params["DeviceConfigPath"] = self.device_conf_filename
    # Get phone properties
    for attrs in node.xpath(".//Parameter"):
        name, value, description = attrs.get("name"), attrs.get(
            "value"), attrs.get("description")
        if name in self.PROHIBITIVE_KEYS:
            # Do not allow to override internal keys
            # as it would lead to nasty & unexpected behavior !!
            continue
        # Not supported anymore, raise AcsConfigException.INVALID_BENCH_CONFIG !!
        if name and value:
            # Deprecated <Parameter name=... value=.../> form: the pair is
            # recorded in _bench_conf_deprecated; the error itself is only
            # raised after the whole node has been scanned (below), so all
            # offending parameters are reported at once.
            buf_params[name] = value
            self._bench_conf_deprecated[name] = (value, description)
        else:
            # Current form: merge all attributes of the Parameter element.
            buf_params.update(attrs.attrib)
    # Report Errors if so
    if self.bench_contains_errors:
        LOGGER_FWK.error(self._report_errors())
        _error(
            'Invalid Bench Parameters format found! {0}'.format(', '.join(
                self._bench_conf_deprecated.keys())),
            AcsConfigException.INVALID_BENCH_CONFIG)
    # Extracting device modules if so
    buf_params.device_modules = self.extract_device_modules(node)
    return buf_params
def _check_cmd_status(self, status, result):
    """
    Check the status reported by the embedded command server.

    :param status: status code reported by the server
    :param result: payload associated with the status (error message on failure)
    :raise DeviceException: when the server output is unknown
    """
    if status == CommandServerApi.SRV_CMD_UNKNWON:
        msg = "Wrong embedded agent output"
        LOGGER_FWK.error(msg)
        raise DeviceException(DeviceException.OPERATION_FAILED, msg)
    if status == CommandServerApi.SRV_CMD_FAILURE:
        # Just log the error now, let UEcmd decide of what to do in case of failure
        # In this case, result contains the error message to log
        LOGGER_FWK.error(result)
def setup(self): """ Setup files which will be push to TCR server :rtype: tuple :return: Status and output log """ # Check original acs report path status = Global.FAILURE msg = "" if not os.path.exists(self.original_report_path): msg = "Cannot retrieve original ACS results: %s" % self.original_report_path LOGGER.error(msg) status = Global.FAILURE else: # compute the report folder name (used to generate html file) if not self.report_name: # If folder ending character is present, should remove it before treatment if self.original_report_path[-1:] in ('\\\\', '\\', '/'): self.report_name = str( os.path.basename(self.original_report_path[:-1])) else: self.report_name = str( os.path.basename(self.original_report_path)) if not self.new_report_path: (status, output) = self.build_file_name(self.original_report_path) if status == Global.SUCCESS: LOGGER.info("Building file name OK") # each push must have its own dedicated directory # it will be easier to manage reports cache like this : each folder # contains zip + md5 file + json info sub_folder_push = os.path.join( self.cache_reports, "%s_%s" % (CACHE_PUSH_BASE_FOLDER, time.strftime("%Y-%m-%d_%Hh%M.%S"))) self._lock_file = os.path.join(sub_folder_push, LOCK) if not os.path.isdir(sub_folder_push): os.makedirs(sub_folder_push) with open(self._lock_file, 'w') as lock: lock.write("locked") # output contains computed report dirname self.new_report_path = os.path.join( sub_folder_push, output) # create report zip file & md5 file associated status, msg = self.prepare_files() else: msg = "Building file name FAIL: %s" % output status = Global.FAILURE if status == Global.SUCCESS: msg = "TCR push SETUP : OK" return status, msg
def check_keys(dictionary, keys):
    """
    Report which of the given keys are absent from a dictionary.

    Every missing key is logged as an error.

    :type dictionary: dict
    :param dictionary: dict to test
    :type keys: iterable
    :param keys: keys to check
    :rtype: list
    :return: list of missing keys
    """
    missing = [key for key in keys if key not in dictionary]
    for key in missing:
        LOGGER_FWK.error("KEY %s missing on your dictionary" % key)
    return missing
def safe_remove_file(file_path, max_retry=5):
    """
    Removes the file or folder at the given path.

    .. note:: Due to windows limitation, sometimes, the file cannot be
        removed immediately so implements retry loop.

    :type file_path: str
    :param file_path: The path to the folder/file to remove.
    :type max_retry: int
    :param max_retry: Max remove retry.
    :rtype: tuple
    :return: Status and output log
    """
    status = Global.FAILURE
    output = ''
    file_name = os.path.basename(file_path)
    attempt = 1
    while attempt <= max_retry:
        try:
            if os.path.isfile(file_path):
                os.remove(file_path)
            elif os.path.isdir(file_path):
                shutil.rmtree(file_path)
        except Exception as ex:  # pylint: disable=W0703
            # Removal can fail transiently (e.g. windows file locks):
            # wait a second and retry.
            time.sleep(1)
            attempt += 1
            output = 'Fail to remove {0} after {1}/{2} tries ({ex})'.format(
                file_name, str(attempt), str(max_retry), ex=ex)
            LOGGER.error(output)
        else:
            status = Global.SUCCESS
            output = 'Remove {0} OK after {1}/{2} tries'.format(
                file_name, str(attempt), str(max_retry))
            LOGGER.info(output)
            break
    return status, output
def send_campaign_resource(self, campaign_uuid, resource_info):
    """
    Push a resource onto (TCR Only) REST API for a given Test Case.

    :param dict campaign_uuid: id of the campaign.
    :param dict resource_info: Local resource to be pushed onto TCR at Test Case level.
    """
    if not self._tcr_instance:
        LOGGER_FWK.warning('Only TCR offers REST API for resources!')
        return
    # Log folders/files that make up the campaign archive to push
    context_dict = {
        "acs_logfile_name": Files.acs_output_name,
        "TCR_LIVE_REPORTING": REPORT_FILE_NAME,
        "ROOT_DUT_NAME": Folders.ROOT_DUT_NAME,
        "AP_LOGS": Folders.AP_LOGS,
        "BP_LOGS": Folders.BP_LOGS,
        "PTI_LOGS": Folders.PTI_LOGS,
        "SERIAL_LOGS": Folders.SERIAL_LOGS,
        "LOGCAT_LOGS": Folders.LOGCAT_LOGS,
        "DEBUG_LOGS": Folders.DEBUG_LOGS,
        "REPORT_STYLE_FILENAME": Files.REPORT_STYLE,
    }
    pusher = TCRpush(
        original_acs_report=resource_info["campaign_report_path"],
        metacampaign_uuid=campaign_uuid,
        user_mail=resource_info["user_email"],
        cache_reports=Paths.CACHE_PUSH_REPORTS,
        dev_campaign=resource_info["dev_campaign"],
        log_folders_and_files=context_dict)
    # Build archive to push
    status, msg = pusher.setup()
    if status != Global.SUCCESS:
        LOGGER_FWK.error(
            "Error when building campaign result archive! {}".format(msg))
        return
    # Push a resource to TCR Reporting tool via REST API interface (Test case level)
    self._tcr_instance.send_campaign_resource(
        resource=pusher.zip_acs_report_tcr)
def Zip(folder, filename):
    """
    Archive a folder: gzipped tarball on POSIX, zip file elsewhere.

    :type folder: str
    :param folder: folder to archive
    :type filename: str
    :param filename: archive base name (extension added automatically)
    :rtype: tuple
    :return: status and absolute path of the archive ("" on failure)
    """
    status = Global.FAILURE
    if ON_POSIX:
        from distutils import archive_util
        try:
            out_file = archive_util.make_archive(filename,
                                                 format="gztar",
                                                 root_dir=folder)
            LOGGER.info(
                "folder {0} has been properly zipped as tarball {1}".
                format(folder, out_file))
            status = Global.SUCCESS
        except Exception as ex:  # pylint: disable=W0703
            LOGGER.error(
                "ZIP_ERROR: An error occured during file zipping (%s)"
                % str(ex))
            out_file = ""
    else:
        import zipfile
        try:
            filename = filename + '.zip'
            zip_file = zipfile.ZipFile(filename, 'w', allowZip64=True)
            # Bug fix: close the handle even when adding entries fails,
            # so a partially written zip file is not leaked.
            try:
                LOGGER.info('Create zip file: {0}'.format(filename))
                ZipFolderUtilities.addFolderToZip(zip_file, folder)
                LOGGER.info(
                    "folder {0} has been properly zipped as {1}".format(
                        folder, filename))
            finally:
                zip_file.close()
            status = Global.SUCCESS
            out_file = os.path.abspath(filename)
        except IOError as error:
            LOGGER.error('Cannot create zip file: {0} - {1}'.format(
                filename, error))
            status = Global.FAILURE
            out_file = ""
    if status == Global.SUCCESS:
        LOGGER.info("ZIP FILE: {0}".format(out_file))
    return status, out_file
def compute(campaign_report_path):
    """
    Compute checksum for all files in the given path and save it in a file.

    :type campaign_report_path: str
    :param campaign_report_path: The path to the folder to compute checksum.
    """
    # Compute checksum only if FEATURE_FILE_ACTIVATION is present
    if ComputeReportChecksum.compute_checksum_activated():
        # Bug fix: hash_code was unbound when compute_checksum raised,
        # turning the failure into a misleading NameError ("Error while
        # write in file") in the second try block.
        hash_code = None
        try:
            hash_code = compute_checksum(campaign_report_path)
        except Exception as error:
            LOGGER_FWK.error("Error while compute checksum (%s)" % str(error))
        try:
            if hash_code:
                with open(
                        os.path.join(campaign_report_path, CHECK_SUM_FILES),
                        'w') as file_:
                    file_.write(str(hash_code))
            else:
                LOGGER_FWK.error("No checksum computed")
        except Exception as error:
            LOGGER_FWK.error("Error while write in file (%s)" % str(error))