def copy_pkg_to_temp(self, pkg_name):
    """Copy a package's directory from node_modules into the temp download dir.

    :param pkg_name: name of the package directory under node_modules to copy
    """
    # Local import: replaces deprecated distutils.dir_util.copy_tree
    # (distutils is deprecated by PEP 632 and removed in Python 3.12).
    import shutil
    src_dir = '{0}{1}'.format(
        normalize_path(self.node_modules_dir), pkg_name)
    dest_dir = '{0}{1}'.format(
        normalize_path(self.download_dir), pkg_name)
    # dirs_exist_ok=True mirrors copy_tree's merge-into-existing behavior.
    shutil.copytree(src_dir, dest_dir, dirs_exist_ok=True)
def test_pip_analyser(self):
    """PipPackageManager detects a pip project and queues its requirements file."""
    project_path = normalize_path(os.path.join(TEST_DIR, 'example/py_project'))
    project_info = determine_build_tool(normalize_path(project_path))
    requirements_path = normalize_file_path(
        os.path.join(TEST_DIR, 'example/py_project/requirements.txt'))
    assert "pip" in project_info[0]
    manager = PipPackageManager(normalize_path(project_path), True)
    assert manager.project_dir == normalize_path(
        os.path.join(TEST_DIR, 'example/py_project'))
    assert manager.in_tests == True
    assert manager.files_to_parse == [[requirements_path]]
def __init__(self, project_dir, package_list: str, num_of_workers: int):
    """Set up download/node_modules paths, worker bookkeeping and package groups.

    :param project_dir: root of the project being scanned
    :param package_list: raw package list to split into worker groups
    :param num_of_workers: number of download workers to use
    """
    normalized_project = normalize_path(project_dir)
    self._download_dir = '{0}{1}'.format(normalized_project, TEMP_DIR)
    self._num_of_workers = num_of_workers
    self._workers = []
    self.project_dir = project_dir
    self.node_modules_dir = '{0}node_modules/'.format(normalized_project)
    self._package_groups = list(self._group_packages(package_list))
    create_tmp_dir(self.project_dir)
def __init__(self, directory_to_scan, pkg_list, num_workers=10):
    """Record scan locations, create the temp dir and write the dependency file.

    :param directory_to_scan: project directory containing node_modules
    :param pkg_list: iterable of (name, version) package pairs
    :param num_workers: worker count (kept for callers; default 10)
    """
    self.directory_to_scan = directory_to_scan
    self.pkg_list = pkg_list
    self.pkf_file_list = ''
    base_dir = normalize_path(self.directory_to_scan)
    self.download_dir = '{0}{1}'.format(base_dir, TEMP_DIR)
    self.node_modules_dir = '{0}node_modules'.format(base_dir)
    create_tmp_dir(self.directory_to_scan)
    self.create_dep_file()
def analyse_dir(verbose, package_dir):
    """Detect the project's build tool and kick off the matching package manager.

    :param verbose: verbosity flag (currently unused here; kept for callers)
    :param package_dir: directory of the project to analyse
    :return: the (tools, files) tuple produced by determine_build_tool
    """
    # Removed unused local `npm_deps` (was assigned but never read).
    project_info = determine_build_tool(normalize_path(package_dir))
    if "npm" in project_info[0]:
        print_to_command_line("NPM Project", "success")
        NpmPackageManager(normalize_path(package_dir))
    if "pip" in project_info[0]:
        print_to_command_line("Python Project", "success")
        PipPackageManager(normalize_path(package_dir))
    return project_info
def extract_content_of_temp_dir(self):
    """Run scancode's extractcode utility over every file in the temp dir.

    Changes the working directory to SCANCODE_PATH first so the relative
    ./extractcode binary resolves.
    """
    temp_directory = '{0}{1}'.format(
        normalize_path(self.directory_to_scan), TEMP_DIR)
    os.chdir(os.path.abspath(self.SCANCODE_PATH))
    if not os.path.exists(temp_directory):
        return
    for entry in os.listdir(temp_directory):
        command = "./extractcode {0}{1}{2}".format(
            normalize_path(self.directory_to_scan),
            normalize_path(TEMP_DIR),
            entry)
        # .read() blocks until extraction finishes.
        os.popen(command).read()
    return
def test_determine_build_tool(self):
    """determine_build_tool classifies the pip and npm example projects."""
    py_path = normalize_path(os.path.join(TEST_DIR, 'example/py_project'))
    expected_py = (
        ['pip', 'pip'],
        [[],
         [os.path.join(TEST_DIR, 'example/py_project/requirements.txt')],
         [os.path.join(TEST_DIR, 'example/py_project/setup.py')]])
    npm_path = normalize_path(os.path.join(TEST_DIR, 'example/npm_project'))
    expected_npm = (
        ['npm'],
        [[os.path.join(TEST_DIR, 'example/npm_project/package.json')], [], []])
    assert determine_build_tool(npm_path) == expected_npm
    assert determine_build_tool(py_path) == expected_py
def __init__(self, req_file, project_dir, pkg_name):
    """Store download settings, create the temp dir, and start the download.

    :param req_file: path to the requirements file to download from
    :param project_dir: project root; temp dir is created beneath it
    :param pkg_name: name of the package this download is for
    """
    self.req_file = req_file
    self.pkg_name = pkg_name
    self.dest_dir = normalize_path(project_dir) + TEMP_DIR
    self.download_output = None
    create_tmp_dir(project_dir)
    self.download()
def check_if_pkg_exists(self, pkg_name):
    """Return True when pkg_name is already present under node_modules.

    An existing package is also copied into the temp directory; a missing
    one is left for the caller to add to the download list.
    """
    item_dir = '{0}{1}/'.format(
        normalize_path(self.node_modules_dir), pkg_name)
    if os.path.exists(item_dir):
        self.copy_pkg_to_temp(pkg_name)
        return True
    return False
def node_modules_dir_for_package_json(self):
    """Look for a node_modules directory beside the package.json's directory.

    When found, stores its normalized path on self.node_modules_dir.
    """
    if not self.file_dir:
        return
    parent_dir = '/'.join(self.file_dir.split('/')[:-1])
    for entry in os.listdir(parent_dir):
        candidate = '{0}/{1}'.format(parent_dir, entry)
        if entry == "node_modules" and os.path.isdir(candidate):
            self.node_modules_dir = normalize_path(candidate)
def create_dep_file(self):
    """Write packages that still need downloading to <temp>/pkg_list.txt.

    A package is skipped only when node_modules exists AND already contains
    it (check_if_pkg_exists also copies such packages to the temp dir).
    The resulting file path is recorded on self.pkf_file_list.
    """
    file_to_write = '{0}pkg_list.txt'.format(
        normalize_path(self.download_dir))
    self.pkf_file_list = file_to_write
    # Hoisted out of the loop: the node_modules check is loop-invariant.
    node_modules_present = os.path.exists(self.node_modules_dir)
    # `with` guarantees the file is closed even if a write raises
    # (the original open/close pair leaked the handle on error).
    with open(file_to_write, 'w+') as pkg_list_file:
        for item in self.pkg_list:
            if node_modules_present and self.check_if_pkg_exists(item[0]):
                continue
            pkg_list_file.write("{0}@{1}\n".format(item[0], item[1]))
def download(self):
    """Download the requirements-file packages into the temp dir via pip.

    Stores pip's console output in self.download_output and returns it;
    returns None (after printing a failure message) when offline.
    """
    # Create setup file before downloading the packages to a directory
    touch_cmd = "touch {0}setup.py".format(normalize_path(self.dest_dir))
    # Run the Command
    if is_connected():
        # BUG FIX: touch_cmd was built but never executed, so the
        # placeholder setup.py was never actually created.
        os.popen(touch_cmd).read()
        cmd_output = os.popen("pip download -r {0} -d {1}".format(
            self.req_file, self.dest_dir)).read()
        # delete setup file after packages have been downloaded
        # (normalize_path keeps the rm target consistent with the touch above)
        os.popen("rm -f {0}setup.py".format(
            normalize_path(self.dest_dir))).read()
        self.download_output = cmd_output
        return cmd_output
    else:
        print_to_command_line(
            "You are not online, we cannot download project dependencies. You need to be online.",
            "failure")
        return
def archive_path(archive_definition, year, day, archive_dir='', output_level=0):
    """
    Returns path in archives dir to an archive file
    archive_definition format: (station, channel, subdir, location, start date, end date)
    :param archive_definition: tuple - archive definition tuple (see archive_def method)
    :param year: string/int - year of record
    :param day: string/int - day of record
    :param archive_dir: string - path to archive directories, empty by default
    :param output_level: int - 0 - min output, 5 - max output, default - 0
    :return: string - partial path of archive record (path to archive directory itself
             can be found in config/vars.py)
    """
    # BUG FIX: "day is int" was an identity comparison against the type object
    # and therefore always False, so the warning never fired.
    if isinstance(day, int) and day > 365 and output_level >= 0:
        logging.warning("In archive_path: day value is bigger than 365")
    station, channel, subdir, location = archive_definition[:4]
    # Subdirectories: <subdir>/<station>/
    path = subdir + '/' + station + '/'
    # Record file: station.subdir.location.channel.YYYY.DDD
    path += station + '.' + subdir + '.'
    path += location + '.' + channel + '.'
    # zfill replaces the hand-rolled padding loops (which never terminated
    # if the string was already longer than the target width).
    path += str(year).zfill(4) + '.'
    path += str(day).zfill(3)
    if len(archive_dir) != 0:
        return utils.normalize_path(archive_dir) + '/' + path
    return path
logging.info(config.picks_help_message) print(config.picks_help_message) sys.exit() elif opt in ("-s", "--save"): config.save_dir = arg elif opt in ("-r", "--rea"): config.full_readings_path = arg elif opt in ("-w", "--wav"): config.full_waveforms_path = arg # Initialize random seed with current time random.seed() # Get file names nordic_file_names = utils.get_nordic_files( utils.normalize_path(config.full_readings_path)) # Get all archive definitions definitions = [] if type(config.seisan_definitions_path) is list: for path in config.seisan_definitions_path: defs = seisan.read_archive_definitions(path) definitions.extend(defs) else: definitions = seisan.read_archive_definitions( utils.normalize_path(config.seisan_definitions_path)) # Picking statistics initialization stats = stats.PickStats() rewrite_duplicates = config.rewrite_duplicates
def read_picks(save_dir, phase_hint):
    """
    Reads picks of specified phase
    :param save_dir: Base directory of waveforms database
    :param phase_hint: Specified phase
    :return: list of picks: [picking_stats, [event_stats, [slice_stats,
             pick_file_path, ...], ...], ...]
    """
    save_dir = utils.normalize_path(save_dir)
    assert os.path.isdir(save_dir), 'No save directory found: \"{}\"'.format(
        save_dir)
    # Read picking statistics (optional: a missing stats file leaves None)
    try:
        picking_stats = stats.PickStats()
        picking_stats.read(save_dir + '/' + config.picking_stats_file)
    except FileNotFoundError:
        picking_stats = None
        print('No picking statistics file found in path {}'.format(
            save_dir + '/' + config.picking_stats_file))
    result_list = [picking_stats]
    # Read events (renamed loop variable: `dir` shadowed the builtin)
    for event_dir_name in os.listdir(save_dir):
        event_dir_path = save_dir + '/' + event_dir_name
        if os.path.isfile(event_dir_path):
            continue
        # Get stats
        event_stats = stats.EventStats()
        event_stats.read(event_dir_path + '/' + config.event_stats_file)
        # Magnitude check
        if event_stats.magnitude is not None and event_stats.magnitude < config.min_magnitude:
            continue
        # Depth check
        if event_stats.depth is not None and event_stats.depth > config.max_depth:
            continue
        event_list = [event_stats]
        # Read event pick group
        for pick_dir_name in os.listdir(event_dir_path):
            pick_dir_path = event_dir_path + '/' + pick_dir_name
            if os.path.isfile(pick_dir_path):
                continue
            # Get stats
            slice_stats = stats.SliceStats()
            slice_stats.read(pick_dir_path + '/' + config.picks_stats_file)
            # Phase hint check
            if slice_stats.phase_hint != phase_hint:
                continue
            pick_list = [slice_stats]
            # Read picks; file names are expected as
            # <location>.<station>.<spip>.<phase>.<format> (5 dot-parts)
            for pick_file_name in os.listdir(pick_dir_path):
                # Parse name (str.split always returns a list, so the old
                # `type(name_split) is list` checks were dead code)
                name_split = pick_file_name.split('.')
                if len(name_split) != 5:
                    continue
                spip = name_split[2]
                file_format = name_split[4]
                # Check if its accelerogramm
                if config.ignore_acc and spip in config.acc_codes:
                    continue
                # Check file format
                if file_format == slice_stats.file_format:
                    pick_list.append(pick_dir_path + '/' + pick_file_name)
            event_list.append(pick_list)
        result_list.append(event_list)
    return result_list
def save_picks(picks, save_dir, file_format="MSEED"):
    """
    Writes an event to a specified save dir
    :param picks: event tuple indexed as [0] event_id, [1] reading_path,
                  [2] magnitude, [3] depth, [4] event_picks; each entry of
                  event_picks is (station, phase_hint, distance, phase_picks)
                  and each phase pick is indexed as [0] archive definition,
                  [2] wave start time, [3] wave end time, [4] phase time,
                  [5] trace slices — TODO confirm against the caller
    :param save_dir: base directory to write the event into (created if missing)
    :param file_format: waveform format string passed to the trace writer
    :return: -1 when the event has no ID (nothing is saved), otherwise None
    """
    # Create save_dir
    save_dir = utils.normalize_path(save_dir)
    if not os.path.isdir(save_dir):
        os.makedirs(save_dir)
    # Retrieve event data
    event_id = picks[0]
    reading_path = picks[1]
    magnitude = picks[2]
    depth = picks[3]
    event_picks = picks[4]
    # If no event ID, quit
    if event_id is None or len(event_id) == 0:
        print("In {}: Event ID is empty, cannot create save dir!".format(
            reading_path))
        return -1
    # Create event directory
    event_dir = save_dir + '/' + event_id
    if not os.path.isdir(event_dir):
        os.mkdir(event_dir)
    # Write event info
    with open(event_dir + '/' + config.event_stats_file, 'w') as f:
        print("[Event Description]", file=f)
        print("{}={}".format("EventID", event_id), file=f)
        print("{}=\"{}\"".format("SFilePath", reading_path), file=f)
        print("{}={}".format("Magnitude", magnitude), file=f)
        print("{}={}".format("Depth", depth), file=f)
    # Save picks
    for pick in event_picks:
        # Retrieve pick info
        station = pick[0]
        phase_hint = pick[1]
        distance = pick[2]
        phase_picks = pick[3]
        # Create picks directory; bump the index until an unused
        # <station>.<phase>.<index> directory name is found
        index = 0
        picks_dir = "{event_dir}/{station}.{phase_hint}.{index}".format(
            event_dir=event_dir, station=station, phase_hint=phase_hint,
            index=index)
        while os.path.isdir(picks_dir):
            index += 1
            picks_dir = "{event_dir}/{station}.{phase_hint}.{index}".format(
                event_dir=event_dir, station=station, phase_hint=phase_hint,
                index=index)
        os.mkdir(picks_dir)
        # Write picks info
        with open(picks_dir + '/' + config.picks_stats_file, 'w') as f:
            print("[Picks Description]", file=f)
            print("{}={}".format("EventID", event_id), file=f)
            print("{}=\"{}\"".format("SFilePath", reading_path), file=f)
            print("{}={}".format("Station", station), file=f)
            print("{}={}".format("PhaseHint", phase_hint), file=f)
            print("{}={}".format("Magnitude", magnitude), file=f)
            print("{}={}".format("Depth", depth), file=f)
            print("{}={}".format("Distance",
                                 distance), file=f)
            print("{}=\"{}\"".format("FileFormat", file_format), file=f)
            # Timing lines are taken from the first phase pick only
            if len(phase_picks) > 0:
                p_pick = phase_picks[0]
                pick_time = p_pick[4]
                pick_start_time = p_pick[2]
                pick_end_time = p_pick[3]
                print("{name}={day}.{month}.{year}-{hour}:{minute}:{second}".
                      format(name="WavePhaseTime",
                             day=pick_time.day,
                             month=pick_time.month,
                             year=pick_time.year,
                             hour=pick_time.hour,
                             minute=pick_time.minute,
                             second=pick_time.second), file=f)
                print("{name}={day}.{month}.{year}-{hour}:{minute}:{second}".
                      format(name="WaveStartTime",
                             day=pick_start_time.day,
                             month=pick_start_time.month,
                             year=pick_start_time.year,
                             hour=pick_start_time.hour,
                             minute=pick_start_time.minute,
                             second=pick_start_time.second), file=f)
                print("{name}={day}.{month}.{year}-{hour}:{minute}:{second}".
                      format(name="WaveEndTime",
                             day=pick_end_time.day,
                             month=pick_end_time.month,
                             year=pick_end_time.year,
                             hour=pick_end_time.hour,
                             minute=pick_end_time.minute,
                             second=pick_end_time.second), file=f)
        # Save trace slices
        for p_pick in phase_picks:
            archive_def = p_pick[0]
            slices = p_pick[5]
            base_file_name = "{picks_dir}/{location}.{station}.{spip}.{phase}".format(
                picks_dir=picks_dir, location=archive_def[2],
                station=archive_def[0], spip=archive_def[1],
                phase=phase_hint)
            index = 0
            for sl in slices:
                trace_slice = sl[2]
                # Only append an index to the file name when there are
                # multiple slices for this pick
                if len(slices) == 1:
                    trace_file = "{base_file_name}.{format}".format(
                        base_file_name=base_file_name, format=file_format)
                else:
                    trace_file = "{base_file_name}.{index}.{format}".format(
                        base_file_name=base_file_name, index=index,
                        format=file_format)
                trace_slice.write(trace_file, format=file_format)
                index += 1
def test_normalize_path(self):
    """normalize_path appends exactly one trailing slash and is idempotent."""
    without_slash = "/home/user/path/example"
    with_slash = "/home/user/path/example/"
    assert normalize_path(without_slash) == with_slash
    assert normalize_path(with_slash) == with_slash
def test_temp_dir(self):
    """create_tmp_dir creates the temp directory; cleaned up afterwards."""
    project_path = normalize_path(os.path.join(TEST_DIR, 'example/py_project'))
    temp_path = normalize_path(
        os.path.join(TEST_DIR, 'example/py_project/{0}'.format(TEMP_DIR)))
    create_tmp_dir(project_path)
    assert os.path.isdir(temp_path) == True
    delete_tmp_dir(project_path)
def test_check_file_in_dir(self):
    """check_file_in_dir finds package.json in the npm example project."""
    npm_project = os.path.join(TEST_DIR, 'example/npm_project')
    expected = [os.path.join(TEST_DIR, 'example/npm_project/package.json')]
    assert check_file_in_dir(normalize_path(npm_project), 'package.json') == expected