def push_command(curdir, config, update, version=None, files=()):
    """Push source files to the remote project.

    Push patterns come from the explicit ``files`` argument when given,
    otherwise from the config's push pattern; when neither is available,
    plain files from the current directory are pushed (as the log message
    promises — previously this fallback was never implemented, leaving
    ``pattern_list`` as ``None`` and crashing in the loop below).

    :param curdir: directory the command is executed from
    :param config: project configuration (consumed by ProjectAPI and helpers)
    :param update: forwarded to ``final_push`` — presumably "update existing
        remote files"; confirm against ``final_push``
    :param version: optional version tag forwarded to ``final_push``
    :param files: explicit file patterns; overrides the config pattern
    """
    api = ProjectAPI(config)
    project = api.get_project()
    remote_content_type_codes = project['content_type_codes']
    init_language_storage(api)
    add_project_file_formats(get_project_file_formats(config))
    pattern_list = None
    if not files:
        pattern_list = get_push_pattern(config)
        if pattern_list is None:
            log.info("No push pattern found in config. Taking files from current directory")
            import os  # local import: keeps this self-contained fix in one block
            # Skip hidden ('.') and dunder ('__') entries. NOTE(review):
            # os.path.isfile resolves relative to the process cwd, not curdir,
            # mirroring the sibling push_command implementation — confirm.
            files = [
                entry for entry in os.listdir(curdir)
                if os.path.isfile(entry)
                and not str(entry).startswith('.')
                and not str(entry).startswith('__')
            ]
    if files:
        pattern_list = list(files)
    if not pattern_list:
        # Nothing to push: previously this case crashed with a TypeError
        # when iterating over a None pattern_list.
        log.info("No files found to push")
        return
    for pattern in pattern_list:
        if pattern[-2:] == '/*':
            # Directory glob: expand to each matching directory and push the
            # pattern's trailing component inside every one of them.
            pattern_extension = pattern.split('/')[-1]
            directory_list = find_directories(pattern)
            for dir_ in directory_list:
                dir_ = dir_ + '/' + pattern_extension
                final_push(project, curdir, dir_, api, update, version,
                           remote_content_type_codes)
        else:
            final_push(project, curdir, pattern, api, update, version,
                       remote_content_type_codes)
def push_command(curdir, config, update, file_path=False, version=None, files=()):
    """Push source files to the remote project (path-separator-aware variant).

    Push patterns come from the explicit ``files`` argument when given,
    otherwise from the config's push pattern; when neither is available,
    plain files from the current directory are pushed.

    :param curdir: directory the command is executed from
    :param config: project configuration (consumed by ProjectAPI and helpers)
    :param update: forwarded to ``final_push`` — presumably "update existing
        remote files"; confirm against ``final_push``
    :param file_path: forwarded to ``final_push``; semantics defined there
    :param version: optional version tag forwarded to ``final_push``
    :param files: explicit file patterns; overrides the config pattern
    """
    api = ProjectAPI(config)
    project = api.get_project()
    remote_content_type_codes = project['content_type_codes']
    init_language_storage(api)
    add_project_file_formats(get_project_file_formats(config))
    pattern_list = None
    if not files:
        pattern_list = get_push_pattern(config)
        if pattern_list is None:
            log.info(
                "No push pattern found in config. Taking files from current directory"
            )
            # Skip hidden ('.') and dunder ('__') entries. NOTE(review):
            # os.path.isfile resolves relative to the process cwd, not
            # curdir — confirm this is intended.
            files = [
                entry for entry in os.listdir(curdir)
                if os.path.isfile(entry)
                and not str(entry).startswith(".")
                and not str(entry).startswith("__")
            ]
    if files:
        pattern_list = list(files)
    if not pattern_list:
        # Previously: if the directory fallback found nothing, pattern_list
        # stayed None and the loop below raised TypeError. Bail out instead.
        log.info("No files found to push")
        return
    for pattern in pattern_list:
        # `slash` is the platform path separator defined elsewhere in this
        # module — TODO confirm.
        if pattern[-2:] == slash + '*':
            # Directory glob: expand to each matching directory and push the
            # pattern's trailing component inside every one of them.
            pattern_extension = pattern.split(slash)[-1]
            directory_list = find_directories(pattern, slash)
            for dir_ in directory_list:
                if dir_.endswith(slash):
                    dir_ = dir_ + pattern_extension
                else:
                    dir_ = dir_ + slash + pattern_extension
                final_push(project, curdir, dir_, api, update, version,
                           remote_content_type_codes, file_path)
        else:
            final_push(project, curdir, pattern, api, update, version,
                       remote_content_type_codes, file_path)
def find_new_command(curdir, config, files=()):
    """Run the language-detection strategies over files matching the push pattern.

    Appends one detection result per eligible file to the module-level
    ``RESULTS`` list and prints it.

    :param curdir: directory the command is executed from
    :param config: project configuration (consumed by ProjectAPI and helpers)
    :param files: explicit file names; when empty, files are discovered via
        the config's push pattern
    :raises FilesNotFound: when discovery by pattern yields no files
    """
    api = ProjectAPI(config)
    init_language_storage(api)
    project = api.get_project()
    source_lang = get_source_language(project)
    pattern = get_push_pattern(config)
    if not files:
        files = list(find_files_by_pattern(curdir, pattern, source_lang))
        if not files:
            raise FilesNotFound(
                'Files not found by pattern `{}`'.format(pattern))
    # Directory portion of the pattern (all components before the last '/').
    # BUG FIX: components were previously joined with '' which glued directory
    # names together (e.g. 'a/b/x.json' -> 'ab'); join with '/' instead.
    pattern_dir = '/'.join(pattern.split('/')[:-1])
    for file_name in files:
        blob = curdir + '/' + pattern_dir + '/' + str(file_name)
        if not vendored_or_documented(blob) and not not_valid(blob):
            language = [strategy.find(blob) for strategy in STRATEGIES]
            # NOTE(review): RESULTS is module-level shared state — results
            # accumulate across calls.
            RESULTS.append(language)
    print(RESULTS)
def pull_command(curdir, config, files=(), force=False, bulk=False,
                 workflow=False, workflow_all=None, version=None,
                 distinct=False, languages=(), in_progress=False,
                 update_action=None, custom=False, **kwargs):
    """Download translation files for the project's destination languages.

    Based on the configuration in .qordoba.yml the destination for the
    pulled files will be set. Default path is '.qordoba-cli/qordoba/'.

    :param curdir: directory the command is executed from
    :param config: project configuration (consumed by ProjectAPI and helpers)
    :param files: file names used by the ``distinct`` fallback
    :param force: overwrite existing local files without asking
    :param bulk: collect page/language ids and delegate to ``pull_bulk``
        instead of downloading one file at a time
    :param workflow: interactively pick a workflow step per file
    :param workflow_all: name of a workflow step to apply to all files
    :param version: only pull pages whose version tag matches
    :param distinct: only pull the single file named by the pattern/files
    :param languages: restrict to these languages (validated against project)
    :param in_progress: also pull files whose status is merely `enabled`
    :param update_action: pre-selected answer for the overwrite prompt
    :param custom: skip the file-extension filter
    """
    api = ProjectAPI(config)
    init_language_storage(api)
    project = api.get_project()
    dest_languages = list(get_destination_languages(project))
    if languages:
        languages = validate_languges_input(languages, dest_languages)
    else:
        languages = dest_languages
    # prepare variables for pull_bulk command
    src_language = get_source_language(project)
    src_language_code = src_language.code
    src_language_id = src_language.id
    dest_languages_page_ids = []
    dest_languages_ids = [src_language_id]
    src_to_dest_paths = []
    pattern_list = get_pull_pattern(config, default=None)
    if pattern_list is None:
        pattern_list = [None]
    for pattern in pattern_list:
        for language in languages:
            status_filter = [PageStatus.enabled, ]
            # generally only completed files will be pulled
            if in_progress is False:
                log.debug('Pull only completed translations.')
                status_filter = [PageStatus.completed, ]
            is_started = False
            pages_completed = api.page_search(language.id, status=status_filter)
            pages_all = [pages_completed, ]
            # if workflow flag exists, enabled files will be pulled too
            if workflow or workflow_all:
                pages_enabled = api.page_search(
                    language.id, status=[PageStatus.enabled, ])
                pages_all = [pages_completed, pages_enabled]
            for pages in pages_all:
                for page in pages:
                    is_started = True
                    page_status = api.get_page_details(
                        language.id, page['page_id'], )
                    dest_languages_page_ids.append(page['page_id'])
                    dest_languages_ids.append(language.id)
                    milestone = page_status['status']['id']
                    version_tag = page_status['version_tag']
                    filename = page['url']
                    # Guard the None case first; the original compared the
                    # stringified tags before checking `version is not None`.
                    if version is not None and str(version_tag) != str(version):
                        print("dismissing file `{}` with wrong version {}"
                              .format(filename, version_tag))
                        continue
                    if distinct:
                        # Only pull the single file the pattern (or the first
                        # explicit file) names; versioned files are stored
                        # remotely as '<tag>_<name>'.
                        source_name = page_status['name']
                        tag = page_status['version_tag']
                        try:
                            pattern_name = pattern.split('/')[-1]
                        except AttributeError:
                            # pattern is None — fall back to explicit files
                            pattern_name = files[0]
                        if tag:
                            real_filename = tag + '_' + source_name
                        else:
                            real_filename = source_name
                        if real_filename != pattern_name:
                            continue
                    # when '--workflow' parameter is set, user can pick of
                    # which workflow files should be downloaded
                    if workflow or workflow_all:
                        milestones_resp = api.get_milestone(
                            language.id, page_status['assignees'][0]['id'])
                        milestone_dict = dict()
                        for i in milestones_resp:
                            milestone_dict[i['name']] = i['id']
                        if workflow:
                            log.info(
                                'For file {} and language {} pick workflow step'
                                .format(format_file_name(page), language))
                            # takes the milestone answer from stdin
                            pick = ask_select(
                                MilestoneOptions().all(milestone_dict),
                                prompt='Pick a milestone: ')
                            milestone = milestone_dict[pick]
                        if workflow_all:
                            # BUG FIX: the original indexed the dict directly,
                            # raising KeyError for unknown names so the "does
                            # not exists" branch was unreachable.
                            if workflow_all in milestone_dict:
                                milestone = milestone_dict[workflow_all]
                            else:
                                log.info(
                                    "The given Milestone `{}` does not exists in your project"
                                    .format(workflow_all))
                    if in_progress:
                        log.debug('Selected status for page `{}` - {}'.format(
                            page_status['id'], page_status['status']['name']))
                    dest_path = create_target_path_by_pattern(
                        curdir, language, pattern=pattern, distinct=distinct,
                        version_tag=page_status['version_tag'],
                        source_name=page_status['name'],
                        content_type_code=page_status['content_type_code'],
                    )
                    if pattern is not None:
                        # Strip the file name; pull_bulk wants directories.
                        stripped_dest_path = ((dest_path.native_path).rsplit(
                            '/', 1))[0]
                        src_to_dest_paths.append(
                            tuple((language.code, stripped_dest_path)))
                    src_to_dest_paths.append(
                        tuple((language.code, language.code)))
                    # adding the src language to the dest_path_of_src_language
                    # pattern
                    dest_path_of_src_language = create_target_path_by_pattern(
                        curdir, src_language, pattern=pattern,
                        distinct=distinct,
                        version_tag=page_status['version_tag'],
                        source_name=page_status['name'],
                        content_type_code=page_status['content_type_code'],
                    )
                    if pattern is not None:
                        stripped_dest_path_of_src_language = ((
                            dest_path_of_src_language.native_path).rsplit(
                                '/', 1))[0]
                        src_to_dest_paths.append(
                            tuple((src_language_code,
                                   stripped_dest_path_of_src_language)))
                    src_to_dest_paths.append(
                        tuple((src_language_code, src_language_code)))
                    if not bulk:
                        # Checking if file extension in config file matches the
                        # downloaded file. If not, continue — e.g. *.resx
                        # should only download resx files from Qordoba.
                        valid_extension = pattern.split(
                            '.')[-1] if pattern else None
                        file_extension = page['url'].split('.')[-1]
                        if not custom and pattern and valid_extension != "<extension>" and valid_extension != file_extension:
                            continue
                        log.info(
                            'Starting Download of translation file(s) for src `{}`, language `{}` and pattern {}'
                            .format(format_file_name(page), language.code,
                                    pattern))
                        if os.path.exists(dest_path.native_path) and not force:
                            log.warning(
                                'Translation file already exists. `{}`'.format(
                                    dest_path.native_path))
                            answer = FileUpdateOptions.get_action(
                                update_action) or ask_select(
                                    FileUpdateOptions.all, prompt='Choice: ')
                            if answer == FileUpdateOptions.skip:
                                log.info(
                                    'Download translation file `{}` was skipped.'
                                    .format(dest_path.native_path))
                                continue
                            elif answer == FileUpdateOptions.new_name:
                                while os.path.exists(dest_path.native_path):
                                    dest_path = ask_question(
                                        'Set new filename: ',
                                        answer_type=dest_path.replace)
                            # pass to replace file
                        if workflow:
                            log.info(
                                '- note: pulls only from workflowstep `{}` '
                                .format(pick))
                        if workflow_all:
                            log.info(
                                '- note: pulls only from workflowstep `{}` '
                                .format(workflow_all))
                        res = api.download_file(page_status['id'], language.id,
                                                milestone=milestone)
                        res.raw.decode_content = True  # required to decompress content
                        if not os.path.exists(
                                os.path.dirname(dest_path.native_path)):
                            try:
                                os.makedirs(
                                    os.path.dirname(dest_path.native_path))
                                log.info("Creating folder path {}".format(
                                    dest_path.native_path))
                            except OSError as exc:
                                # Guard against race condition: the directory
                                # may have been created concurrently. BUG FIX:
                                # the original `pass`ed on every OSError,
                                # silently hiding real failures (permissions,
                                # disk full, ...).
                                if exc.errno != errno.EEXIST:
                                    raise
                        with open(dest_path.native_path, 'wb') as f:
                            shutil.copyfileobj(res.raw, f)
                        log.info(
                            'Downloaded translation file `{}` for src `{}` and language `{}`'
                            .format(dest_path.native_path,
                                    format_file_name(page), language.code))
            if not is_started and not bulk:
                log.info(
                    'Nothing to download for language `{}`. Check if your file translation status is `completed`.'
                    .format(language.code))
    if bulk:
        # NOTE(review): `pattern` here is whatever the last loop iteration
        # left bound — confirm pull_bulk really wants the final pattern.
        pull_bulk(api, src_to_dest_paths, dest_languages_page_ids,
                  dest_languages_ids, pattern=pattern)
def mock_lang_storage(language_response):
    """Seed the language storage from a stubbed API.

    Builds a MagicMock whose ``get_languages()`` yields *language_response*
    and hands it to ``init_language_storage``.
    """
    stub_api = MagicMock(**{'get_languages.return_value': language_response})
    init_language_storage(stub_api)
def pull_command(curdir, config, force=False, bulk=False, languages=(),
                 in_progress=False, update_action=None, files=(), **kwargs):
    """Download translation files for the project's destination languages.

    :param curdir: directory the command is executed from
    :param config: project configuration (consumed by ProjectAPI and helpers)
    :param force: overwrite existing local files without asking
    :param bulk: collect page/language ids and delegate to ``pull_bulk``
        instead of downloading one file at a time
    :param languages: restrict to these languages (validated against project)
    :param in_progress: also pull files whose status is merely `enabled`
    :param update_action: pre-selected answer for the overwrite prompt
    :param files: search terms; only pages whose remote name matches are pulled
    """
    api = ProjectAPI(config)
    init_language_storage(api)
    project = api.get_project()
    dest_languages = list(get_destination_languages(project))
    if languages:
        languages = validate_languges_input(languages, dest_languages)
    else:
        languages = dest_languages
    # prepare variables for pull_bulk command
    src_language = get_source_language(project)
    src_language_code = src_language.code
    src_language_id = src_language.id
    dest_languages_page_ids = []
    dest_languages_ids = [src_language_id]
    src_to_dest_paths = []
    pattern = get_pull_pattern(config, default=None)
    status_filter = [PageStatus.enabled, ]
    if in_progress is False:
        log.debug('Pull only completed translations.')
        status_filter = [PageStatus.completed, ]
    for language in languages:
        is_started = False
        if not files:
            pages = api.page_search(language.id, status=status_filter)
        else:
            # Search remotely once per term and pull the results lazily.
            pages = []
            for search_term in files:
                result = api.page_search(language.id, status=status_filter,
                                         search_string=search_term)
                pages = itertools.chain(pages, result)
        for page in pages:
            is_started = True
            page_status = api.get_page_details(language.id, page['page_id'], )
            # If searching for specific files, skip those that don't match
            # the search criteria.
            if files:
                needle = None
                for file_name in files:
                    # Remote names look like '<prefix>_<name>'; compare
                    # against the part after the first underscore.
                    remote_file_name = page_status['url'].partition('_')[2]
                    if file_name in remote_file_name:
                        needle = page_status['url']
                if needle is None:
                    continue
                else:
                    log.info('Found matching translation file src `{}` language `{}`'.format(
                        format_file_name(page), language.code, ))
            dest_languages_page_ids.append(page['page_id'])
            dest_languages_ids.append(language.id)
            milestone = None
            if in_progress:
                milestone = page_status['status']['id']
                log.debug('Selected status for page `{}` - {}'.format(
                    page_status['id'], page_status['status']['name']))
            dest_path = create_target_path_by_pattern(
                curdir, language, pattern=pattern,
                source_name=page_status['name'],
                content_type_code=page_status['content_type_code'])
            if pattern is not None:
                # Strip the file name; pull_bulk wants directories.
                stripped_dest_path = ((dest_path.native_path).rsplit('/', 1))[0]
                src_to_dest_paths.append(
                    tuple((language.code, stripped_dest_path)))
            src_to_dest_paths.append(tuple((language.code, language.code)))
            # adding the src language to the dest_path_of_src_language pattern
            dest_path_of_src_language = create_target_path_by_pattern(
                curdir, src_language, pattern=pattern,
                source_name=page_status['name'],
                content_type_code=page_status['content_type_code'])
            if pattern is not None:
                stripped_dest_path_of_src_language = (
                    (dest_path_of_src_language.native_path).rsplit('/', 1))[0]
                src_to_dest_paths.append(
                    tuple((src_language_code,
                           stripped_dest_path_of_src_language)))
            src_to_dest_paths.append(
                tuple((src_language_code, src_language_code)))
            if not bulk:
                # checking if file extension wanted in config file matches
                # downloaded file. If not, continue
                valid_extension = pattern.split('.')[-1] if pattern else None
                file_extension = page['url'].split('.')[-1]
                # BUG FIX: when no pattern is configured valid_extension is
                # None, and the original condition then skipped *every* file;
                # only filter when a pattern actually supplied an extension.
                if valid_extension is not None and valid_extension != "<extension>" and valid_extension != file_extension:
                    log.info('{} is not a valid file extension'.format(
                        file_extension))
                    continue
                log.info('Starting Download of translation file(s) for src `{}` and language `{}`'.format(
                    format_file_name(page), language.code))
                if os.path.exists(dest_path.native_path) and not force:
                    log.warning('Translation file already exists. `{}`'.format(
                        dest_path.native_path))
                    answer = FileUpdateOptions.get_action(
                        update_action) or ask_select(FileUpdateOptions.all,
                                                     prompt='Choice: ')
                    if answer == FileUpdateOptions.skip:
                        log.info('Download translation file `{}` was skipped.'.format(
                            dest_path.native_path))
                        continue
                    elif answer == FileUpdateOptions.new_name:
                        while os.path.exists(dest_path.native_path):
                            dest_path = ask_question(
                                'Set new filename: ',
                                answer_type=dest_path.replace)
                    # pass to replace file
                res = api.download_file(page_status['id'], language.id,
                                        milestone=milestone)
                res.raw.decode_content = True  # required to decompress content
                if not os.path.exists(os.path.dirname(dest_path.native_path)):
                    try:
                        os.makedirs(os.path.dirname(dest_path.native_path))
                        log.info("Creating folder path {}".format(
                            dest_path.native_path))
                    except OSError as exc:
                        # Guard against race condition: directory may have been
                        # created concurrently. BUG FIX: the original `pass`ed
                        # on every OSError, silently hiding real failures.
                        if exc.errno != errno.EEXIST:
                            raise
                with open(dest_path.native_path, 'wb') as f:
                    shutil.copyfileobj(res.raw, f)
                log.info('Downloaded translation file `{}` for src `{}` and language `{}`'.format(
                    dest_path.native_path, format_file_name(page),
                    language.code))
        if not is_started:
            log.info('Nothing to download for language `{}`'.format(
                language.code))
    if bulk:
        pull_bulk(api, src_to_dest_paths, dest_languages_page_ids,
                  dest_languages_ids, pattern=pattern)