import logging
import os
import subprocess
import sys

import git

# bright_red, bright_yellow and bright_green are assumed to come from a local
# terminal-color helper module that is not part of this excerpt.


def main():
    logging.basicConfig(stream=sys.stdout, level=logging.INFO)
    options = _get_argument_parser().parse_args()

    try:
        repo = git.Repo('.', search_parent_directories=True)
    except git.exc.InvalidGitRepositoryError:
        print bright_red('This is not a valid git repository!')
        return

    try:
        branch = repo.branches[options.branch]
    except IndexError:
        print bright_red('"{}" is not a valid branch!'.format(options.branch))
        return

    # Decide the merge direction: by default the named branch is merged into
    # the active one; --into reverses that.
    active_branch = repo.active_branch
    if options.into:
        to_merge = repo.active_branch
        base = branch
    else:
        to_merge = branch
        base = repo.active_branch

    if _needs_rebase(base, to_merge):
        print bright_red('To merge "{}" into "{}" you must rebase its '
                         'contents first!'.format(to_merge, base))
        print "To do that, run:"
        print "    git checkout {} && git rebase {}".format(to_merge, base)
        return

    if options.skip_tests:
        print bright_yellow('Skipping tests')
        tests_passed = True
    else:
        to_merge.checkout()
        tests_passed = _run_tests()

    if tests_passed:
        print bright_green('Merging "{}" into "{}"'.format(to_merge, base))
        base.checkout()
        try:
            if subprocess.call(['git', 'merge', '--no-ff', to_merge.name]) != 0:
                print bright_red('Something went wrong while merging. '
                                 'Restoring original branch')
                active_branch.checkout()
        except Exception:
            # Restore the branch the user started on before re-raising.
            active_branch.checkout()
            raise
    else:
        print bright_red('Tests FAILED! Not merging.')
        active_branch.checkout()
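# Neither _get_argument_parser() nor _needs_rebase() is part of this excerpt.
# The sketch below is one plausible shape for them, inferred only from how
# main() uses them -- the argparse wiring and the merge-base check are
# assumptions, not the original implementation (assumes `import argparse`).
def _get_argument_parser():
    parser = argparse.ArgumentParser(
        description='Run the tests on a branch and merge it if they pass.')
    parser.add_argument('branch', help='branch to merge')
    parser.add_argument('--into', action='store_true',
                        help='merge the active branch into BRANCH instead')
    parser.add_argument('--skip-tests', action='store_true',
                        help='merge without running the test suite')
    return parser


def _needs_rebase(base, to_merge):
    # `to_merge` needs a rebase when the tip of `base` is not already an
    # ancestor of it, i.e. the merge would pull in unrebased history.
    merge_base = base.repo.merge_base(base, to_merge)[0]
    return merge_base.hexsha != base.commit.hexsha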
def _run_tests():
    makefile = _get_makefile()
    if makefile:
        with open(makefile) as fin:
            if any(l.startswith('test:') for l in fin):
                print bright_green('Running tests from makefile: {}'.format(
                    os.path.abspath(makefile)))
                return subprocess.call(['make', 'test']) == 0

    if os.path.isfile('manage.py'):
        print bright_green('Running tests from manage.py')
        return subprocess.call(['python', 'manage.py', 'test']) == 0

    return True
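# _get_makefile() is not shown in this excerpt. A minimal sketch, assuming it
# just checks the current directory for make's default makefile names (an
# illustration, not the original helper):
def _get_makefile():
    for candidate in ('GNUmakefile', 'makefile', 'Makefile'):
        if os.path.isfile(candidate):
            return candidate
    return None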
def _process_handle(handle, patch=False):
    with Logger.add_level("Reading from '{}'", handle.name):
        str_contents = handle.read()
        Logger.info("Size: {} bytes", len(str_contents))

    if os.path.exists(handle.name):
        fname, extension = os.path.basename(handle.name).rsplit(".", 1)
        backup_file = os.path.join("backups", "{}-{}.{}".format(
            fname, int(time.time()), extension))
        try:
            os.makedirs('backups')
        except OSError:
            pass  # The backups directory already exists.
        Logger.info("Writing backup...")
        with open(backup_file, 'wb') as fout:
            fout.write(str_contents)
        Logger.info("Done writing backup")

    Logger.info('Converting to binary string')
    binary_str = str_to_bits(str_contents)

    Logger.info('Decoding...')
    parser = (BinarySchema(_SHARED_STASH_SCHEMA)
              if binary_str.startswith(_SHARED_STASH_HEADER)
              else BinarySchema(_PERSONAL_STASH_SCHEMA))
    stash = parser.decode(binary_str)
    Logger.info('Decoded')

    if not _check_stash(stash):
        Logger.error("Failed stash checking")
        return

    _show_stash(stash)
    Logger.info(color.bright_green("Items with missing info:"))
    _show_missing_parses_in_stash(stash)

    filters = _get_all_filters(_ITEMS_SORT_ORDER, _ITEM_FILTERS)
    Logger.info('There are {} filters', len(filters))

    extracted = _extract_items(stash['pages'], filters)
    for item_type, items in sorted(extracted.items()):
        count = sum(len(r) for r in items)
        if count:
            Logger.info("Extracted items '{}' ({})", item_type, count)

    Logger.info("Sorting items")
    sorted_items = _sort_items(extracted, _ITEMS_SORT_ORDER)

    Logger.info("Paging items")
    pages = items_to_pages(sorted_items)

    # Rebuild the page list: a leading empty page, the surviving original
    # pages, an empty separator page, then one page per chunk of sorted items.
    empty_page = {
        'header': bits_to_str(_PAGE_HEADER),
        'item_count': 0,
        'items': [],
    }
    new_pages = [empty_page]
    new_pages.extend([p for p in stash['pages'] if p['items']])
    new_pages.append(empty_page)
    new_pages.extend([{
        'header': bits_to_str(_PAGE_HEADER),
        'item_count': len(p),
        'items': p,
    } for p in pages if p])

    stash['pages'] = new_pages
    stash['page_count'] = len(new_pages)
    # _show_stash(stash)

    Logger.info("Encoding...")
    binary = parser.encode(stash)
    contents = bits_to_str(binary)
    Logger.info("Encoded. Size: {} ({} bits)", len(contents), len(binary))

    if os.path.exists(handle.name) and patch:
        Logger.info('Patching: {}', handle.name)
        with open(handle.name, 'wb') as fout:
            fout.write(contents)

    with open("/tmp/test.d2x", "w") as fout:
        Logger.info('Writing to: /tmp/test.d2x')
        fout.write(contents)
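# A sketch of how _process_handle() might be driven from the command line.
# The entry point and flag names here are illustrative assumptions; the
# original module's CLI wiring is not part of this excerpt.
def _main():
    import argparse
    arg_parser = argparse.ArgumentParser(description='Sort a stash file.')
    arg_parser.add_argument('stash', type=argparse.FileType('rb'),
                            help='path to the stash file to process')
    arg_parser.add_argument('--patch', action='store_true',
                            help='also write the sorted stash back in place')
    args = arg_parser.parse_args()
    _process_handle(args.stash, patch=args.patch)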
                    item_has_quantity, item_has_durability)
from .logger import Logger
from .pager import item_type_filter, ItemFilter, items_to_pages
from .props import PropertyList, MISSING_PROPERTY_IDS
from .schema import (SchemaPiece, Integer, Chars, BinarySchema, Until,
                     NullTerminatedChars)
from .utils import str_to_bits, bits_to_str, bits_to_int

logger = logging.getLogger(__name__)  # pylint: disable=invalid-name

_SHARED_STASH_HEADER = str_to_bits("\x53\x53\x53\x00\x30\x31")
_STASH_HEADER = str_to_bits("\x43\x53\x54\x4d\x30\x31")
_PAGE_HEADER = str_to_bits("\x53\x54\x00\x4a\x4d")
_ITEM_HEADER = str_to_bits("\x4a\x4d")

_GREEN_TICK = color.bright_green(u"[✓]")
_RED_CROSS = color.bright_red(u"[✗]")

_ITEM_PARSES = collections.Counter()
_FAILED_PARSES = collections.Counter()


def _check_stash(stash):
    item_schema = BinarySchema(_ITEM_SCHEMA)
    Logger.info("Checking stash. Has {} pages", stash['page_count'])
    item_count = 0
    for page_no, page in enumerate(stash['pages']):
        for item_no, item_data in enumerate(
                sorted(page['items'], key=lambda i: Item(i).position())):
            item_count += 1