def build_single_page_version(lang, args, cfg):
    """Build a single-page docs site for *lang* into args.output_dir/<lang>/single.

    Concatenates all markdown for the language into single.md, builds it with
    mkdocs into a scratch directory, then copies the 'single' subtree into the
    output directory.
    """
    logging.info('Building single page version for ' + lang)
    with autoremoved_file(os.path.join(args.docs_dir, lang, 'single.md')) as single_md:
        concatenate(lang, args.docs_dir, single_md)
        with temp_dir() as temp:
            cfg.load_dict({
                'docs_dir': os.path.join(args.docs_dir, lang),
                'site_dir': temp,
                'extra': {
                    'single_page': True,
                    'search': {
                        'language': 'en, ru'
                    }
                },
                'pages': [{cfg.data.get('site_name'): 'single.md'}]
            })
            mkdocs_build.build(cfg)
            single_page_output_path = os.path.join(args.output_dir, lang, 'single')
            # copytree raises if the destination already exists, so remove any
            # output left over from a previous run first.
            if os.path.exists(single_page_output_path):
                shutil.rmtree(single_page_output_path)
            shutil.copytree(os.path.join(temp, 'single'), single_page_output_path)
def build_single_page_version(lang, args, cfg):
    """Build the single-page docs for *lang* and render it to PDF.

    Copies the language sources into a temp tree pruned down to single.md,
    builds it with mkdocs, copies the result into the output tree, then runs
    wkhtmltopdf over the generated index.html.
    """
    logging.info('Building single page version for ' + lang)
    with autoremoved_file(os.path.join(args.docs_dir, lang, 'single.md')) as single_md:
        concatenate(lang, args.docs_dir, single_md)
        with temp_dir() as site_temp:
            with temp_dir() as docs_temp:
                docs_temp_lang = os.path.join(docs_temp, lang)
                shutil.copytree(os.path.join(args.docs_dir, lang), docs_temp_lang)
                # Keep only the concatenated page so mkdocs builds nothing else.
                for root, _, filenames in os.walk(docs_temp_lang):
                    for filename in filenames:
                        if filename != 'single.md' and filename.endswith('.md'):
                            os.unlink(os.path.join(root, filename))
                cfg.load_dict({
                    'docs_dir': docs_temp_lang,
                    'site_dir': site_temp,
                    'extra': {
                        'single_page': True
                    },
                    'nav': [{cfg.data.get('site_name'): 'single.md'}]
                })
                mkdocs_build.build(cfg)
                single_page_output_path = os.path.join(args.docs_dir, args.output_dir, lang, 'single')
                if os.path.exists(single_page_output_path):
                    shutil.rmtree(single_page_output_path)
                shutil.copytree(os.path.join(site_temp, 'single'), single_page_output_path)
                single_page_index_html = os.path.abspath(os.path.join(single_page_output_path, 'index.html'))
                single_page_pdf = single_page_index_html.replace('index.html', 'clickhouse_%s.pdf' % lang)
                create_pdf_command = ['wkhtmltopdf', '--print-media-type', single_page_index_html, single_page_pdf]
                logging.debug(' '.join(create_pdf_command))
                # BUG FIX: pass the argument list directly instead of joining it
                # into a shell string — the joined form broke on paths containing
                # spaces and was shell-injection prone.
                subprocess.check_call(create_pdf_command)
def build_single_page_version(lang, args, cfg):
    """Build the single-page docs for *lang*, publish them, and produce a PDF.

    The language sources are copied to a temp dir and pruned to single.md, the
    site is built there with mkdocs, the 'single' subtree is installed into the
    output dir, and wkhtmltopdf renders the resulting index.html to PDF.
    """
    logging.info('Building single page version for ' + lang)
    with autoremoved_file(os.path.join(args.docs_dir, lang, 'single.md')) as single_md:
        concatenate(lang, args.docs_dir, single_md)
        with temp_dir() as site_temp:
            with temp_dir() as docs_temp:
                docs_temp_lang = os.path.join(docs_temp, lang)
                shutil.copytree(os.path.join(args.docs_dir, lang), docs_temp_lang)
                # Remove every markdown file except the concatenated one.
                for root, _, filenames in os.walk(docs_temp_lang):
                    for filename in filenames:
                        if filename != 'single.md' and filename.endswith('.md'):
                            os.unlink(os.path.join(root, filename))
                cfg.load_dict({
                    'docs_dir': docs_temp_lang,
                    'site_dir': site_temp,
                    'extra': {
                        'single_page': True
                    },
                    'nav': [
                        {cfg.data.get('site_name'): 'single.md'}
                    ]
                })
                mkdocs_build.build(cfg)
                single_page_output_path = os.path.join(args.docs_dir, args.output_dir, lang, 'single')
                if os.path.exists(single_page_output_path):
                    shutil.rmtree(single_page_output_path)
                shutil.copytree(
                    os.path.join(site_temp, 'single'),
                    single_page_output_path
                )
                single_page_index_html = os.path.abspath(os.path.join(single_page_output_path, 'index.html'))
                single_page_pdf = single_page_index_html.replace('index.html', 'clickhouse_%s.pdf' % lang)
                create_pdf_command = ['wkhtmltopdf', '--print-media-type', single_page_index_html, single_page_pdf]
                logging.debug(' '.join(create_pdf_command))
                # BUG FIX: call with the argument list, not a shell-joined
                # string — joining broke on paths with spaces and invited
                # shell injection.
                subprocess.check_call(create_pdf_command)
def test_concatenate(self):
    """Concatenating runs of identical pairs yields the expected array string."""
    # NOTE(review): the expected value is a triple-quoted literal; it may
    # originally contain newlines between rows that were lost in the collapsed
    # formatting of this file — confirm against the original source.
    self.assertEqual(
        concatenate.concatenate([[1, 2], [1, 2], [1, 2], [1, 2], [3, 4], [3, 4], [3, 4]]),
        '''[[1 2] [1 2] [1 2] [1 2] [3 4] [3 4] [3 4]]''')
def build_single_page_version(lang, args, cfg):
    """Build the single-page variant of the docs for one language.

    The concatenated single.md is built by mkdocs into a scratch site
    directory and the resulting 'single' subtree is copied into the output
    tree, replacing any previous build.
    """
    logging.info('Building single page version for ' + lang)
    with autoremoved_file(os.path.join(args.docs_dir, lang, 'single.md')) as single_md:
        concatenate(lang, args.docs_dir, single_md)
        with temp_dir() as build_dir, temp_dir() as sources_dir:
            lang_sources = os.path.join(sources_dir, lang)
            shutil.copytree(os.path.join(args.docs_dir, lang), lang_sources)
            # Drop every markdown file except the concatenated page.
            for dirpath, _, names in os.walk(lang_sources):
                for name in names:
                    if name.endswith('.md') and name != 'single.md':
                        os.unlink(os.path.join(dirpath, name))
            cfg.load_dict({
                'docs_dir': lang_sources,
                'site_dir': build_dir,
                'extra': {
                    'single_page': True,
                    'opposite_lang': 'en' if lang == 'ru' else 'ru'
                },
                'nav': [{cfg.data.get('site_name'): 'single.md'}]
            })
            mkdocs_build.build(cfg)
            destination = os.path.join(args.docs_dir, args.output_dir, lang, 'single')
            if os.path.exists(destination):
                shutil.rmtree(destination)
            shutil.copytree(os.path.join(build_dir, 'single'), destination)
def test_concatenate_voxels(self):
    """Check voxel time-series values in the image produced by concat.concatenate."""
    merged_img = concat.concatenate(self.img, [[10, 50], [55, 75], [100, 105]])
    command = ['fslmeants', '-i', merged_img, '-c', '33', '28', '11']
    output = Popen(command, stdout=PIPE).communicate()[0].split('\n')
    #TR 10
    t1 = int(float(output[0]))
    #TR 40
    t2 = int(float(output[30]))
    self.assertTrue(t1==12227)
    self.assertTrue(t2==12420)
    command = ['fslmeants', '-i', merged_img, '-c', '39', '28', '16']
    output = Popen(command, stdout=PIPE).communicate()[0].split('\n')
    #TR 60
    t3 = int(float(output[59]))
    #TR 65
    t4 = int(float(output[64]))
    # NOTE(review): likely copy-paste bug — t3 and t4 are computed above but the
    # two assertions below re-check t1 and t2, so the second voxel is never
    # actually verified. The correct expected values for t3/t4 are not
    # determinable from this file; they must be asserted once known.
    self.assertTrue(t1==12227)
    self.assertTrue(t2==12420)
def build_single_page_version(lang, args, cfg):
    """Build the single-page docs for *lang*, optionally render a PDF, and
    run the single-page test build.

    Sets SINGLE_PAGE=1 in the environment for the build, honors
    args.version_prefix for the output location, args.skip_pdf, and
    args.save_raw_single_page.
    """
    logging.info('Building single page version for ' + lang)
    os.environ['SINGLE_PAGE'] = '1'
    with util.autoremoved_file(os.path.join(args.docs_dir, lang, 'single.md')) as single_md:
        concatenate(lang, args.docs_dir, single_md)
        with util.temp_dir() as site_temp:
            with util.temp_dir() as docs_temp:
                docs_src_lang = os.path.join(args.docs_dir, lang)
                docs_temp_lang = os.path.join(docs_temp, lang)
                shutil.copytree(docs_src_lang, docs_temp_lang)
                # Prune everything except the concatenated page.
                for root, _, filenames in os.walk(docs_temp_lang):
                    for filename in filenames:
                        if filename != 'single.md' and filename.endswith('.md'):
                            os.unlink(os.path.join(root, filename))
                cfg.load_dict({
                    'docs_dir': docs_temp_lang,
                    'site_dir': site_temp,
                    'extra': {
                        'single_page': True
                    },
                    'nav': [
                        {cfg.data.get('site_name'): 'single.md'}
                    ]
                })
                mkdocs_build.build(cfg)
                if args.version_prefix:
                    single_page_output_path = os.path.join(args.docs_dir, args.docs_output_dir, args.version_prefix, lang, 'single')
                else:
                    single_page_output_path = os.path.join(args.docs_dir, args.docs_output_dir, lang, 'single')
                if os.path.exists(single_page_output_path):
                    shutil.rmtree(single_page_output_path)
                shutil.copytree(
                    os.path.join(site_temp, 'single'),
                    single_page_output_path
                )
                if not args.skip_pdf:
                    single_page_index_html = os.path.abspath(os.path.join(single_page_output_path, 'index.html'))
                    single_page_pdf = single_page_index_html.replace('index.html', 'clickhouse_%s.pdf' % lang)
                    create_pdf_command = ['wkhtmltopdf', '--print-media-type', single_page_index_html, single_page_pdf]
                    logging.debug(' '.join(create_pdf_command))
                    # BUG FIX: run with the argument list instead of a
                    # shell-joined string (safe for paths with spaces, no
                    # shell injection).
                    subprocess.check_call(create_pdf_command)
                # Rebuild without single_page extra to sanity-check the output.
                with util.temp_dir() as test_dir:
                    cfg.load_dict({
                        'docs_dir': docs_temp_lang,
                        'site_dir': test_dir,
                        'extra': {
                            'single_page': False
                        },
                        'nav': [
                            {cfg.data.get('site_name'): 'single.md'}
                        ]
                    })
                    mkdocs_build.build(cfg)
                    if not args.version_prefix:  # maybe enable in future
                        test.test_single_page(os.path.join(test_dir, 'single', 'index.html'), lang)
                    if args.save_raw_single_page:
                        shutil.copytree(test_dir, args.save_raw_single_page)
def build_single_page_version(lang, args, cfg):
    """Build the single-page docs for *lang* and optionally render a PDF by
    serving the test build over a local HTTP server for wkhtmltopdf.

    Mutates cfg.data['extra']['single_page'] during the build; honors
    args.version_prefix, args.skip_pdf and args.save_raw_single_page.
    """
    logging.info(f'Building single page version for {lang}')
    os.environ['SINGLE_PAGE'] = '1'
    extra = cfg.data['extra']
    extra['single_page'] = True
    with util.autoremoved_file(os.path.join(args.docs_dir, lang, 'single.md')) as single_md:
        concatenate(lang, args.docs_dir, single_md)
        with util.temp_dir() as site_temp:
            with util.temp_dir() as docs_temp:
                docs_src_lang = os.path.join(args.docs_dir, lang)
                docs_temp_lang = os.path.join(docs_temp, lang)
                shutil.copytree(docs_src_lang, docs_temp_lang)
                # Prune everything except the concatenated page.
                for root, _, filenames in os.walk(docs_temp_lang):
                    for filename in filenames:
                        if filename != 'single.md' and filename.endswith('.md'):
                            os.unlink(os.path.join(root, filename))
                cfg.load_dict({
                    'docs_dir': docs_temp_lang,
                    'site_dir': site_temp,
                    'extra': extra,
                    'nav': [{cfg.data.get('site_name'): 'single.md'}]
                })
                mkdocs_build.build(cfg)
                if args.version_prefix:
                    single_page_output_path = os.path.join(
                        args.docs_dir, args.docs_output_dir, args.version_prefix, lang, 'single')
                else:
                    single_page_output_path = os.path.join(
                        args.docs_dir, args.docs_output_dir, lang, 'single')
                if os.path.exists(single_page_output_path):
                    shutil.rmtree(single_page_output_path)
                shutil.copytree(os.path.join(site_temp, 'single'), single_page_output_path)
                if not args.skip_pdf:
                    with util.temp_dir() as test_dir:
                        single_page_pdf = os.path.abspath(
                            os.path.join(single_page_output_path, f'clickhouse_{lang}.pdf'))
                        extra['single_page'] = False
                        cfg.load_dict({
                            'docs_dir': docs_temp_lang,
                            'site_dir': test_dir,
                            'extra': extra,
                            'nav': [{cfg.data.get('site_name'): 'single.md'}]
                        })
                        mkdocs_build.build(cfg)
                        # Assemble the site's css/js bundles (shell needed for the redirection).
                        css_in = ' '.join(website.get_css_in(args))
                        js_in = ' '.join(website.get_js_in(args))
                        subprocess.check_call(f'cat {css_in} > {test_dir}/css/base.css', shell=True)
                        subprocess.check_call(f'cat {js_in} > {test_dir}/js/base.js', shell=True)
                        port_for_pdf = util.get_free_port()
                        with socketserver.TCPServer(
                                ('', port_for_pdf), http.server.SimpleHTTPRequestHandler) as httpd:
                            logging.info(f"serving for pdf at port {port_for_pdf}")
                            thread = threading.Thread(target=httpd.serve_forever)
                            with util.cd(test_dir):
                                thread.start()
                                create_pdf_command = [
                                    'wkhtmltopdf',
                                    '--print-media-type',
                                    '--no-stop-slow-scripts',
                                    '--log-level', 'warn',
                                    f'http://localhost:{port_for_pdf}/single/',
                                    single_page_pdf
                                ]
                                try:
                                    if args.save_raw_single_page:
                                        shutil.copytree(test_dir, args.save_raw_single_page)
                                    logging.info(' '.join(create_pdf_command))
                                    # BUG FIX: pass the list instead of a
                                    # shell-joined string — safe for paths with
                                    # spaces, no shell injection.
                                    subprocess.check_call(create_pdf_command)
                                finally:
                                    httpd.shutdown()
                                    thread.join(timeout=5.0)
                        if not args.version_prefix:  # maybe enable in future
                            test.test_single_page(
                                os.path.join(test_dir, 'single', 'index.html'), lang)
    logging.info(f'Finished building single page version for {lang}')
def build_single_page_version(lang, args, cfg):
    """Build the single-page docs for *lang*; then rebuild into a test dir,
    serve it from a child process, and render it to PDF with wkhtmltopdf.

    Mutates cfg.data['extra']['single_page'] during the build; honors
    args.version_prefix, args.skip_pdf and args.save_raw_single_page.
    """
    logging.info(f'Building single page version for {lang}')
    os.environ['SINGLE_PAGE'] = '1'
    extra = cfg.data['extra']
    extra['single_page'] = True
    with util.autoremoved_file(os.path.join(args.docs_dir, lang, 'single.md')) as single_md:
        concatenate(lang, args.docs_dir, single_md)
        with util.temp_dir() as site_temp:
            with util.temp_dir() as docs_temp:
                docs_src_lang = os.path.join(args.docs_dir, lang)
                docs_temp_lang = os.path.join(docs_temp, lang)
                shutil.copytree(docs_src_lang, docs_temp_lang)
                # Prune everything except the concatenated page.
                for root, _, filenames in os.walk(docs_temp_lang):
                    for filename in filenames:
                        if filename != 'single.md' and filename.endswith('.md'):
                            os.unlink(os.path.join(root, filename))
                cfg.load_dict({
                    'docs_dir': docs_temp_lang,
                    'site_dir': site_temp,
                    'extra': extra,
                    'nav': [{cfg.data.get('site_name'): 'single.md'}]
                })
                mkdocs_build.build(cfg)
                if args.version_prefix:
                    single_page_output_path = os.path.join(
                        args.docs_dir, args.docs_output_dir, args.version_prefix, lang, 'single')
                else:
                    single_page_output_path = os.path.join(
                        args.docs_dir, args.docs_output_dir, lang, 'single')
                if os.path.exists(single_page_output_path):
                    shutil.rmtree(single_page_output_path)
                shutil.copytree(os.path.join(site_temp, 'single'), single_page_output_path)
                logging.info(f'Re-building single page for {lang} pdf/test')
                with util.temp_dir() as test_dir:
                    single_page_pdf = os.path.abspath(
                        os.path.join(single_page_output_path, f'clickhouse_{lang}.pdf'))
                    extra['single_page'] = False
                    cfg.load_dict({
                        'docs_dir': docs_temp_lang,
                        'site_dir': test_dir,
                        'extra': extra,
                        'nav': [{cfg.data.get('site_name'): 'single.md'}]
                    })
                    mkdocs_build.build(cfg)
                    # Assemble css/js bundles (shell needed for the redirection).
                    css_in = ' '.join(website.get_css_in(args))
                    js_in = ' '.join(website.get_js_in(args))
                    subprocess.check_call(f'cat {css_in} > {test_dir}/css/base.css', shell=True)
                    subprocess.check_call(f'cat {js_in} > {test_dir}/js/base.js', shell=True)
                    if not args.skip_pdf:
                        port_for_pdf = util.get_free_port()
                        httpd = socketserver.TCPServer(
                            ('', port_for_pdf), http.server.SimpleHTTPRequestHandler)
                        logging.info(f"Serving for {lang} pdf at port {port_for_pdf}")
                        process = multiprocessing.Process(target=httpd.serve_forever)
                        with util.cd(test_dir):
                            process.start()
                            create_pdf_command = [
                                'wkhtmltopdf',
                                '--print-media-type',
                                '--disable-javascript',  # TODO: restore
                                '--log-level', 'warn',
                                f'http://localhost:{port_for_pdf}/single/',
                                single_page_pdf
                            ]
                            try:
                                if args.save_raw_single_page:
                                    shutil.copytree(test_dir, args.save_raw_single_page)
                                logging.info(' '.join(create_pdf_command))
                                # BUG FIX: pass the list instead of a
                                # shell-joined string — safe for paths with
                                # spaces, no shell injection.
                                subprocess.check_call(create_pdf_command)
                            finally:
                                logging.info(f'Stop serving for {lang} pdf at port {port_for_pdf}')
                                process.kill()
                                # Poll until the child is fully reaped so the
                                # port is released before we continue.
                                while True:
                                    time.sleep(0.25)
                                    try:
                                        process.close()
                                        break
                                    except ValueError:
                                        logging.info(f'Waiting for {lang} httpd at port {port_for_pdf} to stop')
                    if not args.version_prefix:  # maybe enable in future
                        logging.info(f'Running tests for {lang}')
                        test.test_single_page(
                            os.path.join(test_dir, 'single', 'index.html'), lang)
    logging.info(f'Finished building single page version for {lang}')
import concatenate

# Merge the first two chapters, then fold each remaining chapter (3..11)
# into the running result.
concatenate.concatenate("Chapter1.pdf", "Chapter2.pdf", 2)
remaining = ["Chapter{}.pdf".format(n) for n in range(3, 12)]
for name in remaining:
    concatenate.concatenate("final.pdf", name, 2)
def build_single_page_version(lang, args, nav, cfg):
    """Build the single-page docs for *lang*, split out the page content into
    content.js (optionally minified), then rebuild for testing and render a
    PDF via wkhtmltopdf with file:// URLs rewritten into the page.

    Mutates cfg.data['extra']['single_page'] during the build; honors
    args.version_prefix, args.minify, args.skip_pdf and
    args.save_raw_single_page.
    """
    logging.info(f'Building single page version for {lang}')
    os.environ['SINGLE_PAGE'] = '1'
    extra = cfg.data['extra']
    extra['single_page'] = True
    with util.autoremoved_file(os.path.join(args.docs_dir, lang, 'single.md')) as single_md:
        concatenate(lang, args.docs_dir, single_md, nav)
        with util.temp_dir() as site_temp:
            with util.temp_dir() as docs_temp:
                docs_src_lang = os.path.join(args.docs_dir, lang)
                docs_temp_lang = os.path.join(docs_temp, lang)
                shutil.copytree(docs_src_lang, docs_temp_lang)
                # Prune everything except the concatenated page.
                for root, _, filenames in os.walk(docs_temp_lang):
                    for filename in filenames:
                        if filename != 'single.md' and filename.endswith('.md'):
                            os.unlink(os.path.join(root, filename))
                cfg.load_dict({
                    'docs_dir': docs_temp_lang,
                    'site_dir': site_temp,
                    'extra': extra,
                    'nav': [
                        {cfg.data.get('site_name'): 'single.md'}
                    ]
                })
                mkdocs_build.build(cfg)
                if args.version_prefix:
                    single_page_output_path = os.path.join(args.docs_dir, args.docs_output_dir, args.version_prefix, lang, 'single')
                else:
                    single_page_output_path = os.path.join(args.docs_dir, args.docs_output_dir, lang, 'single')
                if os.path.exists(single_page_output_path):
                    shutil.rmtree(single_page_output_path)
                shutil.copytree(
                    os.path.join(site_temp, 'single'),
                    single_page_output_path
                )
                # Split the built page on the <!-- BREAK --> marker: the middle
                # part (inline JS content) moves into content.js.
                single_page_index_html = os.path.join(single_page_output_path, 'index.html')
                single_page_content_js = os.path.join(single_page_output_path, 'content.js')
                with open(single_page_index_html, 'r') as f:
                    sp_prefix, sp_js, sp_suffix = f.read().split('<!-- BREAK -->')
                with open(single_page_index_html, 'w') as f:
                    f.write(sp_prefix)
                    f.write(sp_suffix)
                with open(single_page_content_js, 'w') as f:
                    if args.minify:
                        import jsmin
                        sp_js = jsmin.jsmin(sp_js)
                    f.write(sp_js)
                logging.info(f'Re-building single page for {lang} pdf/test')
                with util.temp_dir() as test_dir:
                    extra['single_page'] = False
                    cfg.load_dict({
                        'docs_dir': docs_temp_lang,
                        'site_dir': test_dir,
                        'extra': extra,
                        'nav': [
                            {cfg.data.get('site_name'): 'single.md'}
                        ]
                    })
                    mkdocs_build.build(cfg)
                    # Assemble css/js bundles (shell needed for the redirection).
                    css_in = ' '.join(website.get_css_in(args))
                    js_in = ' '.join(website.get_js_in(args))
                    subprocess.check_call(f'cat {css_in} > {test_dir}/css/base.css', shell=True)
                    subprocess.check_call(f'cat {js_in} > {test_dir}/js/base.js', shell=True)
                    if args.save_raw_single_page:
                        shutil.copytree(test_dir, args.save_raw_single_page)
                    if not args.version_prefix:  # maybe enable in future
                        logging.info(f'Running tests for {lang}')
                        test.test_single_page(
                            os.path.join(test_dir, 'single', 'index.html'), lang)
                    if not args.skip_pdf:
                        single_page_index_html = os.path.join(test_dir, 'single', 'index.html')
                        single_page_pdf = os.path.abspath(
                            os.path.join(single_page_output_path, f'clickhouse_{lang}.pdf')
                        )
                        # Rewrite root-relative asset URLs to file:// so
                        # wkhtmltopdf can resolve them without a server.
                        with open(single_page_index_html, 'r') as f:
                            soup = bs4.BeautifulSoup(
                                f.read(),
                                features='html.parser'
                            )
                        soup_prefix = f'file://{test_dir}'
                        for img in soup.findAll('img'):
                            if img['src'].startswith('/'):
                                img['src'] = soup_prefix + img['src']
                        for script in soup.findAll('script'):
                            script['src'] = soup_prefix + script['src'].split('?', 1)[0]
                        for link in soup.findAll('link'):
                            link['href'] = soup_prefix + link['href'].split('?', 1)[0]
                        with open(single_page_index_html, 'w') as f:
                            f.write(str(soup))
                        create_pdf_command = [
                            'wkhtmltopdf',
                            '--print-media-type',
                            '--log-level', 'warn',
                            single_page_index_html, single_page_pdf
                        ]
                        logging.info(' '.join(create_pdf_command))
                        # BUG FIX: pass the list instead of a shell-joined
                        # string — safe for paths with spaces, no shell
                        # injection.
                        subprocess.check_call(create_pdf_command)
    logging.info(f'Finished building single page version for {lang}')
def script(dirname, subdirname):
    """Run the full processing pipeline for one dataset subdirectory.

    Unzips the data, moves the source archive to the storage volume, starts
    N_THREADS workers, waits for their done-markers, collects the produced
    .xls files, then post-processes and tests the results.
    """
    unzip(dirname, subdirname)
    # Ensure the working directory layout exists (idempotent; makedirs
    # replaces the previous isdir+mkdir chains and creates parents too).
    os.makedirs(os.path.join('check', dirname), exist_ok=True)
    os.makedirs(os.path.join('/mnt/volume-nyc1-01', dirname), exist_ok=True)
    os.makedirs(os.path.join('check', dirname, 'xlses', subdirname), exist_ok=True)
    try:
        # BUG FIX: `ext` used to be unbound when neither archive existed,
        # raising a NameError that the broad except silently swallowed.
        ext = None
        if os.path.isfile(os.path.join('/root/Novosad/mouses/Data', dirname, subdirname + '.zip')):
            ext = '.zip'
        if os.path.isfile(os.path.join('/root/Novosad/mouses/Data', dirname, subdirname + '.rar')):
            ext = '.rar'
        if ext is None:
            raise FileNotFoundError('no .zip or .rar archive for ' + subdirname)
        shutil.move(
            os.path.join('/root/Novosad/mouses/Data', dirname, subdirname + ext),
            os.path.join('/mnt/volume-nyc1-01/', dirname))
    except Exception as e:
        # Best-effort: the archive was probably moved on a previous run.
        print(e, 'already moved', subdirname)
    part(dirname, subdirname)
    start(dirname, subdirname)
    time.sleep(20)
    try:
        # Poll until every worker has written its done-marker, then gather
        # the .xls outputs from each worker's numbered directory.
        while True:
            done_markers = [
                f for f in os.listdir(os.path.join('check', dirname, subdirname))
                if 'done.txt' in f
            ]
            if len(done_markers) == N_THREADS:
                xls_files = []
                for numb in range(N_THREADS):
                    thread_dir = os.path.join('check', dirname, subdirname, str(numb))
                    xls_files += [
                        os.path.join(thread_dir, f)
                        for f in os.listdir(thread_dir)
                        if '.xls' in f
                    ]
                for xls in xls_files:
                    shutil.move(xls, os.path.join('check', dirname, 'xlses', subdirname))
                break
            else:
                time.sleep(60 * 1)
    except Exception as e:
        print(e)
    clear(dirname, subdirname)
    add_empt(dirname, subdirname)
    create(dirname, subdirname)
    concatenate('0', dirname, subdirname)
    concatenate('1', dirname, subdirname)
    # First character of each result filename identifies the mouse group.
    infected = []
    if os.path.isdir(os.path.join('check', dirname, 'xlses', 'avg', '144')):
        infected = [
            f for f in os.listdir(os.path.join('check', dirname, 'xlses', 'avg', '144'))
            if '.xls' in f
        ]
    infected = list(set([f[0] for f in infected]))
    for m in infected:
        m = '0' if m == 'l' else '1'
        test(m, dirname)
vlist=files().getfiles() #select subclip subclip=subclip() subclip.getsubclip(vlist) #check size/audio videofix=videofix() videofix.videofix(subclip.vlistedit) #slomotion random slowmo=slowmo() slowmo.getslowmo(subclip.vlistedit) #concatenate all video concatenate=concatenate() concatenate.getconcatenate(subclip.vlistedit,subclip.vlist2) #audio audio=audio() audio.audiogain() audio.addaudio() audio.fade() #movieeffect movieeffect=movieeffect() movieeffect.filter() movieeffect.blackbar() # Delete tmp files deletefiles=deletefiles()
def test_concatenate_filename(self):
    """The merged image path defaults to 'filtered_func_data_merged' in cwd."""
    merged_img = concat.concatenate(self.img, [[2, 12], [15, 25], [36, 56]])
    expected = os.path.join(os.getcwd(), 'filtered_func_data_merged')
    # assertEqual reports both values on failure, unlike assertTrue(a == b)
    # which only says "False is not true".
    self.assertEqual(merged_img, expected)