def run(self):
    # Clear source and build data
    libdir = "build/lib/%s" % const.APP_NAME
    clear()
    print os.system("/bin/rm -rf build")
    makeDirs(libdir)
    makeDirs("build/desktop")
    makeDirs("build/bin")
    makeDirs("build/locales")

    print "Build codes..."
    os.system("cp -Rv src/*.py %s" % libdir)

    # Collect UI files
    print "Build ui..."
    for filename in glob.glob1("qt4", "*.ui"):
        print os.system("/usr/bin/pyuic4 -o %s/ui_%s.py qt4/%s" % (libdir, filename.split(".")[0], filename))
    for filename in glob.glob1("qt4", "*.qrc"):
        print os.system("/usr/bin/pyrcc4 -o %s/%s_rc.py qt4/%s" % (libdir, filename.split(".")[0], filename))

    print "Build locales..."
    for filename in glob.glob1("po", "*.po"):
        lang = filename.rsplit(".", 1)[0]
        print os.system("msgfmt po/%s.po -o build/locales/%s.mo" % (lang, lang))

    print "Build .desktop file"
    print os.system("intltool-merge -d po addfiles/%s.desktop.in build/desktop/%s.desktop" % (const.APP_NAME, const.APP_NAME))

    print "Build bin file"
    self.copy_file("src/%s.py" % const.APP_NAME, "build/bin/%s" % const.APP_NAME)
    self.copy_file("src/%s.py" % const.APP_NAME, "build/lib/")

    print("\n\nYou can run %s with this command:\n python build/lib/%s.py" % (const.APP_NAME, const.APP_NAME))
def get_captcha():
    '''Get a captcha; returns (hash_key, image_path).'''
    captcha_cnt = CustomDefie.captcha_total
    m_captcha_key, fn = '', ''
    b_memcache = False
    # Try to get a captcha file from memcache
    memecache_captcha = cache.get(CAPTCHA_M)
    if memecache_captcha and len(memecache_captcha) == captcha_cnt:
        captcha_index = CAPTCHA_RI.randint(0, len(memecache_captcha) - 1)
        captcha_i = memecache_captcha[captcha_index]
        m_captcha_key, fn = captcha_i['m_captcha_key'], captcha_i['fn']
        if os.path.exists(os.path.join(CAPTCHA_PATH, fn)):
            b_memcache = True
    if not b_memcache:
        # Create new captcha files
        file_ls = glob.glob1(CAPTCHA_PATH, '*.jpg')
        if captcha_cnt > len(file_ls):
            __make_captcha(captcha_cnt - len(file_ls))
            file_ls = glob.glob1(CAPTCHA_PATH, '*.jpg')
        __set_captcha_memcache(file_ls)
        # Pick a captcha
        captcha_index = CAPTCHA_RI.randint(0, len(file_ls) - 1)
        fn = file_ls[captcha_index]
        hash_v = __get_captcha_hash_by_fn(file_ls[captcha_index])
        m_captcha_key = __get_mcaptchahash_by_captchahash(hash_v)
    return m_captcha_key, "/site_media/captcha/%s" % fn
def showEnvDetails():
    toDir = app.config['UPLOAD_DIR']
    s = "Now: " + str(datetime.now())
    s = s + "\nUpload Directory: " + toDir
    s = s + "\n Exists: " + str(os.path.exists(toDir))
    if os.path.exists(toDir):
        s = s + "\n No. of zip Files(Load): " + str(len(glob.glob1(toDir, "*.zip")))
        s = s + "\n No. of Log Files(Sessions): " + str(len(glob.glob1(toDir, "*.log")))
    quota = os.path.join(app.config['DATA_DIR'], "quota.txt")
    if os.path.exists(quota):
        with open(quota, 'r') as content_file:
            content = content_file.read()
        s = s + "\nQuota Details:"
        s = s + "\n" + content
    last_hourly_cron_ran = os.path.join(app.config['DATA_DIR'], "last_hourly_cron_ran")
    if os.path.exists(last_hourly_cron_ran):
        with open(last_hourly_cron_ran, 'r') as content_file:
            content = content_file.read()
        s = s + "\nLast Hourly Cron executed at:"
        s = s + " " + content
    return lib.pushText(s)
def run(self):
    # Clear all
    os.system("rm -rf build")
    # Copy codes
    print "Copying PYs..."
    os.system("cp -R src build/")
    # Copy kde-themes
    print "Copying kde-themes..."
    os.system("cp -R data/kde-themes build/kapudan/")
    #update_messages()
    # Copy compiled UIs and RCs
    print "Generating UIs..."
    for filename in glob.glob1("ui", "*.ui"):
        # if not "ui_scrFolder" in filename:
        #     os.system("pykdeuic4 -o build/kapudan/screens/%s.py ui/%s" % (filename.split(".")[0], filename))
        # else:
        #     shutil.copy("ui/ui_scrFolder.py", "build/kapudan/screens/ui_scrFolder.py")
        os.system("pykdeuic4 -o build/kapudan/screens/%s.py ui/%s" % (filename.split(".")[0], filename))
    print "Generating RCs..."
    for filename in glob.glob1("data", "*.qrc"):
        os.system("pyrcc4 data/%s -o build/kapudan/%s_rc.py" % (filename, filename.split(".")[0]))
    os.system("sed -i 's/kapudan_rc/kapudan.\kapudan_rc/g' build/kapudan/screens/ui_*")
def run(self):
    # Clear all
    os.system("rm -rf build")
    build.run(self)
    # Copy codes
    print "Copying PYs..."
    os.system("cp -R src/ build/")
    # Copy icons
    print "Copying Images..."
    os.system("cp -R data/ build/")
    print "Generating .desktop files..."
    for filename in glob.glob("data/*.desktop.in"):
        os.system("intltool-merge -d po %s %s" % (filename, filename[:-3]))
    print "Generating UIs..."
    for filename in glob.glob1("ui", "*.ui"):
        os.system("py2uic5 -o build/ui_%s.py ui/%s" % (filename.split(".")[0], filename))  #, PROJECT))
    print "Generating RCs..."
    for filename in glob.glob1("data", "*.qrc"):
        os.system("py2rcc5 data/%s -o build/%s_rc.py" % (filename, filename.split(".")[0]))
    print "Generating QMs..."
    makeDirs("build/lang")
    # Temporary bindir to avoid qt4 conflicts
    #os.system("lrelease-qt5 lang/*.ts")
    os.system("lrelease lang/*.ts")
    for filename in glob.glob1("lang", "*.qm"):
        shutil.copy("lang/{}".format(filename), "build/lang")
def update_messages():
    files = tempfile.mkstemp()[1]
    # Collect UI files
    filelist = []
    for filename in glob.glob1("ui", "*.ui"):
        os.system("pyuic4 -o ui/ui_%s.py ui/%s -g %s" % (filename.split(".")[0], filename, PROJECT))
    # Generate POT file
    os.system("xgettext --default-domain=%s \
        --keyword=_ \
        --keyword=N_ \
        --keyword=i18n \
        --keyword=ki18n \
        --kde \
        -ci18n -ki18n:1 -ki18nc:1c,2 -ki18np:1,2 -ki18ncp:1c,2,3 -ktr2i18n:1 \
        -kI18N_NOOP:1 -kI18N_NOOP2:1c,2 -kaliasLocale -kki18n:1 -kki18nc:1c,2 \
        -kki18np:1,2 -kki18ncp:1c,2,3 \
        --files-from=%s \
        -o po/%s.pot" % (PROJECT, files, PROJECT))
    # Update PO files
    for item in glob.glob1("po", "*.po"):
        print "Updating .. ", item
        os.system("msgmerge --update --no-wrap --sort-by-file po/%s po/%s.pot" % (item, PROJECT))
    # Cleanup
    os.unlink(files)
    for f in [_f for _f in filelist if _f.startswith("ui/") or _f.endswith(".h")]:
        try:
            os.unlink(f)
        except OSError:
            pass
def run(self):
    # Clear all
    os.system("rm -rf build")
    # Copy codes
    print "Copying PYs..."
    os.system("cp -R src/ build/")
    # Copy icons
    print "Copying Images..."
    os.system("cp -R data/ build/")
    print "Generating .desktop files..."
    for filename in glob.glob("data/*.desktop.in"):
        os.system("intltool-merge -d po %s %s" % (filename, filename[:-3]))
    print "Generating UIs..."
    for filename in glob.glob1("ui", "*.ui"):
        if FOR_KDE_4:
            os.system("pykde4uic -o build/firewallmanager/ui_%s.py ui/%s -g %s" % (filename.split(".")[0], filename, PROJECT))
        else:
            os.system("pyuic4 -o build/firewallmanager/ui_%s.py ui/%s -g %s" % (filename.split(".")[0], filename, PROJECT))
    print "Generating RCs..."
    for filename in glob.glob1("data", "*.qrc"):
        os.system("pyrcc4 data/%s -o build/%s_rc.py" % (filename, filename.split(".")[0]))
def run(self):
    # Clear all
    os.system("rm -rf build")
    # Copy codes
    print "Copying PYs..."
    os.system("cp -R src/ build/")
    # Copy icons
    print "Copying Data..."
    os.system("cp -R data/ build/")
    # Copy UI
    print "Copying User Interfaces..."
    os.system("cp -R ui/ build/")
    print "Generating .desktop files..."
    for filename in glob.glob("data/*.desktop.in"):
        os.system("intltool-merge -d po %s %s" % (filename, filename[:-3]))
    print "Generating UIs..."
    # Collect UI for pure-qt
    for filename in glob.glob1("ui", "*.ui"):
        os.system("pyuic4 -o build/ui/%s.py ui/%s -g %s" % (filename.split(".")[0], filename, PROJECT))
        print "Creating...", filename
    # Remove UI files
    os.system("rm -rf build/ui/*.ui")
    print "Generating RCs..."
    for filename in glob.glob1("data", "*.qrc"):
        os.system("pyrcc4 data/%s -o build/%s_rc.py" % (filename, filename.split(".")[0]))
def run(self):
    # Clear all
    os.system("rm -rf build")
    # Copy codes
    print "Copying PYs..."
    os.system("cp -R src/ build/")
    # Copy icons
    print "Copying Images..."
    os.system("cp -R data/ build/")
    print "Generating .desktop files..."
    for filename in glob.glob("data/*.desktop.in"):
        os.system("intltool-merge -d po %s %s" % (filename, filename[:-3]))
    print "Generating UIs..."
    # Collect UI for pure-qt
    for filename in glob.glob1("ui", "*.ui"):
        os.system("py2uic5 -o build/servicemanager/ui_%s.py ui/%s" % (filename.split(".")[0], filename))  #, PROJECT))
    print "Generating RCs..."
    for filename in glob.glob1("data", "*.qrc"):
        os.system("py2rcc5 data/%s -o build/%s_rc.py" % (filename, filename.split(".")[0]))
def __init__(self):
    self.for_upload = []
    self.url_stats = {}
    self.tempdir = 'tmp'
    self.current_date = datetime.datetime.today().strftime("%Y-%m-%d")
    self.create_temp_dir()
    self.get_image_data()
    for chunk in self.chunks(glob.glob1(self.tempdir, "*.jpg"), 50):
        worker = Thread(target=self.create_thumbnail, args=(chunk,))
        worker.setDaemon(True)
        worker.start()
    while (activeCount() > 1):
        time.sleep(5)
    s3key = 'AKIAIYZERMTB6Z5NPF5Q'
    s3secret = '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'
    bucket_name = "hrachya-test"
    self.s3_conn = boto.connect_s3(s3key, s3secret)
    self.bucket_obj = self.s3_conn.get_bucket(bucket_name)
    for chunk in self.chunks(glob.glob1(self.tempdir, "*.jpg"), 100):
        worker = Thread(target=self.aws_s3_uploader, args=(chunk,))
        worker.setDaemon(True)
        worker.start()
    while (activeCount() > 1):
        time.sleep(5)
    #self.aws_s3_uploader()
    self.update_record()
    self.cleaner()
def multi_readout_analyze(folder, ccd_height=100., plot=True, freq=None):
    """Analyze several readout measurements in different files for readout diagnosis.

    The readout files in dm3 format must be contained in a folder, preferably
    numbered in the order of acquisition.

    Parameters
    ----------
    folder : string
        Folder where the dm3 readout files are stored
    ccd_height : float
    plot : bool
    freq : float
        Frequency of the camera

    Returns
    -------
    Dictionary
    """
    from spectrum import Spectrum
    files = glob.glob1(folder, '*.nc')
    if not files:
        files = glob.glob1(folder, '*.dm3')
    spectra = []
    variances = []
    binnings = []
    for f in files:
        print os.path.join(folder, f)
        s = Spectrum(os.path.join(folder, f))
        variance, channel_mean, norm_time_mean = analyze_readout(s)
        s.readout_analysis = {}
        s.readout_analysis['variance'] = variance.mean()
        s.readout_analysis['pattern'] = channel_mean
        s.readout_analysis['time'] = norm_time_mean
        if not hasattr(s, 'binning'):
            s.binning = float(os.path.splitext(f)[0][1:])
            if freq:
                s.readout_frequency = freq
            s.ccd_height = ccd_height
            s.save(f)
        spectra.append(s)
        binnings.append(s.binning)
        variances.append(variance.mean())
    pixels = ccd_height / np.array(binnings)
    plt.scatter(pixels, variances, label='data')
    fit = np.polyfit(pixels, variances, 1, full=True)
    if plot:
        x = np.linspace(0, pixels.max(), 100)
        y = x * fit[0][0] + fit[0][1]
        plt.plot(x, y, label='linear fit')
        plt.xlabel('number of pixels')
        plt.ylabel('variance')
        plt.legend(loc='upper left')
    print "Variance = %s * pixels + %s" % (fit[0][0], fit[0][1])
    dictio = {'pixels': pixels, 'variances': variances, 'fit': fit, 'spectra': spectra}
    return dictio
def test_legal():
    img = glob.glob1(des_img_dir, "*.jpg")
    txt = glob.glob1(des_label_dir, "*.txt")
    img = map(lambda x: x.split('.')[0], img)
    txt = map(lambda x: x.split('.')[0], txt)
    count = sum([1 if each in img else 0 for each in txt])
    print(count)
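A short note on the helper every snippet here leans on. glob.glob1(dirname, pattern) is an undocumented helper inside the glob module: it returns the base names (not full paths) of entries in dirname that match pattern, treating an empty dirname as the current directory. That is why the example above can compare image and label stems directly, and why other snippets os.path.join() the directory back on. A rough, hedged equivalent in terms of the public API (newer Python releases deprecate glob.glob1, so this spelling is safer); list_matching is a name introduced here for illustration:

import fnmatch
import os

def list_matching(dirname, pattern):
    """Return base names in dirname matching pattern, similar to glob.glob1."""
    if not dirname:
        dirname = os.curdir
    # Note: glob.glob1 additionally skips dot-files when the pattern does not
    # start with a dot; that refinement is omitted here for brevity.
    return fnmatch.filter(os.listdir(dirname), pattern)

# Usage sketch: list_matching("po", "*.po") behaves like glob.glob1("po", "*.po").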
def dir(imagetag, source_directory, additional_docker_options):
    """Builds a binary RPM from a directory.

    IMAGETAG should be a docker image id or a repository:tag,
    e.g. something like a682b68bbaba or alanfranz/drb-epel-6-x86-64:latest

    SOURCE_DIRECTORY should be a directory containing the .spec or the
    .spectemplate file and all the source files and patches referenced
    in such spec.

    ADDITIONAL_DOCKER_OPTIONS: whatever is passed will be forwarded straight
    to docker. PLEASE REMEMBER to insert a double dash (--) before the first
    additional option, otherwise it will be mistaken for a
    docker-rpm-builder option.

    example: docker-rpm-builder dir a682b68bbaba . -- --dns=10.2.0.1
    """
    # TODO: let spectemplate and/or spec be optional parameters
    # TODO: let the user choose $-delimited templates
    deletespec = False
    spectemplates = glob.glob1(source_directory, "*.spectemplate")
    specfiles = glob.glob1(source_directory, "*.spec")
    if len(spectemplates) > 1:
        raise ValueError("More than one spectemplate found in source directory")

    if spectemplates:
        if specfiles:
            raise ValueError("Found both .spec and .spectemplate in source directory.")
        spectemplate = spectemplates[0]
        template = DoubleDelimiterTemplate(codecs.open(spectemplate, "rb", "utf-8").read())
        with_substitutions = template.substitute(os.environ)
        finalspec = os.path.splitext(spectemplate)[0] + ".spec"
        with codecs.open(finalspec, "wb", "utf-8") as f:
            f.write(with_substitutions)
        specfiles.append(finalspec)
        deletespec = True

    if not specfiles or len(specfiles) > 1:
        raise ValueError("No specfiles or more than one specfile in source directory")

    specfile = specfiles[0]
    # FIXME: delete written specfile if using a spectemplate
    logging.info("Now building project from %s on image %s", source_directory, imagetag)
    # TODO: let this be something more configurable and/or injected
    dockerexec = which("docker")
    try:
        sp("{0} run -v {1}:/dockerscripts -v {2}:/src -w /dockerscripts {3} ./rpmbuild-in-docker.sh {4}:{5} {6}",
           dockerexec, getpath("drb/dockerscripts"), source_directory, imagetag,
           os.getuid(), os.getgid(), " ".join(additional_docker_options))
    finally:
        if deletespec:
            os.unlink(specfile)
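The spectemplate branch above expands environment variables into the template before building. A minimal sketch of that step, assuming DoubleDelimiterTemplate behaves like Python's string.Template with a non-default delimiter; the delimiter and the ExampleSpecTemplate/render_spectemplate names are illustrative only, not docker-rpm-builder's actual API:

import os
import string

class ExampleSpecTemplate(string.Template):
    # Hypothetical delimiter chosen for this sketch; the real
    # DoubleDelimiterTemplate defines its own convention.
    delimiter = "@"

def render_spectemplate(template_text, mapping=None):
    """Expand placeholders in a .spectemplate body, defaulting to os.environ."""
    return ExampleSpecTemplate(template_text).substitute(mapping or os.environ)

# Usage sketch: with VERSION=1.0 in the environment,
# render_spectemplate("Version: @VERSION") yields "Version: 1.0".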
def run(self):
    os.system("./setup.py build")

    if self.root:
        kde_dir = "%s/usr" % self.root
    else:
        kde_dir = "/usr"

    bin_dir = os.path.join(kde_dir, "bin")
    locale_dir = os.path.join(kde_dir, "share/locale")
    autostart_dir = os.path.join(kde_dir, "share/autostart")
    project_dir = os.path.join(kde_dir, "share/kde4/apps", about.appName)

    # Make directories
    print "Making directories..."
    makeDirs(bin_dir)
    #makeDirs(locale_dir)
    makeDirs(autostart_dir)
    makeDirs(project_dir)

    # Install desktop files
    print "Installing desktop files..."
    for filename in glob.glob("data/*.desktop.in"):
        os.system("intltool-merge -d po %s %s" % (filename, filename[:-3]))
    for filename in glob.glob1("data", "*.desktop"):
        shutil.copy("data/%s" % filename, autostart_dir)

    # Install codes
    print "Installing codes..."
    os.system("cp -R build/* %s/" % project_dir)

    # Install locales
    print "Installing locales..."
    for filename in glob.glob1("po", "*.po"):
        lang = filename.rsplit(".", 1)[0]
        os.system("msgfmt po/%s.po -o po/%s.mo" % (lang, lang))
        try:
            os.makedirs(os.path.join(locale_dir, "%s/LC_MESSAGES" % lang))
        except OSError:
            pass
        shutil.copy("po/%s.mo" % lang,
                    os.path.join(locale_dir, "%s/LC_MESSAGES" % lang, "%s.mo" % about.catalog))

    # Rename
    print "Renaming application.py..."
    #shutil.move(os.path.join(project_dir, "application.py"), os.path.join(project_dir, "%s.py" % about.appName))

    # Modes
    print "Changing file modes..."
    os.chmod(os.path.join(project_dir, "%s.py" % about.appName), 0755)

    # Symlink
    try:
        if self.root:
            os.symlink(os.path.join(project_dir.replace(self.root, ""), "%s.py" % about.appName),
                       os.path.join(bin_dir, about.appName))
        else:
            os.symlink(os.path.join(project_dir, "%s.py" % about.appName),
                       os.path.join(bin_dir, about.appName))
    except OSError:
        pass
def run(self):
    # Clear all
    os.system("rm -rf build")
    # Copy codes
    os.system("cp -R src build/")
    # Copy compiled UIs and RCs
    for filename in glob.glob1("src/migration/gui/ui", "*.ui"):
        os.system("/usr/kde/4/bin/pykde4uic -o build/migration/gui/ui/%s.py src/migration/gui/ui/%s" % (filename.split(".")[0], filename))
    for filename in glob.glob1("src/migration/gui/ui", "*.qrc"):
        os.system("/usr/bin/pyrcc4 src/migration/gui/ui/%s -o build/%s_rc.py" % (filename, filename.split(".")[0]))
def _FindSolutionFiles():
    folder = os.path.dirname(vim.current.buffer.name)
    solutionfiles = glob.glob1(folder, '*.sln')
    while not solutionfiles:
        lastfolder = folder
        folder = os.path.dirname(folder)
        if folder == lastfolder:
            break
        solutionfiles = glob.glob1(folder, '*.sln')
    return solutionfiles, folder
def get_block_candidates(self):
    cpp_blocks = filter(lambda x: not (x.startswith('qa_') or x.startswith('test_')),
                        glob.glob1("lib", "*.cc"))
    python_blocks = filter(lambda x: not (x.startswith('qa_') or x.startswith('build') or x.startswith('__init__')),
                           glob.glob1("python", "*.py"))
    block_candidates = [x.split('_impl')[0] for x in cpp_blocks] + [x.split('.')[0] for x in python_blocks]
    return block_candidates
def add_common_layers(location, dem_file):
    location_path = os.path.join(
        'C:/Users/kristydahl/Desktop/GIS_data/UCS_tidal_flooding_maps', location)
    folders_path = os.path.join(location_path, 'general_map_elements')
    folders = glob.glob1(folders_path, '*')
    map_path = os.path.join(location_path, 'map_docs')
    maps = glob.glob1(map_path, '*.mxd')
    for each_map in maps:
        map_with_full_path = os.path.join(map_path, each_map)
        mxd = arcpy.mapping.MapDocument(map_with_full_path)
        df = arcpy.mapping.ListDataFrames(mxd, "Layers")[0]
        each_map_no_ext = os.path.splitext(map_with_full_path)
        clipping_polygon = create_clipping_polygon(location, dem_file)
        for folder in folders:
            full_folder_path = os.path.join(folders_path, folder)
            shp = glob.glob1(full_folder_path, '*.shp')
            if len(shp) >= 1:
                for i in shp:
                    shp_with_full_path = os.path.join(full_folder_path, i)
                    print(shp)
                    sym_layer = glob.glob1(full_folder_path, '*_style*')
                    sym_layer_with_full_path = os.path.join(
                        full_folder_path, sym_layer[0])
                    print('Repairing Geometry')
                    arcpy.RepairGeometry_management(shp_with_full_path)
                    print('Repaired Geometry')
                    outname_clipped_lyr = os.path.join(full_folder_path, 'clipped_' + i)
                    clipped_lyr = arcpy.Clip_analysis(shp_with_full_path, clipping_polygon, outname_clipped_lyr)
                    print(clipped_lyr)
                    lyr = arcpy.mapping.Layer(outname_clipped_lyr)
                    print(lyr)
                    arcpy.ApplySymbologyFromLayer_management(
                        lyr, sym_layer_with_full_path)
                    arcpy.mapping.AddLayer(df, lyr, "TOP")
                    print('Added ' + i + ' to ' + each_map)
        outmap = each_map_no_ext[0] + '_gen_lyrs_added.mxd'
        mxd.saveACopy(outmap)
def _FindSolutionFiles(filepath):
    """Find solution files by searching upwards in the file tree."""
    folder = os.path.dirname(filepath)
    solutionfiles = glob.glob1(folder, '*.sln')
    while not solutionfiles:
        lastfolder = folder
        folder = os.path.dirname(folder)
        if folder == lastfolder:
            break
        solutionfiles = glob.glob1(folder, '*.sln')
    return solutionfiles, folder
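A hypothetical usage sketch for the function above (the path is invented), mainly to make the termination condition explicit: os.path.dirname stops changing once the filesystem root is reached, so the loop either finds a folder containing .sln files or gives up at the root.

# Hypothetical call; the path does not refer to a real project.
solutionfiles, folder = _FindSolutionFiles("/home/user/projects/app/src/Main.cs")
if solutionfiles:
    print "Found %s in %s" % (solutionfiles[0], folder)
else:
    print "No .sln found up to %s" % folder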
def load(context):
    route = context['route']
    destiny = context['destiny']
    Picture = context['Picture']
    db = context['db']
    User = context['User']
    total = 0
    # users are the first level in the folder tree
    users = sorted([folder for folder in os.listdir(route)
                    if os.path.isdir(os.path.join(route, folder))])
    # make sure the folders are valid; the check function removes the ones that are not
    users = check_users(db, User, users)
    # for each user
    print "Adding new images to the database"
    for user in users:
        userroute = os.path.join(route, user)
        # get the years for this user
        years = sorted([folder for folder in os.listdir(userroute)
                        if os.path.isdir(os.path.join(userroute, folder))])
        # for each year
        for year in years:
            yearroute = os.path.join(userroute, year)
            # get the months of this year
            months = sorted([folder for folder in os.listdir(yearroute)
                             if os.path.isdir(os.path.join(yearroute, folder))])
            # for each month
            for month in months:
                monthroute = os.path.join(yearroute, month)
                days = sorted([folder for folder in os.listdir(monthroute)
                               if os.path.isdir(os.path.join(monthroute, folder))])
                for day in days:
                    dayroute = os.path.join(monthroute, day)
                    #pictures = [folder for folder in os.listdir(dayroute)]
                    # here we have all the pictures for this particular day; collect their relative paths
                    pictures = glob.glob1(dayroute, '*jpg')
                    pictures.extend(glob.glob1(dayroute, '*.png'))
                    pictures = sorted(pictures)
                    print "Importing %s pictures" % len(pictures)
                    total += len(pictures)
                    for picture in pictures:
                        path = os.path.join(user, year, month, day, picture)
                        time = getTimeFromName(picture)
                        nDatetime = getDateTimeFromParams(year, month, day, time)
                        nPicture = Picture(path=path, user_id=user, label_id=DEFAULT_LABEL, date=nDatetime)
                        # we now have the new picture object; store it in the database
                        db.session.add(nPicture)
                        db.session.commit()
    # move all the files once they have been inserted into the database
    moveFilesToFolder(route, destiny, users)
    if total > 0:
        print "Up to %s pictures imported, let's tag them!!" % total
    else:
        print "0 pictures imported, looks like something went wrong :("
def compareDir(*args):
    firstdir, seconddir, fileextension = args
    path.normcase(firstdir)
    path.normcase(seconddir)
    firstallfile = glob.glob1(firstdir, fileextension)
    secondallfile = glob.glob1(seconddir, fileextension)
    firstDiffToSecond = [f for f in firstallfile if f not in secondallfile]
    print "firstDiffToSecond"
    print firstDiffToSecond
    secondDiffToFirst = [f for f in secondallfile if f not in firstallfile]
    print "secondDiffToFirst"
    print secondDiffToFirst
def run(logger=None):
    shapefiles_dir = download_shapefiles(shapefile_url, logger)
    try:
        shapefile = os.path.join(
            shapefiles_dir, glob.glob1(shapefiles_dir, "M274TaxPar.shp")[0])
        assessor_data = os.path.join(
            shapefiles_dir, glob.glob1(shapefiles_dir, "M274Assess.dbf")[0])
        import_shapes(shapefile, logger)
        add_assessor_data(assessor_data, logger)
    finally:
        shutil.rmtree(shapefiles_dir)
def test_soundcloud_hard(self):
    for f in glob.glob('*.mp3'):
        os.unlink(f)
    mp3_count = len(glob.glob1('', "*.mp3"))
    vargs = {'folders': False, 'group': False, 'track': '', 'num_tracks': 9223372036854775807,
             'bandcamp': False, 'downloadable': False, 'likes': False, 'open': False,
             'artist_url': 'puptheband'}
    process_soundcloud(vargs)
    new_mp3_count = len(glob.glob1('', "*.mp3"))
    self.assertTrue(new_mp3_count > mp3_count)
    for f in glob.glob('*.mp3'):
        os.unlink(f)
def test_bandcamp_slashes(self):
    for f in glob.glob('*.mp3'):
        os.unlink(f)
    mp3_count = len(glob.glob1('', "*.mp3"))
    vargs = {'path': '', 'folders': False, 'group': False, 'track': '', 'num_tracks': 9223372036854775807,
             'bandcamp': False, 'downloadable': False, 'likes': False, 'open': False,
             'artist_url': 'https://defill.bandcamp.com/track/amnesia-chamber-harvest-skit'}
    process_bandcamp(vargs)
    new_mp3_count = len(glob.glob1('', "*.mp3"))
    self.assertTrue(new_mp3_count > mp3_count)
    for f in glob.glob('*.mp3'):
        os.unlink(f)
def test_bandcamp(self):
    for f in glob.glob('*.mp3'):
        os.unlink(f)
    mp3_count = len(glob.glob1('', "*.mp3"))
    vargs = {'path': '', 'folders': False, 'group': False, 'track': '', 'num_tracks': 9223372036854775807,
             'bandcamp': False, 'downloadable': False, 'likes': False, 'open': False,
             'artist_url': 'https://atenrays.bandcamp.com/track/who-u-think'}
    process_bandcamp(vargs)
    new_mp3_count = len(glob.glob1('', "*.mp3"))
    self.assertTrue(new_mp3_count > mp3_count)
    for f in glob.glob('*.mp3'):
        os.unlink(f)
def test_soundcloud(self):
    for f in glob.glob('*.mp3'):
        os.unlink(f)
    mp3_count = len(glob.glob1('', "*.mp3"))
    vargs = {'path': '', 'folders': False, 'group': False, 'track': '', 'num_tracks': 9223372036854775807,
             'bandcamp': False, 'downloadable': False, 'likes': False, 'open': False,
             'artist_url': 'https://soundcloud.com/fzpz/revised', 'keep': True}
    process_soundcloud(vargs)
    new_mp3_count = len(glob.glob1('', "*.mp3"))
    self.assertTrue(new_mp3_count > mp3_count)
    for f in glob.glob('*.mp3'):
        os.unlink(f)
def test_audiomack(self):
    for f in glob.glob('*.mp3'):
        os.unlink(f)
    mp3_count = len(glob.glob1('', "*.mp3"))
    vargs = {'path': '', 'folders': False, 'group': False, 'track': '', 'num_tracks': 9223372036854775807,
             'bandcamp': False, 'audiomack': True, 'downloadable': False, 'likes': False, 'open': False,
             'artist_url': 'https://www.audiomack.com/song/bottomfeedermusic/power'}
    process_audiomack(vargs)
    new_mp3_count = len(glob.glob1('', "*.mp3"))
    self.assertTrue(new_mp3_count > mp3_count)
    for f in glob.glob('*.mp3'):
        os.unlink(f)
def test_musicbed(self):
    for f in glob.glob('*.mp3'):
        os.unlink(f)
    mp3_count = len(glob.glob1('', "*.mp3"))
    vargs = {'login': '******', 'password': '******', 'path': '', 'folders': False, 'group': False,
             'track': '', 'num_tracks': 9223372036854775807, 'bandcamp': False, 'downloadable': False,
             'likes': False, 'open': False,
             'artist_url': 'https://www.musicbed.com/albums/be-still/2828'}
    process_musicbed(vargs)
    new_mp3_count = len(glob.glob1('', "*.mp3"))
    self.assertTrue(new_mp3_count > mp3_count)
    for f in glob.glob('*.mp3'):
        os.unlink(f)
def setUp(self):
    self.btdir = './backtraces'
    self.konqifiles = [os.path.join(self.btdir, f) for f in glob.glob1(self.btdir, 'konqi*')]
    self.abrtfiles = [os.path.join(self.btdir, f) for f in glob.glob1(self.btdir, 'abrt*')]
    self.gdbfiles = [os.path.join(self.btdir, f) for f in glob.glob1(self.btdir, '*.gdb')]
    # files that look like gdb
    self.abrtgdbfiles = [os.path.join(self.btdir, f) for f in glob.glob1(self.btdir, '_abrt*')]
    # files that are expected to raise an exception
    self.expect2fail = [os.path.join(self.btdir, f) for f in glob.glob1(self.btdir, '*fail*')]
def test_soundcloud(self):
    for f in glob.glob('*.mp3'):
        os.unlink(f)
    mp3_count = len(glob.glob1('', "*.mp3"))
    vargs = {'folders': False, 'group': False, 'track': '', 'num_tracks': 9223372036854775807,
             'bandcamp': False, 'downloadable': False, 'likes': False, 'open': False,
             'artist_url': 'https://soundcloud.com/bxsswxrshp/the-king-is-dead-and-i-couldnt-be-happier'}
    process_soundcloud(vargs)
    new_mp3_count = len(glob.glob1('', "*.mp3"))
    self.assertTrue(new_mp3_count > mp3_count)
    for f in glob.glob('*.mp3'):
        os.unlink(f)
#     area.type = original_type
#
###############################################################################
directory = os.path.dirname(os.path.realpath(sys.argv[0])) + '/SFT_with_CNN/'
#outdir = directory + 'datasets/dataset_def_rt+fl+l+bg/train/'
#outdir = '/home/arvardaz/Dropbox/datasets/dataset_def_rt+fl+l+bg5/train/'
outdir = '/home/arvardaz/Dropbox/datasets/pillow_deform/'
#outdir_test = '/home/arvardaz/Dropbox/datasets/fl/def_rt+fl+l+bg/test/'
#outdir = directory + '/temp/'

# background
#bg_dir = directory + 'SBU-RwC90/mixed/slices/'
bg_dir = '/home/arvardaz/SFT_with_CNN/thesis/SBU-RwC90/mixed/slices/'
bg_list = glob1(bg_dir, '*.jpg')

iters = 200
frames = 20

## Import object ##############################################################
#bpy.ops.import_scene.obj(filepath = directory + '3D_models/American_pillow/3d_decimated_norm/pillow_2k.obj')
#bpy.ops.wm.open_mainfile(filepath = '/home/arvardaz/SFT_with_CNN/deformation.blend')
bpy.ops.wm.open_mainfile(
    filepath='/home/arvardaz/SFT_with_CNN/datagen/pillow_def.blend')

cam = prepareCamera((0, 30, 0))  #(0, 40, 0)
###############################################################################
filename = str(time())