def record(self):
        #Start recording:
        self.status = 1
        while self.status:
            try:
                received_line = self.get_message()[:-1]  # strip the trailing \r
                temp = received_line.split(" ")
                distance = int(temp[0][1:])
                activeValue = int(temp[1][1:])
                if distance > settings.DMAX:
                    activeValue = 0
                else:
                    activeValue = 1
                logging.info(received_line)
                
                # Write to a temp file, then rename it to the final file:
                # on macOS, Linux and other Unixes the rename is atomic,
                # and on Windows it's at least fast.
                open(settings.OUPTUT_FILE + ".temp", "wb").write(json.dumps([{"active": activeValue}]))
                shutil.move(settings.OUPTUT_FILE + ".temp", settings.OUPTUT_FILE)
                if not settings.SILENT:
                    print "\nsensor data: " + received_line
                    print "Max distance: %s in." % (settings.DMAX,)

                time.sleep(settings.RATE)
            except ValueError:
                if not settings.SILENT:
                    print "Serial error, skipping measurement"
            except (KeyboardInterrupt, SystemExit):
                self.cur.close()
                self.db.close()
                raise
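
The temp-file-then-rename trick used above is the standard way to publish a file so readers never observe a partial write. Below is a minimal standalone sketch of the same idea; the function and file names are illustrative, not taken from the example. On Python 3, os.replace gives an atomic overwrite on POSIX and also replaces an existing file on Windows, as long as source and destination are on the same filesystem:

import json
import os
import tempfile

def write_json_atomically(payload, final_path):
    # Create the temp file next to the destination so the rename never crosses filesystems.
    directory = os.path.dirname(os.path.abspath(final_path))
    fd, tmp_path = tempfile.mkstemp(dir=directory, suffix=".tmp")
    try:
        with os.fdopen(fd, "w") as tmp_file:
            json.dump(payload, tmp_file)
            tmp_file.flush()
            os.fsync(tmp_file.fileno())
        os.replace(tmp_path, final_path)  # atomic publish
    except BaseException:
        os.unlink(tmp_path)
        raise

write_json_atomically([{"active": 1}], "sensor_status.json")
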
Example #2
def savefile(path, tagdata):
    """Saves tagdata to file at path."""
    fp = open(path + ".new", "w")
    pickle.dump(tagdata, fp)
    fp.close()

    shutil.move(path + ".new", path)
Example #3
	def __update_hotkey(self, command, hotkey):
		""" Update the hotkey for 'command' to 'hotkey'. """
		""" If 'command' is not found, add it with the new 'hotkey'. """
		""" Return 'True' on success, 'False' otherwise. """
		self.__touch_config_file()
		oldfile = open(XBINDKEYS_CONFIG_FILE, "r")
		newfile = open(XBINDKEYS_CONFIG_FILE + ".new", "w")
		# Search for command
		commandfound = False
		skipnextline = False
		for line in oldfile:
			if skipnextline == False:
				newfile.write(line)
			else:
				skipnextline = False
			if line == '"' + command + '"\n':
				newfile.write("  " + hotkey + "\n") # update hotkey
				commandfound = True
				skipnextline = True
		if commandfound == False:
			# command not found, add it
			newfile.write('"' + command + '"\n')
			newfile.write("  " + hotkey + "\n")
		oldfile.close()
		newfile.close()
		try:
			os.remove(XBINDKEYS_CONFIG_FILE)
		except:
			sessionlog.write("ERROR: 'Hotkeys.__update_hotkey()' - Cannot replace '" + XBINDKEYS_CONFIG_FILE + "'.")
			os.remove(XBINDKEYS_CONFIG_FILE + ".new")
			return False
		shutil.move(XBINDKEYS_CONFIG_FILE + ".new", XBINDKEYS_CONFIG_FILE)
		return True
Example #4
def generateLocalConfig(localFile, build):
    """
    Create resources/conf/localconfig.json and set the build values
    appropriately.
    """
    tmpOut = localFile + ".out"
    with open(tmpOut, 'w') as f:
        if os.path.exists(localFile):
            data = json.load(open(localFile))
        else:
            data = {}

        if build == 'debug-android' or build == 'debug-ios' or build == 'billingtest-android':
            data['release'] = "debug"
        elif build == 'ads-android' or build == 'ads-ios':
            data['release'] = "ads"
        elif build == 'paid-android' or build == 'paid-ios':
            data['release'] = "paid"
        elif build == 'samsung-android':
            data['release'] = "samsung"
        else:
            assert 0, "run with build=something"

        # force off the debug flag while building.
        if data['release'] in ['paid', 'ads']:
            data['debug'] = False

        json.dump(data, f, indent=4)

    shutil.move(tmpOut, localFile)
Example #5
	def __remove_hotkey(self, command):
		""" Remove the hotkey for 'command' (and 'command' too, of course). """
		""" Return 'True' on success, 'False' otherwise. """
		self.__touch_config_file()
		oldfile = open(XBINDKEYS_CONFIG_FILE, "r")
		newfile = open(XBINDKEYS_CONFIG_FILE + ".new", "w")
		commandfound = False
		skipnextline = False
		for line in oldfile:
			if skipnextline != True:
				if line != '"' + command + '"\n':
					newfile.write(line)
				else:
					commandfound = True
					skipnextline = True
			else:
				skipnextline = False
		oldfile.close()
		newfile.close()
		if commandfound == True:
			try:
				os.remove(XBINDKEYS_CONFIG_FILE)
			except:
				sessionlog.write("ERROR: 'Hotkeys.__remove_hotkey()' - Cannot replace '" + XBINDKEYS_CONFIG_FILE + "'.")
				os.remove(XBINDKEYS_CONFIG_FILE + ".new")
				return False
			shutil.move(XBINDKEYS_CONFIG_FILE + ".new", XBINDKEYS_CONFIG_FILE)
		else:
			os.remove(XBINDKEYS_CONFIG_FILE + ".new")
		return True
Example #6
def get(url, path, verbose=False):
    sha_url = url + ".sha256"
    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
        temp_path = temp_file.name
    with tempfile.NamedTemporaryFile(suffix=".sha256", delete=False) as sha_file:
        sha_path = sha_file.name

    try:
        download(sha_path, sha_url, False, verbose)
        if os.path.exists(path):
            if verify(path, sha_path, False):
                if verbose:
                    print("using already-downloaded file " + path)
                return
            else:
                if verbose:
                    print("ignoring already-downloaded file " + path + " due to failed verification")
                os.unlink(path)
        download(temp_path, url, True, verbose)
        if not verify(temp_path, sha_path, verbose):
            raise RuntimeError("failed verification")
        if verbose:
            print("moving {} to {}".format(temp_path, path))
        shutil.move(temp_path, path)
    finally:
        delete_if_present(sha_path, verbose)
        delete_if_present(temp_path, verbose)
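
The get() helper above leans on download(), verify() and delete_if_present() functions defined elsewhere in that script and not shown here. Purely as an assumption about verify()'s contract (the downloaded file plus a .sha256 file whose first token is the expected hex digest), a rough hashlib-based sketch could look like this:

import hashlib

def verify(path, sha_path, verbose):
    # Hypothetical stand-in: compare the file's SHA-256 against the recorded digest.
    with open(sha_path) as f:
        expected = f.read().split()[0].strip()
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 16), b""):
            digest.update(chunk)
    ok = digest.hexdigest() == expected
    if verbose:
        print("verification " + ("passed" if ok else "failed") + " for " + path)
    return ok
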
Example #7
def docss():
    """ Compresses the CSS files """

    listCSS = []

    theme = settings.get_theme()
    print "Using theme %s" % theme
    css_cfg = os.path.join("..", "..", "..", "private", "templates", theme, "css.cfg")
    f = open(css_cfg, "r")
    files = f.readlines()
    f.close()
    for file in files[:-1]:
        p = re.compile("(\n|\r|\t|\f|\v)+")
        file = p.sub("", file)
        listCSS.append("../../styles/%s" % file)

    outputFilenameCSS = "eden.min.css"

    # Merge CSS files
    print "Merging Core styles."
    mergedCSS = mergeCSS(listCSS, outputFilenameCSS)

    # Compress CSS files
    print "Writing to %s." % outputFilenameCSS
    compressCSS(mergedCSS, outputFilenameCSS)

    # Move files to correct locations
    print "Deleting %s." % outputFilenameCSS
    try:
        os.remove("../../themes/%s/%s" % (theme, outputFilenameCSS))
    except:
        pass
    print "Moving new %s." % outputFilenameCSS
    shutil.move(outputFilenameCSS, "../../themes/%s" % theme)
Example #8
  def _fetch_pkg(self, gopath, pkg, rev):
    """Fetch the package and setup symlinks."""
    fetcher = self._get_fetcher(pkg)
    root = fetcher.root()
    root_dir = os.path.join(self.workdir, 'fetches', root, rev)

    # Only fetch each remote root once.
    if not os.path.exists(root_dir):
      with temporary_dir() as tmp_fetch_root:
        fetcher.fetch(dest=tmp_fetch_root, rev=rev)
        safe_mkdir(root_dir)
        for path in os.listdir(tmp_fetch_root):
          shutil.move(os.path.join(tmp_fetch_root, path), os.path.join(root_dir, path))

    # TODO(John Sirois): Circle back and get get rid of this symlink tree.
    # GoWorkspaceTask will further symlink a single package from the tree below into a
    # target's workspace when it could just be linking from the fetch_dir.  The only thing
    # standing in the way is a determination of what we want to artifact cache.  If we don't
    # want to cache fetched zips, linking straight from the fetch_dir works simply.  Otherwise
    # thought needs to be applied to using the artifact cache directly or synthesizing a
    # canonical owner target for the fetched files that 'child' targets (subpackages) can
    # depend on and share the fetch from.
    dest_dir = os.path.join(gopath, 'src', root)
    # We may have been `invalidate`d and not `clean-all`ed so we need a new empty symlink
    # chroot to avoid collision; thus `clean=True`.
    safe_mkdir(dest_dir, clean=True)
    for path in os.listdir(root_dir):
      os.symlink(os.path.join(root_dir, path), os.path.join(dest_dir, path))
Example #9
    def configure_linter(self, language):
        """Fill out the template and move the linter into Packages."""

        try:
            if language is None:
                return

            if not self.fill_template(self.temp_dir, self.name, self.fullname, language):
                return

            git = util.which('git')

            if git:
                subprocess.call((git, 'init', self.temp_dest))

            shutil.move(self.temp_dest, self.dest)

            util.open_directory(self.dest)
            self.wait_for_open(self.dest)

        except Exception as ex:
            sublime.error_message('An error occurred while configuring the plugin: {}'.format(str(ex)))

        finally:
            if self.temp_dir and os.path.exists(self.temp_dir):
                shutil.rmtree(self.temp_dir)
Example #10
def moveOutput_VRay(arg,frameNr, verbose):
    if ((not arg.ElementsFolder) or (not arg.vraySeperateRenderChannels)):
        return
    import shutil
    VRayExt=arg.FExt
    if (arg.vraySeperateRenderChannels):
        VRayExt=os.path.splitext(MaxPlus.Core.EvalMAXScript("renderers.production.output_splitfilename").Get())[1]
        if (len(VRayExt)==0):
            VRayExt=arg.FExt
    FNameVar_Rendered = arg.FNameVar
    FNameVar_Rendered= FNameVar_Rendered.replace("<Channel>\\","")
    if (argValid(arg.FNameVar) and MaxPlus.Core.EvalMAXScript("(maxOps.GetCurRenderElementMgr()).GetElementsActive()").Get()):
        nrOfElements=MaxPlus.Core.EvalMAXScript("(maxOps.GetCurRenderElementMgr()).NumRenderElements()").Get()
        for elemNr in xrange(0,nrOfElements):
            elemName=MaxPlus.Core.EvalMAXScript("((maxOps.GetCurRenderElementMgr()).GetRenderElement "+str(elemNr)+").elementName").Get()
            elemName=elemName.replace(" ","_")
            fileout_Rendered=FNameVar_Rendered+str(frameNr).zfill(int(arg.FPadding))+VRayExt
            fileout_Should  =arg.FNameVar     +str(frameNr).zfill(int(arg.FPadding))+VRayExt
            fileout_Rendered=fileout_Rendered.replace("<Channel>",elemName)
            fileout_Should  =fileout_Should  .replace("<Channel>",elemName)

            if (not os.path.isfile(fileout_Rendered)):
                logMessageGen("WRN", "Element file to be moved to subfolder not found: "+fileout_Rendered,1)
                continue
            if (verbose):
                logMessage("Moving element '"+fileout_Rendered+"' => '"+fileout_Should+"'")
            shutil.move(fileout_Rendered,fileout_Should)
 def extract_zip(self, filename):
     with zipfile.ZipFile(filename) as f:
         # extractall doesn't work if zipfiles do not include directories
         # as separate members (as for github and bitbucket)
         topdirs = []
         for m in f.namelist():
             if m.endswith("/"):
                 if len(m.split("/")) == 2:
                     topdirs.append(m.split("/")[0])
                 continue
             directory = "/".join(m.split("/")[0:-2])
             if directory.find("/") == -1:
                 topdirs.append(directory)
             if (len(directory) > 0 and
                 not directory.startswith("/") and
                 (directory.find("..") == -1) and
                 not os.path.exists(directory)):
                 os.makedirs(directory)
             sys.stdout.write(".")
             f.extract(m)
         topdirs = filter(lambda x: len(x) != 0, topdirs)
         topdirs = list(set(topdirs))
         if not topdirs:
             topdirs = [f.namelist()[0]]
         if len(topdirs) > 1:
             os.makedirs(self.path)
             for d in topdirs:
                 shutil.move(d, "%s/%s" % (self.path, d))
         else:
             shutil.move(topdirs[0], self.path)
def gen_moc():
  workDir = os.getcwd() + "/Gui"
  print workDir
  dirList = os.listdir(workDir)

  for files in dirList:
    if files.endswith(".h"):
      print files
      outp = "moc_" + files[:-2] + ".cpp"
      print outp
      affected = "./Gui/" + files
      call(["moc", affected, "-o", outp])
      #os.rename(outp, "./Gui/" + outp)
      l = -1
      f = open(outp)
      lines = f.readlines()
      f.close()
      for line in lines:
        if line[0] == '#':
            break
        l = l + 1

      if l >= 0:
        f = open(outp, 'w')
        lines[l] = '\n#include "EdPrec.h"\n'
        f.writelines(lines)
        f.close()
      
      shutil.move(outp, "./Gui/" + outp)
 def filter_and_persist_proprietary_tool_panel_configs( self, tool_configs_to_filter ):
     """Eliminate all entries in all non-shed-related tool panel configs for all tool config file names in the received tool_configs_to_filter."""
     for proprietary_tool_conf in self.proprietary_tool_confs:
         persist_required = False
         tree, error_message = xml_util.parse_xml( proprietary_tool_conf )
         if tree:
             root = tree.getroot()
             for elem in root:
                 if elem.tag == 'tool':
                     # Tools outside of sections.
                     file_path = elem.get( 'file', None )
                     if file_path:
                         if file_path in tool_configs_to_filter:
                             root.remove( elem )
                             persist_required = True
                 elif elem.tag == 'section':
                     # Tools contained in a section.
                     for section_elem in elem:
                         if section_elem.tag == 'tool':
                             file_path = section_elem.get( 'file', None )
                             if file_path:
                                 if file_path in tool_configs_to_filter:
                                     elem.remove( section_elem )
                                     persist_required = True
         if persist_required:
             fh = tempfile.NamedTemporaryFile( 'wb', prefix="tmp-toolshed-fapptpc"  )
             tmp_filename = fh.name
             fh.close()
             fh = open( tmp_filename, 'wb' )
             tree.write( tmp_filename, encoding='utf-8', xml_declaration=True )
             fh.close()
             shutil.move( tmp_filename, os.path.abspath( proprietary_tool_conf ) )
             os.chmod( proprietary_tool_conf, 0644 )
Example #14
def env_teardown():
    '''
    Teardown the testing environment.
    '''
    print('\n-----DESTROY ENV')

    if len(PATHS) == 1:
        print('Env setup not completed, check network connection.')
        return

    src_log = [path for path in PATHS if '.log' in path][-1]
    dst_log = os.path.join('/tmp', 'test.log')
    try:
        delete_it(dst_log)
        shutil.move(src_log, dst_log)
    except IOError:
        pass

    env_status()

    for path in PATHS:
        delete_it(path)

    env_config_teardown()
    print('\n-----DESTROY ENV FINISHED')
Example #15
    def _stage_final_image(self):
        try:
            fs_related.makedirs(self.__ensure_isodir() + "/LiveOS")

            minimal_size = self._resparse()

            if not self.skip_minimize:
                fs_related.create_image_minimizer(self.__isodir + \
                                                      "/LiveOS/osmin.img",
                                                  self._image,
                                                  minimal_size)

            if self.skip_compression:
                shutil.move(self._image, self.__isodir + "/LiveOS/ext3fs.img")
            else:
                fs_related.makedirs(os.path.join(
                                        os.path.dirname(self._image),
                                        "LiveOS"))
                shutil.move(self._image,
                            os.path.join(os.path.dirname(self._image),
                                         "LiveOS", "ext3fs.img"))
                fs_related.mksquashfs(os.path.dirname(self._image),
                           self.__isodir + "/LiveOS/squashfs.img")

            self.__create_iso(self.__isodir)

            if self.pack_to:
                isoimg = os.path.join(self._outdir, self.name + ".iso")
                packimg = os.path.join(self._outdir, self.pack_to)
                misc.packing(packimg, isoimg)
                os.unlink(isoimg)

        finally:
            shutil.rmtree(self.__isodir, ignore_errors = True)
            self.__isodir = None
Example #16
    def _stage_final_image(self):
        try:
            makedirs(self.__ensure_isodir() + "/LiveOS")

            self._resparse()

            if not self.skip_minimize:
                create_image_minimizer(self.__isodir + "/LiveOS/osmin.img", self._image, self.compress_type)

            if self.skip_compression:
                shutil.move(self._image, self.__isodir + "/LiveOS/ext3fs.img")
                if os.stat(self.__isodir + "/LiveOS/ext3fs.img").st_size >= 4*1024*1024*1024:
                    self._isofstype = "udf"
                    logging.warn("Switching to UDF due to size of LiveOS/ext3fs.img")
            else:
                makedirs(os.path.join(os.path.dirname(self._image), "LiveOS"))
                shutil.move(self._image,
                            os.path.join(os.path.dirname(self._image),
                                         "LiveOS", "ext3fs.img"))
                mksquashfs(os.path.dirname(self._image),
                           self.__isodir + "/LiveOS/squashfs.img",
                           self.compress_type)
                if os.stat(self.__isodir + "/LiveOS/squashfs.img").st_size >= 4*1024*1024*1024:
                    self._isofstype = "udf"
                    logging.warn("Switching to UDF due to size of LiveOS/squashfs.img")


            self.__create_iso(self.__isodir)
        finally:
            shutil.rmtree(self.__isodir, ignore_errors = True)
            self.__isodir = None
Example #17
def hadoop_jar(stdout, stderr, environ, *args):
    if len(args) < 1:
        stderr.write('RunJar jarFile [mainClass] args...\n')
        return -1

    jar_path = args[0]
    if not os.path.exists(jar_path):
        stderr.write(
            'Exception in thread "main" java.io.IOException: Error opening job'
            ' jar: %s\n' % jar_path)
        return -1

    # only simulate for streaming steps
    if HADOOP_STREAMING_JAR_RE.match(os.path.basename(jar_path)):
        streaming_args = args[1:]
        output_idx = list(streaming_args).index('-output')
        assert output_idx != -1
        output_dir = streaming_args[output_idx + 1]
        real_output_dir = hdfs_path_to_real_path(output_dir, environ)

        mock_output_dir = get_mock_hadoop_output()
        if mock_output_dir is None:
            stderr.write('Job failed!')
            return -1

        if os.path.isdir(real_output_dir):
            os.rmdir(real_output_dir)

        shutil.move(mock_output_dir, real_output_dir)

    now = datetime.datetime.now()
    stderr.write(now.strftime('Running job: job_%Y%m%d%H%M_0001\n'))
    stderr.write('Job succeeded!\n')
    return 0
Example #18
 def _move_additional_files(self, old_filename, new_filename):
     """Move extra files, like playlists..."""
     old_path = encode_filename(os.path.dirname(old_filename))
     new_path = encode_filename(os.path.dirname(new_filename))
     patterns = encode_filename(config.setting["move_additional_files_pattern"])
     patterns = filter(bool, [p.strip() for p in patterns.split()])
     try:
         names = os.listdir(old_path)
     except os.error:
         log.error("Error: {} directory not found".format(old_path))
         return
     filtered_names = filter(lambda x: x[0] != '.', names)
     for pattern in patterns:
         pattern_regex = re.compile(fnmatch.translate(pattern), re.IGNORECASE)
         file_names = names
         if pattern[0] != '.':
             file_names = filtered_names
         for old_file in file_names:
             if pattern_regex.match(old_file):
                 new_file = os.path.join(new_path, old_file)
                 old_file = os.path.join(old_path, old_file)
                 # FIXME we shouldn't do this from a thread!
                 if self.tagger.files.get(decode_filename(old_file)):
                     log.debug("File loaded in the tagger, not moving %r", old_file)
                     continue
                 log.debug("Moving %r to %r", old_file, new_file)
                 shutil.move(old_file, new_file)
Example #19
    def __renameTopperFileStage1 (self):
        """
        Rotate (move) Topper Records File To Temp File.
        When All Nginx Processes close the file, move it to the final File in stage2.
        """
                                            
        dataFileName = "data" + str('%08d' % self.__topperFileNum) + ".data.tmp"
        
        newFileName = os.path.join(self.__conf.topperDir, self.__conf.kConf.kTopperDataDir, dataFileName)

        try:
            shutil.move(self.__conf.actualNgxRecordsLogFile,newFileName)            
            retVal = True
            self.__log("rename-topper-base-file").debug3("Rename Topper File (Rotation) = %s to Temp File = %s",self.__conf.actualNgxRecordsLogFile,dataFileName)
        except:
            self.__log("rename-topper-base-file-fail").error("Rename Topper File (Rotation) = %s  to Temp File = %s Failed",self.__conf.actualNgxRecordsLogFile, dataFileName)
            retVal = False


        if retVal:
            self.__topperTempFile = newFileName

            self.__topperFileNum =  self.__topperFileNum + 1
    
            # Wrap around at 8 digits because of the '%08d' format above
            self.__topperFileName = self.__topperFileNum % 100000000
        else:
            self.__topperTempFile = ""

        
        return retVal
Example #20
	def _initialize_metadata(self):
		self._logger.info("Initializing the file metadata for {}...".format(self.basefolder))

		old_metadata_path = os.path.join(self.basefolder, "metadata.yaml")
		backup_path = os.path.join(self.basefolder, "metadata.yaml.backup")

		if os.path.exists(old_metadata_path):
			# load the old metadata file
			try:
				with open(old_metadata_path) as f:
					import yaml
					self._old_metadata = yaml.safe_load(f)
			except:
				self._logger.exception("Error while loading old metadata file")

			# make sure the metadata is initialized as far as possible
			self._list_folder(self.basefolder)

			# rename the old metadata file
			self._old_metadata = None
			try:
				import shutil
				shutil.move(old_metadata_path, backup_path)
			except:
				self._logger.exception("Could not rename old metadata.yaml file")

		else:
			# make sure the metadata is initialized as far as possible
			self._list_folder(self.basefolder)

		self._logger.info("... file metadata for {} initialized successfully.".format(self.basefolder))
 def _move_into_dir(self, file, dir):
     if not self._dryrun and not os.path.exists(dir):
         os.makedirs(dir)
     filename = os.path.basename(file)
     if not self._dryrun:
         shutil.move(file, os.path.join(dir, filename))
     print file, " => ", dir
 def cleanup(self, action):
     if not self.steps_filename:
         return
     if not self.question_yes_no("All unused PPM files will be moved to a"
                                 " backup directory. Are you sure?",
                                 "Clean up data directory?"):
         return
     # Remember the current step index
     current_step_index = self.current_step_index
     # Get the backup dir
     backup_dir = os.path.join(self.steps_data_dir, "backup")
     # Create it if it doesn't exist
     if not os.path.exists(backup_dir):
         os.makedirs(backup_dir)
     # Move all files to the backup dir
     for filename in glob.glob(os.path.join(self.steps_data_dir,
                                            "*.[Pp][Pp][Mm]")):
         shutil.move(filename, backup_dir)
     # Get the used files back
     for step in self.steps:
         self.set_state_from_step_lines(step, backup_dir, warn=False)
         self.get_step_lines(self.steps_data_dir)
     # Remove the used files from the backup dir
     used_files = os.listdir(self.steps_data_dir)
     for filename in os.listdir(backup_dir):
         if filename in used_files:
             os.unlink(os.path.join(backup_dir, filename))
     # Restore step index
     self.set_step(current_step_index)
     # Inform the user
     self.message("All unused PPM files may be found at %s." %
                  os.path.abspath(backup_dir),
                  "Clean up data directory")
Example #23
    def _put_filename(self, key, filename):
        target = self._build_filename(key)
        shutil.move(filename, target)

        # we do not know the permissions of the source file, rectify
        self._fix_permissions(target)
        return key
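
Note that shutil.move keeps whatever mode the source file had (a plain rename preserves it, and the copy fallback uses copy2), which is why the code above resets permissions afterwards. The _fix_permissions method itself is not shown; a plausible sketch, assuming the store simply wants a fixed mode, might be:

import os
import stat

def _fix_permissions(self, path):
    # Hypothetical helper: normalize to owner read/write, group and others read-only.
    os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
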
Example #24
    def make_target_directory(self, path):
        path = os.path.abspath(path)
        try:
            os.makedirs(path)
        except OSError as e:
            self.abort('Could not create target folder: %s' % e)

        if os.path.isdir(path):
            try:
                if len(os.listdir(path)) != 0:
                    raise OSError('Directory not empty')
            except OSError as e:
                self.abort('Bad target folder: %s' % e)

        scratch = os.path.join(tempfile.gettempdir(), uuid.uuid4().hex)
        os.makedirs(scratch)
        try:
            yield scratch
        except:
            shutil.rmtree(scratch)
            raise
        else:
            # Use shutil.move here in case we move across a file system
            # boundary.
            for filename in os.listdir(scratch):
                if isinstance(path, unicode):
                    filename = filename.decode(fs_enc)
                shutil.move(os.path.join(scratch, filename),
                            os.path.join(path, filename))
            os.rmdir(scratch)
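
The comment in the example above points at the usual reason for preferring shutil.move: a bare os.rename cannot cross a filesystem boundary and fails with OSError (errno EXDEV), whereas shutil.move falls back to copying and deleting. A simplified illustration of that fallback for a single file (not the actual shutil implementation, which also handles directories and symlinks):

import errno
import os
import shutil

def move_across_filesystems(src, dst):
    try:
        os.rename(src, dst)  # fast path: same filesystem
    except OSError as e:
        if e.errno != errno.EXDEV:
            raise
        shutil.copy2(src, dst)  # slow path: copy data and metadata...
        os.unlink(src)          # ...then remove the original
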
    def initialize(self, test, log):
        '''Does the init part of the test:
        1. Finds the initial count of entries in the log
        2. Creates a file 'cron' under cron.d
        3. Backs up /etc/crontab
        4. Modifies /etc/crontab'''
        self.log = log

        self.initial_count = self.count_log('Cron automation')
        f = open('/etc/cron.d/cron', 'w')
        f.write('''#!/bin/bash
touch  %s
echo 'Cron automation' >>  %s
        ''' % (self.log, self.log))
        f.close()
        utils.system('chmod +x /etc/cron.d/cron')
        shutil.copyfile('/etc/crontab', '/tmp/backup')
        f = open('/etc/crontab', 'w')
        f.write('* * * * * root run-parts /etc/cron.d/\n')
        f.close()
        if test == 'deny_cron':
            if os.path.exists('/etc/cron.d/jobs.deny'):
                shutil.move('/etc/cron.d/jobs.deny', '/tmp/jobs.deny')
            f = open('/etc/cron.d/jobs.deny', 'w')
            f.write('cron')
            f.close()
        elif test == 'allow_cron' :
            os.remove('/etc/cron.d/jobs.deny')
            if os.path.exists('/etc/cron.d/jobs.allow'):
                shutil.move('/etc/cron.d/jobs.allow', '/tmp/jobs.allow')
            f = open('/etc/cron.d/jobs.allow', 'w')
            f.write('cron')
            f.close()
    def forwards(self, orm):
        "Write your forwards migration here"
        for item in orm.AlbumConvertableItem.objects.all():
            try:
                image_path = item.image.image.path
            except:
                image_path = os.path.join(settings.MEDIA_ROOT, item.thumbFilename)

            try:
                os.stat(image_path)
            except OSError as e:
                if e.errno != 2:
                    raise e
                else:
                    continue

            old_dir, filename = os.path.split(image_path)
            new_path = os.path.join('albums', str(item.parent.pk), str(item.pk))
            new_dir = os.path.join(settings.MEDIA_ROOT, new_path)

            try:
                os.makedirs(new_dir)
            except OSError as e:
                if(e.errno != 17):
                    raise e
                print "Directory %s already exists" % new_dir

            print "Moving %s" % image_path
            if(image_path != os.path.join(new_dir, filename)):
                shutil.move(image_path, new_dir)
            else:
                print "Skipping"

            item.preview = os.path.join(new_path, filename)
            item.save()
Example #27
def deploy_wnmp():
    os.chdir(os.path.join(BASE_DIR, 'wnmp'))
    git_export('wnmp', TARGET_DIR)

    # PHP
    wget('http://windows.php.net/downloads/releases/'
            'php-5.4.5-Win32-VC9-x86.zip',
        sha1='028eb12e09fe011e20097c82064d6c550bf896c4')
    logging.info('Extracting PHP...')
    path = os.path.join(BASE_DIR, '_tmp', 'php')
    makedirs(path, exist_ok=True)
    ar = zipfile.ZipFile(
        os.path.join(BASE_DIR, 'php-5.4.5-Win32-VC9-x86.zip'))
    ar.extractall(path)
    shutil.rmtree(os.path.join(TARGET_DIR, 'php'))
    shutil.copytree(path, os.path.join(TARGET_DIR, 'php'))

    # nginx
    wget('http://nginx.org/download/nginx-1.2.2.zip',
        sha1='0a5dfbb766bfefa238207db25d7b64b69aa37908')
    logging.info('Extracting nginx...')
    path = os.path.join(BASE_DIR, '_tmp')
    makedirs(path, exist_ok=True)
    ar = zipfile.ZipFile(
        os.path.join(BASE_DIR, 'nginx-1.2.2.zip'))
    ar.extractall(path)
    shutil.rmtree(os.path.join(TARGET_DIR, 'nginx'))
    shutil.copytree(os.path.join(path, 'nginx-1.2.2'),
        os.path.join(TARGET_DIR, 'nginx'))
    shutil.move(os.path.join(TARGET_DIR, 'example.nginx.conf'),
        os.path.join(TARGET_DIR, 'nginx', 'conf', 'nginx.conf'))

    # cleanup
    shutil.rmtree(os.path.join(BASE_DIR, '_tmp'))
Example #28
def main(max_stations=0, folder='.'):
    try:
        makedirs(output_folder+'/'+folder)
    except OSError:
        pass

    all_files = [ f for f in listdir(data_folder) if isfile(join(data_folder,f)) and f.endswith('.gz') ]
    
    for ndf in all_files:
        string = '_%dstations' % max_stations
        new_name=ndf[:-7]+string+ndf[-7:]
        rename(data_folder+'/'+ndf, data_folder+'/'+new_name)
        
    all_files = [ f for f in listdir(data_folder) if isfile(join(data_folder,f)) and f.endswith('.gz') ]
    
    for a_f in all_files:
        move(data_folder+'/'+a_f, output_folder+'/'+folder+'/'+a_f)
        print "Moved:", a_f[0:-3]
        
    data_files = [ f for f in listdir(output_folder+'/'+folder) if isfile(join(output_folder+'/'+folder,f)) and f.endswith('.dat.gz') ]

    print "\n"

    for d_f in data_files:
        fin = gzip.open(output_folder+'/'+folder+'/'+d_f, 'rb')
        data = fin.read()
        fin.close()

        with open(output_folder+'/'+folder+'/'+d_f[0:-3],'w') as fout:
            fout.write(data)

        print "Unzipped:", d_f[0:-3]
Example #29
def docss():
    """ Compresses the CSS files """
    listCSS = []

    f = open("sahana.css.cfg", "r")
    files = f.readlines()
    f.close()
    for file in files[:-1]:
        p = re.compile("(\n|\r|\t|\f|\v)+")
        file = p.sub("", file)
        listCSS.append("../../styles/%s" % file)

    outputFilenameCSS = "sahana.min.css"

    # Merge CSS files
    print "Merging Core styles."
    mergedCSS = mergeCSS(listCSS, outputFilenameCSS)

    # Compress CSS files
    print "Writing to %s." % outputFilenameCSS
    compressCSS(mergedCSS, outputFilenameCSS)

    # Move files to correct locations
    print "Deleting %s." % outputFilenameCSS
    try:
        os.remove("../../styles/S3/%s" % outputFilenameCSS)
    except:
        pass
    print "Moving new %s." % outputFilenameCSS
    shutil.move(outputFilenameCSS, "../../styles/S3")
Example #30
    def load(self, fmu_path, output_dir):
        """Load the FMU for continued simulation in :meth:`continue_run`.
        """

        import pyfmi

        if 'log_level' in self._options:
            self.fmu = pyfmi.load_fmu(fmu_path, log_level=self.log_level)
        else:
            self.fmu = pyfmi.load_fmu(fmu_path)

        # Initialize the fmu, only call setup_experiment for FMUs 2.0
        try:
            self.fmu.setup_experiment()
            version = 2
        except AttributeError:
            version = 1

        self.fmu.initialize()

        # Copy the log file to the result directory
        log = ''
        if version == 1:
            log = self.fmu.get_identifier()
        if version == 2:
            log = self.fmu.get_name()
        log += '_log.txt'
        source = os.path.join(os.getcwd(), log)
        destination = os.path.join(output_dir, 'log%i.txt' % self.interval)
        move(source, destination)
def move_files(top_level_path):
    for folder in os.listdir(top_level_path):
        for file in os.listdir(os.path.join(top_level_path, folder)):
            print(file)  
            shutil.move(os.path.join(top_level_path, folder, file), os.path.join(top_level_path, file))
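
Flattening a tree like this can collide when two subfolders contain files with the same name; in that case shutil.move raises shutil.Error because the target name already exists inside the destination directory. A defensive variant of the same loop, sketched with a simple skip-on-collision policy:

import os
import shutil

def move_files_safely(top_level_path):
    for folder in os.listdir(top_level_path):
        folder_path = os.path.join(top_level_path, folder)
        if not os.path.isdir(folder_path):
            continue
        for name in os.listdir(folder_path):
            src = os.path.join(folder_path, name)
            dst = os.path.join(top_level_path, name)
            if os.path.exists(dst):
                print("skipping, already exists: " + dst)
                continue
            shutil.move(src, dst)
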
Example #32
def gestion_importation_sap(event):
    try:
        if event.wd == 1:
            if event.name.endswith(".csv") or event.name.endswith(".CSV"):
                log_print("Importing " + event.pathname)
                err = stock_labo.models.import_navette_sap(
                    emplacement=event.pathname,
                    nom="",
                    extension="",
                    fin_de_ligne="\r\n")
                if err == "":
                    multi_file_backups(event.path + "/backup/",
                                       event.name[:-4] + ".ok")
                    shutil.move(
                        event.pathname,
                        event.path + "/backup/" + event.name[:-4] + ".ok")
                else:
                    multi_file_backups(event.path + "/error/",
                                       event.name[:-4] + ".err")
                    multi_file_backups(event.path + "/error/",
                                       event.name[:-4] + ".log")
                    shutil.move(
                        event.pathname,
                        event.path + "/error/" + event.name[:-4] + ".err")
                    ferr = codecs.open(event.path + "/error/" +
                                       event.name[:-4] + ".log",
                                       "w",
                                       encoding="utf-8")
                    ferr.writelines(err)
                    ferr.close()

                    mail_corps = "The file %s caused an error: \n\n" % (
                        event.name)
                    ferr = codecs.open(event.path + "/error/" +
                                       event.name[:-4] + ".err",
                                       "r",
                                       encoding="utf-8")
                    mail_corps += ferr.readline()
                    ferr.close()
                    mail_corps += "\n\n"
                    mail_corps = "The file %s caused an error: \n\n" % (
                        event.name)
                    ferr = codecs.open(event.path + "/error/" +
                                       event.name[:-4] + ".log",
                                       "r",
                                       encoding="utf-8")
                    mail_corps += ferr.readline()
                    ferr.close()
                    m = mail.Mail(mail_corps)
                    m.from_("*****@*****.**")
                    m.reply_to("*****@*****.**")
                    #m.to(["*****@*****.**","*****@*****.**"])
                    m.to(
                        ["*****@*****.**", "*****@*****.**"])
                    if version.status_developement["data"] == "prod":
                        m.subject(
                            "Stock Labo Paris, error importing SAP navette file"
                        )
                    else:
                        m.subject(
                            "Ignore this email, I am running tests on the server, charly"
                        )
                    #m.send("smtp.laposte.net", "*****@*****.**", "charly.gontero", "1Addyson")
                    m.send(smtp="par-srv-cas01.eu.takasago.com",
                           expediteur="*****@*****.**",
                           user=None,
                           password=None,
                           starttls=False)
                connection_db_ouverte = True
            else:
                shutil.move(event.pathname,
                            event.path + "/error/" + event.name)
                log_print(
                    "nothing to do, the file does not have the right extension: %s"
                    % (event.name))
        else:
            log_print("nothing to do, event from a child folder, wd: %d" %
                      (event.wd))
    except:
        tb = traceback.format_exc()
        log_print("!!!!! ERROR !!!!!")
        log_print(tb)
        log_print("!!!!! ERROR !!!!!\n\n")
        raise ImportationErreur
    def generate(self, should_notify=True):
        """
        Generate the app
        :return: the path to the generated apk
        """
        self.update_status('Preparing parent source code')

        self.prepare_source()

        self.app_package_name = 'org.fossasia.openevent.' + re.sub('\W+', '', self.app_name)

        config = {
            'Email': self.creator_email,
            'App_Name': self.app_name,
            'Api_Link': self.api_link
        }

        self.update_status('Generating app configuration')

        with open(self.get_path("app/src/main/assets/config.json"), "w+") as config_file:
            config_file.write(json.dumps(config))

        self.update_status('Generating launcher icons & background image')

        resize_launcher_icon(self.app_launcher_icon, self.app_working_dir)
        resize_background_image(self.app_background_image, self.app_working_dir)

        self.update_status('Updating resources')

        replace(self.get_path("app/src/main/res/values/strings.xml"), 'OpenEvent', self.app_name)
        replace(self.get_path("app/src/main/res/layout/nav_header.xml"), 'twitter', 'background')
        replace(self.get_path("app/build.gradle"), '"org.fossasia.openevent"', '"%s"' % self.app_package_name)

        self.update_status('Loading assets')

        for f in os.listdir(self.app_temp_assets):
            path = os.path.join(self.app_temp_assets, f)
            if os.path.isfile(path):
                shutil.copyfile(path, self.get_path("app/src/main/assets/" + f))

        self.update_status('Preparing android build tools')

        build_tools_version = get_build_tools_version(self.get_path('app/build.gradle'))
        build_tools_path = os.path.abspath(os.environ.get('ANDROID_HOME') + '/build-tools/' + build_tools_version)

        self.update_status('Building android application package')

        self.run_command([os.path.abspath(self.config['BASE_DIR'] + '/scripts/build_apk.sh'), build_tools_path])

        self.update_status('Application package generated')

        self.apk_path = self.get_path('release.apk')
        if should_notify:
            self.notify()

        apk_url = '/static/releases/%s.apk' % self.identifier

        shutil.move(self.apk_path, os.path.abspath(self.config['BASE_DIR'] + '/app/' + apk_url))

        self.update_status('SUCCESS', message=apk_url)

        self.cleanup()

        return apk_url
Example #34
def backup_DB():
    os.makedirs("backup", exist_ok=True)
    shutil.move("justice.db", "backup/justice.db")
Example #35
def main(argv=None,
         experiment_name="",
         run_id=0,
         csv_filename="gpt2_parity_results.csv"):
    result = {}
    from transformers import __version__ as transformers_version

    if version.parse(transformers_version) < version.parse(
            "3.1.0"):  # past_key_values name does not exist in 3.0.2 or older
        raise RuntimeError("This tool requires transformers 3.1.0 or later.")

    args = parse_arguments(argv)
    setup_logger(args.verbose)

    if not experiment_name:
        import sys

        experiment_name = " ".join(argv if argv else sys.argv[1:])

    if args.tolerance == 0:
        args.tolerance = DEFAULT_TOLERANCE[args.precision]

    logger.info(f"Arguments:{args}")

    cache_dir = args.cache_dir
    output_dir = args.output if not args.output.endswith(
        ".onnx") else os.path.dirname(args.output)
    prepare_environment(cache_dir, output_dir, args.use_gpu)

    if args.precision != Precision.FLOAT32:
        assert args.optimize_onnx, "fp16/int8 requires --optimize_onnx"

    if args.precision == Precision.FLOAT16:
        assert args.use_gpu, "fp16 requires --use_gpu"

    if args.precision == Precision.INT8:
        assert not args.use_gpu, "quantization only supports CPU"

    model_class = MODEL_CLASSES[args.model_class][0]
    use_padding = MODEL_CLASSES[args.model_class][2]

    if args.model_class == "GPT2LMHeadModel_BeamSearchStep":
        model_type = "beam_search_step"
    elif args.model_class == "GPT2LMHeadModel_ConfigurableOneStepSearch":
        model_type = "configurable_one_step_search"
    else:
        model_type = "default"

    gpt2helper = Gpt2HelperFactory.create_helper(model_type)
    gpt2tester = Gpt2TesterFactory.create_tester(model_type)
    config = AutoConfig.from_pretrained(args.model_name_or_path,
                                        cache_dir=cache_dir)
    if model_type == "beam_search_step":
        model = model_class.from_pretrained(
            args.model_name_or_path,
            config=config,
            batch_size=1,
            beam_size=args.beam_size,
            cache_dir=cache_dir,
        )
    elif model_type == "configurable_one_step_search":
        model = model_class.from_pretrained(
            args.model_name_or_path,
            config=config,
            batch_size=1,
            beam_size=args.beam_size,
            ignore_eos=args.ignore_eos,
            temperature=args.temperature,
            repetition_penalty=args.repetition_penalty,
            excluded_token_ids=args.excluded_token_ids,
            length_penalty=args.length_penalty,
            do_sample=args.do_sample,
            do_sample_top_p=args.do_sample_top_p,
            do_sample_top_k=args.do_sample_top_k,
            cache_dir=cache_dir,
        )
    else:
        model = model_class.from_pretrained(args.model_name_or_path,
                                            config=config,
                                            cache_dir=cache_dir)

    device = torch.device("cuda:0" if args.use_gpu else "cpu")
    model.eval().to(device)

    if (not args.use_external_data_format) and (config.n_layer > 24):
        logger.info(f"Try --use_external_data_format when model size > 2GB")

    onnx_model_paths = gpt2helper.get_onnx_paths(
        output_dir,
        args.model_name_or_path,
        args.model_class,
        new_folder=(args.precision == Precision.INT8),
        remove_existing=["fp32", "fp16", "int8"],
    )  # Do not remove raw model to save time in parity test

    raw_onnx_model = onnx_model_paths["raw"]

    if os.path.exists(raw_onnx_model) and not args.overwrite:
        logger.warning(
            f"Skip exporting ONNX model since it existed: {raw_onnx_model}")
    else:
        logger.info(f"Exporting ONNX model to {raw_onnx_model}")
        gpt2helper.export_onnx(
            model,
            device,
            raw_onnx_model,
            args.verbose,
            args.use_external_data_format,
            has_position_ids=use_padding,
            has_attention_mask=use_padding,
            input_ids_dtype=torch.int32
            if args.use_int32_inputs else torch.int64,
            position_ids_dtype=torch.int32
            if args.use_int32_inputs else torch.int64,
            attention_mask_dtype=torch.int32
            if args.use_int32_inputs else torch.int64,
        )

    fp16_params = {"keep_io_types": args.keep_io_types}
    if args.io_block_list:
        fp16_params["keep_io_types"] = args.io_block_list
    if args.node_block_list:
        fp16_params["node_block_list"] = args.node_block_list
    if args.op_block_list:
        fp16_params["op_block_list"] = args.op_block_list
    if args.force_fp16_initializers:
        fp16_params["force_fp16_initializers"] = args.force_fp16_initializers

    is_io_float16 = args.precision == Precision.FLOAT16 and not args.keep_io_types

    if args.optimize_onnx or args.precision != Precision.FLOAT32:
        output_path = onnx_model_paths[str(args.precision) if args.
                                       precision != Precision.INT8 else "fp32"]

        logger.info(f"Optimizing model to {output_path}")
        gpt2helper.optimize_onnx(
            raw_onnx_model,
            output_path,
            args.precision == Precision.FLOAT16,
            model.config.num_attention_heads,
            model.config.hidden_size,
            args.use_external_data_format,
            auto_mixed_precision=args.auto_mixed_precision,
            **fp16_params,
        )
    else:
        output_path = raw_onnx_model

    if args.precision == Precision.INT8:
        logger.info("quantizing model...")
        QuantizeHelper.quantize_onnx_model(output_path,
                                           onnx_model_paths["int8"],
                                           args.use_external_data_format)
        model = QuantizeHelper.quantize_torch_model(model)
        logger.info("finished quantizing model")
        output_path = onnx_model_paths["int8"]

    if args.output.endswith(
            ".onnx"
    ) and output_path != args.output and not args.use_external_data_format:
        import shutil

        shutil.move(output_path, args.output)
        output_path = args.output

    logger.info(f"Output path: {output_path}")
    model_size_in_MB = int(
        get_onnx_model_size(output_path, args.use_external_data_format) /
        1024 / 1024)

    session = create_onnxruntime_session(output_path,
                                         args.use_gpu,
                                         enable_all_optimization=True,
                                         verbose=args.verbose)
    if args.model_class == "GPT2LMHeadModel" and session is not None:
        parity_result = gpt2helper.test_parity(
            session,
            model,
            device,
            is_io_float16,
            rtol=args.tolerance,
            atol=args.tolerance,
            model_class=args.model_class,
            has_position_ids=use_padding,
            has_attention_mask=use_padding,
            input_ids_dtype=torch.int32
            if args.use_int32_inputs else torch.int64,
            position_ids_dtype=torch.int32
            if args.use_int32_inputs else torch.int64,
            attention_mask_dtype=torch.int32
            if args.use_int32_inputs else torch.int64,
            test_cases_per_run=args.test_cases,
            total_runs=args.test_runs,
            verbose=args.verbose,
        )

        latency = gpt2helper.test_performance(
            session,
            model,
            device,
            is_io_float16,
            total_runs=100,
            use_io_binding=True,
            model_class=args.model_class,
            has_position_ids=use_padding,
            has_attention_mask=use_padding,
            input_ids_dtype=torch.int32
            if args.use_int32_inputs else torch.int64,
            position_ids_dtype=torch.int32
            if args.use_int32_inputs else torch.int64,
            attention_mask_dtype=torch.int32
            if args.use_int32_inputs else torch.int64,
            batch_size=8,
            sequence_length=1,
            past_sequence_length=32,
        )

        if args.precision == Precision.FLOAT16:
            logger.info(f"fp16 conversion parameters:{fp16_params}")

        # Write results to file
        import csv

        from onnxruntime import __version__ as ort_version

        latency_name = get_latency_name()
        csv_file_existed = os.path.exists(csv_filename)
        with open(csv_filename, mode="a", newline="") as csv_file:
            column_names = [
                "experiment",
                "run_id",
                "model_name",
                "model_class",
                "gpu",
                "precision",
                "optimizer",
                "test_cases",
                "runs",
                "keep_io_types",
                "io_block_list",
                "op_block_list",
                "node_block_list",
                "force_fp16_initializers",
                "auto_mixed_precision",
                "ORT_TRANSFORMER_OPTIONS",
                "ORT_CUDA_GEMM_OPTIONS",
                "onnxruntime",
                latency_name,
                "top1_match_rate",
                "onnx_size_in_MB",
                "diff_50_percentile",
                "diff_90_percentile",
                "diff_95_percentile",
                "diff_99_percentile",
                "diff_pass_rate",
                "nan_rate",
                "top1_match_rate_per_run",
            ]
            csv_writer = csv.DictWriter(csv_file, fieldnames=column_names)
            if not csv_file_existed:
                csv_writer.writeheader()
            row = {
                "experiment": experiment_name,
                "run_id": run_id,
                "model_name": args.model_name_or_path,
                "model_class": args.model_class,
                "gpu": args.use_gpu,
                "precision": args.precision,
                "optimizer": args.optimize_onnx,
                "test_cases": args.test_cases,
                "runs": args.test_runs,
                "keep_io_types": args.keep_io_types,
                "io_block_list": args.io_block_list,
                "op_block_list": args.op_block_list,
                "node_block_list": args.node_block_list,
                "force_fp16_initializers": args.force_fp16_initializers,
                "auto_mixed_precision": args.auto_mixed_precision,
                "ORT_TRANSFORMER_OPTIONS":
                os.getenv("ORT_TRANSFORMER_OPTIONS"),
                "ORT_CUDA_GEMM_OPTIONS": os.getenv("ORT_CUDA_GEMM_OPTIONS"),
                "onnxruntime": ort_version,
                latency_name: f"{latency:.2f}",
                "diff_50_percentile": parity_result["max_diff_percentile_50"],
                "diff_90_percentile": parity_result["max_diff_percentile_90"],
                "diff_95_percentile": parity_result["max_diff_percentile_95"],
                "diff_99_percentile": parity_result["max_diff_percentile_99"],
                "diff_pass_rate": parity_result["diff_pass_rate"],
                "nan_rate": parity_result["nan_rate"],
                "top1_match_rate": parity_result["top1_match_rate"],
                "top1_match_rate_per_run":
                parity_result["top1_match_rate_per_run"],
                "onnx_size_in_MB": "{}".format(model_size_in_MB),
            }
            logger.info(f"result: {row}")
            result.update(row)
            csv_writer.writerow(row)

    if args.input_test_file:
        test_inputs = []
        # Each line of test file is a JSON string like:
        # {"input_ids": [[14698, 257, 1310, 13688, 319, 326]]}
        with open(args.input_test_file) as read_f:
            for _, line in enumerate(read_f):
                line = line.rstrip()
                data = json.loads(line)
                input_ids = torch.from_numpy(
                    numpy.asarray(data["input_ids"],
                                  dtype=numpy.int64)).to(device)

                if use_padding:
                    if "attention_mask" in data:
                        numpy_float = numpy.float16 if is_io_float16 else numpy.float32
                        attention_mask = torch.from_numpy(
                            numpy.asarray(data["attention_mask"],
                                          dtype=numpy_float)).to(device)
                    else:
                        padding = -1
                        attention_mask = (input_ids != padding).type(
                            torch.float16 if is_io_float16 else torch.float32)
                        input_ids.masked_fill_(input_ids == padding, 0)

                    if "position_ids" in data:
                        position_ids = torch.from_numpy(
                            numpy.asarray(data["position_ids"],
                                          dtype=numpy.int64)).to(device)
                    else:
                        position_ids = attention_mask.long().cumsum(-1) - 1
                        position_ids.masked_fill_(position_ids < 0, 0)

                    inputs = {
                        "input_ids":
                        input_ids.to(torch.int32)
                        if args.use_int32_inputs else input_ids,
                        "position_ids":
                        position_ids.to(torch.int32)
                        if args.use_int32_inputs else position_ids,
                        "attention_mask":
                        attention_mask.to(torch.int32)
                        if args.use_int32_inputs else attention_mask,
                    }
                else:
                    inputs = {
                        "input_ids":
                        input_ids.to(torch.int32)
                        if args.use_int32_inputs else input_ids
                    }

                if model_type == "beam_search_step" or model_type == "configurable_one_step_search":
                    beam_select_idx = torch.zeros([1,
                                                   input_ids.shape[0]]).long()

                    input_log_probs = torch.zeros([input_ids.shape[0], 1])
                    input_unfinished_sents = torch.ones(
                        [input_ids.shape[0], 1], dtype=torch.bool)
                    inputs.update({
                        "beam_select_idx":
                        beam_select_idx,
                        "input_log_probs":
                        input_log_probs,
                        "input_unfinished_sents":
                        input_unfinished_sents,
                    })

                test_inputs.append(inputs)

        gpt2tester.test_generation(
            session,
            model,
            device,
            test_inputs,
            precision=args.precision,
            model_class=args.model_class,
            top_k=20,
            top_k_no_order=True,
            max_steps=24,
            max_inputs=0,
            verbose=args.verbose,
            save_test_data=3,
            save_test_data_dir=Path(output_path).parent,
        )

    logger.info(f"Done. Output model: {output_path}")
    return result
Example #36
import os
import re
import shutil

datePattern = re.compile(
    r"""^(.*?)((0|1)?\d)-((0|1|2|3)?\d)-((19|20)?\d\d)(.*?)$""", re.VERBOSE)
#zeroPattern = re.compile(r"""^(.*?)(0?)(.*?)$""", re.VERBOSE)

# TODO: Loop over the files in the working directory.

for amerFile in os.listdir('/somedir'):
    mo = datePattern.search(amerFile)
    if mo is None:
        continue
    before = mo.group(1)
    month = mo.group(2)
    day = mo.group(4)
    year = mo.group(6)
    after = mo.group(8)

    euroFile = before + day + '-' + month + '-' + year + after

    absWorkingDir = os.path.abspath('.')
    amerFile = os.path.join(absWorkingDir, amerFile)
    euroFile = os.path.join(absWorkingDir, euroFile)

    print('Renaming "%s" to "%s" ...' % (amerFile, euroFile))
    shutil.move(amerFile, euroFile)

# TODO: Skip files without a date.
# TODO: Get the different parts of the filename.
# TODO: Form the European-style filename.
# TODO: Get the full, absolute file paths.
# TODO: Rename the files.
Example #37
def main():
	global connect, cursor
	
	try:
		noise_query=[]
		file_list=sorted(glob.glob('*.txt')) # by default sort by name;	key = lambda file: os.path.getctime(file)) # find all txt in the folders
		# try:
		we_have_data=False
		for file_temp in file_list:

			# print(file_temp)
			with open(file_temp, 'r', encoding='cp1251') as txt:
				entry = list()
				all_strings=txt.readlines()
				
			for string in all_strings:
				value=string.strip().split('=')[1]
				entry.append(value)

			if len(entry)==0:
				continue
			# print('ENTRY', entry)
			sql_query = single_parse(entry)
			
			# CHECK, THAT WE HAVE time in air > than this noise time (means potential aircraft already in DB)
			
			if (sql_query[1].startswith("VNK")):
				SQL = '''
				SELECT (time_track) FROM eco.tracks ORDER BY time_track desc LIMIT 1;
				'''
			elif (sql_query[1].startswith("OMSK")):
				SQL = '''
				SELECT (time_track) FROM omsk.tracks ORDER BY time_track desc LIMIT 1;
				'''


			cursor.execute(SQL)
			last_time_air= cursor.fetchall()
			# print(last_time_air1)
			if len(last_time_air)>0: # the table can be empty..
				last_time_air=last_time_air[0][0]

			else:
				shutil.move(file_temp, path+"/no_aircraft/"+file_temp)
				continue # go to next file
			# print('LAST AIR:', last_time_air)
			# print('CURRENT NOISE:', sql_query[0])

			if last_time_air-sql_query[0]>minimum_time:#minimum_time: # if noise_time is greater than noise datetime by 10 sec -> fine
				# print('AIR IS ALREADY IN DB')
				# SQL = '''
				# SELECT (track, distance_1, time_track) FROM eco.tracks WHERE time_track - (%s) <=  INTERVAL '10 seconds' and (%s)-time_track <=  INTERVAL '10 seconds' and distance_1 IS NOT NULL ORDER BY distance_1 asc LIMIT 1;
				# '''

				# dont forget make index on time_track by CREATE INDEX idx_time_track ON eco.tracks ( time_track ); check index,  via \d eco.tracks;
				# CREATE INDEX time_dist_track ON eco.tracks  (time_track, distance_1, track);




				if (sql_query[1].startswith("VNK")):
					SQL='''
					SELECT (track, distance_1, time_track)
					  FROM eco.tracks 
					  WHERE time_track >= (%s)::timestamp - INTERVAL '10 seconds'
					    and time_track <= (%s)::timestamp + INTERVAL '10 seconds'
					    and distance_1 IS NOT NULL
					ORDER BY distance_1 asc LIMIT 1;
					'''
				elif (sql_query[1].startswith("OMSK")):
					SQL='''
					SELECT (track, distance_1, time_track)
					  FROM omsk.tracks 
					  WHERE time_track >= (%s)::timestamp - INTERVAL '10 seconds'
					    and time_track <= (%s)::timestamp + INTERVAL '10 seconds'
					    and distance_1 IS NOT NULL
					ORDER BY distance_1 asc LIMIT 1;
					'''



				# print('SQL QUERY', sql_query)
				data = (sql_query[0],sql_query[0])
				cursor.execute(SQL, data)
				answer= cursor.fetchall()
			
				if len(answer)==0: ## 
					shutil.move(file_temp, path+"/no_aircraft/"+file_temp)
					track_distance=(None,None,None)
				else:
					we_have_data=True
					track_distance=make_tuple(answer[0][0])
					os.remove(file_temp)


				if (sql_query[1].startswith("VNK")):
					SQL = '''
					INSERT INTO eco.noise (
					time_noise, base_name, stat_1, stat_2, stat_3, leq, slow, spectrum, meteo_stat, temperature,
					humadity, presure, wind, dir, gps_coordinate, gps_stat, temperature_core, temperature_mb, temperature_hdd, free_hdd,
					ups_stat, ups_mode, ups_time, track, distance, aircraft_time) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,  %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,   %s,%s,%s,%s,%s,%s) ON CONFLICT DO NOTHING;
					'''
				elif (sql_query[1].startswith("OMSK")):
					SQL = '''
					INSERT INTO omsk.noise (
					time_noise, base_name, stat_1, stat_2, stat_3, leq, slow, spectrum, meteo_stat, temperature,
					humadity, presure, wind, dir, gps_coordinate, gps_stat, temperature_core, temperature_mb, temperature_hdd, free_hdd,
					ups_stat, ups_mode, ups_time, track, distance, aircraft_time) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,  %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,   %s,%s,%s,%s,%s,%s) ON CONFLICT DO NOTHING;
					'''






				data = sql_query + track_distance
				noise_query.append(cursor.mogrify(SQL, data).decode('utf-8'))




		# do it after processing of the all txt list 
		if we_have_data:
			full_noise_query=''.join([x for x in noise_query])
			
			# print(full_noise_query)
			cursor.execute(full_noise_query)
			connect.commit()



	except psycopg2.ProgrammingError as e:
		connect.rollback()
		print('something went wrong with a file at ' + str(datetime.now().strftime('%Y-%m-%d-%H-%M-%S')) + ': ' + str(e))
		shutil.move(file_temp, path+"/wrong_files/")
	# except psycopg2.InterfaceError as e:
		# print('AZAZA')

	except Exception as e:
		print(str(e))
		try:
			connect = psycopg2.connect(database='eco_db', user='******', host='localhost', password='******', port=5432)
			cursor = connect.cursor()
		except Exception as e:
			print(str(e))
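# A minimal sketch (hypothetical table and columns) of the mogrify-based batching
# used above: each parameterized INSERT is rendered to a literal SQL string with
# cursor.mogrify, the strings are concatenated, and the whole batch is sent to the
# server in a single execute call.
import psycopg2

conn = psycopg2.connect(database='eco_db', user='...', password='...', host='localhost')
cur = conn.cursor()
rows = [('2021-01-01 12:00:00', 54.2), ('2021-01-01 12:00:10', 57.9)]
sql = "INSERT INTO eco.noise_demo (time_noise, leq) VALUES (%s, %s) ON CONFLICT DO NOTHING;"
batch = ''.join(cur.mogrify(sql, row).decode('utf-8') for row in rows)
cur.execute(batch)
conn.commit()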
Exemple #38
0
def decompile_apk(apk_path, output_path, verbose):
    print("[+] Decompiling the apk\n")

    if verbose:
        stdout = None
        stderr = None
    else:
        stdout = DEVNULL
        stderr = DEVNULL

    if not os.path.exists(apk_path):
        print("[-] Error: couldn't find the apk!")
        return

    apk_name = os.path.splitext(os.path.basename(apk_path))[0]

    if os.path.exists("temp"):
        print("[~] Removing old temp directory")
        shutil.rmtree("temp")

    print("[+] Creating temp directory")
    os.makedirs("temp")

    apk_zip = "temp/" + apk_name + ".zip"
    shutil.copy2(apk_path, apk_zip)

    apk_unziped_dir = "temp/" + apk_name + "_unziped"
    os.makedirs(apk_unziped_dir)

    zip_ref = zipfile.ZipFile(apk_zip, 'r')
    zip_ref.extractall(apk_unziped_dir)
    zip_ref.close()

    apk_classes = apk_unziped_dir + "/classes.dex"
    if not os.path.exists(apk_classes):
        print("[-] Error: the apk doesn't have the classes.dex")
        return

    print("[+] Getting the jar")
    apk_jar = "temp/" + apk_name + ".jar"
    call(dex2jar_path + " " + apk_classes + " -o " + apk_jar,
         stdout=stdout, stderr=stderr, shell=True)

    print("[+] Decompiling the jar")
    apk_java = "temp/" + apk_name + "_java/src"
    call(jd_path + " " + apk_jar + " -od " + apk_java,
         stdout=stdout, stderr=stderr, shell=True)

    print("[+] Reverse engineering the apk")
    apk_re = os.path.join(r"C:\Users\jake_\OneDrive\Desktop\Macquarie University\Personal Projects\Cybersecurity\Django\three\mysite\apkDownloads", apk_name + "_re")
    call(apktool_path + " d " + apk_path + " -o " + apk_re,
         stdout=stdout, stderr=stderr, shell=True)

    print("[+] Organizing everything")
    output_dir = os.path.join(output_path, apk_name)
    if os.path.exists(output_dir):
        shutil.rmtree(output_dir)
    os.makedirs(output_dir)

    print("[+] Moving reverse engineering files")
    re_list = os.listdir(apk_re)
    for re_files in re_list:
        shutil.move(os.path.join(apk_re, re_files), output_dir)

    print("[+] Moving java files")
    shutil.move(apk_java, output_dir)

    if os.path.exists("temp"):
        print("[~] Removing temp directory")
        shutil.rmtree("temp")

    print("\n[+] Done decompiling the apk")
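# A possible variant (not the original tool's code) of the dex2jar call above that
# passes an argument list to subprocess.call instead of building a shell string,
# which avoids quoting problems when paths contain spaces. dex2jar_path and the
# file paths below are placeholders.
from subprocess import call, DEVNULL

dex2jar_path = "d2j-dex2jar"                     # assumed to be on the PATH
apk_classes = "temp/app_unziped/classes.dex"     # hypothetical input
apk_jar = "temp/app.jar"                         # hypothetical output
call([dex2jar_path, apk_classes, "-o", apk_jar], stdout=DEVNULL, stderr=DEVNULL)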
for path, subdirs, files in os.walk(videos_dir):
    for name in files:
        filenames.append(os.path.join(path, name))

size_dir = []
for file in filenames:
    size_dir.append((file.split('/')[-1], file, os.path.getsize(file)))
    #print((file.split('/')[-1], file, os.path.getsize(file)))

import operator
size_dir.sort(key=operator.itemgetter(2))
#print(size_dir)

lstoflst = []
for i in range(nparts):
    lstoflst.append(size_dir[i::nparts])
print(len(lstoflst))
print(sum(len(lst) for lst in lstoflst))

for lst in lstoflst:
    print(sum(pair[2] for pair in lst))

import shutil
for i in range(nparts):
    newdir = '/home/wuzhenyu_sjtu/DAN_sbu/SBU/SBU_videos/clean_version/part{}'.format(
        i)
    if not os.path.exists(newdir):
        os.makedirs(newdir)
    for file in lstoflst[i]:
        shutil.move(file[1], os.path.join(newdir, file[0]))
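# Worked example (made-up sizes) of the size_dir[i::nparts] slicing used above:
# after sorting by size, taking every nparts-th element deals the files out
# round-robin, so the per-part totals stay roughly balanced.
sizes = [1, 2, 3, 4, 5, 6, 7, 8]
nparts_demo = 3
parts = [sizes[i::nparts_demo] for i in range(nparts_demo)]
print(parts)                     # [[1, 4, 7], [2, 5, 8], [3, 6]]
print([sum(p) for p in parts])   # [12, 15, 9]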
Exemple #40
0
check = ModelCheckpoint(
    filepath='./test/check/{epoch:02d}-{val_loss:.4f}.hdf5',
    save_best_only=True,
    save_weights_only=False)

model.fit([hite_x_train, samsung_x_train],
          y_train,
          batch_size=500,
          epochs=500,
          validation_split=0.4,
          callbacks=[early, check])

## Move the best checkpoint out of the "check" folder and delete everything else
tmp = os.path.dirname(os.path.realpath(__file__))
bestfile = os.listdir(tmp + '\\check')[-1]
shutil.move(tmp + '\\check' + '\\' + bestfile, tmp + '\\' + bestfile)
#os.rename(bestfile, 'Test0602_kkt.hdf5')
if os.path.isdir(tmp + '\\check'):
    shutil.rmtree(tmp + '\\check')
os.mkdir(tmp + '\\check')

# Load the best model
model = load_model(tmp + '\\' + bestfile)  # e.g. 'Test0602_kkt.hdf5'
## Evaluation

from sklearn.metrics import r2_score
y_pred = model.predict([hite_x_test, samsung_x_test])
r2_y = r2_score(y_test, y_pred)
print("Coefficient of determination (R2): ", r2_y)

answer = model.predict([hite_x_predict, samsung_x_predict])
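# A possible simplification (not the author's code, assuming the standard Keras
# ModelCheckpoint API): giving the callback a fixed filename with
# save_best_only=True keeps overwriting the same file with the best model so far,
# which avoids listing the checkpoint folder and moving the newest file out afterwards.
from tensorflow.keras.callbacks import ModelCheckpoint

best_ckpt = ModelCheckpoint(filepath='./test/best_model.hdf5',
                            save_best_only=True,
                            save_weights_only=False)
# model.fit([hite_x_train, samsung_x_train], y_train, callbacks=[early, best_ckpt], ...)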
def ModifyFileLocation(root_directory, filename):
    subdirectory = DetermineSubdirectory(filename)
    new_file_location = os.path.join(root_directory, subdirectory, filename)
    shutil.move(os.path.join(root_directory, filename), new_file_location)
    return new_file_location
Exemple #42
0
import os
import shutil
import sys
import argparse
import uuid
import fnmatch

parser = argparse.ArgumentParser()

parser.add_argument("--dir", help="Source Path")

args = parser.parse_args()

tilefiles = []

for root, dirs, files in os.walk(args.dir):
    for name in fnmatch.filter(files, "*title*"):
        tilefiles.append(os.path.join(root, name))

if len(tilefiles) == 0:
    sys.stdout.write("Error: no title file found!\n")

for titlefile_path in tilefiles:

    unsortedfile = os.path.join(os.path.dirname(titlefile_path),
                                str(uuid.uuid1()) + ".pdf")

    try:
        shutil.move(titlefile_path, unsortedfile)
    except (OSError, shutil.Error) as err:
        print("can not move file %s -> %s (%s)" % (titlefile_path, unsortedfile, err))
Exemple #43
0
def main():
    try:
        os.mkdir("results")
    except:
        pass
    try:
        os.mkdir("gfx")
    except:
        pass

    sumoHome = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..', '..', '..', '..'))
    if "SUMO_HOME" in os.environ:
        sumoHome = os.environ["SUMO_HOME"]
    sumo = os.environ.get("SUMO_BINARY", os.path.join(sumoHome, 'bin', 'sumo'))
    assert (sumo)

    for f1 in range(int(flow1def[0]), int(flow1def[1]), int(flow1def[2])):
        pWE = float(f1) / 3600  # [veh/s]
        pEW = pWE
        for f2 in range(int(flow2def[0]), int(flow2def[1]), int(flow2def[2])):
            pNS = float(f2) / 3600  # [veh/s]
            pSN = pNS
            print("Computing for %s<->%s" % (f1, f2))
            buildDemand(simSteps, pWE, pEW, pNS, pSN)
            for t in types:
                print(" for tls-type %s" % t)
                patchTLSType('input_additional_template.add.xml', '%tls_type%',
                             'input_additional.add.xml', t)
                args = [
                    sumo,
                    '--no-step-log',
                    #'--no-duration-log',
                    #'--verbose',
                    #'--duration-log.statistics',
                    '--net-file',
                    'input_net.net.xml',
                    '--route-files',
                    'input_routes.rou.xml',
                    '--additional-files',
                    'input_additional.add.xml',
                    '--tripinfo-output',
                    'results/tripinfos_%s_%s_%s.xml' % (t, f1, f2),
                    '--summary-output',
                    'results/summary_%s_%s_%s.xml' % (t, f1, f2),
                    '--device.emissions.probability',
                    '1',
                    '--queue-output',
                    'results/queue_%s_%s_%s.xml' % (t, f1, f2),
                ]
                retCode = subprocess.call(args)
                shutil.move("results/e2_output.xml",
                            "results/e2_output_%s_%s_%s.xml" % (t, f1, f2))
                shutil.move("results/e2_tl0_output.xml",
                            "results/e2_tl0_output_%s_%s_%s.xml" % (t, f1, f2))
                shutil.move("results/edgeData_3600.xml",
                            "results/edgeData_3600_%s_%s_%s.xml" % (t, f1, f2))
                shutil.move("results/laneData_3600.xml",
                            "results/laneData_3600_%s_%s_%s.xml" % (t, f1, f2))
                shutil.move(
                    "results/edgesEmissions_3600.xml",
                    "results/edgesEmissions_3600_%s_%s_%s.xml" % (t, f1, f2))
                shutil.move(
                    "results/lanesEmissions_3600.xml",
                    "results/lanesEmissions_3600_%s_%s_%s.xml" % (t, f1, f2))
                shutil.move("results/TLSStates.xml",
                            "results/TLSStates_%s_%s_%s.xml" % (t, f1, f2))
                shutil.move(
                    "results/TLSSwitchTimes.xml",
                    "results/TLSSwitchTimes_%s_%s_%s.xml" % (t, f1, f2))
                shutil.move(
                    "results/TLSSwitchStates.xml",
                    "results/TLSSwitchStates_%s_%s_%s.xml" % (t, f1, f2))
    def 模具_判断子目录元组目录列表个数(self):
        print('==================')
        for 根路径, 元组目录列表, 文件列表 in os.walk(
                self.子目录):  # walk "."; with topdown=False the subfolders would be visited first
            print('first-level directory list:', 元组目录列表)
            if len(元组目录列表) != 0:  # break ends the loop; continue skips to the next iteration

                元组目录列表与文件列表 = os.listdir(self.子目录)  # directories and files mixed together
                for 子目录或文件 in 元组目录列表与文件列表:
                    元组目录的子目录或文件路径 = os.path.join(self.子目录,
                                                 子目录或文件)  # build the full access path
                    if os.path.isdir(元组目录的子目录或文件路径):  # it is a directory
                        self.目录路径 = 元组目录的子目录或文件路径

                        self.模具_压缩()
                        self.模具_创建移动文件目录()
                        try:

                            移动文件名 = str(self.压缩目录名).replace(
                                "下载种子目录4", "压缩-下载种子")  # replace (optionally with count=1)
                            if not os.path.exists(移动文件名):  # must check first, otherwise the move fails
                                shutil.move(self.压缩目录名, self.移动文件目录名)
                            else:  # otherwise
                                os.unlink(移动文件名)
                                shutil.move(self.压缩目录名, self.移动文件目录名)
                            print('moved to directory:', self.移动文件目录名)

                        except (FileNotFoundError, OSError) as 异常:
                            print('exception, skipping to the next iteration:', 异常)
                            continue  # skip the current iteration and go on to the next one

                    else:  # otherwise it is a file
                        文件路径 = 元组目录的子目录或文件路径
                        self.模具_创建移动文件目录()

                        移动文件名 = str(文件路径).replace("下载种子目录4",
                                                  "压缩-下载种子")  # replace (optionally with count=1)
                        if not os.path.exists(移动文件名):  # must check first, otherwise the copy fails
                            try:
                                shutil.copy(文件路径, self.移动文件目录名)
                                print(文件路径, 'copied file to:', self.移动文件目录名)
                            except (FileNotFoundError, OSError) as 异常:
                                print('exception, skipping to the next iteration:', 异常)
                                continue  # skip the current iteration and go on to the next one
                        else:  # otherwise
                            print('copy already exists: skipping this file')
                            continue  # skip the current iteration and go on to the next one

            else:  # otherwise
                if len(文件列表) != 0:
                    self.目录路径 = self.子目录
                    self.模具_压缩()

                移动文件名 = str(self.压缩目录名).replace(
                    "下载种子目录4\动漫", "压缩-下载种子\动漫打包")  # replace (optionally with count=1)
                try:
                    if not os.path.exists(移动文件名):  # must check first, otherwise the move fails
                        shutil.move(self.压缩目录名, self.移动文件目录名)
                    else:  # otherwise
                        os.unlink(移动文件名)  # delete the existing file first
                        shutil.move(self.压缩目录名, self.移动文件目录名)
                except (FileNotFoundError, OSError) as 异常:
                    print('exception, skipping to the next iteration:', 异常)
                    continue  # skip the current iteration and go on to the next one
                print('moved to directory:', 移动文件名)

            break  # end the loop
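# A compact sketch (hypothetical paths) of the move-with-overwrite pattern used in
# the method above: if the destination path already exists, remove it first so that
# shutil.move neither raises nor nests the source inside an existing target.
import os
import shutil

def move_overwrite(src, dst):
    """Move src to the full destination path dst, replacing dst if it already exists."""
    if os.path.exists(dst):
        os.unlink(dst)          # use shutil.rmtree(dst) instead if dst can be a directory
    shutil.move(src, dst)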
Exemple #45
0
        found = extractext(srcfile)
        if found:
            size = size + (os.path.getsize(srcfile) / (1024 * 1024.0))
            named = os.path.join(new_folder, folder)
            try:
                #Create folder
                os.mkdir(named)
                log(0, "Successfully created the directory %s " % named)

            except OSError:
                log(1, "Creation of the directory %s failed" % named)

            destfile = os.path.join(named, filename)
            if not os.path.exists(destfile):
                shutil.move(srcfile, destfile)
                log(0, "Archived '" + filename + "'.")
                files_success = files_success + 1
            else:
                shutil.move(srcfile, errorfile)
                log(1, "File Exists '" + filename + "'.")
                files_with_errors = files_with_errors + 1
        else:
            shutil.move(srcfile, errorfile)
            log(1, "Could not archive '" + filename + "'.")
            files_with_errors = files_with_errors + 1

log(
    0, "Successfully archived " + str(files_success) + " files, totalling " +
    str(round(size, 2)) + "MB. Files with errors: " + str(files_with_errors))
end(0)
Exemple #46
0
def _record(args, dom, tmpdir):
    def create_ffmpeg_stream(display):
        path = os.path.join(tmpdir, '{:03}-{}x{}.mp4'.format(
            display.index, display.width, display.height))
        return FFmpegRawStream(
            path = path,
            display = display,
            framerate = args.framerate,
            outcodec = args.vcodec,
            loglevel = logging_to_ffmpeg_loglevel(args.loglevel),
            )

    domain_wait(dom, libvirt.VIR_DOMAIN_RUNNING)

    with TtyCbreakMode():
        # Open spice session
        sp = SpiceRecorder(dom,
                framerate = args.framerate,
                create_display_stream = create_ffmpeg_stream,
                )
        sp.open()

        # Record raw video
        def periodic_update(sp):
            qprint("\r" + " "*80, end="")
            qprint("\r{:<20}{:<20}{:<20}".format(
                "{:0.02f} sec".format(sp.elapsed_time),
                "{} frames".format(sp.frames_recorded),
                format_datasize(sp.bytes_recorded),
                ), end="")

        def on_stopped(sp, msg):
            qprint("\nRecording stopped:", msg)

        sp.connect("periodic-update", periodic_update)
        sp.connect("recording-stopped", on_stopped)

        qprint("Recording... Press Q to stop")
        sp.run()


    # Finalize all intermediate FFmpeg processes
    for d in sp.displays:
        rc = d.outfile.close()


    qprint("-"*80)
    qprint("Recorded displays:")
    maxw, maxh = 0, 0
    for n,d in enumerate(sp.displays):
        qprint("  {}: {}x{} {:>4} frames  {:>10}  {:0.02f} sec".format(n, d.width, d.height,
            d.frames_recorded,
            format_datasize(os.path.getsize(d.outfile.name)),
            d.duration))
        maxw = max(maxw, d.width)
        maxh = max(maxh, d.height)
    qprint("Final: {}x{}".format(maxw, maxh))
    qprint("-"*80)


    # Filter out displays with no frames
    displays = [d for d in sp.displays if d.frames_recorded]

    if len(displays) == 1 and H264_PIX_FMT_INTERMEDIATE(displays[0]) == H264_PIX_FMT_FINAL:
        # Optimization: use the only intermediate video as the final
        d = displays[0]
        src = d.outfile.name
        d.outfile = None
        logging.info("Moving {} to {}".format(src, args.output))
        shutil.move(src, args.output)

    else:
        # Convert video
        qprint("\nDone recording. Converting...")
        convert_concat_videos(
                displays = displays,
                framerate = args.framerate,
                outcodec = args.vcodec,
                outpath = args.output,
                loglevel = logging_to_ffmpeg_loglevel(args.loglevel),
                )
    qprint("\n{} written!".format(args.output))
Exemple #47
0
import os
import sys
import shutil

input_dir = sys.argv[1]
filecount = 1

# List the files under the directory given as the command-line argument
os.makedirs("YOLO", exist_ok=True)

for filename in os.listdir(input_dir):
    os.system(
        r"E:\darknet\build\darknet\x64\darknet.exe detect cfg\yolov3.cfg weights\yolov3.weights "
        + input_dir + "\\" + filename)
    os.rename("predictions.jpg", str(filecount) + ".jpg")
    shutil.move(str(filecount) + ".jpg", "YOLO")
    filecount += 1
Exemple #48
0
    def download_and_prepare(
        self,
        download_config: Optional[DownloadConfig] = None,
        download_mode: Optional[GenerateMode] = None,
        ignore_verifications: bool = False,
        save_infos: bool = False,
        try_from_hf_gcs: bool = True,
        dl_manager: Optional[DownloadManager] = None,
        **download_and_prepare_kwargs,
    ):
        """Downloads and prepares dataset for reading.

        Args:
            download_config (Optional ``nlp.DownloadConfig``): specific download configuration parameters.
            download_mode (Optional `nlp.GenerateMode`): select the download/generate mode - Default to REUSE_DATASET_IF_EXISTS
            ignore_verifications (bool): Ignore the verifications of the downloaded/processed dataset information (checksums/size/splits/...)
            save_infos (bool): Save the dataset information (checksums/size/splits/...)
            try_from_hf_gcs (bool): If True, it will try to download the already prepared dataset from the Hf google cloud storage
            dl_manager (Optional ``nlp.DownloadManager``): specific Download Manger to use
        """
        download_mode = GenerateMode(download_mode or GenerateMode.REUSE_DATASET_IF_EXISTS)

        data_exists = os.path.exists(self._cache_dir)
        if data_exists and download_mode == REUSE_DATASET_IF_EXISTS:
            logger.info("Reusing dataset %s (%s)", self.name, self._cache_dir)
            return

        # Currently it's not possible to overwrite the data because it would
        # conflict with versioning: If the last version has already been generated,
        # it will always be reloaded and cache_dir will be set at construction.
        if data_exists and download_mode != REUSE_CACHE_IF_EXISTS:
            raise ValueError(
                "Trying to overwrite an existing dataset {} at {}. A dataset with "
                "the same version {} already exists. If the dataset has changed, "
                "please update the version number.".format(self.name, self._cache_dir, self.config.version)
            )

        logger.info("Generating dataset %s (%s)", self.name, self._cache_dir)
        if not is_remote_url(self._cache_dir):  # if cache dir is local, check for available space
            os.makedirs(self._cache_dir_root, exist_ok=True)
            if not utils.has_sufficient_disk_space(self.info.size_in_bytes or 0, directory=self._cache_dir_root):
                raise IOError(
                    "Not enough disk space. Needed: {} (download: {}, generated: {})".format(
                        utils.size_str(self.info.size_in_bytes or 0),
                        utils.size_str(self.info.download_size or 0),
                        utils.size_str(self.info.dataset_size or 0),
                    )
                )

        @contextlib.contextmanager
        def incomplete_dir(dirname):
            """Create temporary dir for dirname and rename on exit."""
            if is_remote_url(dirname):
                yield dirname
            else:
                tmp_dir = dirname + ".incomplete"
                os.makedirs(tmp_dir)
                try:
                    yield tmp_dir
                    if os.path.isdir(dirname):
                        shutil.rmtree(dirname)
                    os.rename(tmp_dir, dirname)
                finally:
                    if os.path.exists(tmp_dir):
                        shutil.rmtree(tmp_dir)

        # Try to download the already prepared dataset files
        if try_from_hf_gcs:
            try:
                # Create a tmp dir and rename to self._cache_dir on successful exit.
                with incomplete_dir(self._cache_dir) as tmp_data_dir:
                    # Temporarily assign _cache_dir to tmp_data_dir to avoid having to forward
                    # it to every sub function.
                    with utils.temporary_assignment(self, "_cache_dir", tmp_data_dir):
                        relative_data_dir = self._relative_data_dir(with_version=True)
                        reader = ArrowReader(self._cache_dir, self.info)
                        # use reader instructions to download the right files
                        reader.download_from_hf_gcs(self._cache_dir, relative_data_dir)
                        downloaded_info = DatasetInfo.from_directory(self._cache_dir)
                        self.info.update(downloaded_info)
                        # download post processing resources
                        remote_cache_dir = os.path.join(HF_GCP_BASE_URL, relative_data_dir)
                        for resource_file_name in self._post_processing_resources().values():
                            if "/" in resource_file_name:
                                raise ValueError(
                                    "Resources shouldn't be in a sub-directory: {}".format(resource_file_name)
                                )
                            try:
                                resource_path = utils.cached_path(os.path.join(remote_cache_dir, resource_file_name))
                                shutil.move(resource_path, os.path.join(self._cache_dir, resource_file_name))
                            except ConnectionError:
                                logger.info(
                                    "Couldn't download resource file {} from Hf google storage.".format(
                                        resource_file_name
                                    )
                                )

                logger.info("Dataset downloaded from Hf google storage.")
                print(
                    f"Dataset {self.name} downloaded and prepared to {self._cache_dir}. "
                    f"Subsequent calls will reuse this data."
                )
                return
            except (DatasetNotOnHfGcs, MissingFilesOnHfGcs):
                logger.info("Dataset not on Hf google storage. Downloading and preparing it from source")

        # Print is intentional: we want this to always go to stdout so user has
        # information needed to cancel download/preparation if needed.
        # This comes right before the progress bar.
        print(
            f"Downloading and preparing dataset {self.info.builder_name}/{self.info.config_name} "
            f"(download: {utils.size_str(self.info.download_size)}, generated: {utils.size_str(self.info.dataset_size)}, "
            f"total: {utils.size_str(self.info.size_in_bytes)}) to {self._cache_dir}..."
        )

        if dl_manager is None:
            if download_config is None:
                download_config = DownloadConfig()
                download_config.cache_dir = os.path.join(self._cache_dir_root, "downloads")
                download_config.force_download = download_mode == FORCE_REDOWNLOAD

            dl_manager = DownloadManager(
                dataset_name=self.name, download_config=download_config, data_dir=self.config.data_dir
            )

        if self.manual_download_instructions is not None:
            assert (
                dl_manager.manual_dir is not None
            ), "The dataset {} with config {} requires manual data. \n Please follow the manual download instructions: {}. \n Manual data can be loaded with `nlp.load_dataset({}, data_dir='<path/to/manual/data>')".format(
                self.name, self.config.name, self.manual_download_instructions, self.name
            )

        # Create a tmp dir and rename to self._cache_dir on successful exit.
        with incomplete_dir(self._cache_dir) as tmp_data_dir:
            # Temporarily assign _cache_dir to tmp_data_dir to avoid having to forward
            # it to every sub function.
            with utils.temporary_assignment(self, "_cache_dir", tmp_data_dir):
                verify_infos = not save_infos and not ignore_verifications
                self._download_and_prepare(
                    dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs
                )
                # Sync info
                self.info.dataset_size = sum(split.num_bytes for split in self.info.splits.values())
                self.info.download_checksums = dl_manager.get_recorded_sizes_checksums()
                self.info.size_in_bytes = self.info.dataset_size + self.info.download_size
                # Save info
                self._save_info()

        # Save to datasetinfos
        if save_infos:
            DatasetInfosDict(**{self.config.name: self.info}).write_to_directory(self.get_imported_module_dir())

        print(
            f"Dataset {self.name} downloaded and prepared to {self._cache_dir}. "
            f"Subsequent calls will reuse this data."
        )
# ORGANISING DESKTOP
import os
import shutil
print('Please wait, process is running...')

systemdrivename=os.getenv('systemdrive')
username=os.getenv('username')
homedir=systemdrivename+'\\users\\'+username

for (dirname,dirs,files) in os.walk(homedir+'\\desktop'):	
	for file in files:
		try:
			filepath=os.path.join(dirname,file)
			ext=file.split('.')[-1]
			if ext!='exe' and ext!='lnk':
				if os.path.isdir(homedir+'\\documents\\'+ext):
					pass
				else: os.mkdir(homedir+'\\documents\\'+ext)
				if os.path.exists(homedir+'\\documents\\'+ext+'\\'+file):
					os.remove(homedir+'\\documents\\'+ext+'\\'+file)
				shutil.move(filepath,homedir+'\\documents\\'+ext)
		except:
			pass		
	break
input('Desktop has been organized.')
Exemple #50
0
        ax3.plot(time_total, viscosities[-1], "x")
        ax3.autoscale()
        fig3.canvas.draw()

        ax4.plot(time_total, energy, "x")
        ax4.autoscale()
        fig4.canvas.draw()

        ax5.plot(time_total, factor, 'x')
        ax5.autoscale()
        fig5.canvas.draw()

        plt.pause(1e-4)

if mpi.rank == 0:
    if len(paths_sim) == 0:
        to_print = ("gamma,nx,nu8 \n")
        to_print += ("{},{},{} \n".format(
            gamma, sim.params.oper.nx, params.nu_8))
        mode_write = "w"

    else:
        to_print = ("{},{},{} \n".format(
            gamma, sim.params.oper.nx, params.nu_8))
        mode_write = "a"

    write_to_file(path_file_write, to_print, mode=mode_write)

    shutil.move(sim.params.path_run, path)
Exemple #51
0
    def state_cmd(self, item):
        if item['cmd'] == 'PROCESS':
            
            output_filename = self.pictures.get_next()
            offset =[]
            photos = []
            if self.cfg.get("image__format") == "fancy":
                
                #Create Image With Template
                if self.cfg.get("image__background"):
                    out_img = cv.imread(self.cfg.get("image__background"))
                else:
                    out_img = np.ones([self.image_size[1],self.image_size[0],3], dtype=np.uint8)*255
            
                dpi = float(self.image_size[0])/6
                dpm = int(dpi/25.4)

                lr_off = dpm*20

                new_w = int( (self.image_size[0] - (2*lr_off)) / 2 )
                new_h = int( float(new_w)/1.5 )

                thumb_size = ( new_w, new_h )

                v_space = (self.image_size[1] - (2*new_h))
                top_off = int(0.33*v_space)
                mid_off = int(0.16*v_space)
                bot_off = int(0.5 *v_space)
               
                m = int(dpm)

                offset.append( ( lr_off - m ,top_off ))
            
                offset.append( ( lr_off + new_w +m, top_off ))
                
                offset.append( ( lr_off - m , top_off + new_h + mid_off ) )

                offset.append( ( lr_off + new_w + m , top_off + new_h + mid_off ) )
                
                Logger.info(__name__, "fancy print thumb_size: {0}".format(thumb_size))
                Logger.info(__name__, "fancy print thumb coords: {0}".format(offset))

            else:
                """Assembles four pictures into a 2x2 grid

                It assumes, all original pictures have the same aspect ratio as
                the resulting image.

                For the thumbnail sizes we have:
                h = (H - 2 * a - 2 * b) / 2
                w = (W - 2 * a - 2 * b) / 2

                                            W
                       |---------------------------------------|

                  ---  +---+-------------+---+-------------+---+  ---
                   |   |                                       |   |  a
                   |   |   +-------------+   +-------------+   |  ---
                   |   |   |             |   |             |   |   |
                   |   |   |      0      |   |      1      |   |   |  h
                   |   |   |             |   |             |   |   |
                   |   |   +-------------+   +-------------+   |  ---
                 H |   |                                       |   |  2*b
                   |   |   +-------------+   +-------------+   |  ---
                   |   |   |             |   |             |   |   |
                   |   |   |      2      |   |      3      |   |   |  h
                   |   |   |             |   |             |   |   |
                   |   |   +-------------+   +-------------+   |  ---
                   |   |                                       |   |  a
                  ---  +---+-------------+---+-------------+---+  ---

                       |---|-------------|---|-------------|---|
                         a        w       2*b       w        a
                """
            
                # Thumbnail size of pictures
                outer_border = 100
                inner_border = 20
                thumb_box = ( int( self.image_size[0] / 2 ) ,
                              int( self.image_size[1] / 2 ) )
            
                thumb_size = ( thumb_box[0] - outer_border - inner_border ,
                               thumb_box[1] - outer_border - inner_border )
            
                # the source photos are not loaded yet at this point, so the grid
                # offsets are computed from thumb_size rather than from an image object
                offset.append( ( thumb_box[0] - inner_border - thumb_size[0] ,
                                 thumb_box[1] - inner_border - thumb_size[1] ))

                offset.append( ( thumb_box[0] + inner_border,
                                 thumb_box[1] - inner_border - thumb_size[1] ))

                offset.append( ( thumb_box[0] - inner_border - thumb_size[0] ,
                                 thumb_box[1] + inner_border ) )

                offset.append( ( thumb_box[0] + inner_border ,
                                 thumb_box[1] + inner_border ) )

            for i in range(4):
                o = offset[i] 
                img = cv.imread(self.photo_set[i])
                lar_thumb = cv.resize(img, self.image_size, interpolation=cv.INTER_AREA )
                cv.imwrite( self.photo_set[i], lar_thumb, [int(cv.IMWRITE_JPEG_QUALITY), 82]   )
                
                sma_thumb = cv.resize(lar_thumb, thumb_size, interpolation=cv.INTER_AREA )
                out_img[o[1]:o[1]+sma_thumb.shape[0], o[0]:o[0]+sma_thumb.shape[1]] = sma_thumb

                newname = output_filename.replace(".","."+str(i)+".")
                shutil.move( self.photo_set[i], newname )
                photos.append(newname)

            # Save assembled image
            cv.imwrite( output_filename, out_img, [int(cv.IMWRITE_JPEG_QUALITY), 90] )
            
            thumb_filename = output_filename.replace("/pic","/thumb.pic")
            thumb = cv.resize(out_img, ( int(self.image_size[0]/6), int(self.image_size[1]/6)),
                              interpolation=cv.INTER_AREA )
            cv.imwrite( thumb_filename, thumb, [int(cv.IMWRITE_JPEG_QUALITY), 90] )
            
            self.final_photos= {'primary': output_filename, 
                                'primary_thumb': thumb_filename, 
                                'secondary' : photos }

            self.ani_q_cmd_push("PROCESSPREVIEW")

        elif item['cmd'] == 'PROCESSPREVIEW':
            self.gpio.set('green_led', 1)
            self.gpio.set('red_led', 1)
            
            self.gameDisplay.fill((200,200,200))
            img = pygame.image.load(self.final_photos['primary'])
            ratio = 0.275
            #ratio = 0.55
            shrink = ( int(img.get_size()[0]*ratio), int(img.get_size()[1]*ratio))
            img = pygame.transform.scale(img, shrink) 
            img_pos = ((self.disp_w-img.get_size()[0])/2+180,65)

            if self.cfg.get("upload__enabled"):
                pass
            elif self.cfg.get("printer__enabled"): 
                self.gen_processing_menu("Print") #Regen with Print
            else:
                self.gen_processing_menu("Finish") #Regen with finish
            
            self.ani_q_img_push( self.processing_menu, self.processing_menu_pos, 0.1, False, True)
            self.ani_q_img_push( img, img_pos, 0.3, True, False)
            self.ani_q_cmd_push("COMPLETE")
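# Worked example (made-up numbers) of the 2x2 grid arithmetic documented in the
# docstring above: with a W x H = 1800 x 1200 canvas, outer border a = 100 and
# inner border b = 20, the thumbnail size is w = (W - 2*a - 2*b) / 2 by
# h = (H - 2*a - 2*b) / 2 and the four paste offsets follow directly.
W, H = 1800, 1200
a, b = 100, 20
w = (W - 2 * a - 2 * b) // 2   # 780
h = (H - 2 * a - 2 * b) // 2   # 480
offsets = [
    (a, a),                          # thumbnail 0: top-left
    (a + w + 2 * b, a),              # thumbnail 1: top-right
    (a, a + h + 2 * b),              # thumbnail 2: bottom-left
    (a + w + 2 * b, a + h + 2 * b),  # thumbnail 3: bottom-right
]
print((w, h), offsets)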
import os
import os.path
import shutil

# You can add more file formats here
image_formats = ["jpg", "png", "jpeg", "gif", "webp", "tiff"]
audio_formats = ["mp3", "wav"]
video_formats = ["mp4", "avi", "webm"]
docs_formats = ["ai", "ait", "txt", "rtf", "pdf"]

files = os.listdir("./")

# Make sure the destination folders exist before moving anything into them
for folder in ["images", "audio", "videos", "docs", "others"]:
    os.makedirs(folder, exist_ok=True)

for file in files:
    if os.path.isfile(file) and file != "smartDirectoryManagementSystem.py":
        ext = (file.split(".")[-1]).lower()

        if ext in image_formats:
            shutil.move(file, "images/" + file)
        elif ext in audio_formats:
            shutil.move(file, "audio/" + file)
        elif ext in video_formats:
            shutil.move(file, "videos/" + file)
        elif ext in docs_formats:
            shutil.move(file, "docs/" + file)
        else:
            shutil.move(file, "others/" + file)
    def _restore_fstab(self, fstab):
        """Restore the saved fstab in rootfs"""
        if fstab is None:
            return
        shutil.move(fstab + ".orig", fstab)
Exemple #54
0
def Rfoil(coord_foil_name,foil_name,alpha,Re,Mach,Ncrit,obj): 
#  
## for function test 
##alpha=7
##Re=1e6
##Mach=0
##Ncrit=9
##savepath='geo'

  

    os.chdir('geo')   
    with open('rfoil.inp','w') as f:
        f.write('load \n')
    #    f.write('airfoil.dat \n')
        f.write('%s\n'%coord_foil_name)
        f.write('airfoil\n')
        f.write('ppar\n')
        f.write('N \n')
        f.write('300 \n\n')
        f.write('isav \n')
        f.write('isav.dat \n\n\n')
        f.write('oper\n')
        f.write('visc\n')
        f.write('%e\n'%Re)
        f.write('Mach\n')
        f.write('%e\n'%Mach)
        if Ncrit == 0:
            f.write('vpar\n')
            f.write('XTR\n')
            f.write('0.05 \n')
            f.write('0.05 \n\n')
        else:
            f.write('vpar\n')
            f.write('N\n')
            f.write('%g\n\n'%Ncrit)
        f.write('pacc\n')
        f.write('polar.dat \n')
        f.write('\n')
        f.write('arbl\n')
        f.write('aseq\n')
        f.write('%g\n'%np.min(alpha))
        f.write('%g\n'%np.max(alpha))
        f.write('1 \n')
        
        #tf = strcmp(version,'RFOIL.v4.0');
        #if tf == 1
        #    fprintf(fid,'y \n');
        #end
        f.write('svbl\n')
        f.write('1 \n')
        f.write('bl \n')
        f.write('pacc\n')
        f.write('\n')
        f.write('\nquit\n')
        # no explicit close needed: the "with" block closes the file
    
    os.system("rfoil.exe < rfoil.inp")
    
    # # # CALCULATE THE cl/cd 
    alpha_out = np.zeros(np.size(alpha), dtype=float)
    cl = np.zeros(np.size(alpha), dtype=float)
    cd = np.zeros(np.size(alpha), dtype=float)
    polar_file = open('polar.dat', "r")
    data = [row for row in polar_file]
    for  j in range(0,len(data)-13):   # # # j is the number of AOAs 
         str_list = list(filter(None, data[j+13].split(' '))) 
         alpha_out[j]=float(str_list[0])
         cl[j]=float(str_list[1])
         cd[j]=float(str_list[2])
    polar_file.close()
    clOcd=cl/cd
    if np.count_nonzero(alpha_out)==0 :
        f2=(np.count_nonzero(alpha_out)+1.0)*1.0/len(alpha)        
    else:       
        f2=(np.count_nonzero(alpha_out))*1.0/len(alpha)
    
    if obj==0:  # # max cl 
        if len(cl)>1:
            area=(cl[0:-1]+cl[1:])*(alpha_out[1:]-alpha_out[0:-1])/2 # # # for target max(cl) 
            area = area[~np.isnan(area)]  # # # remove nan
            target=np.array([np.sum(area)])
        else:
            target=np.array([cl])    
    else:
        if len(cl)>1:
            area=(clOcd[0:-1]+clOcd[1:])*(alpha_out[1:]-alpha_out[0:-1])/2 # # # for target max(cl/cd)
            area = area[~np.isnan(area)]  # # # remove nan
            target=np.array([np.sum(area)])
        else:
            target=np.array([clOcd])
        
        
        
     # # # save the aerodynamic data into sub_folder 
    files=os.listdir('.') 
    if not os.path.exists(foil_name):
        os.makedirs(foil_name) 
        
    shutil.move(coord_foil_name,foil_name)  
    shutil.move('rfoil.inp',foil_name) 
    shutil.move('polar.dat',foil_name) 
    shutil.move('isav.dat',foil_name)  
    shutil.move('bl.cfx',foil_name)  
    shutil.move('bl.cpx',foil_name)  
    shutil.move('bl.dst',foil_name)  
    shutil.move('bl.tet',foil_name)     
    #for f in files:
    #    if not f.endswith("exe") and os.path.isfile(os.path.join(os.getcwd()+f)):            
    #        shutil.move(f,foil_name)
    os.chdir("..")
    return {'lift':cl, 'drag':cd,'OBJ':target,'conver_alpha':f2}
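# A small numerical check (synthetic values) of the objective used above: the sum
# (y[i] + y[i+1]) * (x[i+1] - x[i]) / 2 is the trapezoidal rule, so it should agree
# with numpy's built-in trapezoid integration (np.trapz, renamed np.trapezoid in
# NumPy >= 2.0).
import numpy as np

x = np.array([0.0, 1.0, 2.5, 4.0])   # hypothetical angles of attack
y = np.array([0.2, 0.6, 0.9, 1.1])   # hypothetical lift coefficients
manual = np.sum((y[:-1] + y[1:]) * (x[1:] - x[:-1]) / 2)
assert np.isclose(manual, np.trapz(y, x))
print(manual)   # 3.025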
    

    
    
def init_seafile_server():
    version_stamp_file = get_version_stamp_file()
    if exists(join(shared_seafiledir, 'seafile-data')):
        if not exists(version_stamp_file):
            update_version_stamp(os.environ['SEAFILE_VERSION'])
        # The symbolic link is removed after docker finishes.
        latest_version_dir='/opt/seafile/seafile-server-latest'
        current_version_dir='/opt/seafile/' + get_conf('SEAFILE_SERVER', 'seafile-server') + '-' +  read_version_stamp()
        if not exists(latest_version_dir):
            call('ln -sf ' + current_version_dir + ' ' + latest_version_dir)
        loginfo('Skip running setup-seafile-mysql.py because there is existing seafile-data folder.')
        return

    loginfo('Now running setup-seafile-mysql.py in auto mode.')
    env = {
        'SERVER_NAME': 'seafile',
        'SERVER_IP': get_conf('SEAFILE_SERVER_HOSTNAME', 'seafile.example.com'),
        'MYSQL_USER': '******',
        'MYSQL_USER_PASSWD': str(uuid.uuid4()),
        'MYSQL_USER_HOST': '%.%.%.%',
        'MYSQL_HOST': get_conf('DB_HOST', '127.0.0.1'),
        # Default MariaDB root user has empty password and can only connect from localhost.
        'MYSQL_ROOT_PASSWD': get_conf('DB_ROOT_PASSWD', ''),
    }

    # Change the script to allow mysql root password to be empty
    # call('''sed -i -e 's/if not mysql_root_passwd/if not mysql_root_passwd and "MYSQL_ROOT_PASSWD" not in os.environ/g' {}'''
    #     .format(get_script('setup-seafile-mysql.py')))

    # Change the script to disable check MYSQL_USER_HOST
    call('''sed -i -e '/def validate_mysql_user_host(self, host)/a \ \ \ \ \ \ \ \ return host' {}'''
        .format(get_script('setup-seafile-mysql.py')))

    call('''sed -i -e '/def validate_mysql_host(self, host)/a \ \ \ \ \ \ \ \ return host' {}'''
        .format(get_script('setup-seafile-mysql.py')))

    setup_script = get_script('setup-seafile-mysql.sh')
    call('{} auto -n seafile'.format(setup_script), env=env)

    domain = get_conf('SEAFILE_SERVER_HOSTNAME', 'seafile.example.com')
    proto = 'https' if is_https() else 'http'
    with open(join(topdir, 'conf', 'seahub_settings.py'), 'a+') as fp:
        fp.write('\n')
        fp.write("""CACHES = {
    'default': {
        'BACKEND': 'django_pylibmc.memcached.PyLibMCCache',
        'LOCATION': 'memcached:11211',
    },
    'locmem': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
    },
}
COMPRESS_CACHE_BACKEND = 'locmem'""")
        fp.write('\n')
        fp.write("TIME_ZONE = '{time_zone}'".format(time_zone=os.getenv('TIME_ZONE',default='Etc/UTC')))
        fp.write('\n')
        fp.write('FILE_SERVER_ROOT = "{proto}://{domain}/seafhttp"'.format(proto=proto, domain=domain))
        fp.write('\n')

    # By default ccnet-server binds to the unix socket file
    # "/opt/seafile/ccnet/ccnet.sock", but /opt/seafile/ccnet/ is a mounted
    # volume from the docker host, and on windows and some linux environment
    # it's not possible to create unix sockets in an external-mounted
    # directories. So we change the unix socket file path to
    # "/opt/seafile/ccnet.sock" to avoid this problem.
    with open(join(topdir, 'conf', 'ccnet.conf'), 'a+') as fp:
        fp.write('\n')
        fp.write('[Client]\n')
        fp.write('UNIX_SOCKET = /opt/seafile/ccnet.sock\n')
        fp.write('\n')

    # Disabled the Elasticsearch process on Seafile-container
    # Connection to the Elasticsearch-container
    if os.path.exists(join(topdir, 'conf', 'seafevents.conf')):
        with open(join(topdir, 'conf', 'seafevents.conf'), 'r') as fp:
            fp_lines = fp.readlines()
            if '[INDEX FILES]\n' in fp_lines:
                insert_index = fp_lines.index('[INDEX FILES]\n') + 1
                insert_lines = ['es_port = 9200\n', 'es_host = elasticsearch\n', 'external_es_server = true\n']
                for line in insert_lines:
                    fp_lines.insert(insert_index, line)
    
        with open(join(topdir, 'conf', 'seafevents.conf'), 'w') as fp:
            fp.writelines(fp_lines)

    # After the setup script creates all the files inside the
    # container, we need to move them to the shared volume
    #
    # e.g move "/opt/seafile/seafile-data" to "/shared/seafile/seafile-data"
    files_to_copy = ['conf', 'ccnet', 'seafile-data', 'seahub-data', 'pro-data']
    for fn in files_to_copy:
        src = join(topdir, fn)
        dst = join(shared_seafiledir, fn)
        if not exists(dst) and exists(src):
            shutil.move(src, shared_seafiledir)
            call('ln -sf ' + join(shared_seafiledir, fn) + ' ' + src)

    loginfo('Updating version stamp')
    update_version_stamp(os.environ['SEAFILE_VERSION'])
Exemple #56
0
    def handle_entry(self, task, config, entry, siblings):
        src = entry['location']
        src_isdir = os.path.isdir(src)
        src_path, src_name = os.path.split(src)

        # get the proper path and name in order of: entry, config, above split
        dst_path = entry.get(self.destination_field, config.get('to', src_path))
        if config.get('rename'):
            dst_name = config['rename']
        elif entry.get('filename') and entry['filename'] != src_name:
            # entry specifies different filename than what was split from the path
            # since some inputs fill in filename it must be different in order to be used
            dst_name = entry['filename']
        else:
            dst_name = src_name

        try:
            dst_path = entry.render(dst_path)
        except RenderError as err:
            raise plugin.PluginError('Path value replacement `%s` failed: %s' % (dst_path, err.args[0]))
        try:
            dst_name = entry.render(dst_name)
        except RenderError as err:
            raise plugin.PluginError('Filename value replacement `%s` failed: %s' % (dst_name, err.args[0]))

        # Clean invalid characters with pathscrub plugin
        dst_path = pathscrub(os.path.expanduser(dst_path))
        dst_name = pathscrub(dst_name, filename=True)

        # Join path and filename
        dst = os.path.join(dst_path, dst_name)
        if dst == entry['location']:
            raise plugin.PluginWarning('source and destination are the same.')

        if not os.path.exists(dst_path):
            if task.options.test:
                self.log.info('Would create `%s`', dst_path)
            else:
                self.log.info('Creating destination directory `%s`', dst_path)
                os.makedirs(dst_path)
        if not os.path.isdir(dst_path) and not task.options.test:
            raise plugin.PluginWarning('destination `%s` is not a directory.' % dst_path)

        # unpack_safety
        if config.get('unpack_safety', entry.get('unpack_safety', True)):
            count = 0
            while True:
                if count > 60 * 30:
                    raise plugin.PluginWarning('The task has been waiting unpacking for 30 minutes')
                size = os.path.getsize(src)
                time.sleep(1)
                new_size = os.path.getsize(src)
                if size != new_size:
                    if not count % 10:
                        self.log.verbose('File `%s` is possibly being unpacked, waiting ...', src_name)
                else:
                    break
                count += 1

        src_file, src_ext = os.path.splitext(src)
        dst_file, dst_ext = os.path.splitext(dst)

        # Check dst contains src_ext
        if config.get('keep_extension', entry.get('keep_extension', True)):
            if not src_isdir and dst_ext != src_ext:
                self.log.verbose('Adding extension `%s` to dst `%s`', src_ext, dst)
                dst += src_ext
                dst_file += dst_ext  # this is used for sibling files. dst_ext turns out not to be an extension!

        funct_name = 'move' if self.move else 'copy'
        funct_done = 'moved' if self.move else 'copied'

        if task.options.test:
            self.log.info('Would %s `%s` to `%s`', funct_name, src, dst)
            for s, ext in siblings.items():
                # we cannot rely on splitext for extensions here (subtitles may have the language code)
                d = dst_file + ext
                self.log.info('Would also %s `%s` to `%s`', funct_name, s, d)
        else:
            # IO errors will have the entry mark failed in the base class
            if self.move:
                shutil.move(src, dst)
            elif src_isdir:
                shutil.copytree(src, dst)
            else:
                shutil.copy(src, dst)
            self.log.info('`%s` has been %s to `%s`', src, funct_done, dst)
            # further errors will not have any effect (the entry has been successfully moved or copied out)
            for s, ext in siblings.items():
                # we cannot rely on splitext for extensions here (subtitles may have the language code)
                d = dst_file + ext
                try:
                    if self.move:
                        shutil.move(s, d)
                    else:
                        shutil.copy(s, d)
                    self.log.info('`%s` has been %s to `%s` as well.', s, funct_done, d)
                except Exception as err:
                    self.log.warning(str(err))
        entry['old_location'] = entry['location']
        entry['location'] = dst
        if self.move and not src_isdir:
            self.clean_source(task, config, entry)
Exemple #57
0
    def install_from_urls(self, urls):
        if not self.env.user.has_group('base.group_system'):
            raise AccessDenied()

        # One-click install is opt-in - cfr Issue #15225
        ad_dir = tools.config.addons_data_dir
        if not os.access(ad_dir, os.W_OK):
            msg = (_("Automatic install of downloaded Apps is currently disabled.") + "\n\n" +
                   _("To enable it, make sure this directory exists and is writable on the server:") +
                   "\n%s" % ad_dir)
            _logger.warning(msg)
            raise UserError(msg)

        apps_server = urls.url_parse(self.get_apps_server())

        OPENERP = odoo.release.product_name.lower()
        tmp = tempfile.mkdtemp()
        _logger.debug('Install from url: %r', urls)
        try:
            # 1. Download & unzip missing modules
            for module_name, url in pycompat.items(urls):
                if not url:
                    continue    # nothing to download, local version is already the last one

                up = urls.url_parse(url)
                if up.scheme != apps_server.scheme or up.netloc != apps_server.netloc:
                    raise AccessDenied()

                try:
                    _logger.info('Downloading module `%s` from OpenERP Apps', module_name)
                    response = requests.get(url)
                    response.raise_for_status()
                    content = response.content
                except Exception:
                    _logger.exception('Failed to fetch module %s', module_name)
                    raise UserError(_('The `%s` module appears to be unavailable at the moment, please try again later.') % module_name)
                else:
                    zipfile.ZipFile(io.BytesIO(content)).extractall(tmp)
                    assert os.path.isdir(os.path.join(tmp, module_name))

            # 2a. Copy/Replace module source in addons path
            for module_name, url in pycompat.items(urls):
                if module_name == OPENERP or not url:
                    continue    # OPENERP is special case, handled below, and no URL means local module
                module_path = modules.get_module_path(module_name, downloaded=True, display_warning=False)
                bck = backup(module_path, False)
                _logger.info('Copy downloaded module `%s` to `%s`', module_name, module_path)
                shutil.move(os.path.join(tmp, module_name), module_path)
                if bck:
                    shutil.rmtree(bck)

            # 2b.  Copy/Replace server+base module source if downloaded
            if urls.get(OPENERP):
                # special case. it contains the server and the base module.
                # extract path is not the same
                base_path = os.path.dirname(modules.get_module_path('base'))

                # copy all modules in the SERVER/odoo/addons directory to the new "odoo" module (except base itself)
                for d in os.listdir(base_path):
                    if d != 'base' and os.path.isdir(os.path.join(base_path, d)):
                        destdir = os.path.join(tmp, OPENERP, 'addons', d)    # XXX 'odoo' subdirectory ?
                        shutil.copytree(os.path.join(base_path, d), destdir)

                # then replace the server by the new "base" module
                server_dir = tools.config['root_path']      # XXX or dirname()
                bck = backup(server_dir)
                _logger.info('Copy downloaded module `odoo` to `%s`', server_dir)
                shutil.move(os.path.join(tmp, OPENERP), server_dir)
                #if bck:
                #    shutil.rmtree(bck)

            self.update_list()

            with_urls = [module_name for module_name, url in pycompat.items(urls) if url]
            downloaded = self.search([('name', 'in', with_urls)])
            installed = self.search([('id', 'in', downloaded.ids), ('state', '=', 'installed')])

            to_install = self.search([('name', 'in', list(urls)), ('state', '=', 'uninstalled')])
            post_install_action = to_install.button_immediate_install()

            if installed or to_install:
                # in this case, force server restart to reload python code...
                self._cr.commit()
                odoo.service.server.restart()
                return {
                    'type': 'ir.actions.client',
                    'tag': 'home',
                    'params': {'wait': True},
                }
            return post_install_action

        finally:
            shutil.rmtree(tmp)
        input_file = gzip.GzipFile(input_file_name, 'r')
        graph = json.loads(input_file.read().decode('utf-8'))

    if 'edges' not in graph:
        print("ERROR: Input JSON file doesn't have an 'edges' property!",
              file=sys.stderr)
    else:
        edges = graph['edges']
        retired_cuis_by_type = get_retired_cuis_by_type()

        stats = {
            '_report_datetime':
            datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
            '_total_number_of_edges':
            len(
                edges
            ),  # underscore is to make sure it sorts to the top of the report
            'number_of_edges_with_retired_cui':
            count_edges_with_cui_in_set(edges,
                                        retired_cuis_by_type.get('ALL')),
            'number_of_edges_with_retired_cui_with_synonym':
            count_edges_with_cui_in_set(edges, retired_cuis_by_type.get('SY')),
            'number_of_edges_with_invalid_cui':
            count_edges_with_invalid_cui(edges)
        }

        temp_output_file = tempfile.mkstemp(prefix='kg2-')[1]
        with open(temp_output_file, 'w') as outfile:
            json.dump(stats, outfile, indent=4, sort_keys=True)
        shutil.move(temp_output_file, args.outputFile[0])
Exemple #59
0
def test_filesystem_notifications(qtbot, projects, tmpdir):
    """
    Test that filesystem notifications are emitted when creating,
    deleting and moving files and directories.
    """
    # Create a directory for the project and some files.
    project_root = tmpdir.mkdir('project0')
    folder0 = project_root.mkdir('folder0')
    folder1 = project_root.mkdir('folder1')
    file0 = project_root.join('file0')
    file1 = folder0.join('file1')
    file2 = folder0.join('file2')
    file3 = folder1.join('file3')
    file0.write('')
    file1.write('')
    file3.write('ab')

    # Open the project
    projects.open_project(path=to_text_string(project_root))

    # Get a reference to the filesystem event handler
    fs_handler = projects.watcher.event_handler

    # Test file creation
    with qtbot.waitSignal(fs_handler.sig_file_created,
                          timeout=30000) as blocker:
        file2.write('')

    file_created, is_dir = blocker.args
    assert file_created == to_text_string(file2)
    assert not is_dir

    # Test folder creation
    with qtbot.waitSignal(fs_handler.sig_file_created,
                          timeout=3000) as blocker:
        folder2 = project_root.mkdir('folder2')

    folder_created, is_dir = blocker.args
    assert folder_created == osp.join(to_text_string(project_root), 'folder2')

    # Test file move/renaming
    new_file = osp.join(to_text_string(folder0), 'new_file')
    with qtbot.waitSignal(fs_handler.sig_file_moved,
                          timeout=3000) as blocker:
        shutil.move(to_text_string(file1), new_file)

    original_file, file_moved, is_dir = blocker.args
    assert original_file == to_text_string(file1)
    assert file_moved == new_file
    assert not is_dir

    # Test folder move/renaming
    new_folder = osp.join(to_text_string(project_root), 'new_folder')
    with qtbot.waitSignal(fs_handler.sig_file_moved,
                          timeout=3000) as blocker:
        shutil.move(to_text_string(folder2), new_folder)

    original_folder, folder_moved, is_dir = blocker.args
    assert original_folder == to_text_string(folder2)
    assert folder_moved == new_folder
    assert is_dir

    # Test file deletion
    with qtbot.waitSignal(fs_handler.sig_file_deleted,
                          timeout=3000) as blocker:
        os.remove(to_text_string(file0))

    deleted_file, is_dir = blocker.args
    assert deleted_file == to_text_string(file0)
    assert not is_dir
    assert not osp.exists(to_text_string(file0))

    # Test folder deletion
    with qtbot.waitSignal(fs_handler.sig_file_deleted,
                          timeout=3000) as blocker:
        shutil.rmtree(to_text_string(folder0))

    deleted_folder, is_dir = blocker.args
    assert to_text_string(folder0) in deleted_folder

    # For some reason this fails on macOS
    if not sys.platform == 'darwin':
        # Test file/folder modification
        with qtbot.waitSignal(fs_handler.sig_file_modified,
                              timeout=3000) as blocker:
            file3.write('abc')

        modified_file, is_dir = blocker.args
        assert modified_file in to_text_string(file3)
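Every check in the test above follows the same pattern: perform the filesystem operation inside a qtbot.waitSignal context manager, then unpack blocker.args once the watcher's signal fires. A minimal sketch of that pattern, assuming (as in the test) a handler fixture exposing sig_file_created:

import os.path as osp

def test_wait_for_creation(qtbot, fs_handler, tmpdir):
    target = tmpdir.join('example_file')
    # Block until the watcher reports the creation, or fail after 3 s.
    with qtbot.waitSignal(fs_handler.sig_file_created, timeout=3000) as blocker:
        target.write('')
    path_created, is_dir = blocker.args
    assert path_created == osp.join(str(tmpdir), 'example_file')
    assert not is_dir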
    def combine_movie_files(self):
        """
        Used internally by Manim to combine the separate
        partial movie files that make up a Scene into a single
        video file for that Scene.
        """
        # Manim renders the scene as many smaller movie files
        # which are then concatenated to a larger one.  The reason
        # for this is that sometimes video-editing is made easier when
        # one works with the broken up scene, which effectively has
        # cuts at all the places you might want.  But for viewing
        # the scene as a whole, one of course wants to see it as a
        # single piece.
        kwargs = {
            "remove_non_integer_files": True,
            "extension": self.movie_file_extension,
        }
        if self.scene.start_at_animation_number is not None:
            kwargs["min_index"] = self.scene.start_at_animation_number
        if self.scene.end_at_animation_number is not None:
            kwargs["max_index"] = self.scene.end_at_animation_number
        else:
            kwargs["remove_indices_greater_than"] = self.scene.num_plays - 1
        partial_movie_files = get_sorted_integer_files(
            self.partial_movie_directory, **kwargs)
        if len(partial_movie_files) == 0:
            print("No animations in this scene")
            return

        # Write a file partial_file_list.txt containing all
        # partial movie files
        file_list = os.path.join(self.partial_movie_directory,
                                 "partial_movie_file_list.txt")
        with open(file_list, 'w') as fp:
            for pf_path in partial_movie_files:
                if os.name == 'nt':
                    pf_path = pf_path.replace('\\', '/')
                fp.write("file \'file:{}\'\n".format(pf_path))

        movie_file_path = self.get_movie_file_path()
        commands = [
            FFMPEG_BIN,
            '-y',  # overwrite output file if it exists
            '-f',
            'concat',
            '-safe',
            '0',
            '-i',
            file_list,
            '-loglevel',
            'error',
            '-c',
            'copy',
            movie_file_path
        ]
        if not self.includes_sound:
            commands.insert(-1, '-an')

        combine_process = subprocess.Popen(commands)
        combine_process.wait()

        if self.includes_sound:
            sound_file_path = movie_file_path.replace(
                self.movie_file_extension, ".wav")
            # Makes sure sound file length will match video file
            self.add_audio_segment(AudioSegment.silent(0))
            self.audio_segment.export(
                sound_file_path,
                bitrate='312k',
            )
            temp_file_path = movie_file_path.replace(".", "_temp.")
            commands = [
                "ffmpeg",
                "-i",
                movie_file_path,
                "-i",
                sound_file_path,
                '-y',  # overwrite output file if it exists
                "-c:v",
                "copy",
                "-c:a",
                "aac",
                "-b:a",
                "320k",
                # select video stream from first file
                "-map",
                "0:v:0",
                # select audio stream from second file
                "-map",
                "1:a:0",
                '-loglevel',
                'error',
                # "-shortest",
                temp_file_path,
            ]
            subprocess.call(commands)
            shutil.move(temp_file_path, movie_file_path)
            os.remove(sound_file_path)

        self.print_file_ready_message(movie_file_path)
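The method above concatenates the partial movie files with ffmpeg's concat demuxer and, when the scene has sound, muxes the exported audio into a temporary file that then replaces the original via shutil.move. A hedged sketch of just that mux-and-replace step; mux_audio is not a Manim helper, and it assumes an ffmpeg binary on the PATH.

import os
import shutil
import subprocess

def mux_audio(video_path, audio_path, ffmpeg_bin='ffmpeg'):
    # Write the muxed result to a temp file, then replace the original.
    base, ext = os.path.splitext(video_path)
    temp_path = base + '_temp' + ext
    subprocess.check_call([
        ffmpeg_bin,
        '-y',                 # overwrite the temp output if it exists
        '-i', video_path,     # first input: the silent video
        '-i', audio_path,     # second input: the audio track
        '-c:v', 'copy',       # keep the video stream as-is
        '-c:a', 'aac',
        '-map', '0:v:0',      # video from the first input
        '-map', '1:a:0',      # audio from the second input
        '-loglevel', 'error',
        temp_path,
    ])
    shutil.move(temp_path, video_path)

Building the temporary name with os.path.splitext sidesteps the pitfall of movie_file_path.replace(".", "_temp."), which would also rewrite any dot appearing earlier in the path.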