def osm_postgis_transform(osm_files, skip_start_db, data_dir=data_dir_default, db_host=db_host_default, db_port=db_port_default, db_user=db_user_default, db_password=db_password_default, db_name=db_name_default, cache_size=cache_size_default, osm2pgsql=osm2pgsql_default):
    # the text for the help transformed by plac:
    """
    This script sets up PostGIS database with data from an OSM (.osm) file. It 
    is essentially a wrapper around `osm2pgsql`. By default it will either spawn a database process based on the data directory specified with the `--data-dir` argument (if the data directory is non-empty) or create a database data directory and spawn a database process based on that newly created data directory and feed data to it. If the nested database process can't be connected to with the default value for database connection parameters, they have to be overwritten, otherwise the script will fail with the error message of the `postgres` process.
    
    The start of a nested database process can be skipped if `--skip-start-db` command line flag is set. In this case the database connection parameters will be used to connect to an external already running `postgres` process where data will be fed to.
    
    WARNING: The script has not yet been tested completely to hide database credentials (including the password) from output and/or other logging backends (files, syslog, etc.). It is currently recommended to specify a separate database and local host for the script only and to not care about it at all (as OSM data is as far from a secret as it could be).
    """
    # --- argument validation ---
    if osm_files is None:
        raise ValueError("osm_files mustn't be None")
    if str(type(osm_files)) != "<type 'list'>": # Python-2-only check (repr of the `type` object differs in Python 3)
        raise ValueError("osm_files has to be a list")
    if len(osm_files) == 0:
        raise ValueError("osm_files mustn't be empty")
    # pg_version/postgis_version(_string) are module-level globals not visible
    # in this extract -- presumably the detected versions of the installed
    # postgresql/postgis packages; TODO confirm
    if pg_version == (9,2):
        if postgis_version > (2,0):
            raise ValueError("postgis > %s is not compatible with postgresql %s" % (postgis_version_string, pg_version_string))
    if data_dir is None:
        raise ValueError("data_dir mustn't be None")
    if os.path.exists(data_dir) and not os.path.isdir(data_dir):
        raise ValueError("data_dir '%s' exists, but isn't a directory" % (data_dir,))
    
    # always check, even after install_prequisites
    #@TODO: not sufficient to binary name; necessary to evaluate absolute path with respect to $PATH
    if os_utils.which(osm2pgsql) is None:
        raise RuntimeError("osm2pgsql not found, make sure you have invoked osm_postgis_transform_prequisites.py")
    
    # parsing
    # postgres binary refuses to run when process uid and effective uid are not identical    
    postgres_proc = None # pexpect handle of the nested postgres server process (if one is started)
    try:
        if not skip_start_db:
            # database process is either started by postgis_utils.bootstrap_datadir or with pexpect.spawn if the data_dir isn't empty (indicating start of database based on existing data directory)
            if not os.path.exists(data_dir) or len(os.listdir(data_dir)) == 0:
                logger.info("creating PostGIS data directory in data_dir '%s'" % (data_dir,))
                if not os.path.exists(data_dir):
                    logger.info("creating inexisting data_dir '%s'" % (data_dir,))
                    os.makedirs(data_dir)
                # initdb/postgres/createdb/psql/db_socket_dir are module-level
                # globals (presumably paths of the postgres client/server
                # binaries and the unix socket directory) -- TODO confirm
                postgis_utils.bootstrap_datadir(data_dir, db_user, password=db_password, initdb=initdb)
                postgis_utils.bootstrap_database(data_dir, db_port, db_host, db_user, db_name, password=db_password, initdb=initdb, postgres=postgres, createdb=createdb, psql=psql, socket_dir=db_socket_dir)
            if postgres_proc is None:
                logger.info("spawning database process based on existing data directory '%s'" % (data_dir,))
                postgres_proc = pexpect.spawn(str.join(" ", [postgres, "-D", data_dir, "-p", str(db_port), "-h", db_host, "-k", db_socket_dir]))
                postgres_proc.logfile = sys.stdout
                logger.info("sleeping %s s to ensure postgres server started" % postgres_server_start_timeout)
                time.sleep(postgres_server_start_timeout) # not nice (should poll connection until success instead)
        logger.debug("using osm2pgsql binary %s" % osm2pgsql)
        # feed all osm_files into the database in a single osm2pgsql invocation
        osm2pgsql_proc = pexpect.spawn(str.join(" ", [osm2pgsql, "--create", "--database", db_name, "--cache", str(cache_size), "--number-processes", str(osm2pgsql_number_processes), "--slim", "--port", str(db_port), "--host", db_host, "--username", db_user, "--latlong", "--password", "--keep-coastlines", "--extra-attributes", "--hstore-all"]+osm_files))
        osm2pgsql_proc.logfile = sys.stdout
        # NOTE(review): the remainder of this function (the password
        # interaction after `--password` and the except/finally clause for
        # this try, which presumably cleans up postgres_proc) appears to have
        # been truncated/garbled in this extract -- recover from the original
        # source before modifying
        osm2pgsql_proc.expect(['Password:'******'s no check for subprocess.Popen 
Example #2
0
def bootstrap(cpan=cpan_default):
    """Install the required Perl packages via `cpan` (answering its
    interactive questions automatically) and then install the `postgresql`
    system package (supported on Debian and Ubuntu only).

    Raises ValueError when the `cpan` binary is unavailable or the operating
    system isn't supported."""
    if not os_utils.which(cpan):
        raise ValueError("cpan binary '%s' doesn't exist or isn't executable" % (cpan,))
    # there's seriously no smart way to avoid cpan questions without
    # manipulating the local configuration (which conflicts with the idea of
    # making it possible to run the script locally and not only on CI services)
    packages = ["DBD::PgPP", "Log::Log4perl", "DBD::Pg"]
    logger.info("installing %s with cpan '%s'" % (str(packages), cpan))
    proc = pexpect.spawn(" ".join([cpan] + packages))
    proc.logfile = sys.stdout
    proc.timeout = 10000000
    # either the first interactive question appears or EOF (EOF means the
    # packages are installed already and cpan exits without asking)
    if proc.expect([r'\[yes\]', pexpect.EOF]) == 0:
        proc.sendline("yes")
        # surrounding [] needed in order to avoid a double match
        proc.expect([r'\[local::lib\]'])
        proc.sendline("sudo")
        proc.expect([r'\[yes\]'])
        proc.sendline("yes")
        proc.expect(pexpect.EOF) # wait for termination
    if not (check_os.check_debian() or check_os.check_ubuntu()):
        raise ValueError("operating system not supported")
    pm_utils.install_packages(["postgresql"])
    logger.info("You're ready to run `python bootstrap_unprivileged.py`")
 def __init__(self, input_path, output_dir_path, melt=melt_default, melt_command_tail=melt_command_tail_default, recursive=recursive_default):
     """
     @args input_path %(__input_path_doc__)s
     @args output_dir_path %(__output_dir_path_doc__)s
     @args melt_doc %(__melt_doc__)s
     @args melt_command_tail %(__melt_command_tail_doc__)s
     @args recursive %(__recursive_doc__)s
     """ % {"__input_path_doc__": __input_path_doc__, "__output_dir_path_doc__": __output_dir_path_doc__, "__melt_doc__": __melt_doc__, "__melt_command_tail_doc__": __melt_command_tail_doc__, "__recursive_doc__": __recursive_doc__}
     # validate the input location and prepare an empty output directory
     if not os.path.exists(input_path):
         raise ValueError("input_path '%s' doesn't exist" % (input_path, ))
     if not os.path.exists(output_dir_path):
         logger.info("creating non-existing output directory '%s'" % (output_dir_path, ))
         os.makedirs(output_dir_path)
     elif len(os.listdir(output_dir_path)) > 0:
         raise ValueError("output_dir_path '%s' isn't empty" % (output_dir_path, ))
     if not os.path.isdir(output_dir_path):
         raise ValueError("output_dir_path '%s' isn't a directory" % (output_dir_path, ))
     self.output_dir_path = output_dir_path
     # collect the input file list (a single file, the directory's entries, or
     # the whole directory tree when `recursive` is requested)
     if os.path.isfile(input_path):
         self.input_files = [input_path.decode("utf-8")] # .decode("utf-8") avoids `UnicodeDecodeError: 'ascii' codec can't decode byte 0xc2 in position 28: ordinal not in range(128)`
     elif os.path.isdir(input_path):
         if recursive is False:
             self.input_files = [os.path.join(input_path, i).decode("utf-8") for i in os.listdir(input_path)]
         else:
             self.input_files = []
             for dirpath, dirnames, filenames in os.walk(input_path):
                 self.input_files += [os.path.join(dirpath, i).decode("utf-8") for i in filenames]
             logger.debug("added %d files under '%s' recursively" % (len(self.input_files), input_path, ))
     else:
         # fixed: the original referenced the undefined name `file_name` here,
         # which raised a NameError instead of the intended AssertionError
         # (reachable e.g. for broken symlinks or special files)
         raise AssertionError("input_path '%s' is neither file nor directory" % (input_path, ))
     # validating installation of aac audio codec (there might be other codecs available, but not figured out yet how to check their availability in melt)
     aac_binary = "aac-enc"
     if os_utils.which(aac_binary) is None:
         raise RuntimeError("The aac codec is not installed on your system (the binary '%s' is missing). Install it and try again" % (aac_binary, ))
     if os_utils.which(melt) is None:
         raise RuntimeError("The melt binary '%s' is not available. Install it and try again" % (melt, ))
     # the ladspa-sdk tools are required by melt's audio filters
     analyseplugin_binary = "/usr/bin/analyseplugin"
     applyplugin_binary = "/usr/bin/applyplugin"
     listplugin_binary = "/usr/bin/listplugins"
     if not os.path.exists(analyseplugin_binary) or not os.path.exists(applyplugin_binary) or not os.path.exists(listplugin_binary):
         raise RuntimeError("one or more of the binaries '%s', '%s' and '%s' are missing which indicates that ladspa-sdk is missing. Install it and try again." % (analyseplugin_binary, applyplugin_binary, listplugin_binary, ))
     self.melt = melt
     self.melt_command_tail = melt_command_tail
 def __handle_diff__(file0, file1):
     """Open the module-level `difftool` on `file0` and `file1` and ask the
     user interactively whether the script should proceed.

     Raises ValueError when `difftool` isn't available/executable and
     RuntimeError when the user answers "n"."""
     # check whether difftool exists and is executable
     if os_utils.which(difftool) is None:
         raise ValueError("specified difftool '%s' isn't available or not executable" % (difftool,))
     logger.warn("template content doesn't match with content of existing target file '%s', opening difftool '%s' in order to investigate" % (target, difftool))
     cmds = [difftool, file0, file1]
     logger.info("invoking difftool command '%s' in order to visualize changes" % (" ".join(cmds),))
     sp.check_call(cmds)
     # keep prompting until an unambiguous yes/no answer is given
     while True:
         answer = raw_input("Proceed with script (y/n)? ")
         if answer == "y":
             break
         if answer == "n":
             raise RuntimeError("Aborted by user")
 def generate_cudaminer_param_checker_values(self, cudaminer=cudaminer_default, cudaminer_parameters_prepend=None, cudaminer_additional_parameters=None, output_scan_interval=output_scan_interval_default, output_scan_max_count=output_scan_max_count_default, hash_rate_count=hash_rate_count_default, update_callback=None, check_running_callback=None, ):
     """invokes `cudaminer` with all combinations of the values in the `param_dict`. The combinations are produced by creating the cartesian product of tuples of each key in `param_dict` and each value in it. `cudaminer` doesn't seem to have an option to specify the number of runs in a benchmark. Therefore the process is simply killed after it printed the first hash/s value to stderr (`cudaminer` prints to stderr for some reason). The check of the output is done every `output_scan_interval` seconds.
     @args cudaminer %(__cudaminer_docstring__)s
     @args output_scan_interval %(__output_scan_interval_docstring__)s
     @args output_scan_max_count %(__output_scan_max_count_docstring__)s
     @args hash_rate_count %(__hash_rate_count_docstring__)s
     @args param_dict a dictionary in the form of `cudaminer option (long or short)` x `list of option values to test each`. The `=` in the long form of arguments seems to be optional, reported as https://github.com/cbuchner1/CudaMiner/issues/148 for clarification and improvement
     @args cudaminer_parameters_prepend a list of strings which represents a command or a list of commands which is prepended to the `cudaminer` command (e.g. `["optirun"]` in case you're using the `bumblebee` program). The resulting process tree is killed recursively by sending `SIGTERM`. Make sure that the command accepts it and exits after receiving it, otherwise you risk that the invokation fails with timeout systematically (see documentation of `output_scan_max_count` for details) and does not produce any results. Defaults to an empty list.
     @args cudaminer_additional_parameters a list of strings representing `cudaminer` arguments, see `cudaminer --help` for available arguments. Defaults to an empty list.
     @args update_callback a callable which is called with the current and the maximum combination count after each newly measured combination; skipped when `None` or when the `cudaminer` process fails
     @args check_running_callback a callable which is invoked regularily and should return `True` if the generation can continue to run and should return `False` if not. If `check_running_callback` is `None` the check doesn't occur""" %  {"__cudaminer_docstring__": __cudaminer_docstring__, "__output_scan_interval_docstring__": __output_scan_interval_docstring__, "__output_scan_max_count_docstring__": __output_scan_max_count_docstring__, "__hash_rate_count_docstring__": __hash_rate_count_docstring__}
     # internal implementation notes:
     # - `result_dict_skip` needs to contain the measured values in order to be able 
     # to complete the table when the generation is resumed
     # `None` sentinels instead of the original mutable `[]` defaults (which
     # would have been shared between calls); caller-visible behavior unchanged
     if cudaminer_parameters_prepend is None:
         cudaminer_parameters_prepend = []
     if cudaminer_additional_parameters is None:
         cudaminer_additional_parameters = []
     if os_utils.which(cudaminer) is None:
         raise ValueError("cudaminer binary '%s' doesn't exist or isn't accessible, aborting" % (cudaminer, ))
     # ducktyping is nice, but validation is nicer; `isinstance` (unlike the
     # original comparison against the string "<type 'list'>") works under
     # both Python 2 and Python 3
     if not isinstance(cudaminer_parameters_prepend, list):
         raise ValueError("cudaminer_parameters_prepend has to be a list")
     if not isinstance(cudaminer_additional_parameters, list):
         raise ValueError("cudaminer_additional_parameters has to be a list")
     if output_scan_interval > 0.5:
         logger.warn("an output_scan_interval above 0.5 (specified '%d') is strongly discouraged because it makes the application non-responsive for at least(!) that amount of time" % (output_scan_interval, ))
     
     logger.info("testing with '%d' parameter combinations, reusing results of '%d' combinations from resumption" % (self.param_count_max-len(self.result_dict), len(self.result_dict), ))
     # breakable nested loops need to be imitated with functions
     # @return `True` when `result_dict` has been updated, `False` otherwise
     def __outer_loop__(param_dict_cartesian_item):
         result_dict_key = __generate_result_dict_key__(param_dict_cartesian_item)
         result_dict_key_shelve = __marshal_shelve_key__(result_dict_key)
         if result_dict_key_shelve in self.result_dict:
             # already measured in a previous (resumed) run
             return False
     
         # build the cudaminer argument list from the option/value tuples
         cmd_tail = []
         for param_tuple in param_dict_cartesian_item:
             cmd_tail.append(param_tuple[0])
             cmd_tail.append(param_tuple[1])
         if len(cudaminer_parameters_prepend) > 0:
             logger.debug("invoking cudaminer with requested prepended commands '%s'" % (str(cudaminer_parameters_prepend), ))
         if len(cudaminer_additional_parameters) > 0:
             logger.debug("invoking cudaminer with requested additional parameters '%s'" % (str(cudaminer_additional_parameters), ))
         cmds = cudaminer_parameters_prepend+[cudaminer, "--benchmark", "--no-autotune", ]+cudaminer_additional_parameters+cmd_tail
         cmd = str.join(" ", cmds)
         logger.debug("testing '%s'" % (cmd, ))
         self.cudaminerProcess = sp.Popen(cmds, stdout=sp.PIPE, stderr=sp.PIPE)
             # cudaminer seems to print everything to stderr
             # due to the fact that preceeding commands don't necessarily 
             # forward `SIGTERM`, e.g. `optirun` holds back `SIGTERM`, kill 
             # the process tree recursively when it is terminated (see below)
         cudaminer_process_output = ""
         output_scan_count = 0
         logger.debug("waiting for cudaminer output containing a hash/s value")
         while cudaminer_process_output.count("hash/s") < hash_rate_count:
             time.sleep(output_scan_interval)
             cudaminer_process_output += str(self.cudaminerProcess.stderr.read(100)) # str conversion necessary in python3
                 # cudaminer produces endless output once it is running and EOF 
                 # when it is terminated (e.g. externally), multiple invokations 
                 # after EOF seem to return ''
                 # @TODO: delete/free preceeding input (we just need to avoid to 
                 # break inside search string `hash/s` (adjust logging message 
                 # below then)            
             output_scan_count += 1
             cudaminer_process_returncode = self.cudaminerProcess.poll()
             if not self.generationRunning:
                 # hard termination requested
                 kill_process_recursively(self.cudaminerProcess.pid)
                 return False # return codes for this function need to be 
                     # introduced if more complex code is inserted after the 
                     # call below
             if not cudaminer_process_returncode is None:
                 # process died on its own; read the rest of its output
                 # (`read` returns immediately when the process is terminated)
                 cudaminer_process_output += self.cudaminerProcess.stderr.read() 
                 logger.warn("cudaminer returned unexpectedly with returncode '%s', consider adjusting param_dict, skipping (output so far has been '%s')" % (str(cudaminer_process_returncode), cudaminer_process_output))
                 return False
             if output_scan_count > output_scan_max_count:
                 logger.info("waited longer than '%d' seconds for cudaminer output, aborting test running and skipping (output so far has been '%s')" % ((output_scan_count*output_scan_interval), cudaminer_process_output))
                 # fixed: the original used `continue` here, which re-entered
                 # the loop and therefore never aborted (the scan count only
                 # keeps growing) and leaked the process; kill the process
                 # tree and skip this combination as the log message announces
                 kill_process_recursively(self.cudaminerProcess.pid)
                 return False
         kill_process_recursively(self.cudaminerProcess.pid)
         # retrieve the hash/s value
         cudaminer_process_output_splits = cudaminer_process_output.split("hash/s") # in case the string ends with the split term `''` is added to the split result (which is very smart :))
         hash_rates_list = []
         for cudaminer_process_output_split in cudaminer_process_output_splits[:-1]: # last item can always be skipped, see above
             cudaminer_process_output_split_split = cudaminer_process_output_split.split(" ")
             hash_rate_suffix = cudaminer_process_output_split_split[-1] # 'k' or 'm' (theoretically others)
             hash_rate = float(cudaminer_process_output_split_split[-2])
             hash_rates_list.append((hash_rate, hash_rate_suffix))
         logger.debug("results are '%s'" % (str(["%s %s" % (i[0], i[1]) for i in hash_rates_list])))
         
         # store results for sorted summary (store mean as key for 
         # sorting and keep values in dict value as part of a tuple)
         hash_rate_mean = numpy.mean([i[0] for i in hash_rates_list])
         self.result_dict[result_dict_key_shelve] = (hash_rate_mean, tuple(["%s %s" % (i[0], i[1]) for i in hash_rates_list])) # storing in tuple makes in possible to store in dict (list are not hashable)
         return True
     for param_dict_cartesian_item in self.param_dict_cartesian:
         if not check_running_callback is None and not check_running_callback():
             logger.info("check_running_callback triggered interruption of generation process, returning intermediate result(s)")
             break
         result_dict_updated = __outer_loop__(param_dict_cartesian_item)
         if result_dict_updated:
             self.param_count_current += 1
             if update_callback is not None:
                 # guard added: `update_callback` defaults to `None` and the
                 # original called it unconditionally, raising a TypeError on
                 # the first newly measured combination
                 update_callback(self.param_count_current, self.param_count_max)
import cudaminer_param_checker_globals
import threading
import plac
import psutil
import signal
import shelve
import marshal
import tempfile

# module-level logger emitting INFO and above to the console
# NOTE(review): the `logging` and `os_utils` imports are not visible in this
# extract -- confirm they are imported further up in the file
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
ch = logging.StreamHandler() # console handler
ch.setLevel(logging.INFO)
logger.addHandler(ch)

# absolute path of the `cudaminer` binary found on $PATH (None when missing)
cudaminer_default = os_utils.which("cudaminer")

# identifiers of the available user interface frontends
FRONTEND_CONSOLE_GUI = "text-gui"
FRONTEND_WXPYTHON_GUI = "wxpython-gui"
frontends = [FRONTEND_CONSOLE_GUI, FRONTEND_WXPYTHON_GUI]
frontend_default = FRONTEND_WXPYTHON_GUI

# defaults for generate_cudaminer_param_checker_values (see its docstring)
hash_rate_count_default = 8 # number of hash/s samples to collect per run
output_scan_max_count_default = 1200 # scans before a run is considered failed
output_scan_interval_default = 0.1 # seconds between two output scans
debug_default = False

# docstring fragments interpolated into function docstrings via `%`
__cudaminer_docstring__ = "a path to a cudaminer binary"
__output_scan_interval_docstring__ = "the time between two checks of the output the program might pause longer if `cudaminer` doesn't return output so fast"
__output_scan_max_count_docstring__ = "determines how many times `output_scan_interval` can elapse until the invokation of `cudaminer` is considered as failed and the test run is aborted and skipped"
__hash_rate_count_docstring__ = "the number of hash/s rate values which ought to be retrieved from output before the `cudaminer` process is killed"
Example #7
0
    def __init__(self,
                 parent,
                 id,
                 title,
                 mp4box,
                 categories=["1", "2", "3", "4", "5", "split"],
                 input_directory=None,
                 review_folder=None):
        """
        @args mp4box %(__mp4box_doc__)s
        """ % {
            "__mp4box_doc__": __mp4box_doc__
        }
        wx.Frame.__init__(self, parent, id, title, size=(600, 500))
        if os_utils.which(mp4box) is None:
            raise ValueError(
                "mp4box binary '%s' not found or not executable (on Ubuntu make sure the package `gpac` is installed)"
                % (mp4box, ))
        self.mp4box = mp4box
        self.undoStack = collections.deque()  # the undo stack to track un- and
        # redoable categorization
        self.redoStack = collections.deque()  # the redo stack to track un- and
        # redoable categorization
        # there's no need to manage state of redo components with a flag (they
        # only ought to be enabled if an undoing request has been processed and
        # no new categorization has been performed) because information can be
        # stored in the enabled state of the redo menu item
        mainSplitter = wx.SplitterWindow(self,
                                         wx.ID_ANY,
                                         style=wx.SP_LIVE_UPDATE)
        mainSplitter.SetMinimumPaneSize(100)
        self.listsPanel = wx.Panel(mainSplitter)
        self.videoPanel = wx.Panel(mainSplitter)

        self.workingSet = set([])
        if input_directory is None:
            logger.debug("using empty initial working set")
            working_set = set([])  # don't add to self.workingSet before
            # addFilesToWorkingSet has been called below
        else:
            logger.debug("using '%s' as initial input directory" %
                         (input_directory, ))
            working_set = set(
                filter(lambda x: os.path.isfile(x), [
                    os.path.abspath(os.path.join(input_directory, i))
                    for i in os.listdir(input_directory)
                ])
            )  # filter necessary in order to avoid failure of retrieval of min and max in __split_item__
        standardPaths = wx.StandardPaths.Get()
        self.reviewFolder = review_folder  # the folder where video which aren't splitted
        # correctly or need other reviewing are moved to; a default value
        # is too confusing and setting value by the user should be enforced
        self.currentFolder = standardPaths.GetDocumentsDir(
        )  # stores the folder of the
        # last selection of input files (initially set to a convenient
        # default value, like $HOME)
        self.currentVolume = 0.5

        self.menuBar = wx.MenuBar()
        self.fileMenu = wx.Menu()
        add_files_menu_item = self.fileMenu.Append(wx.ID_ANY, "&Add files",
                                                   "Add media file(s)")
        add_from_directory_menu_item = self.fileMenu.Append(
            wx.ID_ANY, "Add from &directory",
            "Add all media files of a directory")
        set_review_folder_menu_item = self.fileMenu.Append(
            wx.ID_ANY, "&Set review folder", "Set review folder")
        self.menuBar.Append(self.fileMenu, '&File')
        self.editMenu = wx.Menu()
        self.editMenuItemUndo = self.editMenu.Append(wx.ID_ANY, "Undo")
        self.editMenuItemRedo = self.editMenu.Append(wx.ID_ANY, "Redo")
        self.Bind(wx.EVT_MENU, self.onEditMenuItemUndoClick,
                  self.editMenuItemUndo)
        self.Bind(wx.EVT_MENU, self.onEditMenuItemRedoClick,
                  self.editMenuItemRedo)
        self.menuBar.Append(self.editMenu, "&Edit")
        self.helpMenu = wx.Menu()
        self.helpMenuAboutItem = self.helpMenu.Append(-1, '&About')
        self.Bind(wx.EVT_MENU, self.onAboutBox, self.helpMenuAboutItem)
        self.menuBar.Append(self.helpMenu, '&Help')
        self.SetMenuBar(self.menuBar)
        self.Bind(wx.EVT_MENU, self.onAddFiles, add_files_menu_item)
        self.Bind(wx.EVT_MENU, self.onAddFromDirectory,
                  add_from_directory_menu_item)
        self.Bind(wx.EVT_MENU, self.onSetReviewFolder,
                  set_review_folder_menu_item)

        # create sizers (no need for a sizer for splitter)
        videoSizer = wx.BoxSizer(wx.VERTICAL)
        controlSizer = wx.BoxSizer(wx.HORIZONTAL)
        sliderSizer = wx.BoxSizer(wx.HORIZONTAL)

        # build the audio bar controls
        playButtonImg = self.getBmpFromSvg(
            resource_string(
                "video_splitter",
                os.path.join("resources", "icons", 'play-button.svg')),
            icon_size_default, icon_size_default)
        self.playButton = buttons.GenBitmapButton(self.videoPanel,
                                                  bitmap=playButtonImg,
                                                  name="play")
        self.playButton.SetInitialSize()
        self.playButton.Bind(
            wx.EVT_BUTTON,
            self.
            onPause  # handles both play and pause depending on state of the button
        )
        self.playButton.Disable()
        controlSizer.Add(self.playButton, 0, wx.LEFT, 3)
        stopButtonImg = self.getBmpFromSvg(
            resource_string(
                "video_splitter",
                os.path.join("resources", "icons", 'stop-button.svg')),
            icon_size_default, icon_size_default)
        self.stopButton = buttons.GenBitmapButton(self.videoPanel,
                                                  bitmap=stopButtonImg,
                                                  name="stop")
        self.stopButton.SetInitialSize()
        self.stopButton.Bind(wx.EVT_BUTTON, self.onStop)
        self.stopButton.Disable()
        controlSizer.Add(self.stopButton, 0, wx.LEFT, 3)

        self.mplayerCtrl = wx.media.MediaCtrl(
            self.videoPanel,
            -1,  #szBackend=wx.media.MEDIABACKEND_GSTREAMER
            # `wx.media.MEDIABACKEND_DIRECTSHOW`, `wx.media.MEDIABACKEND_MCI` and `wx.media.MEDIABACKEND_QUICKTIME` fail due to `(video-manager:1392): Gtk-CRITICAL **: IA__gtk_range_set_range: assertion 'min < max' failed` at every video start
        )
        #self.mplayerCtrl.ShowPlayerControls(wx.media.MEDIACTRLPLAYERCONTROLS_DEFAULT)
        self.trackPath = None
        self.playbackState = PLAYBACK_STATE_STOPPED
        # could be checked with self.playbackTime.IsRunning, but then the
        # status depends on using the timer and it's harder to debug issues
        # with it
        self.playbackSlider = wx.Slider(self.videoPanel, size=wx.DefaultSize)
        self.playbackSlider.Bind(wx.EVT_SLIDER, self.onOffsetSet)
        sliderSizer.Add(self.playbackSlider, 1, wx.ALL | wx.EXPAND, 5)

        # create volume control
        self.volumeCtrl = wx.Slider(self.videoPanel, size=(200, -1))
        self.volumeCtrl.SetRange(0, 100)  # slider only seems to take integers
        # (multiply and divide with/by 100)
        self.volumeCtrl.SetValue(self.currentVolume * 100)
        self.volumeCtrl.Bind(wx.EVT_SLIDER, self.onVolumeSet)
        controlSizer.Add(self.volumeCtrl, 0, wx.ALL | wx.EXPAND, 5)

        # create track counter
        self.trackCounter = wx.StaticText(self.videoPanel, label="00:00")
        sliderSizer.Add(self.trackCounter, 0, wx.ALL | wx.CENTER, 5)

        # set up playback timer

        videoSizer.Add(self.mplayerCtrl, 1, wx.ALL | wx.EXPAND, 5)
        videoSizer.Add(sliderSizer, 0, wx.ALL | wx.EXPAND, 5)
        videoSizer.Add(controlSizer, 0, wx.ALL | wx.CENTER, 5)
        self.videoPanel.SetSizer(videoSizer)

        # setup file lists (a splitter has to be used in order to provide
        # minimal flexibility; the resize control should be left or right of
        # the select and deselect buttons, but it's just a question of decision
        # -> use left)
        listsButtonSizer = wx.BoxSizer(wx.VERTICAL)
        listsPanelSizer = wx.BoxSizer(wx.HORIZONTAL)
        listsSplitterPanelRightSizer = wx.BoxSizer(wx.HORIZONTAL)
        listsSplitter = wx.SplitterWindow(
            self.listsPanel, style=wx.SP_LIVE_UPDATE
        )  # doesn't expand automatically (although should because it's the only component in listPanel) -> use listsPanelSizer
        listsSplitter.SetMinimumPaneSize(100)
        listsPanelSizer.Add(listsSplitter, 1, wx.ALL | wx.EXPAND, 5)
        self.listsPanel.SetSizer(listsPanelSizer)
        listsSplitterPanelLeft = wx.Panel(parent=listsSplitter)
        listsSplitterPanelRight = wx.Panel(parent=listsSplitter)
        self.workingSetList = wx.ListCtrl(
            parent=listsSplitterPanelLeft, id=wx.ID_ANY,
            style=wx.LC_REPORT)  # don't make entries editable
        workingSetListSizer = wx.BoxSizer(wx.VERTICAL)
        categoryButtonSizer = wx.WrapSizer(wx.HORIZONTAL)
        mergeListSizer = wx.BoxSizer(wx.VERTICAL)
        self.selectButton = wx.Button(parent=listsSplitterPanelRight,
                                      id=wx.ID_ANY,
                                      label=">",
                                      size=wx.Size(icon_size_default,
                                                   icon_size_default))
        self.deselectButton = wx.Button(parent=listsSplitterPanelRight,
                                        id=wx.ID_ANY,
                                        label="<",
                                        size=wx.Size(icon_size_default,
                                                     icon_size_default))
        self.mergeList = wx.ListCtrl(parent=listsSplitterPanelRight,
                                     id=wx.ID_ANY,
                                     style=wx.LC_REPORT)
        self.workingSetList.InsertColumn(0,
                                         heading="File",
                                         width=wx.LIST_AUTOSIZE)
        self.mergeList.InsertColumn(0, heading="File", width=wx.LIST_AUTOSIZE)
        self.mergeButton = wx.Button(parent=listsSplitterPanelRight,
                                     id=wx.ID_ANY,
                                     label="merge")
        for category in categories:
            category_button = wx.Button(parent=listsSplitterPanelLeft,
                                        id=wx.ID_ANY,
                                        label=str(category))
            categoryButtonSizer.Add(category_button, 0, wx.ALL, 5)

            # unable to determine minimal width at this point
            def __createCategoryButtonClickCallback__(category):
                """Return a button-click handler bound to *category*.

                A factory function is required here: ``category`` is the
                enclosing ``for`` loop's variable, and binding it through the
                factory's parameter avoids the late-binding-closure pitfall
                (otherwise every handler would see the last category).
                """
                def __onCategoryButtonClick__(event):
                    """Move the selected working-set file into the folder of
                    *category* below ``self.reviewFolder``, update the list,
                    the undo/redo stacks and, if the moved file was playing,
                    auto-start the next item.
                    """
                    if self.reviewFolder is None:
                        wx.MessageBox("review folder isn't set", 'Info',
                                      wx.OK | wx.ICON_INFORMATION)
                        return
                    selected_index = self.workingSetList.GetNextSelected(-1)
                    if selected_index == -1:
                        logger.debug(
                            "no item selected in working set list, so nothing to categorize"
                        )
                        return
                    # snapshot the item now; its text (the file path) stays
                    # valid after DeleteItem below
                    selected_item = self.workingSetList.GetItem(selected_index,
                                                                col=0)
                    # playback should be stopped before moving file
                    selected_item_playbacked = False  # store info for later (much simpler code for the price of one flag)
                    if selected_item.GetText() == self.trackPath:
                        selected_item_playbacked = True
                        self.stopPlayback()
                    category_folder = os.path.join(self.reviewFolder,
                                                   str(category))
                    if not os.path.exists(category_folder):
                        os.makedirs(category_folder)
                    logger.debug("moving '%s' into category folder '%s'" %
                                 (selected_item.GetText(), category_folder))
                    shutil.move(
                        selected_item.GetText(),
                        os.path.join(category_folder,
                                     os.path.basename(
                                         selected_item.GetText())))
                    self.workingSetList.DeleteItem(selected_index)
                    # record original path + category + position so the move
                    # can be undone; a new action invalidates the redo history
                    self.undoStack.append(
                        (selected_item.GetText(), category, selected_index))
                    self.editMenuItemRedo.Enable(False)
                    self.redoStack.clear()
                    # automatically start the next item after the categorized in workingSetList in order to proceed faster and select it (but only if the just moved item is currently playbacked because otherwise the playback of another item would be interrupted)
                    # NOTE: after DeleteItem the successor of the categorized
                    # item (if any) now sits at ``selected_index`` itself, so
                    # it exists iff ``selected_index < GetItemCount()``.  The
                    # previous ``< GetItemCount()-1`` was off by one and
                    # skipped auto-play when the successor was the last
                    # remaining item.
                    if self.workingSetList.GetItemCount() > 0 \
                            and selected_index < self.workingSetList.GetItemCount() \
                            and selected_item_playbacked: # there needs to be one more item after the categorized one (refers to item count after removal of categorized item)
                        selected_item = self.workingSetList.GetItem(
                            selected_index, col=0)
                        self.trackPath = selected_item.GetText()
                        logger.info("starting video '%s'" % (self.trackPath, ))
                        self.startVideo(self.trackPath)
                        self.workingSetList.SetItemState(
                            selected_index,  # item
                            wx.LIST_STATE_SELECTED,  # state
                            wx.LIST_STATE_SELECTED  # stateMask
                        )

                return __onCategoryButtonClick__

            category_button.Bind(
                wx.EVT_BUTTON, __createCategoryButtonClickCallback__(category))
        workingSetListSizer.Add(categoryButtonSizer, 0, wx.ALL | wx.EXPAND, 5)
        workingSetListSizer.Add(self.workingSetList, 1, wx.ALL | wx.EXPAND, 5)
        listsSplitterPanelLeft.SetSizer(workingSetListSizer)
        listsButtonSizer.Add(self.selectButton, 0, wx.ALL, 5)
        listsButtonSizer.Add(self.deselectButton, 0, wx.ALL, 5)
        listsSplitterPanelRightSizer.Add(listsButtonSizer, 0, wx.ALL, 5)
        mergeListSizer.Add(self.mergeList, 1, wx.ALL | wx.EXPAND, 5)
        mergeListSizer.Add(self.mergeButton, 0, wx.ALIGN_BOTTOM | wx.ALL, 5)
        listsSplitterPanelRightSizer.Add(mergeListSizer, 1, wx.EXPAND, 5)
        listsSplitterPanelRight.SetSizer(listsSplitterPanelRightSizer)
        self.workingSetList.Bind(
            wx.
            EVT_LIST_ITEM_ACTIVATED,  #The item has been activated (ENTER or double click). Processes a wxEVT_LIST_ITEM_ACTIVATED event type.
            self.onWorkingSetListDoubleClick)
        self.workingSetList.Bind(wx.EVT_LIST_ITEM_SELECTED,
                                 self.onWorkingSetListSelect)
        self.workingSetList.Bind(wx.EVT_LIST_ITEM_DESELECTED,
                                 self.onWorkingSetListDeselect)
        self.mergeList.Bind(
            wx.
            EVT_LIST_ITEM_ACTIVATED,  #The item has been activated (ENTER or double click). Processes a wxEVT_LIST_ITEM_ACTIVATED event type.
            self.onMergeListDoubleClick)
        self.mergeList.Bind(wx.EVT_LIST_ITEM_SELECTED, self.onMergeListSelect)
        self.mergeList.Bind(wx.EVT_LIST_ITEM_DESELECTED,
                            self.onMergeListDeselect)
        self.selectButton.Bind(wx.EVT_BUTTON, self.onSelectButtonClick)
        self.deselectButton.Bind(wx.EVT_BUTTON, self.onDeselectButtonClick)
        self.mergeButton.Bind(wx.EVT_BUTTON, self.onMergeButtonClick)
        self.workingSetList.Bind(wx.EVT_LIST_ITEM_RIGHT_CLICK,
                                 self.onWorkingSetListRightClick)

        listsSplitter.SplitVertically(listsSplitterPanelLeft,
                                      listsSplitterPanelRight)
        mainSplitter.SplitVertically(self.listsPanel, self.videoPanel)

        self.Bind(wx.media.EVT_MEDIA_PLAY, self.onMediaStarted)
        self.Bind(wx.media.EVT_MEDIA_FINISHED, self.onMediaFinished)

        # working set list popup menu
        self.workingSetListPopupMenu = wx.Menu()
        self.workingSetListPopupMenuItemClear = self.workingSetListPopupMenu.Append(
            wx.ID_ANY, "Clear working set")
        self.workingSetListPopupMenuItemDelete = self.workingSetListPopupMenu.Append(
            wx.ID_ANY, "Move to trash")
        self.Bind(wx.EVT_MENU, self.onWorkingSetListPopupMenuItemClearClick,
                  self.workingSetListPopupMenuItemClear
                  )  # MenuItem doesn't have a Bind function
        self.Bind(wx.EVT_MENU, self.onWorkingSetListPopupMenuItemDeleteClick,
                  self.workingSetListPopupMenuItemDelete)

        # set up components
        self.statusBar = self.CreateStatusBar(style=wx.STB_DEFAULT_STYLE)
        self.statusBar.SetFieldsCount(1)
        self.statusBar.SetStatusStyles(
            [wx.SB_NORMAL]
        )  # @TODO: wx.SB_SUNKEN only available after 2.9.5<ref>http://wxpython.org/Phoenix/docs/html/StatusBar.html</ref> -> assert 3.0.x at run and compile time somewhere
        self.addFilesToWorkingSet(
            working_set)  # run after self.workingSetList has been initialized
        self.workingSet = working_set
        self.updateReviewFolderStatusText()

        self.Show(True)
Example #8
0
def osm_postgis_transform(osm_files,
                          skip_start_db,
                          data_dir=data_dir_default,
                          db_host=db_host_default,
                          db_port=db_port_default,
                          db_user=db_user_default,
                          db_password=db_password_default,
                          db_name=db_name_default,
                          cache_size=cache_size_default,
                          osm2pgsql=osm2pgsql_default):
    # the text for the help transformed by plac:
    """
    This script sets up PostGIS database with data from an OSM (.osm) file. It 
    is essentially a wrapper around `osm2pgsql`. By default it will either spawn a database process based on the data directory speified with the `--data-dir` argument (if the data directory is non-empty) or create a database data directory and spawn a database process based on that newly created data directory and feed data to it. If the nested database process can't be connected to with the default value for database connection parameters, they have to be overwritten, otherwise the script will fail with the error message of the `postgres` process.
    
    The start of a nested database process can be skipped if `--skip-start-db` command line flag is set. In this case the database connection parameters will be used to connect to an external already running `postgres` process where data will be fed to.
    
    WARNING: The script has not yet been tested completely to hide database credentials (including the password) from output and/or other logging backends (files, syslog, etc.). It is currently recommended to specify a separate database and local host for the script only and to not care about it at all (as OSM data is as far from a secret as it could be).
    """
    if osm_files is None:
        raise ValueError("osm_files mustn't be None")
    if str(type(osm_files)) != "<type 'list'>":
        raise ValueError("osm_files has to be a list")
    if len(osm_files) == 0:
        raise ValueError("osm_files mustn't be empty")
    if pg_version == (9, 2):
        if postgis_version > (2, 0):
            raise ValueError(
                "postgis > %s is not compatible with postgresql %s" %
                (postgis_version_string, pg_version_string))
    if data_dir is None:
        raise ValueError("data_dir mustn't be None")
    if os.path.exists(data_dir) and not os.path.isdir(data_dir):
        raise ValueError("data_dir '%s' exists, but isn't a directory" %
                         (data_dir, ))

    # always check, even after install_prequisites
    #@TODO: not sufficient to binary name; necessary to evaluate absolute path with respect to $PATH
    if os_utils.which(osm2pgsql) is None:
        raise RuntimeError(
            "osm2pgsql not found, make sure you have invoked osm_postgis_transform_prequisites.py"
        )

    # parsing
    # postgres binary refuses to run when process uid and effective uid are not identical
    postgres_proc = None
    try:
        if not skip_start_db:
            # database process is either started by postgis_utils.bootstrap_datadir or with pexpect.spawn if the data_dir isn't empty (indicating start of database based on existing data directory)
            if not os.path.exists(data_dir) or len(os.listdir(data_dir)) == 0:
                logger.info(
                    "creating PostGIS data directory in data_dir '%s'" %
                    (data_dir, ))
                if not os.path.exists(data_dir):
                    logger.info("creating inexisting data_dir '%s'" %
                                (data_dir, ))
                    os.makedirs(data_dir)
                postgis_utils.bootstrap_datadir(data_dir,
                                                db_user,
                                                password=db_password,
                                                initdb=initdb)
                postgis_utils.bootstrap_database(data_dir,
                                                 db_port,
                                                 db_host,
                                                 db_user,
                                                 db_name,
                                                 password=db_password,
                                                 initdb=initdb,
                                                 postgres=postgres,
                                                 createdb=createdb,
                                                 psql=psql,
                                                 socket_dir=db_socket_dir)
            if postgres_proc is None:
                logger.info(
                    "spawning database process based on existing data directory '%s'"
                    % (data_dir, ))
                postgres_proc = pexpect.spawn(
                    str.join(" ", [
                        postgres, "-D", data_dir, "-p",
                        str(db_port), "-h", db_host, "-k", db_socket_dir
                    ]))
                postgres_proc.logfile = sys.stdout
                logger.info("sleeping %s s to ensure postgres server started" %
                            postgres_server_start_timeout)
                time.sleep(
                    postgres_server_start_timeout
                )  # not nice (should poll connection until success instead)
        logger.debug("using osm2pgsql binary %s" % osm2pgsql)
        osm2pgsql_proc = pexpect.spawn(
            str.join(" ", [
                osm2pgsql, "--create", "--database", db_name, "--cache",
                str(cache_size), "--number-processes",
                str(osm2pgsql_number_processes), "--slim", "--port",
                str(db_port), "--host", db_host, "--username", db_user,
                "--latlong", "--password", "--keep-coastlines",
                "--extra-attributes", "--hstore-all"
            ] + osm_files))
        osm2pgsql_proc.logfile = sys.stdout
        osm2pgsql_proc.expect(['Password:'******'s no check for subprocess.Popen