def _start_tasks(self):
    """Spawn every server task that should be running.

    Builds the list of builtin tasks, appends one task per installed
    plugin that provides ``get_server_tasks()``, and hands everything
    that is not already running to ``self._manager``.  Finally recycles
    the default store, which the forked children invalidated.
    """
    tasks = [
        Task('_xmlrpc', start_xmlrpc_server, self._xmlrpc_conn2),
        # This is not working nice when using NTK lib (maybe related to
        # the multiprocess lib). Must be executed as a separate process
        # for now.
        #Task('_flask', start_flask_server),
        Task('_updater', start_plugins_update_scheduler,
             self._updater_event, self._doing_backup),
        Task('_backup', start_backup_scheduler, self._doing_backup),
    ]
    # TODO: Make those work on windows
    if not _is_windows:
        tasks.extend([
            Task('_htsql', start_htsql, self._htsql_port),
            Task('_server', start_server),
            Task('_rtc', start_rtc),
        ])

    manager = get_plugin_manager()
    for plugin_name in manager.installed_plugins_names:
        plugin = manager.get_plugin(plugin_name)
        if not hasattr(plugin, 'get_server_tasks'):
            continue

        # FIXME: Check that the plugin implements IPluginTask when
        # Stoq 1.11 is released
        for plugin_task in plugin.get_server_tasks():
            task_name = plugin_task.name
            name = _get_plugin_task_name(plugin_name, task_name)
            if self._manager.is_running(name):
                continue

            kwargs = {}
            if plugin_task.handle_actions:
                # Full-duplex pipe: we keep conn1 to talk to the task,
                # the task process receives conn2.
                conn1, conn2 = multiprocessing.Pipe(True)
                self._plugins_pipes[name] = conn1
                kwargs['pipe_connection'] = conn2

            # Since Windows has no os.fork, multiprocessing will actually
            # run the process again and pass the required objects by
            # pickling them. For some reason, passing a plugin task will
            # break some places, since it will make some objects
            # like PluginManager be pickled/unpickled, and when unpickling
            # it will run its constructor again, but it should wait
            # to do that until we have configured the database.
            # So pass a picklable (plugin_name, task_name) pair instead.
            func = (plugin_name, task_name)
            tasks.append(Task(name, func, **kwargs))

    for task in tasks:
        if not self._manager.is_running(task.name):
            self._manager.run_task(task)

    # Close the default store because it is not functioning anymore since
    # the forked processes closed its "clone", but open a new one later
    # or else Stoq will not be able to find this instance
    set_default_store(None)
    get_default_store()
def action_resume_tasks(self):
    """Resume the server tasks after a pause.

    Restarting is a no-op when we are not actually paused; either way
    a (success, message) pair is returned to the caller.
    """
    logger.info("Resuming the tasks as requested..")
    if not self._paused:
        return True, "Tasks resumed successfully"

    # get_default_store will recreate the store (it was closed on pause)
    get_default_store()
    self._start_tasks()
    self._paused = False
    return True, "Tasks resumed successfully"
def restore_database(user_hash, time=None):
    """Restore a database backup identified by *user_hash*.

    A safety dump of the current database is taken first; the backup is
    then fetched into a temporary directory and applied.

    :param user_hash: hash identifying whose backup to restore
    :param time: optional point in time to restore to
    :raises TaskException: if the database cannot be locked, is corrupt,
        or the safety dump fails
    """
    import shutil

    assert user_hash

    # If the database doesn't exist, get_default_store will fail
    try:
        default_store = get_default_store()
    except Exception:
        default_store = None

    if default_store is not None and db_settings.has_database():
        try:
            default_store.lock_database()
        except DatabaseError:
            raise TaskException(
                "Could not lock database. This means that there are other "
                "clients connected. Make sure to close every Stoq client "
                "before updating the database")
        except Exception:
            raise TaskException(
                "Database is empty or in a corrupted state. Fix or drop it "
                "before trying to proceed with the restore")
        else:
            default_store.unlock_database()

    # FIXME: Windows will not liberate resource for other process to
    # write to the file. We should write our own TemporaryFile on Stoq
    # that handles all those cases for us and use here
    with tempfile.NamedTemporaryFile(delete=False) as f:
        pass
    try:
        if not db_settings.dump_database(f.name):
            raise TaskException("Failed to dump the database")

        backup_name = db_settings.restore_database(f.name)
        logger.info("Created a backup of the current database state on %s",
                    backup_name)
    finally:
        os.unlink(f.name)

    tmp_path = tempfile.mkdtemp()
    try:
        restore_path = os.path.join(tmp_path, 'stoq')
        logger.info("restoring database to %s", restore_path)
        backup.restore(restore_path, user_hash, time=time)

        # None will make the default store be closed, which we need
        # to sucessfully restore the database
        set_default_store(None)
        db_settings.clean_database(db_settings.dbname, force=True)
        db_settings.execute_sql(os.path.join(restore_path, 'stoq.dump'),
                                lock_database=True)

        logger.info("Backup restore finished sucessfully")
    finally:
        # get_default_store will recreate it (since we closed it above)
        get_default_store()
        # Fix: the temporary restore dir was leaked before; remove it
        # like the sibling implementation does.
        shutil.rmtree(tmp_path, ignore_errors=True)
def _try_connect(self):
    """Try to open the default store, setting up ~/.pgpass on demand.

    Reads the store DSN from the config file, runs a minimal setup()
    and then connects.  On a DatabaseError (usually a missing password)
    the stored password is written to the pgpass file and the
    connection is retried once.
    """
    from stoqlib.lib.message import error
    try:
        store_dsn = self._config.get_settings().get_store_dsn()
    except Exception:
        # Fix: this was a bare "except:", which also swallowed
        # SystemExit/KeyboardInterrupt, and it shadowed the builtin
        # "type".  The formatted message is unchanged.
        exc_type, value, trace = sys.exc_info()
        error(_("Could not open the database config file"),
              _("Invalid config file settings, got error '%s', "
                "of type '%s'") % (value, exc_type))

    from stoqlib.database.exceptions import PostgreSQLError
    from stoqlib.database.runtime import get_default_store
    from stoqlib.exceptions import DatabaseError
    from stoqlib.lib.pgpass import write_pg_pass
    from stoq.lib.startup import setup

    # XXX: progress dialog for connecting (if it takes more than
    # 2 seconds) or creating the database
    log.debug('calling setup()')
    try:
        setup(self._config, self._options, register_station=False,
              check_schema=False, load_plugins=False)
        # the setup call above is not really trying to connect (since
        # register_station, check_schema and load_plugins are all False).
        # Try to really connect here.
        get_default_store()
    except (StoqlibError, PostgreSQLError) as e:
        log.debug('Connection failed.')
        error(_('Could not connect to the database'),
              'error=%s uri=%s' % (str(e), store_dsn))
    except DatabaseError:
        log.debug('Connection failed. Tring to setup .pgpass')
        # This is probably a missing password configuration. Setup the
        # pgpass file and try again.
        password = self._get_password()
        if not password:
            # There is no password stored in data file. Abort
            raise

        from stoqlib.database.settings import db_settings
        write_pg_pass(db_settings.dbname, db_settings.address,
                      db_settings.port, db_settings.username, password)
        # Now that there is a pg_pass file, try to connect again
        try:
            get_default_store()
        except DatabaseError as e:
            log.debug('Connection failed again.')
            error(_('Could not connect to the database'),
                  'error=%s uri=%s' % (str(e), store_dsn))
def restore_database(user_hash, time=None): assert user_hash # If the database doesn't exist, get_default_store will fail try: default_store = get_default_store() except Exception: default_store = None if default_store is not None and db_settings.has_database(): try: default_store.lock_database() except DatabaseError: raise TaskException( "Could not lock database. This means that there are other " "clients connected. Make sure to close every Stoq client " "before updating the database") except Exception: raise TaskException( "Database is empty or in a corrupted state. Fix or drop it " "before trying to proceed with the restore") else: default_store.unlock_database() with tempfile.NamedTemporaryFile() as f: if not db_settings.dump_database(f.name): raise TaskException("Failed to dump the database") backup_name = db_settings.restore_database(f.name) logger.info("Created a backup of the current database state on %s", backup_name) tmp_path = tempfile.mkdtemp() try: # None will make the default store be closed, which we need # to sucessfully restore the database set_default_store(None) restore_path = os.path.join(tmp_path, 'stoq') backup.restore(restore_path, user_hash, time=time) db_settings.clean_database(db_settings.dbname, force=True) db_settings.execute_sql(os.path.join(restore_path, 'stoq.dump'), lock_database=True) logger.info("Backup restore finished sucessfully") finally: # get_default_store will recreate it (since we closed it above) get_default_store() shutil.rmtree(tmp_path, ignore_errors=True)
def testCacheInvalidation(self):
    """Changes committed by one store must become visible to objects
    already loaded by other stores (ORM cache invalidation)."""
    # First create a new person in an outside transaction
    outside_store = new_store()
    outside_person = Person(name=u'doe', store=outside_store)
    outside_store.commit()

    # Get this person in the default store
    default_store = get_default_store()
    db_person = default_store.find(Person, id=outside_person.id).one()
    self.assertEqual(db_person.name, u'doe')

    # Now, select that same person in an inside store
    inside_store = new_store()
    inside_person = inside_store.fetch(outside_person)

    # Change and commit the changes on this inside store
    inside_person.name = u'john'
    # Flush to make sure the database was updated
    inside_store.flush()

    # Before committing, the other persons should still be 'doe'
    self.assertEqual(db_person.name, u'doe')
    self.assertEqual(outside_person.name, u'doe')

    inside_store.commit()

    # We expect the changes to reflect on the connection
    self.assertEqual(db_person.name, u'john')
    # and also on the outside store
    self.assertEqual(outside_person.name, u'john')

    outside_store.close()
    inside_store.close()
def _create_eggs_cache(self):
    """Materialize plugin eggs from the database into the plugins dir,
    skipping eggs whose cached md5sum already matches."""
    log.info("Creating cache for plugins eggs")

    # $HOME/.stoq/plugins
    store = get_default_store()
    cache_dir = os.path.join(get_application_dir(), 'plugins')
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)

    # Map egg basename (without the ".egg" suffix) -> md5 of cached file
    cached_md5sums = {}
    for egg_path in glob.iglob(os.path.join(cache_dir, '*.egg')):
        egg_name = unicode(os.path.basename(egg_path)[:-4])
        cached_md5sums[egg_name] = md5sum_for_filename(egg_path)

    # Now extract all eggs from the database and put it where stoq knows
    # how to load them
    rows = store.using(PluginEgg).find(
        (PluginEgg.plugin_name, PluginEgg.egg_md5sum))
    for plugin_name, egg_md5sum in rows:
        # A little optimization to avoid loading the egg in memory if we
        # already have a valid version cached.
        if cached_md5sums.get(plugin_name, u'') == egg_md5sum:
            log.info("Plugin %r egg md5sum matches. Skipping it..." % (
                plugin_name, ))
            continue

        log.info("Creating egg cache for plugin %r" % (plugin_name, ))
        plugin_egg = store.find(PluginEgg, plugin_name=plugin_name).one()
        target = os.path.join(cache_dir, '%s.egg' % (plugin_name, ))
        with open(target, 'wb') as f:
            f.write(plugin_egg.egg_content)
def __init__(self):
    """Hook up every ECF event handler, the Summary action and the
    keyboard bindings."""
    self._ui = None
    self.default_store = get_default_store()
    self._printer_verified = False
    # Delay printer creation until we are accessing pos or till app. Other
    # applications should still be accessible without a printer
    self._printer = None

    connections = [
        (SaleStatusChangedEvent, self._on_SaleStatusChanged),
        (ECFIsLastSaleEvent, self._on_ECFIsLastSale),
        (TillOpenEvent, self._on_TillOpen),
        (TillCloseEvent, self._on_TillClose),
        (TillAddCashEvent, self._on_TillAddCash),
        (TillAddTillEntryEvent, self._on_AddTillEntry),
        (TillRemoveCashEvent, self._on_TillRemoveCash),
        (StartApplicationEvent, self._on_StartApplicationEvent),
        (StopApplicationEvent, self._on_StopApplicationEvent),
        (CouponCreatedEvent, self._on_CouponCreatedEvent),
        (GerencialReportPrintEvent, self._on_GerencialReportPrintEvent),
        (GerencialReportCancelEvent, self._on_GerencialReportCancelEvent),
        (CheckECFStateEvent, self._on_CheckECFStateEvent),
        (HasPendingReduceZ, self._on_HasPendingReduceZ),
        (HasOpenCouponEvent, self._on_HasOpenCouponEvent),
    ]
    for event, handler in connections:
        event.connect(handler)

    self._till_summarize_action = gtk.Action(
        'Summary', _('Summary'), None, None)
    self._till_summarize_action.connect(
        'activate', self._on_TillSummary__activate)

    add_bindings([
        ('plugin.ecf.read_memory', '<Primary>F9'),
        ('plugin.ecf.summarize', '<Primary>F11'),
    ])
def _check_branch(self):
    """Make sure a MAIN_COMPANY branch exists, prompting the user to
    register one when the database has none."""
    from stoqlib.database.runtime import (get_default_store, new_store,
                                          get_current_station,
                                          set_current_branch_station)
    from stoqlib.domain.person import Company
    from stoqlib.lib.parameters import sysparam
    from stoqlib.lib.message import info

    default_store = get_default_store()
    companies = default_store.find(Company)
    if companies.count() == 0 or not sysparam.has_object('MAIN_COMPANY'):
        from stoqlib.gui.base.dialogs import run_dialog
        from stoqlib.gui.dialogs.branchdialog import BranchDialog

        if self._ran_wizard:
            msg = _("You need to register a company before start using Stoq")
        else:
            msg = _("Could not find a company. You'll need to register one "
                    "before start using Stoq")
        info(msg)

        store = new_store()
        person = run_dialog(BranchDialog, None, store)
        if not person:
            # The user gave up; there is nothing sensible to do
            raise SystemExit

        branch = person.branch
        sysparam.set_object(store, 'MAIN_COMPANY', branch)
        current_station = get_current_station(store)
        if current_station is not None:
            current_station.branch = branch
        store.commit()
        store.close()

    set_current_branch_station(default_store, station_name=None)
def _get_interface(cls, iface):
    """Return the driver interface configured for *iface* on the current
    station, or None when no matching device is registered."""
    default_store = get_default_store()
    current_station = get_current_station(default_store)
    device = DeviceSettings.get_by_station_and_type(
        default_store, current_station, iface)
    return device.get_interface() if device else None
def _check_param_online_services(self):
    """Ask the user whether to enable Stoq online services.

    Only runs while the ONLINE_SERVICES parameter is still unset; the
    answer is persisted so the dialog is shown at most once.
    """
    from stoqlib.database.runtime import get_default_store, new_store
    from stoqlib.lib.parameters import sysparam
    import gtk

    sparam = sysparam(get_default_store())
    if sparam.ONLINE_SERVICES is None:
        from kiwi.ui.dialogs import HIGAlertDialog
        # FIXME: All of this is to avoid having to set markup as the default
        # in kiwi/ui/dialogs:HIGAlertDialog.set_details, after 1.0
        # this can be simplified when we fix so that all descriptions
        # sent to these dialogs are properly escaped
        dialog = HIGAlertDialog(
            parent=None,
            flags=gtk.DIALOG_MODAL,
            type=gtk.MESSAGE_WARNING)
        dialog.add_button(_("Not right now"), gtk.RESPONSE_NO)
        dialog.add_button(_("Enable online services"), gtk.RESPONSE_YES)
        dialog.set_primary(_('Do you want to enable Stoq online services?'))
        dialog.set_details(PRIVACY_STRING, use_markup=True)
        dialog.set_default_response(gtk.RESPONSE_YES)
        response = dialog.run()
        dialog.destroy()

        # Persist the answer in a short-lived writable store
        store = new_store()
        sysparam(store).ONLINE_SERVICES = int(bool(response == gtk.RESPONSE_YES))
        store.commit()
        store.close()
def download_plugin(self, plugin_name):
    """Download a plugin from webservice

    :param plugin_name: the name of the plugin to download
    :returns: a deferred
    """
    from stoqlib.lib.webservice import WebService

    def callback(filename):
        # Persist the freshly downloaded egg in the database
        md5sum = unicode(md5sum_for_filename(filename))
        # Fix: eggs are binary (zip) files — open in binary mode so the
        # content is not corrupted on Windows (they are written with 'wb').
        with open(filename, 'rb') as f:
            with new_store() as store:
                existing_egg = store.find(PluginEgg,
                                          plugin_name=plugin_name).one()
                if existing_egg is not None:
                    existing_egg.egg_content = f.read()
                    # Fix: the column is egg_md5sum; assigning .md5sum left
                    # the stored checksum stale, forcing a redownload on
                    # every update check.
                    existing_egg.egg_md5sum = md5sum
                else:
                    PluginEgg(
                        store=store,
                        plugin_name=plugin_name,
                        egg_md5sum=md5sum,
                        egg_content=f.read(),
                    )
        self._reload()

    # Send the current md5sum so the server can answer "not modified"
    default_store = get_default_store()
    existing_egg = default_store.find(PluginEgg,
                                      plugin_name=plugin_name).one()
    md5sum = existing_egg and existing_egg.egg_md5sum
    webapi = WebService()
    return webapi.download_plugin(plugin_name, callback=callback,
                                  md5sum=md5sum)
def _build_field_cache(self):
    """Populate self._field_cache with every UIField, keyed by
    (ui_form_id, field_name).  No-op when the cache already exists."""
    # Instead of making one query for each field, let's build a cache for
    # all fields at once.
    if self._field_cache:
        return
    store = get_default_store()
    self._field_cache.update(
        {(ui_field.ui_form_id, ui_field.field_name): ui_field
         for ui_field in store.find(UIField)})
def __init__(self):
    """Set up the importer with the SERVICE tax constant from the db."""
    super(ServiceImporter, self).__init__()
    self._code = 11
    store = get_default_store()
    self.tax_constant = SellableTaxConstant.get_by_type(
        TaxType.SERVICE, store)
    # A missing SERVICE tax constant means a broken database
    assert self.tax_constant
def validate_city(value):
    """Validate *value* as a city name for the suggested state/country,
    returning a ValidationError on failure and None otherwise."""
    store = get_default_store()
    city_field = get_l10n_field(store, 'city')
    is_valid = city_field.validate(
        value,
        state=sysparam(store).STATE_SUGGESTED,
        country=sysparam(store).COUNTRY_SUGGESTED)
    if is_valid:
        return None
    return ValidationError(_("'%s' is not a valid %s.") %
                           (value, city_field.label.lower()))
def _enable_plugin(self, plugin_model):
    """Install and activate the plugin described by *plugin_model*."""
    name = plugin_model.name
    # This should not really be necessary, but there may be deadlocks when
    # activating the plugin. See bug 5272
    get_default_store().commit()
    self._manager.install_plugin(name)
    self._manager.activate_plugin(name)
def __init__(self):
    """Connect the books-plugin event handlers and keyboard bindings."""
    self._ui = None
    self.default_store = get_default_store()
    for event, handler in [
            (StartApplicationEvent, self._on_StartApplicationEvent),
            (EditorSlaveCreateEvent, self._on_EditorSlaveCreateEvent)]:
        event.connect(handler)
    add_bindings([
        ('plugin.books.search_books', '<Primary><Alt>B'),
        ('plugin.books.search_publishers', '<Primary><Alt>P'),
    ])
def get_header_data():
    """Build the report header for the current branch.

    :returns: a dict with a 'title' (the branch description) and a
        'lines' list containing, when available, an address line, a
        contact line and a company-details line
    :raises DatabaseInconsistency: if the branch's person has no name
    """
    default_store = get_default_store()
    branch = get_current_branch(default_store)
    person = branch.person
    company = person.company
    main_address = person.get_main_address()

    if not person.name:  # pragma nocover
        raise DatabaseInconsistency("The person by ID %r should have a "
                                    "name at this point" % (person.id, ))

    data = {
        'title': branch.get_description(),
        'lines': [],
    }

    # Address
    if main_address:
        address_parts = []
        address_parts.append(main_address.get_address_string())
        if main_address.postal_code:
            address_parts.append(main_address.postal_code)
        if main_address.get_city():
            address_parts.append(main_address.get_city())
        if main_address.get_state():
            address_parts.append(main_address.get_state())

        if address_parts:
            data['lines'].append(' - '.join(address_parts))

    # Contact
    contact_parts = []
    if person.phone_number:
        contact_parts.append(format_phone_number(person.phone_number))
    if person.mobile_number:
        contact_parts.append(format_phone_number(person.mobile_number))
    if person.fax_number:
        contact_parts.append(_("Fax: %s") %
                             format_phone_number(person.fax_number))
    if person.email:
        contact_parts.append(person.email)

    if contact_parts:
        data['lines'].append(' - '.join(contact_parts))

    # Company details
    if company:
        company_parts = []
        if company.get_cnpj_number():
            company_parts.append(_("CNPJ: %s") % company.cnpj)
        if company.get_state_registry_number():
            company_parts.append(_("State Registry: %s") %
                                 company.state_registry)

        if company_parts:
            data['lines'].append(' - '.join(company_parts))

    return data
def __init__(self):
    """Connect the optical-plugin event handlers and keyboard bindings."""
    self._ui = None
    self.default_store = get_default_store()
    for event, handler in [
            (StartApplicationEvent, self._on_StartApplicationEvent),
            (StopApplicationEvent, self._on_StopApplicationEvent),
            (EditorSlaveCreateEvent, self._on_EditorSlaveCreateEvent)]:
        event.connect(handler)
    add_bindings([
        ('plugin.optical.pre_sale', ''),
    ])
def _logout(self):
    """Log out the active user, ignoring database errors on shutdown."""
    from stoqlib.database.runtime import (get_current_user,
                                          get_default_store)
    log.debug('Logging out the current user')
    try:
        current_user = get_current_user(get_default_store())
        if current_user:
            current_user.logout()
    except StoqlibError:
        # Best effort: the database may already be unavailable here
        pass
def __init__(self):
    """Validate that subclasses declared the required class variables
    and grab the default store."""
    cls_name = self.__class__.__name__
    if self.patch_resource is None:
        raise ValueError(
            _("%s needs to have the patch_resource class variable set") % (
                cls_name))
    if self.patch_patterns is None:
        raise ValueError(
            _("%s needs to have the patch_patterns class variable set") % (
                cls_name))
    self.default_store = get_default_store()
def download_plugin(self, plugin_name):
    """Download a plugin from webservice

    :param plugin_name: the name of the plugin to download
    :returns: a (success, message) tuple
    """
    from stoqlib.lib.webservice import WebService

    # Send the current md5sum so the server can answer 204 (no update)
    default_store = get_default_store()
    existing_egg = default_store.find(PluginEgg,
                                      plugin_name=plugin_name).one()
    md5sum = existing_egg and existing_egg.egg_md5sum
    webapi = WebService()

    r = webapi.download_plugin(plugin_name, md5sum=md5sum)
    try:
        response = r.get_response()
    except Exception as e:
        # Fix: format outside of _() — formatting inside produced a
        # dynamic msgid that could never match the translation catalog.
        return False, _("Failed to do the request: %s") % (e, )

    code = response.status_code
    if code == 204:
        msg = _("No update needed. The plugin is already up to date.")
        log.info(msg)
        return True, msg

    if code != 200:
        return_messages = {
            400: _("Plugin not available for this stoq version"),
            401: _("The instance is not authorized to download the plugin"),
            404: _("Plugin does not exist"),
            405: _("This instance has not acquired the specified plugin"),
        }
        msg = return_messages.get(code, str(code))
        log.warning(msg)
        return False, msg

    # Persist the new egg content and its checksum
    content = response.content
    md5sum = unicode(hashlib.md5(content).hexdigest())
    with new_store() as store:
        existing_egg = store.find(PluginEgg,
                                  plugin_name=plugin_name).one()
        if existing_egg is not None:
            existing_egg.egg_content = content
            existing_egg.egg_md5sum = md5sum
        else:
            PluginEgg(
                store=store,
                plugin_name=plugin_name,
                egg_md5sum=md5sum,
                egg_content=content,
            )

    self._reload()
    return True, _("Plugin download successful")
def _enable_plugin(self, plugin_model):
    """Install/activate *plugin_model* and ask the user to restart."""
    name = plugin_model.name
    # This should not really be necessary, but there may be deadlocks when
    # activating the plugin. See bug 5272
    get_default_store().commit()
    self._manager.install_plugin(name)
    self._manager.activate_plugin(name)
    info(_("The plugin %s was successfully activated. Please, restart all "
           "Stoq instances connected to this installation.") % (name, ))
def __init__(self):
    """Connect the optical-plugin event handlers and keyboard bindings."""
    self._ui = None
    self.default_store = get_default_store()
    for event, handler in [
            (StartApplicationEvent, self._on_StartApplicationEvent),
            (StopApplicationEvent, self._on_StopApplicationEvent),
            (EditorCreateEvent, self._on_EditorCreateEvent),
            (RunDialogEvent, self._on_RunDialogEvent)]:
        event.connect(handler)
    add_bindings([
        ('plugin.optical.pre_sale', ''),
        ('plugin.optical.search_medics', ''),
    ])
def get_full_date(date):
    """Return *date* formatted in the locale's full style.

    For example, for Brazil, it will return something like:

        01 de janeiro de 2012

    In the generic case, it will return something like:

        January 01, 2012
    """
    fmt = get_l10n_field(get_default_store(), "full_date_format")
    return date.strftime(fmt)
def _maybe_correct_demo_position(self, shell_window):
    # Possibly correct window position (livecd workaround for small
    # screens)
    from stoqlib.database.runtime import get_default_store
    from stoqlib.lib.parameters import sysparam
    from stoqlib.lib.pluginmanager import get_plugin_manager

    manager = get_plugin_manager()
    if not sysparam(get_default_store()).DEMO_MODE:
        return
    if not manager.is_active(u'ecf'):
        return

    x, y = shell_window.toplevel.get_position()
    if x < 220:
        shell_window.toplevel.move(220, y)
def _start_tasks(self):
    """Start the builtin server tasks plus one per plugin-provided task.

    Tasks already known to ``self._manager`` as running are skipped.
    After spawning, the default store is recycled because the forked
    children invalidated its connection.
    """
    tasks = [
        Task("_backup", start_backup_scheduler),
        Task("_server", start_server),
        Task("_rtc", start_rtc),
        Task("_xmlrpc", start_xmlrpc_server, self._xmlrpc_conn2),
        Task("_updater", start_plugins_update_scheduler,
             self._updater_event),
    ]

    manager = get_plugin_manager()
    for plugin_name in manager.installed_plugins_names:
        plugin = manager.get_plugin(plugin_name)
        if not hasattr(plugin, "get_server_tasks"):
            continue

        # FIXME: Check that the plugin implements IPluginTask when
        # Stoq 1.11 is released
        for plugin_task in plugin.get_server_tasks():
            task_name = plugin_task.name
            name = _get_plugin_task_name(plugin_name, task_name)
            if self._manager.is_running(name):
                continue

            kwargs = {}
            if plugin_task.handle_actions:
                # Full-duplex pipe: we keep conn1 to talk to the task,
                # the task process receives conn2.
                conn1, conn2 = multiprocessing.Pipe(True)
                self._plugins_pipes[name] = conn1
                kwargs["pipe_connection"] = conn2

            tasks.append(Task(name, plugin_task.start, **kwargs))

    for task in tasks:
        if not self._manager.is_running(task.name):
            self._manager.run_task(task)

    # Close the default store because it is not functioning anymore since
    # the forked processes closed its "clone", but open a new one later
    # or else Stoq will not be able to find this instance
    set_default_store(None)
    get_default_store()
def _terminate(self, restart=False, app=None): log.info("Terminating Stoq") # This removes all temporary files created when calling # get_resource_filename() that extract files to the file system import pkg_resources pkg_resources.cleanup_resources() log.debug('Stopping deamon') from stoqlib.lib.daemonutils import stop_daemon stop_daemon() # Finally, go out of the reactor and show possible crash reports log.debug("Show some crash reports") self._show_crash_reports() # Make sure that no connection is left open (specially on Windows) try: from stoqlib.database.runtime import get_default_store get_default_store().close() except Exception: pass if restart: from stoqlib.lib.process import Process log.info('Restarting Stoq') args = [sys.argv[0], '--no-splash-screen'] if app is not None: args.append(app) Process(args) # os._exit() forces a quit without running atexit handlers # and does not block on any running threads # FIXME: This is the wrong solution, we should figure out why there # are any running threads/processes at this point log.debug("Terminating by calling os._exit()") os._exit(0) raise AssertionError("Should never happen")
def __init__(self):
    """Prepare the importer: pick a supplier, build the unit map and
    resolve the default product tax constant."""
    super(ProductImporter, self).__init__()
    store = get_default_store()

    suppliers = store.find(Supplier)
    if not suppliers.count():
        raise ValueError(u'You must have at least one suppliers on your '
                         u'database at this point.')
    self.supplier = suppliers[0]

    # Map unit description -> SellableUnit for quick lookup
    self.units = {unit.description: unit
                  for unit in store.find(SellableUnit)}
    self.tax_constant = sysparam(store).DEFAULT_PRODUCT_TAX_CONSTANT
def __init__(self):
    """Initialize the ECF UI, deferring printer creation."""
    self._ui = None
    # Delay printer creation until we are accessing pos or till app. Other
    # applications should still be accessible without a printer
    self._printer = None
    self._printer_verified = False
    self.default_store = get_default_store()

    self._setup_params()
    self._setup_events()

    bindings = [
        ('plugin.ecf.read_memory', '<Primary>F9'),
        ('plugin.ecf.summarize', '<Primary>F11'),
    ]
    add_bindings(bindings)
def _get_booklets_data(self, payments):
    """Yield one Settable per store-credit payment, ordered by due date,
    with everything the booklet report needs pre-formatted.

    :param payments: payments to generate booklets for; non store-credit
        payments are skipped
    """
    payments = sorted(payments, key=operator.attrgetter('due_date'))
    # Total number of installments, read from the first payment's group
    n_total_inst = payments[0].group.installments_number
    default_store = get_default_store()
    branch = get_current_branch(default_store)

    for i, payment in enumerate(payments):
        # Booklets only make sense for store credit payments
        if payment.method.method_name != 'store_credit':
            continue

        group = payment.group
        sale = group.sale
        drawer_company = self._get_drawer(payment)
        drawer_person = drawer_company.person
        drawee_person = group.payer
        emission_address = branch.person.get_main_address()
        emission_location = emission_address.city_location

        if sale:
            order_identifier = unicode(sale.identifier)
            total_value = sale.get_total_sale_amount()
        else:
            # Support non-sale booklets
            order_identifier = ''
            total_value = None

        booklet = Settable(
            order_identifier=order_identifier,
            payment_number=unicode(payment.identifier),
            installment=self._format_installment(payment.installment_number,
                                                 n_total_inst),
            emission_date=datetime.date.today(),
            due_date=payment.due_date,
            value=payment.value,
            total_value=total_value,
            drawer=drawer_company.get_description(),
            drawee=drawee_person.name,
            drawer_document=self._get_person_document(drawer_person),
            drawee_document=self._get_person_document(drawee_person),
            drawee_phone_number=self._get_person_phone(drawee_person),
            drawee_address=self._get_person_address(drawee_person),
            drawer_address=self._get_person_address(drawer_person),
            instructions=self._get_instructions(payment),
            demonstrative=self._get_demonstrative(payment),
            emission_city=emission_location.city,
        )
        yield booklet
def __init__(self):
    """Prepare the importer: pick a supplier, build the unit map and
    resolve the default product tax constant id."""
    super(ProductImporter, self).__init__()
    store = get_default_store()

    suppliers = store.find(Supplier)
    if not suppliers.count():
        raise ValueError(u'You must have at least one suppliers on your '
                         u'database at this point.')
    self.supplier = suppliers[0]

    # Map unit description -> SellableUnit for quick lookup
    self.units = {unit.description: unit
                  for unit in store.find(SellableUnit)}
    self.tax_constant_id = sysparam.get_object_id(
        'DEFAULT_PRODUCT_TAX_CONSTANT')
    self._code = 1
def __init__(self):
    """Validate required class variables and check that the needed
    PostgreSQL extensions are installed."""
    cls_name = self.__class__.__name__
    if self.patch_resource is None:
        raise ValueError(
            _("%s needs to have the patch_resource class variable set")
            % (cls_name))
    if self.patch_patterns is None:
        raise ValueError(
            _("%s needs to have the patch_patterns class variable set")
            % (cls_name))

    self.default_store = get_default_store()
    try:
        check_extensions(store=self.default_store)
    except ValueError:
        error("Missing PostgreSQL extension on the server, "
              "please install postgresql-contrib")
def start_plugins_update_scheduler(event, doing_backup):
    """Daemon loop that checks for plugin updates once per day.

    :param event: an Event set when updated plugins require a restart
    :param doing_backup: shared value; restart is delayed while truthy
    """
    _setup_signal_termination()

    if not api.sysparam.get_bool('ONLINE_SERVICES'):
        logger.info(
            "ONLINE_SERVICES not enabled. Not scheduling plugin updates...")
        return

    manager = get_plugin_manager()
    while True:
        last_check_str = UserSettings().get('last-plugins-update', None)
        last_check = (dateutil.parser.parse(last_check_str)
                      if last_check_str else datetime.datetime.min)
        # Check for updates once per day
        if last_check.date() == datetime.date.today():
            time.sleep(24 * 60 * 60)
            continue

        logger.info("Checking for plugins updates...")
        updated = False
        default_store = get_default_store()
        for egg in default_store.find(PluginEgg):
            md5sum = egg.egg_md5sum
            manager.download_plugin(egg.plugin_name)
            # If download_plugin updated the plugin, autoreload will
            # make egg be reloaded from the database
            if md5sum != egg.egg_md5sum:
                updated = True

        # Remember when we last looked, updated or not
        settings = UserSettings()
        settings.set('last-plugins-update',
                     datetime.datetime.now().isoformat())
        settings.flush()

        if updated:
            # Wait until any running backup is finished and restart
            while doing_backup.value:
                time.sleep(60)
            logger.info("Some plugins were updated. Restarting now "
                        "to reflect the changes...")
            event.set()
        else:
            logger.info("No update was found...")
def generate(filename, start, end):
    """Generate a sintegra file for all changes in the system
    between start and end dates. Start and end are normally the
    first and last day of a month

    :param filename: filename to save the sintegra file
    :param start: start date
    :type start: datetime.date
    :param end: end date
    :type end: datetime.date
    """
    generator = StoqlibSintegraGenerator(get_default_store(), start, end)
    # Fix: use a context manager so the file is closed even when a
    # register fails to serialize (the handle was leaked on error).
    with open(filename, 'wb') as fp:
        for register in generator.sintegra.get_registers():
            # NOTE(review): the 2007 -> 2006 substitution looks like a
            # fixture hack — confirm it is intentional before relying on
            # this for production output.
            fp.write(register.get_string().replace('2007', '2006'))
def _create_eggs_cache(self):
    """Extract plugin eggs from the database into a temporary dir that
    is removed when the process exits."""
    self._eggs_cache = tempfile.mkdtemp(prefix='stoq', suffix='eggs')
    log.info("Eggs cache created in %s", self._eggs_cache)

    # Now extract all eggs from the database and put it where stoq knows
    # how to load them
    store = get_default_store()
    for egg in store.find(PluginEgg):
        name = egg.plugin_name
        log.info("Creating egg cache for plugin %r" % (name, ))
        target = os.path.join(self._eggs_cache, '{}.egg'.format(name))
        with open(target, 'wb') as f:
            f.write(egg.egg_content)

    # Best-effort cleanup of the cache dir at interpreter exit
    atexit.register(
        lambda: shutil.rmtree(self._eggs_cache, ignore_errors=True))
def feedback(self, screen, email, feedback):
    """Send user feedback plus environment details to the webservice
    (GET feedback.json)."""
    default_store = get_default_store()
    params = dict(
        hash=sysparam.get_string('USER_HASH'),
        cnpj=get_main_cnpj(default_store),
        demo=sysparam.get_bool('DEMO_MODE'),
        dist=' '.join(platform.dist()),
        email=email,
        feedback=feedback,
        plugins=', '.join(InstalledPlugin.get_plugin_names(default_store)),
        product_key=get_product_key(),
        screen=screen,
        time=datetime.datetime.today().isoformat(),
        uname=' '.join(platform.uname()),
        version=self._get_version(),
    )
    return self._do_request('GET', 'feedback.json', **params)
def feedback(self, screen, email, feedback, **kwargs):
    """POST user feedback plus environment details to the v1 feedback
    endpoint keyed by the installation's USER_HASH."""
    default_store = get_default_store()
    payload = dict(
        cnpj=get_main_cnpj(default_store),
        demo=sysparam.get_bool('DEMO_MODE'),
        dist=' '.join(platform.dist()),
        email=email,
        feedback=feedback,
        plugins=', '.join(InstalledPlugin.get_plugin_names(default_store)),
        product_key=get_product_key(),
        screen=screen,
        time=datetime.datetime.today().isoformat(),
        uname=' '.join(platform.uname()),
        version=self._get_version(),
    )
    endpoint = 'api/stoq/v1/feedback/%s' % (sysparam.get_string('USER_HASH'), )
    return self._do_request('POST', endpoint, json=payload, **kwargs)
def create_window(self):
    """ Creates a new shell window.

    Note that it will not contain any applications and it will be hidden.

    :returns: the shell_window
    """
    from stoq.gui.shell.shellwindow import ShellWindow
    from stoqlib.database.runtime import get_default_store

    window = ShellWindow(self._options,
                         shell=self,
                         store=get_default_store())
    self.windows.append(window)
    self._maybe_correct_demo_position(window)
    return window
def __init__(self, filename, payments_list, salesperson_name,
             *args, **kwargs):
    """Build the payments report, titled for an optional salesperson."""
    branch_name = get_current_branch(get_default_store()).get_description()
    self.payments_list = payments_list

    if salesperson_name:
        singular = _("payment for {salesperson} on branch {branch}").format(
            salesperson=salesperson_name, branch=branch_name)
        plural = _("payments for {salesperson} on branch {branch}").format(
            salesperson=salesperson_name, branch=branch_name)
    else:
        singular = _("payment on branch %s") % branch_name
        plural = _("payments on branch %s") % branch_name
    self.main_object_name = (singular, plural)

    # Without a salesperson filter the table is wider, so use landscape
    self.landscape = salesperson_name is None
    self._sales_person = salesperson_name

    TableReport.__init__(self, filename, payments_list, self.title,
                         *args, **kwargs)
def _get_cnpj(self):
    """Return the CNPJ of the MAIN_COMPANY branch, or '' when it cannot
    be determined (no database, or no MAIN_COMPANY configured)."""
    # We avoid using SQLObject, otherwise crash-reporting will break
    # for errors that happens in patches modifying any of the
    # tables in the FROM clause below
    try:
        default_store = get_default_store()
    except StoqlibError:
        # No database available/configured yet
        return ''

    result = default_store.execute("""SELECT company.cnpj
          FROM parameter_data, branch, company, person
          WHERE field_name = 'MAIN_COMPANY' AND
                branch.id = field_value::int AND
                branch.person_id = person.id AND
                company.person_id = person.id;""")
    data = result.get_one()
    result.close()
    if data:
        return data[0]
    return ''
def __init__(self):
    # ECF (fiscal printer) UI entry point: sets up parameters, event
    # handlers, the till-summary action and keyboard shortcuts.
    self._ui = None
    self.default_store = get_default_store()
    self._printer_verified = False
    # Delay printer creation until we are accessing pos or till app. Other
    # applications should still be accessible without a printer
    self._printer = None
    self._setup_params()
    self._setup_events()
    self._till_summarize_action = Gtk.Action(
        'Summary', _('Summary'), None, None)
    self._till_summarize_action.connect(
        'activate', self._on_TillSummary__activate)
    # Global shortcuts for reading the fiscal memory and summarizing.
    add_bindings([
        ('plugin.ecf.read_memory', '<Primary>F9'),
        ('plugin.ecf.summarize', '<Primary>F11'),
    ])
def __init__(self):
    # Optical plugin UI entry point: connects to shell/editor/search
    # events and registers (empty) keyboard bindings.
    # This will contain a mapping of (appname, uimanager) -> extra_ui.
    # We need to store it like this because each window has its own
    # unique uimanager, and we have an extra_ui for different apps.
    self._app_ui = dict()
    self.default_store = get_default_store()
    StartApplicationEvent.connect(self._on_StartApplicationEvent)
    StopApplicationEvent.connect(self._on_StopApplicationEvent)
    EditorCreateEvent.connect(self._on_EditorCreateEvent)
    RunDialogEvent.connect(self._on_RunDialogEvent)
    PrintReportEvent.connect(self._on_PrintReportEvent)
    SearchDialogSetupSearchEvent.connect(
        self._on_SearchDialogSetupSearchEvent)
    ApplicationSetupSearchEvent.connect(
        self._on_ApplicationSetupSearchEvent)
    # Bindings are registered without default accelerators.
    add_bindings([
        ('plugin.optical.pre_sale', ''),
        ('plugin.optical.search_medics', ''),
    ])
def feedback(self, screen, email, feedback):
    """Submit user feedback to the feedback web service.

    :param screen: name of the screen the feedback was sent from
    :param email: the user's contact e-mail
    :param feedback: the feedback text
    :returns: the result of :meth:`._do_request`
    """
    app_info = get_utility(IAppInfo, None)
    if app_info:
        app_version = app_info.get('version')
    else:
        app_version = 'Unknown'
    default_store = get_default_store()
    params = {
        'cnpj': self._get_cnpj(),
        'demo': sysparam(default_store).DEMO_MODE,
        # NOTE(review): platform.dist() was removed in Python 3.8 —
        # confirm which Python versions this code still supports.
        'dist': ' '.join(platform.dist()),
        'email': email,
        'feedback': feedback,
        'plugins': ', '.join(InstalledPlugin.get_plugin_names(default_store)),
        'product_key': get_product_key(),
        'screen': screen,
        'time': datetime.datetime.today().isoformat(),
        'uname': ' '.join(platform.uname()),
        'version': app_version,
    }
    # NOTE(review): params is expanded as **kwargs here, so _do_request
    # presumably accepts the request parameters as keyword arguments —
    # verify against _do_request's signature.
    return self._do_request('GET', 'feedback.json', **params)
def __init__(self):
    # Optical plugin UI entry point: connects to shell/editor/search and
    # work-order events, registers bindings and extends WorkOrderRow.
    self._setup_params()
    self.default_store = get_default_store()
    StartApplicationEvent.connect(self._on_StartApplicationEvent)
    EditorCreateEvent.connect(self._on_EditorCreateEvent)
    RunDialogEvent.connect(self._on_RunDialogEvent)
    PrintReportEvent.connect(self._on_PrintReportEvent)
    SearchDialogSetupSearchEvent.connect(self._on_SearchDialogSetupSearchEvent)
    WorkOrderStatusChangedEvent.connect(self._on_WorkOrderStatusChangedEvent)
    ApplicationSetupSearchEvent.connect(self._on_ApplicationSetupSearchEvent)
    # Bindings are registered without default accelerators.
    add_bindings([
        ('plugin.optical.pre_sale', ''),
        ('plugin.optical.search_medics', ''),
    ])
    # Whenever the model of WorkOrderActions changes, we should also
    # change ours.
    actions = WorkOrderActions.get_instance()
    actions.connect('model-set', self._on_work_order_actions__model_set)
    # Add a new option to the WorkOrderRow options menu
    WorkOrderRow.options.append((_('Create new purchase...'),
                                 'optical_work_order.OpticalNewPurchase'))
def test_cache_invalidation(self):
    # Ensure that committing a change through one store propagates to
    # the same object as seen by other stores (cache invalidation).
    # First create a new person in an outside transaction
    outside_store = new_store()
    outside_person = Person(name=u'doe', store=outside_store)
    outside_store.commit()
    # Get this person in the default store
    default_store = get_default_store()
    default_person = default_store.find(Person, id=outside_person.id).one()
    self.assertEqual(default_person.name, u'doe')
    # Now, select that same person in an inside store
    inside_store = new_store()
    inside_person = inside_store.fetch(outside_person)
    # Change and commit the changes on this inside store
    inside_person.name = u'john'
    # Flush to make sure the database was updated
    inside_store.flush()
    # Before committing, the other person objects should still be 'doe'
    self.assertEqual(default_person.name, u'doe')
    self.assertEqual(outside_person.name, u'doe')
    inside_store.commit()
    # We expect the changes to reflect on the connection
    self.assertEqual(default_person.name, u'john')
    # and also on the outside store
    self.assertEqual(outside_person.name, u'john')
    outside_store.close()
    inside_store.close()
def _values(self):
    """Return the {field_name: field_value} mapping, building it lazily.

    The mapping is loaded from ParameterData on first access and cached
    on the instance afterwards.
    """
    if self._values_cache is None:
        store = get_default_store()
        self._values_cache = {
            param.field_name: param.field_value
            for param in store.find(ParameterData)}
    return self._values_cache
def __init__(self, form_name):
    """Look up the UIForm named *form_name* in the default store."""
    self.store = get_default_store()
    self.form_name = form_name
    self.form = self.store.find(
        UIForm, form_name=form_name).one()
def get_formatted_cost(float_value, symbol=True):
    """Format *float_value* as a cost.

    The precision comes from the COST_PRECISION_DIGITS parameter.

    :param float_value: the value to format
    :param symbol: whether to include the currency symbol
    """
    from stoqlib.lib.parameters import sysparam
    digits = sysparam(get_default_store()).COST_PRECISION_DIGITS
    return get_formatted_price(float_value, symbol=symbol,
                               precision=digits)
def get_price_as_cardinal(value):
    """Spell out *value* as localized money words."""
    to_words = get_cardinal_function('to_words_as_money')
    currency_names = get_l10n_field(get_default_store(), 'currency_names')
    return to_words(value, currency_names)
def _provide_current_user():
    """Register the 'admin' LoginUser as the ICurrentUser utility."""
    store = get_default_store()
    admin = store.find(LoginUser, username=u'admin').one()
    # The admin user is created by the database bootstrap, so it must exist.
    assert admin
    provide_utility(ICurrentUser, admin, replace=True)
def validate_state(value):
    """Validate *value* as a state for the current locale.

    :returns: a ValidationError when invalid, otherwise None
    """
    state_l10n = get_l10n_field(get_default_store(), 'state')
    if state_l10n.validate(value):
        return None
    return ValidationError(
        _("'%s' is not a valid %s.") % (value, state_l10n.label.lower(), ))
def download_plugin(self, plugin_name, channel=None):
    """Download a plugin from webservice

    :param plugin_name: the name of the plugin to download
    :param channel: the channel the plugin belongs
    :returns: a (success, message) tuple
    """
    from stoqlib.lib.webservice import WebService
    default_store = get_default_store()
    existing_egg = default_store.find(PluginEgg,
                                      plugin_name=plugin_name).one()
    # Passing the current md5sum lets the server answer 204 (no update).
    md5sum = existing_egg and existing_egg.egg_md5sum
    webapi = WebService()
    r = webapi.download_plugin(plugin_name, md5sum=md5sum, channel=channel)
    try:
        response = r.get_response()
    except Exception as e:
        # FIX: interpolate *after* translation. The previous code did
        # _("... %s" % (e, )), which looked up the already-formatted
        # string in the translation catalog and could never match.
        return False, _("Failed to do the request: %s") % (e, )

    code = response.status_code
    if code == 204:
        msg = _("No update needed. The plugin is already up to date.")
        log.info(msg)
        return True, msg
    if code != 200:
        return_messages = {
            400: _("Plugin not available for this stoq version"),
            401: _("The instance is not authorized to download the plugin"),
            404: _("Plugin does not exist"),
            405: _("This instance has not acquired the specified plugin"),
        }
        msg = return_messages.get(code, str(code))
        log.warning(msg)
        return False, msg

    try:
        with io.BytesIO() as f:
            f.write(response.content)
            # Sanity-check the downloaded egg before persisting it.
            with ZipFile(f) as egg:
                if egg.testzip() is not None:
                    raise BadZipfile

            md5sum = hashlib.md5(f.getvalue()).hexdigest()
            with new_store() as store:
                existing_egg = store.find(PluginEgg,
                                          plugin_name=plugin_name).one()
                if existing_egg is not None:
                    existing_egg.egg_content = f.getvalue()
                    existing_egg.egg_md5sum = md5sum
                else:
                    PluginEgg(
                        store=store,
                        plugin_name=plugin_name,
                        egg_md5sum=md5sum,
                        egg_content=f.getvalue(),
                    )
    except BadZipfile:
        return False, _("The downloaded plugin is corrupted")

    self._reload()
    return True, _("Plugin download successful")
def egg_plugins_names(self):
    """A list of names of all plugins installed as eggs"""
    store = get_default_store()
    return [egg.plugin_name for egg in store.find(PluginEgg)]
def get_installed_plugins_names(self, store=None):
    """A list of names of all installed plugins

    :param store: the store to query, or None to use the default store
    """
    target_store = store or get_default_store()
    return InstalledPlugin.get_plugin_names(target_store)
def _start_tasks(self):
    # Spawn the server's background tasks: the core services, the
    # non-Windows services, and one task per installed plugin that
    # exposes server tasks.
    tasks = [
        Task('_xmlrpc', start_xmlrpc_server, self._xmlrpc_conn2),
        # This is not working nicely when using the NTK lib (maybe
        # related to the multiprocessing lib). Must be executed as a
        # separate process for now.
        #Task('_flask', start_flask_server),
        Task('_updater', start_plugins_update_scheduler,
             self._updater_event, self._doing_backup),
        Task('_backup', start_backup_scheduler, self._doing_backup),
    ]
    # TODO: Make those work on windows
    if not _is_windows:
        tasks.extend([
            Task('_htsql', start_htsql, self._htsql_port),
            Task('_server', start_server),
            Task('_rtc', start_rtc),
        ])

    store = get_default_store()
    # Link servers run only link-only plugin tasks; regular servers run
    # only the non-link ones (see the is_link != link_only filter below).
    is_link = store.is_link_server()
    manager = get_plugin_manager()
    for plugin_name in manager.installed_plugins_names:
        plugin = manager.get_plugin(plugin_name)
        if not hasattr(plugin, 'get_server_tasks'):
            continue

        # FIXME: Check that the plugin implements IPluginTask when
        # Stoq 1.11 is released
        for plugin_task in plugin.get_server_tasks():
            link_only = getattr(plugin_task, 'link_only', False)
            if is_link != link_only:
                continue

            task_name = plugin_task.name
            name = _get_plugin_task_name(plugin_name, task_name)
            if self._manager.is_running(name):
                continue

            kwargs = {}
            if plugin_task.handle_actions:
                # Keep our end of the pipe so we can send actions to the
                # task; the other end is handed to the task process.
                conn1, conn2 = multiprocessing.Pipe(True)
                self._plugins_pipes[name] = conn1
                kwargs['pipe_connection'] = conn2

            # Since Windows has no os.fork, multiprocessing will actually
            # run the process again and pass the required objects by
            # pickling them. For some reason, passing a plugin task will
            # break in some places, since it will make some objects
            # like PluginManager be pickled/unpickled, and when unpickling
            # it will run its constructor again, but it should wait
            # to do that until we have configured the database.
            func = (plugin_name, task_name)
            tasks.append(Task(name, func, **kwargs))

    for task in tasks:
        if not self._manager.is_running(task.name):
            self._manager.run_task(task)

    # Close the default store because it is not functioning anymore since
    # the forked processes closed its "clone", but open a new one right
    # after, or else Stoq will not be able to find this instance
    set_default_store(None)
    get_default_store()
def get_default_store(self):
    """Return the process-wide default store."""
    # Thin delegator to the module-level get_default_store() helper.
    return get_default_store()
def __init__(self, filename):
    """Bind the report to *filename* and load the company logo."""
    self.filename = filename
    self.logo_data = get_logo_data(get_default_store())
def collect_report():
    """Collect a diagnostics/crash report.

    :returns: a dict mapping report field names to the collected values
    """
    report = {}
    # Date and uptime
    report['date'] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    report['tz'] = time.tzname
    report['uptime'] = get_uptime()
    report['locale'] = get_system_locale()
    # Python and System
    import platform
    report['architecture'] = platform.architecture()
    # NOTE(review): platform.dist() was removed in Python 3.8 — confirm
    # which Python versions this code still supports.
    report['distribution'] = platform.dist()
    report['python_version'] = tuple(sys.version_info)
    report['system'] = platform.system()
    report['uname'] = platform.uname()
    # Stoq application
    info = get_utility(IAppInfo, None)
    if info and info.get('name'):
        report['app_name'] = info.get('name')
        report['app_version'] = info.get('ver')
    # External dependencies
    import gtk
    report['pygtk_version'] = gtk.pygtk_version
    report['gtk_version'] = gtk.gtk_version
    import kiwi
    report['kiwi_version'] = kiwi.__version__.version + (_get_revision(kiwi), )
    import psycopg2
    try:
        parts = psycopg2.__version__.split(' ')
        extra = ' '.join(parts[1:])
        report['psycopg_version'] = tuple(map(int, parts[0].split('.'))) + (extra, )
    except Exception:
        # FIX: was a bare "except:", which would also swallow
        # KeyboardInterrupt/SystemExit; fall back to the raw string.
        report['psycopg_version'] = psycopg2.__version__
    import reportlab
    report['reportlab_version'] = reportlab.Version.split('.')
    import stoqdrivers
    report['stoqdrivers_version'] = stoqdrivers.__version__ + (
        _get_revision(stoqdrivers), )
    # PostgreSQL database server
    try:
        default_store = get_default_store()
        result = default_store.execute('SHOW server_version;')
        pg_version = result.get_one()
        result.close()
        report['postgresql_version'] = list(map(int, pg_version[0].split('.')))
    except (StoqlibError, ValueError):
        # ValueError added: server_version strings like "12.9 (Ubuntu ...)"
        # do not parse as pure integers and should not abort the report.
        pass
    # Tracebacks
    report['tracebacks'] = {}
    for i, trace in enumerate(_tracebacks):
        t = ''.join(traceback.format_exception(*trace))
        # Eliminate duplicates. FIX: md5 requires bytes on Python 3, so
        # encode the formatted traceback first.
        md5sum = hashlib.md5(t.encode('utf-8')).hexdigest()
        report['tracebacks'][md5sum] = t
    if info and info.get('log'):
        # FIX: use a context manager so the log file handle is closed.
        with open(info.get('log')) as log_file:
            report['log'] = log_file.read()
        report['log_name'] = info.get('log')
    return report
def installed_plugins_names(self):
    """A list of names of all installed plugins"""
    store = get_default_store()
    return [plugin.plugin_name for plugin in store.find(InstalledPlugin)]