def main():
    parser = OptionParser(usage='%prog [options] dburl', version=VERSION)
    parser.add_option('--input', action='store', dest='input', metavar='FILE',
                      help='the name of the file to read from')
    parser.add_option('--ignore-errors', action='store_true',
                      dest='ignore_errors',
                      help='whether to ignore errors in document creation '
                           'and continue with the remaining documents')
    parser.add_option('--json-module', action='store', dest='json_module',
                      help='the JSON module to use ("simplejson", "cjson", '
                           'or "json" are supported)')
    parser.add_option('-u', '--username', action='store', dest='username',
                      help='the username to use for authentication')
    parser.add_option('-p', '--password', action='store', dest='password',
                      help='the password to use for authentication')
    parser.set_defaults(input='-')
    options, args = parser.parse_args()

    if len(args) != 1:
        return parser.error('incorrect number of arguments')

    if options.input != '-':
        fileobj = open(options.input, 'rb')
    else:
        fileobj = sys.stdin

    if options.json_module:
        json.use(options.json_module)

    load_db(fileobj, args[0], username=options.username,
            password=options.password, ignore_errors=options.ignore_errors)
def init_app(self):
    config = ConfigParser()
    config.read(self.args.config)
    settings = dict(config.items(self.args.section))
    self.db_name = settings.get('couchdb.db_name')
    self.db_url = settings.get('couchdb.url')
    self.server_id = settings.get('id', '1')
    self.open_db()

    if self.args.cjson:
        LOG.info("Enable cjson library")
        from couchdb import json
        json.use('cjson')

    # init api client
    self.api_url = self.args.api_url
    if self.api_url != 'disable':
        if '://' not in self.api_url:
            self.api_url = 'http://' + self.api_url
        if '/api/' not in self.api_url:
            self.api_url += '/api/2.3/tenders'
        get_with_retry(self.api_url, 'data')

    # init docs list
    if self.args.docid:
        LOG.info("Process {} documents".format(len(self.args.docid)))
        self.docs_list = self.args.docid
    elif self.args.changes:
        LOG.info("Process all documents by changes feed")
        self.docs_list = self.db_changes()
    else:
        LOG.info("Process all documents")
        self.docs_list = self.db_all_docs()
def main():
    """Command-line entry point for running the view server."""
    import getopt
    from couchdb import __version__ as VERSION

    try:
        option_list, argument_list = getopt.gnu_getopt(
            sys.argv[1:], 'h',
            ['version', 'help', 'json-module=', 'debug', 'log-file=']
        )
        message = None
        for option, value in option_list:
            if option in ('--version',):
                message = _VERSION % dict(name=os.path.basename(sys.argv[0]),
                                          version=VERSION)
            elif option in ('-h', '--help'):
                message = _HELP % dict(name=os.path.basename(sys.argv[0]))
            elif option in ('--json-module',):
                json.use(module=value)
            elif option in ('--debug',):
                log.setLevel(logging.DEBUG)
            elif option in ('--log-file',):
                if value == '-':
                    handler = logging.StreamHandler(sys.stderr)
                    handler.setFormatter(logging.Formatter(
                        ' -> [%(levelname)s] %(message)s'
                    ))
                else:
                    handler = logging.FileHandler(value)
                    handler.setFormatter(logging.Formatter(
                        '[%(asctime)s] [%(levelname)s] %(message)s'
                    ))
                log.addHandler(handler)

        if message:
            sys.stdout.write(message)
            sys.stdout.flush()
            sys.exit(0)

    except getopt.GetoptError as error:
        message = '%s\n\nTry `%s --help` for more information.\n' % (
            str(error), os.path.basename(sys.argv[0])
        )
        sys.stderr.write(message)
        sys.stderr.flush()
        sys.exit(1)

    sys.exit(run())
def main():
    """Command-line entry point for running the view server."""
    import getopt
    from couchdb import __version__ as VERSION

    try:
        option_list, argument_list = getopt.gnu_getopt(
            sys.argv[1:], 'h',
            ['version', 'help', 'json-module=', 'debug', 'log-file=']
        )
        message = None
        for option, value in option_list:
            if option in ('--version',):
                message = _VERSION % dict(name=os.path.basename(sys.argv[0]),
                                          version=VERSION)
            elif option in ('-h', '--help'):
                message = _HELP % dict(name=os.path.basename(sys.argv[0]))
            elif option in ('--json-module',):
                json.use(module=value)
            elif option in ('--debug',):
                log.setLevel(logging.DEBUG)
            elif option in ('--log-file',):
                if value == '-':
                    handler = logging.StreamHandler(sys.stderr)
                    handler.setFormatter(logging.Formatter(
                        ' -> [%(levelname)s] %(message)s'
                    ))
                else:
                    handler = logging.FileHandler(value)
                    handler.setFormatter(logging.Formatter(
                        '[%(asctime)s] [%(levelname)s] %(message)s'
                    ))
                log.addHandler(handler)

        if message:
            sys.stdout.write(message)
            sys.stdout.flush()
            sys.exit(0)

    except getopt.GetoptError as error:
        message = '%s\n\nTry `%s --help` for more information.\n' % (
            str(error), os.path.basename(sys.argv[0])
        )
        sys.stderr.write(message)
        sys.stderr.flush()
        sys.exit(1)
def main():
    parser = OptionParser(usage='%prog [options] dburl', version=VERSION)
    parser.add_option('--json-module', action='store', dest='json_module',
                      help='the JSON module to use ("simplejson", "cjson", '
                           'or "json" are supported)')
    parser.add_option('-u', '--username', action='store', dest='username',
                      help='the username to use for authentication')
    parser.add_option('-p', '--password', action='store', dest='password',
                      help='the password to use for authentication')
    parser.set_defaults()
    options, args = parser.parse_args()

    if len(args) != 1:
        return parser.error('incorrect number of arguments')

    if options.json_module:
        json.use(options.json_module)

    dump_db(args[0], username=options.username, password=options.password)
def _go():
    """ Go """
    usage = 'usage: %prog SRC_URL'
    parser = OptionParser(usage=usage)
    options, args = parser.parse_args()
    if len(args) > 1:
        parser.error('Invalid arguments.')
    elif len(args) == 0:
        parser.error('SRC_URL argument is required.')

    couchdb_json.use('simplejson')

    d = Dump(*args)
    start_time = time.time()
    start_clock = time.clock()
    d.run()
    end_time = time.time()
    end_clock = time.clock()

    sys.stderr.write("Finished.\n")
    sys.stderr.write("Elapsed Time: %f\n" % (end_time - start_time))
    sys.stderr.write("Elapsed Clock: %f\n" % (end_clock - start_clock))
    return True
    AUCTION_WORKER_SERVICE_END_FIRST_PAUSE)
from openprocurement.auction.esco.utils import (prepare_initial_bid_stage,
                                                prepare_results_stage,
                                                sorting_start_bids_by_amount,
                                                dumps, loads)
from openprocurement.auction.utils import \
    get_latest_bid_for_bidder, sorting_by_amount, delete_mapping

LOGGER = logging.getLogger('Auction Esco')
SCHEDULER = GeventScheduler(job_defaults={"misfire_grace_time": 100},
                            executors={'default': AuctionsExecutor()},
                            logger=LOGGER)
SCHEDULER.timezone = TIMEZONE

use(encode=dumps, decode=loads)


class Auction(ESCODBServiceMixin, RequestIDServiceMixin,
              EscoAuditServiceMixin, ESCOBiddersServiceMixin,
              DateTimeServiceMixin, EscoStagesMixin, EscoPostAuctionMixin):
    """ESCO Auction Worker Class"""

    def __init__(self, tender_id, worker_defaults,
                 auction_data={}, lot_id=None):
        self.generate_request_id()
        self.tender_id = tender_id
        self.lot_id = lot_id
        if lot_id:
    AUCTION_WORKER_SERVICE_AUCTION_RESCHEDULE
from openprocurement.auction.insider.utils import prepare_audit, \
    update_auction_document, lock_bids, prepare_results_stage, \
    normalize_audit, normalize_document
from openprocurement.auction.utils import delete_mapping, sorting_by_amount

LOGGER = logging.getLogger('Auction Worker Insider')
SCHEDULER = GeventScheduler(job_defaults={"misfire_grace_time": 100},
                            executors={'default': AuctionsExecutor()},
                            logger=LOGGER)
SCHEDULER.timezone = TIMEZONE

import simplejson
use(
    encode=partial(simplejson.dumps, use_decimal=True),
    decode=partial(simplejson.loads, use_decimal=True),
)


class Auction(DutchDBServiceMixin, AuditServiceMixin,
              DateTimeServiceMixin, RequestIDServiceMixin,
              DutchAuctionPhase, SealedBidAuctionPhase,
              BestBidAuctionPhase, DutchPostAuctionMixin):
    """ Dutch Auction Worker Class """

    def __init__(self, tender_id, worker_defaults={}, auction_data={}):
        self.tender_id = tender_id
        self.auction_doc_id = tender_id
        self._end_auction_event = Event()
        self.tender_url = urljoin(
            worker_defaults["resource_api_server"],
            '/api/{0}/auctions/{1}'.format(
                worker_defaults["resource_api_version"], tender_id))
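The worker above swaps couchdb's default JSON handling for simplejson with use_decimal=True. A minimal sketch of what that override buys, assuming couchdb-python's json module also exposes encode()/decode() helpers and that simplejson is installed (the document value is invented for illustration): numbers round-trip through the couchdb json layer as decimal.Decimal instead of float.

from decimal import Decimal
from functools import partial

import simplejson
from couchdb import json as couchdb_json

# Same override as in the worker above: serialize and parse with
# simplejson, preserving exact decimal values instead of binary floats.
couchdb_json.use(
    encode=partial(simplejson.dumps, use_decimal=True),
    decode=partial(simplejson.loads, use_decimal=True),
)

doc = {'amount': Decimal('1250.75')}    # hypothetical bid value
raw = couchdb_json.encode(doc)          # '{"amount": 1250.75}'
assert couchdb_json.decode(raw)['amount'] == Decimal('1250.75')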
def main():
    usage = '%prog [options] SOURCE_URL TARGET_URL1 [TARGET_URL2 ...]'
    parser = optparse.OptionParser(usage=usage, version=VERSION)
    parser.add_option('--batch-threshold', action='store',
                      dest='batch_threshold', default=0, metavar='NUM',
                      help='number of changes that are to be replicated')
    parser.add_option('--wait-threshold', action='store',
                      dest='wait_threshold', default=0.01, metavar='SECS',
                      help='number of seconds to wait before triggering '
                           'replication')
    parser.add_option('--ignore-deletes', action='store_true',
                      dest='ignore_deletes',
                      help='whether to ignore "delete" notifications')
    parser.add_option('--debug', action='store_true', dest='debug',
                      help='enable debug logging; requires --log-file to be '
                           'specified')
    parser.add_option('--log-file', action='store', dest='log_file',
                      metavar='FILE',
                      help='name of the file to write log messages to, or "-" '
                           'to enable logging to the standard error stream')
    parser.add_option('--json-module', action='store', dest='json_module',
                      metavar='NAME',
                      help='the JSON module to use ("simplejson", "cjson", or '
                           '"json" are supported)')
    options, args = parser.parse_args()
    if len(args) < 2:
        parser.error("need at least one source and target server")
        sys.exit(1)

    src_url = args[0]
    targets = [URLSPLIT_RE.match(url).groupdict() for url in args[1:]]

    if options.debug:
        log.setLevel(logging.DEBUG)

    if options.log_file:
        if options.log_file == '-':
            handler = logging.StreamHandler(sys.stderr)
            handler.setFormatter(logging.Formatter(
                ' -> [%(levelname)s] %(message)s'
            ))
        else:
            handler = logging.FileHandler(options.log_file)
            handler.setFormatter(logging.Formatter(
                '[%(asctime)s] [%(levelname)s] %(message)s'
            ))
        log.addHandler(handler)

    if options.json_module:
        json.use(options.json_module)

    log.debug('Syncing changes from %r to %r', src_url, targets)

    try:
        ReplicationHelper(src_url, targets, options)()
    except Exception as e:
        log.exception(e)
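Most of the command-line tools above expose the module-name form of this call through a --json-module option. A minimal sketch of that form, assuming only couchdb-python and the standard library are available ('simplejson' and 'cjson' would need their own packages) and assuming couchdb.json exposes encode()/decode() helpers as in couchdb-python:

from couchdb import json

json.use('json')                    # select the stdlib json implementation
raw = json.encode({'ok': True})     # -> '{"ok": true}'
assert json.decode(raw) == {'ok': True}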