Example #1
def query_prepare_parameters(tables, grouping, ordering, fields, joins, querymappings, ua, condition, limit, cachingtime):
    # fixup sloppy parameter passing
    if isinstance(tables, basestring):
        tables = [tables]
    if isinstance(grouping, basestring):
        grouping = [grouping]
    if isinstance(ordering, basestring):
        ordering = [ordering]
    if isinstance(fields, basestring):
        fields = [fields]

    if not joins:
        joins = []
    if not grouping:
        grouping = []
    if not ordering:
        ordering = []
    if not fields:
        fields = []
    tablenames = [_get_tablename(x) for x in tables]

    if querymappings == {} and not fields:
        raise RuntimeError("Please give fieldnames.")
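    # When querymappings is None and exactly one field was requested, the mappings stay
    # unset so the caller gets bare tuples back (see the doctests in Example #2);
    # otherwise they are auto-built from MAPPINGDIR below.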
    if querymappings is None and len(fields) != 1:
        querymappings = {}
        jointables = [table for table, foo, bar in joins]
        for table in tables + jointables:
            # dubletten = set(querymappings.values()) & set(MAPPINGDIR.get(table, {}).values())
            # if dubletten:
            #     logging.warning('field name clash: %s' % list(dubletten))
            querymappings.update(MAPPINGDIR.get(table, {}))

    if not fields:  # deduce field names from querymappings
        fields = querymappings.keys()
    if not fields:  # still nothing found
        raise RuntimeError("can't deduce field names, check fields.py")

    args = dict(fields=fields, tablenames=tablenames, tag=ua)
    if condition:
        args['condition'] = condition
    if grouping:
        args['grouping'] = grouping
    if ordering:
        args['ordering'] = ordering
    if limit:
        args['limit'] = limit
    if joins:
        # ensure a list of 3-tuples
        joins = [(_get_tablename(a), b, c) for (a, b, c) in joins]
        args['joins'] = joins

    bust_cache = True
    if cachingtime > 0:
        bust_cache = False
    return args, bust_cache, querymappings, fields
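
A minimal usage sketch of the helper above, hedged: the import path (husoftm.connection2) and the concrete parameter values are assumptions for illustration, and the call relies on the module-level MAPPINGDIR and _get_tablename referenced in the code.

# Hypothetical usage; the import path is an assumption, not part of this listing.
from husoftm.connection2 import query_prepare_parameters

args, bust_cache, querymappings, fields = query_prepare_parameters(
    tables='XPN00', grouping=None, ordering=None, fields=['PNSANR', 'PNPRB'],
    joins=None, querymappings=None, ua='example-agent',
    condition="PNSANR=2255", limit=10, cachingtime=300)
# args now carries everything needed to build the SELECT, roughly:
# {'fields': ['PNSANR', 'PNPRB'], 'tablenames': [...], 'tag': 'example-agent',
#  'condition': 'PNSANR=2255', 'limit': 10}
# bust_cache is False because cachingtime > 0, so a cached result may be reused.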
Example #2
def query(tables=None, condition=None, fields=None, querymappings=None,
          joins=None,
          grouping=None, ordering=None, limit=None, ua='', cachingtime=300):
    r"""Execute a SELECT on the AS/400 turning the results in a list of dicts.

    In fields you can give a list of fields you are interested in. If fields is left empty the engine
    generates a list of field on it own by consulting the field mapping database in from
    fields.MAPPINGDIR.

    >>> query('ALK00', condition="LKLFSN=4034544") #doctest: +ELLIPSIS
    [{'lager': 100, ...}]

    To suppress mapping, provide querymappings={} together with an explicit list of fields.
    >>> query(tables=['XPN00'], condition="PNSANR=2255")
    [{'satznummer': 2255, 'preis': Decimal('16.10')}]
    >>> query(tables=['XPN00'], condition="PNSANR=2255",
    ... fields=['PNSANR', 'PNPRB'], querymappings={})
    [(2255, Decimal('16.10'))]

    To get only certain fields give a list of fieldnames in fields=[...].
    >>> query(tables=['XPN00'], condition="PNSANR=2255", fields=['PNPRB'])
    [(Decimal('16.10'),)]

    Joins are straightforward if used with condition="<expression>":
    >>> query(['XPN00', 'XPR00'], condition="PNSANR=PRSANR and PNSANR=2255",
    ... fields=['PRDTVO', 'PNPRB'])
    [{'preis': Decimal('16.10'), 'gueltig_ab_date': datetime.date(2004, 12, 16)}]

    Aggregate functions can be created by using the "grouping" keyword:
    >>> sorted(query('XLF00', fields=['LFARTN', 'SUM(LFMGLP)'], grouping=['LFARTN'],
    ... condition="LFLGNR=3"))
    [(u'65166/01', u'0'), (u'65198', u'0'), (u'76095', u'0'), (u'76102', u'0'), (u'ED76095', u'0')]

    If desired, "querymappings" can be used to return a list of dicts:
    >>> sorted(query('XLF00', fields=['LFARTN', 'SUM(LFMGLP)'], grouping=['LFARTN'],
    ... condition="LFLGNR=3", querymappings={'LFARTN': 'artnr',
    ... 'SUM(LFMGLP)': 'menge'})) #doctest: +ELLIPSIS
    [{'menge': u'0', 'artnr': u'65166/01'}, {'menge': u'0', 'artnr': u'65198'}, ...]

    You can use 'joins' to define LEFT OUTER JOINs. E.g.:
    >>> rows = query(['XKD00'],
    ...              condition="KDKDNR='%8d'" % int(66669),
    ...              joins=[('XXC00', 'KDKDNR', 'XCADNR'),
    ...                     ('XKS00', 'KDKDNR', 'KSKDNR'),
    ...                     ('AKZ00', 'KDKDNR', 'KZKDNR')])

    This will result in "SELECT * FROM XKD00 LEFT OUTER JOIN XXC00 ON KDKDNR=XCADNR LEFT OUTER
    JOIN XKS00 ON KDKDNR=KSKDNR LEFT OUTER JOIN AKZ00 ON KDKDNR=KZKDNR WHERE KDKDNR='   66669'".

    We should also be - to a certain degree - Unicode aware:
    >>> query(u'XKD00', u"KDKDNR LIKE '%18287'")[0]['ort'].encode('utf8')
    'G\xc3\xbcnzburg'

    Results are cached for 300 seconds unless you set something else via the cachingtime parameter.
    """

    # fixup sloppy parameter passing
    if isinstance(tables, basestring):
        tables = [tables]
    if isinstance(grouping, basestring):
        grouping = [grouping]
    if isinstance(ordering, basestring):
        ordering = [ordering]
    if isinstance(fields, basestring):
        fields = [fields]

    if not joins:
        joins = []
    if not grouping:
        grouping = []
    if not ordering:
        ordering = []
    if not fields:
        fields = []
    tablenames = [_get_tablename(x) for x in tables]

    if querymappings == {} and not fields:
        raise RuntimeError("Please give fieldnames.")
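    # When querymappings is None and exactly one field was requested, the mappings stay
    # unset so the caller gets bare tuples back (see the fields=['PNPRB'] doctest above);
    # otherwise they are auto-built from MAPPINGDIR below.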
    if querymappings is None and len(fields) != 1:
        querymappings = {}
        jointables = [table for table, foo, bar in joins]
        for table in tables + jointables:
            # dubletten = set(querymappings.values()) & set(MAPPINGDIR.get(table, {}).values())
            # if dubletten:
            #     logging.warning('field name clash: %s' % list(dubletten))
            querymappings.update(MAPPINGDIR.get(table, {}))

    if not fields:  # deduce field names from querymappings
        fields = querymappings.keys()
    if not fields:  # still nothing found
        raise RuntimeError("can't deduce field names, check fields.py")

    args = dict(fields=fields, tablenames=tablenames, tag=ua)
    if condition:
        args['condition'] = condition
    if grouping:
        args['grouping'] = grouping
    if ordering:
        args['ordering'] = ordering
    if limit:
        args['limit'] = limit
    if joins:
        # ensure a list of 3-tuples
        joins = [(_get_tablename(a), b, c) for (a, b, c) in joins]
        args['joins'] = joins

    bust_cache = True
    if cachingtime > 0:
        bust_cache = False
        rows = memcache.get('husoftm_query_%r_%r' % (querymappings, args))
        if rows:
            return rows

    start = time.time()
    result = execute('sql', args, ua=ua, bust_cache=bust_cache)
    rows = hujson.loads(result)

    if querymappings:
        rows = _rows2dict(fields, querymappings, rows)
    else:
        rows = [tuple([_fix_field(data, name) for data, name in zip(row, fields)]) for row in rows]

    delta = time.time() - start
    if delta > 5:
        logging.warning("Slow (%.3fs) SQL query in %s", delta, args)
    try:
        memcache.add(key='husoftm_query_%r_%r' % (querymappings, args),
                     value=rows, time=cachingtime)
    except Exception:
        pass  # value 'rows' was probably too big for memcache or memcache was offline
    return rows
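
A short usage example for query() itself, also hedged: the import path and a reachable AS/400 backend are assumptions, and the expected row contents mirror the doctests above rather than live data.

# Hypothetical usage; husoftm.connection2 as the module location is an assumption.
from husoftm.connection2 import query

# By default the result may be served from memcache for up to 300 seconds.
rows = query('XPN00', condition="PNSANR=2255")

# cachingtime=0 sets bust_cache=True, so memcache.get() is skipped and the
# backend is queried afresh.
fresh_rows = query('XPN00', condition="PNSANR=2255", cachingtime=0)
for row in fresh_rows:
    print row['preis']  # Python 2 print statement, matching the code above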