def get_fund_nav(mfund_list, start_date, end_date, limit_n):
    """Fetch NAV (net asset value) series for each fund in ``mfund_list``.

    For every fund id, rows are read from the ``ff_nav`` table between
    ``start_date`` and ``end_date`` (inclusive).  Funds with fewer than
    ``limit_n`` observations are dropped.  The surviving series are then
    reindexed onto the union of all their date indexes, padding values
    forward over gaps.

    :param mfund_list: iterable of fund ids (``_id`` column values)
    :param start_date: inclusive lower bound for the ``da`` column
    :param end_date: inclusive upper bound for the ``da`` column
    :param limit_n: minimum number of rows a fund must have to be kept
    :returns: dict mapping fund id -> DataFrame with a DatetimeIndex named
        ``date`` and a single ``nav`` column
    """
    symbol_data = {}
    comb_index = None
    kept_funds = []
    for s in mfund_list:
        rows = utility.ttld_jim.get_dao('fund').select(
            'ff_nav',
            where={'_id': s, 'da >=': start_date, 'da <=': end_date},
            columns=raw(
                'to_char(da, \'YYYY-MM-DD\') da, fund_chinese_name, nav'),
            order_by='da asc',
            limit=5000000)
        # Skip funds with too little history.
        if len(rows) < limit_n:
            continue
        kept_funds.append(s)
        da_list = [row['da'] for row in rows]
        price_list = [row['nav'] for row in rows]
        data_bars = DataFrame(price_list, index=da_list, columns=['nav'])
        data_bars.index.names = ['date']
        data_bars.index = pd.to_datetime(data_bars.index)
        symbol_data[s] = data_bars
        # BUG FIX: Index.union() returns a NEW Index and does not mutate in
        # place.  The original code discarded the result, so only the first
        # fund's dates were ever used when reindexing below.
        if comb_index is None:
            comb_index = data_bars.index
        else:
            comb_index = comb_index.union(data_bars.index)
    # Pad-forward each kept series onto the combined calendar.  (The original
    # code also re-queried fund_chinese_name per fund here but never used the
    # result; that dead DB round-trip has been removed.)
    for s in kept_funds:
        symbol_data[s] = symbol_data[s].reindex(index=comb_index, method='pad')
    return symbol_data
def test_insert_returing():
    """An INSERT with ``returning=raw('*')`` renders a RETURNING * clause."""
    # NOTE: the function name has a typo ("returing"); kept because the test
    # runner discovers it by name.
    columns = OrderedDict([
        ('person_id', 'mosky'),
        ('name', 'Mosky Liu'),
    ])
    generated = insert('person', columns, returning=raw('*'))
    expected = (
        'INSERT INTO "person" ("person_id", "name") '
        "VALUES ('mosky', 'Mosky Liu') RETURNING *"
    )
    eq_(generated, expected)
def get_star(self, queryset):
    """Generates a ``<table_name>.*`` representation

    :rtype: :class:`mosql.util.raw`
    """
    # The queryset's alias, when present, takes precedence over the model's
    # own table name.
    alias = queryset._params['alias']
    if alias:
        table = alias
    else:
        table = queryset.model._meta.db_table
    star = raw('{table}.*'.format(table=identifier(table)))
    return [star]
def get_where_for_delete(self, queryset):
    """Generates a mapping to be used as the ``where`` parameter for a
    ``DELETE`` query

    Used when ``mosql.query.delete`` is called. This implementation simply
    generate a ``SELECT`` subquery so that the ``WHERE`` clause will be of
    form ``<primary key> IN (<subquery>)``.
    """
    # Column name of the model's primary key.
    pkcol = queryset.model._meta.pk.get_attname_column()[1]
    # Resolve the queryset to a SELECT over just the pk column and wrap it
    # in parentheses so it can be embedded as a subquery.
    subquery = raw(paren(queryset._get_select_query([pkcol])))
    return {'{pkcol} IN'.format(pkcol=pkcol): subquery}
def _get_select_query(self, fields=None):
    """The raw SQL that will be used to resolve the queryset.

    :param fields: optional sequence of column names to SELECT; each item
        is table-qualified unless it already contains a ``.`` or is a
        :class:`raw` instance.  When ``None``, the selected columns are
        derived from the queryset (star or aggregated columns).
    :returns: the generated SQL string.
    """
    # The engine handler supplies backend-specific SQL conventions; its
    # patch() context presumably swaps mosql's global formatting hooks for
    # the duration of query generation — confirm against handler impl.
    handler = get_engine_handler(self.db)
    with handler.patch():
        # Deep-copy so the stored params are not mutated by pop() below.
        params = copy.deepcopy(self._params)
        if params['joins']:
            # Each stored join dict carries a 'table' entry plus the
            # remaining join() keyword arguments.
            params['joins'] = [
                join(table=(j.pop('table'),), **j)
                for j in params['joins']
            ]
        table = self.model._meta.db_table
        alias = params.pop('alias', None)
        # Drop empty/falsy params so they don't emit empty clauses.
        kwargs = {k: v for k, v in params.items() if v}
        # Inject default field names.
        # If this query does not contain a GROUP BY clause, we can safely
        # use a "*" to indicate all fields;
        # If the query has aggregation (GROUP BY), however, we will need to
        # choose a value to display for each field (especially pk because
        # it is needed by Django). Since accessing those fields doesn't
        # really make sense anyway, We arbitrarily use MIN.
        table_name = alias or table
        if fields is not None:
            kwargs['select'] = [
                f if '.' in f or isinstance(f, raw)
                else raw('{table}.{field}'.format(
                    table=identifier(table_name),
                    field=identifier(f))
                )
                for f in fields
            ]
        elif self._params['group_by']:
            kwargs['select'] = (
                handler.get_aggregated_columns_for_group_by(self, 'MIN')
            )
        else:
            kwargs['select'] = handler.get_star(self)
        # User-supplied extra columns are always appended.
        kwargs['select'].extend(self.extra_fields)
        # An OFFSET without a LIMIT is invalid on some backends; the handler
        # provides a backend-appropriate "no limit" sentinel.
        if 'offset' in kwargs and 'limit' not in kwargs:
            kwargs['limit'] = handler.no_limit_value()
        if alias:
            # Nested tuple form produces 'table AS alias' in mosql.
            table = ((table, alias),)
        query = select(table, **kwargs)
    return query
def get_aggregated_columns_for_group_by(self, queryset, aggregate):
    """Generates a sequence of fully-qualified column names

    This is used in an aggregated query, when :method:`get_star` cannot be
    used safety without ambiguity. Values for each field in the model are
    calculated by a SQL function specified by ``aggregate``, and then
    injected (using ``AS``) back into the fields.

    :param: aggregate: The SQL function to be used for aggregation.
    :type aggregate: str
    :returns: A sequence of fully qualified ``SELECT`` identifiers.
    """
    # Prefer the queryset's alias over the model's raw table name.
    alias = queryset._params['alias']
    table = alias if alias else queryset.model._meta.db_table
    columns = []
    for field in queryset.model._meta.fields:
        column = field.get_attname_column()[1]
        # e.g. MIN("person"."age") AS "age"
        columns.append(raw('{func}({table}.{field}) AS {field}'.format(
            func=aggregate,
            table=identifier(table),
            field=identifier(column))))
    return columns
def join(self, model, alias, on=None, using=None, join_type=None):
    """Create a ``JOIN`` clause in the query.

    :param model: A model to be joined on. This can be a model class, or a
        ``<appname>.<ModelName>`` string to lazy-load the model. For
        joining a non-Django model, you can also provide a plain table
        name.
    :type model: `str` or `django.db.models.Model`
    :param alias: The alias for the to-be-joined model. An ``AS`` clause
        will be created automatically based on this value.
    :param on: A mapping for fields to be joined on. Results in a
        ``JOIN ... ON`` query.
    :type on: `dict`
    :param using: A sequence of fields to be joined on. Results in a
        ``JOIN ... USING`` query.
    :param join_type: The type of ``JOIN`` to be used. Possible values
        include ``INNER``, ``LEFT``, ``CROSS`` and other standard SQL
        ``JOIN`` types. If ommited, a suitable type will be inferred
        automatically.
    """
    if isinstance(model, six.string_types):
        # Try to lazy-load the model; on failure, fall back to treating
        # the string as a plain table name.
        parts = model.split('.')
        if len(parts) == 2 and all(parts):
            model = get_model(*parts) or model
    elif isinstance(model, MoQuerySet):
        # Handle subquery.  NOTE(review): raw is presumably a str subclass
        # (the TypeError below says "str subclass instance"), so this
        # deliberately falls into the string branch that follows — the
        # branch order here matters.
        model = raw(paren(model.query))
    if inspect.isclass(model) and issubclass(model, Model):
        table = model._meta.db_table
    elif isinstance(model, six.string_types):
        table = model
    else:
        raise TypeError('join() arg 1 must be a Django model or a str '
                        'subclass instance')
    # Querysets are immutable: mutate a clone and return it.
    clone = self._clone()
    join_info = {'table': (table, alias), 'on': on, 'using': using}
    if join_type is not None:
        join_info['type'] = join_type
    clone._params['joins'].append(join_info)
    return clone
from mosql.query import select, left_join from mosql.db import Database, group db = Database(psycopg2, host='127.0.0.1') with db as cur: ## Use PostgreSQL to group: cur.execute( select( 'person', joins=left_join('detail', using='person_id'), where={'key': 'email'}, group_by='person_id', select=('person_id', raw('array_agg(val)')), # It is optional here. order_by='person_id', )) print 'Group the rows in PostgreSQL:' for row in cur: print row print ## Use MoSQL (app-level) to group: cur.execute( select( 'person', joins=left_join('detail', using='person_id'),
def test_raw_repr():
    """``repr`` of a raw value round-trips as ``raw('...')``."""
    value = raw('Mosky')
    expected = "raw('Mosky')"
    eq_(repr(value), expected)
from mosql.util import raw from mosql.query import select, left_join from mosql.db import Database, group db = Database(psycopg2, host='127.0.0.1') with db as cur: ## Use PostgreSQL to group: cur.execute(select( 'person', joins = left_join('detail', using='person_id'), where = {'key': 'email'}, group_by = 'person_id', select = ('person_id', raw('array_agg(val)')), # It is optional here. order_by = 'person_id', )) print 'Group the rows in PostgreSQL:' for row in cur: print row print ## Use MoSQL (app-level) to group: cur.execute(select( 'person', joins = left_join('detail', using='person_id'), where = {'key': 'email'},