def test_setup_db(self):
    """setup_db should register a ming session under SESSION_NAME."""
    from tattoo.run import setup_db
    from ming import Session
    from tattoo.models import SESSION_NAME

    uri = 'mongo://foo.com:1235/dbname'
    setup_db(uri)
    registered = Session.by_name(SESSION_NAME)
    # The named session must appear in ming's session registry.
    self.assertTrue(SESSION_NAME in registered._registry.keys())
# NOTE(review): this chunk was collapsed onto one line; line breaks and
# indentation below are reconstructed.  By ming convention the Field
# declarations presumably belong to the enclosing Document class (one indent
# level above __mongometa__'s body), not to __mongometa__ itself -- confirm
# against the original file.
class __mongometa__:
    # ming metadata: bind to the session for the current data-object id and
    # name the backing MongoDB collection.
    session = Session.by_name(_get_dataobj_id())
    name = 'classification_modes'

# Schema fields for one classification-mode record.
_id = Field(schema.ObjectId)
mode_type = Field(str)
maxalleles = Field(int)
num_modes = Field(int)
# boundary_map: list of mode_boundary sub-documents (type defined elsewhere).
boundary_map = Field([mode_boundary])
def test_configure_optional_params(self, Connection):
    """Optional driver params (replicaSet, w, ssl) pass through configure()."""
    settings = {
        'ming.main.uri': 'mongodb://localhost:27017/test_db',
        'ming.main.replicaSet': 'foobar',
        'ming.main.w': 2,
        'ming.main.ssl': True,
    }
    ming.configure(**settings)
    main_session = Session.by_name('main')
    # A usable connection and database must be bound to the 'main' session.
    assert main_session.bind.conn is not None
    assert main_session.bind.db is not None
def test_configure(self):
    """Legacy master/database/network_timeout settings still configure 'main'."""
    settings = {
        'ming.main.master': 'mongodb://localhost:27017/',
        'ming.main.database': 'test_db',
        'ming.main.network_timeout': '0.1',
        'ming.main.connect_retry': 1,
        'ming.main.tz_aware': False,
    }
    ming.configure(**settings)
    main_session = Session.by_name('main')
    # A usable connection and database must be bound to the 'main' session.
    assert main_session.bind.conn is not None
    assert main_session.bind.db is not None
def test_configure(self, Connection):
    """A URI-based configure() must not pass a separate 'database' kwarg."""
    settings = {
        'ming.main.uri': 'mongodb://localhost:27017/test_db',
        'ming.main.connect_retry': 1,
        'ming.main.tz_aware': False,
    }
    ming.configure(**settings)
    main_session = Session.by_name('main')
    assert main_session.bind.conn is not None
    assert main_session.bind.db is not None
    # The mocked Connection must have been called without a 'database' kwarg;
    # the database name comes from the URI path instead.
    args, kwargs = Connection.call_args
    assert 'database' not in kwargs
def test_configure(self, Connection):
    """URI configure() binds a session, keeps index auto-ensure on, no db kwarg."""
    settings = {
        'ming.main.uri': 'mongodb://localhost:27017/test_db',
        'ming.main.connect_retry': 1,
        'ming.main.tz_aware': False,
    }
    ming.configure(**settings)
    main_session = Session.by_name('main')
    assert main_session.bind.conn is not None
    assert main_session.bind.db is not None
    # Index auto-ensuring defaults to enabled on the underlying datastore.
    assert main_session.bind.bind._auto_ensure_indexes
    # The mocked Connection must not receive a separate 'database' kwarg.
    args, kwargs = Connection.call_args
    assert 'database' not in kwargs
# NOTE(review): this chunk was collapsed onto one line; line breaks and
# indentation below are reconstructed.  By ming convention the Field
# declarations presumably belong to the enclosing Document class, not to
# __mongometa__ itself -- confirm against the original file.
class __mongometa__:
    session = Session.by_name(_get_dataobj_id())
    name = 'individual_samples'

# Schema fields for one sampled generation of a simulation run.
_id = Field(schema.ObjectId)
simulation_time = Field(int)
replication = Field(int)
dimensionality = Field(int)
sample_size = Field(int)
population_size = Field(int)
mutation_rate = Field(float)
simulation_run_id = Field(str)
# a sample is a list of dicts, where each dict has an individual ID and a list of ints as a value
sample = Field([dict(id=int, genotype=[int])])
def test_configure_with_database(self):
    """An explicit 'database' setting is forwarded to the connection."""
    settings = {
        "ming.main.uri": "mongodb://localhost:27017/test_db",
        "ming.main.database": "another_test_db",
        "ming.main.connect_retry": 1,
        "ming.main.tz_aware": False,
    }
    ming.configure(**settings)
    main_session = Session.by_name("main")
    assert main_session.bind.conn is not None
    assert main_session.bind.db is not None
    # Index auto-ensuring defaults to enabled on the underlying datastore.
    assert main_session.bind.bind._auto_ensure_indexes
    # With an explicit database setting the connection DOES get the kwarg.
    args, kwargs = self.MockConn.call_args
    assert "database" in kwargs
# NOTE(review): collapsed chunk; formatting reconstructed.  The Field
# declarations presumably sit on the enclosing Document class, not inside
# __mongometa__ -- confirm against the original file.
class __mongometa__:
    session = Session.by_name(_get_dataobj_id())
    name = 'persimrun_stats_postclassification'

_id = Field(schema.ObjectId)
# pertaining to simulation run
replication = Field(int)
sample_size = Field(int)
population_size = Field(int)
mutation_rate = Field(float)
simulation_run_id = Field(str)
# fields pertaining to classification
classification_id = Field(str)
classification_type = Field(str)
classification_dim = Field(int)
classification_coarseness = Field(float)
# First-appearance time per class, plus innovation-interval summary stats.
class_time_first_appearance = Field([dict(classid=str, time=int)])
class_innovation_interval_mean = Field(float)
class_innovation_interval_sd = Field(float)
# NOTE(review): collapsed chunk; formatting reconstructed.  The Field
# declarations presumably sit on the enclosing Document class, not inside
# __mongometa__ -- confirm against the original file.
class __mongometa__:
    session = Session.by_name(_get_dataobj_id())
    name = 'individual_samples_classified'

_id = Field(schema.ObjectId)
# fields pertaining to classification
classification_id = Field(str)
classification_type = Field(str)
classification_dim = Field(int)
classification_coarseness = Field(float)
# fields pertaining to simulation run
simulation_time = Field(int)
replication = Field(int)
sample_size = Field(int)
population_size = Field(int)
mutation_rate = Field(float)
simulation_run_id = Field(str)
# a sample is a list of dicts, where each dict has an individual ID and a list of ints as a value
sample = Field([
    dict(id=int, classid=int),
])
# NOTE(review): collapsed chunk; formatting reconstructed.  The Field
# declarations presumably sit on the enclosing Document class, not inside
# __mongometa__ -- confirm against the original file.
class __mongometa__:
    session = Session.by_name(_get_dataobj_id())
    name = 'pergeneration_stats_traits'

_id = Field(schema.ObjectId)
# fields pertaining to simulation run
simulation_run_id = Field(str)
simulation_time = Field(int)
replication = Field(int)
sample_size = Field(int)
population_size = Field(int)
mutation_rate = Field(float)
dimensionality = Field(int)
# statistics about this generation of this simulation run
mean_trait_richness = Field(float)
mean_evenness_shannon_entropy = Field(float)
mean_evenness_iqv = Field(float)
# Per-locus statistic lists (one entry per locus).
loci_trait_richness = Field([int])
loci_evenness_shannon_entropy = Field([float])
loci_evenness_iqv = Field([float])
loci_neutrality_slatkin = Field([float])
mean_neutrality_slatkin = Field(float)
# NOTE(review): collapsed chunk; formatting reconstructed.  The Field
# declarations presumably sit on the enclosing Document class, not inside
# __mongometa__ -- confirm against the original file.
class __mongometa__:
    session = Session.by_name(_get_dataobj_id())
    name = 'pergeneration_stats_postclassification'

_id = Field(schema.ObjectId)
# fields pertaining to classification
classification_id = Field(str)
classification_type = Field(str)
classification_dim = Field(int)
classification_coarseness = Field(float)
classification_num_classes = Field(int)
# fields pertaining to simulation run
simulation_time = Field(int)
replication = Field(int)
sample_size = Field(int)
population_size = Field(int)
mutation_rate = Field(float)
simulation_run_id = Field(str)
# statistics about this generation of this simulation run
mode_richness = Field([int])  # a list of richness values, in order of locus ID
class_richness = Field(int)  # a single value, for the population in this generation of this sim run
mode_evenness_iqv = Field([float])  # a list of evenness values, in order of locus ID
mode_evenness_shannon_entropy = Field([float])
class_evenness_iqv = Field(float)  # a single value, for the population in this generation of this sim run
class_shannon_entropy = Field(float)
design_space_occupation = Field(float)  # a single value, denoting the fraction of occupied classes
class_innovation_interval_times = Field([int])  # a list of intervals between appearances of a new class
class_neutrality_slatkin = Field(float)
class __mongometa__:
    """ming metadata: collection name and backing session."""
    name = 'seriationct_sample_unaveraged'
    session = Session.by_name(_get_dataobj_id())
class __mongometa__:
    """ming metadata: collection name and backing session."""
    name = 'trait_count_sample'
    session = Session.by_name(_get_dataobj_id())
from ming import Session
from ming.orm import ThreadLocalORMSession

# Module-level session wiring: a document-level session bound to the named
# datastore 'chef', with a thread-local ORM session layered on top of it.
doc_session = Session.by_name('chef')
orm_session = ThreadLocalORMSession(doc_session)
class __mongometa__:
    """ming metadata: collection name and backing session."""
    name = 'experiment_tracking'
    session = Session.by_name(_get_dataobj_id())
import sys
import time
from datetime import datetime

from ming import Document, Field, Session
from ming import schema as S
from ming.datastore import DataStore

# Iteration count used by the benchmark loops elsewhere in this file.
NUM_ITER = 100

# Bind the 'benchmark' named session to a local MongoDB datastore.
doc_session = Session.by_name('benchmark')
doc_session.bind = DataStore('mongodb://localhost:27017', database='benchmark')


class StringNotNone(S.String):
    ''' Stronger than if_missing='', this also converts an explicit None to '' '''

    def validate(self, value, **kw):
        # NOTE(review): `value == None` should be `value is None` (PEP 8);
        # kept byte-identical here, flagged only.
        if value == None or value is S.Missing:
            value = ''
        return S.String.validate(self, value, **kw)


class Project(Document):

    class __mongometa__:
        session = doc_session
        name = 'projects'
        # NOTE(review): this chunk is truncated here; the remainder of the
        # `indexes` list (and any further schema) lies outside the visible
        # range.
        indexes = [
            ('shortname', ),
class __mongometa__:
    """ming metadata: collection name and backing session."""
    name = 'simulation_runs'
    session = Session.by_name(_get_dataobj_id())
class __mongometa__:
    """ming metadata: collection name and backing session."""
    name = 'richness_population'
    session = Session.by_name(_get_dataobj_id())
class __mongometa__:
    """ming metadata: collection name and backing session."""
    name = 'axelrod_stats_extensible'
    session = Session.by_name(_get_dataobj_id())
import logging

import cPickle as pickle
import bson
from pymongo.cursor import _QUERY_OPTIONS

from ming import Session
from mongotools.util import LazyProperty
from mongotools.pubsub import Channel

# Document-level session for the 'chapman' datastore.
doc_session = Session.by_name('chapman')

log = logging.getLogger(__name__)


def dumps(value):
    # Serialize `value` to a BSON Binary wrapping its pickle; None passes
    # through unchanged so "no payload" round-trips as None.
    if value is None:
        return value
    return bson.Binary(pickle.dumps(value))


def loads(value):
    # Inverse of dumps().  NOTE(review): pickle.loads executes arbitrary code
    # if the stored bytes are ever attacker-controlled -- flagged only.
    return pickle.loads(value)


class pickle_property(object):
    # Descriptor exposing a pickled document field (named `pname`) as a
    # Python attribute.

    def __init__(self, pname):
        self._pname = pname

    def __get__(self, obj, cls=None):
        # Accessed on the class itself: return the descriptor, per the usual
        # descriptor convention.
        if obj is None:
            return self
        # NOTE(review): chunk truncated here; the instance branch of __get__
        # lies outside the visible range.
from ming.odm import ODMSession, Mapper
from ming.odm.mapper import MapperExtension
from ming.odm.property import ForeignIdProperty, FieldProperty, RelationProperty
from ming.odm.declarative import MappedClass

import config
from includes.tokenizer import tokenize
from information_value.calculator import InformationValueCalculator

# Token-count threshold used elsewhere in this module (usage not visible here).
MIN_TOKENS = 2000

log = logging.getLogger('lenin')

# ODM session layered over the 'document_store' named document session.
session = Session.by_name('document_store')
odm_session = ODMSession(doc_session=session)


class DocumentWindowSizeDuplicateHash(MapperExtension):
    """ Used as unique key for Document - WindowSize """

    def before_insert(self, instance, state, session):
        # Derive a deterministic hash from (document_id, window_size) and
        # reject the insert when a record with the same hash already exists;
        # otherwise stamp the hash onto the instance before it is written.
        doc_window_hash = hashlib.sha1(str(instance.document_id) + str(instance.window_size)).hexdigest()
        if instance.__class__.query.find({'doc_window_hash': doc_window_hash}).count() > 0:
            raise DuplicateKeyError('Duplicate hash found ', doc_window_hash)
        instance.doc_window_hash = doc_window_hash


# NOTE(review): chunk truncated at this class header; its body lies outside
# the visible range.
class InformationValueResult(MappedClass):
from cStringIO import StringIO
from datetime import datetime

from pyramid.decorator import reify

from ming import Session
from ming import schema as S
from ming import collection, Field
from ming.orm import ThreadLocalODMSession

from sutil import util
from lib import Workbook

log = logging.getLogger(__name__)

# Document session for the 'dvlp_ss' datastore, with a thread-local ODM
# session layered on top.
doc_session = Session.by_name('dvlp_ss')
odm_session = ThreadLocalODMSession(doc_session)

# Schema of the 'list' collection: one uploaded list, owned by a user.
list_ = collection(
    'list', doc_session,
    Field('_id', S.ObjectId()),
    Field('created', datetime, if_missing=datetime.utcnow),
    Field('status', str, if_missing='active'),
    Field('user_id', S.ObjectId, if_missing=None, index=True),
    Field('url', str),
    # mapping: presumably how to read the uploaded sheet (header row present,
    # sheet index, email column index) -- confirm against callers.
    Field('mapping', dict(
        header=bool,
        sheet=int,
        email=int)),
    # stats: open-ended string-keyed dict of per-list statistics.
    Field('stats', {str: None}))
class __mongometa__:
    """ming metadata: collection name and backing session."""
    name = 'richness_sample'
    session = Session.by_name(_get_dataobj_id())
class __mongometa__:
    """ming metadata: collection name and backing session."""
    name = 'axelrod_stats_treestructured'
    session = Session.by_name(_get_dataobj_id())
class __mongometa__:
    """ming metadata: collection name and backing session."""
    name = 'classifications'
    session = Session.by_name(_get_dataobj_id())
class __mongometa__:
    """ming metadata: store documents in 'my_model' via the default session."""
    name = 'my_model'
    session = Session.by_name('default')
class __mongometa__:
    """ming metadata: thread-local ORM session over the 'sprox_tests' session."""
    session = ThreadLocalORMSession(Session.by_name('sprox_tests'))
'''The Zarkov data model.''' import logging from datetime import datetime import gevent import pymongo.errors from ming import Session, collection, Field from ming import schema as S from ming.orm import ContextualORMSession, session log = logging.getLogger(__name__) doc_session = Session.by_name('zarkov') orm_session = ContextualORMSession( lambda:id(gevent.getcurrent), doc_session) # Singleton collection idgen = collection( 'idgen', doc_session, Field('_id', int, if_missing=0), Field('inc', int)) # Things we want to track -- the 'fact table' from an OLAP perspective event = collection( 'event', doc_session, Field('_id', int), Field('timestamp', datetime, if_missing=datetime.utcnow), Field('type', str), Field('context', {str:None}),
# NOTE(review): this chunk begins mid-method -- the enclosing `def` and its
# class header lie before the visible range; indentation is reconstructed.
        self.update_index(self.objects_deleted, arefs)
        # Emit one zarkov event per tracked object, keyed by change kind.
        for obj in self.objects_added:
            g.zarkov_event('create', extra=obj.index_id())
        for obj in self.objects_modified:
            g.zarkov_event('modify', extra=obj.index_id())
        for obj in self.objects_deleted:
            g.zarkov_event('delete', extra=obj.index_id())
        # Reset change tracking once the events have been emitted.
        self.objects_added = []
        self.objects_modified = []
        self.objects_deleted = []

    def update_index(self, objects_deleted, arefs):
        # Post delete and add indexing operations
        from allura.tasks import index_tasks
        if objects_deleted:
            index_tasks.del_artifacts.post(
                [obj.index_id() for obj in objects_deleted])
        if arefs:
            index_tasks.add_artifacts.post([aref._id for aref in arefs])


# Named document sessions and the ORM sessions layered over them.
main_doc_session = Session.by_name('main')
project_doc_session = Session.by_name('project')
main_orm_session = ThreadLocalORMSession(main_doc_session)
project_orm_session = ThreadLocalORMSession(project_doc_session)
# Artifact saves additionally run the indexing/eventing session extension.
artifact_orm_session = ThreadLocalORMSession(
    doc_session=project_doc_session,
    extensions = [ ArtifactSessionExtension ])
repository_orm_session = ThreadLocalORMSession(
    doc_session=main_doc_session,
    extensions = [ ])
from pecan import conf from ming import Session, configure as ming_configure from ming.orm import Mapper from ming.orm import ThreadLocalORMSession dbname = conf.nosqldb.database dbname = 'ming.%s.' % dbname print 'Connecting to %s\n' % dbname ming_configure(**conf.nosqldb.as_dict(prefix=dbname)) session = ThreadLocalORMSession(doc_session=Session.by_name(conf.nosqldb.database))
def setUp(self):
    """Bind an in-memory (mim) datastore to the 'monq' session and clear it."""
    store = datastore.DataStore(
        'mim:///',
        database='test')
    self.ds = store
    self.session = Session.by_name('monq')
    self.session.bind = store
    # Start every test from an empty database.
    self.ds.db.clear()
from pymongo import MongoClient


def init_logging():
    """Attach INFO-level file and console handlers to the 'lenin' logger."""
    logger = logging.getLogger('lenin')
    logger.setLevel(logging.INFO)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    # File handler first, then console handler, both at INFO with the same
    # format -- identical wiring to the original, expressed as a loop.
    file_handler = logging.FileHandler('lenin.log')
    console_handler = logging.StreamHandler()
    for handler in (file_handler, console_handler):
        handler.setLevel(logging.INFO)
        handler.setFormatter(formatter)
        logger.addHandler(handler)


init_logging()

client = MongoClient()
db = client[config.DATABASE_NAME]
# ODM session layered over the 'document_store' named document session.
session = Session.by_name('document_store')
odm_session = ODMSession(doc_session=session)
class __mongometa__:
    """ming metadata: collection name and backing session."""
    name = 'axelrod_stats_original'
    session = Session.by_name(_get_dataobj_id())
import time
import traceback
from datetime import datetime

import pymongo

from ming import collection, Session, Field, Index
from ming import schema as S
from ming.utils import LazyProperty
from ming.odm import ThreadLocalODMSession, session

# Document session for the 'monq' task queue, plus a thread-local ODM session.
doc_session = Session.by_name('monq')
odm_session = ThreadLocalODMSession(doc_session)

# Task lifecycle states, and what to do with a finished task's result.
STATES=('ready', 'busy', 'error', 'complete')
RESULT_TYPES=('keep', 'forget')


def task(func):
    '''Decorator to add some methods to task functions'''
    def post(*args, **kwargs):
        return TaskObject.post(func, args, kwargs)
    # Expose func.post(...) so callers can enqueue the function as a task.
    func.post = post
    return func


# Schema of the 'monq.task' collection.
# NOTE(review): chunk truncated inside the Field('time', dict(...))
# declaration; the remainder lies outside the visible range.
TaskDoc = collection(
    'monq.task', doc_session,
    Field('_id', S.ObjectId),
    Field('state', S.OneOf(*STATES)),
    Field('priority', int),
    Field('result_type', S.OneOf(*RESULT_TYPES)),
    Field('time', dict(
import bson
import json
import logging

from ming import collection, Field, Index, Session
from ming import schema as S

from pwcred import security

# Document session for the 'pwcred' credential store.
doc_session = Session.by_name('pwcred')

log = logging.getLogger(__name__)

# Registered clients: allowed source addresses, a context string, and a
# public key (meanings inferred from field names -- confirm).
client = collection(
    'pwcred.client', doc_session,
    Field('_id', str),
    Field('ip_addrs', [ str ]),
    Field('context', str),
    Field('public_key', str))

# Stored credential blobs, unique per (client_id, key); the creds payload and
# AES key material are stored encrypted as binary.
credentials = collection(
    'pwcred.credentials', doc_session,
    Field('_id', S.ObjectId()),
    Field('key', str),
    Field('client_id', str),
    Field('enc_aes_key', S.Binary()),
    Field('aes_iv', S.Binary()),
    Field('enc_creds', S.Binary()),
    Index('client_id', 'key', unique=True))


# NOTE(review): chunk truncated at this function header; its body lies
# outside the visible range.  The `client` parameter shadows the module-level
# `client` collection defined above.
def encrypt_credentials(client, key, **creds):
class __mongometa__:
    """ming metadata: collection name and backing session."""
    name = 'trait_lifetime'
    session = Session.by_name(_get_dataobj_id())
import sys
import time
from datetime import datetime

from ming import Document, Field, Session
from ming import schema as S
from ming.datastore import DataStore

# Iteration count used by the benchmark loops elsewhere in this file.
NUM_ITER = 100

# Bind the 'benchmark' named session to a local MongoDB datastore.
doc_session = Session.by_name('benchmark')
doc_session.bind = DataStore('mongodb://localhost:27017', database='benchmark')


class StringNotNone(S.String):
    ''' Stronger than if_missing='', this also converts an explicit None to '' '''

    def validate(self, value, **kw):
        # NOTE(review): `value == None` should be `value is None` (PEP 8);
        # kept byte-identical here, flagged only.
        if value == None or value is S.Missing:
            value = ''
        return S.String.validate(self, value, **kw)


class Project(Document):

    class __mongometa__:
        session = doc_session
        name='projects'
        # NOTE(review): this chunk is truncated here; the remainder of the
        # `indexes` list (and any further schema) lies outside the visible
        # range.
        indexes=[
            ('shortname',),
            ('source',),
            ('sf_id',),
# is written using the original extensions session.flush() session.close() try: session._kwargs['extensions'] = extensions or [] yield session # if successful, flush the session to ensure everything # new is written using the modified extensions session.flush() session.close() finally: # restore proper session extension even if everything goes horribly awry session._kwargs['extensions'] = original_exts main_doc_session = Session.by_name('main') project_doc_session = Session.by_name('project') task_doc_session = Session.by_name('task') main_orm_session = ThreadLocalORMSession( doc_session=main_doc_session, extensions=[IndexerSessionExtension] ) project_orm_session = ThreadLocalORMSession( doc_session=project_doc_session, extensions=[IndexerSessionExtension] ) task_orm_session = ThreadLocalORMSession(task_doc_session) artifact_orm_session = ThreadLocalORMSession( doc_session=project_doc_session, extensions=[ArtifactSessionExtension]) repository_orm_session = ThreadLocalORMSession(