Example #1
def export(exp_id, loader, path):
    path = path.rstrip('/') + '/{}/'.format(exp_id)
    os.makedirs(path, exist_ok=True)

    exp = loader.find_by_id(exp_id)
    metrics = exp.metrics

    exp_dict = exp.to_dict()

    cout = exp_dict.pop('captured_out')
    config = exp_dict.pop('config')

    with open(path + 'cout.txt', mode='w') as f:
        f.write(cout)

    with open(path + 'run.json', mode='w') as f:
        json.dump(json.loads(bson_dumps(exp_dict)), f, indent=4)

    with open(path + 'config.json', mode='w') as f:
        json.dump(json.loads(bson_dumps(config)), f, indent=4)

    with open(path + 'metrics.json', mode='w') as f:
        json.dump(json.loads(bson_dumps(metrics)), f, indent=4)

    for k in exp.artifacts:
        artifact_content = torch.load(BytesIO(exp.artifacts[k].content))

        with open(path + k, mode='wb') as f:
            torch.save(artifact_content, f)
Example #2
def deserialize(to_deserialize, *args, **kwargs):
    """
    Deserializes a string into a PyMongo BSON
    """
    if isinstance(to_deserialize, string_types):
        if re.match('^[0-9a-f]{24}$', to_deserialize):
            return ObjectId(to_deserialize)
        try:
            return bson_loads(to_deserialize, *args, **kwargs)
        except:
            return bson_loads(bson_dumps(to_deserialize), *args, **kwargs)
    else:
        return bson_loads(bson_dumps(to_deserialize), *args, **kwargs)
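
The deserialize() helper above branches on its input: a 24-character hex string is promoted to an ObjectId, any other string is parsed with bson_loads, and non-string values are round-tripped through bson_dumps/bson_loads. Below is a minimal usage sketch, not part of the original source; it assumes the deserialize() definition above (with its bson.json_util and six imports) is in scope, that pymongo is installed, and that the hex id is an invented example value.

from bson import ObjectId

# A 24-character hex string becomes an ObjectId (hypothetical value).
oid = deserialize("5f2b6c9e8f1b4a3d2c1e0f9a")
assert isinstance(oid, ObjectId)

# Any other string is parsed as MongoDB Extended JSON via bson_loads.
doc = deserialize('{"count": 3, "_id": {"$oid": "5f2b6c9e8f1b4a3d2c1e0f9a"}}')
assert doc["count"] == 3 and isinstance(doc["_id"], ObjectId)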
Example #3
    def add(self, meta, asname=None, store=None):
        store = store or self.store
        name = asname or meta.name
        meta_dict = meta.to_dict()
        # prepare local paths
        lpaths = self._local_paths(self.path, name, store)
        # data in gridfile
        data = meta.gridfile.read()
        if data is not None:
            lpaths.gridfile.write_bytes(data)
            meta_dict['gridfile'] = lpaths.gridfile.name  # basename
        # data in collection
        if meta.collection:
            def remove_id(obj):
                obj.pop('_id', None)
                return obj

            data = store.collection(name).find()
            with lpaths.collection.open('w') as fout:
                IterableJsonDump.dump(data, fout,
                                      transform=remove_id,
                                      default=bson_dumps)
        # metadata
        lpaths.meta.write_text(bson_dumps(meta_dict))
        self.manifest['members'][self._manifest_key(name, store)] = lpaths.key
Example #4
    def save_sniffer_session(self, session):
        session_data = session.flow_status(include_payload=True, encoding='binary')
        session_data['nodes'] = session.nodes
        session_data['edges'] = session.edges

        dict = {
            'date_created': session.date_created,
            'name': session.name,
            'filter': session.filter,
            'intercept_tls': session.intercept_tls,
            'pcap': session.pcap,
            'pcap_filename': session.pcap_filename,
            'packet_count': session.packet_count,
            'session_data': bson_dumps(session_data),
            'public': session.public,
        }

        if not session.id:
            # we're creating a new session
            dict['_id'] = ObjectId()
            session.id = dict['_id']
        else:
            dict['_id'] = session.id

        self.sniffer_sessions.save(dict)
        return str(session.id)
Example #5
    def save_sniffer_session(self, session):
        session_data = session.flow_status(include_payload=True, encoding='binary')
        session_data['nodes'] = session.nodes
        session_data['edges'] = session.edges

        dict = {
            'date_created': session.date_created,
            'name': session.name,
            'filter': session.filter,
            'intercept_tls': session.intercept_tls,
            'pcap': session.pcap,
            'pcap_filename': session.pcap_filename,
            'packet_count': session.packet_count,
            'session_data': bson_dumps(session_data),
            'public': session.public,
        }

        if not session.id:
            # we're creating a new session
            dict['_id'] = ObjectId()
            session.id = dict['_id']
        else:
            dict['_id'] = session.id

        self.sniffer_sessions.save(dict)
        return str(session.id)
Example #6
    def run(self):
        self.thread_active = True

        self.reset_session_progress()
        debug_output("[+] Sniffing session %s started" % self.name)
        debug_output("[+] Filter: %s" % self.filter)
        self.stopSniffing = False

        if self.pcap:
            self.load_pcap()
        else:
            self.sniff(stopper=self.stop_sniffing,
                       filter=self.filter,
                       prn=self.handlePacket,
                       stopperTimeout=1,
                       store=0)

        debug_output("[+] Sniffing session %s stopped" % self.name)
        self.engine.commit_to_db(self)

        data = {'type': 'sniffdone', 'session_name': self.name}
        self.engine.messenger.broadcast(bson_dumps(data), 'sniffer-data',
                                        'sniffdone')

        self.thread_active = False
        return
Example #7
def token(logged_in_member):
    if logged_in_member:
        return jwt.encode({"member_id": bson_dumps(logged_in_member.id)},
                       current_app.config['SECRET'],
                       current_app.config['ALGORITHM'])
    else:
        return None
Example #8
    def send_flow_statistics(self, flow):
        data = {}
        data['flow'] = flow.get_statistics()
        data['type'] = 'flow_statistics_update'
        data['session_name'] = self.name

        self.engine.messenger.broadcast(bson_dumps(data), 'sniffer-data', 'flow_statistics_update')
Example #9
def token(logged_in_user):
    if logged_in_user:
        return jwt.encode(
            {
                "user_id": bson_dumps(logged_in_user.id),
                "is_master": logged_in_user.master_role
            }, current_app.config['SECRET'], current_app.config['ALGORITHM'])
    else:
        return None
Example #10
def createOrUpdateSSS():
    """
    Updates already existing sss record of a user
    else creates one.
    return: id of newly created document else True.
    """
    sss = SSS.fromJSON(request.json)
    result = sss.update()
    print(result)
    return jsonify({'result': bson_dumps(result)})
Example #11
    def send_nodes(self, elts=[], edges=[]):
        for e in elts:
            e['fields'] = e.default_fields

        data = {'querya': {}, 'nodes': elts, 'edges': edges, 'type': 'nodeupdate', 'session_name': self.name}
        try:
            if (len(elts) > 0 or len(edges) > 0):
                self.engine.messenger.broadcast(bson_dumps(data), 'sniffer-data', 'nodeupdate')
        except Exception, e:
            debug_output("Could not send nodes: {}".format(e), 'error')
Example #12
def json(obj):
    if hasattr(obj, '__class__') and obj.__class__.__name__ == 'Cursor':
        try:
            obj = list(obj)
        except:
            obj = dict(obj)

    if isinstance(obj, list):
        obj = {'data': obj}

    return json_loads(bson_dumps(obj))
Example #13
def deserialize(to_deserialize, *args, **kwargs):
    """
    Deserializes a string into a PyMongo BSON
    """
    if isinstance(to_deserialize, string_types):
        try:
            return ObjectId(to_deserialize)
        except Exception:
            return bson_loads(to_deserialize, *args, **kwargs)
    else:
        return bson_loads(bson_dumps(to_deserialize), *args, **kwargs)
Example #14
 def save_module_entry(self, session_id, module_name, entry, timeout=None):
     asd = self.modules.update(
         {
             'name': module_name,
             'session_id': session_id
         }, {
             'name': module_name,
             'session_id': session_id,
             'entry': bson_dumps(entry),
             'timeout': timeout
         },
         upsert=True)
Example #15
def tweets_to_bson(tweets, tweetfile, append=False):
    """
    Exports a collection of tweets (any iterable of tweet objects) to given
    file in MongoDB's native BSON format. Not line-separated.
    To append to given filename, pass append=True
    """
    if append:
        handle = open(tweetfile, "ab+")
    else:
        handle = open(tweetfile, "wb")
    for tweet in tweets:
        handle.write(bson_dumps(tweet))
    handle.close()
Example #16
def tweets_to_bson(tweets, tweetfile, append=False):
    """
    Exports a collection of tweets (any iterable of tweet objects) to given
    file in MongoDB's native BSON format. Not line-separated.
    To append to given filename, pass append=True
    """
    if append:
        handle = open(tweetfile, "ab+")
    else:
        handle = open(tweetfile, "wb")
    for tweet in tweets:
        handle.write(bson_dumps(tweet))
    handle.close()
Example #17
    def post(self):
        args = parser.parse_args()
        # We're "adding" a new Greeting so its id will be 'max plus one'
        # greeting_id = get_mongo().db.greetings.count_documents({})
        greeting_id = str(uuid.uuid4())
        message = args['message']

        greeting_data = {'unique_id': greeting_id, 'message': message}

        result = get_mongo().db.greetings.insert_one(greeting_data)

        greeting_data = json.loads(bson_dumps(greeting_data))

        return greeting_data, 201
Example #18
    def response(self):
        payload = {
            "label": self.label,
            "_status": "ERR",
            "_error": {
                "message": self.message,
                "code": self.code
            }
        }

        text = bson_dumps(payload, indent=2)
        resp = make_response(text, self.status_code)
        resp.mimetype = "application/json"

        return resp
Example #19
    def response(self):
        payload = {
            "label": self.label,
            "_status": "ERR",
            "_error": {
                "message": self.message,
                "code": self.code
            }
        }

        text = bson_dumps(payload, indent=2)
        resp = make_response(text, self.status_code)
        resp.mimetype = "application/json"
        import logging
        logging.warn("this is a test %s", resp)
        return resp
Example #20
def jsonify(obj, *args, **kwargs):
    """Same call signature as `flask.jsonify`.  Works with MongoDB
    BSON objects.

    .. code-block:: python

        @app.route('/get_bson_doc')
        def get_bson_doc():
            data = bson.BSON.encode({'a': 1, 'id_': ObjectId()})
            return jsonify(data)

    :param object obj: The object to be jsonified.

    :return Response response: The response object to be returned.
    """

    return flask_jsonify(**json.loads(bson_dumps(obj, *args, **kwargs)))
Example #21
    def _type_transform(self, value):
        """
            Transforms ObjectId types to str type and vice versa.

            Any ObjectId types present are serialized to a str.
            The same str is converted back to an ObjectId while de-serializing.

            :param value: the object to be transformed
            :type  value: Object

            :returns: recursively transformed object
            :rtype: Object
        """
        # Encoding ObjectId to str
        if isinstance(value, ObjectId):
            return bson_dumps(value)

        # Recursive checks inside dict
        if isinstance(value, dict):
            if len(value) == 0:
                return value
            # Decoding '$oid' back to ObjectId
            if '$oid' in value.keys():
                return bson_loads(value)

            return dict((self._type_transform(k), self._type_transform(v))
                        for k, v in value.iteritems())

        # Recursive checks inside a list
        if isinstance(value, list):
            if len(value) == 0:
                return value
            for i, val in enumerate(value):
                value[i] = self._type_transform(val)
            return value

        # Recursive checks inside a tuple
        if isinstance(value, tuple):
            if len(value) == 0:
                return value
            return tuple([self._type_transform(val) for val in value])

        return value
Example #22
    def _type_transform(self, value):
        """
            Transforms ObjectId types to str type and vice versa.

            Any ObjectId types present are serialized to a str.
            The same str is converted back to an ObjectId while de-serializing.

            :param value: the object to be transformed
            :type  value: Object

            :returns: recursively transformed object
            :rtype: Object
        """
        # Encoding ObjectId to str
        if isinstance(value, ObjectId):
            return bson_dumps(value)

        # Recursive checks inside dict
        if isinstance(value, dict):
            if len(value) == 0:
                return value
            # Decoding '$oid' back to ObjectId
            if '$oid' in value.keys():
                return bson_loads(value)

            return dict((self._type_transform(k), self._type_transform(v))
                        for k, v in value.iteritems())

        # Recursive checks inside a list
        if isinstance(value, list):
            if len(value) == 0:
                return value
            for i, val in enumerate(value):
                value[i] = self._type_transform(val)
            return value

        # Recursive checks inside a tuple
        if isinstance(value, tuple):
            if len(value) == 0:
                return value
            return tuple([self._type_transform(val) for val in value])

        return value
Example #23
def publishArtist(artist_id):
    is_published = ('true' == request.form['is_published'])
    db.artist.update({'_id': ObjectId(artist_id)},
                     {'$set': {
                         'is_published': is_published
                     }})
    ## Update this artist on exhibitions as well
    db.exhibitions.update({"artist._id": ObjectId(artist_id)},
                          {"$set": {
                              "artist.is_published": is_published
                          }},
                          multi=True)
    ## Should update this artist on group exhibitions as well
    db.exhibitions.update({"artists._id": ObjectId(artist_id)},
                          {"$set": {
                              "artists.$.is_published": is_published
                          }},
                          multi=True)

    return bson_dumps(db.artist.find_one({'_id': ObjectId(artist_id)}))
Example #24
    def content(self):
        # Check if the session packets are set to 0 (i.e. session packets are not loaded in memory)
        if not self.dns_requests:
            # Try to load results from database
            debug_output("Loading entry from DB")
            self.dns_requests = self.load_entry()
            if not self.dns_requests:
                debug_output("No results in DB, processing PCAP")
                filename = self.session.pcap_filename
                self.session.pkts = sniff(stopper=self.session.stop_sniffing, filter=self.session.filter, prn=self.on_packet, stopperTimeout=1, offline=self.session.engine.setup['SNIFFER_DIR']+"/"+filename)
                # now that everything has been processed, save the results to DB
                self.save_entry(bson_dumps(self.dns_requests))
            else:
                self.dns_requests = bson_loads(self.dns_requests)

        content = "<table class='table table-condensed'><tr><th>Query</th><th>Answers</th><th>Count</th></tr>"
        for q in self.dns_requests:
            content += "<tr><td>{}</td><td>{}</td><td>{}</td></tr>".format(q, ", ".join(self.dns_requests[q]['answers']), self.dns_requests[q]['count'])
        content += "</table>"
        return content
Example #25
def dict_from_cursor(data=None, keys=None):
    filtered_dict = {}
    # Convert Uids to str
    data = bson_dumps(data)
    python_dict = json.loads(data)

    for key in keys:
        value = python_dict.get(key)
        if type(value) is dict:
            # Try to get mongo_id
            mongo_id = value.get('$oid')
            if mongo_id:
                value = mongo_id
        
        if key == '_id':
            key = 'id'

        filtered_dict[key] = value


    return filtered_dict
Example #26
    def run(self):
        self.thread_active = True

        self.reset_session_progress()
        debug_output("[+] Sniffing session {} started".format(self.name))
        debug_output("[+] Filter: {}".format(self.filter))
        self.stopSniffing = False

        if self.pcap:
            self.load_pcap()
        else:
            self.sniff(stopper=self.stop_sniffing, filter=self.filter, prn=self.handlePacket, stopperTimeout=1, store=0)

        debug_output("[+] Sniffing session {} stopped".format(self.name))
        self.engine.commit_to_db(self)

        data = {'type': 'sniffdone', 'session_name': self.name}
        self.engine.messenger.broadcast(bson_dumps(data), 'sniffer-data', 'sniffdone')

        self.thread_active = False
        return
Example #27
def embed(blog_id, theme=None, output=None, api_host=None):
    from liveblog.themes import UnknownTheme
    # adding import here to avoid circular references
    from liveblog.advertisements.utils import get_advertisements_list
    from liveblog.advertisements.amp import AdsSettings, inject_advertisements

    api_host = api_host or request.url_root
    blog = get_resource_service('client_blogs').find_one(req=None, _id=blog_id)
    if not blog:
        return 'blog not found', 404

    # if the `output` is the `_id` get the data.
    if output:
        if isinstance(output, str):
            output = get_resource_service('outputs').find_one(req=None, _id=output)
        if not output:
            return 'output not found', 404
        else:
            collection = get_resource_service('collections').find_one(req=None, _id=output.get('collection'))
            output['collection'] = collection

    # Retrieve picture url from relationship.
    if blog.get('picture', None):
        blog['picture'] = get_resource_service('archive').find_one(req=None, _id=blog['picture'])

    # Retrieve the wanted theme and add it to blog['theme'] if it is not the registered one.
    try:
        theme_name = request.args.get('theme', theme)
    except RuntimeError:
        # This method can be called from outside a request context.
        theme_name = theme

    blog_preferences = blog.get('blog_preferences')
    if blog_preferences is None:
        return 'blog preferences are not available', 404

    blog_theme_name = blog_preferences.get('theme')
    if not theme_name:
        # No theme specified. Fallback to theme in blog_preferences.
        theme_name = blog_theme_name

    theme_service = get_resource_service('themes')
    theme = theme_service.find_one(req=None, name=theme_name)

    if theme is None:
        raise SuperdeskApiError.badRequestError(
            message='You will be able to access the embed after you register the themes')

    try:
        assets, template_content = collect_theme_assets(theme, parents=[])
    except UnknownTheme as e:
        return str(e), 500

    if not template_content:
        logger.warning('Template file not found for theme "%s". Theme: %s' % (theme_name, theme))
        return 'Template file not found', 500

    # Compute the assets root.
    if theme.get('public_url', False):
        assets_root = theme.get('public_url')
    else:
        assets_root = theme_service.get_theme_assets_url(theme_name)

    theme_settings = theme_service.get_default_settings(theme)
    i18n = theme.get('i18n', {})

    # the blog level theme overrides the one in theme level
    # this way we allow user to enable commenting only for certain blog(s)
    # or the other way around
    unset = 'unset'
    blog_users_can_comment = blog.get('users_can_comment', unset)
    if blog_users_can_comment != unset:
        theme_settings['canComment'] = True if blog_users_can_comment == 'enabled' else False

    # also when blog has been archived, we should disable commenting
    if blog.get('blog_status') == 'closed':
        theme_settings['canComment'] = False

    theme_settings['watermark'] = ACTIVATE_WATERMARK

    # Check if theme is SEO and/or AMP compatible.
    is_amp = theme.get('ampTheme', False)
    is_seo = theme.get('seoTheme', False)

    if is_seo:
        # Fetch initial blog posts for SEO theme
        blog_instance = Blog(blog)
        page_limit = theme_settings.get('postsPerPage', 10)
        sticky_limit = theme_settings.get('stickyPostsPerPage', 10)
        ordering = theme_settings.get('postOrder', blog_instance.default_ordering)

        # let's get the output channel tags if any
        tags = []
        if output:
            tags = output.get('tags', [])

        posts = blog_instance.posts(wrap=True, limit=page_limit, ordering=ordering, deleted=is_amp, tags=tags)
        sticky_posts = blog_instance.posts(wrap=True, limit=sticky_limit, sticky=True,
                                           ordering='newest_first', deleted=is_amp, tags=tags)

        api_response = {
            'posts': posts,
            'stickyPosts': sticky_posts
        }
        embed_env = theme_service.get_theme_template_env(theme, loader=CompiledThemeTemplateLoader)
        embed_template = embed_env.from_string(template_content)
        template_content = embed_template.render(
            blog=blog,
            output=output,
            options=theme,
            json_options=bson_dumps(theme),
            settings=theme_settings,
            api_response=api_response,
            assets_root=assets_root,
            i18n=i18n,
            api_host=api_host
        )

    asyncTheme = theme.get('asyncTheme', False)
    api_host = api_host.replace('//', app.config.get('EMBED_PROTOCOL')) if api_host.startswith('//') else api_host
    api_host = api_host.replace('http://', app.config.get('EMBED_PROTOCOL'))

    scope = {
        'blog': blog,
        'settings': theme_settings,
        'assets': assets,
        'api_host': api_host,
        'output': output,
        'template': template_content,
        'debug': app.config.get('LIVEBLOG_DEBUG'),
        'assets_root': assets_root,
        'async': asyncTheme,
        'i18n': i18n,
        'hook_urls': bool(TRIGGER_HOOK_URLS)
    }
    if is_amp:
        # Add AMP compatible css to template context
        styles = theme.get('files', {}).get('styles', {}).values()
        if len(styles):
            scope['amp_style'] = next(iter(styles))

    embed_template = 'embed_amp.html' if is_amp else 'embed.html'

    blog_archived = blog['blog_status'] == 'closed'
    solo_subscription = 'solo' in SUBSCRIPTION_LEVEL
    if blog_archived and solo_subscription:
        scope['template'] = render_template('blog-unavailable.html', **scope)
        scope['assets']['scripts'] = []

    response_content = render_template(embed_template, **scope)

    # TODO: move to somewhere else to simplify this method
    if is_amp and output and theme.get('supportAdsInjection', False):
        parsed_content = BeautifulSoup(response_content, 'lxml')
        ads = get_advertisements_list(output)

        frequency = output['settings'].get('frequency', 4)
        order = output['settings'].get('order', 1)

        ad_template = get_theme_template(theme, 'template-ad-entry.html')
        ads_settings = AdsSettings(
            frequency=frequency, order=order,
            template=ad_template, tombstone_class='hide-item')

        # let's remove hidden elements initially because they're just garbage
        # complex validation because `embed` is also called from outside, without a request context
        if not request or request and not request.args.get('amp_latest_update_time', False):
            hidden_items = parsed_content.find_all('article', class_=ads_settings.tombstone_class)
            for tag in hidden_items:
                tag.decompose()

        styles_tmpl = get_theme_template(theme, 'template-ad-styles.html')
        amp_style = BeautifulSoup(styles_tmpl.render(frequency=frequency), 'html.parser')

        style_tag = parsed_content.find('style', attrs={'amp-custom': True})
        if style_tag:
            style_tag.append(amp_style.find('style').contents[0])

        inject_advertisements(parsed_content, ads_settings, ads, theme)
        response_content = parsed_content.prettify()

    return response_content
Example #28
 def get(self):
     # db = self.application.db
     messages = yield Message.find_raw()
     json_messages = bson_dumps(messages)
     self.render('chatapp/chat_angular.html', messages=json_messages)
Example #29
def run_details(run_id):
    return jsonify(json.loads(bson_dumps(sacred_mongo.get_run(run_id))))
Example #30
def list_files():
    return jsonify(
        json.loads(
            bson_dumps({'files': [f for f in sacred_mongo.list_files()]})))
Example #31
def bsonify(bson_data):
    response = make_response(
        bson_dumps(bson_data, json_options=RELAXED_JSON_OPTIONS))
    response.headers['Content-Type'] = 'application/json'
    return response
Example #32
 def save(collection, data):
     cls = Repo()
     obj_id = cls.db[collection].insert_one(data).inserted_id
     obj = Repo.find(collection, obj_id)
     return json.loads(bson_dumps(obj))
Example #33
    def command_handler(self, msg):
        self.command_lock.acquire()
        msg = json.loads(msg)
        cmd = msg['msg']
        params = msg.get('params', {})
        src = msg['src']
        queryid = msg['queryid']
        final_msg = None

        if cmd == 'newsession':
            _id = self.snifferengine.new_session(params)
            final_msg = bson_dumps(_id)

        if cmd == 'sessionlist':
            session_list = []
            user = params.get('user', None)
            page = params.get('page', 0)
            private = params.get('private', False)

            for session in self.snifferengine.model.get_sniffer_sessions(
                    private=private, username=user, page=page):

                if session['_id'] in self.snifferengine.sessions:
                    session = self.snifferengine.sessions[session['_id']]
                    active = session.status()
                    session = session.__dict__
                else:
                    active = False
                    session_data = bson_loads(session['session_data'])
                    session['nodes'] = session_data['nodes']
                    session['edges'] = session_data['edges']
                    session['id'] = session['_id']

                session_list.append({
                    'id': str(session.get('id')),
                    'date_created': session.get('date_created'),
                    'name': session.get('name'),
                    'packets': session.get('packet_count'),
                    'nodes': len(session.get('nodes')),
                    'edges': len(session.get('edges')),
                    'status': "Running" if active else "Stopped",
                    'public': session.get('public'),
                })

            final_msg = bson_dumps(session_list)

        if params.get('session_id', False):

            session = self.snifferengine.fetch_sniffer_session(
                params['session_id'])

            if not session:
                final_msg = 'notfound'

            if session:

                if cmd == 'sessioninfo':

                    final_msg = {
                        'name':
                        session.name,
                        'filter':
                        session.filter,
                        'pcap':
                        session.pcap,
                        'packet_count':
                        session.packet_count,
                        'pcap_filename':
                        session.pcap_filename,
                        'id':
                        str(session.id),
                        'public':
                        session.public,
                        'status':
                        session.status(),
                        'node_list':
                        session.get_nodes(),
                        'modules': [(session.modules[module_name].display_name,
                                     module_name)
                                    for module_name in session.modules],
                    }

                if cmd == 'sniffstatus':
                    final_msg = {'status': session.status()}

                if cmd == 'sniffupdate':
                    # this needs to be stringified, or else encoding errors will ensue
                    final_msg = session.update_nodes()
                    final_msg = json.dumps(final_msg,
                                           default=json_util.default)

                if cmd == 'sniffstart':
                    #self.snifferengine.start_session(params['session_name'], params['remote_addr'])
                    session.start()
                    final_msg = True

                if cmd == 'sniffstop':
                    session.stop()
                    final_msg = True

                if cmd == 'flowstatus':
                    flow = session.flow_status()
                    # this needs to be stringified, or else encoding errors will ensue
                    final_msg = flow

                if cmd == 'flow_statistics_update':
                    print "Received 'flow_statistics_update' message. Please implement me? "

                if cmd == 'get_flow_payload':
                    if params['flowid'] in session.flows:
                        final_msg = session.flows[
                            params['flowid']].get_payload(encoding='base64')
                    else:
                        final_msg = False

                if cmd == 'sniffdelete':
                    result = self.snifferengine.delete_session(
                        params['session_id'])
                    final_msg = result

                if cmd == 'sniffpcap':
                    result = self.snifferengine.commit_to_db(session)
                    final_msg = result

                if cmd == 'call_module_function':
                    module_name = params['module_name']
                    function = params['function']
                    args = params['args']
                    module = session.modules.get(module_name, None)
                    if module:
                        try:
                            final_msg = getattr(module, function)(args)
                        except Exception, e:
                            import traceback
                            final_msg = "[{} in function {}] Module error: {}\n".format(
                                module_name, function, e)
                            final_msg += traceback.format_exc()
                            final_msg = "<pre>{}</pre>".format(final_msg)
                    else:
                        final_msg = False
Example #34
	def command_handler(self, msg):
		self.command_lock.acquire()
		msg = json.loads(msg)
		cmd = msg['msg']
		params = msg.get('params', {})
		src = msg['src']
		queryid = msg['queryid']
		final_msg = None

		if cmd == 'newsession':
			_id = self.snifferengine.new_session(params)
			final_msg = bson_dumps(_id)

		if cmd == 'sessionlist':
			session_list = []
			user = params.get('user', None)
			page = params.get('page', 0)
			private = params.get('private', False)
			
			for session in self.snifferengine.model.get_sniffer_sessions(private=private, username=user, page=page):
			
				if session['_id'] in self.snifferengine.sessions:
					session = self.snifferengine.sessions[session['_id']]
					active = session.status()
					session = session.__dict__
				else:
					active = False
					session_data = bson_loads(session['session_data'])
					session['nodes'] = session_data['nodes']
					session['edges'] = session_data['edges']
				
				session_list.append( {   	'id': str(session.get('_id')),
											'date_created': session.get('date_created'),
											'name': session.get('name'),
											'packets': session.get('packet_count'),
											'nodes': len(session.get('nodes')),
											'edges': len(session.get('edges')),
											'status': "Running" if active else "Stopped",
											'public': session.get('public'),
										} )

			final_msg = bson_dumps(session_list)

		if params.get('session_id', False):
			
			session = self.snifferengine.fetch_sniffer_session(params['session_id'])

			if not session:
				final_msg = False

			if session:

				if cmd == 'sessioninfo':

					final_msg = {
							'name' : session.name,
							'filter' : session.filter,
							'pcap' : session.pcap,
							'pcap_filename': session.pcap_filename,
							'id' : str(session.id),
							'public': session.public,
							'status': session.status(),
					}

				if cmd == 'sniffstatus':
					final_msg = {'status': session.status()}

				if cmd == 'sniffupdate':
					# this needs to be stringified, or else encoding errors will ensue
					final_msg = session.update_nodes()
					final_msg = json.dumps(final_msg, default=json_util.default)

				if cmd == 'sniffstart':
					#self.snifferengine.start_session(params['session_name'], params['remote_addr'])
					session.start(params['remote_addr'])
					final_msg = True

				if cmd == 'sniffstop':
					session.stop()
					final_msg = True

				if cmd == 'flowstatus':
					flow = session.flow_status()
					# this needs to be stringified, or else encoding errors will ensue
					final_msg = flow

				if cmd == 'flow_statistics_update':
					print "Received 'flow_statistics_update' message. Please implement me? "

				if cmd == 'get_flow_payload':
					if params['flowid'] in session.flows:
						final_msg = session.flows[params['flowid']].get_payload(encoding='base64')
					else:
						final_msg = False

				if cmd == 'sniffdelete':
					result = self.snifferengine.delete_session(params['session_id'])
					final_msg = result

				if cmd == 'sniffpcap':
					result = self.snifferengine.commit_to_db(session)
					final_msg = result

		if final_msg != None:
			reply = {'msg': final_msg, 'queryid': queryid, 'dst': src, 'src':self.name}
			self.publish_to_channel('sniffer-commands', json.dumps(reply))
		self.command_lock.release()

		return
Example #35
def get_repo_downloads(mongo, collection):
    return bson_dumps(mongo[collection].find({}, {"name": 1, "downloads": 1}))
Example #36
def serialize(to_serialize, *args, **kwargs):
    """
    Serializes a PyMongo BSON into a string
    """
    return bson_dumps(to_serialize, *args, **kwargs)
Example #37
def serialize(to_serialize, *args, **kwargs):
    """
    Serializes a PyMongo BSON into a string
    """
    return bson_dumps(to_serialize, *args, **kwargs)
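
serialize() above is a thin wrapper around bson_dumps, so BSON-specific types come out as MongoDB Extended JSON text. A small illustrative sketch follows, not part of the original source, assuming bson_dumps is bson.json_util.dumps and pymongo is installed; the document contents are invented for the example.

from datetime import datetime

from bson import ObjectId
from bson.json_util import dumps as bson_dumps

doc = {
    "_id": ObjectId("5f2b6c9e8f1b4a3d2c1e0f9a"),  # hypothetical id
    "created": datetime(2020, 1, 1),
}

# ObjectId and datetime values are encoded as {"$oid": ...} and {"$date": ...},
# so the return value is a plain JSON string safe to log or send over the wire.
text = bson_dumps(doc)
print(text)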
Example #38
 def save_module_entry(self, session_id, module_name, entry, timeout=None):
     asd = self.modules.update({'name': module_name, 'session_id': session_id}, {'name': module_name, 'session_id': session_id, 'entry': bson_dumps(entry), 'timeout': timeout}, upsert=True)
Example #39
def createGroupExhibition():
    form = forms.GroupExhibitionForm()
    artist = db.artist.find()
    form.artists.choices = [(str(artist['_id']), artist['name'])
                            for artist in db.artist.find().sort("artist_sort")]

    selectedArtworks = []

    if form.is_submitted():
        if form.validate():

            formdata = form.data

            exhibition = utils.handle_form_data(
                {}, formdata, ['press_release', 'artists', 'extra_artists'])
            exhibition['artists'] = [
                db.artist.find_one({'_id': ObjectId(artist_id)})
                for artist_id in request.form.getlist('artists')
            ]
            exhibition['slug'] = utils.slugify(exhibition['exhibition_name'])
            exhibition_md = form.wysiwig_exhibition_description.data
            exhibition['is_group_expo'] = True
            extra_artists = list(
                zip(request.form.getlist('extra_artists_name'),
                    request.form.getlist('extra_artists_sort')))
            exhibition['extra_artists'] = [{
                'name': name,
                'artist_sort': sort
            } for name, sort in extra_artists]

            exhibition['artworks'] = []
            uploaded_artworks = []

            if 'artworks' in request.files:
                for uploaded_image in request.files.getlist('artworks'):
                    image_path = utils.handle_uploaded_file(
                        uploaded_image, config.upload['ARTWORK_IMAGE'],
                        utils.setfilenameroot(
                            uploaded_image.filename,
                            exhibition['artists'][0]['slug']))

                    exhibition['artists'][0]['images'].append({
                        '_id':
                        ObjectId(),
                        'path':
                        image_path,
                        'published':
                        False
                    })
                    uploaded_artworks.append(image_path)

                db.artist.update({'_id': exhibition['artists'][0]['_id']},
                                 exhibition['artists'][0])
                ## Update this artist on exhibitions as well
                db.exhibitions.update({"artist._id": ObjectId(artist_id)},
                                      {"$set": {
                                          "artist": artist
                                      }},
                                      multi=True)
                ## Should update this artist on group exhibitions as well
                db.exhibitions.update({"artists._id": ObjectId(artist_id)},
                                      {"$set": {
                                          "artists.$": artist
                                      }},
                                      multi=True)

            if 'artworks' in request.form:
                for artwork_image_path in request.form.getlist('artworks'):
                    if artwork_image_path:
                        if artwork_image_path[0:9] == 'uploaded:':
                            artwork_index = int(artwork_image_path[9:])
                            artwork_image_path = uploaded_artworks[
                                artwork_index]

                        for artist in exhibition['artists']:
                            image = utils.find_where('path',
                                                     artwork_image_path,
                                                     artist['images'])

                            if image:
                                exhibition['artworks'].append(image)
                                break

            if request.files['press_release']:
                exhibition['press_release'] = utils.handle_uploaded_file(
                    request.files['press_release'],
                    config.upload['PRESS_RELEASE'],
                    utils.setfilenameroot(
                        request.files['press_release'].filename,
                        exhibition['slug']))
                exhibition['press_release_size'] = utils.getfilesize(
                    exhibition['press_release'])

            if 'coverimage' in request.files:
                uploaded_image = request.files.getlist('coverimage')[0]
                exhibition['coverimage'] = {
                    'path':
                    utils.handle_uploaded_file(
                        uploaded_image,
                        config.upload['EXHIBITION_COVER_IMAGE'],
                        utils.setfilenameroot(uploaded_image.filename,
                                              exhibition['slug']))
                }

            exhibition['images'] = []
            uploaded_images = []

            if 'image' in request.files:
                for uploaded_image in request.files.getlist('image'):
                    uploaded_images.append(
                        utils.handle_uploaded_file(
                            uploaded_image, config.upload['EXHIBITION_VIEW'],
                            utils.setfilenameroot(uploaded_image.filename,
                                                  exhibition['slug'])))

            if 'image' in request.form:
                for path in request.form.getlist('image'):
                    if path[0:9] == 'uploaded:':
                        image_index = int(path[9:])
                        path = uploaded_images[image_index]

                        exhibition['images'].append({'path': path})

            inserted_id = db.exhibitions.insert(exhibition)

            flash(
                'You successfully created the group exhibition, <a href="{1}">{0}</a>'
                .format(
                    exhibition['exhibition_name'],
                    url_for('.updateGroupExhibition',
                            exhibition_id=inserted_id)), 'success')

            if request.is_xhr:
                return bson_dumps(exhibition), 201
            else:
                return redirect_flask(url_for('.index'))
        elif request.is_xhr:
            return json.dumps(form.errors), 400

        selectedArtworks = request.form.getlist('artworks')

    return render_template('admin/group-exhibition/exhibitionCreate.html',
                           form=form,
                           selectedArtworks=json.dumps(selectedArtworks))
Example #40
def embed(blog_id, theme=None, output=None, api_host=None):
    api_host = api_host or request.url_root
    blog = get_resource_service('client_blogs').find_one(req=None, _id=blog_id)
    if not blog:
        return 'blog not found', 404

    # if the `output` is the `_id` get the data.
    if output:
        if isinstance(output, str):
            output = get_resource_service('outputs').find_one(req=None,
                                                              _id=output)
        if not output:
            return 'output not found', 404
        else:
            collection = get_resource_service('collections').find_one(
                req=None, _id=output.get('collection'))
            output['collection'] = collection

    # Retrieve picture url from relationship.
    if blog.get('picture', None):
        blog['picture'] = get_resource_service('archive').find_one(
            req=None, _id=blog['picture'])

    # Retrieve the wanted theme and add it to blog['theme'] if it is not the registered one.
    try:
        theme_name = request.args.get('theme', theme)
    except RuntimeError:
        # This method can be called from outside a request context.
        theme_name = theme

    blog_preferences = blog.get('blog_preferences')
    if blog_preferences is None:
        return 'blog preferences are not available', 404

    blog_theme_name = blog_preferences.get('theme')
    if not theme_name:
        # No theme specified. Fallback to theme in blog_preferences.
        theme_name = blog_theme_name

    theme = get_resource_service('themes').find_one(req=None, name=theme_name)
    if theme is None:
        raise SuperdeskApiError.badRequestError(
            message=
            'You will be able to access the embed after you register the themes'
        )

    try:
        assets, template_content = collect_theme_assets(theme, parents=[])
    except UnknownTheme as e:
        return str(e), 500

    if not template_content:
        logger.error('Template file not found for theme "%s". Theme: %s' %
                     (theme_name, theme))
        return 'Template file not found', 500

    theme_service = get_resource_service('themes')

    # Compute the assets root.
    if theme.get('public_url', False):
        assets_root = theme.get('public_url')
    else:
        assets_root = theme_service.get_theme_assets_url(theme_name)

    theme_settings = theme_service.get_default_settings(theme)
    i18n = theme.get('i18n', {})

    # Check if theme is SEO and/or AMP compatible.
    is_amp = theme.get('ampTheme', False)
    is_seo = theme.get('seoTheme', False)

    if is_seo:
        # Fetch initial blog posts for SEO theme
        blog_instance = Blog(blog)
        page_limit = theme_settings.get('postsPerPage', 10)
        sticky_limit = theme_settings.get('stickyPostsPerPage', 10)
        ordering = theme_settings.get('postOrder',
                                      blog_instance.default_ordering)
        posts = blog_instance.posts(wrap=True,
                                    limit=page_limit,
                                    ordering=ordering,
                                    deleted=is_amp)
        sticky_posts = blog_instance.posts(wrap=True,
                                           limit=sticky_limit,
                                           sticky=True,
                                           ordering='newest_first',
                                           deleted=is_amp)

        api_response = {'posts': posts, 'stickyPosts': sticky_posts}
        embed_env = theme_service.get_theme_template_env(
            theme, loader=CompiledThemeTemplateLoader)
        embed_template = embed_env.from_string(template_content)
        template_content = embed_template.render(
            blog=blog,
            output=output,
            options=theme,
            json_options=bson_dumps(theme),
            settings=theme_settings,
            api_response=api_response,
            assets_root=assets_root,
            i18n=i18n)

    async = theme.get('asyncTheme', False)
    api_host = api_host.replace('//', app.config.get(
        'EMBED_PROTOCOL')) if api_host.startswith('//') else api_host
    api_host = api_host.replace('http://', app.config.get('EMBED_PROTOCOL'))

    scope = {
        'blog': blog,
        'settings': theme_settings,
        'assets': assets,
        'api_host': api_host,
        'output': output,
        'template': template_content,
        'debug': app.config.get('LIVEBLOG_DEBUG'),
        'assets_root': assets_root,
        'async': async,
        'i18n': i18n
    }
    if is_amp:
        # Add AMP compatible css to template context
        styles = theme.get('files', {}).get('styles', {}).values()
        if len(styles):
            scope['amp_style'] = next(iter(styles))

    embed_template = 'embed.html'
    if is_amp:
        embed_template = 'embed_amp.html'

    return render_template(embed_template, **scope)
Example #41
#!/usr/bin/python
import cgi
import cgitb
import json
import sys
import uuid

from bson.json_util import dumps as bson_dumps

cgitb.enable()
request = json.loads(cgi.FieldStorage()["request"].value)


sys.stderr.write(str(request))

print 'Content-type: application/json\n\n' 
print bson_dumps( { "message" : "bye-bye" } )
Example #42
def handle_POST(password_dict, path, data, wfile):
	try:
		#TRY TO RUN FUNCTIONS ON PROXY....IF ANY ERRORS ARE ENCOUNTERED, ERRORS ARE REPORTED IN ERROR LOG BELOW (SEE EXCEPT). ERROR LOG IS CALLED "mongoproxy_error_log.txt"
		if 'authkey' not in data:			
			data['authkey'] = ''
		if data['authkey'] == '':
			print('get rid of this authkey hack!!')
		ap_user = members.getFullName(data['authkey'])
		
		db_connect_data = (password_dict['dbpath'],'reader',password_dict['reader'],ap_user) 		
		
		#print datetime.now()				
				
		if not os.path.isdir(ap_user):
			os.mkdir(ap_user)
		#[db_writer,_ig_]= igdbconnect.connectToIgDatabase('writer',password_dict['writer'])
		[db_reader,_ig_]= igdbconnect.connectToIgDatabase('reader',password_dict['reader'])						
		user_dict = db_reader.users.find_one({'user':ap_user},{'_id':0})
				
		if not user_dict: #user not found in database		
			user_dict = {
				'user':ap_user,
				'name':'',
				'email':'',
				'administrator':False,
				'lab':[],
				'write_access': False
			}
			
		if data.get('db_action') == 'updates': #for inserting into and updating the database
			print "debug: updates"
			bennigoetz_writes.db = db_writer
			data.pop('db_action', None)
			#[db_writer,_ig_] = igdbconnect.connectToIgDatabase('writer',password_dict['writer'])
			print data['module']
			data['module'] = 'bennigoetz_writes'
			print data['module']
			print data['command']
#			print data['args']
			print data['kwargs']
			print globals()['bennigoetz_writes']
			data['kwargs']['user_info'] = user_dict
	#		getattr(globals()['bennigoetz_writes'], data['command'])(db=db_writer, user_info=user_dict, *data['args'], **data['kwargs'])
			return_value = getattr(globals()['bennigoetz_writes'], data['command'])(*data['args'], **data['kwargs'])
			print "return value: {}".format(return_value)
			wfile.write(return_value)
	#		db_writer.test_exps.insert({'animal': 'walrus'})
	
		elif data.get('db_action') == 'query': #this is what actually processes a request sent to proxy 
			
			#query_id = data['query_object_id']
	
			#if query_id not in query_objects:
			#	query_objects[query_id] = proxytest_query_functions.RunQuery(query_id,password_dict,db_method=data['connection_type'])
									
			#RunQuery Parameters => db_connect_data,db_user_name,modify_query_values_to_follow_db_schema=True,redirect_fields_for_improved_queries=True,to_file=True,file_prefix=None,proxy_path = ''			
			#see _return function to see how the data dictionary is populated
			#important => set proxy_path to None because we are already on the proxy 
			class_args = [db_connect_data]			
			class_karg = {'proxy_path':None}
			if 'to_file' in data:
				class_karg['to_file'] = data['to_file']
			if 'modify_query_values' in data:
				class_karg['modify_query_values_to_follow_db_schema'] = data['modify_query_values']
			if 'redirect_query_fields' in data:
				class_karg['redirect_fields_for_improved_queries'] = data['redirect_query_fields']
			if 'file_prefix' in data:
				class_karg['file_prefix'] = data['file_prefix']						
									
			new_query_object=immunogrep_db_query_api.RunQuery(*class_args,**class_karg)
			
			#new_query_object= immunogrep_db_query_api.RunQuery(db_connect_data,ap_user,proxy_path=None,modify_query_values_to_follow_db_schema=data['modify_query_values'],redirect_fields_for_improved_queries=data['redirect_query_fields'],to_file=data['query_to_file'],file_prefix=data['file_prefix'])
									
			for function_to_run in data['command']: #data['command'] contains a list of functions to run and all of the accompanying parameters passed in by the user on the appsoma side
				#print function_to_run['args']
				#print dumps(function_to_run['args'])
				#print function_to_run
				#print 'args'
				#print str(function_to_run['args'])
				#print function_to_run['kwargs']
				#print 'keyargs'
				#print bson_dumps(function_to_run['kwargs'])
				#print str(function_to_run['kwargs'])
				
				#if we use bson_loads, then parameters such as ObjectId and re.compile are not passed in correctly to the function using exec
				#however, if we just use bson_dumps or json_dumps, then variables such as True are converted to true which are not correct
				#so instead, we => load as json by first dumps using bson and then reload using json, then convert to string
				exec('new_query_object.{0}(*{1},**{2})'.format(function_to_run['command'],str(json.loads(bson_dumps(function_to_run['args']))),str(json.loads(bson_dumps(function_to_run['kwargs'])))))
				
				#getattr(globals()['__builtins__']['new_query_object'],function_to_run['command'])(*function_to_run['args'], **function_to_run['kwargs'])		
								
			#return_query = query_objects.pop(query_id)
			new_query_object._return_results(wfile) #ok all commands have been run, lets return the results via proxy			
			del new_query_object				
			#OK, all done.... now, lets delete me...
			#query_objects.pop(query_id,None)				
		
	except Exception as e: #CATCH any errors						
		exc_type, exc_obj, exc_tb = sys.exc_info()
		fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]    
		tb_error = traceback.format_exc()		
		
		with open(ap_user+'/'+str(ap_user)+"_mongoproxy_error_log.txt","a") as error_log: #WRITE errors to file 							
			#tb_error = tb_error.replace('\n',';')						
			#tb_error = tb_error.replace('\t',',')					
			error_log.write('###ERROR AT: '+str(datetime.now())+'\n')
			error_log.write('Username: '******'\n')
			error_log.write('Proxy Request: '+bson_dumps(data,indent=4, separators=(',', ': '))+'\n')
			error_log.write('ERROR Message: '+tb_error)
			error_log.write('########################################\n\n\n')			
			#error_log.write(str(datetime.now())+'\t'+str(ap_user)+'\t'+json.dumps(data)+'\t'+tb_error+'\n')
		
		raise Exception(tb_error)
Example #43
    def command_handler(self, msg):
        self.command_lock.acquire()
        msg = json.loads(msg)
        cmd = msg["msg"]
        params = msg.get("params", {})
        src = msg["src"]
        queryid = msg["queryid"]
        final_msg = None

        if cmd == "newsession":
            _id = self.snifferengine.new_session(params)
            final_msg = bson_dumps(_id)

        if cmd == "sessionlist":
            session_list = []
            user = params.get("user", None)
            page = params.get("page", 0)
            private = params.get("private", False)

            for session in self.snifferengine.model.get_sniffer_sessions(private=private, username=user, page=page):

                if session["_id"] in self.snifferengine.sessions:
                    session = self.snifferengine.sessions[session["_id"]]
                    active = session.status()
                    session = session.__dict__
                else:
                    active = False
                    session_data = bson_loads(session["session_data"])
                    session["nodes"] = session_data["nodes"]
                    session["edges"] = session_data["edges"]
                    session["id"] = session["_id"]

                session_list.append(
                    {
                        "id": str(session.get("id")),
                        "date_created": session.get("date_created"),
                        "name": session.get("name"),
                        "packets": session.get("packet_count"),
                        "nodes": len(session.get("nodes")),
                        "edges": len(session.get("edges")),
                        "status": "Running" if active else "Stopped",
                        "public": session.get("public"),
                    }
                )

            final_msg = bson_dumps(session_list)

        if params.get("session_id", False):

            session = self.snifferengine.fetch_sniffer_session(params["session_id"])

            if not session:
                final_msg = "notfound"

            if session:

                if cmd == "sessioninfo":

                    final_msg = {
                        "name": session.name,
                        "filter": session.filter,
                        "pcap": session.pcap,
                        "packet_count": session.packet_count,
                        "pcap_filename": session.pcap_filename,
                        "id": str(session.id),
                        "public": session.public,
                        "status": session.status(),
                        "node_list": session.get_nodes(),
                        "modules": [
                            (session.modules[module_name].display_name, module_name) for module_name in session.modules
                        ],
                    }

                if cmd == "sniffstatus":
                    final_msg = {"status": session.status()}

                if cmd == "sniffupdate":
                    # this needs to be stringified, or else encoding errors will ensue
                    final_msg = session.update_nodes()
                    final_msg = json.dumps(final_msg, default=json_util.default)

                if cmd == "sniffstart":
                    # self.snifferengine.start_session(params['session_name'], params['remote_addr'])
                    session.start()
                    final_msg = True

                if cmd == "sniffstop":
                    session.stop()
                    final_msg = True

                if cmd == "flowstatus":
                    flow = session.flow_status()
                    # this needs to be stringified, or else encoding errors will ensue
                    final_msg = flow

                if cmd == "flow_statistics_update":
                    print "Received 'flow_statistics_update' message. Please implement me? "

                if cmd == "get_flow_payload":
                    if params["flowid"] in session.flows:
                        final_msg = session.flows[params["flowid"]].get_payload(encoding="base64")
                    else:
                        final_msg = False

                if cmd == "sniffdelete":
                    result = self.snifferengine.delete_session(params["session_id"])
                    final_msg = result

                if cmd == "sniffpcap":
                    result = self.snifferengine.commit_to_db(session)
                    final_msg = result

                if cmd == "call_module_function":
                    module_name = params["module_name"]
                    function = params["function"]
                    args = params["args"]
                    module = session.modules.get(module_name, None)
                    if module:
                        try:
                            final_msg = getattr(module, function)(args)
                        except Exception, e:
                            import traceback

                            final_msg = "[{} in function {}] Module error: {}\n".format(module_name, function, e)
                            final_msg += traceback.format_exc()
                            final_msg = "<pre>{}</pre>".format(final_msg)
                    else:
                        final_msg = False