Example #1
import couchdb

couch = couchdb.Server('http://localhost:5984/')
couch.resource.credentials = ('whisk_admin', 'some_passw0rd')
db_name = 'whisk_local_activations'  # renamed: the original rebound the 'couchdb' module name
db = couch[db_name]
for doc_id in db:
    if "_design" not in doc_id:
        # activation ids look like "namespace/activationid"; keep the bare activation id
        # (the original had an 'if db[id].rev is list' branch whose two arms were identical)
        doc = {'_id': doc_id.split('/')[1], '_rev': db[doc_id].rev}
        print(doc)
        db.delete(db[doc_id])
        # purge the deleted revision so compaction can reclaim the space
        db.purge([doc])
db.compact()
Example #2
File: Paypal.py Project: tuian/mq
from datetime import datetime
from pytz import timezone

import re

from cgi import parse_qsl

import couchdb  # used below for couchdb.Server
from celery.task import task  # assumed: @task below is Celery's task decorator

import settings
import logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')

import locale
locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')

couch_server = couchdb.Server(settings.COUCH_DSN)
db_client_docs = couch_server['client_docs']

utc = timezone('UTC')
phtz = timezone('Asia/Manila')


@task(ignore_result=True)
def charge_fees(doc_id):
    """given the doc_id, create fees
    """
    logging.info('checking document %s ' % doc_id)
    couch_server = couchdb.Server(settings.COUCH_DSN)
    db_client_docs = couch_server['client_docs']
    doc = db_client_docs.get(doc_id)
Example #3
def couch_load_nosql(objects, numbers):
    import couchdb
    server = couchdb.Server()
    def teardown(self):
        server = couchdb.Server(SERVER)
        try:
            server.delete(DATABASE)
        except ResourceNotFound:
            pass
#-*- coding: utf-8 -*-
import sys
import couchdb
import urllib2
import json
URL = 'localhost'
db_name = 'rusiatotal'
'''========couchdb=========='''
server = couchdb.Server(
    'http://localhost:5984/'
)  # ('http://245.106.43.184:5984/') set this to the URL of your database
try:
    print db_name
    db = server[db_name]
    print 'success'

except couchdb.http.ResourceNotFound:
    sys.stderr.write("Error: DB not found. Closing...\n")
    sys.exit()

url = 'http://localhost:5984/rusiatotal/_design/hora/_view/hora'
req = urllib2.Request(url)
f = urllib2.urlopen(req)
d = json.loads(f.read())
dic = {}
for x in d['rows']:
    a = x['value']
    arreglo = str(a).split(" ")
    hora = arreglo[0] + ' ' + arreglo[1] + ' ' + arreglo[2]
    if hora in dic:
        dic[hora] = dic.get(hora) + 1
Example #6
    def __init__(self):
        password = '******'
        couchdb_ip = 'localhost'
        self.server = couchdb.Server("http://*****:*****@{}:5984".format(
            password, couchdb_ip))
Example #7
import couchdb
import tweepy
from tweepy import OAuthHandler  # OAuthHandler and tweepy.API are used below


#our key to twitter API
consumer_key = 'XsIT8AInFpWzKOxqxxUGIyjUF'
consumer_secret = 'SqJchyijqas6re8YCGhHDhm9QgAe2DOYPhnQQGF2YLXFc8NNED'
access_token = '1128155636858904576-Na27CLWShUdxbkRVMmnGgnsiLifdKV'
access_secret = 'aTsFpIIyFrFX8Tyk2F95KzU1y5KVUM0gVgAO9E3j9Y49w'

auth = OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_secret)
api = tweepy.API(auth)

db_host = "http://*****:*****@45.113.233.247:5984"
# db_host = "http://*****:*****@localhost:5984"
couch = couchdb.Server(db_host)


# db_name is a string
def save_to_db(data, db_name):
    try:
        db = couch[db_name]
    except couchdb.http.ResourceNotFound:
        db = couch.create(db_name)
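    # Database.update() saves the list of documents in a single bulk request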
    db.update(data)

def update_informationsdb(data, doc_id):
    try:
        db = couch["informations"]
    except couchdb.http.ResourceNotFound:
        db = couch.create("informations")
Example #8
import sys

import tweepy
import couchdb

QUERY = sys.argv[1:]

SERVER_URL = 'YOUR COUCHDB URL'  #ex: http://localhost:5984
DB_USER = '******'
DB_PASSWD = 'YOUR PASSWORD'
DB = 'YOUR DB NAME'

CONSUMER_KEY = 'YOUR CONSUMERKEY'
CONSUMER_SECRET = 'YOUR CONSUMER_SECRET'
ACCESS_TOKEN = 'YOUR ACCESS_TOKEN'
ACCESS_TOKEN_SECRET = 'YOUR ACCESS_TOKEN_SECRET'

auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)

server = couchdb.Server(SERVER_URL)
server.resource.credentials = (DB_USER, DB_PASSWD)

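# Server.create() raises PreconditionFailed when the database already exists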
try:
    db = server.create(DB)

except couchdb.http.PreconditionFailed:
    db = server[DB]


class CustomStreamListener(tweepy.StreamListener):
    def on_status(self, status):
        results = {}
        try:
            if status.id_str in db:
                return True
Example #9
import tweepy
import couchdb
import re

server = couchdb.Server('http://*****:*****@172.26.133.41:8082/')
# db = server.create('lockdown_can')
db = server['lockdown_can']

# # Kevin
# consumer_key = "v918I7H2QJsIkYymHo5khE74f"
# consumer_secret = "5VcGYE1TUDx7i5aE2onTpWKcTPgJBx3U3LqybbE7kOu133l3z1"
# access_token = "1251803998555762693-eO27dyOYtNlfEMguCnPA2YHtvsYauA"
# access_token_secret = "jfckecjtPbaXoKU5LjFxL8NUSvOtxXrtqRCsfaycXrgiV"

# Richard (teacher)
# consumer_key = "Rz89nxFrVyhZTUX3eoBi05m6j"
# consumer_secret = "o9Cdfk7W6qfpTDYTx4tHFW2FQxEkALvIu2VSCPbbNVkyTniVjf"
# access_token = "1251802835177504770-VEcU0oHI2Kx0BTPv3MEKgpMarQcUqG"
# access_token_secret = "iyhMSbmY5xm2w3BZEVvgZ1Y6TUwJcgKmSIniBGMi4oEyL"

# Taiba (nickname)
consumer_key = "n0FRrHfmZImwPiuzPQ5CGmFiI"
consumer_secret = "BFUOm6PuPBAY5jzi4CvIsfwk97dwtpdmRlEmWCCkOF1IlsQ9dj"
access_token = "1251879426746277889-dp7UT86bYaxB9dYZ1SR9i4LlPLHcwY"
access_token_secret = "cC9zCNr7Awqvra8DtZkQl8TaYPAMNLGikKrpDJI4UGUSx"


auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
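# wait_on_rate_limit=True makes tweepy sleep until Twitter's rate-limit window resets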
api = tweepy.API(auth, wait_on_rate_limit=True)
Example #10
def load_settings(filepath=None):
    """Load and return the settings from the given settings file,
    or from the first existing file in a predefined list of filepaths.
    Raise IOError if no readable settings file was found.
    Raise KeyError if a settings variable is missing.
    Raise ValueError if the settings variable value is invalid."""
    homedir = os.path.expandvars('$HOME')
    basedir = os.path.dirname(__file__)
    localdir = '/var/local/charon'
    if not filepath:
        hostname = socket.gethostname().split('.')[0]
        for filepath in [os.path.join(homedir, "{0}.yaml".format(hostname)),
                         os.path.join(homedir, 'default.yaml'),
                         os.path.join(basedir, "{0}.yaml".format(hostname)),
                         os.path.join(basedir, 'default.yaml'),
                         os.path.join(localdir, "{0}.yaml".format(hostname)),
                         os.path.join(localdir, 'default.yaml')]:
            if os.path.exists(filepath) and \
               os.path.isfile(filepath) and \
               os.access(filepath, os.R_OK):
                break
        else:
            raise IOError('no readable settings file found')
    with open(filepath) as infile:
        settings.update(yaml.safe_load(infile))
    # Set logging state
    if settings.get('LOGGING_DEBUG'):
        kwargs = {'level':logging.DEBUG}
    else:
        kwargs = {'level':logging.INFO}
    try:
        kwargs['format'] = settings['LOGGING_FORMAT']
    except KeyError:
        pass
    try:
        kwargs['filename'] = settings['LOGGING_FILENAME']
    except KeyError:
        pass
    try:
        kwargs['filemode'] = settings['LOGGING_FILEMODE']
    except KeyError:
        pass
    logging.basicConfig(**kwargs)
    logging.info("settings from file %s", filepath)
    # Check settings
    for key in ['BASE_URL', 'DB_SERVER', 'DB_DATABASE',
                'COOKIE_SECRET', 'AUTH']:
        if key not in settings:
            raise KeyError("no settings['{0}'] item".format(key))
        if not settings[key]:
            raise ValueError("settings['{0}'] has invalid value".format(key))
    # Only Userman is available currently
    key = 'SERVICE'
    if settings['AUTH'].get(key) != 'Userman':
        raise ValueError("settings['{0}'] has invalid value".format(key))
    for key in ['HREF', 'USER_HREF', 'AUTH_HREF', 'API_TOKEN']:
        if key not in settings['AUTH']:
            raise KeyError("no settings['AUTH']['{0}'] item".format(key))
    if len(settings['COOKIE_SECRET']) < 10:
        raise ValueError("settings['COOKIE_SECRET'] too short")
    # Settings computable from others
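    # Server.version() issues a GET against the server root, so this also verifies DB_SERVER is reachable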
    settings['DB_SERVER_VERSION'] = couchdb.Server(settings['DB_SERVER']).version()
    if 'PORT' not in settings:
        parts = urlparse.urlparse(settings['BASE_URL'])
        items = parts.netloc.split(':')
        if len(items) == 2:
            settings['PORT'] = int(items[1])
        elif parts.scheme == 'http':
            settings['PORT'] = 80
        elif parts.scheme == 'https':
            settings['PORT'] = 443
        else:
            raise ValueError('could not determine port from BASE_URL')
    return settings
Example #11
            else:
                compound = (int(number.group(0)), 0)
            number_and_change.append(compound)

        except Exception as e:
            print("wrong length of the result")

    results[state]['Confirmed'] = number_and_change[0]
    results[state]['Deaths'] = number_and_change[1]
    results[state]['Cured'] = number_and_change[2]
    results[state]['Active'] = number_and_change[3]
    number = re.search(r'\d+', daily_increase[-1].replace(',', ''))
    results[state]['Tested'] = int(number.group(0))

# use key date as the id for the daily increase
today = date.today()
key_date = today.strftime("%Y-%m-%d")
results['_id'] = key_date

# upload the result to couchdb
couch = couchdb.Server('http://*****:*****@172.26.130.162:5984/')
db = couch['daily_increase']
db.save(results)

print(results)

with open('new-' + key_date + ".json", "w+", encoding="utf-8") as f:
    results = json.dump(results, f, indent=2)

driver.quit()
Example #12
from flask import Blueprint, jsonify, request
import couchdb
from configuration.config import Config

bp = Blueprint('Read and configure analysis',
               __name__,
               url_prefix='/api/analysis')

couch_server = couchdb.Server(Config.COUCHDB_URL)  # renamed: 'couchdb = couchdb.Server(...)' rebound the module
twitter = couch_server['twitter']


@bp.route('/', methods=['GET'])
def list_views():
    '''
  return all analysis (couchDB views)
  ---
  responses:
    200:
      description: A list of couchDB views definitions
      schema:
        type: array
        items:
          type: object
          properties:
            id:
              type: string
              description: The id of the view
            view:
              type: object
              description: The content of the view
Example #13
def delete_soledad_server_db(user_id, username):
    couch_credentials = _netrc_couch_credentials()
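    # _netrc_couch_credentials() presumably returns a dict with 'login', 'password'
    # and 'machine' keys, matching the format string below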
    server = couchdb.Server("http://%(login)s:%(password)s@%(machine)s:5984" %
                            couch_credentials)
    _delete_identity(server, username)
    _delete_data(server, user_id)
Example #14
args = parser.parse_args()

if args.debug:
    print(args)

if args.member == '':  # 'is' compared identity, not equality
    server = 'http://' + args.host + ':' + str(args.port) + '/'
else:
    server = 'http://' + args.member + ':' + args.pw + '@' + args.host + ':' + str(
        args.port) + '/'

if args.debug:
    print('Connecting to ', server)

try:
    # db = pycouchdb.Server(server).database(args.dbname)
    db = couchdb.Server(server)[args.dbname]
    if args.debug:
        print('connection successful')
except Exception as e:
    print('Connection unsuccessful:', e)
    sys.exit()

total = 0
docs = []
with open(args.file) as f:
    for line in f:
        # find the start and end of the json string
        lo = line.find('{')
        hi = line.rfind('}')
        if lo != -1 and hi != -1:  # 'is not' compared identity, not equality
            tweet = json.loads(line[lo:hi + 1])['json']
            doc = {}
Example #15
# encoding=utf8
import sys
import json
import couchdb
import googlemaps
from geopy.geocoders import Nominatim

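# Python 2 hack: force UTF-8 as the default string encoding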
reload(sys)
sys.setdefaultencoding('utf8')

couch = couchdb.Server('http://115.146.89.191:5984')
counter = 0
try:
    data_base = couch.create('melbourne_tweets')
except couchdb.http.PreconditionFailed as e:
    data_base = couch['melbourne_tweets']

geolocator = Nominatim()
gmaps = googlemaps.Client(key='AIzaSyDjhkGbQoHqpDM5Hiaii-giSCYzFoJMRJY')

countlist = []
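# collect the keys emitted by the helper/checkpostcode view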
for row in data_base.view('helper/checkpostcode'):
    countlist.append(row['key'])
    # lat_long = str(latitude) + "," + str(longitude)
    # location = geolocator.reverse(lat_long)
    # return [location.raw['address']['postcode'],location.raw['address']['suburb']]

for doc_id in countlist[2500:5000]:
    data = data_base[doc_id]
    if counter >= 2500:
        break
Example #16
atoken = ""
asecret = ""


class listener(StreamListener):
    def on_data(self, data):
        dictTweet = json.loads(data)
        if "Lasso" in dictTweet["text"].lower():
            try:
                dictTweet["_id"] = str(dictTweet['id'])
                doc = db.save(dictTweet)
                print("SAVED" + str(doc) + "=>" + str(data))
            except couchdb.http.ResourceConflict:
                # a document with this _id is already stored
                print("Already exists")
            return True

    def on_error(self, status):
        print(status)


auth = OAuthHandler(ckey, csecret)
auth.set_access_token(atoken, asecret)
streamListen = listener()
twitterStream = Stream(auth, streamListen)
server = couchdb.Server('http://*****:*****@localhost:5984/')
try:
    db = server.create('twitter-proyecto-lasso')
except couchdb.http.PreconditionFailed:  # database already exists
    db = server['twitter-proyecto-lasso']
twitterStream.filter(locations=[-92.21, -5.02, -75.19, 1.88])
Example #17
# -*- coding: utf-8 -*-
import json
import tweepy
import couchdb
from shapely.geometry import Point
from shapely.geometry.polygon import Polygon
import re

# global variables:
# boundary file - dictionary
boundaryJS = json.load(open('melb.json'))
# all suburb names - list
sub_list = [ele['properties']["SA2_NAME16"] for ele in boundaryJS["features"]]
# target database object
server = couchdb.Server('http://*****:*****@localhost:5984/')
try:
    database = server.create('temp')
except Exception as e:
    server.delete('temp')
    database = server.create('temp')


def sub_name_normalisation(input_name):
    for standard_sub in sub_list:
        if input_name.lower() == standard_sub.lower():
            return standard_sub
    for standard_sub in sub_list:
        if input_name.lower() in standard_sub.lower():
            return standard_sub
    for standard_sub in sub_list:
        if input_name.replace(" ", " - ").lower() == standard_sub.lower():
Example #18
File: soa.py Project: tuian/mq
def process_doc_id(doc_id):
    import calendar
    logging.info('soa.process_doc_id checking %s from sc.remotestaff.com.au' %
                 doc_id)
    s = couchdb.Server(settings.COUCH_DSN)

    db = s['subconlist_reporting']
    doc = db.get(doc_id)
    if doc is None:
        raise Exception('subconlist_reporting document not found : %s' %
                        doc_id)

    subcontractor_ids = doc['subcontractor_ids']
    date_search = doc['date_search']

    database = MySQLdb.connect(**settings.DB_ARGS)
    conn = database.cursor()

    report = "\n"  # renamed from 'str', which shadowed the builtin
    result = {}
    subcon_client_rates = {}
    for d in date_search:
        d = datetime.strptime(d, '%Y-%m-%d')
        #end_date = datetime(d.year, d.month, calendar.mdays[d.month])
        last_day_of_end_date = calendar.monthrange(int(d.year), int(d.month))
        end_date = datetime(d.year, d.month, last_day_of_end_date[1])
        date_reference = '%s' % d.strftime('%m/%Y')

        subcons = []
        client_rates = []
        for sid in subcontractor_ids:
            userid = doc['subcon_userid'][sid]

            dates = get_rates_reference_date(conn, sid, d, end_date)
            #str += '\n\n%s %s' % (d, end_date)

            subcon = get_subcon_details(conn, sid)
            starting_date = datetime.strptime('%s' % subcon["starting_date"],
                                              '%Y-%m-%d')

            rates = []

            for idx, date in enumerate(dates):
                #str += '\n\t%s' % (date['start_date'])
                #print str
                start_date_ref = dates[idx]['start_date']

                if date_reference != '%s' % date['start_date'].strftime(
                        '%m/%Y'):
                    #str += '\n\t%s use this date => %s' % (date['start_date'], d)
                    rates = get_subcon_rate(conn, sid, d)
                    if rates:
                        for r in rates:
                            #str += '\n\n\t->%s %s %s %s\n' % (r['rate'], r['work_status'] , d, end_date )
                            client_rates.append(
                                dict(rate=r['rate'],
                                     work_status=r['work_status'],
                                     start_date=d,
                                     end_date=end_date,
                                     sid=sid))

                if date_reference == '%s' % date['start_date'].strftime(
                        '%m/%Y'):

                    end_date_ref = dates[idx]['start_date'] - timedelta(days=1)
                    start_date_ref = dates[idx - 1]['start_date']

                    same_month = True
                    if '%s' % dates[idx]['start_date'].strftime(
                            '%m/%Y') != '%s' % end_date_ref.strftime('%m/%Y'):
                        #end_date_ref =  dates[idx]['start_date']
                        same_month = False
                    if same_month:
                        if int(idx) == 0:
                            #str += '\n\t%s %s %s' % (idx, d, end_date_ref)
                            rates = get_subcon_rate(conn, sid, end_date_ref)
                            if rates:
                                for r in rates:
                                    #str += '\n\t\t->%s %s' % (r['rate'], r['work_status'])
                                    client_rates.append(
                                        dict(rate=r['rate'],
                                             work_status=r['work_status'],
                                             start_date=d,
                                             end_date=end_date_ref,
                                             sid=sid))

                    if int(idx) > 0:
                        #str += '\n\t%s %s %s' % (idx, start_date_ref, end_date_ref)
                        rates = get_subcon_rate(conn, sid, end_date_ref)
                        if rates:
                            for r in rates:
                                #str += '\n\t\t->%s %s' % (r['rate'], r['work_status'])
                                client_rates.append(
                                    dict(rate=r['rate'],
                                         work_status=r['work_status'],
                                         start_date=start_date_ref,
                                         end_date=end_date_ref,
                                         sid=sid))
                    if len(dates) == (idx + 1):
                        #str += '\n\t%s %s %s' % ((idx+1), dates[idx]['start_date'], end_date)
                        rates = get_subcon_rate(conn, sid, end_date)
                        if rates:
                            for r in rates:
                                #str += '\n\t\t->%s %s' % (r['rate'], r['work_status'])
                                client_rates.append(
                                    dict(rate=r['rate'],
                                         work_status=r['work_status'],
                                         start_date=dates[idx]['start_date'],
                                         end_date=end_date,
                                         sid=sid))

            #subcon_client_rates[int(sid)]=client_rates
            for c in client_rates:

                start_date_ref = datetime.strptime(
                    '%s' % c['start_date'].strftime("%Y-%m-%d"), '%Y-%m-%d')
                end_date_ref = datetime.strptime(
                    '%s' % c['end_date'].strftime("%Y-%m-%d"), '%Y-%m-%d')
                adj_hrs = get_total_adj_hrs(conn, sid, start_date_ref,
                                            end_date_ref)

                if date_reference == '%s' % c['start_date'].strftime('%m/%Y'):
                    if sid == c['sid']:
                        report += '\n[%s] %s %s %s %s' % (
                            sid, c['start_date'].strftime("%Y-%m-%d"),
                            c['end_date'].strftime("%Y-%m-%d"), c['rate'],
                            c['work_status'])
                        if subcon["status"] == 'ACTIVE' or subcon[
                                "status"] == 'suspended':
                            if starting_date <= end_date:
                                subcons.append(
                                    dict(sid=sid,
                                         rate=c['rate'],
                                         work_status=c['work_status'],
                                         start_date='%s' %
                                         c['start_date'].strftime("%Y-%m-%d"),
                                         end_date='%s' %
                                         c['end_date'].strftime("%Y-%m-%d"),
                                         total_adj_hrs='%s' % adj_hrs))

                        if subcon["status"] == 'terminated' or subcon[
                                "status"] == 'resigned':
                            if subcon["end_date"]:
                                ending_date = subcon["end_date"]
                                if starting_date <= end_date:
                                    if ending_date >= d and ending_date >= end_date:
                                        subcons.append(
                                            dict(sid=sid,
                                                 rate=c['rate'],
                                                 work_status=c['work_status'],
                                                 start_date='%s' %
                                                 c['start_date'].strftime(
                                                     "%Y-%m-%d"),
                                                 end_date='%s' %
                                                 c['end_date'].strftime(
                                                     "%Y-%m-%d"),
                                                 total_adj_hrs='%s' % adj_hrs))

                                    if ending_date >= d and ending_date <= end_date:
                                        subcons.append(
                                            dict(sid=sid,
                                                 rate=c['rate'],
                                                 work_status=c['work_status'],
                                                 start_date='%s' %
                                                 c['start_date'].strftime(
                                                     "%Y-%m-%d"),
                                                 end_date='%s' %
                                                 c['end_date'].strftime(
                                                     "%Y-%m-%d"),
                                                 total_adj_hrs='%s' % adj_hrs))

            result['%s' % d.strftime('%Y-%m-%d')] = subcons
    #return report
    doc['result'] = result
    doc['result_date_time'] = get_ph_time().strftime('%Y-%m-%d %H:%M:%S')
    conn.close()
    db.save(doc)
import couchdb
import datetime
import json
import sys


def log(s):
    print '%s: %s' % (datetime.datetime.today(), s)


def deleteDocs(db):
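    # the view name suggests it emits tweets created more than five days ago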
    print 'Loading old tweets ...'
    oldTweets = db.view('Tweet/by_day_older_five_days')
    print 'Found %d tweets' % len(oldTweets)
    numTweets = 0
    for row in oldTweets:
        doc = db[row.id]
        print 'Deleting: %s' % doc['created_at']
        db.delete(doc)
        numTweets += 1
    print '%d tweets deleted from %s' % (numTweets, db)


if __name__ == '__main__':
    dbName = sys.argv[1]
    couch = couchdb.Server('http://166.78.236.179:5984/')
    couch.resource.credentials = ('admin', 'admin')
    db = couch[dbName]
    deleteDocs(db)
Example #20
import couchdb

if __name__ == "__main__":

    server = couchdb.Server()  # renamed from 'db': this object is a Server, not a database

    print dir(server)
    for coll in server:
        print coll
        for tmp in server[coll]:
            print tmp
import couchdb
from couchdb.design import ViewDefinition

admin = 'admin'
password = '******'
server = couchdb.Server('http://*****:*****@115.146.86.96:5984')

db = server['sentiment_ratio']

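# ViewDefinition('Res', 'res', mapfun) corresponds to the design document
# _design/Res; sync() saves the view definition to the database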
view1 = ViewDefinition('Res', 'res', 'function(doc) {emit(doc.results, 1);}')
view1.get_doc(db)
view1.sync(db)

db1 = server['sentiment_sum']

view2 = ViewDefinition('Res', 'res', 'function(doc) {emit(doc.results, 1);}')
view2.get_doc(db1)
view2.sync(db1)

db2 = server['posts_per_city']

view3 = ViewDefinition('Res', 'res', 'function(doc) {emit(doc.count, 1);}')
view3.get_doc(db2)
view3.sync(db2)

db3 = server['dist_user']

view4 = ViewDefinition('Res', 'res', 'function(doc) {emit(doc.distcount, 1);}')
view4.get_doc(db3)
view4.sync(db3)
            dictTweet["_id"] = str(dictTweet['id'])
            doc = db.save(dictTweet)
            print("SAVED" + str(doc) + "=>" + str(data))
        except couchdb.http.ResourceConflict:
            print("Already exists")
        return True

    def on_error(self, status):
        print(status)


auth = OAuthHandler(ckey, csecret)
auth.set_access_token(atoken, asecret)
twitterStream = Stream(auth, listener())
'''========couchdb=========='''
server = couchdb.Server('http://*****:*****@localhost:5984/'
                        )  # ('http://115.146.93.184:5984/')
try:
    db = server.create('chimborazoaa')
except couchdb.http.PreconditionFailed:
    db = server['chimborazoaa']  # 'server(...)' was a bug: a Server is indexed, not called
'''===============LOCATIONS=============='''

# Filter by geolocation
twitterStream.filter(locations=[-79.2545, -2.5673, -78.3585, -1.4301])
# Filter by keywords (note: filter() blocks, so this call is only
# reached once the first stream ends)
twitterStream.filter(
    track=['Elecciones Ecuador 2021', 'Andres Arauz', 'CENTRO'])
Example #23
            try:
                db.save(element)
            except Exception as e:
                print('doc error:', feature['properties'][idName], e)


with open('/home/ec2-user/couchdb.txt', 'r') as f:  # was 'as db', but the body reads from 'f'
    content = f.read().split()  # file holds "<host> <user> <password>"
    host = content[0]
    user = content[1]
    passw = content[2]
    port = 5984

url = 'http://' + user + ':' + passw + '@' + host + ':' + str(port) + '/'

couch = couchdb.Server(url)

web_files = [
    'web-sa2-zscore', 'web-sa3-zscore', 'web-sa4-zscore',
    'web-zresult-obese-park', 'web-zresult-rest', 'web-result-obese'
]

feature_files_ids = [{
    'nm': 'sa1-polygons_melb-greater',
    'id': 'SA1_MAIN16'
}, {
    'nm': 'sa2-polygons_melb-greater',
    'id': 'SA2_MAIN16'
}, {
    'nm': 'sa3-polygons_melb-greater',
    'id': 'SA3_CODE16'
import couchdb
from data_processing import data_process

user = "******"
password = "******"
server = couchdb.Server("http://" + user + ":" + password + "@" + "45.113.234.131:32773/")
old_db = server['historical_twitter']
new_db = server["new_tweets"]
# new_db = server["processed_tweets"]

counter = 0
for id in old_db:
    try:
        data = old_db[id]
        new_data = data_process.process_json(data)
        # new_db.save(data)
        # new_db.save(new_data)
        new_id = data["id"]
    except Exception:
        print("Processing error")
        continue  # skip this doc; the original fell through and reused stale new_id/new_data
    try:
        new_db[str(new_id)] = new_data
        print(new_data)
    except couchdb.http.ResourceConflict:
        print("Duplicate error.")

    # for element in data:
    #     print(type(element))
    #     print(element)
    # print(type(data))
    # print(data)
Example #25
                    'Lang': tweet['lang']
                }
                print(Dict, '\n')
                db.save(Dict)
        except couchdb.http.ResourceConflict:
            pass

    def on_error(self, status):
        if status == 420:
            # return false on_status method in case rate limit occurs
            return False


if __name__ == '__main__':
    # ----------------------------------------------CloudDB-------------------------------------------------------------
    server = couchdb.Server('http://*****:*****@172.26.130.124:5984/')
    db_name = 'db_status_filter_test'

    try:
        db = server.create(db_name)
    except couchdb.http.PreconditionFailed:  # database already exists
        db = server[db_name]

    print('-----------------------------------------CouchDB Connected-------------------------------------------------')

    # ----------------------------------------------Stream--------------------------------------------------------------

    # Searching tweets by keywords and save into CouchDB
    KeyWords = ['COV19', 'COV-19', 'cov19', 'cov-19', 'coronavirus',
                'virus', 'epidemic', 'covid19', 'covid_19']
    stream = Stream(auth, MyListener())  # lowercase name: 'Stream = Stream(...)' rebound the class
Example #26
#####################################


class listener(StreamListener):
    def on_data(self, data):
        dictTweet = json.loads(data)
        try:
            dictTweet["_id"] = str(dictTweet['id'])
            doc = db.save(dictTweet)
            print("SAVED" + str(doc) + "=>" + str(data))
            except couchdb.http.ResourceConflict:
                print("Already exists")
        return True

    def on_error(self, status):
        print(status)


auth = OAuthHandler(ckey, csecret)
auth.set_access_token(atoken, asecret)
twitterStream = Stream(auth, listener())
'''========couchdb=========='''
server = couchdb.Server('http://*****:*****@localhost:5984/')
try:
    db = server.create('covid')
except couchdb.http.PreconditionFailed:  # database already exists
    db = server['covid']
'''===============LOCATIONS=============='''
twitterStream.filter(track=['COVID', 'Covid', 'Cov-19'])
Example #27
File: Paypal.py Project: tuian/mq
def charge_fees(doc_id):
    """given the doc_id, create fees
    """
    logging.info('checking document %s ' % doc_id)
    couch_server = couchdb.Server(settings.COUCH_DSN)
    db_client_docs = couch_server['client_docs']
    doc = db_client_docs.get(doc_id)

    if doc is None:
        raise Exception('document %s not found for creating paypal fees' %
                        doc_id)

    if 'type' not in doc:
        raise Exception('%s doc has no type field' % doc_id)

    if doc['type'] != 'paypal_transaction':
        raise Exception('%s is not a paypal_transaction type doc' % doc_id)

    if 'response' not in doc:
        raise Exception('%s does not have a response field' % doc_id)

    if 'order_id' not in doc:
        raise Exception('%s does not have order_id field' % doc_id)

    #get leads_id
    order_id = doc['order_id']
    x = re.split('-', order_id)
    leads_id = int(x[0])

    response = dict(parse_qsl(doc['response']))
    if (('TOKEN' in response.keys()) and ('ACK' in response.keys())
            and ('PAYMENTINFO_0_AMT' in response.keys())
            and ('PAYMENTINFO_0_FEEAMT' in response.keys())
            and ('PAYMENTINFO_0_TRANSACTIONID' in response.keys())
            and (response['ACK'] == 'Success')):
        amount = Decimal(response['PAYMENTINFO_0_AMT'])
        charge = Decimal(response['PAYMENTINFO_0_FEEAMT'])
        paypal_transaction_id = response['PAYMENTINFO_0_TRANSACTIONID']
    else:
        raise Exception('%s does not seem to be a paid paypal transaction' %
                        doc_id)

    now = __get_phil_time__(as_array=True)

    particular = 'Paypal Service Fee Ex GST for transaction %s' % (
        paypal_transaction_id)
    remarks = 'Payment amt of %s. Payment fee of %s. invoice_id:%s' % (
        amount, charge, order_id)

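    # look up the client's current running balance via the client/running_balance view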
    r = db_client_docs.view('client/running_balance', key=leads_id)

    if len(r.rows) == 0:
        running_balance = Decimal('0')
    else:
        running_balance = Decimal('%s' % r.rows[0].value)

    running_balance -= charge

    doc_transaction = dict(
        added_by='automatic charge on payment (celery:Paypal.charge_fees)',
        added_on=now,
        charge='%0.2f' % charge,
        client_id=leads_id,
        credit='0.00',
        credit_type='PAYPAL_FEE',
        currency=doc['currency'],
        remarks=remarks,
        type='credit accounting',
        running_balance='%0.2f' % running_balance,
        particular=particular,
        paypal_doc_id=doc_id,
    )

    db_client_docs.save(doc_transaction)
    send_task('notify_devs.send', [
        'PAYPAL FEE EXTRACTED',
        'Please check invoice %s, paypal fee:%s' % (order_id, charge)
    ])  #TODO delete this notification once stable
Example #28
def scenario2(request):
    Maps.objects.filter(mapname="map5aurin").delete()
    Maps.objects.filter(mapname="map5twitter").delete()
    server = couchdb.Server(url='http://115.146.86.96:5984')
    db = server['language']
    for docid in db.view('Res/res'):
        i = docid['key']
        break
    languagedict = json.loads(i)
    db = server['lang_aurin']
    with open("/home/ubuntu/zip/sc1.json", "r", encoding="utf8") as sc1:
        map1 = json.load(sc1)
        for row in map1["features"]:
            # renamed from 'map'/'map2': 'map' shadowed the builtin
            aurin_map = Maps()
            twitter_map = Maps()
            city = row['cityname']
            print("city:", city)
            aurin_map.mapname = "map5aurin"
            twitter_map.mapname = "map5twitter"
            aurin_map.cityname = row['cityname']
            twitter_map.cityname = row['cityname']
            aurin_map.lat = twitter_map.lat = row['latlong'][0]
            aurin_map.lon = twitter_map.lon = row['latlong'][1]
            for docid in db.view('Result/' + city):
                if docid['key']:
                    c = docid['key']["SOL_Chin_lang_Tot_P"]
                    ia = docid['key']["SOL_In_Ar_Lang_Tot_P"]
                    i = docid['key']["SOL_Italian_P"]
                    j = docid['key']["SOL_Japanese_P"]
                    s = docid['key']["SOL_Spanish_P"]
                    k = docid['key']["SOL_Korean_P"]
                    aurin_map.infotitle = "Chinese"
                    aurin_map.infomsg = c
                    aurin_map.infotitle2 = "Korean"
                    aurin_map.infomsg2 = k
                    aurin_map.infotitle3 = "Japanese"
                    aurin_map.infomsg3 = j
                    aurin_map.infotitle4 = "Italian"
                    aurin_map.infomsg4 = i
                    aurin_map.infotitle5 = "Spanish"
                    aurin_map.infomsg5 = s
                    aurin_map.save()
                    twitter_map.infotitle = "Chinese"
                    ch = 0
                    for k in languagedict[city]:
                        if "zh" in k:
                            ch += languagedict[city][k]
                    twitter_map.infomsg = ch
                    twitter_map.infotitle2 = "Korean"
                    if "ko" in languagedict[city]:
                        twitter_map.infomsg2 = languagedict[city]["ko"]
                    else:
                        twitter_map.infomsg2 = 0
                    twitter_map.infotitle3 = "Japanese"
                    if "ja" in languagedict[city]:
                        twitter_map.infomsg3 = languagedict[city]["ja"]
                    else:
                        twitter_map.infomsg3 = 0
                    twitter_map.infotitle4 = "Italian"
                    if "it" in languagedict[city]:
                        twitter_map.infomsg4 = languagedict[city]["it"]
                    else:
                        twitter_map.infomsg4 = 0
                    twitter_map.infotitle5 = "Spanish"
                    if "es" in languagedict[city]:
                        twitter_map.infomsg5 = languagedict[city]["es"]
                    else:
                        twitter_map.infomsg5 = 0
                    twitter_map.save()
    aurindata = Maps.objects.filter(mapname="map5aurin").values()
    twitterdata = Maps.objects.filter(mapname="map5twitter").values(
        'cityname', 'lat', 'lon', 'infotitle', 'infomsg', 'infotitle2',
        'infomsg2', 'infotitle3', 'infomsg3', 'infotitle4', 'infomsg4',
        'infotitle5', 'infomsg5')
    aurin_list = list(aurindata)
    twitter_list = list(twitterdata)
    return render(request, "results/scenario2.html",
                  {'maps_list1': twitter_list, 'maps_list2': aurin_list})
Example #29
    print "Unique Retweet IDs = ", uniqueIDs


# the main program tests this function by loading all tweets from a search database
#   and printing the entities from the first XX tweets
if __name__ == '__main__':
    # this should be the name of a DB with tweets
    DBname = 'fairly'

    search_results = load_from_DB(DBname)

    # open the database directly from CouchDB so that we can delete items as necessary
    # connect to database on couchdb server
    server = couchdb.Server('http://localhost:5984')
    try:
        db = server[DBname]
        print "Connected to DB named", DBname
    except couchdb.http.PreconditionFailed:
        print "Could not find DB named", DBname
        sys.exit(0)
    except ValueError:
        print "Invalid DB name"
        sys.exit(0)

    print 'number tweets loaded', len(search_results)

    remove_retweets(search_results)
import couchdb
import re
import sys
import urllib2
import json
import textblob
import matplotlib.pyplot as plt
from googletrans import Translator
from pylab import *
from couchdb import view
URL = '192.168.100.16'
db_name = 'brasil'
db_nameSecond = 'belgica'
db_nameThird = 'rusia'
server = couchdb.Server('http://' + URL + ':5984/')
lstHashtags = {
    "principal": "#Rusia2018",
    "secundario": "#WorldCup",
    "alterno": "#WorldCupFinal",
    "argentina": "#ARG",
    "belgica": "#BEL",
    "brasil": "#BRA",
    "colombia": "#COL",
    "croacia": "#CRO",
    "dinamarca": "#DEN",
    "inglaterra": "#ENG",
    "espana": "#ESP",
    "francia": "#FRA",
    "japon": "#JPN",
    "mexico": "#MEX",