Example #1
def connection():
    create_db('_test_db')
    connection = db_connection('_test_db')
    yield connection

    drop_tables(connection)
    connection.close()
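
A minimal sketch of how a generator like this is typically registered with pytest. The import path utils and the test body are assumptions used only to show that the fixture argument receives the yielded connection.

import pytest

from utils import create_db, db_connection, drop_tables  # assumed import path


@pytest.fixture
def connection():
    create_db('_test_db')                  # set up a throwaway test database
    connection = db_connection('_test_db')
    yield connection                       # the test runs at this point

    drop_tables(connection)                # teardown after the test finishes
    connection.close()


def test_connection_is_open(connection):
    # hypothetical test: the argument is the connection yielded by the fixture
    assert connection is not None
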
Example #2
 def setUp(self) -> None:
     self.db_name = "test.db"
     with open("requests.json", "r") as file:
         self.requests = json.loads(file.read())
     utils.create_db(self.db_name)
     self.user_model = models.User(self.db_name)
     self.auth_token_model = models.AuthToken(self.db_name)
     self.post_model = models.Post(self.db_name)
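
The setUp above creates test.db but no cleanup is shown. A possible companion tearDown, assuming the database is a plain SQLite file on disk (an assumption, since utils.create_db is not shown), would simply delete it so every test starts from a fresh database:

def tearDown(self) -> None:
    # hypothetical cleanup for the setUp above; assumes self.db_name is a
    # plain SQLite file that can be removed between tests (needs "import os")
    if os.path.exists(self.db_name):
        os.remove(self.db_name)
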
Example #3
def connection():
    create_db("_test_db")
    connection = db_connection("_test_db")
    yield connection

    drop_tables(connection)
    connection.close()
    os.remove("_test_db")
Example #4
  def start(self):
    if not self.conn:
      self.conn = utils.create_db(self.config["dbfile"], self.config["schemafile"])

    try:
      pt = multiprocessing.Process(target=populatetxs.populatetxs().run)

      txp = multiprocessing.Process(target=txparser.txparser().run)
      two = multiprocessing.Process(target=tweetout.tweetout().run)
      ch = multiprocessing.Process(target=commandhandler.commandhandler().run)

      twi = multiprocessing.Process(target=tweetin.tweetin().run)
      sc = multiprocessing.Process(target=statscollector.statscollector().run)
      am = multiprocessing.Process(target=addressmonitor.addressmonitor().run)

      # these modules are independent of others
      # update all txs in case we miss any
      pt.start()

      # provide features like tweet replies and statscollection
      sc.start() ; two.start()

      # these modules monitor and process txs
      txp.start() ; am.start()

      # these modules provide cnc feature
      ch.start() ; twi.start()

      pt.join()
      sc.join() ; two.join()
      txp.join() ; am.join()
      ch.join() ; twi.join()
    except Exception:
      import traceback
      traceback.print_exc()
Example #5
 def __init__(self):
     super(populatetxs, self).__init__()
     self.conn = utils.create_db("cryptopaymon.sqlite", "schema.sql")
     self.config = {
         "txupdatedelay": 0,
         "satoshi2btc": 1e8,
     }
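
Several of these cryptopaymon snippets call utils.create_db("cryptopaymon.sqlite", "schema.sql"), but the helper itself is not shown. A plausible minimal implementation, assuming SQLite and a plain SQL schema file (the real helper may differ), looks like this:

import sqlite3


def create_db(dbfile, schemafile):
    # Hypothetical reconstruction: open (or create) the SQLite database and
    # apply the schema script, then hand the connection back to the caller.
    conn = sqlite3.connect(dbfile)
    with open(schemafile) as schema:
        conn.executescript(schema.read())  # e.g. CREATE TABLE IF NOT EXISTS ...
    conn.commit()
    return conn
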
Example #6
    def __init__(self):
        seqfn = 'Vibrio_cholerae.GFC_11.dna.nonchromosomal.fa'
        gfffn = 'Vibrio_cholerae.GFC_11.37.gff3'
        dbfn = 'Vibrio_cholerae.GFC_11.37.db'
        db = create_db(gfffn, dbfn)

        self.avg_len_genic = get_avg_len_genic(seqfn, db)
        self.avg_len_intergenic = get_avg_len_intergenic(seqfn, db)
        self.nuc_freq_intergenic = get_nuc_freq_intergenic(seqfn, db)
        self.codon_freq_genic = get_codon_freq_genic(seqfn, db)
Example #7
 def __init__(self):
   super(tweetin, self).__init__()
   self.conn = utils.create_db("cryptopaymon.sqlite", "schema.sql")
   self.config = {
     "cmdqueue": None,
     "mentions": None,
     "twitterconsumerkey": None,
     "twitterconsumersecret": None,
     "twitteraccesskey": None,
     "twitteraccesssecret": None
   }
Example #8
    def __init__(self):
        """
        1. init the block list
        2. create a genesis block for this block chain
        """

        self.blocks = []
        self.db = None
        self.latest_block = None

        if 'block.db' not in os.listdir('.'):
            print("No existing blockchain found, create a new one...")
            utils.create_db('block')
            self.db = pickledb.load('block.db', True)
            genesis_block = block(prev_hash=None,
                                  data='Genesis Block',
                                  height=0)
            genesis_block.pow_of_block()
            self.db.set(genesis_block.hash, utils.serialize(genesis_block))
            self.db.set('latest', genesis_block.hash)
        else:
            self.db = pickledb.load('block.db', True)
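
For reading the chain back, a small helper could load the stored tip; utils.deserialize is assumed to be the counterpart of the utils.serialize call above (an assumption, since it is not shown here):

import pickledb

import utils  # the project's own helper module, assumed importable


def load_latest_block(path='block.db'):
    # Sketch only: fetch the hash stored under 'latest' and deserialize the
    # block object that was saved under that hash.
    db = pickledb.load(path, True)
    latest_hash = db.get('latest')
    return utils.deserialize(db.get(latest_hash))
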
Example #9
 def __init__(self):
   super(txparser, self).__init__()
   self.conn = utils.create_db("cryptopaymon.sqlite", "schema.sql")
   self.config = {
     "taskqueue": None,
     "tweetqueue": None,
     "generichashtags": None,
     "ignorehashtags": None,
     "happyemojis": None,
     "neutralemojis": None,
     "sademojis": None,
     "queuemonitordelay": 0,
     "satoshi2btc": 1e8,
     "exchangerates": {
       "btc2usd": 0
     },
   }
Example #10
 def __init__(self):
   super(tweetout, self).__init__()
   self.conn = utils.create_db("cryptopaymon.sqlite", "schema.sql")
   self.config = {
     "twitterusers": None,
     "tweetqueue": None,
     "tweetmediaqueue": None,
     "tweetdelay": None,
     "twitterconsumerkey": None,
     "twitterconsumersecret": None,
     "twitteraccesskey": None,
     "twitteraccesssecret": None,
     "generichashtags": None,
     "tmpfile": "/tmp/.imgfile.png",
   }
   self.auth = None
   self.api = None
Example #11
 def __init__(self):
     super(statscollector, self).__init__()
     self.conn = utils.create_db("cryptopaymon.sqlite", "schema.sql")
     self.config = {
         "tweetqueue": None,
         "tweetmediaqueue": None,
         "generichashtags": None,
         "statscollectiondelay": 0,
         "satoshi2btc": 100000000,
         "templateopts": dict(),
         "basepath":
         "/media/shiv/red_third/stoolbox/gdrive-bckup/toolbox/cryptopaymon",
         "templatedir":
         "/media/shiv/red_third/stoolbox/gdrive-bckup/toolbox/cryptopaymon/html",
         "htmldir":
         "/media/shiv/red_third/stoolbox/gdrive-bckup/toolbox/cryptopaymon/html",
         "downloaddir": "/home/shiv",
         "exchangerates": {
             "btc2usd": 0
         },
         "imagesavedelay": 10,
         "heading_bad": "Statistics for Bitcoin Ransom",
         "heading_good": "Statistics for Bitcoin Donations",
     }
Example #12
def connection():
    create_db('_test_db')
    connection = db_connection('_test_db')
    yield connection

    connection.close()
Example #13
		metavar='create',
		action='store_const',
		const=True, 
		default=False,
		help='create a new database'
	)

	parser.add_argument(
		'-d', 
		'--database', 
		metavar='db_path',
		type=str,
		default='users.sqlite',
		help='the sqlite db file to use'
	)

	args = parser.parse_args()

	conf.DB_NAME = args.database
	conf.init()

	if args.create:
		print('Creating new db "{db}"'.format(db=args.database))
		utils.create_db()
	else:
		simpleauth.listen(args.port)
		print('Listening on port {port}'.format(port=args.port))
		print('Using user db "{db}"'.format(db=args.database))
		iol = tornado.ioloop.IOLoop.instance()
		iol.start()
Example #14
def main():
    db_fields = (
        'vacancy_id',
        'vacancy_link',
        'vacancy_name',
        'company',
        'address',
        'description',
        'salary',
    )
    create_db(db_fields)
    page = 0

    while True:
        page += 1
        print(f'start to parse page: {page}')  # NOQA: log progress while parsing

        headers = {'User-Agent': ua.random}

        response = requests.get(PARSE_URL,
                                params={'page': page},
                                headers=headers)
        response.raise_for_status()

        soup = BeautifulSoup(response.text, 'lxml')

        res = soup.find('div', {'id': 'pjax-job-list'})

        if res is None:
            break

        res = res.find_all('h2')
        for elem in res:
            href = elem.find('a').attrs['href']
            vacancy_name = elem.find('a').text.strip()

            # vacancy description (gain extra params from vacancy)
            details = requests.get(BASE_URL + href, headers=headers)
            vacancy_text = details.text.strip()
            vacancy_link = BASE_URL + href
            vacancy_id = ''.join(i for i in href if i.isdigit())
            vacancy_card = BeautifulSoup(vacancy_text, 'lxml')
            company = vacancy_card.find('span', {
                'class': 'glyphicon-company'
            }).findNext('a').find('b').text.strip()
            address = vacancy_card.find('span', {
                'class': 'glyphicon-map-marker'
            }).findParent('p').contents[2].strip()
            address = address.replace('\n', '').replace('\\',
                                                        '').replace("/", '')
            description = vacancy_card.find('div', {
                'id': 'job-description'
            }).text.strip()
            description = description.replace('\n',
                                              '').replace('\\',
                                                          '').replace("/", '')
            try:
                salary = vacancy_card.find('span', {
                    'class': 'glyphicon-hryvnia'
                }).findNext('b', {
                    'class': 'text-black'
                }).text.strip()
                salary = salary.replace('\u202f', '')
                salary = salary.replace('\u2009', '')
            except AttributeError:
                salary = 'NULL'
            # end vacancy description

            save_into_db(
                vacancy_id,
                vacancy_link,
                vacancy_name,
                company,
                address,
                description,
                salary,
            )
    save_into_json()
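
create_db(db_fields) and save_into_db(...) are the project's own helpers and are not shown here. A minimal sqlite3-based sketch of what they might look like; the table and file names below are assumptions for illustration:

import sqlite3

DB_NAME = 'vacancies.db'  # hypothetical file name, not taken from the example


def create_db(db_fields):
    # one TEXT column per field in the db_fields tuple
    with sqlite3.connect(DB_NAME) as conn:
        columns = ', '.join(f'{field} TEXT' for field in db_fields)
        conn.execute(f'CREATE TABLE IF NOT EXISTS vacancies ({columns})')


def save_into_db(*values):
    # one row per parsed vacancy, in the same order as db_fields
    with sqlite3.connect(DB_NAME) as conn:
        placeholders = ', '.join('?' for _ in values)
        conn.execute(f'INSERT INTO vacancies VALUES ({placeholders})', values)
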
Example #15
from urllib.parse import urljoin
from flask import Flask, render_template, request, redirect, url_for
from utils import create_db, get_shorten_url, get_origin_url_from_shorten_url

app = Flask(__name__)
create_db()


@app.route('/')
def home():
    return render_template("home.html", origin_url="", message="")


@app.route('/', methods=['POST'])
def change():
    origin_url = request.form['url']
    custom_url = request.form.get('custom_url')
    shorten_url_after_check = get_shorten_url(
        origin_url, custom_url)  # returns error string or random string
    if shorten_url_after_check.startswith('ERROR '):
        return render_template("home.html",
                               origin_url=origin_url,
                               message=shorten_url_after_check)
    full_shorten_url = urljoin(request.url_root,
                               url_for('convert', url=shorten_url_after_check))
    return render_template("home.html",
                           origin_url=origin_url,
                           message=full_shorten_url)


@app.route('/convert/<url>')
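
The snippet is cut off right after the /convert/<url> decorator. Since the home view builds its short links with url_for('convert', ...), the handler presumably resolves the short code with the imported get_origin_url_from_shorten_url and redirects to the original URL; the body below is an assumption, not the project's actual code:

def convert(url):
    # hypothetical body for the truncated route above
    origin_url = get_origin_url_from_shorten_url(url)
    if not origin_url:
        return render_template("home.html", origin_url="",
                               message="ERROR unknown short url")
    return redirect(origin_url)
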
Example #16
        # As we scale the image, we need to scale back the contour to the original image
        frame_orig = list(
            map(lambda x: ut.rescale(x, size_ini, size_end), frame))

        # Save data to export
        frames.append([ut.rad2deg(angles[-1]), frame_orig])

    # Save the data in a pickle file
    ut.bbox_to_pkl(frames, fname='frames', folder='pkl')
    """
    ##################################  TASK 3: Filter keypoints  ####################################
    """
    # Check for training database file
    if not os.path.exists(PICKLE_MUSEUM_DATASET):
        logger.info("Creating pickle database for museum dataset...")
        db_museum = ut.create_db(TRAIN_MUSEUM_DIR, FEATURES, candidates)
        ut.save_db(db_museum, PICKLE_MUSEUM_DATASET)
    else:
        logger.info("Reading pickle database for museum dataset...")
        db_museum = ut.get_db(PICKLE_MUSEUM_DATASET)

    logger.info("Loaded data")
    """
    ##################################  TASK 4: Retrieval system and evaluation  ####################################
    ############################### WARNING: Don't touch below this sign. Ask Pablo #################################
    """

    # Check for query database file
    if not os.path.exists(PICKLE_QUERY_DATASET):
        logger.info("Creating pickle database for query dataset...")
        db_query = ut.create_db(TRAIN_QUERY_DIR, FEATURES, query=True)
Example #17
 def __init__(self):
     super(commandhandler, self).__init__()
     self.conn = utils.create_db("cryptopaymon.sqlite", "schema.sql")
     self.config = {
         "tweetqueue": None,
         "twitteruser": None,
         "cmdqueue": None,
         "mentions": None,
         "rootuser": None,
         "authorizedusers": None,
         "queuemonitordelay": 0,
         "statuses": {
             "unknown": 0,
             "good": 1,
             "bad": 2
         },
         "exchangerates": {
             "btc2usd": 0
         },
     }
     self.commands = {
         "add": {
             "user": "******",
             "usage":
             "add address name1|name2 #hashtag1|#hashtag2 good|bad|unknown",
             "help":
             "will add new entry and enable tracking|tweeting|txstats by default",
             "handler": self.add,
         },
         "remove": {
             "user": "******",
             "usage": "remove address|name1|hashtag2",
             "help":
             "will loop over all addresses and remove those that match params",
             "handler": self.remove,
         },
         "txtrack": {
             "user": "******",
             "usage": "txtrack start|stop address|name1|hashtag2",
             "help":
             "will loop over all addresses and enable|disable tracking live txs for those that match params",
             "handler": self.txtrack,
         },
         "txtweet": {
             "user": "******",
             "usage": "txtweet start|stop address|name1|hashtag2",
             "help":
             "will loop over all addresses and enable|disable tweeting live txs for those that match params",
             "handler": self.txtweet,
         },
         "txstats": {
             "user": "******",
             "usage": "txstats start|stop address|name1|hashtag2",
             "help":
             "will loop over all addresses and enable|disable stats tweet for those that match params",
             "handler": self.txstats,
         },
         "auth": {
             "user": "******",
             "usage": "auth add|remove handle",
             "help": "will add|remove handle in authorized users list",
             "handler": self.auth,
         },
         "update": {
             "user": "******",
             "usage": "update address|name1|hashtag2 good|bad|unknown",
             "help":
             "will loop over all addresses and update status for those that match params",
             "handler": self.update,
         },
         "show": {
             "user": "******",
             "usage": "show address|name1|hashtag2",
             "help":
             "will loop over all addresses and generate combined stats for those that match params",
             "handler": self.show,
         },
         "show": {
             "user": "******",
             "usage": "show address|name1|hashtag2",
             "help":
             "will loop over all addresses and generate combined stats for those that match params",
             "handler": self.show,
         },
         "help": {
             "user": "******",
             "usage": "help",
             "help": "will show help for available commands",
             "handler": self.help,
         },
     }
     self.error = None
Example #18
# Logger setup
logging.basicConfig(
    # level=logging.DEBUG,
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
)
logger = logging.getLogger(__name__)


if __name__ == '__main__':

    logger.info("START")

    if not os.path.exists(PICKLE_MUSEUM_DATASET):
        logger.info("Creating pickle database for museum dataset...")
        db_museum = ut.create_db(TRAIN_MUSEUM_DIR)
        ut.save_db(db_museum, PICKLE_MUSEUM_DATASET)
    else:
        logger.info("Reading pickle database for museum dataset...")
        db_museum = ut.get_db(PICKLE_MUSEUM_DATASET)

    if not os.path.exists(PICKLE_QUERY_DATASET):
        logger.info("Creating pickle database for query dataset...")
        db_query = ut.create_db(TRAIN_QUERY_DIR)
        ut.save_db(db_query, PICKLE_QUERY_DATASET)
    else:
        logger.info("Reading pickle database for query dataset...")
        db_query = ut.get_db(PICKLE_QUERY_DATASET)

    logger.info("LOADED DATA")
Example #19
 def __init__(self):
     super(dbupdate, self).__init__()
     self.conn = utils.create_db("cryptopaymon.sqlite", "schema.sql")
     self.config = {}
Example #20
def init():
	create_dir()
	create_db()
	click.echo("ToDo project initialized!")