Example #1
0
def main():
    """Scan the configured host's port range and banner-grab each open port."""
    target = settings.IP_ADDRESS
    port_scanner = Scanner(target)
    port_scanner.scan(settings.START_PORT, settings.END_PORT)

    for open_port in port_scanner.open_ports:
        try:
            banner = Grabber(target, open_port)
            print(f'{open_port}: {banner.read()}')
            banner.close()
        except Exception as err:
            # Best-effort: a port that refuses the grab is just reported.
            print('Error', err)
Example #2
0
    def __init__(self, sfrag, buffer_len=2):
        """Set up the screen grabber and two zeroed frame buffers.

        sfrag is a (left, top, right, bottom) bounding box — TODO confirm;
        buffer_len is accepted for interface compatibility but unused here.
        """
        self.grb = Grabber(bbox=sfrag)

        # Width x height of the captured region, plus 3 colour channels.
        left, top, right, bottom = sfrag[0], sfrag[1], sfrag[2], sfrag[3]
        self.dimmensions = (right - left, bottom - top, 3)

        # Double-buffered frames, both zero-initialised.
        self.buffer_write = np.zeros(self.dimmensions, dtype=np.int8)
        self.buffer_read = np.zeros(self.dimmensions, dtype=np.int8)

        self.space_pressed = False
Example #3
0
def main():
    """Scan every port on the hard-coded host and report grab results."""
    host = '192.168.42.42'
    first_port, last_port = 1, 65535
    scanner = Scanner(host)
    scanner.scan(first_port, last_port)
    for open_port in scanner.open_ports:
        try:
            g = Grabber(host, open_port)
            print('Result is {} on port: {}'.format(g.read(), open_port))
            g.close()
        except Exception:
            # Connection/read failure is reported as a blocked port.
            print('Result is Blocking on port: {}'.format(open_port))
def main():
    """Scan localhost ports 1-1000 and print each open port's banner."""
    ip = '127.0.0.1'
    portrange = (1, 1001)
    scanner = Scanner(ip)
    scanner.scan(*portrange)
    for port in scanner.open_ports:
        try:
            grabber = Grabber(ip, port)
            print(grabber.read())
            grabber.close()
        except Exception as e:
            # Bug fix: the handler printed `e` but the except clause did not
            # bind it (`except Exception:`), raising NameError on any failure.
            print("Error", e)
Example #5
0
    def __init__(self, onRobot):
        """Initialise robot I/O: camera, sensor accessors, motors, servo,
        grabber and lift.

        onRobot -- flag forwarded to IOTools and Lift; presumably selects
        real hardware vs. a simulator -- TODO confirm.
        """
        IO = IOTools(onRobot)
        print('Grabber initialised')
        self.camera = IO.camera.initCamera('pi', 'low')
        # Bound methods for polling the interface kit later.
        self.getInputs = IO.interface_kit.getInputs
        self.getSensors = IO.interface_kit.getSensors
        self.mc = IO.motor_control
        self.mc.stopMotors()  # ensure nothing is moving at start-up
        self.sc = IO.servo_control
        self.sc.engage()
        self.grabber = Grabber(self.mc, self.MOTOR_PORT, self.sc)
        #self.grabber.prepare_grabber()
        self.lift = Lift(onRobot, self.mc)

        self.lift_pos = 0  # last commanded lift position
        self.s = None  # NOTE(review): purpose unclear from here -- set later?
Example #6
0
async def main():
    """Run the VK photo-grabbing pipeline: user fetchers, photo fetchers
    and a cropper, all awaited together."""
    async with aiohttp.ClientSession(headers=VkApi.headers) as session:
        api = VkApi(VK_TOKEN, session)
        detector = Detector(PATH_TO_WEIGHTS)
        grabber = Grabber(
            api=api,
            detector=detector,
            profiles=PROFILES,
            save_dir=DEST_DIR,

            COUNTRY_CODES=COUNTRY_CODES,
            MIN_PHOTOS=MIN_PHOTOS,
            MAX_PHOTOS=MAX_PHOTOS,
            MIN_PHOTO_W=MIN_PHOTO_W,
            MIN_PHOTO_H=MIN_PHOTO_H,
            MIN_CROPS=MIN_CROPS,
            MAX_CROPS=MAX_CROPS,
            MIN_CROP_SIZE=MIN_CROP_SIZE,
        )
        try:
            # TODO: Automate workers start, so main gets start/end ids from argv
            # and all workers mill it together
            # TODO: User concurrent.futures.ProcessPoolExcecutor with loop.run_in_executror(...)
            mil = 10 ** 6
            # Eleven user-id fetchers covering ids up to 11M in 1M-wide
            # slices; the first slice starts at id 1, not 0.
            id_ranges = [(1, mil)] + [(k * mil, (k + 1) * mil)
                                      for k in range(1, 11)]
            tasks = [asyncio.create_task(grabber.user_fetcher(lo, hi))
                     for lo, hi in id_ranges]
            # Three photo fetchers plus one cropper drain the shared queues.
            tasks.extend(asyncio.create_task(grabber.photo_fetcher())
                         for _ in range(3))
            tasks.append(asyncio.create_task(grabber.cropper()))
            await asyncio.gather(*tasks)
        except KeyboardInterrupt:
            logger.info('Manual stop triggered')
Example #7
0
def run_grabber():
    """Parse CLI arguments, run the grabber, and emit the requested outputs."""
    args = get_args()
    check_args(args)

    out_dir = os.path.expanduser(args.output)

    grabber = Grabber(args.url)
    grabber.run()

    if args.json:
        # One JSON file per grabbed thread, named after its title.
        json_path = os.path.join(out_dir, '{}.json'.format(grabber.title))
        with open(json_path, 'w') as output:
            json.dump(grabber.json_output, output, indent=2)

    if args.html:
        raise NotImplementedError("HTML output is not implemented yet.")
Example #8
0
def news_list(db: Session = Depends(utils.get_db),
              limit: int = Query(None, gt=0,
                                 description='Количество новостей')):
    """Fetch the latest events, persist any not yet stored, return all news."""
    grabber = Grabber()

    for event in grabber.news(limit):
        # Skip events already present in the database.
        if get_news_by_link(db, event.get('link')) is not None:
            continue
        try:
            parsed = grabber.grub(event.get('link'))
            published = dt.strptime(event.get('published'),
                                    '%d.%m.%Y %H:%M')
            create_news(db, NewsCreate(**parsed, pub_date=published))
        except Exception as e:
            # Log and carry on: one bad event must not abort the batch.
            logger.info(
                f'Не возможно обработать новость: {event}. Error {e}')

    return get_all_news(db, limit)
Example #9
0
    def get(self):
        """Serve an RSS feed for the requested target (Python 2 / GAE handler).

        Reads `target` (URL or numeric ID) and an optional `course` flag from
        the query string, builds the feed via Grabber, and maps failures to
        400 responses.
        """
        try:
            # target = json.loads(self.request.body)["target"]
            target = self.request.GET['target']
            isCourse = 'course' in self.request.GET

            # if target is a number its an ID!
            try:
                target = long(target)
            except ValueError:
                pass  # not numeric: keep the string target as-is

            # This is required so that fetch requests doesn't time out!
            urlfetch.set_default_fetch_deadline(60)
            g = Grabber(target, isCourse)
            self.response.headers[
                'Content-Type'] = 'application/rss+xml; charset=utf-8'
            self.response.write(g.grab_rss_feed())
        except ValueError:
            self.response.status = '400 malformed request body'
        except KeyError:
            # Raised by the GET['target'] lookup when no target was given.
            self.response.status = '400 no target url specified'
        except InvalidTarget:
            self.response.status = '400 Could not find ID'
Example #10
0
 def __init__(self, *args, **kwargs):
     """Create the commander (bound to this object) and a fresh Grabber.

     *args/**kwargs are accepted but unused here -- presumably consumed by
     a cooperating base class; TODO confirm (no super().__init__ call).
     """
     self.commander = actions.Commander(self)
     self.grabber = Grabber()
Example #11
0
# -*- coding: utf-8 -*-
from grabber import Grabber
import gevent.monkey;
import processor
from bs4 import BeautifulSoup, SoupStrainer
from urllib.request import urlopen
import time
import progressbar
import multiprocessing
import csv
import datetime

grab = Grabber()  # shared grabber instance used by the scraper below

# bar1 = progressbar.ProgressBar(max_value=progressbar.UnknownLength)
# bar2 = progressbar.ProgressBar(max_value=progressbar.UnknownLength)
# bar3 = progressbar.ProgressBar(max_value=progressbar.UnknownLength)

# NOTE(review): this module-level flag shadows the stdlib `logging` module
# name -- consider renaming (e.g. `verbose`) if the module is ever imported.
logging = True
processor.logging = logging
grab.logging = logging


# output_file = 'output/' + 'doc.csv'
# Timestamped output path, e.g. "output/doc 2024-01-01 12:00:00.csv"
# (the [:-7] strips the microseconds from the datetime string).
output_file = 'output/doc {}.csv'.format(str(datetime.datetime.now())[:-7])
failed_file = 'failed.txt'
res = 'res/'  # resource directory -- presumably read elsewhere; TODO confirm


class Scraper:
Example #12
0
import logging

from grabber import Grabber
from database import config

# File handler that records only ERROR-and-above events to error.log.
logger = logging.getLogger('youtuber')
handler = logging.FileHandler('error.log')
handler.setLevel(logging.ERROR)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)

if __name__ == '__main__':
    # Credentials and channel list come from the database config;
    # both fall back to empty values if missing.
    grabber = Grabber(config.get('api_key', ''), config.get('channels', []))
    grabber.run()
Example #13
0
 def __init__(self, db, token):
     """Keep a Grabber authorised with *token* and the database handle."""
     self.g = Grabber(token)
     self.db = db
    def test_title(self):
        """find_posts() extracts the thread title from the parsed page HTML."""
        g = Grabber(BR_THREAD)
        g.page_html = g.soup(TEST_HTML)
        g.find_posts()

        self.assertEqual("Directors And Other Artists On Blade Runner", g.title)
 def setUp(self):
     """Build a Grabber for the fixture thread with pre-parsed test HTML."""
     self.grabber = Grabber(BR_THREAD)
     self.grabber.page_html = self.grabber.soup(TEST_HTML)
 def test_mobile_url(self):
     """A mobile thread URL is normalised to the canonical URL plus slash."""
     g = Grabber(BR_THREAD_MOBILE)
     self.assertEqual(BR_THREAD + '/', g.url)
 def test_pagination(self):
     """current_url ends with ?p=<index> for every page index set."""
     g = Grabber(BR_THREAD)
     for page in range(1, 11):
         g.page_index = page
         self.assertTrue(g.current_url.endswith('?p={}'.format(page)))
 def test_init(self):
     """The constructor records the thread URL and derives the flat view."""
     g = Grabber(BR_THREAD)
     self.assertEqual(BR_THREAD, g.url)
     self.assertEqual(BR_THREAD_FLAT, g.flat_thread)
Example #19
0
from grabber import Grabber


def open_connection():
    """Open a MySQL connection using credentials from the environment."""
    credentials = {
        'user': os.getenv('db_username'),
        'password': os.getenv('db_password'),
        'host': os.getenv('db_host'),
        'database': os.getenv('db_name'),
    }
    return mysql.connector.connect(**credentials)


t = open_connection()
cursor = t.cursor()
query = 'SELECT player FROM player'

rows = cursor.execute(query)
player_ids = cursor.fetchall()
# fetchall() yields 1-tuples; render each as "<id>/" for the grabber.
player_ids = ["%s/" % x for x in player_ids]

# Split the ids into batches of 100 for the grabber.
i = 0
player_load = []
while i < len(player_ids):
    player_load.append(player_ids[i:i + 100])
    i += 100

t = Grabber('tweets')

# Bug fix: iterate the 100-id batches (player_load), not the individual
# ids (player_ids) -- the batching loop above was otherwise dead code.
for players in player_load:
    t.set_player_ids(players)
    t.grab_data()
    print(t.json_load)
from time import sleep, time
from grabber import Grabber

SCALE = 0.003

if __name__ == '__main__':

    # List of (host, port) data servers to grab from; extras are disabled.
    servers = []
    servers.append(('192.168.1.110', 40000))
    #servers.append(('192.168.1.110', 40001))
    #servers.append(('192.168.1.110', 40002))
    grab = Grabber(addr_list=servers, precision=8)

    grab.init()
    #sleep(1)

    try:  # do while CTRL + C  not pressed
        i = 0
        gtime = time()
        startTime = time()
        cnt = 0
        timeStep = 1
        import os
        #while i <= 1000:
        # NOTE(review): the trailing comment says 60 seconds, but the bound
        # is actually 60 * 100000 s (~69 days) -- confirm intended limit.
        while startTime > time(
        ) - 60 * 100000:  # limit execution time to 60 seconds
            # print(i)
            i += 1
            #sleep(0.1)

            grab.process()
Example #21
0
from grabber import Grabber
from flask import Flask, jsonify, request, Response
import json
import os.path
import re

# Variables
URL = "https://cat-fact.herokuapp.com/facts"  # upstream cat-facts API
FILENAME = "data.json"  # local cache of the API response

# Grab the data once, only if the cache file does not exist yet.
if not os.path.isfile(FILENAME):
    g = Grabber()
    g.grab(URL, FILENAME)

app = Flask(__name__)


@app.route('/', methods=["GET"])
def home():
    """Landing page: a plain-text greeting."""
    greeting = "Welcome to cat facts"
    return greeting


@app.route('/api/v1/catfacts', methods=["GET"])
def get_catfacts():
    """Collect optional query parameters for a cat-facts lookup.

    NOTE(review): the rest of this handler is outside the visible chunk;
    the collected params are presumably used to filter the cached facts.
    """

    # Create a params dict to hold query parameters
    params = {}
    params['firstname'] = request.args.get('firstname')
    params['lastname'] = request.args.get('lastname')
    params['id'] = request.args.get('id')
    print("params is ", params)
Example #22
0
 def __init__(self, token):
     """Store a Grabber authorised with *token*."""
     self.g = Grabber(token)