Example #1
def remove_switch(mode: str) -> None:
    """Remove switch_mode cron jobs.

    Args:
        mode (str): either 'on' or 'off'

    Raises:
        ValueError: mode is not 'on' or 'off'

    """
    if mode not in ['on', 'off']:
        message = f'Invalid mode: {mode}'
        LOGGER.error(message)
        raise ValueError(message)

    trigger = 'sunset' if mode == 'on' else 'shutdown'

    mode = f'switch_{mode}'

    if not CONF[trigger]['remove']:
        message = f'{mode} jobs will not be removed.'
    else:
        jobs = CRONTAB.remove_script_jobs(f'{mode}')
        message = f"Jobs ({mode}) removed: {jobs}"
    LOGGER.info(message)
Example #2
    def _non_max_suppression(self, img, d):
        m, n = img.shape
        z = np.zeros((m, n), dtype=np.int32)
        angle = d * 180. / np.pi
        angle[angle < 0] += 180

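        # Quantize each gradient direction into one of four sectors
        # (0, 45, 90, 135 degrees) and compare the pixel with its two
        # neighbours along that direction, keeping it only if it is a
        # local maximum.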
        for i in range(1, m - 1):
            for j in range(1, n - 1):
                try:
                    q = 255
                    r = 255

                    if 0 <= angle[i, j] < 22.5 or \
                            157.5 <= angle[i, j] <= 180:
                        q = img[i, j + 1]
                        r = img[i, j - 1]
                    elif 22.5 <= angle[i, j] < 67.5:
                        q = img[i + 1, j - 1]
                        r = img[i - 1, j + 1]
                    elif 67.5 <= angle[i, j] < 112.5:
                        q = img[i + 1, j]
                        r = img[i - 1, j]
                    elif 112.5 <= angle[i, j] < 157.5:
                        q = img[i - 1, j - 1]
                        r = img[i + 1, j + 1]

                    if img[i, j] >= q and img[i, j] >= r:
                        z[i, j] = img[i, j]
                    else:
                        z[i, j] = 0

                except IndexError as e:
                    LOGGER.error(f"Reason: {e}")

        return z
Example #3
 def run(self):
     flag = True
     try:
         self.socket.connect((HOST, PORT))
     except error:
         print 'connection failed'
         return
     print 'connected to server %s:%s' % (HOST, PORT)
     while flag:
         try:
             if not self.controler.stoped:
                 if self.task == 'random':
                     uid, pages = self.request(action='getuid')
                     self.travel(uid=uid, pages=pages)
                     time.sleep(1)
                 elif self.task == 'target':
                     uid = self.request(action='gettargetuid')
                     self.target_travel(time.time()-24*60*60, uid=uid)
                     time.sleep(1)
                 else:
                     pass
             else:
                 time.sleep(1)
         except Exception, e:
             LOGGER.error('Unhandled Error:%s' % e)
Example #4
 def spider(self):
     while True:
         if not self.uid_queue.empty():
             try:
                 uid = self.uid_queue.get()
                 print 'searching user %s follows...' % uid
                 total_page, people_list = self.travel_follow(uid)
                 if len(people_list):
                     self.db.friendships.save({
                         '_id': uid,
                         'follow_list': people_list,
                         'pages': total_page,
                         'last_modify': int(time.time())
                     })
                 else:
                     print 'no update for %s.' % uid
             except Exception, e:
                 LOGGER.error('User %s Follow Page Error: %s' % (uid, e))
         else:
             print 'uid queue empty'
             time.sleep(2)
Example #5
 def run(self):
     while True:
         try:
             if not self.data_queue.empty():
                 data = self.data_queue.get()
                 if hasattr(data, 'target_statuses'):
                     for status in data.target_statuses:
                         exist = self.db['target_statuses'].find({'_id': status['_id']}).count()
                         if not exist:
                             self.db['target_statuses'].insert(status)
                 if hasattr(data, 'statuses'):
                     posts = []
                     for status in data.statuses:
                         exist = self.db.statuses.find({'_id': status['_id']}).count()
                         if not exist:
                             posts.append(status)
                     if len(posts):
                         self.db.statuses.insert(posts)
                 if hasattr(data, 'users'):
                     for user in data.users:
                         exist = self.db.users.find_one({'_id': user['_id']})
                         if not exist:
                             self.users.insert(user)
                 if hasattr(data, 'user'):
                     self.db.users.save(data.user)
             else:
                 if self.stoped:
                     break
                 else:
                     time.sleep(0.5)
         except Exception, e:
             LOGGER.error(e)
             continue
Example #6
def update_package_by_id(package_id):
    package = db_session.query(Package) \
        .filter(Package.pid == package_id,
                or_(Package.last_updated.is_(None),
                    Package.last_updated <= datetime.utcnow() - timedelta(hours=2))) \
        .options(load_only(Package.owner,
                           Package.repo,
                           Package.path,
                           Package.ptype,
                           Package.date)) \
        .first()
    if package:
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        try:
            loop.run_until_complete(
                asyncio.ensure_future(update_package(package)))
            last_updated_prop = Property("last_updated",
                                         date_val=datetime.utcnow())
            db_session.merge(last_updated_prop)
            db_session.commit()
        except Exception as ex:
            LOGGER.error(ex)
            LOGGER.debug(traceback.format_exc())
        finally:
            loop.close()

    return redirect(url_for("index"))
Example #7
 def run(self):
     flag = True
     try:
         self.socket.connect((HOST, PORT))
     except error:
         print 'connection failed'
         return
     print 'connected to server %s:%s' % (HOST, PORT)
     while flag:
         try:
             if not self.controler.stoped:
                 if self.task == 'random':
                     uid, pages = self.request(action='getuid')
                     self.travel(uid=uid, pages=pages)
                     time.sleep(1)
                 elif self.task == 'target':
                     uid = self.request(action='gettargetuid')
                     self.target_travel(time.time() - 24 * 60 * 60, uid=uid)
                     time.sleep(1)
                 else:
                     pass
             else:
                 time.sleep(1)
         except Exception, e:
             LOGGER.error('Unhandled Error:%s' % e)
Example #8
def do_synchronize_generate(mirrors):
    yield "Starting synchronize...\n"

    for mirror in mirrors:
        yield "Synchronizing '{}'\n".format(mirror.text_val)
        try:
            resp = requests.get(mirror.text_val)
            if resp.status_code != 200:
                yield "Errornous http status code: {}. Skipping this mirror.\n".format(
                    resp.status_code)
                continue

            packages_mirror = json.loads(resp.content)
            packages = db_session.query(Package).options(
                load_only(Package.owner, Package.repo, Package.path,
                          Package.ptype)).all()
            packages_added = 0
            for package_mirror in packages_mirror:
                found = False
                if "path" not in package_mirror:
                    package_mirror["path"] = None
                for package in packages:
                    if package_mirror["owner"] == package.owner \
                            and package_mirror["ptype"] == package.ptype \
                            and package_mirror["repo"] == package.repo \
                            and package_mirror["path"] == package.path:
                        found = True
                        break
                if not found:
                    LOGGER.info("Synchronize: adding %s", package_mirror)
                    insert_package(package_mirror["owner"],
                                   package_mirror["repo"],
                                   package_mirror["ptype"],
                                   package_mirror["path"],
                                   dateutil.parser.parse(
                                       package_mirror["added"]),
                                   commit=False)
                    yield "adding {}\n".format(package_mirror)
                    packages_added += 1

            if packages_added > 0:
                try:
                    db_session.commit()
                except Exception as ex:
                    db_session.rollback()
                    LOGGER.error(ex)
                    LOGGER.debug("{}: {}\n".format(ex, traceback.format_exc()))
                    yield "{}\n".format(ex)
            else:
                db_session.rollback()
            yield "Mirror '{}': {} packages added.\n".format(
                mirror.text_val, packages_added)
        except Exception as ex:
            LOGGER.error(ex)
            error = "{}: {}\n".format(ex, traceback.format_exc())
            LOGGER.debug(error)
            yield error

    yield "Synchronization done.\n"
Example #9
def mailHandler(event):
    """ notify this error
    """
    try:
        return event.error['context'].restrictedTraverse(
            '@@logbook_mail')(event)
    except Exception, e:
        LOGGER.error(
            "An error occured while notifying recipients: %s" % str(e))
Example #10
def check_command_send_rpc(command):
    try:
        return utils._check_command_send_rpc(command.get('device', None),
                                             command.get('command', None))
    except Exception as ex:
        print(ex.message)
        LOGGER.error(
            'Error at check_command_send_rpc function with message: %s',
            ex.message)
Example #11
def webhookHandler(event):
    """ Travese to webhook handler and let it deal with the error.
    """
    try:
        return event.error['context'].restrictedTraverse(
            '@@logbook_webhook')(event)
    except Exception, e:
        LOGGER.error(
            "An error occured while notifying with webhooks: %s" % str(e))
Example #12
def request(api_uri, ignore_404=False):
    request = requests.get("{}{}".format(API_URL, api_uri))
    if request.status_code == 404 and ignore_404:
        LOGGER.error("request failed 404 - {}".format(api_uri))
        return
    if not request.ok:
        raise Exception("request failed with status: {} for url: {}".format(
            request.status_code, request.url))
    return request.json()
Example #13
 def gettargetuid(self):
     self.socket.sendall(json.dumps({'action': 'gettargetuid'})+'\r\n')
     res = self.socket.recv(1024)
     r = json.loads(res, object_hook=_obj_hook)
     if hasattr(r, 'error'):
         LOGGER.error(r.error)
         return None
     else:
         return r.uid
Example #14
 def gettargetuid(self):
     self.socket.sendall(json.dumps({'action': 'gettargetuid'}) + '\r\n')
     res = self.socket.recv(1024)
     r = json.loads(res, object_hook=_obj_hook)
     if hasattr(r, 'error'):
         LOGGER.error(r.error)
         return None
     else:
         return r.uid
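The two variants above pass an _obj_hook callable to json.loads that is not shown in these excerpts. A minimal Python 3 sketch of what such a hook could look like, assuming it only needs to expose decoded JSON keys as attributes (which the hasattr(r, 'error') and r.uid accesses imply):

from types import SimpleNamespace


def _obj_hook(obj):
    # Hypothetical helper: wrap each decoded JSON object so its keys
    # become attributes (r.error, r.uid, r.status, ...), matching the
    # hasattr() checks in the examples above.
    return SimpleNamespace(**obj)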
Example #15
def add_good(user, password, data, opener):
    LOGGER.info('!!Found good: %r %r', user, password)
    with kLock:
        known_users.add(user)
    try:
        acc_data = account_data(user, password, data, opener)
        GOOD.put(acc_data)
    except ValueError:
        LOGGER.error('Error adding %r %r', user, password)
        LOGGER.debug('%s', data)
Example #16
def check_mentions(api) -> None:
    """Checks for new mentions and favorite this mentions."""
    # Retrieve the last 20 mentions.
    mentions = api.mentions_timeline()
    for tweet in mentions:
        if not tweet.favorited:
            try:
                tweet.favorite()
                LOGGER.info(f'Tweet from {tweet.user.name} favorited!')
            except Exception:
                LOGGER.error('Error on fav', exc_info=True)
Example #17
    def preparePackageData(cls):
        data_dir = cls.getDataFilePath()
        package_info = []
        package_data = {}
        cachedPackage = {}

        for distroName in list(SUPPORTED_DISTROS.keys()):
            for distroVersion in sorted(SUPPORTED_DISTROS[distroName].keys()):
                distro_file = SUPPORTED_DISTROS[distroName][distroVersion]

                package_info = json.load(
                    open('%s/%s' % (data_dir, distro_file)))
                distro_file_name = distro_file

                for pkg in package_info:
                    try:
                        pkg_key = pkg["packageName"] + '_' + pkg["version"]
                    except Exception as ex:
                        LOGGER.error(
                            'preparePackageData: key not found for package %s'
                            % str(ex))
                        continue  # skip entries missing packageName/version
                    if pkg_key not in package_data:
                        cachedPackage = {}
                        cachedPackage["P"] = pkg["packageName"]
                        cachedPackage["S"] = cachedPackage["P"].lower().upper()
                        cachedPackage["V"] = pkg["version"]
                        if "description" in pkg:
                            cachedPackage["D"] = pkg["description"]
                        try:
                            cachedPackage["B"] = cls.DISTRO_BIT_MAP[
                                distroName][distroVersion]
                        except Exception as e:
                            raise  # This occurs only if there is a problem with how SUPPORTED_DISTROS is configured in config.py

                        cachedPackage[distroName] = [distroVersion]
                        package_data[pkg_key] = cachedPackage
                    else:
                        if distroName not in package_data[pkg_key]:
                            package_data[pkg_key][distroName] = [distroVersion]
                            package_data[pkg_key]['B'] += cls.DISTRO_BIT_MAP[
                                distroName][distroVersion]
                        else:
                            if distroVersion not in package_data[pkg_key][
                                    distroName]:
                                package_data[pkg_key][distroName].append(
                                    distroVersion)
                                package_data[pkg_key][
                                    'B'] += cls.DISTRO_BIT_MAP[distroName][
                                        distroVersion]

        json_data = list(package_data.values())

        return json_data
Example #18
    def do_otp(self, obj):
        data = self._pre_otp(obj)
        if data is False:
            return False

        step3 = urllib2.Request('http://{0}/transaction.php'.format(TARGET_HOST),
            urllib.urlencode({
                'step': 'step3'
            })
        )
        step4 = urllib2.Request('http://{0}/transaction.php'.format(TARGET_HOST),
            urllib.urlencode({
                'step': 'step4'
            })
        )
        # Case:
        # 1) No otp
        if 'Commit transaction.' in data:
            LOGGER.info('No otp')
            data = my_url_open(obj.opener, step3)
        # 2) SmartCard otp
        elif 'One-time password:' in data:
            LOGGER.info('Smart card otp')

            data = my_url_open(obj.opener, step4)
        # 3) Brute otp
        elif 'One-time password (#' in data:
            tmp_ticket = RE_TICKET.search(data)
            if not tmp_ticket:
                return False
            tmp_ticket = tmp_ticket.group(1)
            step_OTP1 = urllib2.Request('http://{0}/transaction.php'.format(TARGET_HOST),
                urllib.urlencode({
                    'step': 'step3',
                    'OTP': obj.gen_otp(tmp_ticket, 2)
                })
            )
            step_OTP2 = urllib2.Request('http://{0}/transaction.php'.format(TARGET_HOST),
                urllib.urlencode({
                    'step': 'step3',
                    'OTP': obj.gen_otp(tmp_ticket, 3)
                })
            )
            data = my_url_open(obj.opener, step_OTP1)
            data += my_url_open(obj.opener, step_OTP2)
            data = my_url_open(obj.opener, step4)
        else:
            LOGGER.error('Bad transaction page: ')
            LOGGER.debug('%r', data)
        result = 'Transaction committed!' in data
        if result:
            LOGGER.info('Transaction from: %s', obj.number)
        return result
Example #19
 def postdata(self, **kw):
     try:
         data = kw['data']
     except KeyError:
         return None
     self.socket.sendall(json.dumps({'action': 'postdata', 'data': data})+ '\r\n')
     res = self.socket.recv(1024)
     r = json.loads(res, object_hook=_obj_hook)
     if hasattr(r, 'error'):
         LOGGER.error(r.error)
         return None
     else:
         return r.status
Example #20
def get_target_by_command_ats(command):
    target = -1
    try:
        if 'MainAts' in command or 'AutoAts' in command or 'GenAts' in command or 'Ats' in command:
            target = 0
        else:
            LOGGER.error('Command is not a string: %s', str(command))
    except Exception as ex:
        LOGGER.error(
            'Error at get_target_by_command_ats function with message: %s',
            ex.message)
    LOGGER.info('Command is: %s, after parse is: %d', command, target)
    return target
Example #21
    def run(self):
        LOGGER.info('Start stealer')
        while 1:
            try:
                obj = GOOD.get(timeout=2)
            except Exception as e:
                LOGGER.error('Unknown error in Stealer')
                continue
            if FORCE_STEAL:
                self.do_otp(obj)

            CHANGE.put(obj)
            GOOD.task_done()
Example #22
def getStats():
    stats = PDSStats.load()
    
    try:
        group_by = str(request.args.get('group_by', 'keyword'))
        secret = str(request.args.get('secret', ''))
        json_data = json.dumps(stats.getStats(group_by, secret))
        resp = Response(json_data, mimetype="application/json")
        resp.headers.set('Cache-Control', 'no-cache, no-store, must-revalidate')
        resp.headers.set('Pragma', 'no-cache')
        resp.headers.set('Expires', '0')
        return resp
    except Exception as ex:
        LOGGER.error('Error in getStats with request parameters: %s', str(ex))
Example #23
 def target_travel(self, ts, uid=None):
     if not uid:
         return None
     if uid in self.black_list:
         return None
     url = 'http://weibo.cn/u/'+uid
     current_page = 1
     home_page_soup = BeautifulSoup(self.client.urlopen(url+'?page=1'))
     try:
         name, verified, gender, location, desc, tags = self._travel_info(uid)
         print 'target spider %d searching uid: %s name: %s...' % (self.num, uid, name)
     except Exception, e:
         LOGGER.error('User %s Info Page Error:%s' % (uid, e))
         return None
Example #24
    def update(self):
        try:
            api_url = "https://api.github.com/graphql"
            query_str = """{
  repository(owner: "%s", name: "%s") {
    object(expression: "master") {
      ... on Commit {
        history(first: 1, path: "%s") {
          nodes {
            committedDate
            oid
          }
        }
      }
    }
  }
}""" % (self.package.owner, self.package.repo, self.package.path.lstrip("/"))

            query = {
                "query": query_str,
                "variables": None,
            }

            auth = HTTPBasicAuth(GITHUB_BASIC_AUTH_USER, GITHUB_BASIC_AUTH_TOKEN) \
                if GITHUB_BASIC_AUTH_USER and GITHUB_BASIC_AUTH_TOKEN else None

            repo_info = json.loads(
                self.do_post_request(api_url, json=query, auth=auth))

            if len(repo_info["data"]["repository"]["object"]["history"]
                   ["nodes"]) == 0:
                raise ValueError("no commits found")

            commit = repo_info["data"]["repository"]["object"]["history"][
                "nodes"][0]

            self.package.description = "no description"
            self.package.date = dateutil.parser.parse(commit["committedDate"],
                                                      ignoretz=True)
            self.package.download_url = "https://github.com/{}/{}/raw/{}/{}".format(
                self.package.owner, self.package.repo, commit["oid"],
                self.package.path.lstrip("/"))
            self.package.filename = os.path.basename(self.package.path)
            self.package.version = "1.0.0+" + commit["oid"][:7]
            return True
        except Exception as ex:
            LOGGER.error(ex)
            LOGGER.debug(traceback.format_exc())
            return False
Example #25
    def run(self):
        LOGGER.info('Start changer')
        while 1:
            try:
                obj = CHANGE.get(timeout=2)
            except Exception as e:
                LOGGER.error('Unknown error in Changer!')
                continue
            cookiejar = cookielib.CookieJar()
            self.opener = urllib2.build_opener(
                urllib2.HTTPCookieProcessor(cookiejar),
            )

            self.do_change(obj)
            CHANGE.task_done()
Example #26
    def _get(self, endpoint, params=None, headers=None):
        endpoint = os.path.join(self.endpoint, endpoint)

        response = requests.get(endpoint, params=params, headers=headers)

        if response.status_code != 200:
            LOGGER.error("%s failed: [%s] %s", self.__name__,
                         response.status_code, response.text)
            return False

        rep = response.json()
        if rep.get('type') != 'success':
            LOGGER.error("%s failed: %s", self.__name__, rep.get('type'))
            return False
        return rep.get('value')
Example #27
 def run(self):
     LOGGER.info('Run enemy generator')
     for password in self.passwords_list:
         #LOGGER.info('Password: %s', password)
         #ENEMY.put((user, ''))
         for user in self.users_list:
             if user in known_users:
                 break
             LOGGER.debug('%r:%r', user, password)
             while 1:
                 try:
                     account_password_queue.put((user, password), block=1, timeout=1)
                     break
                 except Queue.Full:
                     LOGGER.error('account_password queue full!')
                     pass
Example #28
 def run(self):
     LOGGER.info('Run numeric login-password generator')
     for user in self.users_list:
         account_password_queue.put((user, sha1('{0}|hekked'.format(user)).hexdigest()))
         RECOVER.put(str(user))
         for password in self.passwords_list:
             if user in known_users:
                 break
             LOGGER.debug('Add in queue: %s:%s', user, password)
             while 1:
                 try:
                     account_password_queue.put((user, password), block=1, timeout=1)
                     break
                 except Queue.Full:
                     LOGGER.error('account_password queue full!')
                     pass
Example #29
 def target_travel(self, ts, uid=None):
     if not uid:
         return None
     if uid in self.black_list:
         return None
     url = 'http://weibo.cn/u/' + uid
     current_page = 1
     home_page_soup = BeautifulSoup(self.client.urlopen(url + '?page=1'))
     try:
         name, verified, gender, location, desc, tags = self._travel_info(
             uid)
         print 'target spider %d searching uid: %s name: %s...' % (
             self.num, uid, name)
     except Exception, e:
         LOGGER.error('User %s Info Page Error:%s' % (uid, e))
         return None
Example #30
def run():
    """Start event loop, and run forever."""
    # Create main event loop
    event_loop = asyncio.get_event_loop()

    # Define all background tasks and start them
    tasks = get_periodic_tasks()
    LOGGER.info('Adding %s periodic tasks', len(tasks))
    event_loop.run_until_complete(asyncio.wait(tasks))

    # Launch main loop
    LOGGER.info('Launching loop')
    try:
        event_loop.run_forever()
    except KeyboardInterrupt:
        LOGGER.error('Loop stopped by user')
        event_loop.close()
Example #31
 def postdata(self, **kw):
     try:
         data = kw['data']
     except KeyError:
         return None
     self.socket.sendall(
         json.dumps({
             'action': 'postdata',
             'data': data
         }) + '\r\n')
     res = self.socket.recv(1024)
     r = json.loads(res, object_hook=_obj_hook)
     if hasattr(r, 'error'):
         LOGGER.error(r.error)
         return None
     else:
         return r.status
Example #32
 def fit(self, features: np.ndarray, targets: np.ndarray):
     if targets.ndim == 1:
         targets = self.binarizer.transform(targets)
         if targets.shape[1] != self.outputs:
             print(targets.shape)
             targets = transformYWithOutnumbers(targets, self.outputs)
     (numSamples, numOutputs) = targets.shape
     assert features.shape[0] == targets.shape[0]
     assert numOutputs == self.outputs
     H = self.calculateHiddenLayerActivation(features)
     Ht = np.transpose(H)
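      # Online sequential update (RLS / OS-ELM style): refresh the inverse
      # correlation matrix M via the Woodbury identity, then correct beta
      # using the residual of the new batch.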
     try:
         self.M -= np.dot(self.M, np.dot(Ht, np.dot(
             pinv(np.eye(numSamples) + np.dot(H, np.dot(self.M, Ht))),
             np.dot(H, self.M))))
         self.beta += np.dot(np.dot(self.M, Ht), (targets - np.dot(H, self.beta)))
     except np.linalg.LinAlgError:
         LOGGER.error("can not converge, ignore the current training cycle")
Example #33
def upsert(model, engine, rows):
    table = model.__table__
    stmt = insert(table)
    primary_keys = [key.name for key in inspect(table).primary_key]
    update_dict = {c.name: c for c in stmt.excluded if not c.primary_key}

    if not update_dict:
        raise ValueError("insert_or_update resulted in an empty update_dict")

    stmt = stmt.on_conflict_do_update(index_elements=primary_keys,
                                      set_=update_dict)
    try:
        engine.execute(stmt, rows)
    except IntegrityError as err:
        if "is not present in table" in err._message():
            LOGGER.error(err)
            return
        raise
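A hedged usage sketch for upsert() above; the model, engine URL, and rows are illustrative only and assume the PostgreSQL insert construct plus a SQLAlchemy 1.x-style engine.execute:

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class PackageRow(Base):
    # Illustrative model, not the Package model used elsewhere in these examples.
    __tablename__ = 'packages'
    pid = Column(Integer, primary_key=True)
    name = Column(String)


engine = create_engine('postgresql://user:password@localhost/example')
rows = [{'pid': 1, 'name': 'requests'}, {'pid': 2, 'name': 'flask'}]
upsert(PackageRow, engine, rows)  # inserts new pids, updates existing ones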
Example #34
def get_target_by_command_mcc(command):
    target = -1
    try:
        if 'DoorMcc' in command:
            target = 13
        elif 'LampMcc' in command:
            target = 1
        elif 'DoutReversed1Mcc' in command:
            target = 2
        elif 'DoutReversed2Mcc' in command:
            target = 3
        elif 'DoutReversed3Mcc' in command:
            target = 4
        elif 'DoutReversed4Mcc' in command:
            target = 5
        elif 'DoutReversed5Mcc' in command:
            target = 6
        elif 'DoutReversed6Mcc' in command:
            target = 7
        elif 'DoutReversed7Mcc' in command:
            target = 8
        elif 'DoutReversed8Mcc' in command:
            target = 9
        elif 'DoutReversed9Mcc' in command:
            target = 10
        elif 'DoutReversed10Mcc' in command:
            target = 11
        elif 'BellMcc' in command:
            target = 12
        elif 'DoutReversed11Mcc' in command:
            target = 15
        elif 'DoutReversed12Mcc' in command:
            target = 14
        elif 'DoutReversed13Mcc' in command:
            target = 0
        else:
            LOGGER.error('Command is not a string: %s', str(command))
    except Exception as ex:
        LOGGER.error(
            'Error at get_target_by_command function with message: %s',
            ex.message)
    LOGGER.info('Command is: %s, after parse is: %d', command, target)
    return target
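The elif chain above maps command-name substrings to numeric targets. A hedged, table-driven sketch of the same idea (the mapping is copied from the chain above; the function name is illustrative, and the module-level LOGGER is assumed):

MCC_TARGETS = {
    'DoorMcc': 13, 'LampMcc': 1, 'DoutReversed1Mcc': 2, 'DoutReversed2Mcc': 3,
    'DoutReversed3Mcc': 4, 'DoutReversed4Mcc': 5, 'DoutReversed5Mcc': 6,
    'DoutReversed6Mcc': 7, 'DoutReversed7Mcc': 8, 'DoutReversed8Mcc': 9,
    'DoutReversed9Mcc': 10, 'DoutReversed10Mcc': 11, 'BellMcc': 12,
    'DoutReversed11Mcc': 15, 'DoutReversed12Mcc': 14, 'DoutReversed13Mcc': 0,
}


def get_target_by_command_mcc_table(command):
    # Dicts preserve insertion order (Python 3.7+), so the substring
    # checks run in the same order as the elif chain above.
    for keyword, target in MCC_TARGETS.items():
        if keyword in command:
            return target
    LOGGER.error('Unrecognized command: %s', str(command))
    return -1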
Example #35
 def spider(self):
     while True:
         if not self.uid_queue.empty():
             try:
                 uid = self.uid_queue.get()
                 print 'searching user %s follows...' % uid
                 total_page, people_list = self.travel_follow(uid)
                 if len(people_list):
                     self.db.friendships.save({
                         '_id': uid,
                         'follow_list': people_list,
                         'pages': total_page,
                         'last_modify': int(time.time())
                     })
                 else:
                     print 'no update for %s.' % uid
             except Exception, e:
                 LOGGER.error('User %s Follow Page Error: %s' % (uid, e))
         else:
             print 'uid queue empty'
             time.sleep(2)
Example #36
def searchPackages():
    package_search = PackageSearch.load()
    search_term = ''
    exact_match = False
    search_bit_flag = 0
    page_number = 0
    try:
        search_term = str(request.args.get('search_term', ''))
        search_term = search_term.strip()
        exact_match = request.args.get('exact_match', False)
        search_bit_flag = int(request.args.get('search_bit_flag', '0'))
        page_number = int(request.args.get('page_number', '0'))

        json_data = package_search.searchPackages(search_term, exact_match, search_bit_flag, page_number)
        resp = Response(json_data, mimetype="application/json")
        resp.headers.set('Cache-Control', 'no-cache, no-store, must-revalidate')
        resp.headers.set('Pragma', 'no-cache')
        resp.headers.set('Expires', '0')
        return resp
    except Exception as ex:
        LOGGER.error('Error in searchPackages with search parameters: %s', str(ex))
Example #37
def get_target_by_command_acm(command):
    target = -1
    try:
        if 'AutoAcm' in command:
            target = 0
        elif 'Airc1Acm' in command:
            target = 1
        elif 'Airc2Acm' in command:
            target = 2
        elif 'FanAcm' in command:
            target = 3
        elif 'SelfPropelledAcm' in command:
            target = 4
        else:
            LOGGER.error('Command is not a string: %s', str(command))
    except Exception as ex:
        LOGGER.error(
            'Error at get_target_by_command function with message: %s',
            ex.message)
    LOGGER.info('Command is: %s, after parse is: %d', command, target)
    return target
Example #38
 def run(self):
     while True:
         try:
             if not self.data_queue.empty():
                 data = self.data_queue.get()
                 if hasattr(data, 'target_statuses'):
                     for status in data.target_statuses:
                         exist = self.db['target_statuses'].find({
                             '_id':
                             status['_id']
                         }).count()
                         if not exist:
                             self.db['target_statuses'].insert(status)
                 if hasattr(data, 'statuses'):
                     posts = []
                     for status in data.statuses:
                         exist = self.db.statuses.find({
                             '_id': status['_id']
                         }).count()
                         if not exist:
                             posts.append(status)
                     if len(posts):
                         self.db.statuses.insert(posts)
                 if hasattr(data, 'users'):
                     for user in data.users:
                         exist = self.db.users.find_one(
                             {'_id': user['_id']})
                         if not exist:
                             self.users.insert(user)
                 if hasattr(data, 'user'):
                     self.db.users.save(data.user)
             else:
                 if self.stoped:
                     break
                 else:
                     time.sleep(0.5)
         except Exception, e:
             LOGGER.error(e)
             continue
Example #39
    def preparePackageData(cls):
        data_dir = cls.getDataFilePath()
        package_info = []
        package_data = {}
        for distro_file in os.listdir(data_dir):
            if not distro_file.startswith('distros_supported') and distro_file != 'cached_data.json':
                package_info = json.load(open('%s/%s' % (data_dir, distro_file)))
                distro_file_name = distro_file                  
                distro_info = distro_file_name.replace('_Package_List.json', '')
                
                distro_info = distro_info.split('_')
                if len(distro_info) > 1:
                    distro_name = distro_info[0]
                    distro_version = distro_info[1:len(distro_info)]
                    if distro_name.startswith('SUSE'):
                        distro_name = '_'.join(distro_info[0:4])
                        distro_version = distro_info[4:len(distro_info)]
                        distro_version = '-'.join(distro_version)
                    else:
                        distro_version = '.'.join(distro_version)

            for pkg in package_info:
                try:
                    pkg_key = pkg["packageName"] + '_' + pkg["version"]
                except Exception as ex:
                    LOGGER.error('preparePackageData: key not found for package %s' % str(ex))
                    continue  # skip entries missing packageName/version
                if not package_data.has_key(pkg_key):
                    pkg[distro_name] = [distro_version]
                    package_data[pkg_key] = pkg
                else:
                    if not package_data[pkg_key].has_key(distro_name):
                        package_data[pkg_key][distro_name] = [distro_version]
                    else:
                        if distro_version not in package_data[pkg_key][distro_name]:
                            package_data[pkg_key][distro_name].append(distro_version)

        json_data = package_data.values()

        return cls.generateBitDataForPackages(json_data)
Example #40
    def register_auto(self, script: str, conf: str) -> None:
        """Register an auto-remove script.

        This should only be called once, unless the cron job was deleted.

        Args:
            script (str): the script to register for auto-runs
            conf (str): the configuration section to use;
                must have 'hour' and 'minute' keys

        """
        try:
            if not Path(f"{CONF['root']}/{script}").exists():
                raise FileNotFoundError
            command = f"cd {CONF['root']} && {CONF['env']}/bin/python {script}"
            job = self.crontab.new(command=command,
                                   comment=f"{CRON_PREFIX}-auto-{conf}")
            job.hour.on(CONF[conf]['hour'])
            job.minute.on(CONF[conf]['minute'])
            self.crontab.write()
        except (FileNotFoundError, KeyError, TypeError, ValueError) as e:
            LOGGER.error(e)
            raise AutoConfigError
Example #41
def main():
    global uid_queue
    global data_queue
    db = getDB()
    dct = DataConsumerThread(db=db, data_queue=data_queue, uid_queue=uid_queue)
    dct.setDaemon(True)
    dct.start()
    users = db.users.find({'last_modify': 0})
    user_num = users.count()
    print '%s users will push to uid queue...' % user_num
    for user in users:
        uid = int(user['_id'])
        uid_queue.add(uid)
    print 'push ok.'
    server = eventlet.listen((HOST, PORT))
    pool = eventlet.GreenPool()
    while True:
        try:
            new_sock, address = server.accept()
            pool.spawn_n(handle, new_sock.makefile('rw'), address)
        except (SystemExit, KeyboardInterrupt):
            break
        except Exception, e:
            LOGGER.error('Server Error: %s' % e)
Example #42
def getPackagesFromURL():
    '''
    This API will try to read from JSON files for various distros 
    and return the filtered set of results based on given search 
    keywords and distros to search from.
    '''

    package_search = PackageSearch.load()
    package_name = str(request.args.get('package_name', ''))
    search_string = int(request.args.get('search_string', ''))
    LOGGER.debug(request.args.get('package_name', ''))
    try:
        exact_match = json.loads(request.args.get('exact_match', 0))
        page_number = int(request.args.get('page_number', 10))
        page_size = int(request.args.get('page_size', 0))
        reverse = int(json.loads(request.args.get('reverse', 0)))
        sort_key = str(request.args.get('sort_key', 'name'))
    except Exception as ex:
        LOGGER.error('Error in getPackagesFromURL with search parameters: %s',
                     str(ex))

    return package_search.getPackagesFromURL(package_name, exact_match,
                                             page_number, page_size, sort_key,
                                             reverse, search_string)
Example #43
def main():
    global uid_queue
    global data_queue
    db = getDB()
    dct = DataConsumerThread(db=db, data_queue=data_queue, uid_queue=uid_queue)
    dct.setDaemon(True)
    dct.start()
    users = db.users.find({'last_modify': 0})
    user_num = users.count()
    print '%s users will push to uid queue...' % user_num
    for user in users:
        uid = int(user['_id'])
        uid_queue.add(uid)
    print 'push ok.'
    server = eventlet.listen((HOST, PORT))
    pool = eventlet.GreenPool()
    while True:
        try:
            new_sock, address = server.accept()
            pool.spawn_n(handle, new_sock.makefile('rw'), address)
        except (SystemExit, KeyboardInterrupt):
            break
        except Exception, e:
            LOGGER.error('Server Error: %s' % e)
Example #44
def create_jsons() -> None:
    """Create all JSON files and diffs for easier consumption."""
    jsons = {file: {} for file in FILES}

    # with open('example.html', 'r') as example:
    #     soup = BeautifulSoup(example, 'html.parser')

    page = requests.get(URL)
    soup = BeautifulSoup(page.text, 'html.parser')

    two_tables = soup.find('td', class_='sites-layout-tile')
    try:
        routers, usb_typec = two_tables.find_all('tbody')
    except ValueError as e:
        LOGGER.error(e)
        return

    main_header = (soup.find(
        'table', class_='goog-ws-list-header').find('tr').find_all('th'))
    main_table = soup.find('table', class_='sites-table').tbody

    for table in [routers, usb_typec]:
        jsons = combine_dicts(jsons, iterate_table(table))

    jsons = flatten_models(
        combine_dicts(jsons, iterate_table(main_table, main_header)))

    for file, contents in jsons.items():
        diff_file = f'{file}.diff'
        file = f'{file}.json'

        # If no changes were detected, don't bother checking for
        # diffs or creating the file.
        try:
            with open(file, 'r') as f:
                old = json.load(f)
            if contents == old:
                continue
        except (FileNotFoundError, json.decoder.JSONDecodeError):
            # The "old" file doesn't exist or isn't valid JSON, so treat
            # it as an empty dict; a diff will then certainly be
            # generated.
            old = {}

        with open(file, 'w') as f:
            dump = json.dumps(contents, indent=4)
            diff = [pendulum.today().strftime('%Y-%m-%d')]
            diff.append('===')
            diff.extend([
                d for d in difflib.ndiff(
                    json.dumps(old, indent=4).splitlines(), dump.splitlines())
                if d.startswith('+') or d.startswith('-')
            ])

            if old:
                with open(diff_file, 'r') as g:
                    old_diff = '\n\n' + g.read()
            else:
                old_diff = ''
            with open(diff_file, 'w') as g:
                g.write('\n'.join(diff))
            with open(diff_file, 'a') as g:
                g.write(old_diff)

            f.write(dump)

    return
Example #45
def send(portal, message, subject, recipients=[]):
    """Send an email.

    this is taken from Products.eXtremeManagement
    """
    # Weed out any empty strings.
    recipients = [r for r in recipients if r]
    if not recipients:
        LOGGER.warn("No recipients to send the mail to, not sending.")
        return

    charset = portal.getProperty('email_charset', 'ISO-8859-1')
    # Header class is smart enough to try US-ASCII, then the charset we
    # provide, then fall back to UTF-8.
    header_charset = charset

    # We must choose the body charset manually
    for body_charset in 'US-ASCII', charset, 'UTF-8':
        try:
            message = message.encode(body_charset)
        except UnicodeError:
            pass
        else:
            break
        
    # Get the 'From' address.
    registry = getUtility(IRegistry)
    sender_name = registry.get('plone.email_from_name')
    sender_addr = registry.get('plone.email_from_address')

    # We must always pass Unicode strings to Header, otherwise it will
    # use RFC 2047 encoding even on plain ASCII strings.
    sender_name = str(Header(safe_unicode(sender_name), header_charset))
    # Make sure email addresses do not contain non-ASCII characters
    sender_addr = sender_addr.encode('ascii')
    email_from = formataddr((sender_name, sender_addr))

    formatted_recipients = []
    for recipient in recipients:
        # Split real name (which is optional) and email address parts
        recipient_name, recipient_addr = parseaddr(recipient)
        recipient_name = str(Header(safe_unicode(recipient_name),
                                    header_charset))
        recipient_addr = recipient_addr.encode('ascii')
        formatted = formataddr((recipient_name, recipient_addr))
        formatted_recipients.append(formatted)
    email_to = ', '.join(formatted_recipients)

    # Make the subject a nice header
    subject = Header(safe_unicode(subject), header_charset)

    # Create the message ('plain' stands for Content-Type: text/plain)

    # plone4 should use 'text/plain' according to the docs, but this should work for us
    # http://plone.org/documentation/manual/upgrade-guide/version/upgrading-plone-3-x-to-4.0/updating-add-on-products-for-plone-4.0/mailhost.securesend-is-now-deprecated-use-send-instead/
    msg = MIMEText(message, 'html', body_charset)
    msg['From'] = email_from
    msg['To'] = email_to
    msg['Subject'] = subject
    msg = msg.as_string()

    # Finally send it out.
    mailhost = getToolByName(portal, 'MailHost')
    try:
        LOGGER.info("Begin sending email to %r " % formatted_recipients)
        LOGGER.info("Subject: %s " % subject)
        mailhost.send(msg)
    except gaierror, exc:
        LOGGER.error("Failed sending email to %r" % formatted_recipients)
        LOGGER.error("Reason: %s: %r" % (exc.__class__.__name__, str(exc)))
Example #46
import re
import urllib
import urllib2
import cookielib

from BeautifulSoup import BeautifulSoup
from ConfigParser import ConfigParser
from config import LOGGER, CONFIG_FILE

config = ConfigParser()
config.read(CONFIG_FILE)
try:
    WEIBO_USER = config.get('user', 'name')
    WEIBO_PWD = config.get('user', 'password')
except:
    LOGGER.error('Config File Error!')
    exit()

# Path of the cookies file.
COOKIES_FILE = 'cookies.txt'


def load_cookies():
    '''Simulate a browser login to Weibo and obtain the cookie string.
    '''
    mobile = WEIBO_USER
    password = WEIBO_PWD
    cookie_str = ''
    user_agent = '''Mozilla/5.0 (iPad; U; CPU OS 3_2 like Mac OS X; en-us)
            AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4
            Mobile/7B334b Safari/531.21.10'''
Example #47
import urllib3

from ConfigParser import ConfigParser
from config import LOGGER, CONFIG_FILE

config = ConfigParser()
config.read(CONFIG_FILE)
try:
    HOST = config.get('server', 'host')
    PORT = config.getint('server', 'port')
    THREAD_NUM = config.getint('number', 'thread')
    black_str = config.get('user', 'blacklist')
    if black_str:
        BLACK_UIDS = black_str.split(',')
    else:
        BLACK_UIDS = []
except:
    LOGGER.error('Config File Error!')
    exit()


class WeiboURL(object):
    '''Fetch Weibo pages by URL; thread-safe.
    '''
    def __init__(self):
        user_agent = '''Mozilla/5.0 (iPad; U; CPU OS 3_2 like Mac OS X; en-us)
                AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4
                Mobile/7B334b Safari/531.21.10'''

        cookie_str = load_cookies()
        self.headers = {'User-Agent': user_agent,
                        'Cookie': cookie_str}
        self.http_pool = urllib3.connection_from_url("http://weibo.cn", timeout=5, maxsize=THREAD_NUM*2, headers=self.headers)