Example #1
    def _transform_mobile_image(
        self, original_image_blob: Blob, new_image_blob: Blob
    ) -> Optional[Blob]:
        """
        Create smaller image size to be served on mobile devices.

        :param Blob original_image_blob: Original image blob.
        :param Blob new_image_blob: Newly created Blob for the mobile image.

        :returns: Optional[Blob]
        """
        img_meta = self._get_image_meta(original_image_blob)
        img_bytes = original_image_blob.download_as_bytes()
        if img_bytes:
            stream = BytesIO(img_bytes)
            im = Image.open(stream)
            try:
                with BytesIO() as output:
                    new_image = im.reduce(2)
                    new_image.save(output, format=img_meta["format"])
                    new_image_blob.upload_from_string(
                        output.getvalue(), content_type=img_meta["content-type"]
                    )
                    LOGGER.success(f"Created mobile image `{new_image_blob.name}`")
                    return new_image_blob
            except GoogleCloudError as e:
                LOGGER.error(
                    f"GoogleCloudError while saving mobile image `{new_image_blob.name}`: {e}"
                )
            except Exception as e:
                LOGGER.error(
                    f"Unexpected exception while saving mobile image `{new_image_blob.name}`: {e}"
                )
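The `_get_image_meta` helper called above is not shown in this listing. Below is a minimal sketch of what it plausibly returns (a Pillow format name and an HTTP content type), inferred from the blob's file extension; the extension mapping and the helper body are assumptions, not the source project's implementation:

    def _get_image_meta(self, image_blob: Blob) -> dict:
        """Assumed helper: derive Pillow format and HTTP content type from the blob's file extension."""
        extension = image_blob.name.rsplit(".", 1)[-1].lower()
        image_format = {"jpg": "JPEG", "jpeg": "JPEG", "png": "PNG", "webp": "WEBP"}.get(
            extension, "JPEG"
        )
        return {"format": image_format, "content-type": f"image/{image_format.lower()}"}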
Example #2
    def update_post(self, post_id: str, body: dict,
                    slug: str) -> Optional[dict]:
        """
        Update post by ID.

        :param str post_id: Ghost post ID
        :param dict body: Payload containing post updates.
        :param str slug: Human-readable unique identifier.

        :returns: Optional[dict]
        """
        try:
            resp = requests.put(
                f"{self.admin_api_url}/posts/{post_id}/",
                json=body,
                headers={
                    "Authorization": self.session_token,
                    "Content-Type": "application/json",
                },
            )
            if resp.status_code == 200:
                LOGGER.success(f"Successfully updated post `{slug}`")
                return resp.json()
        except HTTPError as e:
            LOGGER.error(f"HTTPError while updating Ghost post: {e}")
        except Exception as e:
            LOGGER.error(f"Unexpected error while updating Ghost post: {e}")
Example #3
    def get_author(self, author_id: int) -> Optional[List[dict]]:
        """
        Fetch single Ghost author.

        :param int author_id: ID of Ghost author to fetch.

        :returns: Optional[List[dict]]
        """
        try:
            params = {"key": self.content_api_key}
            headers = {
                "Content-Type": "application/json",
            }
            resp = requests.get(
                f"{self.content_api_url}/authors/{author_id}/",
                params=params,
                headers=headers,
            )
            if resp.status_code == 200:
                return resp.json()["authors"]
        except HTTPError as e:
            LOGGER.error(
                f"Failed to fetch Ghost authorID={author_id}: {e.response.content}"
            )
        except KeyError as e:
            LOGGER.error(
                f"KeyError while fetching Ghost authorID={author_id}: {e}")
Example #4
    def get_json_backup(self) -> Optional[dict]:
        """
        Download JSON snapshot of Ghost database.

        :returns: Optional[dict]
        """
        self._https_session()
        headers = {
            "Authorization": self.session_token,
            "accept": "text/html,application/xhtml+xml,application/xml;\
                                q=0.9,image/webp,image/apng,*/*;\
                                q=0.8,application/signed-exchange;\
                                v=b3;q=0.9",
            "accept-encoding": "gzip, deflate, br",
            "Origin": "hackersandslackers.com",
            "Authority": "hackersandslackers.com",
        }
        endpoint = f"{self.admin_api_url}/db/"
        try:
            resp = requests.get(endpoint, headers=headers)
            return resp.json()
        except HTTPError as e:
            LOGGER.error(f"HTTPError occurred while fetching JSON backup: {e}")
        except Exception as e:
            LOGGER.error(
                f"Unexpected error occurred while fetching JSON backup: {e}")
Example #5
def welcome_newsletter_subscriber(subscriber: Member) -> Optional[SubscriptionWelcomeEmail]:
    """
    Send welcome email to newsletter subscriber.

    :param Member subscriber: New Ghost member with newsletter subscription.

    :returns: Optional[SubscriptionWelcomeEmail]
    """
    body = {
        "from": settings.MAILGUN_FROM_SENDER,
        "to": [subscriber.email],
        "subject": settings.MAILGUN_SUBJECT_LINE,
        "template": settings.MAILGUN_NEWSLETTER_TEMPLATE,
        "h:X-Mailgun-Variables": {
            "name": subscriber.name
        },
        "o:tracking": True,
    }
    response = mailgun.send_email(body)
    if response.status_code != 200:
        LOGGER.error(f"Mailgun failed to send welcome email: {body}")
        return None
    return SubscriptionWelcomeEmail(
        from_email=settings.MAILGUN_PERSONAL_EMAIL,
        to_email=subscriber.email,
        subject=settings.MAILGUN_SUBJECT_LINE,
        template=settings.MAILGUN_NEWSLETTER_TEMPLATE,
    )
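The `mailgun.send_email` helper used above is not part of this listing. A minimal sketch based on Mailgun's public HTTP API, assuming `MAILGUN_SENDER_DOMAIN` and `MAILGUN_API_KEY` settings (both names are assumptions mirroring the settings object above):

import requests


def send_email(body: dict) -> requests.Response:
    """Assumed helper: POST a message payload to Mailgun's `/messages` endpoint."""
    # `settings` refers to the same configuration object used above; template
    # variables may need to be JSON-encoded depending on how they are passed.
    return requests.post(
        f"https://api.mailgun.net/v3/{settings.MAILGUN_SENDER_DOMAIN}/messages",
        auth=("api", settings.MAILGUN_API_KEY),
        data=body,
    )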
Example #6
 def _get_ssh_key(self):
     """ Fetch locally stored SSH key."""
     try:
         self.ssh_key = RSAKey.from_private_key_file(self.ssh_key_filepath)
         LOGGER.info(f"Found SSH key at self {self.ssh_key_filepath}")
         return self.ssh_key
     except SSHException as e:
         LOGGER.error(e)
Example #7
 def _upload_ssh_key(self):
     """Copy the local public SSH key to the remote host via ssh-copy-id."""
     try:
         system(
             f"ssh-copy-id -i {self.ssh_key_filepath}.pub {self.user}@{self.host}>/dev/null 2>&1"
         )
         LOGGER.info(f"{self.ssh_key_filepath} uploaded to {self.host}")
     except FileNotFoundError as error:
         LOGGER.error(error)
Example #8
 def _upload_ssh_key(self):
     """Copy the local public SSH key to the remote host via ssh-copy-id."""
     try:
         system(
             f"ssh-copy-id -i {self.ssh_key_filepath}.pub {self.user}@{self.host}>/dev/null 2>&1"
         )
         LOGGER.info(f"{self.ssh_key_filepath} uploaded to {self.host}")
     except FileNotFoundError as error:
         LOGGER.error(error)
     except Exception as e:
         LOGGER.error(f"Unexpected error occurred: {e}")
         raise e
Example #9
    def get_pages(self) -> Optional[dict]:
        """
        Fetch Ghost pages.

        :returns: Optional[dict]
        """
        try:
            headers = {
                "Authorization": f"Ghost {self.session_token}",
                "Content-Type": "application/json",
            }
            endpoint = f"{self.admin_api_url}/pages"
            resp = requests.get(endpoint, headers=headers)
            if resp.json().get("errors") is not None:
                LOGGER.error(
                    f"Failed to fetch Ghost pages: {resp.json().get('errors')[0]['message']}"
                )
                return None
            post = resp.json()["pages"]
            LOGGER.info(f"Fetched Ghost pages` ({endpoint})")
            return post
        except HTTPError as e:
            LOGGER.error(f"Ghost HTTPError while fetching pages: {e}")
        except KeyError as e:
            LOGGER.error(f"KeyError for `{e}` occurred while fetching pages")
        except Exception as e:
            LOGGER.error(
                f"Unexpected error occurred while fetching pages: {e}")
Example #10
def run_experiment(predicted_results, settings, limit, predicted_tag_count):
    """ Run the experiment with configuration """
    tags_info = settings["tags_info"]
    sample_count = config.CLASSIFIER["sample_count"]
    # predicted_tag_count = settings["predicted_tag_count"]
    LOGGER.debug("Sample count: %d" % sample_count)
    LOGGER.debug("Max predicted tag count: %d" % predicted_tag_count)

    get_similarity = settings["get_similarity"]

    # run the test
    for index, predict_result in enumerate(predicted_results):
        if index > limit:
            break
        try:
            LOGGER.debug("%d/%d sample" % (index, sample_count))
            original, scored_predicted = predict_result
            # TODO: hard-coded slice size below.
            if settings["should_rerank"]:
                scored_predicted = rerank_tags(scored_predicted[:30], get_similarity)
            scored_predicted = scored_predicted[:predicted_tag_count]

            predicted = [t for t, s in scored_predicted]

            # TODO: a problem may arise here.
            predicted = predicted[:predicted_tag_count]

            for name, evaluator in settings["evaluators"].items():
                evaluation = evaluator.update(orignal, predicted)
                log_message = "\nOriginal Result: %s\n"\
                              "Predicted Result: %s\n"\
                              "Evaluator Type: %s\n"\
                              "\tPrecision: %f\n"\
                              "\tRecall: %f\n" % (
                                    str(to_named_tags(orignal, tags_info)),
                                    str(to_named_tags(predicted, tags_info)),
                                        name, evaluation[0], evaluation[1])
                LOGGER.debug(log_message)

        except Exception as e:
            LOGGER.error("Error occurs %s" % (str(e)))

    evaluations = []
    for name, evaluator in settings["evaluators"].items():
        evaluation = evaluator.get_evaluation()
        LOGGER.info("%s Precision: %f\t Recall: %f" % (name, evaluation[0], evaluation[1]))
        evaluations.append(evaluation)
    return evaluations
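The evaluator objects in `settings["evaluators"]` are not defined in this listing; the interface inferred from the calls above is `update(original, predicted)` and `get_evaluation()`, each returning a `(precision, recall)` pair. A minimal compatible sketch, purely for illustration:

class PrecisionRecallEvaluator:
    """Accumulates set-overlap precision and recall across samples (inferred interface)."""

    def __init__(self):
        self.hits = 0
        self.predicted_total = 0
        self.original_total = 0

    def update(self, original, predicted):
        hits = len(set(original) & set(predicted))
        self.hits += hits
        self.predicted_total += len(predicted)
        self.original_total += len(original)
        precision = hits / len(predicted) if predicted else 0.0
        recall = hits / len(original) if original else 0.0
        return precision, recall

    def get_evaluation(self):
        precision = self.hits / self.predicted_total if self.predicted_total else 0.0
        recall = self.hits / self.original_total if self.original_total else 0.0
        return precision, recall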
Example #11
def fetch_top_visited_urls(time_period: str, limit: int = 20) -> Optional[List[dict]]:
    """
    Fetch top visited URLs from Plausible.

    :param str time_period: Period of 12mo, 6mo, month, 30d, 7d, or day.
    :param int limit: Maximum number of results to be returned.

    :returns: Optional[List[dict]]
    """
    try:
        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {settings.PLAUSIBLE_API_TOKEN}",
        }
        params = {
            "site_id": "hackersandslackers.com",
            "period": time_period,
            "property": "event:page",
            "limit": limit,
        }
        resp = requests.get(
            settings.PLAUSIBLE_BREAKDOWN_ENDPOINT,
            params=params,
            headers=headers,
        )
        if resp.status_code == 200:
            results_list = resp.json().get("results")
            ghost_pages = [f"/{page['slug']}/" for page in ghost.get_pages()]
            results_list = [
                result
                for result in results_list
                if "/tag" not in result["page"]
                and "/page" not in result["page"]
                and "/author" not in result["page"]
                and result["page"] not in ghost_pages
            ]
            return [
                enrich_url_with_post_data(result) for result in results_list
                if result is not None
            ]
        return []
    except RequestException as e:
        LOGGER.error(f"RequestException when fetching Plausible top URLs: {e}")
    except Exception as e:
        LOGGER.error(
            f"Unexpected Exception when fetching Plausible top URLs: {e}")
Example #12
 def connection(self):
     """Open connection to remote host. """
     try:
         client = SSHClient()
         client.load_system_host_keys()
         client.set_missing_host_key_policy(AutoAddPolicy())
         client.connect(
             self.host,
             username=self.user,
             password=self.password,
             key_filename=self.ssh_key_filepath,
             timeout=5000,
         )
         return client
     except AuthenticationException as e:
         LOGGER.error(
             f"Authentication failed: did you remember to create an SSH key? {e}"
         )
         raise e
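A hedged usage sketch tying the paramiko methods above together; the `RemoteClient` class name and its constructor arguments are assumptions about the surrounding class, not taken from the source:

client = RemoteClient(                 # assumed class name and constructor
    host="example.com",
    user="deploy",
    password=None,
    ssh_key_filepath="~/.ssh/id_rsa",
)
connection = client.connection()
if connection:
    # paramiko's exec_command returns (stdin, stdout, stderr) file-like objects.
    stdin, stdout, stderr = connection.exec_command("uptime")
    print(stdout.read().decode())
    connection.close()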
Example #13
    def rebuild_netlify_site(self) -> Tuple[str, int]:
        """
        Trigger Netlify site rebuild.

        :returns: Tuple[str, int]
        """
        try:
            resp = requests.post(self.netlify_build_url)
            LOGGER.info(
                f"Triggered Netlify build with status code {resp.status_code}."
            )
            return (
                f"Triggered Netlify build with status code {resp.status_code}.",
                resp.status_code,
            )
        except HTTPError as e:
            LOGGER.error(
                f"Failed to rebuild Netlify site: {e.response.content}")
            return e.response.content, e.response.status_code
Example #14
def persist_algolia_searches(table_name: str,
                             timeframe: int) -> List[Optional[dict]]:
    """
    Fetch recent Algolia search queries and persist them to the database.

    :param str table_name: DB table name.
    :param int timeframe: Number of days for which to fetch recent search analytics.

    :returns: List[Optional[dict]]
    """
    try:
        headers = {
            "x-algolia-application-id": settings.ALGOLIA_APP_ID,
            "x-algolia-api-key": settings.ALGOLIA_API_KEY,
        }
        params = {
            "index": "hackers_posts",
            "limit": 999999,
            "orderBy": "searchCount",
            "direction": "desc",
            "startDate": get_start_date_range(timeframe),
        }
        resp = requests.get(settings.ALGOLIA_SEARCHES_ENDPOINT,
                            headers=headers,
                            params=params)
        if resp.status_code == 200 and resp.json().get("searches") is not None:
            search_queries = resp.json().get("searches")
            search_queries = filter_search_queries(search_queries)
            if search_queries is not None:
                import_algolia_search_queries(search_queries, table_name)
                return search_queries
            return []
        return []
    except HTTPError as e:
        LOGGER.error(
            f"HTTPError while fetching Algolia searches for `{timeframe}`: {e}"
        )
    except Exception as e:
        LOGGER.error(
            f"Unexpected error while fetching Algolia searches for `{timeframe}`: {e}"
        )
Example #15
 def _retry(self, f):
     """ The query is executed multiple times to overcome any operational
         error. Database error is logged immediately."""
     count = 0
     while True:
         try:
             return f()
         # http://initd.org/psycopg/docs/module.html#psycopg2.DatabaseError
         # handle operational error - memory allocation, unexpected disconnect
         except psycopg2.OperationalError as oe:
             count += 1
             if count < self._max_retries:
                 LOGGER.warn("Transient Error Received %s ", oe)
                 time.sleep(self._retry_period)
             else:
                 LOGGER.error("Unrecoverable Error %s", oe)
                 raise oe
         # other database errors - integrity, internal, programming error etc
         except psycopg2.DatabaseError as de:
             LOGGER.error("Database Error %s", de)
             raise de
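A hedged usage sketch for `_retry` above: the callable passed in should perform the whole query so that a retry re-executes it from the start. The `self._connection` attribute and the query itself are illustrative assumptions:

 def fetch_user_count(self):
     def _query():
         # A fresh cursor per attempt so a retry starts clean.
         with self._connection.cursor() as cursor:
             cursor.execute("SELECT COUNT(*) FROM users;")
             return cursor.fetchone()[0]

     return self._retry(_query)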
Example #16
    def create_member(self, body: dict) -> Tuple[str, int]:
        """
        Create new Ghost member account used to receive newsletters.

        :param dict body: Payload containing member information.

        :returns: Tuple[str, int]
        """
        try:
            resp = requests.post(
                f"{self.admin_api_url}/members/",
                json=body,
                headers={"Authorization": self.session_token},
            )
            response = f'Successfully created new Ghost member `{body.get("email")}`: {resp.json()}.'
            LOGGER.success(response)
            return response, resp.status_code
        except HTTPError as e:
            LOGGER.error(
                f"Failed to create Ghost member: {e.response.content}")
            return e.response.content, e.response.status_code
Example #17
    def get_all_posts(self) -> Optional[List[str]]:
        """
        Fetch all Ghost post URLs.

        :returns: Optional[List[str]]
        """
        try:
            headers = {
                "Authorization": f"Ghost {self.session_token}",
                "Content-Type": "application/json",
            }
            params = {
                "limit": "200",
            }
            endpoint = f"{self.admin_api_url}/posts"
            resp = requests.get(endpoint, headers=headers, params=params)
            if resp.status_code == 200:
                posts = resp.json()["posts"]
                return [
                    post["url"].replace(".app", ".com") for post in posts
                    if post["status"] == "published"
                ]
        except HTTPError as e:
            LOGGER.error(f"Ghost HTTPError while fetching posts: {e}")
        except KeyError as e:
            LOGGER.error(f"KeyError for `{e}` occurred while fetching posts")
        except Exception as e:
            LOGGER.error(
                f"Unexpected error occurred while fetching posts: {e}")
Example #18
def remove_comment_upvote(db: Session, user_id: str, comment_id: int):
    """
    Delete a record of a user's upvote for a given comment.

    :param Session db: ORM database session.
    :param str user_id: Primary key for account record.
    :param int comment_id: Unique ID of comment user attempted to upvote.

    :returns: CommentUpvote
    """
    try:
        # Look up the persisted upvote record before deleting it.
        upvote = (
            db.query(CommentUpvote)
            .filter(
                CommentUpvote.user_id == user_id,
                CommentUpvote.comment_id == comment_id,
            )
            .first()
        )
        if upvote:
            db.delete(upvote)
            db.commit()
            LOGGER.success(
                f"Removed upvote for comment `{comment_id}` from user `{user_id}`."
            )
        return upvote
    except IntegrityError as e:
        LOGGER.error(
            f"IntegrityError while attempting to remove comment upvote: {e}")
    except SQLAlchemyError as e:
        LOGGER.error(
            f"SQLAlchemyError while attempting to remove comment upvote: {e}")
    except Exception as e:
        LOGGER.error(
            f"Unexpected error while attempting to remove comment upvote: {e}")
Example #19
def create_donation(db: Session, donation: NewDonation) -> Donation:
    """
    Create new BuyMeACoffee donation record.

    :param Session db: ORM database session.
    :param NewDonation donation: Donation schema object.

    :returns: Donation
    """
    try:
        db_item = Donation(
            coffee_id=donation.coffee_id,
            email=donation.email,
            name=donation.name,
            count=donation.count,
            message=donation.message,
            link=donation.link,
            created_at=datetime.now(),
        )
        db.add(db_item)
        db.commit()
        LOGGER.success(
            f"Successfully received donation: `{donation.count}` coffees from `{donation.name}`."
        )
        return db_item
    except IntegrityError as e:
        LOGGER.error(f"IntegrityError while creating donation record: {e}")
    except SQLAlchemyError as e:
        LOGGER.error(f"SQLAlchemyError while creating donation record: {e}")
    except Exception as e:
        LOGGER.error(f"Unexpected error while creating donation record: {e}")
Example #20
    def get_all_authors(self) -> Optional[List[dict]]:
        """
        Fetch all Ghost authors.

        :returns: Optional[List[dict]]
        """
        try:
            params = {"key": self.content_api_key}
            headers = {
                "Authorization": f"Ghost {self.session_token}",
                "Content-Type": "application/json",
            }
            resp = requests.get(f"{self.admin_api_url}/users",
                                params=params,
                                headers=headers)
            if resp.status_code == 200:
                return resp.json().get("users")
        except HTTPError as e:
            LOGGER.error(
                f"Failed to fetch Ghost authors: {e.response.content}")
        except KeyError as e:
            LOGGER.error(f"KeyError while fetching Ghost authors: {e}")
Example #21
    def get_post(self, post_id: str) -> Optional[dict]:
        """
        Fetch Ghost post by ID.

        :param str post_id: ID of post to fetch.

        :returns: Optional[dict]
        """
        try:
            headers = {
                "Authorization": f"Ghost {self.session_token}",
                "Content-Type": "application/json",
            }
            params = {
                "include": "authors",
                "formats": "mobiledoc,html",
            }
            endpoint = f"{self.admin_api_url}/posts/{post_id}/"
            resp = requests.get(endpoint, headers=headers, params=params)
            if resp.json().get("errors") is not None:
                LOGGER.error(
                    f"Failed to fetch post `{post_id}`: {resp.json().get('errors')[0]['message']}"
                )
                return None
            elif resp.json().get("posts"):
                post = resp.json()["posts"][0]
                LOGGER.info(
                    f"Fetched Ghost post `{post['slug']}` ({endpoint})")
                return post
            return None
        except HTTPError as e:
            LOGGER.error(
                f"Ghost HTTPError while fetching post `{post_id}`: {e}")
        except KeyError as e:
            LOGGER.error(
                f"KeyError for `{e}` occurred while fetching post `{post_id}`")
        except Exception as e:
            LOGGER.error(
                f"Unexpected error occurred while fetching post `{post_id}`: {e}"
            )
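None of the Ghost examples show how `self.session_token` is produced. Ghost's Admin API documentation describes a short-lived JWT signed with an Admin API key, roughly as sketched below; the key value is a placeholder, and the `aud` claim differs between Ghost versions (for example `/v3/admin/` on older installs):

import jwt  # PyJWT
from datetime import datetime as date

admin_api_key = "<key-id>:<key-secret>"  # placeholder Admin API key from Ghost
key_id, secret = admin_api_key.split(":")
iat = int(date.now().timestamp())
session_token = jwt.encode(
    {"iat": iat, "exp": iat + 5 * 60, "aud": "/admin/"},
    bytes.fromhex(secret),
    algorithm="HS256",
    headers={"alg": "HS256", "typ": "JWT", "kid": key_id},
)

Ghost expects the header in the form `Authorization: Ghost <token>`, which is why several of the examples above prefix the token with `Ghost `.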
Example #22
async def fetch_decoded_url(session, media_source: Dict[str, str], count: int,
                            total_count: int,
                            media_type: str) -> Dict[str, str]:
    """
    Fetch URL and create a dictionary representing the resource.

    :param session: Async HTTP requests session.
    :type session: ClientSession
    :param media_source: Single resource to fetch.
    :type media_source: Dict[str, str]
    :param count: Current URL count.
    :type count: int
    :param total_count: Total URL count.
    :type total_count: int
    :param media_type: Type of media urls to generate.
    :type media_type: str
    :returns: Dict[str, str]
    """
    try:
        async with session.post(media_source["url"]) as response:
            decoded_url = await response.text()
            if decoded_url == "":
                raise Exception(
                    "Decoded URL returned an empty string; the export may have expired."
                )
            resource = {"url": decoded_url, "date": media_source["date"]}
            LOGGER.info(
                f"Decoded {count} of {total_count} {media_type}: {decoded_url}"
            )
            return resource
    except InvalidURL as e:
        LOGGER.error(
            f"Unable to decode invalid URL `{media_source['url']}`: {e}")
        raise e
    except ClientError as e:
        LOGGER.error(f"Error while decoding URL `{media_source['url']}`: {e}")
        raise e
    except Exception as e:
        LOGGER.error(f"Unexpected error: {e}")
        raise e
Example #23
async def fetch_snapchat_memory(
    session: ClientSession,
    memory: Dict[str, str],
    count: int,
    total_count: int,
    media_type: str,
):
    """
    Download single media file and write to local directory.

    :param session: Async HTTP requests session.
    :type session: ClientSession
    :param memory: Resource URL to fetch with date as filename.
    :type memory: Dict[str, str]
    :param count: Current URL count.
    :type count: int
    :param total_count: Total URL count.
    :type total_count: int
    :param media_type: Type of media to fetch (photo or video).
    :type media_type: str
    """
    filepath = f"{MEDIA_EXPORT_FILEPATH}/{media_type}/{memory['date']}{'.jpg' if media_type == 'photos' else '.mp4'}"
    try:
        async with session.get(memory["url"]) as response:
            if response.status == 200:
                data = await response.read()
                async with aiofiles.open(filepath, mode="wb+") as f:
                    await f.write(data)
                    LOGGER.info(
                        f"Fetched {media_type} {count} of {total_count}: {memory['date']}"
                    )
    except InvalidURL as e:
        LOGGER.error(f"Unable to fetch invalid URL `{memory['url']}`: {e}")
    except ClientError as e:
        LOGGER.error(f"Error while fetching URL `{memory['url']}`: {e}")
    except Exception as e:
        LOGGER.error(
            f"Unexpected error while fetching URL `{memory['url']}`: {e}")
Example #24
def create_comment(db: Session, comment: NewComment,
                   user_role: Optional[str]) -> Comment:
    """
    Create new user-submitted comment.

    :param Session db: ORM database session.
    :param NewComment comment: User comment object.
    :param Optional[str] user_role: Permissions of the comment author, if any.

    :returns: Comment
    """
    try:
        LOGGER.info(
            f"Creating comment from {comment.user_email} on {comment.post_slug}..."
        )
        new_comment = Comment(
            user_id=comment.user_id,
            user_name=comment.user_name,
            user_avatar=comment.user_avatar,
            user_email=comment.user_email,
            user_role=user_role,
            body=comment.body,
            post_slug=comment.post_slug,
            post_id=comment.post_id,
            created_at=datetime.now(),
        )
        db.add(new_comment)
        db.commit()
        LOGGER.success(
            f"New comment created by user `{new_comment.user_name}` on post `{new_comment.post_slug}`"
        )
        return new_comment
    except IntegrityError as e:
        LOGGER.error(f"IntegrityError while creating comment: {e}")
    except SQLAlchemyError as e:
        LOGGER.error(f"SQLAlchemyError while creating comment: {e}")
    except Exception as e:
        LOGGER.error(f"Unexpected error while creating comment: {e}")
Example #25
def create_account(db: Session, account: NetlifyAccount) -> NetlifyAccount:
    """
    Create new account record sourced from Netlify.

    :param Session db: ORM database session.
    :param NetlifyAccount account: User account registered via Netlify.

    :returns: NetlifyAccount
    """
    try:
        new_account = Account(
            id=account.id,
            full_name=account.user_metadata.full_name,
            avatar_url=account.user_metadata.avatar_url,
            email=account.email,
            role=account.role,
            provider=account.app_metadata.provider,
            created_at=datetime.now(),
            updated_at=datetime.now(),
        )
        db.add(new_account)
        db.commit()
        LOGGER.success(
            f"New Netlify account created: `{account.user_metadata.full_name}`"
        )
        return account
    except IntegrityError as e:
        LOGGER.error(
            f"IntegrityError while creating Netlify user account: {e}")
    except SQLAlchemyError as e:
        LOGGER.error(
            f"SQLAlchemyError while creating Netlify user account: {e}")
    except Exception as e:
        LOGGER.error(
            f"Unexpected error while creating Netlify user account: {e}")
Example #26
                    if request and response:
                        if request.method == 'GET' and '?' in request.url:
                            # filter static URL
                            if not static_reg.search(url):
                                burp_traffic.append((request, response))
                                traffic_queue.put((request, response))
                        elif request.method == 'POST' and request.body:
                            content_type = request.get_header('Content-Type')
                            # save multipart
                            if content_type and 'multipart/form-data; boundary=' in content_type:
                                MULTIPART.append((request, response))
                            else:
                                burp_traffic.append((request, response))
                                traffic_queue.put((request, response))
         else:
             LOGGER.error('%s does not exist!' % self.burp)

    @staticmethod
    def get_traffic_path(id):
        traffic_path = os.path.join(TRAFFIC_DIR, id + '.traffic')
        return traffic_path

    def get_render_task(self, url_list):
        render_task = []
        i = len(url_list)
        k = 0
        if i > self.process:
            j = i / self.process
            for i in range(self.process):
                if i == self.process - 1:
                    urls = url_list[k:]
Example #27
File: start.py  Project: zsq1104/NoXss
     if file:
         with open(file) as f:
             scope_url = f.readline().strip()
     elif url:
         scope_url = url
     domain = get_domain_from_url(scope_url)
     if is_ip(scope_url):
         save_cookie_ip(args.cookie, domain)
     else:
         from cookie import save_cookie
         save_cookie(args.cookie, domain)
 if url or file or burp or args.id or args.filter:
     if args.id:
         id = args.id
         if not Engine.is_scanned(id):
             LOGGER.error('Task %s not found, exiting.' % id)
             exit(0)
     else:
         id = gen_id()
     engine = Engine(id=id,
                     url=url,
                     file=file,
                     burp=burp,
                     process=num,
                     browser=browser,
                     coroutine=coroutine,
                     filter=filter)
     try:
         result = engine.start()
     except KeyboardInterrupt as e:
         LOGGER.info(e)
Example #28
 def start(self):
     # check if traffic_path exists.
     if self.is_scanned(self.id):
         choice = raw_input(
             'Task %s has been scanned, do you want to rescan?(Y/N)' %
             self.id)
         if choice == 'Y' or choice == 'y' or re.search(
                 'yes', choice, re.I):
             self.put_queue()
             self.send_end_sig()
         elif choice == 'N' or choice == 'n' or re.search(
                 'no', choice, re.I):
             exit(0)
         else:
             LOGGER.error('Incorrect choice.')
             exit(0)
     elif self.burp:
         self.put_burp_to_trafficqueue()
         self.send_end_sig()
         # save burp traffic
         if burp_traffic:
             self.save_traffic(burp_traffic, self.id)
     else:
         if self.url != '':
             url_list = [self.url]
         elif self.file:
             if os.path.exists(self.file):
                 with open(self.file) as f:
                     url_list = []
                     temp = [url.strip() for url in f.read().split('\n')]
                     for i in temp:
                         if i:
                             url_list.append(i)
                     if not self.file.endswith('.slice'):
                         url_list = self.deduplicate(url_list)
                     if self.filter:
                         exit(0)
                     # test 10000 urls
                     # url_list = url_list[:100]
             else:
                 LOGGER.error('%s not exists!' % self.file)
                 exit(0)
         # decode
         url_list = self.urldecode(url_list)
         if self.browser:
             # render
             LOGGER.info('Start to request url with %s.' % self.browser)
             render_task = self.get_render_task(url_list)
             for i in render_task:
                 i.start()
             for i in render_task:
                 i.join()
             self.save_traffic(traffic_list, self.id)
             # put traffic tp queue
             for i in range(len(traffic_list)):
                 request = traffic_list[i][0]
                 response = traffic_list[i][1]
                 traffic_queue.put((request, response))
             self.send_end_sig()
         else:
             # traffic genetator
             LOGGER.info('Start to request url with urllib2.')
             traffic_maker = Traffic_generator(self.id, url_list,
                                               self.coroutine)
             traffic_maker.start()
             traffic_maker.join()
             self.put_queue()
             self.send_end_sig()
     # scan
     task = [Scan() for i in range(self.process)]
     for i in task:
         i.start()
     for i in task:
         i.join()
     # save reflect for analyzing
     self.save_reflect()
     if case_list:
         if self.browser:
             # verify
             Verify.verify_with_browser(self.browser, case_list,
                                        self.process)
             self.save_analysis()
             return openner_result
         else:
             # verify,async
             verify_result = Verify.verify_async(case_list, self.coroutine)
             self.save_analysis()
             return verify_result
Example #29
 def put_burp_to_trafficqueue(self):
     """
     parse xxx.xml from burpsuite proxy.
     :return:
     """
     if os.path.exists(self.burp):
         import base64
         from xml.etree import cElementTree as ET
         from model import HttpRequest, HttpResponse
         with open(self.burp) as f:
             xmlstr = f.read()
         try:
             root = ET.fromstring(xmlstr)
         except ET.ParseError as e:
             LOGGER.error('Parse burpsuite data error: ' + str(e))
             exit(0)
         for child in root:
             if child.tag == 'item':
                 req_headers = {}
                 resp_headers = {}
                 code = ''
                 request, response = '', ''
                 for child2 in child:
                     if child2.tag == 'method':
                         method = child2.text
                     if child2.tag == 'url':
                         url = child2.text
                         # static url in burp
                         if static_reg.search(url):
                             break
                     if child2.tag == 'status':
                         code = child2.text
                     if child2.tag == 'request':
                         req_text = child2.text
                         # base64 decode
                         req_text = base64.b64decode(req_text)
                         headers_list = req_text.split(
                             '\r\n\r\n', 1)[0].split('\r\n')[1:]
                         for header in headers_list:
                             try:
                                 header_key, header_value = header.split(
                                     ': ')[0], header.split(': ')[1]
                                 if header_key not in req_headers.keys():
                                     req_headers[header_key] = header_value
                             # split header error
                             except IndexError as e:
                                 LOGGER.warn(e)
                         body = req_text.split('\r\n\r\n', 1)[1]
                         request = HttpRequest(method, url, req_headers,
                                               body)
                     if child2.tag == 'response':
                         resp_text = child2.text
                         # if response is not None
                         if resp_text:
                             # base64 decode
                             resp_text = base64.b64decode(resp_text)
                             reason = resp_text.split('\r\n')[0]
                             headers_list = resp_text.split(
                                 '\r\n\r\n', 1)[0].split('\r\n')[1:]
                             for header in headers_list:
                                 header_key, header_value = header.split(
                                     ': ')[0], header.split(': ')[1]
                                 if header_key not in resp_headers.keys():
                                     resp_headers[header_key] = header_value
                             data = resp_text.split('\r\n\r\n', 1)[1]
                             response = HttpResponse(
                                 code, reason, resp_headers, data)
                 if request and response:
                     if request.method == 'GET' and '?' in request.url:
                         # filter static URL
                         if not static_reg.search(url):
                             burp_traffic.append((request, response))
                             traffic_queue.put((request, response))
                     elif request.method == 'POST' and request.body:
                         content_type = request.get_header('Content-Type')
                         # save multipart
                         if content_type and 'multipart/form-data; boundary=' in content_type:
                             MULTIPART.append((request, response))
                         else:
                             burp_traffic.append((request, response))
                             traffic_queue.put((request, response))
Example #30
from os import path

import simplejson as json

from log import LOGGER

# Set project base path
BASE_DIR = path.abspath(path.dirname(__file__))

# Location of Snapchat data dump from https://accounts.snapchat.com/accounts/welcome
SNAPCHAT_DATA_EXPORT = f"{BASE_DIR}/export"
SNAPCHAT_MEMORIES_EXPORT = f"{SNAPCHAT_DATA_EXPORT}/json/memories_history.json"
if path.exists(SNAPCHAT_MEMORIES_EXPORT):
    with open(SNAPCHAT_MEMORIES_EXPORT) as f:
        SNAPCHAT_MEMORIES_JSON = json.loads(f.read())["Saved Media"]
else:
    LOGGER.error(f"Snapchat data not found in `/export` folder.")
    raise Exception(f"Snapchat data not found in `/export` folder.")

# Check if media URLs have been decoded and saved from a previous run
SNAPCHAT_MEDIA_URLS = {
    "videos": None,
    "photos": None,
}

for k, v in SNAPCHAT_MEDIA_URLS.items():
    if path.exists(f"{BASE_DIR}/urls/{k}.json"):
        decoded_urls = json.loads(open(f"{BASE_DIR}/urls/{k}.json").read())
        if decoded_urls is not None and len(decoded_urls) > 0:
            SNAPCHAT_MEDIA_URLS[k] = decoded_urls