Example #1
0
 def __init__(self, input_file, output_file):
     """Remember the I/O file paths and set up the scraping helpers."""
     # Paths supplied by the caller.
     self.input_file = input_file
     self.output_file = output_file
     # Collaborators used throughout the scraper.
     self.logger = LogManager(__name__)
     self.spider = Spider()
     self.regex = Regex()
     self.utils = Utils()
Example #2
0
    def test_no_details_login(self):
        """Submitting blank credentials must show the login error banner."""
        page = LoginPage(self.driver)
        page.login('', '')

        Utils(self.driver).wait_for_element(page.errorMessage)
        # The banner animates in after the element appears; a short extra
        # sleep is a known (imperfect) workaround for the animation delay.
        sleep(1)
        expected = 'We didn\'t recognize that email and/or password.'
        assert expected in page.get_error_message_text()
Example #3
0
 def __init__(self, bot, client, torrentsQueue: Queue, ytQueue, megaQueue):
     """Store bot/client handles, the three work queues and shared helpers."""
     self.bot = bot
     self.client = client
     # Work queues feeding the download workers.
     self.torrentsQueue = torrentsQueue
     self.ytQueue = ytQueue
     self.megaQueue = megaQueue
     # Shared utility and database helpers.
     self.utils = Utils()
     self.db = DB()
Example #4
0
 def __init__(self, bot, client, torrentsQueue: Queue, megaQueue: Queue, ytQueue: Queue):
     """Keep references to the bot, client, the work queues and helpers."""
     self.bot = bot
     self.client = client
     # Download work queues.
     self.ytQueue = ytQueue
     self.megaQueue = megaQueue
     self.torrentsQueue = torrentsQueue
     # Shared utility and database helpers.
     self.utils = Utils()
     self.db = DB()
     # Dedicated logger for the admin conversation flow.
     self.logger = logging.getLogger(' Admin Conv ')
Example #5
0
    def dumpInformation(self):
        """Fetch the raw source of every Sonar component, showing progress."""
        dump = Dump(self.input.url[0], self.sReq, self.input.output)
        self.sonar.components += dump.getComponents(self.sonar.projects)

        total = len(self.sonar.components)
        for idx, component in enumerate(self.sonar.components, start=1):
            dump.getSourceRaw(component)
            Utils().printProgressBar(idx, total)
        print("[+] SONAR DUMP: COMPLETED")
Example #6
0
 def __init__(self, vmc_file, user_agent, is_file=False):
     """Prepare VMC validation state for the given file or link."""
     self.STORAGE_CERT_DIR = Config.STORAGE_CERT_DIR
     self.Utils = Utils()
     self.vmc_file = vmc_file
     self.is_file = is_file
     self.user_agent = user_agent
     # Result skeleton returned to callers; errors accumulate in "errors".
     self.vmc_response = {
         "status": False,
         "errors": [],
         "vmc_link": vmc_file,
     }
Example #7
0
 def __init__(self, svg_file, user_agent, is_file=False):
     """Prepare SVG validation state for the given file or link."""
     # Configured locations for the RNG schema and stored SVG files.
     self.RNG_SCHEMA_FILE = Config.RNG_SCHEMA_FILE
     self.STORAGE_SVG_DIR = Config.STORAGE_SVG_DIR
     self.Utils = Utils()
     self.svg_file = svg_file
     self.is_file = is_file
     self.user_agent = user_agent
     # Result skeleton returned to callers; errors accumulate in "errors".
     self.svg_response = {
         "status": False,
         "errors": [],
         "svg_link": svg_file,
     }
Example #8
0
	def __init__(self,domain):
		"""Normalise *domain* and initialise DMARC-check configuration."""
		# Scan configuration; None means "use checkdmarc defaults".
		self.approved_nameservers = None
		self.approved_mx_hostnames = None
		self.nameservers = None
		self.skip_tls = True
		self.include_dmarc_tag_descriptions = False
		self.timeout = 2.0
		self.parked = False
		# self.wait=0.0
		self.Utils = Utils()
		self.domain_results = {}
		# Strip trailing dots/newlines and lowercase before deriving the base domain.
		self.domain = domain.rstrip(".\r\n").strip().lower()
		self.base_domain = checkdmarc.get_base_domain(self.domain)
Example #9
0
 def __init__(self):
     """Initialise scraper collaborators and the CSV column layout."""
     self.logger = LogManager(__name__)
     self.spider = Spider()
     self.browser = BrowserUtil()
     self.regex = Regex()
     self.utils = Utils()
     # Column layout of the output CSV.
     self.csvHeader = [
         'Category', 'Sub Category 1', 'Sub Category 2',
         'Product Code', 'Product Name', 'Product ShortName',
         'Product Description', 'List Price', 'Vendor Price',
         'Availability', 'Power', 'Size', 'KW', 'Weight(kg)',
         'Other Tech', 'Pdf File', 'Image File'
     ]
     # Running count of products scraped so far.
     self.totalProducts = 0
Example #10
0
 def __init__(self, filename):
     """Set up the Topsy scraper and write the CSV header row."""
     self.logger = LogManager(__name__)
     self.spider = Spider()
     self.regex = Regex()
     self.utils = Utils()
     self.filename = filename
     # Search endpoint queried for each keyword.
     self.url = 'http://topsy.com/s?'
     self.csvWriter = Csv('topsy.csv')
     # The header row is written up front so data rows can follow.
     self.csvWriter.writeCsvRow([
         'Keyword', 'Tweets in last 30 days', 'Topsy Sentiment Score',
         ' Date of scrape'
     ])
Example #11
0
 def __init__(self, vmc_file, user_agent, svg_link='', is_file=False):
     """Prepare VMC validation state, optionally linked to an SVG."""
     self.STORAGE_CERT_DIR = Config.STORAGE_CERT_DIR
     self.Utils = Utils()
     self.vmc_file = vmc_file
     self.is_file = is_file
     self.user_agent = user_agent
     self.svg_link = svg_link
     # Populated later, once the certificate has been fetched/parsed.
     self.parsed_vmc = None
     self.pem_file_path = None
     # Result skeleton returned to callers; errors accumulate in "errors".
     self.vmc_response = {
         "status": False,
         "errors": [],
         "vmc_link": vmc_file,
     }
Example #12
0
 def __init__(self, filename):
     """Set up the Google Finance scraper and write the CSV header row."""
     self.logger = LogManager(__name__)
     self.spider = Spider()
     self.regex = Regex()
     self.utils = Utils()
     self.filename = filename
     # Query endpoint and site root (used to resolve relative links).
     self.url = 'https://www.google.com/finance?'
     self.main_url = 'https://www.google.com'
     self.csvWriter = Csv('google_finance.csv')
     # The header row is written up front so data rows can follow.
     self.csvWriter.writeCsvRow([
         'Ticker Symbol', 'Quarter End', 'Revenue', 'Total Revenue',
         'Date of Scrape'
     ])
Example #13
0
 def __init__(self):
     """Initialise the Pao de Acucar crawler thread and its CSV output."""
     QThread.__init__(self)
     self.logger = LogManager(__name__)
     self.spider = Spider()
     self.regex = Regex()
     self.utils = Utils()
     self.mainUrl = 'http://www.paodeacucar.com.br/'
     self.url = 'http://www.paodeacucar.com.br/'
     # Rows already written in earlier runs, used for de-duplication.
     self.dupCsvRows = Csv().readCsvRow('paodeacucar.csv', 4)
     self.csvWriter = Csv('paodeacucar.csv')
     csvDataHeader = ['SKU', 'Category', 'Subcategory', 'Name', 'URL', 'URL Image', 'Details',
                      'Nutrients Table html code', 'Price from, 28/abr/14', '28/abr/14']
     # Write the header only once: 'URL' missing means a fresh file.
     if 'URL' not in self.dupCsvRows:
         self.dupCsvRows.append(csvDataHeader)
         self.csvWriter.writeCsvRow(csvDataHeader)
Example #14
0
 def on_post(self, req, resp):
     """Create a meetup from the POSTed payload and register its creator.

     On success sets responseId 211 with the stored meetup in returnData;
     on validation failure or internal error sets responseId 111 with an
     explanatory message. The JSON response is written to resp.body.
     """
     reqData = req.media
     responseObj = {}
     responseObj["message"] = ""
     responseObj["returnData"] = ""
     utils = Utils()
     # BUGFIX: the nested sections previously defaulted to "" — a string
     # has no .get(), so a payload missing "location"/"timeline"/"metadata"
     # raised AttributeError. Defaulting to {} degrades gracefully instead
     # (the empty fields then fail validation below).
     location = reqData.get("location", {})
     timeline = reqData.get("timeline", {})
     metadata = reqData.get("metadata", {})
     self.__meetup["title"] = reqData.get("title", "")
     self.__meetup["description"] = reqData.get("description", "")
     self.__meetup["location"]["title"] = location.get("title", "")
     self.__meetup["location"]["country"] = location.get("country", "")
     self.__meetup["location"]["latitude"] = location.get("latitude", "")
     self.__meetup["location"]["longitude"] = location.get("longitude", "")
     self.__meetup["timeline"]["from"] = utils.getDateFromUTCString(timeline.get("from", ""))
     self.__meetup["timeline"]["to"] = utils.getDateFromUTCString(timeline.get("to", ""))
     self.__meetup["isPrivate"] = reqData.get("isPrivate", False)
     self.__meetup["joinedBy"] = []
     self.__meetup["metadata"]["createdBy"] = ObjectId(req.params["userId"])
     self.__meetup["metadata"]["createdOn"] = utils.getDateFromUTCString(metadata.get("createdOn", ""))
     try:
         # validate required data
         if self.validateTitle() and self.validateDescription() and self.validateTimeline():
             # Drop any stale _id so the DB assigns a fresh one — presumably
             # MongoDB; confirm against MeetupDb.
             if "_id" in self.__meetup:
                 del self.__meetup["_id"]
             meetupdb = MeetupDb()
             # insert meetup
             meetupId = meetupdb.insertMeetup(self.__meetup)
             # add user as joining user
             meetupdb.registerToMeetup(req.params["userId"], meetupId)
             userdb = UserDb()
             # add to created meetups by user
             userdb.addToCreatedMeetups(req.params["userId"], meetupId)
             # add to joined meetups by user
             userdb.addToJoinedMeetups(req.params["userId"], meetupId)
             # get this meetup data
             responseObj["returnData"] = meetupdb.findOneMeetup(meetupId)
             responseObj["responseId"] = 211
         else:
             responseObj["responseId"] = 111
             responseObj["message"] = "check if all the fields are valid"
     except Exception as ex:
         print(ex)
         responseObj["responseId"] = 111
         responseObj["message"] = "some error occurred"
     resp.body = json.dumps(responseObj)
Example #15
0
    def __init__(self):
        """Initialise the Nisbets crawler thread and prepare its CSV output."""
        QtCore.QThread.__init__(self)
        self.isExiting = False
        self.logger = LogManager(__name__)
        self.spider = Spider()
        self.regex = Regex()
        dupCsvReader = Csv()
        # Rows already written in earlier runs, used for de-duplication.
        self.dupCsvRows = dupCsvReader.readCsvRow('nisbets.csv', 0)
        self.csvWriter = Csv('nisbets.csv')
        self.mainUrl = 'http://www.nisbets.co.uk'
        csvHeaderList = [
            'URL', 'Product Code', 'Product Technical Specifications',
            'Product Name', 'Brand', 'Product Price',
            'Product Short Description', 'Product Long Description',
            'Image File Name', 'User Manual File Name',
            'Exploded View File Name', 'Spares Code', 'Accessories',
            # BUGFIX: a missing comma after 'Product Status' made Python
            # concatenate it with 'Category1' into one bogus column
            # ('Product StatusCategory1').
            'Product Status',
            'Category1', 'Category2', 'Category3', 'Category4'
        ]
        # Write the header only once: 'URL' missing means a fresh file.
        if 'URL' not in self.dupCsvRows:
            self.csvWriter.writeCsvRow(csvHeaderList)
            self.dupCsvRows.append(csvHeaderList[0])

        self.utils = Utils()
Example #16
0
    def __init__(self):  # ArgParse builder
        """Build the CLI parser, parse arguments and bootstrap the run.

        Side effects: prints the banner, validates/normalises the target
        URL, checks connectivity (which may exit the program) and creates
        the results directory.
        """
        #TODO: configParser - http://46.101.4.154/Art%C3%ADculos%20t%C3%A9cnicos/Python/ConfigParser.pdf
        self.showBanner()

        #devdumpops --sonarqube --enumeration -u [user] -p [password] -token [token] --output [DIR] [HOST:PORT]
        #devdumpops --sonarqube --dump -u [user] -p [password] -token [token] --output [DIR] [HOST:PORT]

        parser = argparse.ArgumentParser(
            description="DevDumpOps",
            epilog="Ejemplos de uso: -*- asd bc de de"
        )  #TODO: add a real usage example

        #TODO: add the remaining options: JIRA, GRADLE
        # Target options
        gpTarget = parser.add_argument_group(title="Target's")
        grupoExc_activos = gpTarget.add_mutually_exclusive_group()
        # NOTE(review): store_true with default=True means --sonarqube is
        # always enabled; the mutually exclusive group cannot turn it off
        # even if the commented alternatives below are restored — confirm.
        grupoExc_activos.add_argument("--sonarqube",
                                      action="store_true",
                                      default=True,
                                      help="SonarQube platform")
        #grupoExc_activos.add_argument("--jenkins",
        #    action="store_true",
        #    help="Jenkins platform")
        #grupoExc_activos.add_argument("--maven",
        #    action="store_true",
        #    help="Maven platform")

        # Proxy configuration options
        gpProxy = parser.add_argument_group(title="Proxy - Optional")
        gpProxy.add_argument("--phost", action="store", help="Host PROXY")
        gpProxy.add_argument("--pport", action="store", help="Port PROXY")
        gpProxy.add_argument("--puser", action="store", help="Username PROXY")
        gpProxy.add_argument("--ppwd", action="store", help="Password PROXY")

        # Action options (mutually exclusive)
        gpActions = parser.add_argument_group(
            title="Actions").add_mutually_exclusive_group()
        # NOTE(review): --enumeration defaults to True, so it remains True
        # even when --dump is chosen — confirm downstream code expects this.
        gpActions.add_argument(
            "--enumeration",
            action="store_true",
            default=True,  # enumerate first by default
            help="List all resources")
        gpActions.add_argument("--dump",
                               action="store",
                               choices=("all", "member"),
                               help="Dump all resources")
        gpActions.add_argument("--bruteforce",
                               action="store_true",
                               help="Brute Force Attack")
        gpActions.add_argument("--backdoor",
                               action="store_true",
                               help="Persistence techniques")

        # General parameters
        parser.add_argument("url",
                            type=str,
                            nargs=1,
                            help="Service url",
                            metavar="[URL:PORT]")
        parser.add_argument("-u",
                            dest="username",
                            action="store",
                            help="Connection user")
        parser.add_argument("-p",
                            dest="password",
                            action="store",
                            help="Connection password")
        parser.add_argument("-t",
                            dest="token",
                            action="store",
                            help="Connection Token")
        parser.add_argument("--output",
                            dest="output",
                            action="store",
                            default="results",
                            help="Results directory")

        self.args = parser.parse_args()
        # Post-parse validation and URL normalisation.
        self._validateMember()
        self.strucURL()
        self._lastChar()
        # Connectivity check; exits the program on failure.
        self.verify_conection()
        # Ensure the results directory tree exists.
        Utils().createdFolders(self.args.output)
Example #17
0
 def verify_conection(self):
     """Terminate the program unless the target URL is reachable."""
     # NOTE(review): exits with status 0 even on failure — confirm intended.
     if not Utils().testVisibility(self.args.url[0], self.armerProxy()):
         exit(0)
     print(f"{bcolors.OKGREEN}[+] VISIBILITY: {bcolors.ENDC}OK")
Example #18
0
class RadioSpider(object):
    """Scrape internet-radio stations from vtuner.com into MySQL.

    Python 2 code: uses print statements, ``urllib.urlopen`` and the
    ``except StandardError, e`` syntax.
    """

    log_file = './radio.log'
    radio_url_format = 'http://vtuner.com/setupapp/guide/asp/BrowseStations/StartPage.asp?sBrowseType=Format'

    # Class-level collaborators shared by all instances.
    mysql_obj = MysqlConnect()
    countryParseObj = ParseCountry()
    utilsObj = Utils()

    def genresParser(self):
        """Return the genre names scraped from the vtuner format page."""

        logging.basicConfig(filename=self.log_file, level=logging.INFO)

        genres_array = []
        soup = BeautifulSoup(urlopen(self.radio_url_format).read(), "lxml")
        genre_rows = soup.find('table', id='table10').findAll('tr')
        for row in genre_rows:
            d = row.findAll('td')
            for row_2 in d:
                t = row_2.findAll('a')
                for row_3 in t:
                    z = row_3.getText()
                    # Strip the trailing station count, e.g. '; (123)'.
                    matchObj = re.sub('[;]?\s\([\d]*\)\s?', '', z)
                    # 'R&B' must be URL-escaped before use in query strings.
                    if matchObj == 'R&B':
                        matchObj = matchObj.replace('&', '%26')
                    genres_array.append(matchObj)
        return genres_array

    def radioParser(self):
        """Crawl every genre/page and insert unseen stations into MySQL.

        NOTE(review): all SQL below is assembled by string concatenation
        from scraped text — SQL-injection prone; parameterized queries
        would be safer.
        """

        pages_array = []
        all_genres = self.genresParser()

        for item in range(len(all_genres)):
            radio_url_genres = 'http://vtuner.com/setupapp/guide/asp/BrowseStations/BrowsePremiumStations.asp?sCategory=' + all_genres[
                item] + '&sBrowseType=Format&sViewBy=&sSortby=&sWhatList=&sNiceLang=&iCurrPage=1'
            url_clean = urllib.urlopen(radio_url_genres)
            soup = BeautifulSoup(url_clean, "lxml")
            pages = soup.findAll('div')
            # Collect the pagination numbers shown for this genre.
            # NOTE(review): pages_array is never reset between genres, so
            # page numbers accumulate across iterations — confirm intended.
            for row in pages:
                y = row.findAll('a', {"class": "paging"})
                for row_1 in y:
                    k = row_1.findAll('b')
                    for row_2 in k:
                        l = row_2.getText()
                        pages_array.append(l)

            for number in range(len(pages_array)):
                radio_urls = 'http://vtuner.com/setupapp/guide/asp/BrowseStations/BrowsePremiumStations.asp?sCategory=' + all_genres[
                    item] + '&sBrowseType=Format&sViewBy=&sSortby=&sWhatList=&sNiceLang=&iCurrPage=' + pages_array[
                        number]
                m3u_url = 'http://vtuner.com/setupapp/guide/asp/'
                url_ready = urllib.urlopen(radio_urls)
                soup_radios = BeautifulSoup(url_ready, "lxml")
                main_table = soup_radios.find('table',
                                              id='table1').findAll('tr')
                for tab in main_table:
                    table = tab.findAll('table')
                    for tagz in table:
                        oi = tagz.findAll('tr')
                        # Need try..catch cuz some URLs can be broken
                        # so whole for going to be try..catch
                        try:
                            for tr in oi:
                                station_url = ''
                                station_name = ''
                                station_location = ''
                                station_country = ''
                                station_genre = ''
                                station_quality = ''
                                station_updated = datetime.datetime.now()
                                alTds = tr.findAll('td')
                                # Data rows have at least 5 cells.
                                if len(alTds) < 5:
                                    continue
                                all_td_string = ''.join(
                                    [str(x) for x in alTds])
                                # Skip header/separator rows.
                                bg_tag = 'bgcolor="#FFFFFF"'
                                strong_tag = '<strong>Station Name</strong>'
                                if bg_tag in all_td_string or strong_tag in all_td_string:
                                    continue
                                # Cell 0: link to the station's .m3u playlist.
                                if len(alTds) > 0:
                                    allTdLinks = alTds[0].findAll('a')
                                    if len(allTdLinks) > 0:
                                        station_url = m3u_url + allTdLinks[0][
                                            'href']
                                        print "DEBUG URL #1 ::::> " + station_url
                                        station_url = station_url.replace(
                                            '../', '')
                                        print "DEBUG URL #2 ::::> " + station_url
                                        station_url = Utils.parse_m3u_file(
                                            station_url)
                                        real_station_url = station_url[0]
                                        clean_url = station_url[1]
                                        print "STATION URL: " + str(
                                            real_station_url)
                                        print "CLEAN URL: " + str(clean_url)
                                        logging.info('\n')
                                        logging.info(
                                            '--- Radio block starts here ---')
                                        logging.info("URL of Radio: " +
                                                     str(real_station_url))
                                # Cell 1: station name.
                                if len(alTds) > 1:
                                    allTdLinks = alTds[1].findAll('a')
                                    if len(allTdLinks) > 0:
                                        station_name = allTdLinks[0].getText()
                                        logging.info("Name of Radio: " +
                                                     station_name)
                                # Cell 2: location, mapped to a country.
                                if len(alTds) > 2:
                                    station_location = alTds[2].getText()
                                    station_country = self.countryParseObj.get_country(
                                        station_location)
                                    logging.info("Location of Radio: " +
                                                 station_location)
                                    logging.info("Country of Radio: " +
                                                 station_country)
                                # Cell 3: genre.
                                if len(alTds) > 3:
                                    allTdLinks = alTds[3].findAll('a')
                                    if len(allTdLinks) > 0:
                                        station_genre = allTdLinks[0].getText()
                                        logging.info("Genre of Radio: " +
                                                     station_genre)
                                # Cell 4: stream quality (bitrate).
                                if len(alTds) > 4:
                                    station_quality = alTds[4].getText()
                                    logging.info("Quality of Radio: " +
                                                 station_quality)
                                    logging.info(
                                        '--- Radio block ends here ---')

                                # Remove quotes for MySQL inserts
                                station_name = self.utilsObj.replace_quots(
                                    station_name)
                                ''' look IF station already EXIST in DB '''
                                # NOTE(review): clean_url/real_station_url are
                                # only bound when cell 0 contained a link; a
                                # row without one raises NameError here, which
                                # the except below swallows — confirm intended.
                                check_station = "SELECT id from `radio_station_stream_urls` where url REGEXP ('" + clean_url + "') LIMIT 1;"
                                check_station_result = self.mysql_obj.make_select(
                                    check_station)
                                logging.info("Station ID is: " +
                                             str(check_station_result))

                                if not check_station_result:
                                    # Unknown station: insert the station row,
                                    # then its stream URL and genre link rows.
                                    query_radio = "INSERT INTO `radio_stations`(`name`, `location`, `country`, `updated`) VALUES ('" + station_name + "'," + "'" + station_location + "'," + "'" + str(
                                        station_country) + "'," + "'" + str(
                                            station_updated) + "');"
                                    insert_id = self.mysql_obj.make_insert(
                                        query_radio)

                                    if insert_id != -1:
                                        # Keep only the digits of the bitrate.
                                        station_quality = re.sub(
                                            "\D", "", station_quality)
                                        query_url_and_bitrate = "INSERT INTO `radio_station_stream_urls`(`station_id`, `url`, `bitrate`) VALUES('" + str(
                                            insert_id
                                        ) + "'," + "'" + real_station_url + "'," + "'" + station_quality + "');"
                                        self.mysql_obj.make_insert(
                                            query_url_and_bitrate)

                                    # Use only the first genre of e.g. 'A/B'.
                                    sep = "/"
                                    genre = station_genre.split(sep, 1)[0]

                                    query_get_genre_id = "SELECT `id` from `music_genres` WHERE `name`= " + "'" + genre + "'" + ";"
                                    result_genre_id = self.mysql_obj.make_select(
                                        query_get_genre_id)

                                    if not result_genre_id:
                                        query_insert_genre = "INSERT INTO `music_genres` (`name`) VALUES ('" + str(
                                            genre) + "');"
                                        id_genre_is = self.mysql_obj.make_insert(
                                            query_insert_genre)
                                        logging.info(
                                            "Result is NONE, Adding tnew genre!"
                                        )
                                    else:
                                        print "GENRE ID: " + str(
                                            result_genre_id[0]['id']) + "\n"
                                        id_genre_is = str(
                                            result_genre_id[0]['id'])

                                    query_insert_id_of_genre = "INSERT into `radio_station_genres` (`station_id`, `genre_id`) VALUES ('" + str(
                                        insert_id) + "','" + str(
                                            id_genre_is) + "');"
                                    self.mysql_obj.make_insert(
                                        query_insert_id_of_genre)
                                else:
                                    print "Radio station - ALREADY EXIST!" + "\n"
                        except StandardError, e:
                            print e
 def __init__(self):
     """Initialise the helper and reset both file-type flags."""
     self.Utils = Utils()
     # Neither a VMC nor an SVG file has been processed yet.
     self.vmc_file_flag = False
     self.svg_file_flag = False
Example #20
0
import logging as logger
from Config import Config
from datetime import date
from utils.Utils import Utils
# Compose today's log-file path: <dir>/<YYYY-MM-DD>_<name>.
log_level = Config.LOGGING_LEVEL
log_file_path = Config.LOG_FILE_PATH + str(date.today()) + "_" + Config.LOG_FILE_NAME

# NOTE(review): rebinding the class name shadows utils.Utils within this
# module — kept as-is for backward compatibility with importers.
Utils = Utils()
Utils.check_dir_folder(Config.LOG_FILE_PATH)

# Map the configured level name onto a logging constant (default: INFO).
log_level = {
    "INFO": logger.INFO,
    "DEBUG": logger.DEBUG,
    "WARNING": logger.WARN,
}.get(log_level, logger.INFO)

logger.basicConfig(
    filename=log_file_path,
    filemode='a',
    format='%(asctime)s,%(msecs)d %(name)s %(levelname)s %(message)s',
    datefmt='%m/%d/%Y %H:%M:%S',
    level=log_level)