Example #1
File: main.py Project: sjmikee/rdef
def main(argv=sys.argv[1:]):
    '''Initialize the project: set up configuration, verify VirusTotal credentials and start the proxy.'''
    args = parse_args(argv)
    if 'WindowsPE' in architecture():  # Windows OS was detected
        logger_instance.write_log(1, 1)
        config = main_config.MainConfig()
        config.create_configuration(__config__file__, __Version__,
                                    __config__path__, __working__directory__)
        global api_url  # Accessing global identifiers
        global api_key
        try:
            # Trying to read configuration VT credentials
            api_url, api_key = config.read_configuration(
                __working__directory__, __config__file__)
            if not (api_url and api_key):  # Verify both credentials were set
                print("There has been a problem setting VT credentials.")
                logger_instance.write_log(130, 0, '')
                from ctypes import windll
                # Gui ctypes
                windll.user32.MessageBoxW(
                    0, "Please check API credentials",
                    "Error reading VirusTotal credentials", 1)
                exit()
            else:
                # First request: run a VirusTotal health check
                healthcheck('http://www.google.co.il')
        except Exception as e:
            logger_instance.write_log(122, 0, e)
    else:  # Linux/Mac detected; a different program would be needed
        # Not running on Windows: log the error
        logger_instance.write_log(0, 0)

    # Creating proxy instance
    proxy_server = proxy.Proxy()
    proxy_server.start_proxy(args.port)
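
A note on the OS check in this example: platform.architecture() returns a (bits, linkage) tuple, and on Windows the linkage element is 'WindowsPE', which is what the membership test relies on. A minimal standalone sketch of that check (the printed messages are illustrative, not part of the project):

from platform import architecture

def is_windows():
    # architecture() -> e.g. ('64bit', 'WindowsPE') on Windows,
    # ('64bit', 'ELF') on most Linux systems
    return 'WindowsPE' in architecture()

if __name__ == '__main__':
    print('Windows detected' if is_windows() else 'Non-Windows platform')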
Example #2
def main(picture_address, mask_address, save_address):
    parser = ArgumentParser()
    print(picture_address)
    print('\n')
    print(mask_address)
    print('\n')
    print(save_address)
    print('\n')

    # No CLI options are registered; the Namespace is filled in manually below
    args = parser.parse_args()
    args.image = picture_address
    args.mask = mask_address
    args.save_to = save_address + "/output.jpg"
    output_paths = constants.OutputPaths(
        experiment_name="train12000_128")  # added by lzx

    config = main_config.MainConfig(MAIN_CONFIG_FILE)

    gmcnn_model = gmcnn_gan.GMCNNGan(batch_size=config.training.batch_size,
                                     img_height=config.training.img_height,
                                     img_width=config.training.img_width,
                                     num_channels=config.training.num_channels,
                                     warm_up_generator=False,
                                     config=config,
                                     output_paths=output_paths)  # added by lzx
    log.info('Loading GMCNN model...')
    gmcnn_model.load()
    log.info('GMCNN model successfully loaded.')

    image = cv2.imread(args.image)
    mask = cv2.imread(args.mask)

    image = preprocess_image(image, config.training.img_height,
                             config.training.img_width)
    mask = preprocess_mask(mask, config.training.img_height,
                           config.training.img_width)

    log.info('Making prediction...')
    predicted = gmcnn_model.predict([image, mask])
    predicted = postprocess_image(predicted)

    masked = deepcopy(image)
    masked = postprocess_image(masked)
    masked[mask == 1] = 255

    constructed = deepcopy(masked)
    constructed[mask == 1] = predicted[mask == 1]

    result_image = np.concatenate(
        (
            masked[0][..., [2, 1, 0]],
            # predicted[0][..., [2, 1, 0]],
            constructed[0][..., [2, 1, 0]],
            image[0][..., [2, 1, 0]] * 127.5 + 127.5),
        axis=1)

    cv2.imwrite(args.save_to, result_image)
    log.info('Saved results to: %s', args.save_to)
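
A note on the [..., [2, 1, 0]] indexing used when assembling result_image: it reverses the last (channel) axis, i.e. swaps between OpenCV's BGR order and RGB. A small sketch of that equivalence (the sample array is made up):

import numpy as np

img = np.arange(12, dtype=np.uint8).reshape(2, 2, 3)  # toy H x W x 3 array

swapped_a = img[..., [2, 1, 0]]   # explicit channel reorder, as in the example above
swapped_b = img[..., ::-1]        # same result via a reversed slice

assert np.array_equal(swapped_a, swapped_b)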
Example #3
def main():
    parser = ArgumentParser()

    parser.add_argument('--image', required=True, help='The path to the image')

    parser.add_argument('--mask', required=True, help='The path to the mask')

    parser.add_argument('--save_to',
                        default='predicted.jpg',
                        help='The save path of predicted image')

    args = parser.parse_args()

    config = main_config.MainConfig(MAIN_CONFIG_FILE)

    gmcnn_model = gmcnn_gan.GMCNNGan(batch_size=config.training.batch_size,
                                     img_height=config.training.img_height,
                                     img_width=config.training.img_width,
                                     num_channels=config.training.num_channels,
                                     warm_up_generator=False,
                                     config=config)
    log.info('Loading GMCNN model...')
    gmcnn_model.load()
    log.info('GMCNN model successfully loaded.')

    image = cv2.imread(args.image)
    mask = cv2.imread(args.mask)

    image = preprocess_image(image, config.training.img_height,
                             config.training.img_width)
    mask = preprocess_mask(mask, config.training.img_height,
                           config.training.img_width)

    log.info('Making prediction...')
    predicted = gmcnn_model.predict([image, mask])

    predicted = postprocess_image(predicted)

    masked = deepcopy(image)
    masked = postprocess_image(masked)
    masked[mask == 1] = 255
    result_image = np.concatenate(
        (masked[0][..., [2, 1, 0]], predicted[0][..., [2, 1, 0]],
         image[0][..., [2, 1, 0]] * 127.5 + 127.5),
        axis=1)

    cv2.imwrite(args.save_to, result_image)
    log.info('Saved results to: %s', args.save_to)
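
The image[0][..., [2, 1, 0]] * 127.5 + 127.5 term undoes a common normalization: if preprocessing maps pixel values from [0, 255] into [-1, 1] (as tanh-output generators typically assume), multiplying by 127.5 and adding 127.5 maps them back. A small sketch of that round trip (the helper names are mine, not the project's):

import numpy as np

def to_model_range(img_uint8):
    # [0, 255] -> [-1, 1]
    return img_uint8.astype(np.float32) / 127.5 - 1.0

def to_pixel_range(img_float):
    # [-1, 1] -> [0, 255]
    return np.clip(np.rint(img_float * 127.5 + 127.5), 0, 255).astype(np.uint8)

original = np.array([0, 64, 128, 255], dtype=np.uint8)
assert np.array_equal(to_pixel_range(to_model_range(original)), original)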
Example #4
def main():
    parser = ArgumentParser()

    parser.add_argument('--train_path',
                        required=True,
                        help='The path to training images')

    parser.add_argument('--mask_path',
                        required=True,
                        help='The path to mask images')

    parser.add_argument(
        '-warm_up_generator',
        action='store_true',
        help='Training generator model only with reconstruction loss')

    parser.add_argument(
        '-from_weights',
        action='store_true',
        help='Use this command to continue training from weights')

    parser.add_argument('--gpu',
                        default='0',
                        help='index of GPU to be used (default: %(default)s)')

    args = parser.parse_args()

    training_utils.set_visible_gpu(args.gpu)
    if args.warm_up_generator:
        log.info(
            'Performing generator training only with the reconstruction loss.')

    config = main_config.MainConfig(MAIN_CONFIG_FILE)
    wgan_batch_size = config.training.wgan_training_ratio * config.training.batch_size

    train_path = os.path.expanduser(args.train_path)
    mask_path = os.path.expanduser(args.mask_path)

    gmcnn_gan_model = gmcnn_gan.GMCNNGan(
        batch_size=config.training.batch_size,
        img_height=config.training.img_height,
        img_width=config.training.img_width,
        num_channels=config.training.num_channels,
        warm_up_generator=args.warm_up_generator,
        config=config)

    if args.from_weights:
        log.info('Continue training from checkpoint...')
        gmcnn_gan_model.load()

    img_dataset = datasets.Dataset(train_path=train_path,
                                   test_path=train_path,
                                   batch_size=wgan_batch_size,
                                   img_height=config.training.img_height,
                                   img_width=config.training.img_width)

    if img_dataset.train_set.samples < wgan_batch_size:
        log.error(
            'Number of training images [%s] is lower than WGAN batch size [%s]',
            img_dataset.train_set.samples, wgan_batch_size)
        exit(1)

    mask_dataset = datasets.MaskDataset(train_path=mask_path,
                                        batch_size=wgan_batch_size,
                                        img_height=config.training.img_height,
                                        img_width=config.training.img_width)

    if mask_dataset.train_set.samples < wgan_batch_size:
        log.error(
            'Number of training mask images [%s] is lower than WGAN batch size [%s]',
            mask_dataset.train_set.samples, wgan_batch_size)
        exit(1)

    gmcnn_gan_trainer = trainer.Trainer(
        gan_model=gmcnn_gan_model,
        img_dataset=img_dataset,
        mask_dataset=mask_dataset,
        batch_size=config.training.batch_size,
        img_height=config.training.img_height,
        img_width=config.training.img_width,
        num_epochs=config.training.num_epochs,
        save_model_steps_period=config.training.save_model_steps_period)

    gmcnn_gan_trainer.train()
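
A note on the --gpu help string: argparse runs help text through %-style formatting, so the placeholder needs its trailing conversion character. A bare %(default) raises an error when --help is rendered, while %(default)s interpolates the default value. A minimal standalone check:

from argparse import ArgumentParser

parser = ArgumentParser(prog='train')
parser.add_argument('--gpu', default='0',
                    help='index of GPU to be used (default: %(default)s)')

# format_help() renders the help text; with %(default)s it contains "(default: 0)"
assert '(default: 0)' in parser.format_help()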
Example #5
def main():
    parser = ArgumentParser()

    parser.add_argument('--train_path',
                        required=True,
                        help='The path to training images')

    parser.add_argument('--mask_path',
                        required=True,
                        help='The path to mask images')

    parser.add_argument('--experiment_name',
                        required=True,
                        help='The name of experiment')

    parser.add_argument(
        '-warm_up_generator',
        action='store_true',
        help='Training generator model only with reconstruction loss')

    parser.add_argument(
        '-from_weights',
        action='store_true',
        help='Use this command to continue training from weights')

    parser.add_argument('--gpu',
                        default='0',
                        help='index of GPU to be used (default: %(default)s)')

    args = parser.parse_args()

    output_paths = constants.OutputPaths(experiment_name=args.experiment_name)
    training_utils.set_visible_gpu(args.gpu)
    if args.warm_up_generator:
        log.info(
            'Performing generator training only with the reconstruction loss.')

    config = main_config.MainConfig(MAIN_CONFIG_FILE)
    wgan_batch_size = config.training.wgan_training_ratio * config.training.batch_size

    train_path = os.path.expanduser(args.train_path)
    mask_path = os.path.expanduser(args.mask_path)

    gmcnn_gan_model = gmcnn_gan.GMCNNGan(
        batch_size=config.training.batch_size,
        img_height=config.training.img_height,
        img_width=config.training.img_width,
        num_channels=config.training.num_channels,
        warm_up_generator=args.warm_up_generator,
        config=config,
        output_paths=output_paths)

    #if args.from_weights:
    #  log.info('Continue training from checkpoint...')
    #  gmcnn_gan_model.load()

    # look for newest weights
    weights_folder = output_paths.output_weights_path
    folders = [f.path for f in os.scandir(weights_folder) if f.is_dir()]
    folders.sort()
    last_folder = folders[-1]
    print("Loading weights from folder: %s" % last_folder)
    gmcnn_gan_model.load(last_folder)

    img_dataset = datasets.Dataset(train_path=train_path,
                                   test_path=train_path,
                                   batch_size=wgan_batch_size,
                                   img_height=config.training.img_height,
                                   img_width=config.training.img_width)

    if img_dataset.train_set.samples < wgan_batch_size:
        log.error(
            'Number of training images [%s] is lower than WGAN batch size [%s]',
            img_dataset.train_set.samples, wgan_batch_size)
        exit(1)

    mask_dataset = datasets.MaskDataset(train_path=mask_path,
                                        batch_size=wgan_batch_size,
                                        img_height=config.training.img_height,
                                        img_width=config.training.img_width)

    if mask_dataset.train_set.samples < wgan_batch_size:
        log.error(
            'Number of training mask images [%s] is lower than WGAN batch size [%s]',
            mask_dataset.train_set.samples, wgan_batch_size)
        exit(1)

    gmcnn_gan_trainer = trainer.Trainer(
        gan_model=gmcnn_gan_model,
        img_dataset=img_dataset,
        mask_dataset=mask_dataset,
        batch_size=config.training.batch_size,
        img_height=config.training.img_height,
        img_width=config.training.img_width,
        num_epochs=config.training.num_epochs,
        save_model_steps_period=config.training.save_model_steps_period,
        output_paths=output_paths,
        callback=callback)

    gmcnn_gan_trainer.train()

    gmcnn_gan_trainer.gan_model.save()
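
The checkpoint lookup in this example picks the lexicographically last subdirectory of output_paths.output_weights_path. If the folder names are not zero-padded step counts or sortable timestamps, keying on modification time is a more robust way to find the newest weights; a sketch under that assumption (the directory layout is hypothetical):

import os

def newest_subdir(weights_folder):
    # Pick the most recently modified subdirectory instead of relying on
    # lexicographic folder names.
    subdirs = [entry.path for entry in os.scandir(weights_folder) if entry.is_dir()]
    if not subdirs:
        raise FileNotFoundError('No weight folders found in %s' % weights_folder)
    return max(subdirs, key=os.path.getmtime)

# last_folder = newest_subdir(output_paths.output_weights_path)
# gmcnn_gan_model.load(last_folder)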
Example #6
class ProxyRequestHandler(BaseHTTPRequestHandler):
    logger_instance = logger.logger()
    logger_instance.create_logger()
    config_instance = main_config.MainConfig()
    api_url, api_key = config_instance.read_configuration(
        __working__directory__, __config__file__)
    vt_response_parser_instance = vt_response_parser.vt_response_parser()
    protocol_version = 'HTTP/1.1'
    # Var
    conn = get_connection()  # Setting the SQL
    icon = systrayIcon()
    icon.start_icon_thread()

    def do_HEAD(self):
        # Delegate to do_GET (this handler's do_GET takes no body flag)
        self.do_GET()

    def _connect_to(self, netloc, soc):
        i = netloc.find(':')
        if i >= 0:
            host_port = netloc[:i], int(netloc[i + 1:])
        else:
            host_port = netloc, 80
        try:
            soc.connect(host_port)
        except socket.error as e:
            self.send_error(404, str(e))
            return 0
        return 1

    def do_GET(self):
        '''Handle HTTP requests: call checkUrl to assess the risk of the requested URL.
           If checkUrl reports no risk (True) and the URL is not yet in the database,
           it is inserted into the whitelist table; if checkUrl reports risk (False),
           the "Blocked URL" error page is loaded and the URL is inserted into the blacklist.'''
        (scm, netloc, path, params, query,
         fragment) = urlparse(self.path, 'http')
        if scm != 'http' or fragment or not netloc:  # Link Validity
            self.send_error(400, "bad url %s" % self.path)
            return
        url = scm + '://' + netloc
        # If function returns CHECK, we will check the link
        link_Status = isurlindb(self.conn, url)
        if (link_Status == 'CHECK'):
            if (self.checkUrl(url)):
                print("\n[*] Harmless url forwarding")
                self.socket_connection(netloc, path, params, query)
                inserturl(self.conn, 0, self.path, 0, 0, 0,
                          scm)  # Insert to DB whitelist
                insert_list_type(self.conn, url, 0, 'rdef_web_whitelist', scm)
            else:  # Malicious, inserting to DB
                print("\n[!] Malicious url blocked")
                # Insert checked and malicious link to blacklist

                if self.load_blocked_page(url, alert=True):
                    self.socket_connection(netloc, path, params, query)
                else:
                    insert_list_type(self.conn, url, 0, 'rdef_web_blacklist',
                                     scm)
        elif (link_Status == 'WL'):  # Whitelist, forwarding connection
            print("\n[*] Whitelisted url forwarding")
            self.socket_connection(netloc, path, params, query)
        else:
            # Blacklist, Loading error page
            print("\n[!] Blacklisted url blocked")
            if self.load_blocked_page(url):
                self.socket_connection(netloc, path, params, query)

    def load_blocked_page(self, url, alert=False):
        if alert:
            self.icon.icon_notify(f'Detected suspicious activity in {url}')
            if self.icon.defend_state():
                self.send_error(403)
                self.close_connection = 1
                return False
            else:
                return True
        else:
            if self.icon.defend_state():
                self.send_error(403)
                self.close_connection = 1
                return False
            else:
                return True

    def socket_connection(self, netloc, path, params, query):
        soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            if self._connect_to(netloc, soc):
                self.log_request()
                text = "%s %s %s\r\n" % (self.command,
                                         urlunparse(
                                             ('', '', path, params, query,
                                              '')), self.request_version)
                soc.send(text.encode())
                self.headers['Connection'] = 'close'
                del self.headers['Proxy-Connection']
                for key_val in self.headers.items():
                    text = "%s: %s\r\n" % key_val
                    soc.send(text.encode())
                soc.send("\r\n".encode())
                self._read_write(soc)
        except Exception as e:
            self.logger_instance.write_log(172, 1, e)
        finally:
            soc.close()
            self.connection.close()

    def _read_write(self, soc, max_idling=50):
        try:
            iw = [self.connection, soc]
            ow = []
            count = 0
            while 1:
                count += 1
                (ins, _, exs) = select.select(iw, ow, iw, 3)
                if exs:
                    break
                if ins:
                    for i in ins:
                        if i is soc:
                            out = self.connection
                        else:
                            out = soc
                        data = None
                        try:
                            data = i.recv(8192)
                        except Exception as e:
                            self.logger_instance.write_log(170, 1, e)
                        if data:
                            out.send(data)
                            count = 0
                else:
                    pass
                if count == max_idling:
                    break
        except Exception as e:
            self.logger_instance.write_log(171, 1, e)

    def do_CONNECT_read_write(self, address):
        try:
            s = socket.create_connection(address, timeout=self.timeout)
            print("\n[*] Socket created")
        except Exception as e:
            self.send_error(502)
            print(e)
            return
        self.send_response(200, 'Connection Established')
        self.end_headers()

        conns = [self.connection, s]
        self.close_connection = 0
        while not self.close_connection:
            rlist, wlist, xlist = select.select(conns, [], conns, self.timeout)
            if xlist or not rlist:
                break
            for r in rlist:
                other = conns[1] if r is conns[0] else conns[0]
                data = None
                try:
                    data = r.recv(8192)
                except Exception as e:
                    self.logger_instance.write_log(170, 1, e)
                if not data:
                    self.close_connection = 1
                    break
                other.sendall(data)

    def do_POST(self, body=True):
        self.do_GET()

    def do_CONNECT(self):
        address = self.path.split(':', 1)
        url = ''
        if (address[0] == 'http'):
            address = [self.path.split('://')[1], 80]
            # print(address)
            url = 'http://' + address[0]
            protocol = 'http'
        else:
            address[1] = int(address[1]) or 443
            url = "https://" + address[0]
            protocol = 'https'
        link_Status = isurlindb(self.conn, url)
        if (link_Status == 'CHECK'):
            status = self.checkUrl(url)

            if (status):
                inserturl(self.conn, 0, url, 0, 0, 0, protocol)
                insert_list_type(self.conn, url, 0, 'rdef_web_whitelist',
                                 protocol)
                print("\n[*] Harmless url forwarding")
                self.do_CONNECT_read_write(address)
            else:
                # print("Malicious")
                print("\n[!] Malicious url blocked")
                # Insert checked and malicious link to blacklist
                insert_list_type(self.conn, url, 0, 'rdef_web_blacklist',
                                 protocol)
                if self.load_blocked_page(url, alert=True):
                    self.do_CONNECT_read_write(address)

        elif (link_Status == 'WL'):
            print("\n[*] Whitelisted url forwarding")
            self.do_CONNECT_read_write(address)
        else:
            # TODO: Blacklisted url handling
            print("\n[!] Blacklisted url blocked")
            if self.load_blocked_page(url):
                self.do_CONNECT_read_write(address)

    def send_resp_headers(self, resp):
        try:
            respheaders = resp.headers
            for key in respheaders:
                if key not in [
                        'Content-Encoding', 'Transfer-Encoding',
                        'content-encoding', 'transfer-encoding',
                        'content-length', 'Content-Length'
                ]:
                    self.send_header(key, respheaders[key])
            self.send_header('Content-Length', len(resp.content))
            self.end_headers()
        except Exception as e:
            self.logger_instance.write_log(173, 1, e)

    def checkUrl(self, url):
        # Creating a web request to VirusTotal API
        url = base64.urlsafe_b64encode(url.encode()).decode().strip("=")
        vt_full_url = self.api_url + "urls/{}".format(url)
        parameters = {"x-apikey": self.api_key}
        response = requests.get(vt_full_url, headers=parameters)
        if (response.status_code == 200):
            # VT request was successful
            responseData = json.loads(response.text)
            harmless, malicious, suspicious, timeout, undetected = self.vt_response_parser_instance.last_analysis_stats(
                responseData)
            #malicious = 5
            if (malicious > 0):
                # Writing to log that malicious site detected
                print(
                    '[!] {} Agents found this url malicious'.format(malicious))
                self.logger_instance.write_log(90, 2)
                return False
            else:
                return True
        else:
            # The VT request failed; allow the URL through rather than block it
            print('[!] Bad VT request\n')
            return True
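
checkUrl builds the VirusTotal v3 URL identifier by hand: the API addresses a URL report at /urls/{id}, where id is the target URL base64url-encoded with its trailing '=' padding stripped, and the API key travels in the x-apikey header. A standalone sketch of just the identifier step (the example URL is arbitrary):

import base64

def vt_url_id(url):
    # base64url-encode the URL and drop the '=' padding, as expected by the
    # VirusTotal v3 /urls/{id} endpoint
    return base64.urlsafe_b64encode(url.encode()).decode().rstrip('=')

print(vt_url_id('http://www.example.com'))  # aHR0cDovL3d3dy5leGFtcGxlLmNvbQ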