def save_lime_coef(filename,
                   model_name,
                   SAVE_DIR,
                   train_dev_tokens,
                   test_tokens,
                   d_file=None):
    model = 'models/{}.pkl'.format(filename)
    path = utils.get_abs_path(SAVE_DIR, model)
    if 'svm' in model_name:
        model = utils.load_pickle(path, encoding=False)
    else:
        if model_name == 'lstm_att':
            hp_d = 'models/{}.pkl'.format(d_file)
            hp_path = utils.get_abs_path(SAVE_DIR, hp_d)
            d = utils.load_pickle(hp_path)
            model = init_model(train_dev_tokens, d, path)
        else:
            model = utils.load_pickle(path)
    features_l, importance_l = get_lime(model, test_tokens, model_name)
    features = 'features/{}_lime_all_features.pkl'.format(model_name)
    path = utils.get_abs_path(SAVE_DIR, features)
    utils.save_pickle(features_l, path)
    scores = 'feature_importance/{}_lime_all_scores.pkl'.format(model_name)
    path = utils.get_abs_path(SAVE_DIR, scores)
    utils.save_pickle(importance_l, path)
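
# Hypothetical usage sketch (argument values are illustrative, not from the source):
# for the attention LSTM, d_file names the pickled hyperparameter dict that init_model expects.
save_lime_coef('lstm_att', 'lstm_att', SAVE_DIR,
               train_dev_tokens, test_tokens, d_file='lstm_att_hp')
# -> features/lstm_att_lime_all_features.pkl and
#    feature_importance/lstm_att_lime_all_scores.pkl, resolved via utils.get_abs_path(SAVE_DIR, ...)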
Example #2
def save_shap_val(hp_filename,
                  filename,
                  name,
                  SAVE_DIR,
                  train_data,
                  test_data,
                  test_labels,
                  use_gpu=True,
                  background_length=100,
                  padding_length=512):
    hp_d = 'models/{}.pkl'.format(hp_filename)
    hp_path = utils.get_abs_path(SAVE_DIR, hp_d)
    d = utils.load_pickle(hp_path)
    model_d = 'models/{}.pkl'.format(filename)
    model_path = utils.get_abs_path(SAVE_DIR, model_d)
    model = init_model(train_data, d, model_path, use_gpu=use_gpu)
    features = 'features/{}_shap_all_features.pkl'.format(name)
    feature_path = utils.get_abs_path(SAVE_DIR, features)
    scores = 'feature_importance/{}_shap_all_scores.pkl'.format(name)
    scores_path = utils.get_abs_path(SAVE_DIR, scores)
    features_l, importance_l = get_lstm_shap(
        model,
        train_data,
        test_data,
        background_length=background_length,
        padding_length=padding_length,
        feature_path=feature_path,
        model_path=scores_path)  # the scores path is passed via get_lstm_shap's model_path argument
    utils.save_pickle(features_l, feature_path)
    utils.save_pickle(importance_l, scores_path)
Example #3
def get_prediction(test_tokens, model_name, save_dir, train_dev_tokens, test_labels, hp_name):
    hp_path, pipeline, model, predictions = None, None, None, None
    model_path = utils.get_abs_path(save_dir, 'models/{}.pkl'.format(model_name))
    if 'deception' in save_dir and model_name == 'svm':
        pipeline = utils.load_pickle(model_path, encoding=False)
    elif model_name == 'bert':
        pass
    else:
        pipeline = utils.load_pickle(model_path)
    if model_name == 'bert':
        if 'deception' in save_dir:
            dataset_name = 'deception'
        elif 'yelp' in save_dir:
            dataset_name = 'yelp'
        elif 'sst' in save_dir:
            dataset_name = 'sst'
        path = '/data/BERT_att_weights/{}-bert-preds.npy'.format(dataset_name)
        predictions = np.load(path)
    elif model_name == 'lstm_att':
        hp_path = utils.get_abs_path(save_dir, 'models/{}.pkl'.format(hp_name))
        d = utils.load_pickle(hp_path)
        model = init_model(train_dev_tokens, d, model_path)
        tokens = split_tokens(test_tokens)
        mapping = [model.get_words_to_ids(l) for l in tokens]
        predictions = model.predict(tokens, mapping)
    else:
        predictions, accuracy = sc.heldout_test(pipeline, test_tokens, test_labels)
    assert len(predictions) == len(test_tokens)
    return predictions
Example #4
def save_att_weights(word_score_ds, save_dir):
    features_l, importance_l = get_att_weights(word_score_ds)
    features_file_name = 'features/lstm_att_weights_all_features.pkl'
    path = utils.get_abs_path(save_dir, features_file_name)
    utils.save_pickle(features_l, path)
    scores_file_name = 'feature_importance/lstm_att_weights_all_scores.pkl'
    path = utils.get_abs_path(save_dir, scores_file_name)
    utils.save_pickle(importance_l, path)

def save_xgb_impt(file, name, SAVE_DIR):
    model = 'models/{}.pkl'.format(file)
    path = utils.get_abs_path(SAVE_DIR, model)
    print('model path: {}'.format(path))
    pipeline = utils.load_pickle(path)
    xgb_impt_d = get_xgb_impt_d(pipeline)
    features = 'features/{}_impt_all_features.pkl'.format(name)
    path = utils.get_abs_path(SAVE_DIR, features)
    utils.save_pickle(xgb_impt_d, path)
Example #6
def save_files(save_dir, model_name, features_l, importance_l, explainer):
    features = 'features/{}_{}_all_features.pkl'.format(model_name, explainer)
    path = utils.get_abs_path(save_dir, features)
    utils.save_pickle(features_l, path)
    print('saved features at {}'.format(path))

    scores = 'feature_importance/{}_{}_all_scores.pkl'.format(
        model_name, explainer)
    path = utils.get_abs_path(save_dir, scores)
    utils.save_pickle(importance_l, path)
    print('saved scores at {}'.format(path))
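
# Hypothetical usage sketch (names are illustrative, not from the source):
# pair an explainer's outputs with the model they explain.
features_l, importance_l = get_lime(model, test_tokens, 'svm')
save_files(save_dir, 'svm', features_l, importance_l, explainer='lime')
# -> features/svm_lime_all_features.pkl and
#    feature_importance/svm_lime_all_scores.pkl under save_dir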
Example #7
def get_word_score_ds(save_dir, model_name, explainer, test_labels=None):
    path = utils.get_abs_path(save_dir, 'features/{}_{}_all_features.pkl'.format(model_name, explainer))
    tmp_features = utils.load_pickle(path)
    if model_name == 'lstm_att' and explainer == 'shap':
        tmp_features = [' '.join(l) for l in tmp_features]
        
    path = utils.get_abs_path(save_dir, 'feature_importance/{}_{}_all_scores.pkl'.format(model_name, explainer))
    tmp_scores = utils.load_pickle(path)
    ret = create_word_score_ds(tmp_features, tmp_scores, model_name, explainer, labels=test_labels)
    return ret
Example #8
def get_built_in(save_dir, model_name, total):
    path = None
    if model_name == 'svm':
        path = utils.get_abs_path(save_dir, 'features/svm_coef_all_features.pkl')
    elif model_name == 'svm_l1':
        path = utils.get_abs_path(save_dir, 'features/svm_l1_coef_all_features.pkl')
    elif model_name == 'xgb':
        path = utils.get_abs_path(save_dir, 'features/xgb_impt_all_features.pkl')
    tmp = utils.load_pickle(path)
    tmp = convert_to_absolute(tmp)  # take absolute values
    ret = create_copies(tmp, total)
    return ret
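
# Hypothetical usage sketch (assumes create_copies returns one copy of the
# importance dict per test example, as the name suggests):
built_in = get_built_in(save_dir, 'xgb', total=len(test_tokens))
# built_in holds absolute-valued built-in importances, replicated `total` times
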
def save_svm_coef(file, name, SAVE_DIR):
    model = 'models/{}.pkl'.format(file)
    path = utils.get_abs_path(SAVE_DIR, model)
    print('model path: {}'.format(path))
    pipeline = None
    if file == 'svm':
        pipeline = utils.load_pickle(path, encoding=False)
    else:
        pipeline = utils.load_pickle(path)
    svm_coef_d = get_svm_coef_d(pipeline)
    features = 'features/{}_coef_all_features.pkl'.format(name)
    path = utils.get_abs_path(SAVE_DIR, features)
    utils.save_pickle(svm_coef_d, path)
Example #10
    def choose_font(self, index: int = None, name: str = None):
        fonts = self.font_name_list
        if self.font_name_list is None:
            fonts = read_from_file(get_abs_path('/fonts/font.txt'))

        all_fonts = self.fonts
        if index is not None and len(all_fonts) > 0:
            return all_fonts[fonts[index]]
        elif name is not None and len(all_fonts) > 0:
            return all_fonts[name]
        elif not name and not index:
            return get_abs_path('/pacifico/Pacifico.ttf')
        else:
            raise Exception('fonts are missing')
Example #11
def parse_docs():
    """
    Parse all documentation files and store information in data.
    """
    data = {}
    doc_folders = map(lambda x: get_abs_path(x),
                      ['coala/docs/Developers', 'documentation/Users',
                       'documentation/Help'])
    for folder in doc_folders:
        for files in os.listdir(folder):
            rst = parse_rst(os.path.join(get_abs_path(folder), files))
            extractor = Extractor(rst, data, folder + '/' + files)
            rst.walk(extractor)
    return data
Example #12
def run(train_dev_tokens, test_tokens, test_labels, save_dir):
    file = 'lstm_att'
    model = 'models/{}.pkl'.format(file)
    path = utils.get_abs_path(save_dir, model)
    d_file = 'lstm_att_hp'
    hp_d = 'models/{}.pkl'.format(d_file)
    hp_path = utils.get_abs_path(save_dir, hp_d)
    d = utils.load_pickle(hp_path)
    model = init_model(train_dev_tokens, d, path)
    test_split_tokens = split_tokens(test_tokens)
    mapping = [model.get_words_to_ids(l) for l in test_split_tokens]
    predictions, word_score_ds = model.predict(test_split_tokens, mapping,
                                               True)
    accuracy = accuracy_score(test_labels, predictions)
    return word_score_ds
Example #13
def authorize():
    data = {
        "amount": 0.10,
        "currency": "RUB",
        "description": "test pay",
        "merchant_order_id": uuid.uuid4().hex,
        "client_ip": "173.65.34.137",
        "card_number": "4444444444444448",
        "expiration_year": "2019",
        "expiration_month": "06",
        "cvn": "000",
        "cardholder": "JOHN SMITH",
        "force_3ds": 1,
        "auto_clearing": 0,
        "authorized_timeout": 360,
        "authorize_timeout_action": "cancel",
        "notify_url": "https://hello.com/payment/notify",
        "success_url_3ds": "https://hello.com/payment/success",
        "fail_url_3ds": "https://hello.com/payment/failed",
    }
    data["payment_params"] = {
        "paymentLinkParams": {
            "shopId": 531189,
            "shopArticleId": 579836
        }
    }

    url = "https://demo-scrat.yamoney.ru:9094/gates/system/authorize"
    headers = {"Content-Type": "application/x-www-form-urlencoded"}
    timeout = 5
    certs = (get_abs_path(CERTIFICATE_PATH, acquirer_cert),
             get_abs_path(CERTIFICATE_PATH, acquirer_cert_key))
    try:
        response = requests.post(url,
                                 data=data,
                                 headers=headers,
                                 timeout=timeout,
                                 cert=certs)
    except Exception as ex:
        print(ex)
        return

    print('URL: ', url)
    print('Headers: ', headers)
    pprint(data)
    print("=" * 150)
    print(response.content)
    print("=" * 150)
    print(response.text)
Example #14
    def write_fonts_on_image(self):
        # all_fonts = read_from_file(file_path=get_abs_path('/fonts/fonts.txt'))
        all_fonts = self.fonts
        font_size = 20
        pin_width = 1500
        text_in_one_column = 70
        x = 20
        y = 20
        pin_height = int(20 * len(all_fonts) / 2.5)
        img = Img.make_blank_img(img_size=(pin_width, pin_height + 10),
                                 folder_path='/tmp/',
                                 color=(255, 255, 255))

        arr = np.arange(0, len(all_fonts), text_in_one_column)
        x_w = int(pin_width / len(arr))

        r = 1
        for i, font in enumerate(all_fonts):
            txt_obj = Text(txt=str('{}.  {}'.format(i, font)),
                           font_path=all_fonts[font],
                           font_size=font_size,
                           max_width=pin_width)
            y += (font_size + 8)
            if i / text_in_one_column >= r:
                r += 1
                x += x_w
                y = 20
            txt_obj.draw_text(img=img, loc=(x, y))
        img.write_img(file_path=get_abs_path('/fonts/fonts.jpg'))
        return True
Example #15
def read_nhd_categories():
    """
    read the nhd categories to include
    """
    nhd_category_file = get_abs_path(
        '../data/tables/nhd_categories_filtered.csv')
    df = pd.read_csv(nhd_category_file)
    nhd_cats = df['ID'].to_list()
    return nhd_cats
Example #16
    def retrieve_test(self, line_no):
        test_file_path = get_abs_path(self.test_file, __file__)
        with open(test_file_path) as f:
            source_code = f.read()

        tree = ast.parse(source_code)
        line_finder = test_finder.LineFinder(line_no=line_no)
        line_finder.visit(tree)

        return line_finder.path
Example #17
    def make_all_fonts(self, zip_file_path):
        font_dir = get_abs_path('/fonts')
        if not os.path.exists(font_dir):
            os.mkdir(font_dir)
        if len(os.listdir(font_dir)) == 0:
            with zipfile.ZipFile(zip_file_path, "r") as zip_ref:
                zip_ref.extractall(font_dir)

        font_files = glob.glob(font_dir + "/**/*.ttf", recursive=True)
        # map font name (file name without extension) to its full path
        font_dict = OrderedDict(
            (os.path.splitext(os.path.basename(path))[0], path)
            for path in font_files)
        f_path = get_abs_path("/fonts/font.txt")
        self.font_name_list = list(font_dict.keys())
        assert write_to_file(f_path, list(font_dict.keys()))
        return font_dict
Example #18
def save_db(temp):
    timestamp = int(time.time())
    current_time = get_current_time()
    pushed = 0
    # parameterized query avoids manual quoting and SQL-injection issues
    query = 'insert into measure values (null, ?, ?, ?, ?, ?)'
    try:
        conn = sqlite3.connect(get_abs_path() + '/pi-temp.db')
        conn.execute(query, (timestamp, current_time, ROOM, pushed, temp))
        conn.commit()
        conn.close()
    except Exception:
        logging.error('Error when saving measure in database')

def save_shap_val(file, name, SAVE_DIR, train_data, test_data):
    model = 'models/{}.pkl'.format(file)
    path = utils.get_abs_path(SAVE_DIR, model)
    print('model path: {}'.format(path))
    model = None
    if file == 'svm':
        model = utils.load_pickle(path, encoding=False)
    else:
        model = utils.load_pickle(path)
    features_l, importance_l = [], []
    if 'svm' in name:
        features_l, importance_l = get_shap('svm', model, train_data,
                                            test_data)
    elif 'xgb' in name:
        features_l, importance_l = get_shap('xgb', model, train_data,
                                            test_data)
    features = 'features/{}_shap_all_features.pkl'.format(name)
    path = utils.get_abs_path(SAVE_DIR, features)
    utils.save_pickle(features_l, path)
    scores = 'feature_importance/{}_shap_all_scores.pkl'.format(name)
    path = utils.get_abs_path(SAVE_DIR, scores)
    utils.save_pickle(importance_l, path)
Example #20
def spell_check(text: str) -> Tuple[int, int]:
    """Run spell_check.sh on the string given as argument.

    spell_check.sh by default returns two-element array
    [# incorrect words, # all words] for English documents.
    [-1, 0] otherwise.
    Elements are separated by \n.

    :param text: input string to the script
    :return: tuple of exactly two numbers returned by spell_check.sh"""
    p: sp.Popen \
        = sp.Popen([utils.get_abs_path('denormalization/spell_check.sh')],
                   stdout=PIPE, stdin=PIPE, stderr=sys.stderr)

    stdout_data: List[str] = p.communicate(input=text.encode('utf-8'))[0] \
        .decode('utf-8') \
        .strip() \
        .split('\n')

    return (int(stdout_data[0]), int(stdout_data[1]))
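
# Hypothetical usage sketch, assuming spell_check.sh is present at the expected path:
incorrect, total = spell_check("Thiss sentence has missspelled words")
if total > 0:
    error_rate = incorrect / total  # fraction of misspelled words
else:
    error_rate = None  # spell_check.sh returned [-1, 0] (non-English input)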
Example #21
def send(user, password, receiver, host='smtp.qq.com', port=25):
    sender = '*****@*****.**' % user
    content = MIMEMultipart('related')
    content['Subject'] = u'send embedded image'
    content['From'] = sender
    content['To'] = receiver

    body = '<html><img src="cid:logo.png"></html>'
    html = MIMEText(body, 'html', 'utf-8')
    content.attach(html)

    logo_filename = 'logo.png'
    mail_logo_path = get_abs_path(['static', 'img', logo_filename])
    fp = open(mail_logo_path, 'rb')
    img = MIMEImage(fp.read())
    fp.close()
    img.add_header('Content-ID', logo_filename)
    content.attach(img)

    smtp = smtplib.SMTP()
    smtp.connect(host=host, port=port)
    smtp.login(user=user, password=password)
    smtp.sendmail(sender, receiver, content.as_string())
    smtp.quit()
Example #23
def sheets_test():
    filepath = get_abs_path(['utils', 'sheets.xlsx'])
    print read_excel(filepath)
Example #24
        'na': args.na,
        'nb': args.nb,
        'nc': args.nc,
        'batch_norm': batch_norm,
        'drop_prob': args.drop_prob,
        'augmentation': True
    }

    cnn = CNN(layers,
              n_classes=n_classes,
              batch_size=128,
              l2_lambda=args.l2_lambda,
              learning_rate=args.learning_rate,
              add_scaling=args.scaling,
              data_params=data_params,
              models_dir=get_abs_path('../miniplaces/models'),
              config=config,
              img_width=100,
              img_height=100)
    train(cnn,
          train_inputs,
          train_labels,
          val_inputs,
          val_labels,
          verbose=1,
          train_generator=train_img_generator,
          test_generator=test_img_generator)

    # can't do this part; images are the wrong size (need to be 100 x 100 now)
    # preds = cnn.predict_proba(val_inputs)
    # acc1 = acc_at_k(1, preds, val_labels)
Example #25
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = utils.get_abs_path('../lmsite/media')

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/media/'

# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''

# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
Example #26
    def test_python_file_in_non_package_directory(self):
        test_file_path = get_abs_path("source_code.py", __file__)
        module_path = ModulePathFinder().find_path(test_file_path)
        self.assertEqual("source_code", module_path)
Example #27
    def test_find_method_in_python_file_in_package_directory(self):
        test_file_path = get_abs_path("package/source_code.py", __file__)
        test_path = test_finder.get_path(test_file_path, 7)
        self.assertEqual("package.source_code.TestMe.test_a", test_path)
Example #28
    aligned_att_weights, aligned_att_weights_len = get_aligned_att_weights(test_tokens, 'sst', 128, \
                                                                           'avg', layer_num, impt=True)
    relevant_tokens = get_relevant_features('sst', aligned_att_weights_len)
    save_files(SAVE_SST_DIR, 'bert', relevant_tokens, aligned_att_weights,
               explainer)
    print(len(aligned_att_weights), len(aligned_att_weights_len))
    '''
    BERT LIME
    '''
    fp = 'data/LIME_features/deception/new_feature_l.npy'
    sp = 'data/LIME_features/deception/new_scores_l.npy'
    features = np.load(fp)
    scores = np.load(sp)
    print(len(features), len(scores))
    tmp = 'features/bert_lime_all_features.pkl'
    path = utils.get_abs_path(SAVE_DECEPTION_DIR, tmp)
    utils.save_pickle(features, path)
    tmp = 'feature_importance/bert_lime_all_scores.pkl'
    path = utils.get_abs_path(SAVE_DECEPTION_DIR, tmp)
    utils.save_pickle(scores, path)

    fp = 'data/LIME_features/deception/new_feature_l.npy'
    sp = 'data/LIME_features/deception/new_scores_l.npy'
    features = np.load(fp)
    scores = np.load(sp)
    print(len(features), len(scores))
    tmp = 'features/bert_lime_all_features.pkl'
    path = utils.get_abs_path(SAVE_YELP_DIR, tmp)
    utils.save_pickle(features, path)
    tmp = 'feature_importance/bert_lime_all_scores.pkl'
    path = utils.get_abs_path(SAVE_YELP_DIR, tmp)
Example #29
    def test_find_module_in_python_file(self):
        test_file_path = get_abs_path("source_code.py", __file__)
        test_path = test_finder.get_path(test_file_path, 0)
        self.assertEqual("source_code", test_path)
Example #30
import utils

# pppoe
pppoe_service = 'isp'
pppoe_username = '******'
pppoe_password = '******'

# vpn-l2tp
vpn_l2tp_username = '******'
vpn_l2tp_password = '******'

# vpn-l2tp-ipsec
vpn_l2tp_ipsec_key = 'test'

# vpn-openconnect-cert
vpn_openconnect_cert_cacert = utils.get_abs_path(
) + '/../dockerfiles/vpn-openconnect-cert/etc/ocserv/ca.crt'
vpn_openconnect_cert_clientcert = utils.get_abs_path(
) + '/../dockerfiles/vpn-openconnect-cert/etc/ocserv/client.crt'
vpn_openconnect_cert_clientkey = utils.get_abs_path(
) + '/../dockerfiles/vpn-openconnect-cert/etc/ocserv/client.key'

# vpn-openvpn-password
vpn_openvpn_password_cacert = utils.get_abs_path(
) + '/../dockerfiles/vpn-openvpn-password/etc/openvpn/easy-rsa/pki/ca.crt'
vpn_openvpn_password_username = '******'
vpn_openvpn_password_password = '******'

# vpn-openvpn-tls
vpn_openvpn_tls_cacert = utils.get_abs_path(
) + '/../dockerfiles/vpn-openvpn-tls/etc/openvpn/easy-rsa/pki/ca.crt'
vpn_openvpn_tls_clientcert = utils.get_abs_path(
Example #31
        return pin_width, pin_height


def read_img_files(file_path):
    imgs = []
    valid_images = [".jpg", ".gif", ".png", ".tga"]
    for f in os.listdir(file_path):
        ext = os.path.splitext(f)[1]
        if ext.lower() not in valid_images:
            continue
        imgs.append(os.path.join(file_path, f))
    return imgs


if __name__ == '__main__':
    imgs_folder = get_abs_path('/pics')
    imgs_loaded = read_img_files(imgs_folder)
    matrix_dim = (1, 3)
    imgs_loaded = imgs_loaded[:matrix_dim[0] * matrix_dim[1]]
    # all_fonts = Text(zip_file_path=get_abs_path('/Font Pack.zip'),font_index=1)
    # Text.write_fonts_on_image(all_fonts)
    # imags_folder where all intermediate files get saved
    pin = Pin(imgs=imgs_loaded,
              folder_path=imgs_folder,
              product_header=[
                  'TOP FOUR PRODUCTS', 'CHECK THEM', 'TOP FOUR PRODUCTS',
                  'CHECK THEM'
              ],
              matrix_dim=matrix_dim,
              font_index=20)
    res = pin.make_collage()
Example #32
def sheets_test():
    filepath = get_abs_path(['utils', 'sheets.xlsx'])
    print(read_excel(filepath))
Example #33
    def __init__(self, zip_file_path=None):
        self.font_name_list = None
        if zip_file_path is None:
            zip_file_path = get_abs_path('/Font Pack.zip')
        self.zip_path = zip_file_path
        self.fonts = self.make_all_fonts(self.zip_path)