Example No. 1
def backup_db(ctx, dbname="zam"):
    create_directory(ctx, "/var/backups/zam", owner="postgres")
    backup_filename = f"/var/backups/zam/postgres-dump-{timestamp()}.sql"
    run_as_postgres(
        ctx,
        f"pg_dump --dbname={dbname} --create --encoding=UTF8 --file={backup_filename}",
    )
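This task assumes an invoke/Fabric-style `ctx` plus project helpers such as `create_directory`, `timestamp`, and `run_as_postgres`. A minimal sketch of what a `create_directory(ctx, path, owner=...)` helper could look like, assuming a Fabric 2 `Connection`-like context with a `sudo` method; the body below is an illustration, not the project's actual implementation:

def create_directory(ctx, path, owner=None):
    # Hypothetical helper matching the call sites in these examples.
    ctx.sudo(f"mkdir -p {path}")  # idempotent: no error if the path already exists
    if owner is not None:
        ctx.sudo(f"chown {owner}: {path}")  # hand the directory to its owner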
Example No. 2
    def __init__(self):
        self.lfsbuilder_src_directory = os.path.dirname(
            os.path.realpath(__file__))
        self.temporal_folder = os.path.join(self.lfsbuilder_src_directory,
                                            "tmp")
        self.basic_parser = None
        self.build_parser = None
        self.download_parser = None
        self.xml_parser = None
        self.cli = cli.Cli()
        self.actual_owner_uid = pwd.getpwuid(os.stat(__file__).st_uid).pw_name
        self.actual_owner_gid = grp.getgrgid(os.stat(__file__).st_gid).gr_name

        # Parse sys.args and use dispatcher pattern to
        # invoke method named as command
        self.basic_parser = self.cli.configure_basic_parser()
        self.all_args = self.basic_parser.parse_args()
        self.build_args = None
        self.download_args = None
        self.xml_args = None

        # Set boolean configuration flags
        self.set_config_option(self.all_args)

        if not hasattr(self, self.all_args.command[0]):
            printer.warning(
                "Unknown command '{c}'".format(c=self.all_args.command[0]))
            self.basic_parser.print_help()
            sys.exit(1)

        # .- Create 'tmp' directory
        tools.create_directory(self.temporal_folder)

        # Run command
        getattr(self, self.all_args.command[0])()
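The constructor above dispatches on the first CLI argument via `getattr`, as its comment notes. A minimal standalone sketch of that dispatcher pattern, with illustrative names that are not from the project:

class App:
    def build(self):
        print("building...")

    def download(self):
        print("downloading...")

    def dispatch(self, command):
        # Same check-then-call shape as the constructor above
        if not hasattr(self, command):
            raise SystemExit("Unknown command '{c}'".format(c=command))
        getattr(self, command)()

App().dispatch("build")  # prints: building...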
Example No. 3
def copy_to_temp_directory(path_folder: str) -> str:
    """copy directory to OS temp directory
    
    Arguments:
        path_folder {str} -- directory path
    
    Returns:
        str -- temp directory path
    """

    random_number = gen_random_bit_number(256)

    # generate hash for temp folder
    hash_folder = get_hash(random_number)

    # get os temp directory
    temp_dir = tools.get_temp_directory()

    # create temp directory
    temp_dir = os.path.join(temp_dir, hash_folder)

    try:
        # create directory
        tools.create_directory(temp_dir)

        # copy directory to OS temp directory
        tools.copy_directory(path_folder, temp_dir)

    except IOError as error:
        print(error)
        return False
    return temp_dir
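For comparison, the same effect (a uniquely named copy under the OS temp directory) can be obtained from the standard library alone; this hedged equivalent is not part of the original project:

import os
import shutil
import tempfile

def copy_to_temp_directory_stdlib(path_folder: str) -> str:
    # mkdtemp already creates a unique directory under the OS temp location
    temp_dir = tempfile.mkdtemp()
    target = os.path.join(temp_dir, os.path.basename(path_folder))
    shutil.copytree(path_folder, target)
    return target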
Example No. 4
def copy_to_organized_directory(dir_src: str, dir_dst: str) -> bool:
    """copy source directory to organized directory

    Get content from source directory and copy to a new directory
    into new folders with the extentions name 
    
    Arguments:
        dir_src {str} -- source directory
        dir_dst {str} -- destination directory
    
    Raises:
        IOError: path dir_src not found
        IOError: path dir_dst not found
    
    Returns:
        bool -- if successful return True
    """

    if not checks.check_exists(dir_src):
        raise IOError("Directory '{0}' not found.".format(dir_src))

    if not checks.check_exists(dir_dst):
        raise IOError("Directory '{0}' not found.".format(dir_dst))

    # copy files from the source directory into per-extension folders
    list_files = get_files(dir_src)

    for file in list_files:
        basename = os.path.basename(file)
        search = re.search(r"\.([A-Za-z0-9]+)$", basename)

        if search:
            extension = search.group(1)
            directory_extension = os.path.join(dir_dst, extension)

            if not checks.check_exists(directory_extension):
                tools.create_directory(directory_extension)

            # build target directory
            target_directory = os.path.join(directory_extension, basename)

            # copy file
            tools.copy_file(file, target_directory)

    return True
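The regex on the basename decides the target subfolder; a quick illustration of what it captures:

import re

for name in ("photo.JPG", "archive.tar.gz", "README"):
    search = re.search(r"\.([A-Za-z0-9]+)$", name)
    print(name, "->", search.group(1) if search else None)
# photo.JPG -> JPG
# archive.tar.gz -> gz  (only the final suffix is captured)
# README -> None  (no extension, so the file is skipped)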
Example No. 5
    def build(self):
        """
        'build' command.
        """
        # 'build' command. It requires 'sudo' privileges to mount/umount directories
        # and run admin commands such as 'chroot', 'losetup' and others.
        if os.getuid() != 0:
            msg = "'build' command requires root privileges. Please try again using 'sudo'"
            printer.error(msg)

        # Parse command line arguments
        self.build_parser = self.cli.configure_build_parser()
        self.build_args = self.build_parser.parse_args(
            self.all_args.command[1:])

        # Set boolean configuration flags once arguments get actually parsed.
        self.set_config_option(self.build_args)

        # .- Check boot manager and meson builder combination
        if tools.is_element_present(self.build_args.builders_list, "system") is True and \
           tools.check_meson_builder_combination(m=config.INCLUDE_MESON_BUILDER,
                                                 sv=config.SYSV,
                                                 sd=config.SYSTEMD) is False:
            printer.error(
                "You cannot use that combination of 'boot_manager' and 'meson builder'"
            )

        # Create 'config.BASE_DIRECTORY' if necessary
        tools.create_directory(config.BASE_DIRECTORY)

        # Create and build 'builders_list'
        for builder in self.build_args.builders_list:
            os.chdir(self.lfsbuilder_src_directory)
            # Generate builder object from BuilderGenerator
            bg = builders.BuilderGenerator(builder)
            o = bg.get_builder_reference()
            del bg

            # Run the real builder
            o.set_attributes()

            o.build()
            o.clean_workspace()
            del o

        # Set 'lfsbuilder_src_directory' permission back to the original 'uid' and 'gid'
        tools.set_recursive_owner_and_group(self.lfsbuilder_src_directory,
                                            self.actual_owner_uid,
                                            self.actual_owner_gid)
Example No. 6
def ProcessHandler(stg):
    # setting up the path and grabbing the files
    analysis_path = tools.create_directory(stg['DP'])
    task = tools.Multiprocesser(data_dir=stg['DP'], pattern_a='frame*.jpg')
    # finding background and reflections in the images
    if stg['BR'] == 'on':
        print('preprocessing:')
        bg_a, bg_b = task.find_background(50, 10, 6)
        reflection = tools.mark_reflection(
            150, task.files_a, os.path.join(stg['DP'], 'reflection.tif'))
    else:
        bg_a, bg_b, reflection = None, None, None
    # start processing data
    print('main process:\nprocessing images...')
    task.n_files = 4
    main_process = partial(ProcessPIV,
                           bga=bg_a,
                           bgb=bg_b,
                           reflection=reflection,
                           stg=stg)
    task.run(func=main_process, n_cpus=6)
    print('- done processing')
    '''
    fig, ax = plt.subplots(2,2)
    img = tools.imread(task.files_b[0])
    bg = bg_b
    ax[0,0].imshow(img, cmap='gray')
    ax[0,1].imshow(bg, cmap='gray')
    ax[1,0].imshow(reflection, cmap='gray')
    img = img - bg
    img[reflection==255] = 0
    ax[1,1].imshow(img, cmap='gray')
    plt.show()
    
    img = tools.imread(task.files_b[0])
    plt.imshow(img, cmap='gray')
    plt.show()
    '''
    return bg_a
Example No. 7
def deploy_repondeur(
    ctx,
    branch="master",
    message="",
    session_secret="",
    auth_secret="",
    wipe=False,
    dbname="zam",
    dbuser="******",
    dbpassword="******",
):
    if not session_secret:
        session_secret = retrieve_secret_from_config(ctx, "session_secret")
    if not session_secret:
        session_secret = uuid4()
        print(f"Initializing session_secret to {session_secret}")

    if not auth_secret:
        auth_secret = retrieve_secret_from_config(ctx, "auth_secret")
    if not auth_secret:
        auth_secret = uuid4()
        print(f"Initializing auth_secret to {auth_secret}")

    hostname = ctx.run("hostname").stdout.strip()
    environment = hostname.split(".", 1)[0]
    menu_badge_label = environment[4:] if environment.startswith("zam-") else ""
    menu_badge_color = BADGE_COLORS.get(menu_badge_label, "#999999")

    deploy_id = rollbar_deploy_start(
        ctx, branch, environment, comment=f"[{branch}] {message}"
    )

    try:
        install_locale(ctx, "fr_FR.utf8")
        create_user(ctx, name=user, home_dir="/srv/repondeur")
        clone_repo(
            ctx,
            repo="https://github.com/betagouv/zam.git",
            branch=branch,
            path="/srv/repondeur/src",
            user=user,
        )

        # Stop workers (if running) to free up some system resources during deployment
        stop_worker_service(ctx, warn=True)

        create_virtualenv(ctx, venv_dir=venv_dir, user=user)
        install_requirements(ctx, app_dir=app_dir, venv_dir=venv_dir, user=user)

        create_directory(ctx, "/var/cache/zam/http", owner=user)

        setup_config(
            ctx,
            app_dir=app_dir,
            user=user,
            context={
                "db_url": f"postgres://{dbuser}:{dbpassword}@localhost:5432/{dbname}",
                "environment": environment,
                "branch": branch,
                "session_secret": session_secret,
                "auth_secret": auth_secret,
                "rollbar_token": ctx.config["rollbar_token"],
                "menu_badge_label": menu_badge_label,
                "menu_badge_color": menu_badge_color,
            },
        )

        # Also stop webapp (if running) to release any locks on the DB
        stop_webapp_service(ctx, warn=True)

        if wipe:
            wipe_db(ctx, dbname=dbname)
        setup_db(ctx, dbname=dbname, dbuser=dbuser, dbpassword=dbpassword)
        migrate_db(ctx, app_dir=app_dir, venv_dir=venv_dir, user=user)

        # Initialize email whitelist
        if not whitelist_list(ctx):
            whitelist_add(
                ctx,
                pattern=DEFAULT_EMAIL_WHITELIST_PATTERN,
                comment="Default allowed email pattern",
            )

        setup_webapp_service(ctx)
        setup_worker_service(ctx)

        reset_data_locks(ctx, app_dir=app_dir, venv_dir=venv_dir, user=user)

        # Load data into Redis cache
        load_data(ctx, app_dir=app_dir, venv_dir=venv_dir, user=user)

        # Update dossiers
        update_dossiers(ctx)

        # Start webapp and workers again
        start_webapp_service(ctx)
        start_worker_service(ctx)

    except Exception:
        rollbar_deploy_update(ctx.config["rollbar_token"], deploy_id, status="failed")
        raise
    else:
        rollbar_deploy_update(
            ctx.config["rollbar_token"], deploy_id, status="succeeded"
        )
Example No. 8
import tools
import string
from libs import cloudflare
from tm_libs import dom_parser
import cookielib
import json
from StringIO import StringIO
import gzip
import main_scrape

net = Net()
addon_id = kodi.addon_id
addon = Addon(addon_id, sys.argv)

#COOKIE STUFF
tools.create_directory(tools.AOPATH, "All_Cookies/NineMovies")
cookiepath = xbmc.translatePath(
    os.path.join('special://home', 'addons', addon_id, 'All_Cookies',
                 'NineMovies/'))
cookiejar = os.path.join(cookiepath, 'cookies.lwp')
cj = cookielib.LWPCookieJar()
cookie_file = os.path.join(cookiepath, 'cookies.lwp')

timeout = int(kodi.get_setting('scraper_timeout'))


def __enum(**enums):
    return type('Enum', (), enums)


MAX_RESPONSE = 1024 * 1024 * 2
Example No. 9
from tm_libs import dom_parser
from libs import log_utils
import tools
from libs import cloudflare
import cookielib
from StringIO import StringIO
import gzip
import main_scrape
import base64
addon_id = kodi.addon_id

timeout = int(kodi.get_setting('scraper_timeout'))

tools.create_directory(tools.AOPATH, "All_Cookies/Putlocker")
cookiepath = xbmc.translatePath(
    os.path.join('special://home', 'addons', addon_id, 'All_Cookies',
                 'Putlocker/'))
cookiejar = os.path.join(cookiepath, 'cookies.lwp')
cj = cookielib.LWPCookieJar()
cookie_file = os.path.join(cookiepath, 'cookies.lwp')


def __enum(**enums):
    return type('Enum', (), enums)


MAX_RESPONSE = 1024 * 1024 * 2
FORCE_NO_MATCH = '***FORCE_NO_MATCH***'
QUALITIES = __enum(LOW='Low',
Example No. 10
    def extract_source_code(self):
        """
        Find and extract source tarball for the component.
        """
        # We look for a tar file
        pattern = "{n}*.tar.*".format(n=self.component_data_dict["package_name"])

        # Use 'package_version' in pattern if it is not None
        if self.component_data_dict["version"] is not None:
            pattern = "{n}*{v}*.tar.*".format(n=self.component_data_dict["package_name"],
                                              v=self.component_data_dict["version"])

        source_code_filename = tools.find_file(self.component_data_dict["sources_directory"],
                                               pattern)

        # Try a second run if 'source_code_filename' is None using only name as pattern.
        if source_code_filename is None:
            pattern = "{n}*.tar.*".format(n=self.component_data_dict["package_name"])
            source_code_filename = tools.find_file(self.component_data_dict["sources_directory"],
                                                   pattern)

        # Try to find a zip file in case the tar file was not found
        if source_code_filename is None:
            pattern = "{n}*.zip*".format(n=self.component_data_dict["package_name"])

            # Use 'package_version' in pattern if it is not None
            if self.component_data_dict["version"] is not None:
                pattern = "{n}*{v}*.zip*".format(n=self.component_data_dict["package_name"],
                                                  v=self.component_data_dict["version"])

            source_code_filename = tools.find_file(self.component_data_dict["sources_directory"],
                                                   pattern)

            # Try a second run if 'source_code_filename' is None using only name as pattern.
            if source_code_filename is None:
                pattern = "{n}*.zip*".format(n=self.component_data_dict["package_name"])
                source_code_filename = tools.find_file(
                    self.component_data_dict["sources_directory"],
                    pattern
                )

        # Give error if None
        if source_code_filename is None:
            msg = "Can't find source code file for '{n}' with pattern: '{p}'"
            msg = msg.format(n=self.component_data_dict["name"], p=pattern)
            printer.error(msg)

        # Extract
        tools.extract(source_code_filename)

        # We get the name of the extracted directory
        pattern = "{n}*".format(n=self.component_data_dict["package_name"])

        # Use 'package_version' in pattern if it is not None
        if self.component_data_dict["version"] is not None:
            pattern = "{n}*{v}*".format(n=self.component_data_dict["package_name"],
                                        v=self.component_data_dict["version"])

        # Find directory using pattern
        self.component_data_dict["extracted_directory"] = tools.find_directory(
            self.component_data_dict["sources_directory"],
            pattern)

        # Try a second run if 'extracted_directory' is None using only name as pattern.
        if self.component_data_dict["extracted_directory"] is None:
            pattern = "{n}*".format(n=self.component_data_dict["package_name"])
            self.component_data_dict["extracted_directory"] = tools.find_directory(
                self.component_data_dict["sources_directory"],
                pattern
            )

        # If found (on either run), store the real path
        if self.component_data_dict["extracted_directory"] is not None:
            self.component_data_dict["extracted_directory"] = os.path.realpath(
                self.component_data_dict["extracted_directory"]
            )

        # Fail if 'extracted_directory' was not found
        if self.component_data_dict["extracted_directory"] is None:
            msg = "Can't find extracted directory for '{n}' with pattern: '{p}'"
            msg = msg.format(n=self.component_data_dict["name"], p=pattern)
            printer.error(msg)

        # Generate build_dir if necessary.
        if self.component_data_dict["require_build_dir"] is True:
            # Generate and save 'build_directory' path
            value = os.path.realpath(
                os.path.join(
                    self.component_data_dict["extracted_directory"],
                    self.build_directory_name
                )
            )
            tools.add_to_dictionary(self.component_data_dict,
                                    "build_directory_path",
                                    value,
                                    concat=False)

            # Create directory
            tools.create_directory(self.component_data_dict["build_directory_path"])
        else:
            # If not, build component into the extracted directory
            tools.add_to_dictionary(self.component_data_dict,
                                    "build_directory_path",
                                    self.component_data_dict["extracted_directory"],
                                    concat=False)

        # Set directory owner if we are building the 'toolchain'
        if self.component_data_dict["builder_name"] == "toolchain":
            tools.set_recursive_owner_and_group(self.component_data_dict["extracted_directory"],
                                                self.component_data_dict["run_as_username"])
Example No. 11
import tools
from libs import cloudflare
from libs import log_utils
from tm_libs import dom_parser
import cookielib
from StringIO import StringIO
import gzip
import main_scrape
import base64
addon_id = kodi.addon_id


timeout = int(kodi.get_setting('scraper_timeout'))


tools.create_directory(tools.AOPATH, "All_Cookies/Putlocker")
cookiepath = xbmc.translatePath(os.path.join('special://home','addons',addon_id,'All_Cookies','Putlocker/'))
cookiejar = os.path.join(cookiepath,'cookies.lwp')
cj = cookielib.LWPCookieJar()
cookie_file = os.path.join(cookiepath,'cookies.lwp')



def __enum(**enums):
    return type('Enum', (), enums)

MAX_RESPONSE = 1024 * 1024 * 2
FORCE_NO_MATCH = '***FORCE_NO_MATCH***'
QUALITIES = __enum(LOW='Low', MEDIUM='Medium', HIGH='High', HD720='HD720', HD1080='HD1080')

Example No. 12
import tools
from libs import kodi
from tm_libs import dom_parser
from libs.trans_utils import i18n
from libs import log_utils
from t0mm0.common.net import Net
from t0mm0.common.addon import Addon
import main_scrape
from urllib2 import Request, build_opener, HTTPCookieProcessor, HTTPHandler
import cookielib
net = Net()
addon_id = kodi.addon_id
addon = Addon(addon_id, sys.argv)

#COOKIE STUFF
tools.create_directory(tools.AOPATH, "All_Cookies/MerDb")
cookiepath = xbmc.translatePath(
    os.path.join('special://home', 'addons', addon_id, 'All_Cookies',
                 'MerDb/'))
cookiejar = os.path.join(cookiepath, 'cookies.lwp')
cj = cookielib.LWPCookieJar()
cookie_file = os.path.join(cookiepath, 'cookies.lwp')

base_url = kodi.get_setting('merdb_base_url')


def LogNotify(title, message, times, icon):
    xbmc.executebuiltin("XBMC.Notification(" + title + "," + message + "," +
                        times + "," + icon + ")")

Example No. 13
def run(path_folder, create_copy=True):
    """main script run 
    
    Arguments:
        path_folder {str} -- folder to organize
    
    Keyword Arguments:
        create_copy {bool} -- create a folder copy (default: {True})
    
    Raises:
        IOError: if folder doesn't exists
    """

    print(f"\n[Junkfile] - Parameters: ")
    print(f"[Junkfile] -   path_folder: {path_folder}")
    print(f"[Junkfile] -   create_copy: {create_copy}\n")
    print("[Junkfile] - executing...")
    
    # check if path exits
    if not checks.check_exists(path_folder):
        raise Exception("Please choose a directory.")
        
    # check if is not inthe blacklist directory
    if path_folder in get_blacklist_directories():
        raise Exception("Directory may not be selected. Try another directory")
    
    # remove last slash
    path_folder = sanitize_path(path=path_folder)

    # directory context manager: check that the path may be changed into,
    # set it as the current directory, and restore the previous current
    # directory at the end
    with current_directory_handler(path_folder):

        # copy the folder to the OS temp directory
        temp_dir = copy_to_temp_directory(path_folder=path_folder)

        # make path
        new_directory = build_directory_path_with_date(path_folder=path_folder)

        # create a new folder
        if not tools.create_directory(new_directory):
            raise Exception(f"Could not create new directory: '{new_directory}'")

        # copy to new folder
        copy_to_organized_directory(dir_src=temp_dir, dir_dst=new_directory)

        # delete folders
        if checks.check_exists(temp_dir):
            tools.delete_directory(temp_dir)

        # check if is a copy
        if not create_copy:
            tools.delete_directory(path_folder)
            tools.move_directory(new_directory, path_folder)

    if create_copy:
        print(f"[Junkfile] -   Created new directory: {new_directory}")

    print("[Junkfile] - executed!")

    return True
Example No. 14
                                       default=False,
                                       action='store_true')
args = parser.parse_args()

debug.info("***********************************")

# ---------------------------------------------
# -- check input
# ---------------------------------------------
if args.verbose:
	debug.set_level(6)
if args.input is None or args.input == "":
	debug.error("must set an input directory")

tools.create_directory(args.output)

debug.info("==================================================================================================")
debug.info("== Preprocess corpus data: " + args.input + " to " + args.output)
debug.info("==================================================================================================")

debug.info("Get list of corpus files:")
audio_corpus_element = tools.get_list_of_file_in_path(args.input, ["*.json"], recursive=True)

debug.info("Corpus count " + str(len(audio_corpus_element)) + " element(s)")
elem_id = 0
for elem in audio_corpus_element:
	debug.info("---------------------------[ " + str(elem_id) + " / " + str(len(audio_corpus_element)) + " ]---------------------------------------")
	elem_id += 1
	debug.info("Element: " + elem)
	with open(elem) as file:
Example No. 15
toolsDirectory = cosmoDir + 'tools/'
sys.path.extend( [ toolsDirectory, modulesDir ] )
# from load_data_enzo_old import load_snapshot_enzo, load_snapshot_enzo_yt
from generate_ics_particles_functions import *
from generate_ics_grid_functions import *
from domain_decomposition import get_domain_block, get_domain_parent
from tools import create_directory

# dataDir = '/home/bruno/Desktop/data/'
# dataDir = '/raid/bruno/data/'
# dataDir = '/gpfs/alpine/proj-shared/ast149/'
dataDir = '/data/groups/comp-astro/bruno/'
enzoDir = dataDir + 'cosmo_sims/enzo/512_hydro_50Mpc/'
inDir = enzoDir
outputDir = dataDir + 'cosmo_sims/512_hydro_50Mpc/ics_16/'
create_directory( outputDir )
nSnap = 0

# Load Enzo File
snapKey = '{0:03}'.format(nSnap)
inFileName = 'DD0{0}/data0{0}'.format( snapKey)
ds = yt.load( inDir + inFileName )
data = ds.all_data()
h = ds.hubble_constant
current_z = float(ds.current_redshift)
current_a = 1./(current_z + 1)

# Set Domain parameters
Lbox = 50000.
Example No. 16
def run_post_steps(component_data_dict, parent_function):

    parent_function()

    # Get required paths
    ssh_filename = os.path.join(
        component_data_dict["lfsbuilder_src_directory"], "recipes",
        "components", "openssh", "files",
        component_data_dict["openssh_public_key_filename"])

    # .- get $HOME directory path
    if component_data_dict["openssh_username"] == "root":
        # It can be dangerous!
        printer.warning("WARNING: will configure SSH access for 'root'")
        home_directory = os.path.join(config.BASE_DIRECTORY, "root")

    elif component_data_dict[
            "openssh_username"] == "config.NON_PRIVILEGED_USERNAME":
        # Update dictionary value
        tools.add_to_dictionary(component_data_dict,
                                "openssh_username",
                                config.NON_PRIVILEGED_USERNAME,
                                concat=False)
        # Set home directory
        home_directory = os.path.join(config.BASE_DIRECTORY, "home",
                                      config.NON_PRIVILEGED_USERNAME)

    else:
        home_directory = os.path.join(config.BASE_DIRECTORY, "home",
                                      component_data_dict["openssh_username"])

    # .- '$HOME/.ssh' path
    ssh_config_path = os.path.join(home_directory, ".ssh")

    # .- destination file path
    ssh_destination_filename = os.path.join(
        ssh_config_path, component_data_dict["openssh_public_key_filename"])

    # .- authorized_keys
    authorized_keys = os.path.join(ssh_config_path, "authorized_keys")

    if os.path.exists(ssh_filename) is False:
        # Do not configure: the SSH public key does not exist.
        msg = """WARNING: SSH access will not be configured because \
the provided public key file '{k}' does not exist."""
        msg = msg.format(k=component_data_dict["openssh_public_key_filename"])
        printer.warning(msg)

    elif tools.check_chroot_user_exists(
            component_data_dict["openssh_username"]) is False:
        # Do not configure: the SSH username does not exist.
        msg = """WARNING: SSH access will not be configured because \
the provided username '{u}' does not exist."""
        msg = msg.format(u=component_data_dict["openssh_username"])
        printer.warning(msg)

    elif os.path.exists(home_directory) is False:
        # Do not configure: the SSH username's home directory does not exist.
        msg = """WARNING: SSH access will not be configured because \
the home directory '{h}' does not exist."""
        msg = msg.format(h=home_directory)
        printer.warning(msg)

    else:
        msg = "Installing provided SSH public key '{k}' for username '{u}'"
        msg = msg.format(k=component_data_dict["openssh_public_key_filename"],
                         u=component_data_dict["openssh_username"])
        printer.substep_info(msg)

        # .- create 'ssh_config_path' directory
        tools.create_directory(ssh_config_path)

        # .- copy public key file
        tools.copy_file(ssh_filename, ssh_destination_filename)

        # .- add to authorized keys
        header = "# --- {f} ---".format(
            f=component_data_dict["openssh_public_key_filename"])
        tools.add_text_to_file(authorized_keys, header)

        tools.add_text_to_file(authorized_keys, tools.read_file(ssh_filename))

        # .- get 'UID' and 'GID' values to set permission
        etc_passwd_values = tools.get_uid_gid_chroot_username(
            component_data_dict["openssh_username"])

        # .- set 'ssh_config_path' permission
        tools.set_numeric_recursive_owner_and_group(ssh_config_path,
                                                    etc_passwd_values["uid"],
                                                    etc_passwd_values["gid"])
def execute_pipeline(parameters):
    # Create the workspace directory
    tools.create_directory(parameters["workspace"])

    # Merge files
    parameters["merge"]["merged_file"] = os.path.join(parameters["workspace"], parameters["merge"]["merged_file"])
    if parameters["merge"]["do"]:
        command = MERGING_COMMAND%parameters["merge"]
        print "Issuing >> ", command
        os.system(command)
        if parameters["merge"]["delete_parts"]:
            os.system("rm %s*"%(os.path.join(parameters["merge"]["working_directory"], parameters["merge"]["trajectory_prefix"])))
        if parameters["merge"]["compress_logs"]:
            os.system("tar -zcvf %(folder)s/logs.tar.gz %(folder)s/log_* --remove-files"%({"folder":os.path.join(parameters["merge"]["working_directory"])}))
    
    # Perform all analyses
    if parameters["analyze"]["do"]:
        args = ["-i", parameters["merge"]["merged_file"]]
        
        if "sasa_rgyr" in parameters["analyze"]:
            if parameters["analyze"]["sasa_rgyr"]["sasa"]:
                args.append("--sasa-vmd")
            if parameters["analyze"]["sasa_rgyr"]["rgyr"]:
                args.append("--rgyr-vmd")
            if parameters["analyze"]["sasa_rgyr"]["selection"] != "":
                args.append("--vmd-sel")
                args.append('"%s"'%parameters["analyze"]["sasa_rgyr"]["selection"])
                
        if parameters["analyze"]["rmsf"]:
            args.append("--rmsf")
            
        if "acceptance" in parameters["analyze"]:
            args.append("--report")
            args.append(parameters["analyze"]["acceptance"]["report_glob"])
            args.append("--report-dir")
            args.append(parameters["analyze"]["acceptance"]["working_directory"])
        
        command = "python %s %s"%(parameters["analyze"]["script_location"], 
                           " ".join(args))
        print "Issuing >> ", command
        os.system(command)
    
    if parameters["compare"]["do"]:
        try:
            selection = parameters["analyze"]["sasa_rgyr"]["selection"]
        except KeyError:
            selection = "all"
            
        base_path_for_sasa_rgyr = "%s.%s"%(parameters["merge"]["merged_file"],
                              selection.replace(" ","_"))
        if "sasa" in parameters["compare"]:
            parameters["compare"]["sasa"]["input"] = "%s.sasa"%(base_path_for_sasa_rgyr)
            command = "python %(script_location)s %(reference)s %(input)s save"%(parameters["compare"]["sasa"])
            print "Issuing >> ", command
            os.system(command)
            
        if "rgyr" in parameters["compare"]:
            parameters["compare"]["rgyr"]["input"] = "%s.rgyr"%(base_path_for_sasa_rgyr)
            command = "python %(script_location)s %(reference)s %(input)s save"%(parameters["compare"]["rgyr"])
            print "Issuing >> ", command
            os.system(command)
        
        if "rmsf" in parameters["compare"]:
            parameters["compare"]["rmsf"]["input"] = "%s.rmsf"%(parameters["merge"]["merged_file"])
            command = "python %(script_location)s %(reference)s %(input)s save"%(parameters["compare"]["rmsf"])
            print "Issuing >> ", command
            os.system(command)
            
        if "energy" in parameters["compare"]:
            parameters["compare"]["energy"]["input"] = "%s.ener"%(parameters["merge"]["merged_file"])
            command = "python %(script_location)s %(input)s %(skip)d"%(parameters["compare"]["energy"]) 
            print "Issuing >> ", command
            os.system(command)
Example No. 18
from libs import kodi
from tm_libs import dom_parser
from libs import log_utils
from t0mm0.common.net import Net
from t0mm0.common.addon import Addon
import tools
import string
import main_scrape
from urllib2 import Request, build_opener, HTTPCookieProcessor, HTTPHandler
import cookielib
net = Net()
addon_id = kodi.addon_id
addon = Addon(addon_id, sys.argv)

#COOKIE STUFF
tools.create_directory(tools.AOPATH, "All_Cookies/Afdah")
cookiepath = xbmc.translatePath(
    os.path.join('special://home', 'addons', addon_id, 'All_Cookies',
                 'Afdah/'))
cookiejar = os.path.join(cookiepath, 'cookies.lwp')
cj = cookielib.LWPCookieJar()
cookie_file = os.path.join(cookiepath, 'cookies.lwp')


def __enum(**enums):
    return type('Enum', (), enums)


FORCE_NO_MATCH = '***FORCE_NO_MATCH***'
QUALITIES = __enum(LOW='Low',
                   MEDIUM='Medium',
Example No. 19
from tm_libs import dom_parser
from libs.trans_utils import i18n
from libs import log_utils
from t0mm0.common.net import Net
from t0mm0.common.addon import Addon
import urlresolver
import main_scrape
from urllib2 import Request, build_opener, HTTPCookieProcessor, HTTPHandler
import cookielib
net = Net()
addon_id=kodi.addon_id
addon = Addon(addon_id, sys.argv)
ADDON = xbmcaddon.Addon(id=kodi.addon_id)

#COOKIE STUFF
tools.create_directory(tools.AOPATH, "All_Cookies/PrimeWire")
cookiepath = xbmc.translatePath(os.path.join('special://home','addons',addon_id,'All_Cookies','PrimeWire/'))
cookiejar = os.path.join(cookiepath,'cookies.lwp')
cj = cookielib.LWPCookieJar()
cookie_file = os.path.join(cookiepath,'cookies.lwp')

base_url = kodi.get_setting('primewire_base_url')
#base_url = 'http://www.primewire.ag/'


def LogNotify(title, message, times, icon):
    xbmc.executebuiltin("XBMC.Notification(" + title + "," + message + "," + times + "," + icon + ")")

def OPEN_URL(url):
    req = urllib2.Request(url)
    req.add_header('User-Agent', 'Mozilla/5.0 (Linux; U; Android 4.2.2; en-us; AFTB Build/JDQ39) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30')
Example No. 20
def Network_config(class_num=4,
                   epoch=200,
                   initial_epoch=0,
                   batch_size=32,
                   train_data=None,
                   train_label=None,
                   test_data=None,
                   test_label=None,
                   fold=0):
    adam = Adam(lr=0.005, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.000)
    sgd = SGD(lr=0.001, momentum=0.9, decay=0.0, nesterov=False)
    K.set_learning_phase(1)
    base_model = VGG16(input_tensor=Input(shape=(224, 224, 3)),
                       weights='imagenet',
                       include_top=False)

    x = base_model.output
    x = Flatten()(x)
    x = Dense(512, activation='relu')(x)
    x = BatchNormalization()(x)
    x = Dense(512, activation='relu')(x)
    x = BatchNormalization()(x)
    predictions = Dense(class_num, activation='softmax')(x)

    # this is the model we will train
    model = Model(inputs=base_model.input, outputs=predictions)
    for layer in (base_model.layers):
        layer.trainable = False
        if layer.name.startswith('bn'):
            layer.call(layer.input, training=False)

    model.compile(optimizer=adam,
                  loss='categorical_crossentropy',
                  metrics=[keras.metrics.categorical_accuracy])

    tools.create_directory('./tmpvgg/')
    weights_file = './tmpvgg/' + str(
        fold
    ) + '-weights.{epoch:02d}-{categorical_accuracy:.4f}-{val_loss:.4f}-{val_categorical_accuracy:.4f}.h5'
    csv_file = './tmpvgg/record.csv'
    lr_reducer = ReduceLROnPlateau(monitor='categorical_accuracy',
                                   factor=0.5,
                                   cooldown=0,
                                   patience=5,
                                   min_lr=0.5e-6)
    early_stopper = EarlyStopping(monitor='val_categorical_accuracy',
                                  min_delta=1e-4,
                                  patience=50)

    model_checkpoint = ModelCheckpoint(weights_file,
                                       monitor='val_categorical_accuracy',
                                       save_best_only=True,
                                       verbose=2,
                                       save_weights_only=True,
                                       mode='max')
    tensorboard = TensorBoard(log_dir='./logs/',
                              histogram_freq=0,
                              batch_size=8,
                              write_graph=True,
                              write_grads=True,
                              write_images=True,
                              embeddings_freq=0,
                              embeddings_layer_names=None,
                              embeddings_metadata=None)
    CSV_record = CSVLogger(csv_file, separator=',', append=True)

    callbacks = [
        lr_reducer, early_stopper, model_checkpoint, tensorboard, CSV_record
    ]
    gc.disable()
    model.fit_generator(
        generator=tools.batch_generator(np.array(train_data),
                                        np.array(train_label), batch_size,
                                        True, class_num),
        steps_per_epoch=int(len(train_label) / batch_size) - 1,
        max_q_size=50,
        initial_epoch=initial_epoch,
        epochs=epoch,
        verbose=1,
        callbacks=callbacks,
        validation_data=tools.batch_generator(np.array(test_data),
                                              np.array(test_label), batch_size,
                                              True, class_num),
        validation_steps=int(len(test_label) / batch_size) - 1,
        class_weight='auto')

    all_y_pred = []
    all_y_true = []
    for test_data_batch, test_label_batch in tools.batch_generator_confusion_matrix(
            np.array(test_data), np.array(test_label), batch_size, True,
            class_num):
        y_pred = model.predict(test_data_batch, batch_size)
        y_true = test_label_batch
        for y_p in y_pred:
            all_y_pred.append(np.where(y_p == max(y_p))[0][0])

        for y_t in y_true:
            all_y_true.append(np.where(y_t == max(y_t))[0][0])
    confusion = confusion_matrix(y_true=all_y_true, y_pred=all_y_pred)
    print(confusion)
    with open('confusion_matrix.txt', 'a+') as f:
        f.write(str(all_y_true) + "\n")
        f.write(str(all_y_pred) + "\n")
        f.write(str(confusion) + '\n')
    gc.enable()
Example No. 21
    from mpi4py import MPI
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()
    n_procs = comm.Get_size()
else:
    rank = 0
    n_procs = 1

nPoints = 1024
dataDir = '/data/groups/comp-astro/bruno/'
inDir = dataDir + 'cosmo_sims/{0}_hydro_50Mpc/output_files_pchw18/'.format(
    nPoints)
stats_dir = inDir + 'statistics/'
outDir = dataDir + 'cosmo_sims/{0}_hydro_50Mpc/snapshots_prepared/'.format(
    nPoints)
if rank == 0: create_directory(outDir)

data_type = 'hydro'
# data_type = 'particles'

# Load Statistics
statistics = h5.File(stats_dir + 'stats_{0}.h5'.format(data_type), 'r')

fields = ['density']
precision = np.float32

Lbox = 5000  #kpc/h
if nPoints == 1024: proc_grid = [4, 2, 2]
if nPoints == 2048: proc_grid = [8, 8, 8]
box_size = [Lbox, Lbox, Lbox]
grid_size = [nPoints, nPoints, nPoints]  #Size of the simulation grid
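Only rank 0 creates `outDir` above. If the other ranks write into it immediately, a barrier after the rank-0 mkdir avoids a startup race; a minimal sketch of that pattern, assuming mpi4py:

from mpi4py import MPI

comm = MPI.COMM_WORLD
if comm.Get_rank() == 0:
    create_directory(outDir)  # only one rank touches the filesystem
comm.Barrier()  # every rank waits here until the directory exists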
Example No. 22
import tools
from libs import kodi
from tm_libs import dom_parser
from libs.trans_utils import i18n
from libs import log_utils
from t0mm0.common.net import Net
from t0mm0.common.addon import Addon
import main_scrape
from urllib2 import Request, build_opener, HTTPCookieProcessor, HTTPHandler
import cookielib
net = Net()
addon_id = kodi.addon_id
addon = Addon(addon_id, sys.argv)

#COOKIE STUFF
tools.create_directory(tools.AOPATH, "All_Cookies/IWatchOnline")
cookiepath = xbmc.translatePath(
    os.path.join('special://home', 'addons', addon_id, 'All_Cookies',
                 'IWatchOnline'))
cookiejar = os.path.join(cookiepath, 'cookies.lwp')
cj = cookielib.LWPCookieJar()
cookie_file = os.path.join(cookiepath, 'cookies.lwp')

base_url = 'http://www.merdb.link/'


def LogNotify(title, message, times, icon):
    xbmc.executebuiltin("XBMC.Notification(" + title + "," + message + "," +
                        times + "," + icon + ")")

Example No. 23
import tools
from libs import kodi
from tm_libs import dom_parser
from libs.trans_utils import i18n
from libs import log_utils
from t0mm0.common.net import Net
from t0mm0.common.addon import Addon
import main_scrape
from urllib2 import Request, build_opener, HTTPCookieProcessor, HTTPHandler
import cookielib
net = Net()
addon_id=kodi.addon_id
addon = Addon(addon_id, sys.argv)

#COOKIE STUFF
tools.create_directory(tools.AOPATH, "All_Cookies/MerDb")
cookiepath = xbmc.translatePath(os.path.join('special://home','addons',addon_id,'All_Cookies','MerDb/'))
cookiejar = os.path.join(cookiepath,'cookies.lwp')
cj = cookielib.LWPCookieJar()
cookie_file = os.path.join(cookiepath,'cookies.lwp')

base_url = kodi.get_setting('merdb_base_url')

def LogNotify(title, message, times, icon):
    xbmc.executebuiltin("XBMC.Notification(" + title + "," + message + "," + times + "," + icon + ")")

def OPEN_URL(url):
    req = urllib2.Request(url)
    req.add_header('User-Agent', 'Mozilla/5.0 (Linux; U; Android 4.2.2; en-us; AFTB Build/JDQ39) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30')
    response = urllib2.urlopen(req)
    link = response.read()
Example No. 24
from tm_libs import dom_parser
from libs.trans_utils import i18n
from libs import log_utils
from t0mm0.common.net import Net
from t0mm0.common.addon import Addon
import urlresolver
import main_scrape
from urllib2 import Request, build_opener, HTTPCookieProcessor, HTTPHandler
import cookielib
net = Net()
addon_id = kodi.addon_id
addon = Addon(addon_id, sys.argv)
ADDON = xbmcaddon.Addon(id=kodi.addon_id)

#COOKIE STUFF
tools.create_directory(tools.AOPATH, "All_Cookies/PrimeWire")
cookiepath = xbmc.translatePath(
    os.path.join('special://home', 'addons', addon_id, 'All_Cookies',
                 'PrimeWire/'))
cookiejar = os.path.join(cookiepath, 'cookies.lwp')
cj = cookielib.LWPCookieJar()
cookie_file = os.path.join(cookiepath, 'cookies.lwp')

base_url = kodi.get_setting('primewire_base_url')
#base_url = 'http://www.primewire.ag/'


def LogNotify(title, message, times, icon):
    xbmc.executebuiltin("XBMC.Notification(" + title + "," + message + "," +
                        times + "," + icon + ")")
Example No. 25
addon_id = kodi.addon_id

timeout = int(kodi.get_setting('scraper_timeout'))


def __enum(**enums):
    return type('Enum', (), enums)


VIDEO_TYPES = __enum(TVSHOW='TV Show',
                     MOVIE='Movie',
                     EPISODE='Episode',
                     SEASON='Season')
#COOKIE STUFF

tools.create_directory(tools.AOPATH, "All_Cookies/IceFilms")
cookiepath = xbmc.translatePath(
    os.path.join('special://home', 'addons', addon_id, 'All_Cookies',
                 'IceFilms/'))
cookiejar = os.path.join(cookiepath, 'cookies.lwp')
cj = cookielib.LWPCookieJar()
cookie_file = os.path.join(cookiepath, 'cookies.lwp')

MAX_RESPONSE = 1024 * 1024 * 2
FORCE_NO_MATCH = '***FORCE_NO_MATCH***'
QUALITIES = __enum(LOW='Low',
                   MEDIUM='Medium',
                   HIGH='High',
                   HD720='HD720',
                   HD1080='HD1080')
Example No. 26
import HTMLParser
addon_id = kodi.addon_id


timeout = int(kodi.get_setting('scraper_timeout'))

def __enum(**enums):
    return type('Enum', (), enums)


VIDEO_TYPES = __enum(TVSHOW='TV Show', MOVIE='Movie', EPISODE='Episode', SEASON='Season')
#COOKIE STUFF
tools.create_directory(tools.AOPATH, "All_Cookies/IceFilms")
cookiepath = xbmc.translatePath(os.path.join('special://home','addons',addon_id,'All_Cookies','IceFilms/'))
cookiejar = os.path.join(cookiepath,'cookies.lwp')
cj = cookielib.LWPCookieJar()
cookie_file = os.path.join(cookiepath,'cookies.lwp')

MAX_RESPONSE = 1024 * 1024 * 2
FORCE_NO_MATCH = '***FORCE_NO_MATCH***'
QUALITIES = __enum(LOW='Low', MEDIUM='Medium', HIGH='High', HD720='HD720', HD1080='HD1080')

XHR = {'X-Requested-With': 'XMLHttpRequest'}
USER_AGENT = "Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko"
BR_VERS = [
Example No. 27
from tm_libs import dom_parser
from libs import log_utils
import tools
from libs import cloudflare
import cookielib
from StringIO import StringIO
import gzip
import main_scrape
import base64
addon_id = kodi.addon_id

timeout = int(kodi.get_setting('scraper_timeout'))

tools.create_directory(tools.AOPATH, "All_Cookies/SantaSeries")
cookiepath = xbmc.translatePath(
    os.path.join('special://home', 'addons', addon_id, 'All_Cookies',
                 'SantaSeries/'))
cookiejar = os.path.join(cookiepath, 'cookies.lwp')
cj = cookielib.LWPCookieJar()
cookie_file = os.path.join(cookiepath, 'cookies.lwp')


def __enum(**enums):
    return type('Enum', (), enums)


MAX_RESPONSE = 1024 * 1024 * 2
FORCE_NO_MATCH = '***FORCE_NO_MATCH***'
QUALITIES = __enum(LOW='Low',
Example No. 28
def migrate_db(ctx, app_dir, venv_dir, user):
    create_directory(ctx, "/var/lib/zam", owner=user)
    cmd = f"{venv_dir}/bin/alembic -c production.ini upgrade head"
    ctx.sudo(f'bash -c "cd {app_dir} && {cmd}"', user=user)
Example No. 29
     change_dir(name)
 elif command == '1':
     name = input('Enter file name: ')
     if name == '':
         print('File name is missing')
         save_info('Error - File name is missing')
     else:
         create_file(name)
         save_info(f'File {name} is created')
 elif command == '2':
     name = input('Enter folder name: ')
     if name == '':
         print('Folder name is missing')
         save_info('Error - Folder name is missing')
     else:
         create_directory(name)
         save_info(f'Folder {name} is created')
 elif command == '3':
     name = input('Enter file or folder name: ')
     if name == '':
         print('File or folder name is missing')
         save_info('Error delete - File or folder name is missing')
     else:
         delete_file(name)
         save_info(f'Deleted {name}')
 elif command == '4':
     name = input('Enter file or folder to copy: ')
     new_name = input('Enter the new destination: ')
     if name == '':
         print('File or folder name is missing')
         save_info('Error copy - File or folder name is missing')
Example No. 30
def Network_config(class_num=4,
                   epoch=200,
                   initial_epoch=0,
                   batch_size=32,
                   train_data=None,
                   train_label=None,
                   test_data=None,
                   test_label=None,
                   fold=0):
    adam = Adam(lr=0.005,
                beta_1=0.9,
                beta_2=0.999,
                epsilon=1e-08,
                decay=0.0009)
    sgd = SGD(lr=0.001, momentum=0.9, decay=0.0, nesterov=False)

    input_tensor = Input(shape=(224, 224, 3))

    #backbone
    base_model = VGG16(input_tensor=input_tensor,
                       weights='imagenet',
                       include_top=False)
    base_output = base_model.output

    #self-attention
    x = non_local.non_local_block(base_output,
                                  intermediate_dim=None,
                                  compression=2,
                                  mode='embedded',
                                  add_residual=False)
    x = BatchNormalization()(x)

    #channel-attention
    y = channel_attention.squeeze_excitation_layer(base_output,
                                                   512,
                                                   ratio=4,
                                                   concate=False)
    y = BatchNormalization()(y)

    #concat
    x = concatenate([base_output, x], axis=3)
    x = concatenate([x, y], axis=3)

    # spp
    gap = GlobalAveragePooling2D()(x)
    x = Flatten()(x)
    x = concatenate([gap, x])
    x = Dense(512, activation='relu')(x)
    x = BatchNormalization()(x)
    x = Dense(512, activation='relu')(x)
    x = BatchNormalization()(x)
    predict = Dense(class_num, activation='softmax')(x)
    model = Model(inputs=input_tensor, outputs=predict)

    for layer in (base_model.layers):
        layer.trainable = False

    for l in model.layers:
        print(l.name)

    model.compile(optimizer=adam,
                  loss='categorical_crossentropy',
                  metrics=[keras.metrics.categorical_accuracy])
    model.summary()

    tools.create_directory('./final/')
    weights_file = './final/' + str(
        fold
    ) + '-weights.{epoch:02d}-{categorical_accuracy:.4f}-{val_loss:.4f}-{val_categorical_accuracy:.4f}.h5'
    csv_file = './final/record.csv'
    lr_reducer = ReduceLROnPlateau(monitor='categorical_accuracy',
                                   factor=0.2,
                                   cooldown=0,
                                   patience=2,
                                   min_lr=0.5e-6)
    early_stopper = EarlyStopping(monitor='val_categorical_accuracy',
                                  min_delta=1e-4,
                                  patience=30)

    model_checkpoint = ModelCheckpoint(weights_file,
                                       monitor='val_categorical_accuracy',
                                       save_best_only=True,
                                       verbose=1,
                                       save_weights_only=True,
                                       mode='max')
    tensorboard = TensorBoard(log_dir='./logs/',
                              histogram_freq=0,
                              batch_size=8,
                              write_graph=True,
                              write_grads=True,
                              write_images=True,
                              embeddings_freq=0,
                              embeddings_layer_names=None,
                              embeddings_metadata=None)
    CSV_record = CSVLogger(csv_file, separator=',', append=True)

    callbacks = [
        lr_reducer, early_stopper, model_checkpoint, tensorboard, CSV_record
    ]
    gc.disable()
    model.fit_generator(
        generator=tools.batch_generator(np.array(train_data),
                                        np.array(train_label), batch_size,
                                        True, class_num, True),
        steps_per_epoch=int(len(train_label) / batch_size) - 1,
        max_q_size=20,
        initial_epoch=initial_epoch,
        epochs=epoch,
        verbose=1,
        callbacks=callbacks,
        validation_data=tools.batch_generator(np.array(test_data),
                                              np.array(test_label), batch_size,
                                              True, class_num, False),
        validation_steps=int(len(test_label) / batch_size) - 1,
        class_weight='auto')

    #confusion matrix
    all_y_pred = []
    all_y_true = []
    for test_data_batch, test_label_batch in tools.batch_generator_confusion_matrix(
            np.array(test_data), np.array(test_label), batch_size, True,
            class_num):
        y_pred = model.predict(test_data_batch, batch_size)
        y_true = test_label_batch
        for y_p in y_pred:
            all_y_pred.append(np.where(y_p == max(y_p))[0][0])
        for y_t in y_true:
            all_y_true.append(np.where(y_t == max(y_t))[0][0])
    confusion = confusion_matrix(y_true=all_y_true, y_pred=all_y_pred)
    print(confusion)
    with open('confusion_matrix.txt', 'a+') as f:
        f.write(str(all_y_true) + "\n")
        f.write(str(all_y_pred) + "\n")
        f.write(str(confusion) + '\n')
    gc.enable()
Example No. 31
from libs import kodi
from tm_libs import dom_parser
from libs import log_utils
from t0mm0.common.net import Net
from t0mm0.common.addon import Addon
import tools
import string
import main_scrape
from urllib2 import Request, build_opener, HTTPCookieProcessor, HTTPHandler
import cookielib
net = Net()
addon_id = kodi.addon_id
addon = Addon(addon_id, sys.argv)

#COOKIE STUFF
tools.create_directory(tools.AOPATH, "All_Cookies/Afdah")
cookiepath = xbmc.translatePath(os.path.join('special://home','addons',addon_id,'All_Cookies','Afdah/'))
cookiejar = os.path.join(cookiepath,'cookies.lwp')
cj = cookielib.LWPCookieJar()
cookie_file = os.path.join(cookiepath,'cookies.lwp')


def __enum(**enums):
    return type('Enum', (), enums)

FORCE_NO_MATCH = '***FORCE_NO_MATCH***'
QUALITIES = __enum(LOW='Low', MEDIUM='Medium', HIGH='High', HD720='HD720', HD1080='HD1080')

def LogNotify(title, message, times, icon):
    xbmc.executebuiltin("XBMC.Notification(" + title + "," + message + "," + times + "," + icon + ")")
Example No. 32
import string
from libs import cloudflare
from tm_libs import dom_parser
import cookielib
import json
from StringIO import StringIO
import gzip
import main_scrape


net = Net()
addon_id = kodi.addon_id
addon = Addon(addon_id, sys.argv)

#COOKIE STUFF
tools.create_directory(tools.AOPATH, "All_Cookies/NineMovies")
cookiepath = xbmc.translatePath(os.path.join('special://home','addons',addon_id,'All_Cookies','NineMovies/'))
cookiejar = os.path.join(cookiepath,'cookies.lwp')
cj = cookielib.LWPCookieJar()
cookie_file = os.path.join(cookiepath,'cookies.lwp')



timeout = int(kodi.get_setting('scraper_timeout'))

def __enum(**enums):
    return type('Enum', (), enums)


MAX_RESPONSE = 1024 * 1024 * 2
FORCE_NO_MATCH = '***FORCE_NO_MATCH***'
Example No. 33
import tools
from libs import kodi
from tm_libs import dom_parser
from libs.trans_utils import i18n
from libs import log_utils
from t0mm0.common.net import Net
from t0mm0.common.addon import Addon
import main_scrape
from urllib2 import Request, build_opener, HTTPCookieProcessor, HTTPHandler
import cookielib
net = Net()
addon_id=kodi.addon_id
addon = Addon(addon_id, sys.argv)

#COOKIE STUFF
tools.create_directory(tools.AOPATH, "All_Cookies/IWatchOnline")
cookiepath = xbmc.translatePath(os.path.join('special://home','addons',addon_id,'All_Cookies','IWatchOnline'))
cookiejar = os.path.join(cookiepath,'cookies.lwp')
cj = cookielib.LWPCookieJar()
cookie_file = os.path.join(cookiepath,'cookies.lwp')

base_url = 'http://www.merdb.link/'

def LogNotify(title, message, times, icon):
    xbmc.executebuiltin("XBMC.Notification(" + title + "," + message + "," + times + "," + icon + ")")

def OPEN_URL(url):
    req = urllib2.Request(url)
    req.add_header('User-Agent', 'Mozilla/5.0 (Linux; U; Android 4.2.2; en-us; AFTB Build/JDQ39) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30')
    response = urllib2.urlopen(req)
    link = response.read()