Exemple #1
0
def split(path, size, file_, dirpath, split_size, start_time=0, i=1, inLoop=False):
    """Split a file into Telegram-uploadable parts inside `dirpath`.

    Videos (suffix in VIDEO_SUFFIXES) are cut losslessly with ffmpeg
    (`-c copy`) into `.partNNN` pieces; anything else is byte-split with
    the coreutils `split` command.

    :param path: absolute path of the file to split
    :param size: total size of the file in bytes
    :param file_: file name (used for suffix detection and part naming)
    :param dirpath: directory that receives the generated parts
    :param split_size: target size of each part in bytes
    :param start_time: seek offset (seconds) for the current part; used on recursion
    :param i: 1-based index of the part currently being produced
    :param inLoop: True on recursive retry calls, so EQUAL_SPLITS is applied only once
    """
    parts = ceil(size/TG_SPLIT_SIZE)
    # When EQUAL_SPLITS is enabled, distribute the size evenly over all parts
    # (only on the first call — retries must keep their corrected split_size).
    if EQUAL_SPLITS and not inLoop:
        split_size = ceil(size/parts)
    if file_.upper().endswith(VIDEO_SUFFIXES):
        base_name, extension = ospath.splitext(file_)
        # Reserve ~2.5 MB of headroom for container overhead per part.
        split_size = split_size - 2500000
        while i <= parts :
            parted_name = "{}.part{}{}".format(str(base_name), str(i).zfill(3), str(extension))
            out_path = ospath.join(dirpath, parted_name)
            # -ss seeks to the current offset, -fs caps the output file size,
            # -c copy avoids re-encoding so the cut is fast and lossless.
            srun(["ffmpeg", "-hide_banner", "-loglevel", "error", "-i",
                            path, "-ss", str(start_time), "-fs", str(split_size),
                            "-async", "1", "-strict", "-2", "-c", "copy", out_path])
            out_size = get_path_size(out_path)
            # If the produced part still exceeds the 2000 MiB hard limit,
            # shrink split_size by the overshoot and retry this same part.
            if out_size > 2097152000:
                dif = out_size - 2097152000
                split_size = split_size - dif + 2500000
                osremove(out_path)
                return split(path, size, file_, dirpath, split_size, start_time, i, inLoop=True)
            lpd = get_media_info(out_path)[0]
            # A part shorter than ~4 s or smaller than ~1 MB means the source
            # is exhausted — drop the stub and stop.
            if lpd <= 4 or out_size < 1000000:
                osremove(out_path)
                break
            # Overlap consecutive parts by 3 seconds so no frames are lost
            # at the cut points.
            start_time += lpd - 3
            i = i + 1
    else:
        # Non-video files: plain byte split, producing file.001, file.002, ...
        out_path = ospath.join(dirpath, file_ + ".")
        srun(["split", "--numeric-suffixes=1", "--suffix-length=3", f"--bytes={split_size}", path, out_path])
Exemple #2
0
def logs(
    since: str = "today",
    help=(
        "Date from which we want to see logs. String in format 'YYYY-MM-DD hh:mm:ss'. "
        "Defaults to today."),
):
    """Display the folder custodian user-service journal entries from *since* onward."""
    click.echo("Showing Folder Custodian service logs.")
    journal_cmd = ["journalctl", "--user-unit=fcust.service", f"--since={since}"]
    # check=True surfaces a journalctl failure as CalledProcessError.
    srun(journal_cmd, check=True)
Exemple #3
0
def restart(update, context):
    """Restart the bot: stop all helper processes, pull updates, and re-exec.

    :param update: incoming Telegram update that triggered the restart
    :param context: handler context carrying the bot instance
    """
    restart_message = sendMessage("Restarting...", context.bot, update)
    # Cancel the periodic status-refresh timer if one is active.
    if Interval:
        Interval[0].cancel()
    alive.kill()
    # Kill the web server together with every child it spawned.
    procs = Process(web.pid)
    for proc in procs.children(recursive=True):
        proc.kill()
    procs.kill()
    clean_all()
    srun(["python3", "update.py"])
    nox.kill()
    # Save restart message object in order to reply to it after restarting.
    # Mode "w" already truncates the file, so no explicit truncate() is needed.
    with open(".restartmsg", "w") as f:
        f.write(f"{restart_message.chat.id}\n{restart_message.message_id}\n")
    # Replace the current process image with a fresh bot instance.
    osexecl(executable, executable, "-m", "bot")
Exemple #4
0
def logs(
    since: str = "today",
    help=(
        "Date from which we want to see logs. String in format 'YYYY-MM-DD hh:mm:ss'. "
        "Defaults to today."),
):
    """Display the podman custodian transmission-pod user-service journal from *since* onward."""
    click.echo("Showing Podman Custodian transmission-pod service logs.")
    journal_cmd = [
        "journalctl", "--user-unit=transmission-pod.service",
        f"--since={since}"
    ]
    # check=True surfaces a journalctl failure as CalledProcessError.
    srun(journal_cmd, check=True)
Exemple #5
0
def take_ss(video_file):
    """Capture a single frame from the midpoint of *video_file* as a JPEG thumbnail.

    The frame is written to ``Thumbnails/<timestamp>.jpg`` and re-saved
    through Pillow to guarantee an RGB JPEG.

    :param video_file: path of the video to screenshot
    :return: path of the generated thumbnail, or None on failure
    """
    des_dir = 'Thumbnails'
    if not ospath.exists(des_dir):
        mkdir(des_dir)
    des_dir = ospath.join(des_dir, f"{time()}.jpg")
    duration = get_media_info(video_file)[0]
    # Unknown duration: pretend 3 s so the seek below lands at 1 s.
    if duration == 0:
        duration = 3
    duration = duration // 2
    try:
        srun(["ffmpeg", "-hide_banner", "-loglevel", "error", "-ss", str(duration),
                        "-i", video_file, "-vframes", "1", des_dir])
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; any ffmpeg launch failure yields None.
        return None

    if not ospath.lexists(des_dir):
        return None
    # Re-encode in place to guarantee an RGB JPEG regardless of what ffmpeg wrote.
    Image.open(des_dir).convert("RGB").save(des_dir, "JPEG")
    return des_dir
Exemple #6
0
def run(job_id: int, njobs: int, mask_fn: str, command: Sequence[str]):
    """
    Runs part of the script on this job's slice of the mask voxels.

    The mask voxels are partitioned evenly over *njobs* jobs; this job
    writes a temporary mask covering only its slice, substitutes its
    filename for 'MASK' (and the job marker for 'JOBID') in *command*,
    and runs the resulting command.

    :param job_id: job ID (0-based index into the partition)
    :param njobs: number of jobs
    :param mask_fn: mask filename
    :param command: script to run; must contain 'MASK' in at least one part
    :raises ValueError: if no element of *command* contains 'MASK'
    """
    logger.debug(f'loading original mask from {mask_fn}')
    mask_img = nib.load(addExt(mask_fn, mustExist=True, unambiguous=True))
    # NOTE(review): get_data() is deprecated in newer nibabel releases in
    # favour of get_fdata(); confirm the pinned nibabel version before changing.
    mask = mask_img.get_data() > 0
    voxels = np.where(mask)
    nvox = voxels[0].size
    # Evenly spaced cut points splitting the nvox voxels into njobs slices.
    boundaries = np.round(np.linspace(0, nvox, njobs + 1)).astype('int')

    use = tuple(vox[boundaries[job_id]:boundaries[job_id + 1]]
                for vox in voxels)
    logger.debug(
        f'creating new mask covering voxels {boundaries[job_id]} to ' +
        f'{boundaries[job_id + 1]} out of {nvox} voxels')
    # Clear the whole mask, then re-enable only this job's slice.
    mask[()] = False
    mask[use] = True
    marker = get_markers(njobs)[job_id]
    with tempfile.NamedTemporaryFile(prefix='mask' + marker,
                                     suffix='.nii.gz') as temp_mask:
        # Fixed: log the temp file's path, not the file object's repr.
        logger.debug(f'Storing new mask under {temp_mask.name}')
        nib.Nifti1Image(mask.astype('i4'), affine=None,
                        header=mask_img.header).to_filename(temp_mask.name)

        if not any('MASK' in part for part in command):
            raise ValueError('MASK not found')
        new_cmd = [
            part.replace('MASK', temp_mask.name).replace('JOBID', marker)
            for part in command
        ]
        logger.info(f'Running {new_cmd}')
        srun(new_cmd, check=True)
Exemple #7
0
def rkhunter_version_check():
    """Return the installed RKHunter version string after validating it.

    Runs ``rkhunter --version`` and parses its output; exits the process
    with status 255 if the version differs from WORKING_VERSION or cannot
    be identified at all.

    :return: the detected version string (the trailing ``return False`` is
             unreachable unless ``sexit`` is stubbed out, e.g. in tests)
    """
    cp = srun([RKHUNTER_PATH, '--version'], stdout=stdPIPE)
    # Iterate lines directly instead of indexing via range(len(...)).
    for raw_line in cp.stdout.decode('utf-8').split('\n'):
        line = raw_line.strip()
        if line == '':
            continue
        vmatch = rsearch(r'^Rootkit Hunter ([0-9\.]+?)$', line)
        if vmatch:
            rkhunter_version = vmatch.groups()[0]
            if rkhunter_version != WORKING_VERSION:
                print('Incompatible version found! Aborting.')
                sexit(255)
            return rkhunter_version
    print('Unable to identify RKHunter Version! Aborting.')
    sexit(255)
    return False
Exemple #8
0
 def onDownloadComplete(self):
     """Post-process a finished download (zip, extract, or pass through),
     split oversized files for Telegram leech, then hand off to the
     Telegram or Google Drive uploader.
     """
     # Snapshot download metadata while holding the shared dict lock.
     with download_dict_lock:
         LOGGER.info(f"Download completed: {download_dict[self.uid].name()}")
         download = download_dict[self.uid]
         name = str(download.name()).replace('/', '')
         gid = download.gid()
         size = download.size_raw()
         # Fall back to the newest directory entry when the engine reports
         # no usable name or for qBittorrent downloads.
         if name == "None" or self.isQbit:
             name = listdir(f'{DOWNLOAD_DIR}{self.uid}')[-1]
         m_path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
     if self.isZip:
         # Archive the download with 7z (store-only, -mx=0); volume-split
         # with -v when leeching something above the Telegram size limit.
         try:
             with download_dict_lock:
                 download_dict[self.uid] = ZipStatus(name, m_path, size)
             pswd = self.pswd
             path = m_path + ".zip"
             LOGGER.info(f'Zip: orig_path: {m_path}, zip_path: {path}')
             if pswd is not None:
                 if self.isLeech and int(size) > TG_SPLIT_SIZE:
                     path = m_path + ".zip"
                     srun(["7z", f"-v{TG_SPLIT_SIZE}b", "a", "-mx=0", f"-p{pswd}", path, m_path])
                 else:
                     srun(["7z", "a", "-mx=0", f"-p{pswd}", path, m_path])
             elif self.isLeech and int(size) > TG_SPLIT_SIZE:
                 path = m_path + ".zip"
                 srun(["7z", f"-v{TG_SPLIT_SIZE}b", "a", "-mx=0", path, m_path])
             else:
                 srun(["7z", "a", "-mx=0", path, m_path])
         except FileNotFoundError:
             LOGGER.info('File to archive not found!')
             self.onUploadError('Internal error occurred!!')
             return
         # Remove the original now that the archive exists (dir or file).
         try:
             rmtree(m_path)
         except:
             osremove(m_path)
     elif self.extract:
         try:
             if ospath.isfile(m_path):
                 path = get_base_name(m_path)
             LOGGER.info(f"Extracting: {name}")
             with download_dict_lock:
                 download_dict[self.uid] = ExtractStatus(name, m_path, size)
             pswd = self.pswd
             if ospath.isdir(m_path):
                 # Walk bottom-up: extract every first-volume/standalone
                 # archive in place, then delete all archive pieces.
                 for dirpath, subdir, files in walk(m_path, topdown=False):
                     for file_ in files:
                         # Match .part001.rar / .7z.001 / standalone .rar
                         # (not a middle .partN.rar volume) / .zip / .zip.001.
                         if search(r'\.part0*1.rar$', file_) or search(r'\.7z.0*1$', file_) \
                            or (file_.endswith(".rar") and not search(r'\.part\d+.rar$', file_)) \
                            or file_.endswith(".zip") or search(r'\.zip.0*1$', file_):
                             m_path = ospath.join(dirpath, file_)
                             if pswd is not None:
                                 result = srun(["7z", "x", f"-p{pswd}", m_path, f"-o{dirpath}", "-aot"])
                             else:
                                 result = srun(["7z", "x", m_path, f"-o{dirpath}", "-aot"])
                             if result.returncode != 0:
                                 LOGGER.error('Unable to extract archive!')
                     # Second pass: delete every archive piece in this dir.
                     for file_ in files:
                         if file_.endswith(".rar") or search(r'\.r\d+$', file_) \
                            or search(r'\.7z.\d+$', file_) or search(r'\.z\d+$', file_) \
                            or search(r'\.zip.\d+$', file_) or file_.endswith(".zip"):
                             del_path = ospath.join(dirpath, file_)
                             osremove(del_path)
                 path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
             else:
                 # Single-file download: delegate to the helper shell scripts.
                 if pswd is not None:
                     result = srun(["bash", "pextract", m_path, pswd])
                 else:
                     result = srun(["bash", "extract", m_path])
                 if result.returncode == 0:
                     LOGGER.info(f"Extract Path: {path}")
                     osremove(m_path)
                     LOGGER.info(f"Deleting archive: {m_path}")
                 else:
                     LOGGER.error('Unable to extract archive! Uploading anyway')
                     path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
         except NotSupportedExtractionArchive:
             LOGGER.info("Not any valid archive, uploading file as it is.")
             path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
     else:
         # Neither zip nor extract requested: upload as-is.
         path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
     up_name = PurePath(path).name
     up_path = f'{DOWNLOAD_DIR}{self.uid}/{up_name}'
     # Recompute size: zipping/extracting may have changed it.
     size = get_path_size(f'{DOWNLOAD_DIR}{self.uid}')
     if self.isLeech and not self.isZip:
         # Split any file above the Telegram limit (zip mode already
         # produced correctly sized volumes, hence the isZip exclusion).
         checked = False
         for dirpath, subdir, files in walk(f'{DOWNLOAD_DIR}{self.uid}', topdown=False):
             for file_ in files:
                 f_path = ospath.join(dirpath, file_)
                 f_size = ospath.getsize(f_path)
                 if int(f_size) > TG_SPLIT_SIZE:
                     # Flip to SplitStatus only once, on the first oversize file.
                     if not checked:
                         checked = True
                         with download_dict_lock:
                             download_dict[self.uid] = SplitStatus(up_name, up_path, size)
                         LOGGER.info(f"Splitting: {up_name}")
                     fssplit(f_path, f_size, file_, dirpath, TG_SPLIT_SIZE)
                     osremove(f_path)
     if self.isLeech:
         LOGGER.info(f"Leech Name: {up_name}")
         tg = TgUploader(up_name, self)
         tg_upload_status = TgUploadStatus(tg, size, gid, self)
         with download_dict_lock:
             download_dict[self.uid] = tg_upload_status
         update_all_messages()
         tg.upload()
     else:
         LOGGER.info(f"Upload Name: {up_name}")
         drive = GoogleDriveHelper(up_name, self)
         upload_status = UploadStatus(drive, size, gid, self)
         with download_dict_lock:
             download_dict[self.uid] = upload_status
         update_all_messages()
         drive.upload(up_name)
Exemple #9
0
def run_io(command_line_):
    """Normalize a command string and execute it through the shell.

    Embedded newlines are flattened to spaces and surrounding whitespace
    is stripped before the command is echoed and run.

    :param command_line_: shell command, possibly spanning several lines
    :raises CalledProcessError: if the command exits non-zero (check=True)
    """
    # .strip('\n') was redundant: replace() already removed every newline.
    command_line = command_line_.replace('\n', ' ').strip()
    print('>', command_line)
    srun(command_line, shell=True, check=True)
Exemple #10
0
            logging.error(f"Failed to download config.env {res.status_code}")
    except Exception as e:
        logging.error(f"CONFIG_FILE_URL: {e}")
except TypeError:
    pass

# Load configuration from config.env, letting it override existing env vars.
load_dotenv('config.env', override=True)

UPSTREAM_REPO = environ.get('UPSTREAM_REPO', None)
# Treat both a missing variable (len(None) raises TypeError) and an empty
# string as "no upstream repo configured".
try:
    if len(UPSTREAM_REPO) == 0:
        raise TypeError
except TypeError:
    UPSTREAM_REPO = None

if UPSTREAM_REPO is not None:
    # Start from a clean slate: drop any existing git metadata first.
    if ospath.exists('.git'):
        srun(["rm", "-rf", ".git"])

    # Re-init the repo, commit the current tree, and hard-reset onto the
    # upstream's master branch (single shell pipeline, aborts on first
    # failing && link).
    srun([
        f"git init -q \
                      && git config --global user.email [email protected] \
                      && git config --global user.name mltb \
                      && git add . \
                      && git commit -sm update -q \
                      && git remote add origin {UPSTREAM_REPO} \
                      && git fetch origin -q \
                      && git reset --hard origin/master -q"
    ],
         shell=True)
Exemple #11
0
 if args.verbose:
     print('> Running RKHunter ...')
 # check version
 if args.verbose:
     print('> Version check: ', end='')
 rkhunter_version = rkhunter_version_check()
 if not rkhunter_version:
     if args.verbose:
         print('not supported!')
     else:
         print('unsupported version!', file=sstderr)
     sexit(255)
 if args.verbose:
     print(rkhunter_version)
 # run rkhunter as subprocess
 cp = srun([RKHUNTER_PATH, '-c', '--sk', '--nocolors', '--noappend-log'], stdout=stdPIPE)
 output_lst = cp.stdout.decode('utf-8').split('\n')
 # run report parser on output
 hunter_report = {'information': {}, 'summary': {}}
 section = None
 for index in range(0, len(output_lst)):
     line = output_lst[index].strip()
     if line == '':
         continue
     # search for rkhunter version
     if 'information' in hunter_report.keys() and 'version' not in hunter_report['information'].keys():
         if rkhunter_version:
             hunter_report['information']['version'] = rkhunter_version
     # generic parser
     if line.startswith('Performing '):
         perf, rest = line.split(' ', 1)
Exemple #12
0
# Resolve the web-server port: fall back to 80 when SERVER_PORT is missing
# or set to an empty string (len('') == 0 re-raises KeyError on purpose).
try:
    SERVER_PORT = getConfig('SERVER_PORT')
    if len(SERVER_PORT) == 0:
        raise KeyError
except KeyError:
    SERVER_PORT = 80

# Platform-provided PORT (e.g. on PaaS hosts) wins over SERVER_PORT.
PORT = environ.get('PORT', SERVER_PORT)
# Long-running helpers are started detached via Popen; their handles are
# kept so they can be killed on restart.
web = Popen([
    f"gunicorn wserver:start_server --bind 0.0.0.0:{PORT} --worker-class aiohttp.GunicornWebWorker"
],
            shell=True)
alive = Popen(["python3", "alive.py"])
nox = Popen(["qbittorrent-nox", "--profile=."])
# Ensure a .netrc exists, install it for root, and launch aria2 via its
# helper script (srun blocks until each command finishes).
if not ospath.exists('.netrc'):
    srun(["touch", ".netrc"])
srun(["cp", ".netrc", "/root/.netrc"])
srun(["chmod", "600", ".netrc"])
srun(["chmod", "+x", "aria.sh"])
srun(["./aria.sh"], shell=True)
# Brief pause so the daemons above can come up before they are used.
sleep(0.5)

# Module-level registries shared across the bot.
Interval = []
DRIVES_NAMES = []
DRIVES_IDS = []
INDEX_URLS = []

try:
    if bool(getConfig('_____REMOVE_THIS_LINE_____')):
        logging.error('The README.md file there to be read! Exiting now!')
        exit()
 def run(self):
     """Execute this entry's configured executable and wait for it to exit."""
     target = self.exepath
     srun(target)
Exemple #14
0
            logging.error(f"Failed to download .netrc {res.status_code}")
    except Exception as e:
        logging.error(f"NETRC_URL: {e}")
except KeyError:
    pass
# Resolve the web-server port: fall back to 80 when SERVER_PORT is missing
# or set to an empty string (len('') == 0 re-raises KeyError on purpose).
try:
    SERVER_PORT = getConfig('SERVER_PORT')
    if len(SERVER_PORT) == 0:
        raise KeyError
except KeyError:
    SERVER_PORT = 80

# Platform-provided PORT (e.g. on PaaS hosts) wins over SERVER_PORT.
PORT = environ.get('PORT', SERVER_PORT)
# Long-running helpers started detached; handles kept for later shutdown.
web = Popen([f"gunicorn web.wserver:app --bind 0.0.0.0:{PORT}"], shell=True)
alive = Popen(["python3", "alive.py"])
# qbittorrent-nox daemonizes itself (-d), so a blocking srun is fine here.
srun(["qbittorrent-nox", "-d", "--profile=."])
# Ensure a .netrc exists, install it for root, and launch aria2 via its
# helper script.
if not ospath.exists('.netrc'):
    srun(["touch", ".netrc"])
srun(["cp", ".netrc", "/root/.netrc"])
srun(["chmod", "600", ".netrc"])
srun(["chmod", "+x", "aria.sh"])
a2c = Popen(["./aria.sh"], shell=True)
# Brief pause so the daemons above can come up before they are used.
sleep(1)

# Module-level registries shared across the bot.
Interval = []
DRIVES_NAMES = []
DRIVES_IDS = []
INDEX_URLS = []

try:
    if bool(getConfig('_____REMOVE_THIS_LINE_____')):
Exemple #15
0
#! /bin/python
"""Run `git pull` in every repository under ~/Prog/PyCharm, silencing git output."""

from __future__ import print_function

import os
from subprocess import run as srun

print("-" * 50)
print("Updating git repos...")
os.chdir(os.path.expanduser("~/Prog/PyCharm"))
# os.devnull is portable (works on Windows too), unlike a hard-coded "/dev/null".
with open(os.devnull, "w") as null:
    for dirnm in os.listdir("."):
        # Skip plain files — os.chdir() would raise on a non-directory entry.
        if not os.path.isdir(dirnm):
            continue
        os.chdir(dirnm)
        print("Updating", dirnm)
        srun(["git", "pull"], stdout=null, stderr=null)
        os.chdir("..")

print("-" * 50)
print("Updated all git repos!")