Example #1
def run_inference(im, model, result_code):
    # Perform inference
    inference_start = time.time()
    result = model.run_inference(im)[0]
    inference_duration = time.time() - inference_start
    # Convert to 8-bit so OpenCV can encode the image as a JPEG
    result = cv2.cvtColor((result * 255).astype(np.uint8), cv2.COLOR_BGR2RGB)

    # Save image
    pink_path = f"results/{result_code}.jpg"
    cv2.imwrite(pink_path, result)

    # Upload to b2
    upload_start = time.time()
    info = InMemoryAccountInfo()
    b2_api = B2Api(info)
    b2_api.authorize_account("production", config.B2_KEY, config.B2_SECRET)
    bucket = b2_api.get_bucket_by_name(config.B2_BUCKET)
    bucket.upload_local_file(
        local_file=pink_path,
        file_name=f"cycleganime/results/{os.path.basename(pink_path)}")
    upload_duration = time.time() - upload_start

    # Remove local file
    os.remove(pink_path)

    # Log output
    with open("log.txt", "a") as f:
        f.write(
            f"{pink_path}\t{inference_duration:.2f}\t{upload_duration:.2f}\n")
Example #2
def sync():
    """Synchronize files modified in last x days from the bucket to the media folder"""
    b2 = B2Api(InMemoryAccountInfo())
    app_key_id = get_snap_config('b2-application-key-id')
    app_key = get_snap_config('b2-application-key')
    b2.authorize_account('production', app_key_id, app_key)
    exclude_before_timestamp = int(time.time()) - (int(get_snap_config('remove-after-days')) * 86400)
    policies_manager = ScanPoliciesManager(
        exclude_file_regexes=("index.html",),
        exclude_modified_before=exclude_before_timestamp * 1000,  # in ms
    )
    synchronizer = Synchronizer(
        max_workers=5,
        newer_file_mode=NewerFileSyncMode.SKIP,
        policies_manager=policies_manager,
    )
    bucket_uri = 'b2://' + get_snap_config('b2-bucket')
    source = parse_sync_folder(bucket_uri, b2)
    destination = parse_sync_folder(MEDIA_DIR, b2)
    if not os.path.isdir(MEDIA_DIR):
        os.makedirs(MEDIA_DIR)
    with SyncReport(sys.stdout, False) as reporter:
        synchronizer.sync_folders(
            source_folder=source,
            dest_folder=destination,
            now_millis=time.time() * 1000, # in ms
            reporter=reporter
        )
Example #3
def b2Api(self) -> B2Api:
    if not hasattr(self, "_b2Api"):
        # Persist credentials in SQLite when a DB path is configured,
        # otherwise keep them in memory only.
        if hasattr(self, "_sqliteDbPath"):
            self._accountInfo = SqliteAccountInfo(file_name=self._sqliteDbPath)
        else:
            self._accountInfo = InMemoryAccountInfo()
        self._b2Api = B2Api(self._accountInfo)
        self._b2Api.authorize_account(**self._authInfo)
    return self._b2Api
Example #4
def __init__(self, account_id, application_key, bucket_name_prefix):
    self.account_id = account_id
    self.application_key = application_key
    self.bucket_name_prefix = bucket_name_prefix

    info = InMemoryAccountInfo()
    cache = InMemoryCache()
    self.api = B2Api(info, cache=cache)
    self.api.authorize_account('production', self.account_id, self.application_key)
Example #5
def __init__(self, application_key_id, application_key, bucket_name):
    """Initialize b2_api and authenticate the account."""
    info = InMemoryAccountInfo()
    self.b2_api = B2Api(info)
    self.application_key_id = application_key_id
    self.application_key = application_key
    self.b2_api.authorize_account("production", application_key_id,
                                  application_key)
    self.bucket = self.b2_api.get_bucket_by_name(bucket_name)
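
Assuming this `__init__` belongs to a small wrapper class (called `B2Client` below purely for illustration), construction and a simple upload might look like:

client = B2Client(application_key_id, application_key, "my-bucket")  # B2Client is a hypothetical name
client.bucket.upload_local_file(local_file="report.csv", file_name="report.csv")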
Example #6
def upload_guide():
    """Upload the "all.xml" file to BackBlaze B2"""

    # Initialize BackBlaze B2 API
    info = InMemoryAccountInfo()
    api = B2Api(info)

    # Login to BackBlaze
    api.authorize_account("production", B2_ID, B2_KEY)
    bucket = api.get_bucket_by_name(B2_BUCKET)

    # Upload the "all.xml" file, which contain the tv guide
    bucket.upload_local_file(local_file="all.xml", file_name="all.xml")
    print("[+] Guide upload completed")
Example #7
def upload(
    model: str,
    directory: str,
    bucket: str = 'malaya',
    application_key_id: str = os.environ.get('backblaze_application_key_id'),
    application_key: str = os.environ.get('backblaze_application_key')):
    """
    Upload directory with malaya-style pattern.

    Parameters
    ----------
    model: str
        it will become directory name.
    directory: str
        local directory with files in it.
    bucket: str, optional (default='malaya')
        backblaze bucket.
    application_key_id: str, optional (default=os.environ.get('backblaze_application_key_id'))
    application_key: str, optional (default=os.environ.get('backblaze_application_key'))
    """

    if not application_key_id or not application_key:
        raise ValueError(
            '`backblaze_application_key_id` and `backblaze_application_key` must be set.'
        )

    from b2sdk.v1 import B2Api, InMemoryAccountInfo
    info = InMemoryAccountInfo()
    b2_api = B2Api(info)

    b2_api.authorize_account('production', application_key_id, application_key)
    file_info = {'how': 'good-file'}
    b2_bucket = b2_api.get_bucket_by_name(bucket)

    for file in glob(os.path.join(directory, '*')):
        if file.endswith('frozen_model.pb'):
            outPutname = f'{model}/model.pb'
        elif file.endswith('frozen_model.pb.quantized'):
            outPutname = f'{model}-quantized/model.pb'
        else:
            # Strip the local directory so only the basename is used remotely
            outPutname = f'{model}/{os.path.basename(file)}'

        b2_bucket.upload_local_file(
            local_file=file,
            file_name=outPutname,
            file_infos=file_info,
        )

        logger.info(f'Uploaded from local {file} to {bucket}/{outPutname}')
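
A minimal invocation sketch, assuming the `backblaze_application_key_id` and `backblaze_application_key` environment variables are set and that `./frozen` is a hypothetical local directory of model files:

upload(model='my-model', directory='./frozen')  # uploads ./frozen/* under my-model/ in the default 'malaya' bucket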
Example #8
def run():
    if "B2_KEY_ID" not in os.environ.keys():
        logging.error("Please set environment variables: B2_KEY_ID and B2_KEY")
        sys.exit(1)
    B2_KEY_ID = os.environ['B2_KEY_ID']
    B2_KEY = os.environ['B2_KEY']
    B2_BUCKET_NAME = os.environ['B2_BUCKET_NAME']
    args = parse_args()
    logging.debug("Args: {}".format(args))

    screenshotFolder = os.path.abspath(args.directory)
    logging.debug("Configured screenshotsFolder: {}".format(args.directory))

    if not os.path.isdir(screenshotFolder):
        logging.error(f"Directory {screenshotFolder} doesn't exist!")
        sys.exit(1)

    # Initialize B2Api
    info = InMemoryAccountInfo()
    b2_api = B2Api(info)
    b2_api.authorize_account("production", B2_KEY_ID, B2_KEY)
    bucket = b2_api.get_bucket_by_name(B2_BUCKET_NAME)
    # build random word string for filename
    r = RandomWord()
    c = ["the", "my", "his", "her", "our", "their", "that", "this"]
    randwords = '-'.join([
        r.word(include_parts_of_speech=["verbs"]),
        random.choice(c),
        r.word(include_parts_of_speech=["adjectives"]),
        r.word(include_parts_of_speech=["nouns"])
    ])

    fileName = "{}.png".format(randwords)
    logging.debug("Filename {}".format(fileName))
    fullFile = os.path.join(screenshotFolder, fileName)
    logging.debug("Full file path: {}".format(fullFile))

    if args.area:
        take_area_screenshot(fullFile)
    if args.window:
        take_window_screenshot(fullFile)
    if args.screen:
        take_screenshot(fullFile)

    uploadResults = bucket.upload_local_file(local_file=fullFile,
                                             file_name=fileName)

    fileId = uploadResults.as_dict()['fileId']
    print(b2_api.get_download_url_for_file_name(B2_BUCKET_NAME, fileName))
Example #9
def upload_epaper_to_backblaze(pdf_info: dict, app_key_id: str, app_key: str,
                               bucket_name: str):
    info: InMemoryAccountInfo = InMemoryAccountInfo()
    b2_api: B2Api = B2Api(info)
    b2_api.authorize_account("production", app_key_id, app_key)
    bucket = b2_api.get_bucket_by_name(bucket_name)
    print(f'Uploading {pdf_info["cover_file_path"]}')
    bucket.upload_local_file(
        local_file=pdf_info["cover_file_path"],
        file_name=f'cover_{pdf_info["edition"]}.jpg',
    )
    print(f'Uploading {pdf_info["epaper_file_path"]}')
    bucket.upload_local_file(local_file=pdf_info["epaper_file_path"],
                             file_name=f'{pdf_info["edition"]}.pdf')
    print("Files uploaded.")
Example #10
def api(self):
    '''The B2Api, authorized from `self.credentials`.'''
    api = B2Api(InMemoryAccountInfo())
    # monkey patch the API instance to serialise reauthorization attempts
    b2authorize_account = api.authorize_account

    def locked_authorize_account(*a):
        '''Serialised version of the API authorize_account method.'''
        with self._lock:
            return b2authorize_account(*a)

    api.authorize_account = locked_authorize_account
    with self._conn_sem:
        api.authorize_account("production", self.credentials.keyId,
                              self.credentials.apiKey)
    return api
Example #11
def load_result_ids():
    """Returns set of file ids and a queue ordering them"""
    result_ids = set()
    info = InMemoryAccountInfo()
    b2_api = B2Api(info)
    b2_api.authorize_account("production", config.B2_KEY, config.B2_SECRET)
    bucket = b2_api.get_bucket_by_name(config.B2_BUCKET)
    files = []
    for file_info, _ in bucket.ls(folder_to_list="cycleganime/results",
                                  show_versions=False,
                                  recursive=True):
        if file_info.content_type != "image/jpeg":
            continue
        result_id = os.path.basename(file_info.file_name)[:5]
        upload_timestamp = file_info.upload_timestamp
        result_ids.add(result_id)
        files.append((result_id, upload_timestamp))
    files = sorted(files, key=lambda x: x[1])  # Sort by upload timestamp
    result_q = deque(files)
    return result_ids, result_q
Example #12
def b2_download_models():
    # Retrieve all relevant credentials / info from environment
    application_key_id = env['B2_APPLICATION_ID']
    application_key = env['B2_APPLICATION_KEY']
    application_realm = env['B2_APPLICATION_REALM']
    bucket_name = env['B2_BUCKET_NAME']

    # Authenticate using credentials
    # Store credentials, tokens and cache in memory
    info = InMemoryAccountInfo()
    b2_api = B2Api(info)
    b2_api.authorize_account(application_realm, application_key_id,
                             application_key)
    bucket = b2_api.get_bucket_by_name(bucket_name)

    subreddit_models_dir = 'subreddit-clfs'
    norm_models_dir = 'norm-clfs'
    directory_prefix = f'tmp/backend-ml/{subreddit_models_dir}'
    for directory in [subreddit_models_dir, norm_models_dir]:
        full_directory = path.join(directory_prefix, directory)
        if not path.exists(full_directory):
            makedirs(full_directory)

    subreddits = [
        "Futurology",
        "science",
    ]

    for subreddit in subreddits:
        for extension in [".vec", ".bin"]:
            b2_filename = f'{subreddit_models_dir}/model_{subreddit}{extension}'
            local_file_path = f'{directory_prefix}/model_{subreddit}{extension}'
            if path.exists(local_file_path):
                print(f'Model {extension} for subreddit {subreddit} exists!')
                continue
            download_dest = DownloadDestLocalFile(local_file_path)
            progress_listener = SimpleProgressListener(
                f'Downloading model: model_{subreddit}{extension}')
            bucket.download_file_by_name(b2_filename, download_dest,
                                         progress_listener)
Example #13
def b2_upload(bucket_name, filename):
    from b2sdk.v1 import B2Api, InMemoryAccountInfo

    info = InMemoryAccountInfo()
    b2 = B2Api(info)
    app_key_id = "William"  # placeholder key id
    app_key = "Gibson"      # placeholder application key
    b2.authorize_account("production", app_key_id, app_key)

    # Upload a single file
    local_file_path = filename
    b2_file_name = filename
    file_info = {"how": "good-file"}

    bucket = b2.get_bucket_by_name(bucket_name)
    bucket.upload_local_file(
        local_file=local_file_path,
        file_name=b2_file_name,
        file_infos=file_info,
    )
Example #14
def __init__(self):
    log("Initializing Clipper")
    self.clips: Optional[Dict[AnyStr, Clip]] = None
    self.c = load_yaml()
    logging.basicConfig(
        level=self.c["Logging"]["level"],
        format=self.c["Logging"]["format"]
    )
    self.meta = ClipsMeta.from_url(
        self.get_config("MetaSource", "endpoint"),
        self.get_config("MetaSource", "token")
    )
    self.load_local_clips()
    key_id = self.get_config("B2", "key_id")
    app_key = self.get_config("B2", "app_key")
    log("Initializing B2")
    info = InMemoryAccountInfo()
    self._api = B2Api(info)
    self._api.authorize_account("production", key_id, app_key)
    self._bucket = self._api.get_bucket_by_name(
        self.get_config("B2", "bucket_name")
    )
    self._file_link_template = "https://rushia.moe/clips?uid={uid}"
    log("Done initializing Clipper")
Example #15
# Load up JSON so we can grab a message
with open(os.getenv("GITHUB_EVENT_PATH")) as fh:
    event_data = json.load(fh)

if GITHUB_EVENT_NAME == "push":
    # Commit name
    HEAD_MESSAGE = event_data['head_commit']['message']
else:
    HEAD_MESSAGE = ""

if __name__ == "__main__":
    src_dir = sys.argv[1]
    dest_dir = sys.argv[2]

    info = InMemoryAccountInfo()
    b2_api = B2Api(info)
    b2_api.authorize_account("production", B2_KEY_ID, B2_APP_KEY)
    bucket = b2_api.get_bucket_by_id(B2_BUCKET_ID)

    # Iterate every file in the directory.
    for filename in pathlib.Path(src_dir).iterdir():
        print(f"==> Uploading {filename}...")
        bucket.upload_local_file(
            filename,
            dest_dir + "/" + filename.name,
            content_type="application/zip",
            file_infos={
                "platform": dest_dir,
                "event_name": GITHUB_EVENT_NAME,
                "sha": GITHUB_SHA,
Example #16
def __init__(self, replica):
    self.app_key_id = replica['meta']['app_key_id']
    self.app_key = replica['meta']['app_key']
    info = InMemoryAccountInfo()
    self.api = B2Api(info)
    self.bucket = 'openpacs'
Example #17
def __init__(self):
    account_info = InMemoryAccountInfo()
    raw_api = B2RawApi(
        B2Http(user_agent_append=os.environ.get('B2_USER_AGENT_APPEND')))

    super().__init__(account_info=account_info, raw_api=raw_api)
Example #18
def _authorize(self, keyid: str, appkey: str, bucketname: str):
    b2_api = B2Api(InMemoryAccountInfo())
    b2_api.authorize_account("production", keyid, appkey)
    bucket = b2_api.get_bucket_by_name(bucketname)
    return b2_api, bucket
Example #19
def list_target_files(config):
    import urllib.parse
    try:
        target = urllib.parse.urlparse(config["target"])
    except ValueError:
        return "invalid target"

    if target.scheme == "file":
        return [(fn, os.path.getsize(os.path.join(target.path, fn)))
                for fn in os.listdir(target.path)]

    elif target.scheme == "rsync":
        rsync_fn_size_re = re.compile(r'.*    ([^ ]*) [^ ]* [^ ]* (.*)')
        rsync_target = '{host}:{path}'

        target_path = target.path
        if not target_path.endswith('/'):
            target_path = target_path + '/'
        if target_path.startswith('/'):
            target_path = target_path[1:]

        rsync_command = [
            'rsync', '-e',
            '/usr/bin/ssh -i /root/.ssh/id_rsa_miab -oStrictHostKeyChecking=no -oBatchMode=yes',
            '--list-only', '-r',
            rsync_target.format(host=target.netloc, path=target_path)
        ]

        code, listing = shell('check_output',
                              rsync_command,
                              trap=True,
                              capture_stderr=True)
        if code == 0:
            ret = []
            for l in listing.split('\n'):
                match = rsync_fn_size_re.match(l)
                if match:
                    ret.append((match.groups()[1],
                                int(match.groups()[0].replace(',', ''))))
            return ret
        else:
            if 'Permission denied (publickey).' in listing:
                reason = "Invalid user or check you correctly copied the SSH key."
            elif 'No such file or directory' in listing:
                reason = "Provided path {} is invalid.".format(target_path)
            elif 'Network is unreachable' in listing:
                reason = "The IP address {} is unreachable.".format(
                    target.hostname)
            elif 'Could not resolve hostname' in listing:
                reason = "The hostname {} cannot be resolved.".format(
                    target.hostname)
            else:
                reason = "Unknown error. Try running " \
                    "'management/backup.py --verify' " \
                    "from the Mail-in-a-Box sources to debug the issue."
            raise ValueError(
                "Connection to rsync host failed: {}".format(reason))

    elif target.scheme == "s3":
        # match to a Region
        fix_boto()  # must call prior to importing boto
        import boto.s3
        from boto.exception import BotoServerError
        custom_region = False
        for region in boto.s3.regions():
            if region.endpoint == target.hostname:
                break
        else:
            # If region is not found this is a custom region
            custom_region = True

        bucket = target.path[1:].split('/')[0]
        path = '/'.join(target.path[1:].split('/')[1:]) + '/'

        # Create a custom region with custom endpoint
        if custom_region:
            from boto.s3.connection import S3Connection
            region = boto.s3.S3RegionInfo(name=bucket,
                                          endpoint=target.hostname,
                                          connection_cls=S3Connection)

        # If no prefix is specified, set the path to '', otherwise boto won't list the files
        if path == '/':
            path = ''

        if bucket == "":
            raise ValueError("Enter an S3 bucket name.")

        # connect to the region & bucket
        try:
            conn = region.connect(aws_access_key_id=config["target_user"],
                                  aws_secret_access_key=config["target_pass"])
            bucket = conn.get_bucket(bucket)
        except BotoServerError as e:
            if e.status == 403:
                raise ValueError("Invalid S3 access key or secret access key.")
            elif e.status == 404:
                raise ValueError("Invalid S3 bucket name.")
            elif e.status == 301:
                raise ValueError("Incorrect region for this bucket.")
            raise ValueError(e.reason)

        return [(key.name[len(path):], key.size)
                for key in bucket.list(prefix=path)]
    elif target.scheme == 'b2':
        from b2sdk.v1 import InMemoryAccountInfo, B2Api
        from b2sdk.v1.exception import NonExistentBucket
        info = InMemoryAccountInfo()
        b2_api = B2Api(info)

        # Extract information from target
        b2_application_keyid = target.netloc[:target.netloc.index(':')]
        b2_application_key = target.netloc[target.netloc.index(':') +
                                           1:target.netloc.index('@')]
        b2_bucket = target.netloc[target.netloc.index('@') + 1:]

        try:
            b2_api.authorize_account("production", b2_application_keyid,
                                     b2_application_key)
            bucket = b2_api.get_bucket_by_name(b2_bucket)
        except NonExistentBucket:
            raise ValueError(
                "B2 Bucket does not exist. Please double check your information!"
            )
        return [(key.file_name, key.size) for key, _ in bucket.ls()]

    else:
        raise ValueError(config["target"])
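
For the `b2` branch above, the target URL packs the application key id, application key, and bucket name into the netloc, so a hypothetical call looks like:

# b2://<application_key_id>:<application_key>@<bucket> (placeholder credentials)
files = list_target_files({"target": "b2://000abc123:K000secret@my-backup-bucket"})
for name, size in files:
    print(name, size)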
Example #20

def replace_keys():
    """Replace the placeholder authorization token and upload URL in the index file"""
    with open(INDEX_FILE, 'r+') as f:
        data = f.read()
        f.seek(0)
        t = tokens()
        replaced = data.replace('CHANGEME_AUTHORIZATION_TOKEN',
                                t['authorizationToken'])
        replaced = replaced.replace('CHANGEME_UPLOAD_URL', t['uploadUrl'])
        f.write(replaced)
        f.truncate()


def upload():
    """Upload the modified file to the bucket"""
    name = get_snap_config('b2-bucket')
    bucket = B2.get_bucket_by_name(name)
    bucket.upload_local_file(INDEX_FILE, 'index.html')


B2 = B2Api(InMemoryAccountInfo())
app_key_id = get_snap_config('b2-application-key-id')
app_key = get_snap_config('b2-application-key')
B2.authorize_account('production', app_key_id, app_key)

copy_template()
replace_keys()
upload()
Example #21
def get_bucket(config):
    b2_config = config['b2']
    account_info = InMemoryAccountInfo()
    b2_api = B2Api(account_info=account_info)
    b2_api.authorize_account('production', b2_config['key'], b2_config['secret'])
    return b2_api.get_bucket_by_name(b2_config['bucket'])
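
A minimal sketch of the config mapping `get_bucket` expects, with placeholder values:

config = {
    'b2': {
        'key': '000abc123',      # application key id (placeholder)
        'secret': 'K000secret',  # application key (placeholder)
        'bucket': 'my-bucket',
    }
}
bucket = get_bucket(config)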
Example #22
def __init__(self, model, db_session=None):
    self.api = B2Api(InMemoryAccountInfo())
    self.db = db_session
    self.model = model