Example #1
async def handle_register(req, websocket):
    if 'register' == req['method']:
        if 'params' in req and 'secret_key' in req[
                'params'] and RTR_SECRET_KEY_SHA1 == get_sha1(
                    req['params']['secret_key']):
            if 'view' in req['params']:
                view_name = Cached.get_view_name(req['params']['view'])
            else:
                view_name = Cached.VIEW_DEFAULT
            await Cached.add_client(websocket, req['id'], view_name)
            data = await Cached.get_global()
            torrents = await Cached.get_torrents()
            result = {
                'version': RTR_VERSION,
                'global': data.__dict__,
                'torrents': [t.__dict__ for t in torrents]
            }
            plugins_data = {}
            for plugin in Cached.plugins:
                plugin_output = await plugin.get(False)
                if plugin_output is not None:
                    plugins_data[plugin.name()] = plugin_output
            if plugins_data:
                result['plugins'] = plugins_data
            result = Cached.filter_by_view(result, view_name)
            response_json = get_json_response(req['id'], result)
            return response_json
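Example #1 accepts a client only when `get_sha1(req['params']['secret_key'])` matches `RTR_SECRET_KEY_SHA1`. The `get_sha1` helper lives in the project's own `utils` module and is not shown here; a minimal sketch of a string-hashing variant, assuming it takes str or bytes and returns a lowercase hex digest:

import hashlib

def get_sha1(value):
    # Assumed helper: hash a str or bytes value and return the hex SHA-1 digest,
    # matching how the secret key is compared above.
    if isinstance(value, str):
        value = value.encode('utf-8')
    return hashlib.sha1(value).hexdigest()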
Example #2
  def get_objects_on_disk(self):
    """
    get_objects_on_disk(self)

    Walk through local storage and build one giant dictionary of objects on disk
    """

    objects_on_disk = {}
    download_path = self.options['dest_sync']
    if 'shelf' in self.options:
      download_path = os.path.join(download_path, self.options['shelf'])

    for (root, dirs, files) in os.walk(download_path):
      for f in files:
        obj = os.path.join(root, f)
        object_name = utils.unix_path(
          os.path.relpath(obj, self.options['dest_sync'])
        )
        # Record the sha1 hash when checksum mode is enabled
        if self.options['checksum']:
          objects_on_disk.update({object_name: {
            'sha1_hash': utils.get_sha1(obj),
          }})
        else:
          objects_on_disk.update({object_name: {
            'modified_time': utils.get_modified_time(obj),
            'file_size': utils.get_file_size(obj)
          }})

    return objects_on_disk
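In this example `utils.get_sha1(obj)` is given a file path rather than a string. A minimal sketch of a path-based helper, assuming it reads the file in chunks and returns a hex digest (the project's actual implementation may differ):

import hashlib

def get_sha1(path, chunk_size=65536):
    # Hash the file in fixed-size chunks so large objects are never loaded
    # into memory all at once.
    digest = hashlib.sha1()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()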
Example #3
    def get_objects_on_disk(self):
        """
    get_objects_on_disk(self)

    Walk though local storage and build one giant dictionary of objects on disk
    """

        objects_on_disk = {}
        download_path = self.options['dest_sync']
        if 'shelf' in self.options:
            download_path = os.path.join(download_path, self.options['shelf'])

        for (root, dirs, files) in os.walk(download_path):
            for f in files:
                obj = os.path.join(root, f)
                object_name = utils.unix_path(
                    os.path.relpath(obj, self.options['dest_sync']))
                # Record the sha1 hash when checksum mode is enabled
                if self.options['checksum']:
                    objects_on_disk.update(
                        {object_name: {
                            'sha1_hash': utils.get_sha1(obj),
                        }})
                else:
                    objects_on_disk.update({
                        object_name: {
                            'modified_time': utils.get_modified_time(obj),
                            'file_size': utils.get_file_size(obj)
                        }
                    })

        return objects_on_disk
Example #4
    def checking_inaccessibility(self, key, data_len=None):
        """ Checking that data is inaccessible
        """
        try:
            result_data = self.read_data_sync(key).pop().data
        except Exception as e:
            print e.message
        else:
            print len(result_data), '/', data_len, 'bytes already accessible'
            assert utils.get_sha1(result_data) != key
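Example #4 asserts that `get_sha1(result_data) != key`, i.e. the storage keys are expected to be content addresses (the SHA-1 of the payload), and the data counts as inaccessible once nothing matching the key can be read back. A hedged sketch of that convention, reusing an assumed data-hashing `get_sha1` as above:

def is_content_addressed(key, data):
    # Hypothetical check: the payload matches its key only when the key equals
    # the SHA-1 digest of the data. Example #4 asserts the negation of this.
    return get_sha1(data) == key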
Example #5
def Main():
    (options, args) = parse_options()
    assert len(args) == 1
    name = args[0]
    print 'Creating archive for %s' % name
    if name not in os.listdir('.'):
        print 'You must be standing directly below the directory you are uploading'
        return 1
    filename = utils.create_archive(name)
    sha1 = utils.get_sha1(filename)
    dest = os.path.join(GMSCORE_DEPS, sha1)
    uploadFile(filename, dest)
    sha1_file = '%s.sha1' % filename
    with open(sha1_file, 'w') as output:
        output.write(sha1)
    print 'Sha (%s) written to: %s' % (sha1, sha1_file)
Example #6
def Main():
    (options, args) = parse_options()
    assert len(args) == 1
    sha1_file = args[0]
    dest = sha1_file[:-5]
    print 'Ensuring %s' % dest
    with open(sha1_file, 'r') as input_sha:
        sha1 = input_sha.readline()
    if os.path.exists(dest) and utils.get_sha1(dest) == sha1:
        print 'sha1 matches, not downloading'
        dest_dir = extract_dir(dest)
        if os.path.exists(dest_dir):
            print 'destination directory exists, no extraction'
        else:
            unpack_archive(dest)
        return
    src = os.path.join(GMSCORE_DEPS, sha1)
    if not os.path.exists(src):
        print 'File (%s) does not exist on x20' % src
    print 'Downloading %s to %s' % (src, dest)
    shutil.copyfile(src, dest)
    unpack_archive(dest)
Example #7
def main():
  # We need prodaccess to upload to x20
  utils.check_prodacces()

  working_dir = run_on_as_app.WORKING_DIR

  print 'Removing directories that do not match checked out revision'
  with utils.ChangedWorkingDirectory(working_dir):
    for repo in run_on_as_app.APP_REPOSITORIES:
      repo_dir = os.path.join(working_dir, repo.name)
      if os.path.exists(repo_dir) \
          and utils.get_HEAD_sha1_for_checkout(repo_dir) != repo.revision:
        print 'Removing %s' % repo_dir
        shutil.rmtree(repo_dir)

  print 'Downloading all missing apps'
  run_on_as_app.clone_repositories(quiet=False)

  # Package all files as x20 dependency
  parent_dir = os.path.dirname(working_dir)
  with utils.ChangedWorkingDirectory(parent_dir):
    print 'Creating archive for opensource_apps (this may take some time)'
    working_dir_name = os.path.basename(working_dir)
    repo_dirs = [working_dir_name + '/' + repo.name
                 for repo in run_on_as_app.APP_REPOSITORIES]
    filename = utils.create_archive("opensource_apps", repo_dirs)
    sha1 = utils.get_sha1(filename)
    dest = os.path.join(upload_to_x20.GMSCORE_DEPS, sha1)
    upload_to_x20.uploadFile(filename, dest)
    sha1_file = '%s.sha1' % filename
    with open(sha1_file, 'w') as output:
      output.write(sha1)
    shutil.move(sha1_file,
                os.path.join(utils.THIRD_PARTY, 'opensource_apps.tar.gz.sha1'))

  print 'To have apps benchmarked on Golem, the updated apps have to be ' \
        'downloaded to the runners by ssh\'ing into each runner and do:\n' \
        'cd ../golem\n' \
        'update_dependencies.sh\n'
Example #8
import os

from cachetools import TTLCache, cached
from websockets import WebSocketException

from diffs import map_diff, map_get_multi_diff
from model import Client
from plugins import DiskUsage
from remote import Remote
from utils import Logger, get_sha1, getenv_path

# logging.getLogger("asyncio").setLevel(logging.INFO)
RTR_CERT_PATH = getenv_path('RTR_CERT_PATH', './cert/cert.pem')
RTR_RETR_INTERVAL = int(os.getenv('RTR_RETR_INTERVAL', 5))
RTR_SHORT_CACHE_TTL = int(os.getenv('RTR_SHORT_CACHE_TTL', 5))
RTR_LISTEN_HOST = os.getenv('RTR_LISTEN_HOST', '127.0.0.1')
RTR_LISTEN_PORT = int(os.getenv('RTR_LISTEN_PORT', 8765))
RTR_SECRET_KEY_SHA1 = getenv_path('RTR_SECRET_KEY_SHA1', get_sha1('abc123'))
SOCK_PATH = getenv_path('RTR_SCGI_SOCKET_PATH', './.rtorrent.sock')
RTR_PID_PATH = getenv_path('RTR_PID_PATH', './wss_server.pid')
RTR_PLUGINS_DISK_USAGE_PATHS = os.getenv('RTR_PLUGINS_DISK_USAGE_PATHS', '/')
logger = Logger.get_logger()
RTR_VERSION = '__RTR_VERSION_PLACEHOLDER__'


class Cached:
    VIEW_DEFAULT = 'main'
    VIEW_NAME = 'name'
    VIEWS = {
        VIEW_DEFAULT, VIEW_NAME, 'started', 'stopped', 'complete',
        'incomplete', 'hashing', 'seeding', 'leeching', 'active'
    }
    global_data = None
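Example #8 seeds `RTR_SECRET_KEY_SHA1` from the environment, falling back to `get_sha1('abc123')`, and Example #1 compares incoming registrations against it. A hedged sketch of the request a client might send, with field names taken from Example #1 and purely illustrative values:

# Hypothetical client-side request; the server in Example #1 accepts it only if
# get_sha1(params['secret_key']) equals RTR_SECRET_KEY_SHA1.
register_request = {
    'method': 'register',
    'id': 1,                     # request id echoed back via get_json_response
    'params': {
        'secret_key': 'abc123',  # plain secret; only its SHA-1 is kept server-side
        'view': 'seeding',       # optional; defaults to Cached.VIEW_DEFAULT
    },
}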
Example #9
        # Skip weekends
        dt = datetime.utcnow()
        isWeekend = (datetime.now(timezone('US/Eastern')).isoweekday() >= 6)
        if not isWeekend:
            # DOWNLOAD THE FILE
            try:
                r = requests.get(url)
                if r.ok:
                    with open(file_name, 'wb') as outfile:
                        outfile.write(r.content)
                    date_time_path = dt.strftime(
                        '%Y/%m/%d/%H/%Y-%m-%dT%H-%M-%S')
                    # 2021/02/17/02/2021-02-17T02-01-09

                    # SAVE NEW VERSIONS TO B2 STORAGE
                    sha1str = get_sha1(file_name)
                    if sha1str != db[
                            KEY.grab_file_hash]:  # It's new! Save it to B2!
                        print("[{0}] SHA1: {1}".format(run_count, sha1str))
                        db[KEY.grab_file_hash] = sha1str
                        # Upload new version to b2
                        destination_path = 'TDTrackerMattZ/{0}_{1}'.format(
                            date_time_path, file_name)
                        try:
                            bucket.upload_file(Filename=file_name,
                                               Key=destination_path)
                        except:
                            print(
                                'FAILED TO UPLOAD, OH WELL BETTER LUCK NEXT TIME!'
                            )
            except:
Example #10
    def process_objects(self, expanded_objects=[]):
        """
    process_objects(expanded_objects)

    Given a list of objects, determines if uploadable (binary), and
    then create a dictionary of:
      sha1_hash
      sha256_hash
      modified_time
      filesize

    Sha1_hash is only determined on first upload or if modified time and
    file size changed.
    """

        objects_metadata = {}
        for obj in expanded_objects:
            # Process if object is uploadable
            if self.uploadable_object(obj):

                # Object name in the metadata file. Replace \\ with / to remain
                # consistent across platforms.
                object_name = utils.unix_path(
                    os.path.relpath(obj, self.paths['shelves']))

                # Determine paths
                object_path = os.path.abspath(obj)
                object_metadata_file = '%s.pitem' % object_path

                # Add object to gitignore
                self.add_object_to_gitignore(obj)

                object_mtime = utils.get_modified_time(obj)
                object_file_size = utils.get_file_size(obj)
                # Use the cached checksum, since hashing is CPU intensive while
                # comparing file size and modified time is quick. Checksums are
                # forced with the CLI flag --checksum.
                if (not self.options['checksum']
                        and os.path.exists(object_metadata_file)):
                    with open(object_metadata_file) as json_file:
                        cached_metadata = json.load(json_file)

                    # Use cached hash if filesize and mtime are the same
                    if (object_file_size
                            == cached_metadata[object_name]['file_size']
                            and object_mtime
                            == cached_metadata[object_name]['modified_time']):
                        object_sha1_hash = cached_metadata[object_name][
                            'sha1_hash']
                        if 'sha256_hash' in cached_metadata[object_name]:
                            object_sha256_hash = cached_metadata[object_name][
                                'sha256_hash']
                        else:
                            object_sha256_hash = utils.get_sha256(obj)
                    else:
                        object_sha1_hash = utils.get_sha1(obj)
                        object_sha256_hash = utils.get_sha256(obj)
                else:
                    # Generate hashes if cached metadata is not present
                    object_sha1_hash = utils.get_sha1(obj)
                    object_sha256_hash = utils.get_sha256(obj)

                # TODO remove the sha1 check as it's not needed.
                # Add object to metadata dictionary
                objects_metadata[object_name] = {
                    'sha1_hash': object_sha1_hash,
                    'sha256_hash': object_sha256_hash,
                    'modified_time': object_mtime,
                    'file_size': object_file_size,
                }

        return objects_metadata
Example #11
  def process_objects(self, expanded_objects=[]):
    """
    process_objects(expanded_objects)

    Given a list of objects, determines if uploadable (binary), and
    then create a dictionary of:
      sha1_hash
      sha256_hash
      modified_time
      filesize

    Sha1_hash is only determined on first upload or if modified time and
    file size changed.
    """

    objects_metadata = {}
    for obj in expanded_objects:
      # Process if object is uploadable
      if self.uploadable_object(obj):

        # Object name in the metadata file. Replace \\ with / to remain
        # consistent across platforms.
        object_name = utils.unix_path(
          os.path.relpath(obj, self.paths['shelves'])
        )

        # Determine paths
        object_path = os.path.abspath(obj)
        object_metadata_file = '%s.pitem' % object_path

        # Add object to gitignore
        self.add_object_to_gitignore(obj)

        object_mtime = utils.get_modified_time(obj)
        object_file_size = utils.get_file_size(obj)
        # Use the cached checksum, since hashing is CPU intensive while
        # comparing file size and modified time is quick. Checksums are
        # forced with the CLI flag --checksum.
        if (
          not self.options['checksum'] and
          os.path.exists(object_metadata_file)
        ):
          with open(object_metadata_file) as json_file:
            cached_metadata = json.load(json_file)

          # Use cached hash if filesize and mtime are the same
          if (
            object_file_size == cached_metadata[object_name]['file_size'] and
            object_mtime == cached_metadata[object_name]['modified_time']
          ):
            object_sha1_hash = cached_metadata[object_name]['sha1_hash']
            if 'sha256_hash' in cached_metadata[object_name]:
              object_sha256_hash = cached_metadata[object_name]['sha256_hash']
            else:
              object_sha256_hash = utils.get_sha256(obj)
          else:
            object_sha1_hash = utils.get_sha1(obj)
            object_sha256_hash = utils.get_sha256(obj)
        else:
          # Generate hashes if cached metadata is not present
          object_sha1_hash = utils.get_sha1(obj)
          object_sha256_hash = utils.get_sha256(obj)

        # TODO remove the sha1 check as it's not needed.
        # Add object to metadata dictionary
        objects_metadata[object_name] = {
          'sha1_hash': object_sha1_hash,
          'sha256_hash': object_sha256_hash,
          'modified_time': object_mtime,
          'file_size': object_file_size,
        }

    return objects_metadata
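Examples #10 and #11 read a `<object>.pitem` JSON sidecar keyed by the object name so that unchanged files are not rehashed. Judging only from the fields the code accesses, such a sidecar would look roughly like the sketch below (the object name and values are illustrative, not taken from the project):

import json

# Hypothetical .pitem contents; real values come from utils.get_sha1,
# utils.get_sha256, utils.get_modified_time and utils.get_file_size.
cached_metadata = {
    'models/texture.bin': {
        'sha1_hash': '<40-char hex digest>',
        'sha256_hash': '<64-char hex digest>',
        'modified_time': 1613527269.0,
        'file_size': 102400,
    }
}

with open('models/texture.bin.pitem', 'w') as json_file:
    json.dump(cached_metadata, json_file, indent=2)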
Example #12
    f'https://api.codenation.dev/v1/challenge/dev-ps/submit-solution?token={key}'
}

r = requests.get(urls['generate_file'])

utils.write_json_content('answer.json', r.json())

# Second part

json_content = utils.read_json_content('answer.json')

json_dict = utils.to_dict(json_content)

encrypted_text = json_dict['cifrado']
key = json_dict['numero_casas']

decrypted_text = utils.decrypt(encrypted_text, key)
sha1_text = utils.get_sha1(decrypted_text)

json_dict['decifrado'] = decrypted_text
json_dict['resumo_criptografico'] = sha1_text

utils.write_json_content('answer.json', json_dict)

# Third Part

files = {'answer': open('answer.json', 'rb')}

r = requests.post(urls['submit_file'], files=files)

print(r.text)
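In this last example `utils.get_sha1` hashes a decrypted string rather than a file. Neither `decrypt` nor `get_sha1` is shown; a hedged sketch, assuming a Caesar-style shift of lowercase letters by `numero_casas` and a plain string SHA-1 (both are assumptions about the helpers, not their actual code):

import hashlib
import string

def decrypt(text, shift):
    # Assumed Caesar-style decryption: shift lowercase letters back by `shift`
    # positions, leaving digits, spaces and punctuation untouched.
    alphabet = string.ascii_lowercase
    table = {c: alphabet[(i - shift) % 26] for i, c in enumerate(alphabet)}
    return ''.join(table.get(c, c) for c in text)

def get_sha1(text):
    # Hex SHA-1 of the decrypted text, stored under 'resumo_criptografico'.
    return hashlib.sha1(text.encode('utf-8')).hexdigest()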