Example #1
    def get_objects_on_disk(self):
        """
    get_objects_on_disk(self)

    Walk though local storage and build one giant dictionary of objects on disk
    """

        objects_on_disk = {}
        download_path = self.options['dest_sync']
        if 'shelf' in self.options:
            download_path = os.path.join(download_path, self.options['shelf'])

        for (root, dirs, files) in os.walk(download_path):
            for f in files:
                obj = os.path.join(root, f)
                object_name = utils.unix_path(
                    os.path.relpath(obj, self.options['dest_sync']))
                # Store the sha1 hash when checksum mode is enabled
                if self.options['checksum']:
                    objects_on_disk.update(
                        {object_name: {
                            'sha1_hash': utils.get_sha1(obj),
                        }})
                else:
                    objects_on_disk.update({
                        object_name: {
                            'modified_time': utils.get_modified_time(obj),
                            'file_size': utils.get_file_size(obj)
                        }
                    })

        return objects_on_disk
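A minimal self-contained sketch of the pattern above, with hashlib and os.stat standing in for the project's utils helpers (fingerprint_tree is a hypothetical name, not part of the source):

import hashlib
import os

def fingerprint_tree(root_dir, checksum=False):
    # Hypothetical standalone version of get_objects_on_disk: map each
    # relative POSIX-style path to either a sha1 hash (checksum mode) or a
    # cheap (modified_time, file_size) fingerprint.
    objects = {}
    for root, _dirs, files in os.walk(root_dir):
        for name in files:
            path = os.path.join(root, name)
            rel = os.path.relpath(path, root_dir).replace(os.sep, '/')
            if checksum:
                with open(path, 'rb') as fh:
                    objects[rel] = {'sha1_hash': hashlib.sha1(fh.read()).hexdigest()}
            else:
                st = os.stat(path)
                objects[rel] = {
                    'modified_time': st.st_mtime,
                    'file_size': st.st_size,
                }
    return objects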
Example #2
  def get_objects_on_disk(self):
    """
    get_objects_on_disk(self)

    Walk through local storage and build one giant dictionary of objects on disk.
    """

    objects_on_disk = {}
    download_path = self.options['dest_sync']
    if 'shelf' in self.options:
      download_path = os.path.join(download_path, self.options['shelf'])

    for (root, dirs, files) in os.walk(download_path):
      for f in files:
        obj = os.path.join(root, f)
        object_name = utils.unix_path(
          os.path.relpath(obj, self.options['dest_sync'])
        )
        # Store the sha1 hash when checksum mode is enabled
        if self.options['checksum']:
          objects_on_disk.update({object_name: {
            'sha1_hash': utils.get_sha1(obj),
          }})
        else:
          objects_on_disk.update({object_name: {
            'modified_time': utils.get_modified_time(obj),
            'file_size': utils.get_file_size(obj)
          }})

    return objects_on_disk
Example #3
    def prod_stack(self):
        path = os.path.join(os.path.dirname(os.path.abspath(__file__)), ".prod_stack.txt")

        if os.path.exists(path):
            modified = utils.get_modified_time(path)
            current = datetime.utcnow() - timedelta(hours=24)

            # only try to use the cached config if it's recent enough
            if modified >= current:
                f = open(path, "r")
                name = f.read().strip()
                f.close()

                if len(name) >= 1:
                    return name

        conn = Route53Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_KEY)
        zones = conn.get_all_hosted_zones()
        name = None
        host = None

        for zone in zones["ListHostedZonesResponse"]["HostedZones"]:
            if zone["Name"] == u"stamped.com.":
                host = zone
                break

        if host is not None:
            # host["Id"] looks like "/hostedzone/Z...", so strip the
            # 12-character "/hostedzone/" prefix to get the bare zone id
            records = conn.get_all_rrsets(host["Id"][12:])

            for record in records:
                if record.name == "api.stamped.com.":
                    # keep only the portion of the alias DNS name before the first hyphen
                    name = record.alias_dns_name.split("-")[0].strip()
                    break

        if name is not None:
            f = open(path, "w")
            f.write(name)
            f.close()

        return name
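prod_stack combines a Route53 lookup with a simple file-based cache: the cached name is trusted only if the file's modified time falls within the last 24 hours. The freshness check on its own, as a hedged sketch (read_fresh_cache is a hypothetical name, and os.path.getmtime stands in for the project's utils.get_modified_time):

import os
from datetime import datetime, timedelta

def read_fresh_cache(path, max_age=timedelta(hours=24)):
    # Return the cached text if the file exists, was modified within
    # max_age, and is non-empty; otherwise return None so the caller
    # recomputes the value.
    if not os.path.exists(path):
        return None
    modified = datetime.utcfromtimestamp(os.path.getmtime(path))
    if modified < datetime.utcnow() - max_age:
        return None
    with open(path, 'r') as f:
        value = f.read().strip()
    return value or None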
Example #4
def get_stack(stack=None):
    if stack is None and not is_ec2():
        return None

    if stack is not None:
        stack = stack.lower()

    name = ".%s.stack.txt" % ("__local__" if stack is None else stack)
    path = os.path.join(os.path.dirname(os.path.abspath(__file__)), name)

    if os.path.exists(path):
        modified = utils.get_modified_time(path)
        current = datetime.datetime.utcnow() - datetime.timedelta(minutes=15)

        # only try to use the cached config if it's recent enough
        if modified >= current:
            try:
                f = open(path, "r")
                info = json.loads(f.read())
                f.close()
                info = utils.AttributeDict(info)
                if info.instance is not None and len(info.nodes) > 0:
                    info.nodes = [utils.AttributeDict(node) for node in info.nodes]
                    return info
            except Exception:
                utils.log("error getting cached stack info; recomputing")
                utils.printException()

    conn = EC2Connection(keys.aws.AWS_ACCESS_KEY_ID, keys.aws.AWS_SECRET_KEY)

    reservations = conn.get_all_instances()
    instance_id = get_local_instance_id()
    stacks = defaultdict(list)
    cur_instance = None

    for reservation in reservations:
        for instance in reservation.instances:
            try:
                if instance.state == "running":
                    stack_name = instance.tags["stack"]

                    node = dict(
                        name=instance.tags["name"],
                        stack=stack_name,
                        # ast.literal_eval is safer than eval for parsing the
                        # list literal stored in the tag (requires import ast)
                        roles=ast.literal_eval(instance.tags["roles"]),
                        instance_id=instance.id,
                        public_dns_name=instance.public_dns_name,
                        private_dns_name=instance.private_dns_name,
                        private_ip_address=instance.private_ip_address,
                    )

                    stacks[stack_name].append(node)

                    if stack is None and instance.id == instance_id:
                        stack = stack_name
                        cur_instance = node
            except Exception:
                pass

    info = {"instance": cur_instance, "nodes": stacks[stack]}

    f = open(path, "w")
    f.write(json.dumps(info, indent=2))
    f.close()

    info = utils.AttributeDict(info)
    info.nodes = [utils.AttributeDict(node) for node in info.nodes]

    return info
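utils.AttributeDict is used throughout these examples but never shown. Judging from how it is consumed (info.instance, info.nodes, node.roles, and construction from a plain dict), a minimal compatible implementation might look like this; the project's actual class may differ:

class AttributeDict(dict):
    # A dict whose keys can also be read and written as attributes,
    # matching the access patterns in these examples.
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)

    def __setattr__(self, name, value):
        self[name] = value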
Example #5
def get_nodes(tag):
    if not is_ec2():
        return None

    # Check for local cache of ratelimiter nodes
    name = ".__local__.%s.txt" % tag
    path = os.path.join(os.path.dirname(os.path.abspath(__file__)), name)

    if os.path.exists(path):
        modified = utils.get_modified_time(path)
        current = datetime.datetime.utcnow() - datetime.timedelta(minutes=15)

        # only try to use the cached config if it's recent enough
        if modified >= current:
            try:
                f = open(path, "r")
                nodes = json.loads(f.read())
                f.close()
                if len(nodes) > 0:
                    return [utils.AttributeDict(node) for node in nodes]
            except Exception:
                utils.log("error getting cached stack info; recomputing")
                utils.printException()

    # Check current instance tags for specified ratelimiter stack
    conn = EC2Connection(keys.aws.AWS_ACCESS_KEY_ID, keys.aws.AWS_SECRET_KEY)

    reservations = conn.get_all_instances()
    instance_id = get_local_instance_id()
    cur_instance = None

    for reservation in reservations:
        for instance in reservation.instances:
            try:
                if instance.id == instance_id:
                    cur_instance = instance
                    break
            except Exception:
                pass

    if cur_instance is None:
        raise Exception("'%s' nodes not found: %s" % (tag, instance_id))

    dbStackName = cur_instance.tags["stack"]

    # Generate db nodes based on specified db stack
    dbStack = get_stack(dbStackName)

    # Collecting nodes in a set assumes utils.AttributeDict is hashable
    tagNodes = set()
    for node in dbStack["nodes"]:
        if tag in node.roles:
            tagNodes.add(node)

    if len(tagNodes) == 0:
        raise Exception("nodes not found for stack '%s' and tag '%s'" % (dbStackName, tag))

    f = open(path, "w")
    f.write(json.dumps(map(lambda x: dict(x), tagNodes), indent=2))
    f.close()

    return list(tagNodes)
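For context, a hypothetical call site (the tag value and logged fields are assumptions, not from the source):

nodes = get_nodes('ratelimiter')
if nodes:
    for node in nodes:
        # each node is a dict-like record built from the instance tags
        utils.log("%s: %s" % (node.name, node.private_ip_address))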
Example #6
    def process_objects(self, expanded_objects=[]):
        """
    process_objects(expanded_objects)

    Given a list of objects, determines if uploadable (binary), and
    then create a dictionary of:
      sha1_hash
      sha256_hash
      modified_time
      filesize

    Sha1_hash is only determined on first upload or if modified time and
    file size changed.
    """

        objects_metadata = {}
        for obj in expanded_objects:
            # Process if object is uploadable
            if self.uploadable_object(obj):

                # Object name in metadata file. Replace \\ with / to remain
                # consistent across platforms
                object_name = utils.unix_path(
                    os.path.relpath(obj, self.paths['shelves']))

                # Determine paths
                object_path = os.path.abspath(obj)
                object_metadata_file = '%s.pitem' % object_path

                # Add object to gitignore
                self.add_object_to_gitignore(obj)

                object_mtime = utils.get_modified_time(obj)
                object_file_size = utils.get_file_size(obj)
                # Use the cached checksum where possible: hashing is CPU
                # intensive, while comparing file size and modified time is
                # quick. Checksums are forced with the CLI flag --checksum.
                if (not self.options['checksum']
                        and os.path.exists(object_metadata_file)):
                    with open(object_metadata_file) as json_file:
                        cached_metadata = json.load(json_file)

                    # Use cached hash if filesize and mtime are the same
                    if (object_file_size
                            == cached_metadata[object_name]['file_size']
                            and object_mtime
                            == cached_metadata[object_name]['modified_time']):
                        object_sha1_hash = cached_metadata[object_name][
                            'sha1_hash']
                        if 'sha256_hash' in cached_metadata[object_name]:
                            object_sha256_hash = cached_metadata[object_name][
                                'sha256_hash']
                        else:
                            object_sha256_hash = utils.get_sha256(obj)
                    else:
                        object_sha1_hash = utils.get_sha1(obj)
                        object_sha256_hash = utils.get_sha256(obj)
                else:
                    # Generate hashes if cached metadata is not present
                    object_sha1_hash = utils.get_sha1(obj)
                    object_sha256_hash = utils.get_sha256(obj)

                # TODO remove the sha1 check as it's not needed.
                # Add object to metadata dictionary
                objects_metadata[object_name] = {
                    'sha1_hash': object_sha1_hash,
                    'sha256_hash': object_sha256_hash,
                    'modified_time': object_mtime,
                    'file_size': object_file_size,
                }

        return objects_metadata
Example #7
  def process_objects(self, expanded_objects=[]):
    """
    process_objects(expanded_objects)

    Given a list of objects, determines whether each is uploadable
    (binary), and then creates a dictionary of:
      sha1_hash
      sha256_hash
      modified_time
      file_size

    sha1_hash is only computed on first upload or if the modified time
    and file size have changed.
    """

    objects_metadata = {}
    for obj in expanded_objects:
      # Process if object is uploadable
      if self.uploadable_object(obj):

        # Object name in metadata file. Replace \\ with / to remain
        # consistent across platforms
        object_name = utils.unix_path(
          os.path.relpath(obj, self.paths['shelves'])
        )

        # Determine paths
        object_path = os.path.abspath(obj)
        object_metadata_file = '%s.pitem' % object_path

        # Add object to gitignore
        self.add_object_to_gitignore(obj)

        object_mtime = utils.get_modified_time(obj)
        object_file_size = utils.get_file_size(obj)
        # Use the cached checksum where possible: hashing is CPU intensive,
        # while comparing file size and modified time is quick. Checksums
        # are forced with the CLI flag --checksum.
        if (
          not self.options['checksum'] and
          os.path.exists(object_metadata_file)
        ):
          with open(object_metadata_file) as json_file:
            cached_metadata = json.load(json_file)

          # Use cached hash if filesize and mtime are the same
          if (
            object_file_size == cached_metadata[object_name]['file_size'] and
            object_mtime == cached_metadata[object_name]['modified_time']
          ):
            object_sha1_hash = cached_metadata[object_name]['sha1_hash']
            if 'sha256_hash' in cached_metadata[object_name]:
              object_sha256_hash = cached_metadata[object_name]['sha256_hash']
            else:
              object_sha256_hash = utils.get_sha256(obj)
          else:
            object_sha1_hash = utils.get_sha1(obj)
            object_sha256_hash = utils.get_sha256(obj)
        else:
          # Generate hashes if cached metadata is not present
          object_sha1_hash = utils.get_sha1(obj)
          object_sha256_hash = utils.get_sha256(obj)

        # TODO remove the sha1 check as it's not needed.
        # Add object to metadata dictionary
        objects_metadata[object_name] = {
          'sha1_hash': object_sha1_hash,
          'sha256_hash': object_sha256_hash,
          'modified_time': object_mtime,
          'file_size': object_file_size,
        }

    return objects_metadata
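The caching heuristic in Examples #6 and #7 is worth isolating: recompute the expensive hash only when the cheap (file size, mtime) fingerprint has changed. A self-contained sketch under simplified assumptions (cached_sha256 is a hypothetical name, and a flat JSON metadata file replaces the .pitem layout keyed by object name):

import hashlib
import json
import os

def cached_sha256(path, metadata_path):
    # Reuse the cached sha256 when the (file_size, modified_time)
    # fingerprint is unchanged; otherwise rehash and rewrite the cache.
    st = os.stat(path)
    if os.path.exists(metadata_path):
        with open(metadata_path) as fh:
            cached = json.load(fh)
        if (cached.get('file_size') == st.st_size
                and cached.get('modified_time') == st.st_mtime):
            return cached['sha256_hash']
    with open(path, 'rb') as fh:
        digest = hashlib.sha256(fh.read()).hexdigest()
    with open(metadata_path, 'w') as fh:
        json.dump({'file_size': st.st_size,
                   'modified_time': st.st_mtime,
                   'sha256_hash': digest}, fh, indent=2)
    return digest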