Example #1
def registerToken(requester, service, data, androidID = None):
    uuid = shortuuid.uuid()
    
    database = connection.safeguard
    userData = database.userData
    
    #delete old token
    #print data
    
    #print 'service', service
    #print 'requester', requester
    #print "data", "{'id':" ,data["id"] ,"}}"
    
    query = {'service': service, 'requester': requester,"data.id": data["id"], "data.androidid" : androidID}
    userData.remove(query)
    
    while list(userData.find({'token': uuid}).limit(1)):
        uuid = shortuuid.uuid()
    
    if androidID:
        data["androidid"] = androidID
    
    userData.insert({"token":uuid, "service" : service, 'requester': requester,"data" : data})
    
    return uuid
Example #2
def run(**kwargs):
    parser = OptionParser(usage="Usage: %prog [options]")
    parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="make lots of noise")
    parser.add_option("--logging", dest="logging", default='info', help="Logging level")
    parser.add_option("-H", "--host", dest="host", default='localhost', help="RabbitMQ host")
    parser.add_option("-P", "--port", dest="port", default=5672, type=int, help="RabbitMQ port")

    (options, args) = parser.parse_args()

    logging.basicConfig(
        format=u'[%(asctime)s] %(filename)s:%(lineno)d %(levelname)-6s %(message)s',
        level=getattr(logging, options.logging.upper(), logging.INFO)
    )

    Listener(
        port=options.port,
        host=options.host,
        handlers=context.handlers,
        context=Context(
            options=options,
            node_uuid=uuid(getfqdn()),
            uuid=uuid(),
            **kwargs
        )
    ).loop()
Example #3
    def create_slug(self, model_instance, add):
        # get fields to populate from and slug field to set
        if not isinstance(self._populate_from, (list, tuple)):
            self._populate_from = (self._populate_from, )
        slug_field = model_instance._meta.get_field(self.attname)

        if add or self.overwrite:
            # slugify the original field content; a random suffix is prepared below
            slug_for_field = lambda field: self.slugify_func(getattr(model_instance, field))
            slug = self.separator.join(map(slug_for_field, self._populate_from))
            #next = 2
            next = shortuuid.uuid() # universal unique
            next = next[:7] # not universal, but probability of collision is still very low
        else:
            # get slug from the current model instance
            slug = getattr(model_instance, self.attname)
            # model_instance is being modified, and overwrite is False,
            # so instead of doing anything, just return the current slug
            return slug

        # strip slug depending on max_length attribute of the slug field
        # and clean-up
        slug_len = slug_field.max_length
        if slug_len:
            slug = slug[:slug_len]
        slug = self._slug_strip(slug)
        original_slug = slug

        if self.allow_duplicates:
            return slug

        # exclude the current model instance from the queryset used in finding
        # the next valid slug
        queryset = self.get_queryset(model_instance.__class__, slug_field)
        if model_instance.pk:
            queryset = queryset.exclude(pk=model_instance.pk)

        # form a kwarg dict used to implement any unique_together constraints
        kwargs = {}
        for params in model_instance._meta.unique_together:
            if self.attname in params:
                for param in params:
                    kwargs[param] = getattr(model_instance, param, None)
        kwargs[self.attname] = slug

        # append a fresh random suffix while searching for the next valid slug,
        # trimming the slug to fit max_length
        while not slug or queryset.filter(**kwargs):
            slug = original_slug
            end = '%s%s' % (self.separator, next)
            end_len = len(end)
            if slug_len and len(slug) + end_len > slug_len:
                slug = slug[:slug_len - end_len]
                slug = self._slug_strip(slug)
            slug = '%s%s' % (slug, end)
            kwargs[self.attname] = slug
            next = shortuuid.uuid() # universal unique
            next = next[:7] # not universal, but probability of collision is still very low
        return slug
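A minimal standalone sketch of the collision-avoidance idea used above, for reference: keep appending a truncated shortuuid suffix until the slug stops colliding. The existing_slugs set is a hypothetical stand-in for the queryset lookup.

import shortuuid

def unique_slug(base, existing_slugs, separator='-', max_length=50):
    """Return base, or base plus a short random suffix if it is already taken."""
    slug = base[:max_length]
    while slug in existing_slugs:
        suffix = separator + shortuuid.uuid()[:7]
        slug = base[:max_length - len(suffix)] + suffix
    return slug

print(unique_slug('my-post', {'my-post'}))  # e.g. 'my-post-nCxy2Ka'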
Example #4
def generate_unique_slug(instance=None):
    """ Generate the unique slug for a model instance """
    if instance is not None:
        pk = instance.pk if hasattr(instance, 'pk') and instance.pk is not None else random.random()
        hash_val = '%s-%s-%s' % (instance.__class__.__name__, pk, datetime.datetime.utcnow())
        return shortuuid.uuid(name=hash_val)
    else:
        return shortuuid.uuid()
Example #5
def forwards_func(apps, schema_editor):
    ClowderUser = apps.get_model("clowder_account", "ClowderUser")
    db_alias = schema_editor.connection.alias
    users = ClowderUser.objects.all()
    for user in users:
        user.public_key = shortuuid.uuid()
        user.secret_key = shortuuid.uuid()
        user.save()
Example #6
def generate_data(key):
    return {
        "model": "bookmark.%s" % key,
        "pk": shortuuid.uuid(),
        "fields": {
            "%s" % key: shortuuid.uuid()
        }
    }
Example #7
def generate_data(key):
    """generate data."""
    return {
        "model": "nenga.%s" % key,
        "pk": shortuuid.uuid(),
        "fields": {
            "%s" % key: shortuuid.uuid()
        }
    }
Example #8
 def setUp(self):
     self.required_fields = getattr(User, 'REQUIRED_FIELDS', [])
     self.required_fields.append(
         getattr(User, 'USERNAME_FIELD', 'username'))
     if 'username' in self.required_fields:
         self.inviter = User.objects.create(username=uuid())
         self.existing = User.objects.create(
             email='*****@*****.**', username=uuid())
     else:
         self.inviter = User.objects.create()
         self.existing = User.objects.create(email='*****@*****.**')
Example #9
def crawling_history(feed_id):
    return {
        "model": "bookmark.crawling_history",
        "pk": shortuuid.uuid(),
        "fields": {
            "id": shortuuid.uuid(),
            "feed": feed_id,
            "update_datetime": datetime.now().strftime(
                "%Y-%m-%dT%H:%M:%S+00:00")
        }
    }
Example #10
def feed_subscription(owner_id, category_id):
    return {
        "model": "bookmark.feed_subscription",
        "pk": shortuuid.uuid(),
        "fields": {
            "url": "http://%s.example.org/%s" % (shortuuid.uuid()[:4],
                                                 shortuuid.uuid()),
            "name": shortuuid.uuid(),
            "owner": owner_id,
            "default_category": category_id
        }
    }
Example #11
def main():
    parser = OptionParser()
    parser.add_option("-s", "--service-registry",
                      dest="registry_nodes",
                      default=os.getenv('GILLIAM_SERVICE_REGISTRY', ''),
                      help="service registry nodes", metavar="HOSTS")
    parser.add_option('--name', dest="name")
    parser.add_option("-p", "--port", dest="port", type=int,
                      help="listen port", metavar="PORT", default=9000)
    parser.add_option('--host', dest="host", default=None,
                      help="public hostname", metavar="HOST")
    (options, args) = parser.parse_args()
    assert options.host, "must specify host with --host"

    # logging
    format = '%(levelname)-8s %(name)s: %(message)s'
    logging.basicConfig(level=logging.INFO, format=format)

    formation = os.getenv('GILLIAM_FORMATION', 'executor')
    service = os.getenv('GILLIAM_SERVICE', 'api')
    instance = options.name or shortuuid.uuid()
    clock = Clock()

    base_url = os.getenv('DOCKER')
    docker = DockerClient(base_url) if base_url else DockerClient()

    service_registry_cluster_nodes = options.registry_nodes.split(',')
    service_registry = ServiceRegistryClient(
        clock, service_registry_cluster_nodes)

    cont_runtime = partial(PlatformRuntime, service_registry, options.registry_nodes)
    cont_store = ContainerStore(partial(Container, docker, cont_runtime,
                                        service_registry, options.host))

    # set-up runtime and store for the one-off containers:
    proc_runtime = partial(PlatformRuntime, service_registry, options.registry_nodes,
                           attach=True)
    proc_factory = lambda image, command, env, ports, opts, formation, **kw: Container(
        docker, proc_runtime, None, None, image, command, env, ports, opts,
        formation, None, shortuuid.uuid(), restart=False, **kw)
    proc_store = ContainerStore(proc_factory)

    register = partial(service_registry.register, formation, service, instance)
    announcement = service_registry.build_announcement(
        formation, service, instance, ports={options.port: str(options.port)},
        host=options.host)

    app = App(clock, cont_store, proc_store, docker, register, announcement)
    app.start()

    pywsgi.WSGIServer(('', options.port), app.create_api(),
                      handler_class=WebSocketHandler).serve_forever()
Example #12
 def create_uuid(self):
     if not self.version or self.version == 4:
         return shortuuid.uuid()
     elif self.version == 1:
         return shortuuid.uuid()
     elif self.version == 2:
         raise UUIDVersionError("UUID version 2 is not supported.")
     elif self.version == 3:
         raise UUIDVersionError("UUID version 3 is not supported.")
     elif self.version == 5:
         return shortuuid.uuid(name=self.namespace)
     else:
         raise UUIDVersionError("UUID version %s is not valid." % self.version)
Example #13
def generate_bookmark(owner_id, category_id, is_hide):
    return {
        "model": "bookmark.bookmark",
        "pk": shortuuid.uuid(),
        "fields": {
            "url": "http://%s.example.org/%s" % (shortuuid.uuid()[:4],
                                                 shortuuid.uuid()),
            "title": shortuuid.uuid(),
            "category": category_id,
            "description": shortuuid.uuid(),
            "owner": owner_id,
            "is_hide": is_hide
        }
    }
Example #14
 def pre_save(self, model_instance, add):
     """
     This is used to ensure that we auto-set values if required.
     See CharField.pre_save
     """
     value = super(ShortUUIDField, self).pre_save(model_instance, add)
     if self.auto and not value:
         # Assign a new value for this attribute if required.
         if sys.version_info < (3, 0):
             value = unicode(shortuuid.uuid())
         else:
             value = str(shortuuid.uuid())
         setattr(model_instance, self.attname, value)
     return value
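A hedged usage sketch for the field above: declaring a Django model whose key is auto-populated by this pre_save. The import path assumes the django-shortuuidfield package and is not taken from the source.

from django.db import models
from shortuuidfield import ShortUUIDField  # assumed package providing the field above

class Ticket(models.Model):
    code = ShortUUIDField()                 # auto=True by default; filled in by pre_save
    label = models.CharField(max_length=80)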
Example #15
def ics(request, team_id=None, team_name=None):

    if team_id:
        this_team = Team.objects.get(id=team_id)
    elif team_name:
        this_team = Team.objects.get(name=team_name)

    home_games = Game.objects.filter(home_team=this_team)
    away_games = Game.objects.filter(away_team=this_team)

    games = home_games | away_games
    games = games.order_by("time", "field")

    cal = Calendar()
    cal.add('prodid', '-//Breakway Schedules//Soccer Calendars//EN')
    cal.add('version', '2.0')
    cal.add('X-WR-CALNAME', this_team.name)
    cal.add('X-WR-TIMEZONE', 'CST6CDT')
    cal.add('X-WR-CALDESC', 'Breakaway Team Schedule')

    now_dt = datetime.now()
    now_string = "%04d%02d%02dT%02d%02d%02d" % (
        now_dt.year,
        now_dt.month,
        now_dt.day,
        now_dt.hour,
        now_dt.minute,
        now_dt.second
    )

    for game in games:
        event = Event()
        try:
            summary = '%s vs. %s' % (game.home_team, game.away_team)
        except Exception:
            summary = 'Breakaway game'

        if game.color_conflict:
            desc = 'Color conflict! (%s vs. %s)' % (game.away_team.color, game.home_team.color)
            summary += ' (color conflict)'
            event.add('description', desc)

        event.add('summary', summary)

        event.add('dtstart', game.time)
        event.add('dtend', game.time + timedelta(hours=1))
        event.add('dtstamp', datetime.now())
        event.add('location', "BreakAway Field %s" % game.field)
        event['uid'] = '%s/%s@example.com' % (now_string, shortuuid.uuid())  # domain redacted in source; example.com is a placeholder
        event.add('priority', 5)

        alarm = Alarm()
        alarm.add("TRIGGER;RELATED=START", "-PT{0}M".format('45'))
        alarm.add('action', 'display')
        alarm.add('description', 'Breakaway game')

        event.add_component(alarm)
        cal.add_component(event)

    return HttpResponse(cal.to_ical(), content_type='text/calendar')
Example #16
def deserialize_person(person, data):
    person.uuid = data['uuid']
    person.first_name = data['first_name']
    person.last_name = data['last_name']
    setattr_from(person, 'pref_email', data, parse=get_contact)

    person.username = shortuuid.uuid()
Example #17
def downloadAndSaveImages(url_list, socketid):
    try:
        uuid = shortuuid.uuid()
        directory = os.path.join(conf.PIC_DIR, str(uuid))
        if not os.path.exists(directory):
            os.mkdir(directory)

        for url in url_list[""]:
            try:
                log_to_terminal(str(url), socketid)

                file = requests.get(url)
                file_full_name_raw = basename(urlparse(url).path)
                file_name_raw, file_extension = os.path.splitext(file_full_name_raw)
                # First parameter is the replacement, second parameter is your input string
                file_name = re.sub('[^a-zA-Z0-9]+', '', file_name_raw)

                f = open(os.path.join(conf.PIC_DIR, str(uuid) + file_name + file_extension), 'wb')
                f.write(file.content)
                f.close()

                imgFile = Image.open(os.path.join(conf.PIC_DIR, str(uuid) + file_name + file_extension))
                size = (500, 500)
                imgFile.thumbnail(size, Image.ANTIALIAS)
                imgFile.save(os.path.join(conf.PIC_DIR, str(uuid), file_name + file_extension))
                log_to_terminal('Saved Image: ' + str(url), socketid)
            except Exception as e:
                print(str(e))
        return uuid, directory
    except:
        print('Exception' + str(traceback.format_exc()))
Example #18
def short_token():
    """
    Generate a hash that can be used as an application identifier
    """
    hash = hashlib.sha1(shortuuid.uuid())
    hash.update(settings.SECRET_KEY)
    return hash.hexdigest()[::2]
Example #19
def wrapExpectationsLocal(cmd):
    script = createScript(cmd)
    remoteScript = '/tmp/fexpect_'+shortuuid.uuid()
    with open(remoteScript, 'w') as filehandle:
        filehandle.write(script)
    wrappedCmd = 'python '+remoteScript
    return wrappedCmd
Example #20
def save_object(collection, obj):
    """Save an object ``obj`` to the given ``collection``.

    ``obj.id`` must be unique across all other existing objects in
    the given collection.  If ``id`` is not present in the object, a
    *UUID* is assigned as the object's ``id``.

    Indexes already defined on the ``collection`` are updated after
    the object is saved.

    Returns the object.
    """
    if 'id' not in obj:
        obj.id = uuid()
    id = obj.id
    path = object_path(collection, id)
    temp_path = '%s.temp' % path
    with open(temp_path, 'w') as f:
        data = _serialize(obj)
        f.write(data)
    shutil.move(temp_path, path)
    if id in _db[collection].cache:
        _db[collection].cache[id] = obj
    _update_indexes_for_mutated_object(collection, obj)
    return obj
Example #21
    def slug(self, **kwargs):
        name = str(self.project.pk) + '-' + str(self.company.pk)

        if len(kwargs.keys()) > 0:
           name = '{name}{extra}'.format(name=name, extra='-'.join([i for i in kwargs.values()]).encode('utf-8').strip())

        return shortuuid.uuid(name=name)
Example #22
    def safe_filename(self, filename):
        """ If the file already exists the file will be renamed to contain a
        short url safe UUID. This will avoid overwtites.

        Arguments
        ---------
        filename : str
            A filename to check if it exists

        Returns
        -------
        str
            A safe filename to use when writing the file
        """

        while self.exists(filename):
            dir_name, file_name = os.path.split(filename)
            file_root, file_ext = os.path.splitext(file_name)
            uuid = shortuuid.uuid()
            filename = secure_filename('{0}_{1}{2}'.format(
                file_root,
                uuid,
                file_ext))

        return filename
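The same rename-on-collision pattern works outside the class as well; a hedged standalone sketch that substitutes os.path.exists for self.exists and keeps werkzeug's secure_filename:

import os
import shortuuid
from werkzeug.utils import secure_filename

def safe_filename(filename):
    # Keep appending a short UUID to the stem until the path no longer exists.
    while os.path.exists(filename):
        dir_name, file_name = os.path.split(filename)
        file_root, file_ext = os.path.splitext(file_name)
        filename = os.path.join(
            dir_name,
            secure_filename('{0}_{1}{2}'.format(file_root, shortuuid.uuid(), file_ext)))
    return filename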
Example #23
 def __init__(self, docker, runtime, registry, host,
              image, command, env, ports, options,
              formation, service, instance,
              restart=True, tty=False):
     self.docker = docker
     self.runtime = runtime
     self.registry = registry
     self.host = host
     self.id = shortuuid.uuid()
     cmd = ' '.join(command) if isinstance(command, list) else command
     self.log = logging.getLogger('container[{0}/{1}.{2} (image={3}, command="{4}")]'.format(
             formation, service, instance, image, cmd))
     self.image = image
     self.command = command
     self.env = env
     self.ports = ports
     self.options = options
     self.formation = formation
     self.service = service
     self.instance = instance
     self.state = 'init'
     self.tty = tty
     self.reason = None
     self.status_code = None
     self._stopped = Event()
     self._cont_id = None
     self._registration = None
     self._runtime = None
     self._restart = restart
     self._reset()
Example #24
    def clean(self):
        if self.begins_at > self.ends_at:
            self.raise_invalid_bounds('begins_at')

        if not self.slug and not self.is_private:
            self.slug = slugify(self.title)
        elif not self.slug:
            self.slug = shortuuid.uuid()

        xmin, ymin, xmax, ymax = settings.NYC_BOUNDS
        p = self.location
        if not p:
            raise ValidationError({'location': [
                "Location is required"
            ]})
        elif p.x < xmin or p.x > xmax or p.y < ymin or p.y > ymax:
            raise ValidationError({'location': [
                "Please choose a location in New York City"
            ]})

        rsvp_count = self.eventregistration_set.count()
        if self.max_attendees < rsvp_count:
            raise ValidationError({'max_attendees': [
                "Max attendees cannot be set to a value less than the number "
                "of people currently registered (%d)" % rsvp_count]})
Example #25
def getConguration():
    with open(db) as f:
        config["database"] = json.load(f)
        try:
            con = MySQLdb.connect(
                config.get("database").get("host"),
                config.get("database").get("user"),
                config.get("database").get("passwd"),
                config.get("database").get("db"),
                cursorclass=MySQLdb.cursors.DictCursor,
            )
            if db:
                cur = con.cursor()
                cur.execute(
                    "SELECT * FROM `configurations` WHERE id = (SELECT active FROM `states` ORDER BY id DESC limit 1)"
                )
                config["fingerprint"] = cur.fetchone()
                config["session"] = shortuuid.uuid()
                config["remote_ip"] = ipgetter.myip()
                config["vpn_ip"] = ni.ifaddresses("tun0")[2][0]["addr"]
                print "Connected to " + str(config["vpn_ip"])
                config["fingerprint"]["amp_min"] = 10
                config["fingerprint"]["plot"] = 0
                config["verbose"] = False
                config["soundcard"] = {"chunksize": 8096, "channels": 1}
                return config
            else:
                print "Connection unsuccessful"
        except MySQLdb.Error, e:
            print "MySQL Error [%d]: %s" % (e.args[0], e.args[1])
            print "MySQL Error: %s" % str(e)
Example #26
def long_token():
    """
    Generate a hash that can be used as an application secret
    """
    hash = hashlib.sha1(shortuuid.uuid())
    hash.update(settings.SECRET_KEY)
    return hash.hexdigest()
Example #27
 def __init__(self, docker, runtime, registry, host,
              image, command, env, ports, options,
              formation, service, instance,
              restart=True, tty=False):
     self.docker = docker
     self.runtime = runtime
     self.registry = registry
     self.host = host
     self.id = shortuuid.uuid()
     self.log = logging.getLogger('container:%s' % (self.id,))
     self.image = image
     self.command = command
     self.env = env
     self.ports = ports
     self.options = options
     self.formation = formation
     self.service = service
     self.instance = instance
     self.state = 'init'
     self.tty = tty
     self.reason = None
     self.status_code = None
     self._stopped = Event()
     self._cont_id = None
     self._registration = None
     self._runtime = None
     self._restart = restart
Example #28
def generate_app_id(length=5):
    """
    Generate a short ID for this model.
    """
    id = shortuuid.uuid()[:length]
    date = datetime.datetime.now().strftime('%d%I%M%p%y-')
    return date + id
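For reference, a hedged illustration of what generate_app_id returns (actual values depend on the clock and the random UUID):

print(generate_app_id())    # e.g. '121045PM24-vytxe'  (strftime('%d%I%M%p%y-') prefix + 5 chars)
print(generate_app_id(10))  # e.g. '121045PM24-vytxeYzT3k'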
Example #29
def add_client():
    default_client_secret = shortuuid.uuid()

    name = click.prompt('name')
    client_id = click.prompt('client_id')
    client_secret = click.prompt('client_secret', default=default_client_secret,
                                 hide_input=True, confirmation_prompt=True)
    description = click.prompt('description', default='sample client')
    is_confidential = click.prompt('is_confidential', default=False, type=bool)
    redirect_uris = click.prompt('redirect_uri', )
    scopes = click.prompt('scopes', )

    client = Client(
        client_id=client_id,
        client_secret=client_secret,
        name=name,
        description=description,
        is_confidential=is_confidential,
        redirect_uris_text=redirect_uris,
        default_scopes_text=scopes,
    )

    db.session.add(client)
    try:
        db.session.commit()
    except IntegrityError as e:
        click.echo('Error occurred while adding client: <%s>' % client_id)
        db.session.rollback()
    finally:
        print(client)
Example #30
def parse_plaintext(body):
    """Parse plaintext and return references in a BibJSON-like dict."""
    tmpin = os.path.join("/tmp", shortuuid.uuid())
    #tmpin = shortuuid.uuid()
    body = unidecode(body)
    #print body
    preprocessed_body = preprocess_body(body)
    #preprocessed_body = "References\n\n" + group_citations(preprocessed_body)
    #print preprocessed_body
    file = open(tmpin, "w")
    file.write(codecs.BOM_UTF8)
    file.write(preprocessed_body.encode("utf-8"))
    file.close()
    try:
        parsing = envoy.run("perl ParsCit/bin/citeExtract.pl -m extract_citations %s" % tmpin)
        bibjson_citations = xml_to_bibjson(parsing.std_out)
    except IOError:
        return {"status": "error", "message": "Could not run parser on file"}
    try:
        os.remove(tmpin)
        os.remove(tmpin + ".cite")
        os.remove(tmpin + ".body")
    except OSError:
        pass

    return bibjson_citations
Example #31
def uuid_code():
    """
    Returns a short uuid code
    """
    return shortuuid.uuid()
Example #32
def get_a_random_api_token(length=8):
    token = shortuuid.uuid()[:length]
    return token
Example #33
'''
Author : ming
date   : 2018-01-12 13:43:27
role   : customized Application
'''
from shortuuid import uuid
from tornado import httpserver, ioloop
from tornado import options as tnd_options
from tornado.options import options, define
from tornado.web import Application as tornado_app
from libs.tornado_libs.web_logs import Logger, ins_log
from ops.settings import IP, PORT

define("port", default=PORT, help="run on the given port", type=int)
define("host", default=IP, help="run port on given host", type=str)
define("progid", default=str(uuid()), help="tornado progress id", type=str)


class Application(tornado_app):
    """ 定制 Tornado Application 集成日志 功能 """
    def __init__(self,
                 handlers=None,
                 default_host="",
                 transforms=None,
                 **settings):
        tnd_options.parse_command_line()  # print info-level logs
        Logger(options.progid)
        super(Application, self).__init__(handlers, default_host, transforms,
                                          **settings)
        http_server = httpserver.HTTPServer(self)
        http_server.listen(options.port, address=options.host)
Example #34
File: run.py Project: morkov/crew
#!/usr/bin/env python
import logging
from shortuuid import uuid
from optparse import OptionParser
from socket import getfqdn
from .listener import Listener
from .context import Context, context

NODE_UUID = uuid(getfqdn())
UUID = uuid()


def run(**kwargs):
    parser = OptionParser(usage="Usage: %prog [options]")
    parser.add_option("-v",
                      "--verbose",
                      action="store_true",
                      dest="verbose",
                      default=False,
                      help="make lots of noise")
    parser.add_option("--logging",
                      dest="logging",
                      default='info',
                      help="Logging level")
    parser.add_option("-H",
                      "--host",
                      dest="host",
                      default='localhost',
                      help="RabbitMQ host")
    parser.add_option("-P",
                      "--port",
Example #35
    def add_signal(self,
                   signal=None,
                   mdata=None,
                   group='',
                   name=None,
                   compress=False):
        """Add a signal to the file.

        Parameters
        ----------
        signal : array
            Signal to add.
        mdata : dict, optional
            Signal metadata.
        group : str, optional
            Destination signal group.
        name : str, optional
            Name of the dataset to create.
        compress : bool, optional
            If True, the signal will be compressed with gzip.

        Returns
        -------
        group : str
            Destination group.
        name : str
            Name of the created signal dataset.

        """

        # check inputs
        if signal is None:
            raise TypeError("Please specify an input signal.")

        if mdata is None:
            mdata = {}

        if name is None:
            name = shortuuid.uuid()

        # navigate to group
        weg = self._join_group(self._signals.name, group)
        try:
            node = self._file[weg]
        except KeyError:
            # create group
            node = self._file.create_group(weg)

        # create dataset
        if compress:
            dset = node.create_dataset(name, data=signal, compression='gzip')
        else:
            dset = node.create_dataset(name, data=signal)

        # add metadata
        dset.attrs['json'] = json.dumps(mdata)

        # output
        grp = weg.replace('/signals', '')

        return utils.ReturnTuple((grp, name), ('group', 'name'))
Example #36
def facebook_auth():
    resp = facebook.authorized_response()

    if resp is None or isinstance(resp, OAuthException):
        flash(gettext('An error occurred.'), 'danger')
        return redirect('/signup')

    access_token = resp['access_token']
    session['oauth_token'] = (access_token, '')
    facebook_me = facebook.get(
        '/me?fields=id,name,first_name,last_name,age_range,picture,link,gender,locale,timezone,updated_time,verified'
    )
    facebook_friends = facebook.get('/me/friends')

    authorize = Authorize.query\
        .filter_by(oauth_id=facebook_me.data.get('id'), network='facebook')\
        .first()

    friends = facebook_friends.data.get('summary')
    if friends:
        friends = friends['total_count']

    if authorize is None:

        if current_user.is_authenticated():
            user_id = current_user.get_id()

        else:
            user = User(facebook_me.data.get('name'), None, None)
            db.session.add(user)
            db.session.commit()
            user_id = user.id

            user_locale = facebook_me.data.get('locale')
            if user_locale:
                user.locale = user_locale[:2]
                db.session.add(user)
                db.session.commit()

        authorize = Authorize(facebook_me.data.get('id'),
                              facebook_me.data.get('name'),
                              facebook_me.data.get('gender'),
                              facebook_me.data.get('locale'),
                              facebook_me.data.get('verified'), friends, None,
                              'facebook', user_id)
    else:
        authorize.name = facebook_me.data.get('name')
        authorize.gender = facebook_me.data.get('gender')
        authorize.locale = facebook_me.data.get('locale')
        authorize.verified = facebook_me.data.get('verified')
        authorize.friends = friends

    db.session.add(authorize)
    db.session.commit()

    if current_app.config['DOWNLOAD_FACEBOOK_PHOTO']:
        fb_picture = facebook_me.data.get('picture')
        if fb_picture and authorize.picture != fb_picture['data']['url']:
            try:
                file_name = shortuuid.uuid() + '.jpg'
                file_folder = os.path.join(current_app.root_path,
                                           current_app.config['UPLOAD_FOLDER'])
                file_path = os.path.join(file_folder, file_name)

                url = 'https://graph.facebook.com/{}/picture?type=large&redirect=true&access_token={}'.format(
                    facebook_me.data.get('id'), access_token)
                urllib.request.urlretrieve(url, file_path)

                authorize.picture = fb_picture['data']['url']
                db.session.add(authorize)

                user = User.query.get(authorize.user_id)
                user.picture = file_name
                db.session.add(user)
                db.session.commit()
            except Exception:
                pass

    if current_user.is_anonymous():
        user = User.query.get(authorize.user_id)
        login_user(user, remember=True)

    return redirect(request.args.get('next') or '/user')
Example #37
import shortuuid

print(shortuuid.uuid())

print(shortuuid.uuid(name="example.com"))
print(shortuuid.uuid(name="http://www.google.com"))

print(shortuuid.ShortUUID().random(length=50))

print(shortuuid.get_alphabet())

import uuid

s = shortuuid.encode(uuid.uuid4())
print(s)

su = shortuuid.ShortUUID()
print(su.uuid())
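As a follow-up to the API tour above, a minimal sketch showing that shortuuid.encode and shortuuid.decode round-trip a standard UUID:

import uuid
import shortuuid

u = uuid.uuid4()
s = shortuuid.encode(u)          # 22-character string under the default alphabet
assert shortuuid.decode(s) == u  # decode restores the original uuid.UUID
print(u, s)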
Example #38
def generate(ds_name,
             tags_list,
             filters,
             background_dir=None,
             rand_backgrounds=False):

    start_time = time.time()

    # search for available GPUs to speed up generation time
    prop = bpy.context.preferences.addons['cycles'].preferences
    prop.get_devices()

    prop.compute_device_type = 'CUDA'

    for device in prop.devices:
        if device.type == 'CUDA':
            device.use = True

    bpy.context.scene.cycles.device = 'GPU'

    for scene in bpy.data.scenes:
        scene.cycles.device = 'GPU'

    #check if folder exists in render, if not, create folder
    try:
        os.mkdir("render/" + ds_name)
    except Exception:
        pass

    data_storage_path = os.getcwd() + "/render/" + ds_name
    #setting file output stuff
    output_node = bpy.data.scenes["Render"].node_tree.nodes["File Output"]
    output_node.base_path = data_storage_path

    #remove all animation
    for scene in bpy.data.scenes:
        for obj in scene.objects:
            obj.animation_data_clear()

    shortuuid.set_alphabet('12345678abcdefghijklmnopqrstwxyz')

    #np.random.seed(42)

    poses = utils.random_rotations(NUM)
    lightings = utils.random_rotations(NUM)

    images_list = []
    img_names = []

    # check if background dir is not None and get list of .exr files in that directory
    if background_dir is not None:
        for f in os.listdir(background_dir):
            if not rand_backgrounds:
                # background images for dynamically sized moon should be formatted: image_<distance>.exr
                # where distance is a float ie. 150.859, this is used to calculate the correct camera rotation so that the moon is in frame
                if f.endswith(".exr") or f.endswith(".png") or f.endswith(
                        ".jpg"):
                    imgnum = f.split("_")[1]
                    imgnum = imgnum.split(".")[0] + "." + imgnum.split(".")[1]
                    img_names.append(imgnum)
                    images_list.append(f)
            else:
                if f.endswith(".exr") or f.endswith(".png") or f.endswith(
                        ".jpg"):
                    img_names.append(f)
                    images_list.append(f)
        images_list = sorted(images_list)

    node_tree = bpy.data.scenes["Render"].node_tree
    check_nodes(filters, node_tree)
    reset_filter_nodes(node_tree)

    # set default background in case the base blender file is messed up
    bpy.data.worlds["World"].node_tree.nodes[
        'Environment Texture'].image = bpy.data.images["Moon1.exr"]

    for i, (pose, lighting) in enumerate(zip(poses, lightings)):

        for scene in bpy.data.scenes:
            scene.unit_settings.scale_length = 1 / SCALE

        nmi = np.random.uniform(low=0.5, high=6)
        distance = nm_to_bu(nmi)

        if img_names:
            random_name = random.choice(img_names)

        if not rand_backgrounds:
            # 75/25 split between images with moon background and deep space background
            if np.random.uniform(0, 1) < 0.75:
                if img_names:
                    # moon background - uniform distribution over disk slightly larger than moon - hopefully
                    r = (45 / float(random_name)) * MOON_RADIUS * 2 * np.sqrt(
                        np.random.random())
                    t = np.random.uniform(low=0, high=2 * np.pi)
                    background = Euler([
                        0, MOON_CENTERX - (r / 2) * np.cos(t),
                        MOON_CENTERY + (r / 2) * np.sin(t)
                    ])
                else:
                    r = MOON_RADIUS * np.sqrt(np.random.random())
                    t = np.random.uniform(low=0, high=2 * np.pi)
                    background = Euler([
                        0, MOON_CENTERX - r * np.cos(t),
                        MOON_CENTERY + r * np.sin(t)
                    ])
            else:
                background = Euler([0, 0, 0])

        else:
            # no randomized background images should have a [0,0,0] background as gateway will be centered on weird distortion pattern
            r = MOON_RADIUS * np.sqrt(np.random.random())
            t = np.random.uniform(low=0, high=2 * np.pi)
            background = Euler([
                0, MOON_CENTERX - r * np.cos(t), MOON_CENTERY + r * np.sin(t)
            ])

        offset = np.random.uniform(low=0.0, high=1.0, size=(2, ))
        position = np.random.uniform(low=0.0, high=1.0, size=(3, ))

        bpy.context.scene.frame_set(0)
        frame = starfish.Frame(position=position,
                               background=background,
                               pose=pose,
                               lighting=lighting,
                               distance=distance,
                               offset=offset)
        frame.setup(bpy.data.scenes['Real'], bpy.data.objects["Gateway"],
                    bpy.data.objects["Camera"], bpy.data.objects["Sun"])

        # load new Environment Texture
        if img_names:
            if not rand_backgrounds:
                image = bpy.data.images.load(filepath=background_dir +
                                             "/image_" + random_name + ".exr")

            else:
                image = bpy.data.images.load(filepath=os.path.join(
                    background_dir, random.choice(images_list)))
            bpy.data.worlds["World"].node_tree.nodes[
                'Environment Texture'].image = image

        set_filter_nodes(filters, node_tree)
        #create name for the current image (unique to that image)
        name = shortuuid.uuid()
        output_node.file_slots[0].path = "image_" + str(name) + "#"
        output_node.file_slots[1].path = "mask_" + str(name) + "#"

        mask_filepath = os.path.join(output_node.base_path,
                                     "mask_" + str(name) + "0.png")
        meta_filepath = os.path.join(output_node.base_path,
                                     "meta_" + str(name) + "0.json")

        # render
        bpy.ops.render.render(scene="Render")

        #Tag the pictures
        frame.tags = tags_list
        # add metadata to frame
        frame.sequence_name = ds_name

        # run color normalization with labels plus black background
        normalize_mask_colors(mask_filepath,
                              list(LABEL_MAP.values()) + [(0, 0, 0)])

        # get bbox and centroid and add them to metadata
        frame.bboxes = get_bounding_boxes_from_mask(mask_filepath, LABEL_MAP)
        frame.centroids = get_centroids_from_mask(mask_filepath, LABEL_MAP)

        with open(
                os.path.join(output_node.base_path,
                             "meta_" + str(name) + "0.json"), "w") as f:
            f.write(frame.dumps())

    print("===========================================" + "\r")
    time_taken = time.time() - start_time
    print("------Time Taken: %s seconds----------" % (time_taken) + "\r")
    print("Data stored at: " + data_storage_path)
    bpy.ops.wm.quit_blender()
Example #39
from azure.storage.blob import BlobServiceClient
import boto3
from dotenv import find_dotenv, load_dotenv
from pytest_cases import fixture, fixture_union
from shortuuid import uuid

from cloudpathlib import AzureBlobClient, AzureBlobPath, S3Client, S3Path
import cloudpathlib.azure.azblobclient
import cloudpathlib.s3.s3client
from .mock_clients.mock_azureblob import mocked_client_class_factory
from .mock_clients.mock_s3 import mocked_session_class_factory

load_dotenv(find_dotenv())

SESSION_UUID = uuid()

# ignore these files when uploading test assets
UPLOAD_IGNORE_LIST = [
    ".DS_Store",  # macOS cruft
]


@fixture()
def assets_dir() -> Path:
    """Path to test assets directory."""
    return Path(__file__).parent / "assets"


class CloudProviderTestRig:
    """Class that holds together the components needed to test a cloud implementation."""
Example #40
stmthead = "UPDATE " + tab + " set image_file = '%s' where id = %d"
stmt = "SELECT id, image_url, genus, family  FROM " + tab + " where image_url <>'' and image_url is not null and (image_file is null or image_file = '') order by id"

cur.execute(stmt)
i = 0
for row in cur:
    i = i + 1
    url = row[1]
    if app == 'other' and row[3]:
        family = row[3].title()
    urlparts = re.search('\/(.*)(\?)?', url)
    a = urlparts.group(1)
    # ext = a.split('.')[-1]
    ext = 'jpg'
    uid = shortuuid.uuid()
    fname = row[2] + '_' + shortuuid.uuid() + "." + ext
    # fname = "%s_%09d_%09d.jpg" % (type, row[0], row[2])

    if not url or url == '':
        print("Bad", url)
        cur.nextset()

    try:
        # html = urlopen(url)
        if app == 'other':
            imgdir = "/mnt/static/utils/images/" + family + "/"
            Path(imgdir).mkdir(parents=True, exist_ok=True)
        local_filename, headers = urllib.request.urlretrieve(
            url, imgdir + fname)
        stmt = stmthead % (fname, row[0])
Example #41
    def add_event(self,
                  ts=None,
                  values=None,
                  mdata=None,
                  group='',
                  name=None,
                  compress=False):
        """Add an event to the file.

        Parameters
        ----------
        ts : array
            Array of time stamps.
        values : array, optional
            Array with data for each time stamp.
        mdata : dict, optional
            Event metadata.
        group : str, optional
            Destination event group.
        name : str, optional
            Name of the dataset to create.
        compress : bool, optional
            If True, the data will be compressed with gzip.

        Returns
        -------
        group : str
            Destination group.
        name : str
            Name of the created event dataset.

        """

        # check inputs
        if ts is None:
            raise TypeError("Please specify an input array of time stamps.")

        if values is None:
            values = []

        if mdata is None:
            mdata = {}

        if name is None:
            name = shortuuid.uuid()

        # navigate to group
        weg = self._join_group(self._events.name, group)
        try:
            node = self._file[weg]
        except KeyError:
            # create group
            node = self._file.create_group(weg)

        # create new event group
        evt_node = node.create_group(name)

        # create datasets
        if compress:
            _ = evt_node.create_dataset('ts', data=ts, compression='gzip')
            _ = evt_node.create_dataset('values',
                                        data=values,
                                        compression='gzip')
        else:
            _ = evt_node.create_dataset('ts', data=ts)
            _ = evt_node.create_dataset('values', data=values)

        # add metadata
        evt_node.attrs['json'] = json.dumps(mdata)

        # output
        grp = weg.replace('/events', '')

        return utils.ReturnTuple((grp, name), ('group', 'name'))
Example #42
def short_token():
    """Generate a 20-character random token"""
    hash = hashlib.sha1(shortuuid.uuid().encode('utf-8'))
    hash.update(settings.SECRET_KEY.encode('utf-8'))
    return hash.hexdigest()[::2]
Example #43
async def handler(messages: List[WorkerMsg]):
    for msg in messages:
        site_id = msg['site_id']
        url = msg['url']
        domain = msg.get('domain', '')
        pattern = "*"
        html = await get_page(url)
        new_page = True
        if msg.get('doc_id'):
            new_page = False
            key = msg['obj_key']
            meta_key = msg['meta_obj_key']
            doc_id = msg['doc_id']
        else:
            key = shortuuid.uuid(name=url)
            meta_key = f"{key}.metadata.json"
            doc_id = f"{site_id}:{key}"

        f = BytesIO(html.raw_html)
        BUCKET.upload_fileobj(f, key, ExtraArgs={"ACL": "bucket-owner-full-control"})

        doc_title = html.find("title", first=True).text

        metadata = {
            "DocumentId": doc_id,
            "Attributes": {
                "_source_uri": url,
                "site_id": site_id,
                "domain": domain,
            },
            "Title": doc_title,
            "ContentType": "HTML",
        }

        meta_obj = BytesIO(json.dumps(metadata, ensure_ascii=False).encode('utf-8'))
        BUCKET.upload_fileobj(meta_obj, meta_key, ExtraArgs={"ACL": "bucket-owner-full-control"})

        now = arrow.utcnow()
        updates: list = [
            Page.scraped.set(True),
            Page.doc_title.set(doc_title),
            Page.last_scraped_at.set(now.timestamp)
        ]
        if new_page:
            updates += [
                Page.doc_id.set(doc_id),
                Page.obj_key.set(key),
                Page.meta_obj_key.set(meta_key),
            ]
        try:
            Page(site_id, url).update(updates)
        except Page.DoesNotExist:
            print(f"Fail update scrap {url} result")
            continue

        # get links
        links = {refine_url(l) for l in html.absolute_links if l != url and (domain in l) and verify(pattern, url)}
        for link in links:
            try:
                page = Page.get(site_id, link)
                page.update(
                    [Page.scraped.set(False)],
                    # skip if it was scraped within the last hour
                    condition=~Page.last_scraped_at.exists() | Page.last_scraped_at < now.shift(hours=-1).timestamp
                )
            except Page.DoesNotExist:
                page = Page(
                    site_id, link,
                    _type='html',
                    user=msg['user'],
                )
                page.save()
            except Exception:
                print(f'already scraped {site_id} : {url}')
Example #44
def create_short_url():
    return shortuuid.uuid()[:7]
Example #45
def generate_id():
    import shortuuid

    return shortuuid.uuid()
Example #46
Universal_pat = re.compile(r"[a-zA-Z\_][0-9a-zA-Z\_]*")
comma_pat = re.compile(r"\s*,\s*")
logger = Logger("sys").getLogger
cli_logger = Logger("cli").getLogger
err_logger = Logger("error").getLogger
plugin_logger = Logger("plugin").getLogger
access_logger = Logger("access").getLogger
md5 = lambda pwd: hashlib.md5(pwd).hexdigest()
hmac_sha256 = lambda message: hmac.new(key="273d32c8d797fa715190c7408ad73811",
                                       msg=message,
                                       digestmod=hashlib.sha256).hexdigest()
gen_token = lambda n=32: b32encode(uuid4().hex)[:n]
gen_requestId = lambda: str(uuid4())
gen_fingerprint = lambda n=16, s=2: ":".join(
    ["".join(random.sample("0123456789abcdef", s)) for i in range(0, n)])
gen_uniqueId = lambda: shortuuid.uuid()


def ip_check(ip):
    if ip and isinstance(ip, (str, unicode)):
        return ip_pat.match(ip)


def email_check(email):
    if email and isinstance(email, (str, unicode)):
        return mail_pat.match(email)


def phone_check(phone):
    if phone and isinstance(phone, (str, unicode)):
        return mobilephone_pat.match(phone)
Example #47
def main_pipeline(config):
    """
    runs the pipeline subcommand. Runs clustering (or just splitting if clustering is skipped) and sets up
    submission scripts for the other pipeline stages/steps
    """
    from shortuuid import uuid
    batch_id = 'batch-' + str(uuid())
    conversion_dir = mkdirp(os.path.join(config.output, 'converted_inputs'))
    pairing_inputs = []
    submitall = []
    jobid_var_index = 0
    config.output = os.path.abspath(config.output)

    def get_prefix(filename):
        return re.sub(r'\.[^\.]+$', '', os.path.basename(filename))

    job_name_by_output = {}

    for libconf in config.libraries.values():
        base = os.path.join(
            config.output,
            '{}_{}_{}'.format(libconf.library, libconf.disease_status,
                              libconf.protocol))
        log('setting up the directory structure for', libconf.library, 'as',
            base)
        libconf.inputs = run_conversion(config, libconf, conversion_dir)

        # run the cluster stage
        cluster_output = mkdirp(os.path.join(
            base, SUBCOMMAND.CLUSTER))  # creates the output dir
        merge_args = {'batch_id': batch_id, 'output': cluster_output}
        merge_args['split_only'] = SUBCOMMAND.CLUSTER in config.skip_stage
        merge_args.update(config.reference.items())
        merge_args.update(config.cluster.items())
        merge_args.update(libconf.items())
        log('clustering', '(split only)' if merge_args['split_only'] else '')
        inputs = cluster_main.main(log_args=True, **merge_args)

        for inputfile in inputs:
            prefix = get_prefix(inputfile)  # will be batch id + job number
            dependency = ''
            if SUBCOMMAND.VALIDATE not in config.skip_stage:
                outputdir = mkdirp(
                    os.path.join(base, SUBCOMMAND.VALIDATE, prefix))
                command = build_validate_command(config, libconf, inputfile,
                                                 outputdir)
                # build the submission script
                options = {k: config.schedule[k] for k in STD_SUBMIT_OPTIONS}
                options['stdout'] = outputdir
                options['jobname'] = 'MV_{}_{}'.format(libconf.library, prefix)

                if libconf.is_trans():
                    options[
                        'memory_limit'] = config.schedule.trans_validation_memory
                else:
                    options['memory_limit'] = config.schedule.validation_memory
                script = SubmissionScript(command, config.schedule.scheduler,
                                          **options)
                scriptname = script.write(os.path.join(outputdir, 'submit.sh'))

                submitall.append('vjob{}=$({} {})'.format(
                    jobid_var_index,
                    SCHEDULER_CONFIG[config.schedule.scheduler].submit,
                    scriptname))
                # for setting up subsequent jobs and holds
                outputfile = os.path.join(outputdir, VALIDATION_PASS_PATTERN)
                job_name_by_output[outputfile] = options['jobname']
                inputfile = outputfile
                dependency = SCHEDULER_CONFIG[
                    config.schedule.scheduler].dependency(
                        '${{vjob{}##* }}'.format(jobid_var_index))
            # annotation cannot be skipped
            outputdir = mkdirp(os.path.join(base, SUBCOMMAND.ANNOTATE, prefix))
            command = build_annotate_command(config, libconf, inputfile,
                                             outputdir)

            options = {k: config.schedule[k] for k in STD_SUBMIT_OPTIONS}
            options['stdout'] = outputdir
            options['jobname'] = 'MA_{}_{}'.format(libconf.library, prefix)
            options['memory_limit'] = config.schedule.annotation_memory
            script = SubmissionScript(command, config.schedule.scheduler,
                                      **options)
            scriptname = script.write(os.path.join(outputdir, 'submit.sh'))
            submitall.append('ajob{}=$({} {} {})'.format(
                jobid_var_index,
                SCHEDULER_CONFIG[config.schedule.scheduler].submit, dependency,
                scriptname))
            outputfile = os.path.join(outputdir, ANNOTATION_PASS_PATTERN)
            pairing_inputs.append(outputfile)
            job_name_by_output[outputfile] = options['jobname']
            jobid_var_index += 1

    # set up scripts for the pairing held on all of the annotation jobs
    outputdir = mkdirp(os.path.join(config.output, SUBCOMMAND.PAIR))
    args = config.pairing.flatten()
    args.update({
        'output': outputdir,
        'annotations': config.reference.annotations_filename
    })
    command = ['{} {}'.format(PROGNAME, SUBCOMMAND.PAIR)]
    command.extend(stringify_args_to_command(args))
    command.append('--inputs {}'.format(' \\\n\t'.join(pairing_inputs)))
    command = ' \\\n\t'.join(command)

    options = {k: config.schedule[k] for k in STD_SUBMIT_OPTIONS}
    options['stdout'] = outputdir
    options['jobname'] = 'MP_{}'.format(batch_id)
    script = SubmissionScript(command, config.schedule.scheduler, **options)
    scriptname = script.write(os.path.join(outputdir, 'submit.sh'))

    submitall.append('jobid=$({} {} {})'.format(
        SCHEDULER_CONFIG[config.schedule.scheduler].submit,
        SCHEDULER_CONFIG[config.schedule.scheduler].dependency(':'.join([
            '${{ajob{}##* }}'.format(i) for i in range(0, jobid_var_index)
        ])), scriptname))

    # set up scripts for the summary held on the pairing job
    outputdir = mkdirp(os.path.join(config.output, SUBCOMMAND.SUMMARY))
    args = dict(output=outputdir,
                flanking_call_distance=config.pairing.flanking_call_distance,
                split_call_distance=config.pairing.split_call_distance,
                contig_call_distance=config.pairing.contig_call_distance,
                spanning_call_distance=config.pairing.spanning_call_distance,
                dgv_annotation=config.reference.dgv_annotation_filename,
                annotations=config.reference.annotations_filename,
                inputs=os.path.join(config.output,
                                    'pairing/mavis_paired*.tab'))
    args.update(config.summary.items())
    command = ['{} {}'.format(PROGNAME, SUBCOMMAND.SUMMARY)]
    command.extend(stringify_args_to_command(args))
    command = ' \\\n\t'.join(command)

    options = {k: config.schedule[k] for k in STD_SUBMIT_OPTIONS}
    options['stdout'] = outputdir
    options['jobname'] = 'MS_{}'.format(batch_id)
    script = SubmissionScript(command, config.schedule.scheduler, **options)
    scriptname = script.write(os.path.join(outputdir, 'submit.sh'))

    submitall.append('{} {} {}'.format(
        SCHEDULER_CONFIG[config.schedule.scheduler].submit,
        SCHEDULER_CONFIG[config.schedule.scheduler].dependency('${jobid##* }'),
        scriptname))

    # now write a script at the top level to submit all
    submitallfile = os.path.join(config.output,
                                 'submit_pipeline_{}.sh'.format(batch_id))
    log('writing:', submitallfile)
    with open(submitallfile, 'w') as fh:
        for line in submitall:
            fh.write(line + '\n')
Example #48
    async def post(request: Request):
        """
        Upload an app. Form fields: package (file, the installer), msg (str, release notes, empty by default).
        :param request:
        :return:
        """
        file = request.files.get('package')
        update_msg = request.form.get('msg') or ''
        if not file:
            log.warning('not upload file')
            raise BadRequest('not find file')

        session = Session()
        while 1:
            # installer package id
            fid = uuid('{}-{}'.format(id(Request), time.time()), 16)
            if not session.query(AppVersionModel).filter_by(id=fid).count():
                break
        file_name = '{}.{}'.format(fid, file.name[file.name.rfind('.') + 1:])
        file_path = '{}/{}'.format(Config.app_dir, file_name)

        # save the uploaded package
        async with aiofiles.open(file_path, 'wb+') as f:
            await f.write(file.body)
            log.debug('save upload success: {}'.format(file_path))

        package = await PackageParse.parse(file_path)
        if not package:
            os.remove(file_path)
            raise BadRequest('the file is not support')

        app_query = session.query(AppModel).filter_by(
            package_name=package.package_name, type=package.app_type)
        if app_query.count():  # the app already exists
            app = app_query.one()
        else:  # the app does not exist yet
            # generate a short link
            while 1:
                short_chain = random(8)
                if not session.query(AppModel).filter_by(
                        short_chain_uri_=short_chain).count():
                    break

            app_uuid = uuid(
                '+_{}-{}_+'.format(package.app_type, package.package_name), 16)
            icon_name = '{}.{}'.format(
                app_uuid, package.icon_path[package.icon_path.rfind('.') + 1:])
            app = AppModel(type=package.app_type,
                           short_chain_uri_=short_chain,
                           detail='',
                           name=package.app_name,
                           package_name=package.package_name,
                           icon_='{}/{}'.format(Config.icon_dir, icon_name),
                           icon_uri_='{}/{}'.format(Config.static_icon,
                                                    icon_name))
            session.add(app)
            session.commit()

        # save the app icon
        await package.save_icon(app.icon_)

        file_byte = os.path.getsize(file_path)
        file_size = Byte.pretty(file_byte)

        app_version = AppVersionModel(
            id=fid,
            version_name=package.version_name,
            version_code=package.version_code,
            update_msg=update_msg,
            size=file_size,
            package_='{}/{}'.format(Config.app_dir, file_name),
            package_uri_='{}/{}'.format(Config.static_app, file_name),
            app_id=app.id)
        session.add(app_version)
        session.commit()

        return JsonResult.ok().response_json()
Example #49
try:
    connection = psycopg2.connect(host=database_ids.host, database=database_ids.database, user=database_ids.user, password=database_ids.password)
    # declare a cursor
    cur = connection.cursor()
    # requests and reception of the data

    # createClient = "INSERT INTO fake_clients_clone VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
    refresh = "DELETE FROM fake_clients"
    createClient = "INSERT INTO fake_clients VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
    
    # empties the table
    cur.execute(refresh)
    count = 0
    while count < 200:
        client_id = shortuuid.uuid()
        name = fake.name()
        address = fake.address()
        phone_number = fake.phone_number()
        birthdate = str(fake.date_of_birth(None, 40, 65))
        email = fake.free_email()
        gender = randomStrRange("H", "F")
        et = fake.date_between(start_date='-90d', end_date='today') # need the raw version to make the last_maintenance
        engie_time = str(et)
        nb_panneaux = randomIntRange(1, 6)
        last_activity = fake.date_between(start_date='-6d', end_date='today')
        time_activity = scoringActivityApp()
        profil = randomIntRange(1, 4) # "Économe", "Autonome", "Écologie"
        famille = randomIntRange(1, 9) # "IFaP", "IFoP", "IFaN", "IFoN", "PFaP", "PFoP", "PFaN", "PFoN"
        feeling = randomIntRange(3, 6) # "Curieux", "Harmonieux", "Heureux"
        satisfaction = randomIntRange(20, 100)
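The loop above is cut off before the generated values are written back to the table. As an illustration only (the table and columns below are hypothetical placeholders, not the original fake_clients schema), a parameterized insert with psycopg2 looks like this:

import psycopg2
import shortuuid

# hypothetical connection parameters and a toy three-column table, for illustration only
conn = psycopg2.connect(host="localhost", database="demo", user="demo", password="demo")
cur = conn.cursor()
insert_sql = "INSERT INTO demo_clients (id, name, email) VALUES (%s, %s, %s)"
cur.execute(insert_sql, (shortuuid.uuid(), "Jane Doe", "jane@example.com"))
conn.commit()   # nothing is persisted until the transaction is committed
cur.close()
conn.close()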
Example #50
0
def mkdtemp(base_directory=None):
    prefix = "dvc-test.{}.".format(os.getpid())
    suffix = ".{}".format(shortuuid.uuid())
    return tempfile.mkdtemp(
        prefix=prefix, suffix=suffix, dir=base_directory
    )
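A short usage sketch, assuming the helper above is in scope; the cleanup step is an illustration, not part of the original test code:

import shutil

workdir = mkdtemp()           # e.g. /tmp/dvc-test.12345.AbCdEfGh.../
try:
    ...  # exercise code that needs a scratch directory
finally:
    shutil.rmtree(workdir)    # mkdtemp() does not remove the directory itself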
Example #51
0
def tmp_fname(fname):
    """ Temporary name for a partial download """
    return fspath(fname) + "." + uuid() + ".tmp"
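A name like this is commonly paired with an atomic rename so that a partially downloaded file never appears under its final name. A minimal sketch of that pattern, assuming a hypothetical download_to(url, path) helper passed in by the caller:

import os
from shortuuid import uuid

def tmp_fname(fname):
    """Temporary name for a partial download"""
    return os.fspath(fname) + "." + uuid() + ".tmp"

def fetch(url, dest, download_to):
    tmp = tmp_fname(dest)
    download_to(url, tmp)      # write the payload to the unique temporary name
    os.replace(tmp, dest)      # atomic on the same filesystem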
Example #52
0
    def track_trajectory(self):
        self._is_tracking = True

        id = shortuuid.uuid()

        cap = self._cap

        # MIL appears to work best...
        tracker = self.get_tracker(self._tracker_type)

        cap_fps = cap.get(cv2.CAP_PROP_FPS)
        current_frame = int(cap.get(
            cv2.CAP_PROP_POS_FRAMES))  # int(cap_fps * start_sec)
        duration_frames = int(cap_fps * self._duration_sec)

        # cap.set(cv2.CAP_PROP_POS_FRAMES, current_frame)
        time = (current_frame - 1) / cap_fps  # elapsed time in seconds at the current frame

        resw = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
        resh = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
        res = (resw, resh)

        frame = self._current_frame
        cv2.putText(frame, "SET MARK", (100, 150), cv2.FONT_HERSHEY_SIMPLEX,
                    0.75, (50, 170, 50), 2)
        cv2.imshow(WIN_NAME, frame)

        if self._save_video:

            fourcc = cv2.VideoWriter_fourcc(*'DIVX')
            out_fname = self._root_path + self._filename + \
                        '.multi_track_{0}.avi'.format(id)
            out = cv2.VideoWriter(out_fname, fourcc, cap_fps, res)

        # Define an initial bounding box
        # bbox = (287, 23, 86, 320)

        bbox = cv2.selectROI(WIN_NAME, frame, True)
        ok = tracker.init(frame, bbox)

        # Uncomment the line below to select a different bounding box
        # selector_frame = frame.copy()

        csv_fname = self._root_path + self._filename + \
                    '.{0}.{1}.{2}.{3}.csv'.format(id, bbox[2], bbox[3], current_frame)
        csvfile = open(csv_fname, 'w', newline='')
        bac_writer = csv.writer(csvfile,
                                delimiter=',',
                                quotechar='|',
                                quoting=csv.QUOTE_MINIMAL)

        bac_writer.writerow(['BacteriaId', 'Frame', 'Time (s)', 'X', 'Y'])

        invalid = False

        while cap.isOpened() and current_frame < duration_frames:
            # ret, frame = cap.read()
            frame = self.next_frame()

            # Start timer
            timer = cv2.getTickCount()

            # Update tracker
            ok, bbox = tracker.update(frame)

            # Calculate Frames per second (FPS)
            fps = cv2.getTickFrequency() / (cv2.getTickCount() - timer)

            # Draw bounding box
            if ok:
                p1 = (int(bbox[0]), int(bbox[1]))
                p2 = (int(bbox[0] + bbox[2]), int(bbox[1] + bbox[3]))
                cv2.rectangle(frame, p1, p2, (255, 0, 0), 2, 1)
                bac_writer.writerow([
                    id, current_frame, "{:.3f}".format(time),
                    int(bbox[0] + bbox[2] / 2),
                    int(bbox[1] + bbox[3] / 2)
                ])
            else:
                # Tracking failure
                cv2.putText(frame, "Tracking failure detected", (100, 80),
                            cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 0, 255), 2)

            if self._save_video:
                # Display tracker type on frame
                # cv2.putText(frame, "2.131 Bacterial Tracker", (100, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (50, 170, 50), 2);
                # Display FPS on frame
                cv2.putText(frame, "FPS : " + str(int(fps)), (100, 50),
                            cv2.FONT_HERSHEY_SIMPLEX, 0.75, (50, 170, 50), 2)
                out.write(frame)

            # Display result
            cv2.imshow(WIN_NAME, frame)
            cv2.setTrackbarPos(POS_TRACKBAR, WIN_NAME,
                               int(self._cap.get(cv2.CAP_PROP_POS_FRAMES)))

            current_frame = current_frame + 1
            time = time + 1.0 / cap_fps

            # stop recording if ESC or space pressed
            k = cv2.waitKey(1) & 0xff
            if k == 27 or k == ord(" "):
                invalid = True
                break

        csvfile.close()
        if self._save_video:
            out.release()

        if invalid:
            cv2.putText(frame, "CANCELLED", (100, 80),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 0, 255), 2)
            cv2.imshow(WIN_NAME, frame)
            # discard the partial outputs of the cancelled run
            os.remove(csv_fname)
            if self._save_video:
                os.remove(out_fname)

        self._current_frame = frame
        self._is_tracking = False
        self.load_existing_trackers()
Example #53
0
    def calculate_derived(self):
        """
        Derives additional configuration values necessary for training from the current config
        """

        # wandb
        # sets a unique wandb group
        if self.wandb_group is None:
            # if none is defined a uuid is set for the run
            self.wandb_group = shortuuid.uuid()

        # number of gpus
        # Get number of GPUs param or hostfile to determine train_batch_size
        num_gpus = self.num_gpus
        if num_gpus is None:
            num_gpus = -1  # set -1 for backwards compatibility to old default value
        if num_gpus < 1:
            if self.hostfile is not None or os.path.exists(DLTS_HOSTFILE):
                hostfile_path = self.hostfile or DLTS_HOSTFILE
                resources = obtain_resource_pool(hostfile_path, self.include or "", self.exclude or "")
                num_gpus = sum(map(len, resources.values()))
            else:
                num_gpus = torch.cuda.device_count()
        self.update_value("num_gpus", num_gpus)

        logging.info(
            self.__class__.__name__ + ".calculate_derived() " + f"Total number of GPUs determined to be: {self.num_gpus}")

        # get world size in the model/pipe parallel case, the actual `world size` deepspeed uses is the size of the
        # data-parallel group, or (num_gpus / mp_size) / pp_size
        pp_size = self.pipe_parallel_size
        pp_size = pp_size if pp_size >= 1 else 1
        mp_size = self.model_parallel_size
        mp_size = mp_size if mp_size >= 1 else 1
        self.update_value("model_parallel_size", mp_size)

        # pp_size and mp_size are only used here to compute dp world size and nowhere else.
        dp_world_size = ((num_gpus / pp_size) / mp_size)
        if not (dp_world_size % 1 == 0):
            error_message = self.__class__.__name__ + ".calculate_derived() " + f"(num_gpus / pp_size) / mp_size [({num_gpus} / {pp_size}) / {mp_size}] must be a whole number"
            logging.error(error_message)
            raise AssertionError(error_message)

        # Automatically derive train_batch_size = train_micro_batch_size_per_gpu*num_gpus*gradient_accumulation_steps
        train_batch_size, train_micro_batch_size_per_gpu, gradient_accumulation_steps = self.calculate_batch_parameters(
            dp_world_size=dp_world_size,
            train_batch=self.train_batch_size,
            micro_batch=self.train_micro_batch_size_per_gpu,
            grad_acc=self.gradient_accumulation_steps
        )
        self.check_batch_parameters(
            dp_world_size=dp_world_size,
            train_batch=train_batch_size,
            micro_batch=train_micro_batch_size_per_gpu,
            grad_acc=gradient_accumulation_steps
        )
        self.update_values({
            # batch size params
            "train_batch_size": train_batch_size,
            "train_micro_batch_size_per_gpu": train_micro_batch_size_per_gpu,
            "gradient_accumulation_steps": gradient_accumulation_steps,
            "batch_size": train_micro_batch_size_per_gpu,

            # duplicate items
            "gas": self.gradient_accumulation_steps,
            "clip_grad": self.gradient_clipping,

        })
        
        # derive precision
        if (self.fp16 or {}).get("type", self.precision) == "bfloat16":
            self.update_value("precision", "bfloat16")
        elif (self.fp16 or {}).get("enabled", False):
            self.update_value("precision", "fp16")
        else:
            self.update_value("precision", "fp32")
        

        # zero optimization
        if self.zero_optimization is None:
            self.zero_optimization = copy.deepcopy(ZERO_DEFAULTS)  # a dict is overwritten and not updated key by key
        self.update_values({
            "zero_stage": self.zero_optimization.get('stage', ZERO_DEFAULTS['stage']),
            "zero_reduce_scatter": self.zero_optimization.get('reduce_scatter', ZERO_DEFAULTS['reduce_scatter']),
            "zero_contiguous_gradients": self.zero_optimization.get('contiguous_gradients',
                                                                    ZERO_DEFAULTS['contiguous_gradients']),
            "zero_reduce_bucket_size": self.zero_optimization.get('reduce_bucket_size',
                                                                  ZERO_DEFAULTS['reduce_bucket_size']),
            "zero_allgather_bucket_size": self.zero_optimization.get('allgather_bucket_size',
                                                                     ZERO_DEFAULTS['allgather_bucket_size'])
        })

        # optimizer and scheduler
        opt_params = self.optimizer or {"type": OPT_DEFAULT, "params": OPT_PARAMS_DEFAULTS}
        self.update_values({
            "optimizer_type": opt_params.get('type', OPT_DEFAULT),
            "lr": opt_params['params'].get('lr', OPT_PARAMS_DEFAULTS['lr'])
        })

        if self.optimizer_type.lower() == "onebitadam":
            # onebitadam needs to be instantiated by deepspeed, and so we need to pass deepspeed scheduler args
            # for all other optimizers, the scheduling is handled by megatron
            self.scheduler = {
                "type": "WarmupDecayLR",  # for now this is the only ds scheduler offering decay
                "params": {
                    "warmup_min_lr": 0,
                    "warmup_max_lr": self.lr,
                    "warmup_num_steps": int(self.train_iters * self.warmup),
                    "total_num_steps": self.lr_decay_iters or self.train_iters
                }}

        # Fp16 loss scaling.
        self.update_value("dynamic_loss_scale", self.loss_scale is None)

        # Update 'is pipe parallel' flag
        # if we set pipe_parallel_size to 0 or 1, GPT2ModelPipe.to_sequential() is called, and we run training with
        # the sequential model without the PipelineModule wrapper to avoid the overhead it incurs
        self.update_value("is_pipe_parallel", self.pipe_parallel_size >= 1)

        # Attention config
        if self.attention_config is None:
            self.update_value("attention_config", [[["global"], self.num_layers]])
        self.update_value("attention_config", expand_attention_types(self.attention_config, self.num_layers))
        assert len(self.attention_config) == self.num_layers, "Length of attention config list must equal num_layers"
        for item in self.attention_config:
            assert item in ATTENTION_TYPE_CHOICES, f"Attention type {item} not recognized"
        if "gmlp" in self.attention_config or "amlp" in self.attention_config:
            assert not self.partition_activations, "GMLP Blocks are not compatible with partition activations"

        # Sparsity config
        if self.sparsity_config is None:
            # Can't have a default value as an empty dict so need to set it here
            self.update_value("sparsity_config", {})

        # Adding equal dataset weights if none are provided
        if self.train_data_paths and (self.train_data_weights is None):
            self.train_data_weights = [1.] * len(self.train_data_paths)
        if self.valid_data_paths and (self.valid_data_weights is None):
            self.valid_data_weights = [1.] * len(self.valid_data_paths)
        if self.test_data_paths and (self.test_data_weights is None):
            self.test_data_weights = [1.] * len(self.test_data_paths)
Example #54
0
def get_uuid():
    return shortuuid.uuid().lower()[:5]
Example #55
0
    def __init__(self, assigned=True, **kwargs):
        self.id = shortuuid.uuid()[0:10]
        self.created = datetime.datetime.utcnow().isoformat()
        self.assigned = assigned
        self.products = {}
        super(Badge, self).__init__(**kwargs)
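The two snippets above both truncate the generated id (to 5 and 10 characters respectively), trading uniqueness for readability. A rough back-of-the-envelope check of the remaining id space, assuming shortuuid's default 57-character alphabet (note that get_uuid() also lowercases the id, which shrinks the effective alphabet further):

alphabet_size = 57            # shortuuid's default alphabet length
for length in (5, 10):
    ids = alphabet_size ** length
    print(length, f"{ids:.3e}")   # ~6.0e+08 ids at 5 chars, ~3.6e+17 at 10 chars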
Example #56
0
    def test_incoming_message_info(self):
        client = yield from connect(AMQP_URL, loop=self.loop)

        queue_name = self.get_random_name("test_connection")
        routing_key = self.get_random_name()

        channel = yield from client.channel()
        exchange = yield from channel.declare_exchange('direct',
                                                       auto_delete=True)
        queue = yield from channel.declare_queue(queue_name, auto_delete=True)

        yield from queue.bind(exchange, routing_key)

        body = bytes(shortuuid.uuid(), 'utf-8')

        info = {
            'headers': {
                "foo": "bar"
            },
            'content_type': "application/json",
            'content_encoding': "text",
            'delivery_mode': DeliveryMode.PERSISTENT.value,
            'priority': 0,
            'correlation_id': b'1',
            'reply_to': 'test',
            'expiration': 1.5,
            'message_id': shortuuid.uuid(),
            'timestamp': int(time.time()),
            'type': '0',
            'user_id': 'guest',
            'app_id': 'test',
            'body_size': len(body)
        }

        msg = Message(body=body,
                      headers={'foo': 'bar'},
                      content_type='application/json',
                      content_encoding='text',
                      delivery_mode=DeliveryMode.PERSISTENT,
                      priority=0,
                      correlation_id=1,
                      reply_to='test',
                      expiration=1.5,
                      message_id=info['message_id'],
                      timestamp=info['timestamp'],
                      type='0',
                      user_id='guest',
                      app_id='test')

        yield from exchange.publish(msg, routing_key)

        incoming_message = yield from queue.get(timeout=5)
        incoming_message.ack()

        info['synchronous'] = incoming_message.synchronous
        info['routing_key'] = incoming_message.routing_key
        info['redelivered'] = incoming_message.redelivered
        info['exchange'] = incoming_message.exchange
        info['delivery_tag'] = incoming_message.delivery_tag
        info['consumer_tag'] = incoming_message.consumer_tag
        info['cluster_id'] = incoming_message.cluster_id

        self.assertEqual(incoming_message.body, body)
        self.assertDictEqual(incoming_message.info(), info)

        yield from queue.unbind(exchange, routing_key)
        yield from queue.delete()
        yield from wait((client.close(), client.closing), loop=self.loop)
Example #57
0
def tmp_fname(fname=""):
    """Temporary name for a partial download"""
    from shortuuid import uuid

    return os.fspath(fname) + "." + uuid() + ".tmp"
Example #58
0
    @classmethod
    def generate_uuid(cls, content):
        """Generate short uuid, used to uniquely identify logs based on content."""
        return shortuuid.uuid(str(content))
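Passing a name to shortuuid.uuid() makes the result deterministic (derived from the content rather than random), which is what lets the method above identify identical log entries. A quick sketch:

import shortuuid

a = shortuuid.uuid("same log line")
b = shortuuid.uuid("same log line")
c = shortuuid.uuid("a different log line")
assert a == b        # identical content maps to the same id
assert a != c        # different content maps to a different id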
Example #59
0
def tmp_fname(fname):
    """ Temporary name for a partial download """
    from shortuuid import uuid

    return fspath(fname) + "." + str(uuid()) + ".tmp"
Example #60
0
    def init(self) -> Union[Run, Dummy, None]:  # noqa: C901
        trigger.call("on_init", **self.kwargs)
        s = self.settings
        sweep_config = self.sweep_config
        config = self.config
        if s._noop:
            drun = Dummy()
            drun.config = wandb.wandb_sdk.wandb_config.Config()
            drun.config.update(sweep_config)
            drun.config.update(config)
            drun.summary = DummyDict()
            drun.log = lambda data, *_, **__: drun.summary.update(data)
            drun.finish = lambda *_, **__: module.unset_globals()
            drun.step = 0
            drun.resumed = False
            drun.disabled = True
            drun.id = shortuuid.uuid()
            drun.name = "dummy-" + drun.id
            drun.dir = "/"
            module.set_global(
                run=drun,
                config=drun.config,
                log=drun.log,
                summary=drun.summary,
                save=drun.save,
                use_artifact=drun.use_artifact,
                log_artifact=drun.log_artifact,
                plot_table=drun.plot_table,
                alert=drun.alert,
            )
            return drun
        if s.reinit or (s._jupyter and s.reinit is not False):
            if len(self._wl._global_run_stack) > 0:
                if len(self._wl._global_run_stack) > 1:
                    wandb.termwarn(
                        "If you want to track multiple runs concurrently in wandb you should use multi-processing not threads"  # noqa: E501
                    )

                last_id = self._wl._global_run_stack[-1]._run_id
                jupyter = (s._jupyter and not s._silent
                           and ipython._get_python_type() == "jupyter")
                if jupyter:
                    ipython.display_html(
                        "Finishing last run (ID:{}) before initializing another..."
                        .format(last_id))

                self._wl._global_run_stack[-1].finish()

                if jupyter:
                    ipython.display_html(
                        "...Successfully finished last run (ID:{}). Initializing new run:<br/><br/>"
                        .format(last_id))
        elif isinstance(wandb.run, Run):
            logger.info("wandb.init() called when a run is still active")
            return wandb.run

        use_redirect = True
        stdout_master_fd, stderr_master_fd = None, None
        stdout_slave_fd, stderr_slave_fd = None, None

        backend = Backend()
        backend.ensure_launched(
            settings=s,
            stdout_fd=stdout_master_fd,
            stderr_fd=stderr_master_fd,
            use_redirect=use_redirect,
        )
        backend.server_connect()
        # Make sure we are logged in
        # wandb_login._login(_backend=backend, _settings=self.settings)

        # resuming needs access to the server, check server_status()?

        run = Run(config=config, settings=s, sweep_config=sweep_config)

        # Populate initial telemetry
        with telemetry.context(run=run) as tel:
            tel.cli_version = wandb.__version__
            tel.python_version = platform.python_version()
            hf_version = _huggingface_version()
            if hf_version:
                tel.huggingface_version = hf_version
            if s._jupyter:
                tel.env.jupyter = True
            if s._kaggle:
                tel.env.kaggle = True
            if s._windows:
                tel.env.windows = True
            run._telemetry_imports(tel.imports_init)

        run._set_console(
            use_redirect=use_redirect,
            stdout_slave_fd=stdout_slave_fd,
            stderr_slave_fd=stderr_slave_fd,
        )
        run._set_library(self._wl)
        run._set_backend(backend)
        run._set_reporter(self._reporter)
        run._set_teardown_hooks(self._teardown_hooks)
        # TODO: pass mode to backend
        # run_synced = None

        backend._hack_set_run(run)
        backend.interface.publish_header()

        if s._offline:
            with telemetry.context(run=run) as tel:
                tel.feature.offline = True
            run_proto = backend.interface._make_run(run)
            backend.interface._publish_run(run_proto)
            run._set_run_obj_offline(run_proto)
        else:
            ret = backend.interface.communicate_check_version(
                current_version=wandb.__version__)
            if ret:
                if ret.upgrade_message:
                    run._set_upgraded_version_message(ret.upgrade_message)
                if ret.delete_message:
                    run._set_deleted_version_message(ret.delete_message)
                if ret.yank_message:
                    run._set_yanked_version_message(ret.yank_message)
            run._on_init()
            ret = backend.interface.communicate_run(run, timeout=30)
            error_message = None
            if not ret:
                error_message = "Error communicating with backend"
            if ret and ret.error:
                error_message = ret.error.message
            if error_message:
                # Shutdown the backend and get rid of the logger
                # we don't need to do console cleanup at this point
                backend.cleanup()
                self.teardown()
                raise UsageError(error_message)
            if ret.run.resumed:
                with telemetry.context(run=run) as tel:
                    tel.feature.resumed = True
            run._set_run_obj(ret.run)

        # initiate run (stats and metadata probing)
        _ = backend.interface.communicate_run_start()

        self._wl._global_run_stack.append(run)
        self.run = run
        self.backend = backend
        module.set_global(
            run=run,
            config=run.config,
            log=run.log,
            summary=run.summary,
            save=run.save,
            use_artifact=run.use_artifact,
            log_artifact=run.log_artifact,
            plot_table=run.plot_table,
            alert=run.alert,
        )
        self._reporter.set_context(run=run)
        run._on_start()

        run._freeze()
        return run