Exemplo n.º 1
0
def delete_list(string, entities):
    """Delete a to-do list"""

    # Extract the list name from the recognized entities (last match wins)
    listname = ''
    for entity in entities:
        if entity['entity'] == 'list':
            listname = entity['sourceText'].lower()

    # A list name is mandatory
    if not listname:
        return utils.output('end', 'list_not_provided', utils.translate('list_not_provided'))

    # The list must exist before it can be deleted
    if db_lists.count(Query.name == listname) == 0:
        return utils.output('end', 'list_does_not_exist', utils.translate('list_does_not_exist', { 'list': listname }))

    # Drop the list itself, then every todo attached to it
    db_lists.remove(Query.name == listname)
    db_todos.remove(Query.list == listname)

    return utils.output('end', 'list_deleted', utils.translate('list_deleted', { 'list': listname }))
Exemplo n.º 2
0
def pause(request):
    """Pause Spotify playback on the active device, if any."""
    device_id = getDeviceId()
    # No active device means Spotify is not running
    if not device_id:
        utils.output('end', 'error', utils.translate('spotifynotrunning'))
        return
    sp_request('PUT', 'me/player/pause', {"device_id": device_id}, {})
    utils.output('end', 'success', utils.translate('pause'))
Exemplo n.º 3
0
    def __init__(self, start, end, size, gap, angle=None, align='center', layer=None, datatype=None):
        """
        Build an array of rectangular strips laid out along the line from
        *start* to *end*.

        start, end: endpoints of the line the strips are placed along.
        size: (width, height) of one strip.
        gap: spacing between consecutive strips.
        angle: optional rotation applied to each strip about its center of
            mass (units presumably degrees, matching `rotation` below —
            TODO confirm against the external `rotate` helper).
        align: 'bottom', 'top' or 'center' — vertical alignment of the strip
            relative to the placement line.
        layer, datatype: forwarded unchanged to Elements.__init__.

        Raises ValueError when *align* is not one of bottom/top/center.
        """

        self.start=np.array(start)
        self.end=np.array(end)
        self.size=np.array(size)
        self.gap=gap
        self.align=align

        # Corner points of one unrotated strip, anchored at the origin
        pts=np.array([[0,0], [0, size[1]], size, [size[0], 0]])
        if angle is not None:
            # 'com' pivots the rotation on the polygon's center of mass
            pts=rotate(pts, angle, 'com')

        # Shift the strip so the requested edge sits on the placement line
        if align.lower()=='bottom':
            pass
        elif align.lower()=='top':
            pts=translate(pts, (0, -self.size[1]))
        elif align.lower()=='center':
            pts=translate(pts, (0, -self.size[1]/2))
        else:
            raise ValueError('Align parameter must be one of bottom/top/center')

        # One strip plus one gap is the repetition period along the line
        strip_width=size[0]+gap

        v=self.end-self.start
        l=np.sqrt(np.dot(v,v))
        # Number of whole strips that fit. NOTE(review): if the line is
        # shorter than one strip, N == 0 and the division below fails —
        # confirm callers guarantee l >= strip_width.
        N=int(np.floor(l/strip_width))
        spacing=v/N
        # Rotate the strip to the direction of the line (degrees)
        rotation=math.atan2(v[1], v[0])*180/np.pi
        pts=rotate(pts, rotation)

        # Center the row of strips on the line: half of the leftover length
        # (excluding the trailing gap) is pushed in from the start point
        origin = start + 0.5* v* (l-(N*strip_width - gap))/l

        polys=[translate(pts, origin + i*spacing) for i in range(N)]

        Elements.__init__(self, polys, layer, datatype)
Exemplo n.º 4
0
def isitdown(string):
    """Check if a website is down or not.

    Extracts the domain names found in *string*, probes each one over HTTP
    and emits an intermediate answer per domain, then a final status.
    """

    domains = utils.finddomains(string)

    for domain in domains:
        state = 'up'
        # Display name: the part of the domain before the first dot
        websitename = domain[:domain.find('.')].title()

        utils.output(
            'inter', 'checking',
            utils.translate('checking', {'website_name': websitename}))

        try:
            r = utils.http('GET', 'http://' + domain)

            # Any non-200 answer counts as "down"
            if r.status_code != requests.codes.ok:
                state = 'down'

            # Bug fix: the output code was hard-coded to 'up' even when the
            # translated message reported the site as down
            utils.output('inter', state,
                         utils.translate(state, {'website_name': websitename}))
        except requests.exceptions.RequestException:
            # Network-level failure (DNS, timeout, refused connection, ...)
            utils.output(
                'inter', 'down',
                utils.translate('errors', {'website_name': websitename}))

    # Removed: a dead `output` accumulator of spaces that was never used,
    # and the always-true `i >= 0` guard around it.
    if len(domains) == 0:
        return utils.output('end', 'invalid_domain_name',
                            utils.translate('invalid_domain_name'))
    return utils.output('end', 'done')
Exemplo n.º 5
0
def create_list(string, entities):
    """Create a to-do list"""

    # Grab the list name from the recognized entities (last match wins)
    listname = ''
    for entity in entities:
        if entity['entity'] == 'list':
            listname = entity['sourceText'].lower()

    # A name is mandatory
    if not listname:
        return utils.output('end', 'list_not_provided',
                            utils.translate('list_not_provided'))

    # Refuse to create a duplicate list
    if db_lists.count(Query.name == listname) > 0:
        return utils.output(
            'end', 'list_already_exists',
            utils.translate('list_already_exists', {'list': listname}))

    dbCreateList(listname)

    return utils.output('end', 'list_created',
                        utils.translate('list_created', {'list': listname}))
Exemplo n.º 6
0
def view_lists(string, entities):
    """View to-do lists"""

    lists_nb = len(db_lists)

    # Nothing to show when no list exists yet
    if lists_nb == 0:
        return utils.output('end', 'no_list', utils.translate('no_list'))

    # One translated line per list, with its todo count, concatenated
    result = ''.join(
        utils.translate(
            'list_list_element', {
                'list': entry['name'],
                'todos_nb': db_todos.count(Query.list == entry['name'])
            }) for entry in db_lists)

    return utils.output(
        'end', 'lists_listed',
        utils.translate('lists_listed', {
            'lists_nb': lists_nb,
            'result': result
        }))
Exemplo n.º 7
0
 def as_list(self):
   """
   Returns this object's attribute values in a list.
   This method should always correspond with the HEADERS variable.
   """
   return [translate(value) for value in (self.workplace, self.role)]
Exemplo n.º 8
0
    def wrapper_load_config(string, entities):
        """Read and validate unit settings, then hand a prepared payload to func."""
        payload = dict()
        payload["string"] = string
        payload["entities"] = entities

        api_key = utils.config("api_key")
        pro = utils.config("pro")
        payload["temperature_units"] = utils.config("temperature_units")
        payload["wind_speed_units"] = utils.config("wind_speed_units")

        # Only the two supported temperature units are accepted
        if payload["temperature_units"] not in ("celsius", "fahrenheit"):
            return utils.output("end", "invalid_temperature_units",
                                utils.translate("invalid_temperature_units"))

        # Map the configured wind-speed phrasing to the API's unit codes
        wind_codes = {
            "meters per seconds": "meters_sec",
            "miles per hour": "miles_hour"
        }
        spoken_units = payload["wind_speed_units"]
        if spoken_units not in wind_codes:
            return utils.output("end", "invalid_wind_speed_units",
                                utils.translate("invalid_wind_speed_units"))
        payload["wind_speed_units_response"] = spoken_units
        payload["wind_speed_units"] = wind_codes[spoken_units]

        # Pro subscriptions use the dedicated OWM endpoint
        payload["owm"] = OWM(api_key, subscription_type="pro") if pro else OWM(api_key)

        return func(payload)
Exemplo n.º 9
0
def run(string, entities):
    """The SpeedTest package will give you information about your network speed.

    Runs the bundled speedtest.lib.py in a subprocess and parses its
    '--simple' output into ping/download/upload figures.
    """

    utils.output('inter', 'testing', utils.translate('testing'))

    realpath = os.path.dirname(os.path.realpath(__file__))
    process = subprocess.Popen(
        [sys.executable, realpath + '/speedtest.lib.py', '--simple'],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT)

    (output, err) = process.communicate()

    # Bug fix: stderr is redirected into stdout, so `err` is always None and
    # the old `if err:` error path was dead — use the exit code instead.
    if process.returncode != 0 or err:
        return utils.output('end', 'error', utils.translate('error'))

    rawoutput = output.decode('utf-8')

    # Raw strings keep the regex escapes explicit (idiomatic; '\n' matched
    # the newline either way)
    data = {
        'ping': re.search(r'Ping:(.+?)\n', rawoutput).group(1).strip(),
        'download': re.search(r'Download:(.+?)\n', rawoutput).group(1).strip(),
        'upload': re.search(r'Upload:(.+?)\n', rawoutput).group(1).strip()
    }

    return utils.output('end', 'done', utils.translate('done', data))
Exemplo n.º 10
0
def reset_exclusions():
    """
    Reset all user-set exclusion paths to blanks.
    :return:
    """
    confirmed = xbmcgui.Dialog().yesno(
        utils.translate(32604), utils.translate(32610), utils.translate(32607))
    if confirmed:
        # Blank out every exclusion slot
        for slot in (1, 2, 3):
            __addon__.setSetting(id="exclusion%d" % slot, value="")
Exemplo n.º 11
0
def default():
    """Root menu: one entry to open a PDF, one to open the addon settings."""
    entries = (
        (30000, "open_pdf", "open.png"),
        (30001, "open_settings", "settings.png"),
    )
    for label_id, route, icon in entries:
        add_dir(utils.translate(label_id),
                "plugin://%s/%s" % (utils.ADDON_ID, route),
                os.path.join(utils.IMG_FOLDER, icon))
Exemplo n.º 12
0
def uncheck_todos(string, entities):
    """Uncheck todos"""

    listname = ''
    todos = []

    # Pull the list name and the todo items out of the recognized entities
    for entity in entities:
        if entity['entity'] == 'list':
            listname = entity['sourceText'].lower()
        elif entity['entity'] == 'todos':
            # Comma-separated todos, trimmed of surrounding whitespace
            todos = [part.strip()
                     for part in entity['sourceText'].lower().split(',')]

    # Guard clauses: a list name and at least one todo are required
    if not listname:
        return utils.output('end', 'list_not_provided',
                            utils.translate('list_not_provided'))

    if len(todos) == 0:
        return utils.output('end', 'todos_not_provided',
                            utils.translate('todos_not_provided'))

    if db_lists.count(Query.name == listname) == 0:
        return utils.output(
            'end', 'list_does_not_exist',
            utils.translate('list_does_not_exist', {'list': listname}))

    result = ''
    for todo in todos:
        for stored in db_todos.search(Query.list == listname):
            # Rough matching (e.g. 1kg of rice = rice)
            if todo in stored['name']:
                db_todos.update(
                    {
                        'is_completed': False,
                        'updated_at': timestamp
                    },
                    (Query.list == listname) & (Query.name == stored['name']))

                result += utils.translate('list_todo_element',
                                          {'todo': stored['name']})

    return utils.output(
        'end', 'todo_unchecked',
        utils.translate('todos_unchecked', {
            'list': listname,
            'result': result
        }))
def landmarksBuilding(devide, _x1, _x2, _y1, _y2, _z1, _z2):
    """Scatter `devide` random landmark primitives inside a box.

    devide: how many landmarks to draw in a certain direction.
    _x1.._z2: translation ranges within which the landmarks are placed.
    Only basic primitives are drawn: lines, stars, triangles, cubes —
    appended to the corresponding module-level arrays.
    """
    global lines, stars, triangles, cubes
    for _ in range(devide):
        # RNG call order matches the original exactly (shape choice first,
        # then the three Euler angles, then the three translations)
        shape = random.randint(1, 4)
        ang1 = random.uniform(-PI, PI)
        ang2 = random.uniform(-PI, PI)
        ang3 = random.uniform(-PI, PI)
        tx = random.uniform(_x1, _x2)
        ty = random.uniform(_y1, _y2)
        tz = random.uniform(_z1, _z2)

        def _place(template):
            # Shared pipeline (was duplicated four times): shrink,
            # rotate by the random Euler angles, then translate
            shaped = utils.affine(0.6, 0.7, template)
            shaped = utils.multi(utils.EulerRotate(ang1, ang2, ang3), shaped)
            return utils.translate(tx, ty, tz, shaped)

        if shape == 1:
            lines = np.append(lines, _place(l0))
        elif shape == 2:
            stars = np.append(stars, _place(s0))
        elif shape == 3:
            triangles = np.append(triangles, _place(t0))
        elif shape == 4:
            cubes = np.append(cubes, _place(c0))
        else:
            # Unreachable with randint(1, 4); kept for parity with original
            print('Warning: Error Occurs in right!')
def reset_exclusions():
    """
    Reset all user-set exclusion paths to blanks.
    :return:
    """
    if Dialog().yesno(translate(32604), translate(32610), translate(32607)):
        # Blank out all five exclusion slots
        for slot in range(1, 6):
            ADDON.setSetting(id="exclusion%d" % slot, value="")
Exemplo n.º 15
0
 def onClick(self, control_id, *args):
     """Handle clicks on the log window's trim/clear/close buttons."""
     if control_id == self.TRIMBUTTONID:
         # Confirm before trimming the log
         if xbmcgui.Dialog().yesno(utils.translate(32604), utils.translate(32605), utils.translate(32607)):
             self.getControl(self.TEXTBOXID).setText(self.log.trim())
         return
     if control_id == self.CLEARBUTTONID:
         # Confirm before clearing the log
         if xbmcgui.Dialog().yesno(utils.translate(32604), utils.translate(32606), utils.translate(32607)):
             self.getControl(self.TEXTBOXID).setText(self.log.clear())
         return
     if control_id == self.CLOSEBUTTONID:
         self.close()
         return
     utils.debug("Unknown button pressed", xbmc.LOGERROR)
 def test_does_not_modify_given_data(self):
     """translate() must not mutate the ranges passed to it."""
     given = [(
         {'chain': 'A', 'number': 1, 'insertion_code': 'a'},
         {'chain': 'A', 'number': 2}
     )]
     untouched_copy = [(
         {'chain': 'A', 'number': 1, 'insertion_code': 'a'},
         {'chain': 'A', 'number': 2}
     )]
     u.translate(self.basic, given)
     self.assertEqual(given, untouched_copy)
def reset_exclusions():
    """
    Reset all user-set exclusion paths to blanks.
    :return:
    """
    confirmed = Dialog().yesno(translate(32604), translate(32610), translate(32607))
    if not confirmed:
        return
    # Clear each of the five exclusion settings
    for index in (1, 2, 3, 4, 5):
        ADDON.setSetting(id="exclusion%d" % index, value="")
Exemplo n.º 18
0
def render(edges,
           vertices,
           scale=(1, 1),
           position=(0, 0),
           offset=(0, 0),
           color="black"):
    """Draw a 2D wireframe with turtle graphics.

    edges: iterable of 1-based (from, to) vertex-index pairs.
    vertices: list of 2D points; not mutated (a copy is taken).
    scale, position, offset: applied to the copy before drawing.
    color: turtle pen color.
    """
    wn = turtle.Screen()
    t = turtle.Turtle()
    t.speed(0)
    t.pensize(1)
    t.hideturtle()
    wn.tracer(0, 0)
    t.pencolor(color)
    t.penup()

    # copy by value so the caller's vertex list is never mutated
    local_vertices = [] + vertices

    # find center of object
    midpoint = utils.vertices_midpoint(local_vertices)

    # move center of object to origin point before scaling, for easier
    # manipulation
    local_vertices = utils.translate(local_vertices,
                                     [-midpoint[0], -midpoint[1]])

    local_vertices = utils.scale(local_vertices, scale)

    min_x = utils.get_min_x(local_vertices)
    # Fix: this value comes from get_max_y, so name it accordingly
    # (it was misleadingly called min_y)
    max_y = utils.get_max_y(local_vertices)

    local_vertices = utils.translate(
        local_vertices,
        (-min_x + offset[0] + position[0], max_y + offset[1] + position[1]))

    # drawing: pen up to each edge's start, pen down to its end
    for edge in edges:
        t.penup()

        start = local_vertices[edge[0] - 1]
        t.goto(start[0], start[1])

        end = local_vertices[edge[1] - 1]
        t.pendown()
        t.goto(end[0], end[1])

    wn.update()
Exemplo n.º 19
0
def play_images(data):
    """Queue one page per image, cycling the page counter through 1..100."""
    page = 1
    for img in data["images"]:
        label = "%s %d" % (utils.translate(30017), page)
        if utils.show_real_thumbnails():
            add_page(label, img, img)
        else:
            add_page(label, img, thumbnails.get_thumbnail(str(page)))

        # Wrap back to 1 after page 100
        page = page + 1 if page < 100 else 1

    xbmc.executebuiltin("Container.SetViewMode(500)")
Exemplo n.º 20
0
def weather(string, entities):
    """Check the weather for the city found in the parsed entities."""

    # City requested by the user; None until an entity provides one
    # (was the magic sentinel 'vide0', which collided with a user literally
    # saying "vide0")
    city = None

    # Look for a 'city' entity in the utterance (last match wins)
    for item in entities:
        if item['entity'] == 'city':
            city = item['sourceText'].lower()

    if city is None:
        return utils.output('end', 'error', utils.translate('error'))

    # NOTE(review): the OpenWeatherMap API key is hard-coded in the URL;
    # it should come from the skill configuration instead
    url = "http://api.openweathermap.org/data/2.5/weather?appid=9ad6e7083f9e9c5d558ee4d7925e17ed&q=" + city
    content = requests.get(url)
    data = content.json()

    # Check the API status codes
    if data['cod'] != 200:  # anything other than 200 means a problem
        if data['cod'] == "404":  # city not found
            return utils.output('end', '404_city_not_found',
                                utils.translate('404_city_not_found'))
        elif data['cod'] == "429":  # too many requests per minute
            return utils.output('end', 'ezy', utils.translate('ezy'))
        else:
            return utils.output('end', 'error', utils.translate('error'))

    t = data['main']['temp']
    sky = data['weather'][0]['main']

    # Kelvin -> Celsius, rounded to one decimal
    t = round(t - 273.15, 1)

    # Keep only the city part before a comma or a space, if any
    vir = city.find(',')
    es = city.find(' ')

    if vir != -1:
        city = city[:vir]
    elif es != -1:
        city = city[:es]

    city = city.capitalize()
    sky = sky.lower()

    return utils.output(
        'end', 'weather',
        utils.translate('weather', {
            'cit': city,
            'sky': sky,
            't': t
        }))
Exemplo n.º 21
0
def run(string, entities):
    """Leon tells you about other personal assistants"""

    lowered = string.lower()
    assistants = ('alexa', 'cortana', 'siri', 'google assistant')

    # Answer about the first assistant mentioned in the utterance
    for assistant in assistants:
        if assistant in lowered:
            return utils.output('end', 'success',
                                utils.translate(assistant.replace(' ', '_')))

    return utils.output('end', 'unknown', utils.translate('unknown'))
Exemplo n.º 22
0
def run(string, entities):
    """Verify if one or several email addresses have been pwned.

    Emails come from the utterance entities, falling back to the
    configured list. Each email is checked against Have I Been Pwned;
    the last email ends the conversation.
    """

    emails = []

    # Collect email entities from the utterance
    for item in entities:
        if item['entity'] == 'email':
            emails.append(item['resolution']['value'])

    # Fall back to the configured addresses when none were spoken
    if not emails:
        emails = utils.config('emails')

        if not emails:
            return utils.output('end', 'no-email', utils.translate('no-email'))

    utils.output('inter', 'checking', utils.translate('checking'))

    for index, email in enumerate(emails):
        is_last_email = index == len(emails) - 1
        breached = checkForBreach(email)
        data = {'email': email}

        # Have I Been Pwned API returns a 403 when accessed by unauthorized/banned clients
        if breached == 403:
            return utils.output(
                'end', 'blocked',
                utils.translate('blocked',
                                {'website_name': 'Have I Been Pwned'}))
        elif breached == 503:
            return utils.output(
                'end', 'blocked',
                utils.translate('unavailable',
                                {'website_name': 'Have I Been Pwned'}))
        elif not breached:
            if is_last_email:
                return utils.output('end', 'no-pwnage',
                                    utils.translate('no-pwnage', data))
            utils.output('inter', 'no-pwnage',
                         utils.translate('no-pwnage', data))
        else:
            data['result'] = ''

            # Bug fix: the inner loop rebound `index` via enumerate,
            # shadowing the outer counter; the inner counter was unused
            for breach in breached:
                data['result'] += utils.translate(
                    'list_element', {
                        'url': 'http://' + breach['Domain'],
                        'name': breach['Name'],
                        'total': breach['PwnCount']
                    })

            if is_last_email:
                return utils.output('end', 'pwned',
                                    utils.translate('pwned', data))
            utils.output('inter', 'pwned', utils.translate('pwned', data))
Exemplo n.º 23
0
    def wrapper_acquire_weather(payload):
        """Resolve the requested city into an OWM observation, then call func."""
        for entity in payload["entities"]:
            if entity["entity"] != "city":
                continue

            utils.output("inter", "acquiring",
                         utils.translate("acquiring"))

            payload["city"] = entity["sourceText"].title()
            payload["observation"] = payload["owm"].weather_at_place(
                payload["city"])
            payload["wtr"] = payload["observation"].get_weather()

            return func(payload)

        # No city entity was found in the request
        return utils.output("end", "request_error",
                            utils.translate("request_error"))
Exemplo n.º 24
0
def main():
    model = 1
    file_name = 'train'
    n_classes = 100
    val_phrase = open_file('data/sign-to-gloss/cleaned/split-files/' +
                           file_name + '-phrase-' + str(n_classes))
    val_info = open_file('data/sign-to-gloss/cleaned/split-files/' +
                         file_name + '-info-' + str(n_classes))
    inp_lines, tar_lines, pred_lines = [], [], []
    for i in range(10, 11):
        inp, tar = create_batch([val_phrase[i]], val_info, n_classes)
        translate(inp, model)
        print(tar)
        """print('Input sentence: ', preprocess_inp_tar(inp))
Exemplo n.º 25
0
def rename_list(string, entities):
    """Rename a to-do list"""

    old_listname = ''
    new_listname = ''

    # Extract the old and new list names from the recognized entities
    for entity in entities:
        if entity['entity'] == 'old_list':
            old_listname = entity['sourceText'].lower()
        elif entity['entity'] == 'new_list':
            new_listname = entity['sourceText'].lower()

    # Both names are required
    if not old_listname or not new_listname:
        return utils.output('end', 'new_or_old_list_not_provided',
                            utils.translate('new_or_old_list_not_provided'))

    # The source list must exist...
    if db_lists.count(Query.name == old_listname) == 0:
        return utils.output(
            'end', 'list_does_not_exist',
            utils.translate('list_does_not_exist', {'list': old_listname}))

    # ...and the target name must be free
    if db_lists.count(Query.name == new_listname) > 0:
        return utils.output(
            'end', 'list_already_exists',
            utils.translate('list_already_exists', {'list': new_listname}))

    # Rename the list itself, then re-point its todos to the new name
    db_lists.update({'name': new_listname, 'updated_at': int(time())},
                    Query.name == old_listname)
    db_todos.update({'list': new_listname, 'updated_at': int(time())},
                    Query.list == old_listname)

    return utils.output(
        'end', 'list_renamed',
        utils.translate('list_renamed', {
            'old_list': old_listname,
            'new_list': new_listname
        }))
Exemplo n.º 26
0
def do_translate(input_data):
    """Translate one (index, (source, target)) work item and report progress."""
    index = input_data[0]
    pair = input_data[1]
    source, target = pair[0], pair[1]

    print(index)
    translated = translate(source, data_loader, trainer, SEQ_MAX_LEN_TARGET)
    return {'source': source, 'target': target, 'output': translated}
Exemplo n.º 27
0
def write_inferred_cds(handle, transcript, state, frame, rna_sequence):
    """Write the best inferred coding region (CDS) of *transcript* to *handle*.

    handle: writable file-like object; one space-separated record is appended.
    transcript: object exposing chromosome/start/stop/id/strand/mask/exons.
    state: decoding result exposing max_posterior, best_start, best_stop.
    frame: reading-frame object exposing a posterior array.
    rna_sequence: sequence indexed by transcript coordinates.

    Returns None; returns early (writing nothing) when no valid start/stop
    pair was inferred.
    """

    # Joint posterior of decoding state and reading frame; pick the best index
    posteriors = state.max_posterior * frame.posterior
    index = np.argmax(posteriors)
    # tis/tts: best start/stop positions in transcript coordinates
    # (presumably translation initiation/termination sites — TODO confirm)
    tis = state.best_start[index]
    tts = state.best_stop[index]

    # output is not a valid CDS
    if tis is None or tts is None:
        return None

    # Posterior scaled to an integer (0..10000) for the output record
    posterior = int(posteriors[index] * 10000)
    protein = utils.translate(rna_sequence[tis:tts])
    # identify TIS and TTS in genomic coordinates; transcript.mask maps
    # transcript positions onto genomic offsets via np.where
    if transcript.strand == '+':
        cdstart = transcript.start + np.where(transcript.mask)[0][tis]
        cdstop = transcript.start + np.where(transcript.mask)[0][tts]
    else:
        # minus strand: positions are mirrored through the mask length,
        # and start/stop swap roles
        cdstart = transcript.start + transcript.mask.size - np.where(
            transcript.mask)[0][tts]
        cdstop = transcript.start + transcript.mask.size - np.where(
            transcript.mask)[0][tis]

    # Record layout resembles BED12 (exon sizes/starts are comma-terminated
    # lists) but is space-separated — confirm the intended downstream parser
    towrite = [
        transcript.chromosome, transcript.start, transcript.stop,
        transcript.id, posterior, transcript.strand, cdstart, cdstop, protein,
        len(transcript.exons),
        ','.join(map(str, [e[1] - e[0] for e in transcript.exons])) + ',',
        ','.join(map(str, [transcript.start + e[0]
                           for e in transcript.exons])) + ','
    ]
    handle.write(" ".join(map(str, towrite)) + '\n')

    return None
Exemplo n.º 28
0
    def post(self, request, *args, **kwargs):
        """Create or update a report template from a form POST.

        Expects form fields matching self.schema plus an uploaded 'file'.
        Returns a JsonResponse with 'error' (0/1) and 'message' keys.
        """
        # QueryDict -> plain dict (last value wins for repeated keys);
        # the old dict([(k, v) for ...]) comprehension was redundant
        entry = dict(request.POST.items())
        try:
            valid_request = self.schema.deserialize(entry)
        except colander.Invalid as e:
            logging.error(e)
            return JsonResponse({'error': 1, 'message': 'bad args'})
        try:
            # Bug fix: a missing 'id' made int(None) raise TypeError, which
            # the old `except ValueError` did not catch -> 500 error
            template_id = int(entry.get('id'))
            try:
                template = Template.objects.get(pk=template_id)
            except Template.DoesNotExist:
                return JsonResponse({'error': 1, 'message': 'bad'})

        except (TypeError, ValueError):
            # No usable id -> create a new template named after the form data
            template = Template()
            template.template_name = entry.get('name')
            file_postfix = datetime.datetime.now().strftime("%Y-%m-%d_%H_%M_%S")
            try:
                template.template_file = '{0}_{1}.xls'.format(template.template_name, file_postfix)
            except UnicodeEncodeError:
                # Non-ASCII name: transliterate before building the filename
                template.template_file = '{0}_{1}.xls'.format(translate(template.template_name), file_postfix)

        f = request.FILES.get('file')
        saved = self.save_file_on_storage(f, template.template_file)
        if saved:
            template.lang = valid_request.get('lang')
            template.report_type_id = valid_request.get('report_type_id')
            template.template_type_id = valid_request.get('template_type_id')
            template.save()
            return JsonResponse({'error': 0, 'message': template.single_json})
        return JsonResponse({'error': 1, 'message': 'file not saved'})
Exemplo n.º 29
0
def main():
    """Translate every line of <file>.en and save inputs/targets/predictions.

    Command line: argv[1] = model number, argv[2] = dataset file stem.
    """
    model = int(sys.argv[1])
    file_name = sys.argv[2]
    val_inp = text_retrieve(file_name + '.en')
    val_tar = text_retrieve(file_name + '.phone')
    inp_lines, tar_lines, pred_lines = [], [], []
    for i in range(len(val_inp)):
        inp = str(val_inp[i])
        tar = str(val_tar[i])
        try:
            pred = translate(inp, model)
        except Exception:
            # Skip sentences the model cannot translate instead of aborting.
            # (Was a bare `except:`, which also swallowed KeyboardInterrupt
            # and SystemExit.)
            continue
        print(i)
        print('Input sentence: ', preprocess_inp_tar(inp))
        print('Target sentence: ', preprocess_inp_tar(tar))
        print('Predict sentence: ', pred)
        print()
        inp_lines.append(preprocess_inp_tar(inp))
        tar_lines.append(preprocess_inp_tar(tar))
        pred_lines.append(pred)
    inp_text = lines_to_text(inp_lines, '\n')
    tar_text = lines_to_text(tar_lines, '\n')
    pred_text = lines_to_text(pred_lines, '\n')
    text_save(inp_text,
              'model_' + str(model) + '/predictions/' + file_name + '_inp.txt')
    text_save(tar_text,
              'model_' + str(model) + '/predictions/' + file_name + '_tar.txt')
    text_save(
        pred_text,
        'model_' + str(model) + '/predictions/' + file_name + '_pred.txt')
Exemplo n.º 30
0
def authorize(url):
    """Exchange a Spotify authorization code for tokens and persist them."""
    db = utils.db()['db']

    # The authorization code is carried in the callback URL's query string
    code = url.split("?code=")[1].split("&")[0]
    payload = {'redirect_uri': utils.config('callback_uri'),
                   'code': code,
                   'grant_type': 'authorization_code',
                   'scope': utils.config('scope')}

    # Basic auth header: base64("client_id:client_secret")
    credentials = utils.config('client_id') + ':' + utils.config('client_secret')
    auth_header = base64.b64encode(six.text_type(credentials).encode('ascii'))
    headers = {'Authorization': 'Basic %s' % auth_header.decode('ascii')}

    response = requests.post('https://accounts.spotify.com/api/token',
                             data=payload, headers=headers)
    token_info = response.json()

    # Enrich the token record before storing it
    token_info['expires_at'] = int(time.time()) + token_info['expires_in']
    token_info['client_id'] = utils.config('client_id')
    token_info['client_secret'] = utils.config('client_secret')
    token_info['prefix'] = utils.config('prefix')
    token_info['scope'] = utils.config('scope')

    db.insert(token_info)

    utils.output('end', 'success', utils.translate('logged'))
Exemplo n.º 31
0
def do_translation_part(filename1,
                        filename2,
                        option='translation',
                        header=True,
                        cols=None):
    """Translate centroid data from *filename1* using *filename2* and save it.

    filename1: file holding the interpolated centroid data.
    filename2: file holding the translation centroid data.
    option: forwarded to utils.save_file (defaults to 'translation').
    header: whether the saved file should carry a header row.
    cols: optional column selection; when given, only the first two entries
        are used (change if the cx,cy,cz column layout changes).

    Returns None.

    Note: the original used bare triple-quoted strings as section markers —
    those were no-op expression statements, now replaced by real comments.
    """
    # Get centroids
    if cols is None:
        centroid_interpolated = utils.get_data(filename1, cols=cols)
    else:
        centroid_interpolated = utils.get_data(
            filename1,
            cols=[cols[0], cols[1]])  # change cols if cx,cy,cz columns change
    centroid_translation = utils.get_data(filename2)

    # Get interpolation
    xy_interpolated = utils.intepolate(centroid_interpolated,
                                       centroid_translation)

    # Translate back
    centroid_translated_back = utils.translate(centroid_interpolated,
                                               xy_interpolated)

    # Save translation file
    if cols is None:
        utils.save_file(filename1,
                        centroid_translated_back,
                        option=option,
                        header=header,
                        cols=cols)
    else:
        utils.save_file(filename1,
                        centroid_translated_back,
                        option=option,
                        header=header,
                        cols=[cols[0], cols[1]
                              ])  # change cols if cx,cy,cz columns change

    return None
Exemplo n.º 32
0
def main():
    """Load the training images, translate them, and train the capsule model."""
    train_images = load_train_data()
    shifted, shifts, originals = translate(train_images)

    trainer = Model_Train(shifted, shifts, originals, FLAGS.num_capsules,
                          FLAGS.recognizer_dimen, FLAGS.generator_dimen,
                          shifted.shape[1])
    trainer.train()
Exemplo n.º 33
0
def cmd_list(ctx):
    """List the units the user selected, translated into their language."""

    bot = ctx.bot
    args = ctx.args

    ctx.alt = bot.options.parse_alt(args)
    language = bot.options.parse_lang(ctx, args)

    selected_units = bot.options.parse_unit_names(args)

    # Anything left over in args was not understood
    if args:
        return bot.errors.unknown_parameters(args)

    if not selected_units:
        return bot.errors.no_unit_selected(ctx)

    # Translate each selected unit's base id. (The unused `msgs` and
    # `config` locals of the original were removed.)
    translations = [translate(unit.base_id, language)
                    for unit in selected_units]

    unit_list = '\n- '.join(translations)
    return [{
        'title':
        'Unit List',
        'description':
        'Here is the list of units you selected:\n- %s\n' % unit_list,
    }]
Exemplo n.º 34
0
def check_occurence (data_object):
  """
  Checks all the occurrences of the specified object.
    @param data_object: a data object
    @return: a sorted list of 'TranslatedType: element' strings, one for each
      element in which the object occurs (the original docstring said "set",
      but a list has always been returned)
  """
  import utils

  # Collect every container the object may appear in; union the hits.
  occurrences = set()
  for container in (get_vacations(), get_titles(), get_turnus_types(),
                    get_roles(), get_turnuses(), get_workplaces(),
                    get_employment_types(), get_scheduling_units(),
                    get_nurses()):
    occurrences |= __check_container(container, data_object)

  # Sort primarily by the translated type name of each element.
  to_sort = [(utils.translate(type(o).__name__), o) for o in occurrences]
  to_sort.sort()

  return [str(el[0]) + ': ' + str(el[1]) for el in to_sort]
Exemplo n.º 35
0
def main():
    """Translate a sign-to-gloss validation split and save target/prediction files.

    CLI arguments: model number, split file name, and number of classes.
    Writes two text files (one target phrase and one predicted phrase per line)
    under ``<n_classes>/luong/model_<model>/predictions/``.
    """
    print()
    model = int(sys.argv[1])
    file_name = sys.argv[2]
    n_classes = int(sys.argv[3])
    class_list = text_retrieve('class_list.txt')
    val_phrase = open_file('data/sign-to-gloss/cleaned/split-files/' +
                           file_name + '-phrase-' + str(n_classes))
    dataset_info = open_file(
        'data/sign-to-gloss/cleaned/split-files/dataset-info-' +
        str(n_classes))
    tar_lines, pred_lines = [], []
    # Iterate the phrases directly (enumerate) instead of indexing by range(len(...)).
    for i, phrase in enumerate(val_phrase):
        print(i)
        inp, tar = create_batch([phrase], dataset_info, n_classes)
        pred = translate(inp, model, n_classes)
        # Strip the start/end tokens from the target before converting.
        tar, pred = convert_tar_pred(list(tar[0][1:-1]), pred, class_list)
        print('Target phrase: ', tar)
        print('Predict phrase: ', pred)
        print()
        tar_lines.append(tar)
        pred_lines.append(pred)
    tar_text = lines_to_text(tar_lines, '\n')
    pred_text = lines_to_text(pred_lines, '\n')
    text_save(
        tar_text,
        str(n_classes) + '/luong/model_' + str(model) + '/predictions/' +
        file_name + '_tar.txt')
    text_save(
        pred_text,
        str(n_classes) + '/luong/model_' + str(model) + '/predictions/' +
        file_name + '_pred.txt')
Exemplo n.º 36
0
    def getKey(self):
        """Return the display label for this entry, title-cased.

        Password/username flags take precedence over translation; otherwise
        the raw key is used (translated when a translation table is set).
        """
        if self.isPassword:
            label = 'Password'
        elif self.isUsername:
            label = 'Username'
        elif self.translate:
            label = translate(self.key, self.translate)
        else:
            label = self.key
        return label.title()
Exemplo n.º 37
0
 def test_basic_translation(self):
     """A range covered by the basic mapping is shifted to the mapped numbers."""
     source = [(
         {'chain': 'A', 'number': 1, 'insertion_code': 'a'},
         {'chain': 'A', 'number': 2}
     )]
     expected = [(
         {'chain': 'A', 'number': 3, 'insertion_code': 'a'},
         {'chain': 'A', 'number': 5}
     )]
     result = u.translate(self.basic, source)
     self.assertEqual(expected, result)
Exemplo n.º 38
0
def get_opts():
    """Build the menu: parallel lists of headings and their click handlers."""
    entries = [
        # Show log / show old log
        (utils.translate(30001), lambda: show_log(False)),
        (utils.translate(30002), lambda: show_log(True)),
    ]

    # Upload log (only when the uploader addon is installed)
    if has_addon("script.kodi.loguploader"):
        entries.append((
            utils.translate(30015),
            lambda: xbmc.executebuiltin("RunScript(script.kodi.loguploader)"),
        ))

    # Open settings
    entries.append((utils.translate(30011), utils.open_settings))

    headings = [heading for heading, _ in entries]
    handlers = [handler for _, handler in entries]
    return headings, handlers
Exemplo n.º 39
0
 def build(self):
     """Return the request payload: Data-Warehouse field names for DW reports,
     otherwise the raw parameters wrapped in a reportDescription."""
     if self.report != reports.DataWarehouseReport:
         return {'reportDescription': self.raw}
     field_map = {
         'metrics': 'Metric_List',
         'breakdowns': 'Breakdown_List',
         'dateFrom': 'Date_From',
         'dateTo': 'Date_To',
         # is this the correct mapping?
         'date': 'Date_Preset',
         'dateGranularity': 'Date_Granularity',
     }
     return utils.translate(self.raw, field_map)
Exemplo n.º 40
0
    def __init__(self, path_to_template, entry_array,
                 brand_name=None, report_type_name=None, date=None):
        """Render an xls report from a template and save it under /tmp.

        :param path_to_template: path to the xls template file
        :param entry_array: rows to inject into the template
        :param brand_name: rightholder name; goes into the output filename
        :param report_type_name: report type; also goes into the filename
        :param date: report date; its month name goes into the filename
        """
        self.body_row_count = 0
        self.body_row_index = 0

        self.file = path_to_template    # path to the template file
        self.brand_name = brand_name    # rightholder name, used in the output filename
        self.report_type_name = report_type_name    # report type, likewise

        self.array = entry_array
        self.work_book = xlrd.open_workbook(self.file, formatting_info=True)
        self.write_book, self.styles = self.copy_hack()

        # Mirror the source workbook's attributes (e.g. _sheet_list) onto self.
        for key, val in self.work_book.__dict__.items():
            setattr(self, key, val)

        rightholder = translate(self.brand_name) if self.brand_name else u''
        rightholder = rightholder.replace(' ', '_')
        report_type_name = translate(self.report_type_name) if self.report_type_name else u''
        # Guard the default date=None, which previously crashed on date.month.
        month = calendar.month_name[date.month] if date else u''

        self.link = u'/tmp/{0}_{1}_{2}.xls'.format(rightholder, month, report_type_name)

        # '_sup_' is a helper sheet and is not rendered.
        for sheet in self._sheet_list:
            if sheet.name != '_sup_':
                template = self.define_blocks(sheet)
                self.inject_data(template)
                self.update_styles(sheet, self.write_book.get_sheet(0))
                self.burn(self.write_book.get_sheet(0))

        self.write_book.save(self.link)
def update(alpha, beta, gamma):
	"""Recompute the device pose matrix from orientation angles (degrees).

	The result is stored on the function object as ``update.pose``.
	NOTE(review): ``gamma`` is accepted but never used below, and the rotation
	labelled "yaw" is built from ``beta`` via rot_z — confirm whether gamma
	was meant to drive the yaw term.
	"""
	# alpha(Y) - 0 side level - increase tilting away from you
	# beta (X) - 0 top level - increase tilting right
	# gamma(Z) - 0 north    - increase rotating counterclockwise

	ra = utils.rot_x(np.deg2rad(alpha)) 
	rb = utils.rot_z(np.deg2rad(beta))

	# Compose: translate up 5 units, then roll, yaw, and a fixed 90° camera->device rotation.
	update.pose = (
		utils.translate(y = 5) # vertical offset
		.dot(ra)               # roll
		.dot(rb)               # yaw
		.dot(utils.rot_x(np.deg2rad(90))) # camera -> device
	)
Exemplo n.º 42
0
def get_dataset(filename, limit=0):
    """Load (features, labels) from a CSV file of labelled text rows.

    Column 0 is the label (0 stays 0, anything else becomes 1); column 5 holds
    the text, truncated to 140 characters, then normalized and translated into
    feature form.

    :param filename: path to the CSV file
    :param limit: maximum number of rows to read; 0 means read everything
    :return: (X, Y) parallel lists of features and binary labels
    """
    X, Y = [], []
    with open(filename) as h:
        for row in csv.reader(h):
            # Stop once exactly `limit` rows are collected. The original
            # checked `i > limit` *after* appending, overshooting by two rows.
            if limit != 0 and len(Y) >= limit:
                break
            Y.append(0 if int(row[0]) == 0 else 1)
            X.append(translate(normalize(row[5][:140])))
    return X, Y
Exemplo n.º 43
0
 def onClick(self, control_id, *args):
     """Dispatch trim/clear button presses: confirm with the user, then show the result."""
     if control_id == self.TRIMBUTTONID:
         confirmed = Dialog().yesno(utils.translate(32604), utils.translate(32605), utils.translate(32607))
         if confirmed:
             self.getControl(self.TEXTBOXID).setText(self.log.trim())
         return
     if control_id == self.CLEARBUTTONID:
         confirmed = Dialog().yesno(utils.translate(32604), utils.translate(32606), utils.translate(32607))
         if confirmed:
             self.getControl(self.TEXTBOXID).setText(self.log.clear())
         return
     raise ValueError("Unknown button pressed")
Exemplo n.º 44
0
def get_multi_mnist_batch(dataset, batch_size, S, image_size, eos=False):
    """Build a multi-digit batch by placing S MNIST digits side by side.

    Each 28x28 digit is repositioned (via ``translate``) onto a canvas of
    shape (image_size[0], image_size[1] // S); the S canvases are then
    concatenated horizontally, and labels are stacked along a new axis so
    batch_y has shape (batch_size, S, num_classes).

    NOTE(review): when eos=True, ``eos_label`` (shape (batch_size, 1)) is
    concatenated onto ``batch_x`` — the image tensor — along axis 0, which
    looks like a shape mismatch and was presumably meant for ``batch_y``;
    confirm before relying on eos=True.
    """
    batch_xs = []
    batch_ys = []
    for i in range(S):
        batch_x, batch_y = dataset.next_batch(batch_size)
        batch_x = batch_x.reshape((-1, 28, 28, 1))
        # Reposition each digit on a (rows, cols // S) slice of the canvas.
        batch_x = translate(batch_x, size=(image_size[0], image_size[1] // S))
        batch_xs.append(batch_x)
        batch_ys.append(batch_y)

    # Stitch digit canvases horizontally; stack labels along a new S axis.
    batch_x = np.concatenate(batch_xs, axis=2)
    batch_y = np.concatenate([b[:, None, :] for b in batch_ys], axis=1)

    if eos:
        eos_label = np.repeat([11], batch_size).reshape((batch_size, 1))
        batch_x = np.concatenate([batch_x, eos_label])
    return batch_x, batch_y
Exemplo n.º 45
0
def check_text_instruction(text):
    global statistics

    if text:
        if type(text) == unicode:
            text = text.encode('utf8')
        if text:
            for pattern in INSTRUCTION_SUBSTITUTE_PATTERNS:
                if text.find(pattern):
                    text = re.sub(INSTRUCTION_SUBSTITUTE_PATTERNS[pattern], '', text)
            bad_instr = False
            for instr in BAD_INSTRUCTIONS:
                if text.find(instr) != -1:
                    bad_instr = True
                    break
            if bad_instr:
                statistics.wrong_text()
                print "[*] Bad string found"
                return None
        return translate(text).strip()
    return text
Exemplo n.º 46
0
def write_inferred_cds(handle, transcript, state, frame, rna_sequence):
    """Write the highest-posterior inferred CDS of a transcript to `handle`.

    Emits one space-separated line: chromosome, transcript bounds, id, a
    scaled posterior, strand, CDS start/stop in genomic coordinates, the
    translated protein, and exon count/size/start columns. Writes nothing
    (returns None) when no valid start/stop pair exists.
    """

    # Joint posterior over frames; pick the best frame's start/stop pair.
    posteriors = state.max_posterior*frame.posterior
    index = np.argmax(posteriors)
    tis = state.best_start[index]
    tts = state.best_stop[index]

    # output is not a valid CDS
    if tis is None or tts is None:
        return None

    # Scale the posterior to an integer (x10000) for the score-like column.
    posterior = int(posteriors[index]*10000) 
    protein = utils.translate(rna_sequence[tis:tts])
    # identify TIS and TTS in genomic coordinates
    if transcript.strand=='+':
        cdstart = transcript.start + np.where(transcript.mask)[0][tis]
        cdstop = transcript.start + np.where(transcript.mask)[0][tts]
    else:
        # Reverse strand: positions are measured back from the end of the mask.
        cdstart = transcript.start + transcript.mask.size - np.where(transcript.mask)[0][tts]
        cdstop = transcript.start + transcript.mask.size - np.where(transcript.mask)[0][tis]

    towrite = [transcript.chromosome, 
               transcript.start, 
               transcript.stop, 
               transcript.id, 
               posterior, 
               transcript.strand, 
               cdstart, 
               cdstop,
               protein, 
               len(transcript.exons), 
               ','.join(map(str,[e[1]-e[0] for e in transcript.exons]))+',', 
               ','.join(map(str,[transcript.start+e[0] for e in transcript.exons]))+',']
    handle.write(" ".join(map(str,towrite))+'\n')

    return None
Exemplo n.º 47
0
 def __init__(self, xml_filename, script_path, default_skin="Default", default_res="720p", *args, **kwargs):
     """Create the log-viewer dialog: attach a log reader and window caption,
     then initialize the underlying XML dialog."""
     self.log = utils.Log()
     self.caption = utils.translate(32603)
     xbmcgui.WindowXMLDialog.__init__(self, xml_filename, script_path)
Exemplo n.º 48
0
 def as_list(self):
   """Return this object's translated attribute values as a list.

   Must stay in the same order as the HEADERS variable."""
   attributes = (self.code, self.label, self.duration)
   return [translate(value) for value in attributes]
Exemplo n.º 49
0
def run(args):
    """Train/evaluate a recurrent visual-attention model on translated MNIST.

    Builds a glimpse-based RNN graph, optimizes a hybrid objective
    (classification cross-entropy + REINFORCE over sampled glimpse locations
    + an optional baseline), then plots sampled glimpse locations.
    NOTE(review): uses legacy TensorFlow 0.x summary/RNN APIs and a Keras
    backend session — confirm the expected framework versions.
    """
    # Hyperparameters pulled from CLI args.
    batch_size = args.batch_size
    num_epochs = args.num_epochs
    num_steps = args.num_time_steps
    num_classes = 10
    num_lstm_units = args.num_lstm_units
    num_lstm_layer = 1
    alpha = args.alpha
    location_sigma = args.location_sigma
    glimpse_size = (12, 12)

    # Canvas size, e.g. "60x60"; 28x28 digits are translated onto this canvas.
    image_rows, image_cols = [int(v) for v in args.image_size.split("x")]

    mnist = input_data.read_data_sets("data", one_hot=True)

    sess = tf.Session()
    K.set_session(sess)

    image = tf.placeholder(tf.float32, (None, image_rows, image_cols, 1))
    label = tf.placeholder(tf.int32, (None, num_classes))

    tf.image_summary("translated mnist", image, max_images=3)

    cell = tf.nn.rnn_cell.LSTMCell(num_lstm_units, forget_bias=1., use_peepholes=True, state_is_tuple=True)
    # cell = tf.nn.rnn_cell.MultiRNNCell([lstm_cell] * num_lstm_layer, state_is_tuple=True)
    state = initial_state = cell.zero_state(tf.shape(image)[0], dtype=tf.float32)

    # Sub-networks: location policy head and glimpse/location encoders.
    location_net = Dense(2, activation="linear", name="location_net")
    h_g = Dense(128, activation="relu", name="h_g")
    h_l = Dense(128, activation="relu", name="h_l")
    linear_h_g = Dense(256, activation="linear", name="linear_h_g")
    linear_h_l = Dense(256, activation="linear", name="linear_h_l")

    # Unroll the RNN: each step samples a glimpse location from a Gaussian
    # policy centred on location_net's output, then attends at that location.
    locations = []
    loc_means = []
    with tf.variable_scope("RNN"):
        for time_step in range(num_steps):
            if time_step > 0:
                tf.get_variable_scope().reuse_variables()

            h_tm1 = state.h

            loc_mean = location_net(h_tm1)
            tf.histogram_summary("loc_mean(t=%d) without tanh" % time_step, loc_mean)
            # loc_mean = 1.7159 * tf.nn.tanh(2/3 * loc_mean)
            # tf.histogram_summary("loc_mean(t=%d)" % time_step, loc_mean)
            locations += [tf.stop_gradient(tf.random_normal((batch_size, 2), loc_mean, location_sigma))]
            loc_means += [loc_mean]

            # Multi-scale glimpses: 1x, 2x, 3x the base glimpse size.
            sizes = [(glimpse_size[0] * (i + 1), glimpse_size[1] * (i + 1))
                     for i in range(3)]
            glimpses = take_glimpses(image, locations[-1], sizes)
            glimpse = tf.concat(3, glimpses)
            glimpse = tf.reshape(glimpse, (-1, np.prod(glimpse_size) * len(sizes)))

            # Combine glimpse and location features into the RNN input.
            _h_g = h_g(glimpse)
            _h_l = h_l(locations[-1])
            inputs = tf.nn.relu(linear_h_g(_h_g) + linear_h_l(_h_l))
            (cell_output, state) = cell(inputs, state)
            tf.image_summary("12x12 glimpse t=%d" % time_step, glimpses[-1], max_images=5)

    # Classification head on the final hidden state; R is the 0/1 reward.
    logits = Dense(num_classes, name="logits")(state.h)
    inference = tf.nn.softmax(logits)
    prediction = tf.arg_max(inference, 1)
    R = tf.cast(tf.equal(prediction, tf.arg_max(label, 1)), tf.float32)
    R = tf.stop_gradient(tf.expand_dims(R, 1))

    accuracy = tf.reduce_mean(R)
    tf.scalar_summary("accuracy", accuracy)

    loss = tf.nn.softmax_cross_entropy_with_logits(logits, tf.cast(label, tf.float32))
    loss = tf.reduce_mean(loss)
    tf.scalar_summary("xentropy", loss)

    b = K.variable(0., name="baseline")
    tf.scalar_summary("baseline", b)

    # REINFORCE: log-likelihood of the sampled locations under the Gaussian
    # policy, weighted by (reward - baseline).
    reinforce_loss = 0.
    for time_step, (l, l_mean) in enumerate(zip(locations, loc_means)):
        b_val = 0.
        if args.baseline:
            b_val = tf.stop_gradient(b)

        p = 1. / tf.sqrt(2 * np.pi * tf.square(location_sigma))
        p *= tf.exp(-tf.square(l - l_mean) / (2 * tf.square(location_sigma)))
        reinforce_loss -= alpha * (R - b_val) * tf.log(p + K.epsilon())

    baseline_loss = tf.squared_difference(tf.reduce_mean(R), b)
    tf.scalar_summary("loss:baseline", baseline_loss)

    reinforce_loss = tf.reduce_sum(tf.reduce_mean(reinforce_loss, reduction_indices=0))
    tf.scalar_summary("loss:reinforce", reinforce_loss)

    total_loss = loss + reinforce_loss + baseline_loss
    tf.scalar_summary("loss:total", total_loss)

    # NOTE(review): any optimizer value other than adam/momentum leaves
    # `optimizer` unbound and crashes below — confirm intended CLI validation.
    if str.lower(args.optimizer) == "adam":
        optimizer = tf.train.AdamOptimizer(learning_rate=args.learning_rate)
    elif str.lower(args.optimizer) == "momentum":
        optimizer = tf.train.MomentumOptimizer(learning_rate=args.learning_rate, momentum=args.momentum)

    tvars = tf.trainable_variables()
    grads = tf.gradients(total_loss, tvars)
    for tvar, grad in zip(tvars, grads):
        tf.histogram_summary(tvar.name, grad)
    train_step = optimizer.apply_gradients(zip(grads, tvars))

    merged = tf.merge_all_summaries()
    summary_writer = tf.train.SummaryWriter(args.logdir, sess.graph)

    # Training
    sess.run(tf.initialize_all_variables())
    initial_c, initial_h = sess.run([initial_state.c, initial_state.h],
                                    feed_dict={image: np.zeros((batch_size, image_rows, image_cols, 1))})

    saver = tf.train.Saver()
    if args.train == 1:
        epoch_loss = []
        epoch_reinforce_loss = []
        epoch_acc = []

        global_step = 0
        # One validation pass after each completed training epoch.
        while mnist.train.epochs_completed < num_epochs:
            current_epoch = mnist.train.epochs_completed
            batch_x, batch_y = mnist.train.next_batch(batch_size)
            batch_x = translate(batch_x.reshape((-1, 28, 28, 1)), size=(image_rows, image_cols))

            preds, loss, r_loss, summary, _ = sess.run([prediction, total_loss, reinforce_loss, merged, train_step],
                                                       feed_dict={image: batch_x, label: batch_y,
                                                                  initial_state.c: initial_c, initial_state.h: initial_h,
                                                                  K.learning_phase(): 1})
            epoch_loss += [loss]
            epoch_reinforce_loss += [r_loss]
            epoch_acc += [accuracy_score(preds, np.argmax(batch_y, axis=1))]

            summary_writer.add_summary(summary, global_step)
            global_step += 1

            if mnist.train.epochs_completed != current_epoch:
                print("[Epoch %d/%d]" % (current_epoch + 1, num_epochs))
                print("loss:", np.asarray(epoch_loss).mean())
                print("reinforce_loss: %.5f+/-%.5f" % (
                      np.asarray(epoch_reinforce_loss).mean(),
                      np.asarray(epoch_reinforce_loss).std()))
                print("acc: ", np.asarray(epoch_acc).mean())

                epoch_acc = []
                epoch_loss = []
                epoch_reinforce_loss = []

                val_loss = []
                val_reinforce_loss = []
                val_acc = []
                while mnist.validation.epochs_completed != 1:
                    batch_x, batch_y = mnist.validation.next_batch(batch_size)
                    batch_x = translate(batch_x.reshape((-1, 28, 28, 1)), size=(image_rows, image_cols))
                    res = sess.run([prediction, total_loss, reinforce_loss] + locations,
                                   feed_dict={image: batch_x.reshape((-1, image_rows, image_cols, 1)),
                                              label: batch_y,
                                              initial_state.c: initial_c, initial_state.h: initial_h,
                                              K.learning_phase(): 0})
                    preds, loss, r_loss = res[:3]
                    locs = res[3:]
                    val_loss += [loss]
                    val_reinforce_loss += [r_loss]
                    val_acc += [accuracy_score(preds, np.argmax(batch_y, axis=1))]

                    # Map sampled locations from [-1, 1] to pixel coordinates.
                    images = batch_x.reshape((-1, image_rows, image_cols))
                    locs = np.asarray(locs, dtype=np.float32)
                    locs = (locs + 1) * (image_rows / 2)
                    plot_glimpse(images, locs, name=args.logdir + "/glimpse.png")
                # Reset the validation reader's private epoch counters.
                mnist.validation._epochs_completed = 0
                mnist.validation._index_in_epoch = 0

                print("Val loss:", np.asarray(val_loss).mean())
                print("Val reinforce_loss: %.5f+/-%.5f" % (
                      np.asarray(val_reinforce_loss).mean(),
                      np.asarray(val_reinforce_loss).std()))
                print("Val acc: ", np.asarray(val_acc).mean())
        saver.save(sess, args.checkpoint)

    if len(args.checkpoint) > 0:
        saver.restore(sess, args.checkpoint)

    # plot results
    batch_x, _ = mnist.train.next_batch(batch_size)
    batch_x = translate(batch_x.reshape((-1, 28, 28, 1)), size=(image_rows, image_cols))

    locs = sess.run(locations, feed_dict={image: batch_x.reshape((-1, image_rows, image_cols, 1)),
                                          initial_state.c: initial_c, initial_state.h: initial_h,
                                          K.learning_phase(): 0})

    images = batch_x.reshape((-1, image_rows, image_cols))
    locs = np.asarray(locs, dtype=np.float32)
    locs = (locs + 1) * (image_rows / 2)
    plot_glimpse(images, locs)
Exemplo n.º 50
0
 def test_no_translation(self):
     """With an empty mapping, ranges come back unchanged."""
     ranges = [({'chain': 'A', 'number': 1}, {'chain': 'A', 'number': 2})]
     result = u.translate(self.no, ranges)
     self.assertEqual(ranges, result)
Exemplo n.º 51
0
    def main(self):
        """Handle one buffered IRC message.

        Parses ``self.recv_buffer``; for PRIVMSGs that mention our nick,
        picks a reply from the keyword checks below and sends it back to the
        channel, addressed to the sender. The keyword branches are ORDERED:
        the first matching substring wins when a message matches several.
        Always logs the parsed message at the end.
        """
        msg = translate(self.recv_buffer)

        if msg['command'] == 'PRIVMSG':
            # Small delay before replying; sender/channel/text from the PRIVMSG params.
            time.sleep(0.5)
            sender = msg['nick']
            channel = msg['params'][0].lower()
            message = msg['params'][1].lower()

            if self.nick.lower() in message:

                if 'hello' in message or 'hey' in message or 'hi' in message:
                    output = '\'sup?'

                elif 'rw' in message:
                    output = get_card('rw', 'single')
                    if 'spread' in message:
                        output = get_card('rw', 'spread')

                elif 'rune' in message:
                    output = get_rune('single')
                    if 'spread' in message:
                        output = get_rune('spread')

                elif 'spread' in message:
                    output = get_card('thoth', 'spread')

                elif 'y/n' in message:
                    output = get('yesno')

                elif 'when' in message:
                    output = get('when')

                elif 'necro' in message or 'necronomicon' in message:
                    output = get('necro')

                elif 'quote' in message:
                    output = get('quote')

                elif 'bane' in message:
                    output = get('bane')

                elif 'joke' in message:
                    output = get('joke')

                elif 'cat' in message:
                    output = get('cat')

                elif 'doge' in message:
                    output = get('doge')

                elif 'book' in message:
                    output = get('book')

                elif 'song' in message:
                    output = get('song')

                elif 'crowley' in message:
                    output = get('crowley')

                elif 'tranny' in message or 'trannies' in message:
                    output = get('thoth')

                else:
                    # Default reply when no keyword matched.
                    output = get_card('thoth', 'single')

                self.message(channel, '{}: {}'.format(sender, output))

        print('msg: ' + str(msg))
Exemplo n.º 52
0
def run(args):
    """Train a multi-digit recurrent attention model (DRAM-style) on MNIST.

    Every args.N glimpse steps the model classifies one of args.S digits;
    the total objective combines classification cross-entropy, a REINFORCE
    term over sampled glimpse locations, and a learned per-step baseline.
    Returns the best validation accuracy (or an early score < 0.2).
    NOTE(review): uses legacy TensorFlow 0.x summary/RNN APIs and a Keras
    backend session — confirm the expected framework versions.
    """
    # Hyperparameters pulled from CLI args.
    num_epochs = args.num_epochs
    num_time_steps = args.N * args.S
    batch_size = args.batch_size
    image_rows, image_cols = args.image_size
    glimpse_size = [int(s) for s in args.glimpse_size.split("x")]
    num_classes = 10
    num_lstm_units = args.num_lstm_units
    location_sigma = np.float32(args.location_sigma)
    location_max = np.float32(1 / args.ratio)

    mnist = input_data.read_data_sets("data", one_hot=True)

    sess = tf.Session()
    K.set_session(sess)

    if str.lower(args.optimizer) == "adam":
        optimizer = tf.train.AdamOptimizer(learning_rate=args.lr,
                                           beta1=0.9, beta2=0.999,
                                           epsilon=args.epsilon)
    elif str.lower(args.optimizer) == "momentum":
        optimizer = tf.train.MomentumOptimizer(learning_rate=args.lr,
                                               momentum=args.momentum)
    else:
        optimizer = tf.train.GradientDescentOptimizer(learning_rate=args.lr)

    # label has one one-hot row per digit position (args.S digits per image).
    image = tf.placeholder(tf.float32, (None, image_rows, image_cols, 1))
    label = tf.placeholder(tf.int32, (None, args.S, num_classes))

    tf.image_summary("translated", image, max_images=3)

    # === Recurrent network ===
    lstm_cell = tf.nn.rnn_cell.LSTMCell(num_lstm_units, forget_bias=1., use_peepholes=True, state_is_tuple=True)
    cell = tf.nn.rnn_cell.MultiRNNCell([lstm_cell] * 2, state_is_tuple=True)
    state = initial_state = cell.zero_state(batch_size, tf.float32)

    # Sub-networks: context (initial state), glimpse encoder, location policy,
    # classifier, and baseline predictor.
    context_net = build_context_net(num_lstm_units, glimpse_size)
    glimpse_net = build_glimpse_net(num_lstm_units, glimpse_size)
    emission_net = Dense(2, name="emission_net")
    classification_net = Dense(num_classes, name="classification_net")
    baseline_net = Dense(1, name="baseline_net")

    # Seed the second LSTM layer's hidden state from a coarse view of the image.
    image_coarse = tf.image.resize_images(image, glimpse_size)
    state2 = tf.nn.rnn_cell.LSTMStateTuple(c=state[1].c, h=context_net(image_coarse))
    state = (state[0], state2)

    y_preds = [None] * num_time_steps

    location_means = [emission_net(state[1].h)]
    locations = [tf.clip_by_value(tf.random_normal((batch_size, 2), location_means[-1], location_sigma),
                                  -location_max, location_max)]

    rewards = []
    baselines = []
    loss = 0.
    accuracy = 0.

    # Multi-scale glimpses: 1x, 2x, 3x the base glimpse size.
    glimpse_sizes = [(glimpse_size[0] * (i + 1), glimpse_size[1] * (i + 1))
                     for i in range(3)]
    with tf.variable_scope("RNN") as scope:
        for t in range(num_time_steps):
            if t > 0:
                scope.reuse_variables()
            # Baseline sees the hidden state but passes no gradient into it.
            baselines.append(baseline_net(tf.stop_gradient(state[1].h)))

            glimpses = take_glimpses(image, locations[-1] * args.ratio, glimpse_sizes)
            tf.image_summary("glimpse(t=%d)" % t, glimpses[0], max_images=3)
            glimpse = tf.concat(3, glimpses)

            g = glimpse_net([glimpse, locations[-1]])
            output, state = cell(g, state)

            # Sample the next glimpse location, clipped to the valid range.
            location_means.append(emission_net(state[1].h))
            locations.append(
                tf.clip_by_value(tf.random_normal((batch_size, 2), mean=location_means[-1], stddev=location_sigma),
                                 -location_max, location_max))

            # After every N glimpses, classify the next digit and accumulate reward.
            if (t + 1) % args.N == 0:
                target_idx = t // args.N
                logits = classification_net(state[0].h)
                label_t = tf.cast(label[:, target_idx, :], tf.float32)
                y_preds[t] = tf.argmax(tf.nn.softmax(logits), 1)

                cumulative = 0.
                if len(rewards) > 0:
                    cumulative = rewards[t - args.N]
                reward = tf.cast(tf.equal(y_preds[t], tf.argmax(label_t, 1)), tf.float32)
                rewards.append(cumulative + tf.expand_dims(reward, 1))
                loss += tf.reduce_mean(
                    tf.nn.softmax_cross_entropy_with_logits(logits, label_t))
                accuracy += tf.reduce_mean(reward) / args.S

    # REINFORCE: log-likelihood of sampled locations under the Gaussian policy,
    # weighted by (cumulative reward - baseline); plus baseline regression loss.
    reinforce_loss = 0.
    baseline_loss = 0.
    for t in range(num_time_steps):  # t = 0..T-1
        p = 1 / tf.sqrt(2 * np.pi * tf.square(location_sigma))
        p *= tf.exp(-tf.square(locations[t] - location_means[t]) / (2 * tf.square(location_sigma)))
        R = tf.stop_gradient(rewards[t // args.N])
        b = baselines[t]
        b_ = tf.stop_gradient(b)
        log_p = tf.log(p + K.epsilon())
        tf.histogram_summary("p(t=%d)" % t, p)
        tf.histogram_summary("log_p(t=%d)" % t, log_p)
        reinforce_loss -= (R - b_) * log_p
        baseline_loss += tf.reduce_mean(tf.squared_difference(tf.reduce_mean(R), b))

    reinforce_loss = tf.reduce_sum(tf.reduce_mean(reinforce_loss, reduction_indices=0))
    total_loss = loss + args.alpha * reinforce_loss + baseline_loss
    tf.scalar_summary("loss:total", total_loss)
    tf.scalar_summary("loss:xentropy", loss)
    tf.scalar_summary("loss:reinforcement", reinforce_loss)
    tf.scalar_summary("loss:baseline", baseline_loss)
    tf.scalar_summary("accuracy", accuracy)

    tvars = tf.trainable_variables()
    grads = tf.gradients(total_loss, tvars)
    for tvar, grad in zip(tvars, grads):
        tf.histogram_summary(tvar.name, grad)
    train_step = optimizer.apply_gradients(zip(grads, tvars))

    merged = tf.merge_all_summaries()
    summary_writer = tf.train.SummaryWriter(args.logdir, sess.graph)

    sess.run(tf.initialize_all_variables())

    saver = tf.train.Saver()

    if args.resume:
        assert os.path.exists(args.checkpoint)
        saver.restore(sess, args.checkpoint)

    best_val_score = 0.

    if not args.test:
        epoch_loss = []
        epoch_reinforce_loss = []
        epoch_acc = []

        global_step = 0
        # One validation pass after each completed training epoch.
        while mnist.train.epochs_completed < num_epochs:
            current_epoch = mnist.train.epochs_completed
            batch_x, batch_y = mnist.train.next_batch(args.batch_size)
            batch_x = translate(batch_x.reshape((-1, 28, 28, 1)), size=(image_rows, image_cols))
            batch_y = batch_y.reshape((-1, args.S, num_classes))

            acc, loss, r_loss, summary, _ = sess.run([accuracy, total_loss, reinforce_loss, merged, train_step],
                                                     feed_dict={image: batch_x, label: batch_y})

            epoch_loss.append(loss)
            epoch_reinforce_loss.append(r_loss)
            epoch_acc.append(acc)

            summary_writer.add_summary(summary, global_step)
            global_step += 1

            if mnist.train.epochs_completed != current_epoch:
                val_loss = []
                val_reinforce_loss = []
                val_acc = []

                while mnist.validation.epochs_completed != 1:
                    batch_x, batch_y = mnist.validation.next_batch(batch_size)
                    batch_x = translate(batch_x.reshape((-1, 28, 28, 1)), size=(image_rows, image_cols))
                    batch_y = batch_y.reshape((-1, args.S, num_classes))
                    res = sess.run([accuracy, total_loss, reinforce_loss] + locations,
                                   feed_dict={image: batch_x,
                                              label: batch_y})
                    acc, loss, r_loss = res[:3]
                    locs = res[3:]
                    val_loss.append(loss)
                    val_reinforce_loss.append(r_loss)
                    val_acc.append(acc)

                    # Map sampled locations from [-1, 1] to pixel coordinates.
                    images = batch_x.reshape((-1, image_rows, image_cols))
                    locs = np.asarray(locs, dtype=np.float32) * args.ratio
                    locs = (locs + 1) * (image_rows / 2)
                    plot_glimpse(images, locs, name=args.logdir + "/glimpse.png")

                print("[Epoch %d/%d]" % (current_epoch + 1, num_epochs))
                print("loss:", np.asarray(epoch_loss).mean())
                print("reinforce_loss: %.5f+/-%.5f" % (
                    np.asarray(epoch_reinforce_loss).mean(),
                    np.asarray(epoch_reinforce_loss).std()))
                print("accuracy:", np.asarray(epoch_acc).mean())

                print("val_loss:", np.asarray(val_loss).mean())
                print("val_reinforce_loss:", np.asarray(val_reinforce_loss).mean())
                print("val_acc:", np.asarray(val_acc).mean())
                # Reset the validation reader's private epoch counters.
                mnist.validation._epochs_completed = 0
                mnist.validation._index_in_epoch = 0

                val_score = np.asarray(val_acc).mean()
                # Early bail-out for runs that are clearly not learning.
                if val_score < 0.2:
                    return val_score
                best_val_score = max(val_score, best_val_score)

        saver.save(sess, args.checkpoint)
        return best_val_score
Exemplo n.º 53
0
	if not geo_data:
		print "Discarding {!r} - no data".format(fname)
		continue

	encoding, geo_data = geo_data[:8], geo_data[8:]
	geo_data = json.loads(geo_data)

	print geo_data

	# angle in centi degrees
	yaw = geo_data['Yaw']
	pitch = geo_data['Pitch']
	roll = geo_data['Roll']

	pose = (utils.translate(y=5)
		.dot(utils.rot_y(yaw))
		.dot(utils.rot_x(pitch))
		.dot(utils.rot_z(roll))
	)

	#damn you, opencv
	shutil.copy(fname, 'temp.jpg')
	im = cv2.imread('temp.jpg')
	dst = from_quad.apply_fixes_to(im, pose)
	if dst is None:
		print "Skipping {!r} -  could not transform".format(fname)
		continue

	cv2.imwrite('temp.jpg', dst)
	shutil.copy('temp.jpg', new_fname)

	cv2.imwrite('temp.jpg', im)