Example #1
0
def create_scale_specific_rule(operation=None, name=RANDOM,
                               mem_value='1', mem_operand='less equal', cpu_value='0', cpu_operand='less'):

    """Method to create a default scalability rule body used to create or update rules
    :param operation: operation to be performed by the Scalability manager;
                      a fresh random choice from RULE_ACTION_SCALE_LIST when not given
    :param name: Rule name; a fresh random name is generated for RANDOM (the default)
    :param mem_value, cpu_value: value of the parameter to match
    :param mem_operand, cpu_operand: operand of the parameter to match
    :returns json body (dict)
    """

    # Resolve randomized defaults per call. The original signature evaluated
    # random.choice(...) and Utils.id_generator() once at import time, so
    # every no-argument call reused the same "random" values.
    if operation is None:
        operation = random.choice(RULE_ACTION_SCALE_LIST)
    if name == RANDOM:
        name = Utils.id_generator()

    rule = {
            RULE_ACTION: {
                RULE_ACTION_NAME: RULE_ACTION_NAME_LIST[0],
                RULE_OPERATION: operation
            },
            RULE_NAME: name,
            RULE_CONDITION: {
                MEM: {
                    RULE_OPERAND: mem_operand,
                    RULE_VALUE: mem_value
                },
                CPU: {
                    RULE_OPERAND: cpu_operand,
                    RULE_VALUE: cpu_value
                }
            }
    }

    return rule
Example #2
0
    async def test_message_view_send_message_to_user(self):
        """Send several messages to a chat and verify the recipient sees the
        chat with the last written message attached."""
        member_I = utils.id_generator()
        request, session_I = await self.signup(member_I)
        assert request.status == 200

        member_II = utils.id_generator()
        request, session_II = await self.signup(member_II)
        assert request.status == 200

        recipients = ['bot', member_I]

        # Write a few messages (4 iterations; index was unused, so the
        # original range(1, 5, 1) is simplified).
        for _ in range(4):
            message = utils.id_generator(100)
            request = await self.send_mesage_to_users(recipients=recipients,
                                                      message=message,
                                                      session_id=session_II)
            assert request.status == 200
            json_text = await request.json()
            assert 'id' in json_text

        # Fetch the chats visible to the first member.
        request = await self.get_chats(session_id=session_I)
        assert request.status == 200
        json_text = await request.json()

        # The chat name is the sorted, comma-joined member list.
        chat_members = recipients + [member_II]
        chat_found = False
        for chat in json_text:
            if chat['name'] == ','.join(sorted(chat_members)):
                chat_found = True
                assert 'last_message' in chat
                # `message` still holds the last message written above.
                assert chat['last_message']['message'] == message
        assert chat_found  # idiomatic truthiness check instead of `== True`
Example #3
0
def create_notify_specific_rule(body=DEFAULT_BODY, email="*****@*****.**", name=RANDOM, mem_value='1',
                                mem_operand='less equal', cpu_value='0', cpu_operand='less'):

    """Method to create a default notify rule body used to create or update rules
    :param body: body to be send to the user
    :param email: notification target address
    :param name: Rule name; a fresh random name is generated for RANDOM (the default)
    :param mem_value, cpu_value: value of the parameter to match
    :param mem_operand, cpu_operand: operand of the parameter to match
    :returns json body (dict)
    """

    # Resolve the randomized name per call. The original default evaluated
    # Utils.id_generator() once at import time, so every no-argument call
    # shared the same "random" rule name.
    if name == RANDOM:
        name = Utils.id_generator()

    rule = {
            RULE_ACTION: {
                RULE_ACTION_NAME: RULE_ACTION_NAME_LIST[1],
                BODY: body,
                EMAIL: email
            },
            RULE_NAME: name,
            RULE_CONDITION: {
                MEM: {
                    RULE_OPERAND: mem_operand,
                    RULE_VALUE: mem_value
                },
                CPU: {
                    RULE_OPERAND: cpu_operand,
                    RULE_VALUE: cpu_value
                }
            }
    }
    return rule
Example #4
0
    async def test_messages_view_get(self):
        """Write messages into a chat, then fetch the chat's messages and
        verify the last written message is present."""
        member_I = utils.id_generator()
        request, session_I = await self.signup(member_I)
        assert request.status == 200

        member_II = utils.id_generator()
        request, session_II = await self.signup(member_II)
        assert request.status == 200

        recipients = ['bot', member_I]

        # Write a few messages (4 iterations; loop index was unused, so the
        # original range(1, 5, 1) is simplified).
        chat_id = ""
        for _ in range(4):
            message = utils.id_generator(100)
            request = await self.send_mesage_to_users(recipients=recipients,
                                                      message=message,
                                                      session_id=session_I)
            assert request.status == 200
            json_text = await request.json()
            assert 'id' in json_text
            chat_id = json_text['id']

        request = await self.get_messages_from_chat(chat_id=chat_id,
                                                    session_id=session_I)
        assert request.status == 200
        json_text = await request.json()

        # `message` still holds the last message written above.
        message_found = False
        for message_json in json_text:
            if message_json['message'] == message:
                message_found = True
        assert message_found  # idiomatic truthiness check instead of `== True`
Example #5
0
def _flashy_img_tag(img, cId):
    """Save a PIL image as an in-memory JPEG, register it as a resource of
    card `cId` and return the card-side markup referencing it."""
    tmp = StringIO()
    img.convert("RGB").save(tmp, format="JPEG")
    tmp.seek(0)
    rid = resource.new(tmp, id_generator() + ".jpg", cId)[1]
    return '<img src="[FLASHYRESOURCE:{0}]" />'.format(rid)


def new_from_image():
    """Create a new deck from an uploaded image

    Expects username/deck_name/description/session_id/name/divs in the JSON
    body. Splits the previously uploaded temp image into a grid and creates
    one card per image pair (front/back).
    :returns: JSON with the new deck on success, or an error code
    """
    log_request(request)
    data = request.json

    if not valid_params(['username', 'deck_name', 'description', 'session_id', 'name', 'divs'], data):
        logging.debug("Missing parameters")
        return jsonify({'error' : 500})

    username = data['username']
    deckname = data['deck_name']
    desc     = data['description']
    sId      = data['session_id']
    filename = data['name']

    uId = user.get_uId(username)

    if not user.verify(username, sId):
        return jsonify({'error' : 101})

    tmp_path = "/var/www/resources/tmp/{0}".format(filename)
    if not filename or not os.path.exists(tmp_path):
        return jsonify({'error' : 201})

    # create the new deck in the database
    dId, deck_id = deck.new(deckname, uId, desc)

    # split the temp image into the requested grid of sub-images
    i = Image.open(tmp_path)
    imgs = splitImage(i, data['divs'])

    for row in imgs:
        # pairwise collect the cards: each pair is (front, back-or-None)
        for p in pairs(row):
            cId = card.new(dId, "", "")
            sideA = _flashy_img_tag(p[0], cId)
            if p[1] is not None:
                sideB = _flashy_img_tag(p[1], cId)
            else:
                # odd image count: blank placeholder for the back side
                sideB = '[FLASHYRESOURCE:00000000]'
            card.modify(cId, sideA, sideB)

    os.unlink(tmp_path)        # let the filesystem delete the temp file
    d = deck.get_deck(dId)
    return jsonify({'deck': d, 'error': 0})
Example #6
0
def create_default_metadata_or_attributes_list(num_attributes=2):
    """Build a list of metadata/attribute dicts with random values.

    :param num_attributes: number of entries to generate
    :return: list of dicts with random key, description and value (10 chars each)
    """
    return [
        {
            KEY: id_generator(10),
            DESCRIPTION: id_generator(10),
            VALUE: id_generator(10),
        }
        for _ in range(num_attributes)
    ]
Example #7
0
def create_default_metadata_or_attributes_list(num_attributes=2):
    """Build a list of metadata/attribute dicts with random values.

    :param num_attributes: number of entries to generate
    :return: list of dicts with random key, description and value (10 chars each)
    """
    return [
        {
            KEY: id_generator(10),
            DESCRIPTION: id_generator(10),
            VALUE: id_generator(10),
        }
        for _ in range(num_attributes)
    ]
Example #8
0
def make_short_link():
    """Create (or overwrite) a short link from the submitted form.

    Honours an optional custom code, an expiry in seconds (default 300),
    an optional OTP code and a private flag. Renders the index template
    with either the resulting short URL or an error message.
    """
    data = request.form
    custom_code = data.get('custom_code', None)
    expire = data.get('expire', 5 * 60)
    otp = data.get('otp', None)
    private = data.get('private', False)

    # An empty string submitted for expire also falls back to the default.
    # NOTE(review): a submitted expire arrives as a string from the form —
    # confirm ShortLink and set_cache accept string values here.
    if not expire:
        expire = 5 * 60

    if custom_code is not None and check_key(custom_code):
        return render_template("index.html",
                               error='Custom code exist and enabled!')

    if custom_code:
        # Remove any stale row still holding this code before reusing it.
        # NOTE(review): Query.delete() does not raise NoResultFound; the
        # handler is kept for safety but is likely dead.
        try:
            db.session.query(ShortLink).filter(
                ShortLink.short_url == custom_code).delete()
            db.session.commit()
        except NoResultFound:
            pass

        generated_id = custom_code
    else:
        # Keep generating until we hit an unused code.
        generated_id = id_generator()
        while check_key(generated_id):
            generated_id = id_generator()

    short_link = ShortLink(main_url=data.get('main_url'),
                           short_url=generated_id,
                           private=bool(private),
                           expire=expire)
    set_cache(generated_id, data.get('main_url'), expire)
    if current_user.is_authenticated:
        short_link.user = current_user
        if otp:
            short_link.otp_code = otp

    db.session.add(short_link)
    db.session.commit()
    return render_template("index.html",
                           result=request.host_url + generated_id,
                           main_url=data.get('main_url'))
Example #9
0
def add_word():
    form = AddWordForm()
    if form.validate_on_submit():
        entity = EntityStream.query.filter_by(caption=form.name.data).first()
        if entity == None:
            entity = EntityStream()
            entity.eid = form.name.data + '-' + id_generator()
            entity.creator = current_user.username
            entity.creation_time = utcnow()
            entity.caption = form.name.data
            entity.alias = form.alias.data
            entity.description = form.description.data
            for s in prefix_suggestion(entity.caption):
                searchkey = SearchKey.query.filter_by(word=s).first()
                if searchkey == None:
                    searchkey = SearchKey(s)
                else:
                    print "found searchkey", searchkey.word, searchkey.id
                entity.suggestion.append(searchkey)
        db.session.add(entity)
        #         db.session.commit()
        flash('The new word has been created.')
        #         else:
        #             LOG("add_word(): entity found in db")
        return redirect(url_for('.go_entity', name=entity.eid))

    return render_template('add_word.html', form=form)
    def launch(self, path_to_file, parsed_params=None):
        """
        Launch the application described by the template file through the
        in-method engine, registering it in the class-level app list.

        :params: path_to_file, parsed_params
        :types: string, dictionary
        :returns: the generated application id

        .. note::
            For the time being we only have one "workflow engine" but we could extend this
            launch method to accept another parameter to be able to choose which engine to
            launch
        """
        logger.info(
            "Launching the application located there {}".format(path_to_file))
        # Parse/validate the uploaded template, keep it on the instance,
        # then run it through the mapper.
        template = self._micado_parser_upload(path_to_file, parsed_params)
        self.template = template
        tpl = self._mapper_instantiation(template)  # NOTE(review): result unused below — confirm the call is needed only for side effects
        # Random id under which this application run is tracked.
        id_app = utils.id_generator()

        object_adaptors = self._instantiate_adaptors(id_app, template)
        logger.debug("list of objects adaptor: {}".format(object_adaptors))
        #self._save_file(id_app, path_to_file)
        # Register the new app id and persist the registry to disk.
        self.app_list.update({id_app: ""})
        self._update_json()
        logger.info("dictionnaty of id is: {}".format(self.app_list))

        # Execute all adaptors against the template under the new id.
        self._engine(object_adaptors, template, id_app)
        return id_app
Example #11
0
def censys_search(query, protocol):
    print "Extracting IPs for following query: " + query
    print "Please wait.."
    discovery_id = id_generator()
    pages = 2
    output_file = ""
    page = 1
    while page <= pages:
        print "Extracting IPs from page %s" % str(page)
        params = {'query': query, 'page': page}
        try:
            res = requests.post(CENSYS_API_URL + "/search/ipv4",
                                json=params,
                                auth=(CENSYS_UID, CENSYS_SECRET))
        except:
            print "Cannot communicate with Censys.io"
        payload = res.json()
        ip_list = []

        i = 0
        l = len(payload['results'])

        # Initial call to print 0% progress
        printProgressBar(i,
                         l,
                         prefix='Progress:',
                         suffix='Complete',
                         length=50)
        for r in payload['results']:
            ip = r["ip"]
            if ip in ip_list:
                continue
            ip_list.append(ip)
            output_file = get_output_file_by_scanner('censys', discovery_id,
                                                     protocol)
            try:
                with open(output_file, "a") as ips:
                    ips.write(ip)
                    ips.write("\n")
            except IOError:
                print "There is no such file: %s" % output_file
                return 0

            # Update Progress Bar
            i += 1
            sys.stdout.write(
                printProgressBar(i,
                                 l,
                                 prefix='Progress:',
                                 suffix='Complete',
                                 length=50))
            sleep(0.1)
            sys.stdout.flush()
        print ""
        if page == 1:
            pages = payload['metadata']['pages']
        page += 1
    print "Results saved under: %s" % output_file
    print "Finished"
    return 1
def mass_scan(ip, protocol):
    print "Scanning " + ip + " for " + protocol
    print "Please wait it may take time.."
    discovery_id = id_generator()
    param = get_protocol_by_service(protocol, 'massscan')
    output_file = ""
    FNULL = open(os.devnull, 'w')
    res = subprocess.call(['masscan', ip, param, '--max-rate', '100000', '-oX', 'masscan.xml'], stdout=FNULL,
                          stderr=subprocess.STDOUT)
    if not res:
        infile = open("masscan.xml", "r")
        contents = infile.read()
        soup = BeautifulSoup(contents, 'xml')
        titles = soup.find_all('address')
        output_file = get_output_file_by_scanner('massscan', discovery_id, protocol)
        for title in titles:
            try:
                with open(output_file, "a") as ips:
                    ips.write(title['addr'])
                    ips.write("\n")
            except IOError:
                print "There is no such file: %s" % output_file
                return 0
        os.remove("masscan.xml")
    else:
        print "Masscan requires root privileges. Please start Leviathan with sudo command. "
    print "Results saved under: %s" % output_file
    print "Finished"
    return 1
Example #13
0
def link_extract(query, number):
    """Run a Google CSE dork search and append every result URL to a
    per-discovery text file under assets/discovered/.
    (Python 2 code — print statements.)

    :param query: Google dork to search for
    :param number: number of results requested from the CSE API
    """
    try:
        print "Extracting URLs from Google for following dork: " + query
        discovery_id = id_generator()
        results = google_search(query,
                                GOOGLE_API_KEY,
                                GOOGLE_CSE_ID,
                                num=number)
        i = 0
        l = len(results)
        for result in results:
            # Progress bar rendered before and after each link write so it
            # stays current during the rate-limited loop.
            printProgressBar(i,
                             l,
                             prefix='Progress:',
                             suffix='Complete',
                             length=50)
            filename = os.path.join(BASE_DIR, 'assets', 'discovered',
                                    'google_web_' + str(discovery_id) + '.txt')
            with open(filename, "a") as links:
                links.write(result['link'])
                links.write("\n")
            i += 1
            sys.stdout.write(
                printProgressBar(i,
                                 l,
                                 prefix='Progress:',
                                 suffix='Complete',
                                 length=50))
            # Throttle to stay under the API rate limit.
            time.sleep(0.1)
            sys.stdout.flush()
        print "\nFinished"
    except Exception as e:
        # Deliberate best-effort catch-all: API quota errors are expected.
        print "Link extraction failed! Probably your API limit exceeded"
        print('Error on line {}'.format(sys.exc_info()[-1].tb_lineno), type(e),
              e)
def A_user(request, engine_fixture, A_group):
    """pytest fixture: create an activated user in A_group and delete it
    again at teardown. (Python 2 code — print statements, .encode('hex').)

    :returns: the created user model
    """
    user_table = models.account.User_TB

    # Random credentials for the throwaway user.
    user_name=id_generator(size=25).decode('utf-8')
    salt = os.urandom(26).encode('hex')  # NOTE(review): `salt` is never passed to create() — confirm it is intentionally unused
    password = os.urandom(10).encode('hex')

    with transaction.manager as tm:
        success, user = user_table.create(
                            user_name = user_name,
                            password=password,
                            activated=True,
                            group_id=A_group.group_id
                            )
        assert success == True

    print '(A_user fixture)=> created'
    def fin():
        # Teardown: remove the user row and flush any leftover dirty state.
        model = DBSession.query(user_table).filter(user_table.user_id == user.user_id).scalar()
        if model:
            DBSession.delete(model)
            DBSession.flush()
            transaction.commit()
            print '(A_user fixture)=> delete'
        if DBSession.dirty:
            transaction.commit()
    request.addfinalizer(fin)
    return user
Example #15
0
def engine_file():
    """ API functions to launch an application

    Reads the template from the uploaded 'file' part, an optional 'id' and
    optional literal-evaluated 'params' from the form, saves the template
    under files/templates/ and hands it to the submitter.
    :returns: JSON response carrying the launched app id
    """
    template = request.files['file']
    try:
        id_app = request.form['id']
    except KeyError:
        # Narrowed from `except Exception`: a missing form field raises a
        # KeyError subclass (werkzeug BadRequestKeyError).
        id_app = utils.id_generator()

    try:
        params = request.form['params']
    except KeyError:
        parsed_params = None
    else:
        # Safely evaluate the literal (dict/list/str) sent by the client.
        parsed_params = ast.literal_eval(params)

    template.save("{}/files/templates/{}.yaml".format(app.root_path, id_app))
    path_to_file = "files/templates/{}.yaml".format(id_app)

    id_app = submitter.launch(path_to_file=path_to_file,
                              parsed_params=parsed_params,
                              id_app=id_app)

    response = jsonify(dict(message="app:{}".format(id_app)))
    response.status_code = 200
    return response
Example #16
0
 def find_or_update(self, request):
     """Find a User (GET) or reset/update the stored password.

     :param request: HTTP method name; 'GET' performs a lookup with a
                     password check, anything else updates the password
     :returns: (email or None, status code)
     """
     account = User.get_by_email(self.email)
     logging.error('Ther You go hiud')
     if request == 'GET':
         if account is not None:
             if account.password == ceaser_cipher(self.password):
                 return account.email, 0
             else:
                 return None, 404
         else:
             return None, 404
     else:
         if account is not None:
             if self.password is None:
                 # No password supplied: issue a random temporary one and
                 # deactivate the account until it is confirmed.
                 temp_password = id_generator()
                 account.password = security.generate_password_hash(
                     temp_password, length=12)
                 account.active = False
                 return account.email, 406
             else:
                 # BUG FIX: the original hashed `self.Password` (capital P),
                 # an attribute never assigned anywhere; the attribute
                 # checked two lines above is `self.password`.
                 account.password = security.generate_password_hash(
                     self.password, length=12)
                 account.active = True
                 return account.email, 407
         else:
             return None, 405
Example #17
0
def create_default_metadata_list(num_metadatas=2):
    """
    Creates a list with random metadata values
    :param num_metadatas: Number of metadatas to be generated
    :return: A list of random metadatas
    """
    return [
        {
            KEY: id_generator(10),
            DESCRIPTION: id_generator(10),
            VALUE: id_generator(10),
        }
        for _ in range(num_metadatas)
    ]
Example #18
0
def build_game():
    """Create a new game from the POSTed settings, persist it, pre-create
    both attendees and return the per-player join URLs."""
    payload = request.get_json()
    p1_name = payload.get('player1Name', 'Player 1')
    p2_name = payload.get('player2Name', 'Player 2')
    deck_list = payload.get('decks', ['Codenames'])
    bystander_count = payload.get('bystanders', 9)

    new_game = make_game(p1_name, p2_name, bystander_count, deck_list)
    db_game = Game(token=id_generator(size=30),
                   game_details=json.dumps(new_game))
    db.session.add(db_game)
    db.session.commit()

    # Create 2 attendees to be used later
    attendee_one = db_game.add_pending_attendee(name=p1_name, index=1)
    attendee_two = db_game.add_pending_attendee(name=p2_name, index=2)
    new_game = safe_game(new_game, db_game.token)

    # Build one join URL per player, keyed player 1 then player 2.
    urls = {}
    for idx, attendee in ((1, attendee_one), (2, attendee_two)):
        urls['gameUrlPlayer{}'.format(idx)] = url_for(
            'start_game_split',
            game_id=db_game.token,
            player_token=attendee.token,
            token=make_login_token(signer, db_game.token, idx))
    return urls
Example #19
0
 def save_to_dir(self, list_document, labels):
     """Write each document to <result_dir>/<label-name>/<random-id>.

     :param list_document: documents to persist, parallel to `labels`
     :param labels: predicted label id per document
     """
     # BUG FIX: the original first body line was indented with a tab while
     # the rest used spaces — inconsistent indentation. (Python 2 code:
     # xrange/unicode; `open(..., encoding=)` presumably comes from io/codecs
     # — confirm the module-level import.)
     utils.mkdir(self.result_dir)
     # Ensure one sub-directory per known label name.
     _ = map(lambda x: utils.mkdir(os.path.join(self.result_dir, x)), my_map.name2label.keys())
     for i in xrange(len(labels)):
         output_dir = os.path.join(self.result_dir, my_map.label2name[labels[i]])
         with open(os.path.join(output_dir, utils.id_generator()), 'w', encoding='utf-8') as fw:
             fw.write(unicode(list_document[i]))
Example #20
0
def auth_client():
    """Issue (or reuse) a one-hour temporary login for a user and push it to
    the client over the /auth socketio namespace."""
    room = request.args.get('text')
    user_id = request.args.get('userid')

    res = user_traces_db.get_temp_login_from_user_id(user_id)
    created_at = res['temp_login_created']
    is_stale = created_at and (time.time() - created_at > 3600)
    if res['temp_login'] and not is_stale:
        # Reuse the still-valid login already stored for this user.
        temp_login = res['temp_login']
        print("got already-existing temp login %s" % temp_login)
    else:
        print("create a new temp login")
        # create a temporary user login
        temp_login = utils.id_generator()
        user_traces_db.create_temp_login(user_id, temp_login)
        print("\ttemp login: %s" % temp_login)

    # forward this request to the client directly using socketio
    r = {"login": temp_login, "date": request.args.get('date')}

    print("received: %s" % r)

    print("emit socketio message to /auth namespace")
    socketio.emit("authclientrequest", r, namespace='/auth', room=room)

    return json.dumps({"status": "ok", "code": temp_login})
Example #21
0
def create_default_metadata_list(num_metadatas=2):
    """
    Creates a list with random metadata values
    :param num_metadatas: Number of metadatas to be generated
    :return: A list of random metadatas
    """
    return [
        {
            KEY: id_generator(10),
            DESCRIPTION: id_generator(10),
            VALUE: id_generator(10),
        }
        for _ in range(num_metadatas)
    ]
Example #22
0
def censys_search(query, protocol):
    """Query the Censys ipv4 search API page by page and append every unique
    IP to the per-discovery scanner output file.
    (Python 2 code — print statements.)

    :param query: Censys search query string
    :param protocol: protocol name used to pick the output file
    :returns: 1 on completion, 0 on error, None when the API is unreachable
    """
    print "Extracting IPs for following query: " + query
    print "Please wait.."
    discovery_id = id_generator()
    pages = 2  # placeholder; the real count arrives with page 1's metadata
    output_file = ""
    page = 1
    while page <= pages:
        try:
            print "Extracting IPs from page %s" % str(page)
            params = {'query': query, 'page': page}
            try:
                res = requests.post(CENSYS_API_URL + "/search/ipv4", json=params, auth=(CENSYS_UID, CENSYS_SECRET))
            except:
                # Bare return (None) — distinct from the 0 error returns below.
                print "Cannot communicate with Censys.io"
                return   
            payload = res.json()
            ip_list = []
            if 'results' in payload.keys():
                i = 0
                l = len(payload['results'])

                # Initial call to print 0% progress
                printProgressBar(i, l, prefix='Progress:', suffix='Complete', length=50)
                for r in payload['results']:
                    ip = r["ip"]
                    if ip in ip_list:
                        # Skip duplicates already written this page.
                        continue
                    ip_list.append(ip)
                    output_file = get_output_file_by_scanner('censys', discovery_id, protocol)
                    try:
                        with open(output_file, "a") as ips:
                            ips.write(ip)
                            ips.write("\n")
                    except IOError:
                        print "There is no such file: %s" % output_file
                        return 0
                    except Exception as e:
                        print('Error on line {}'.format(sys.exc_info()[-1].tb_lineno), type(e), e)
                        return 0
                        

                    # Update Progress Bar
                    i += 1
                    sys.stdout.write(printProgressBar(i, l, prefix='Progress:', suffix='Complete', length=50))
                    sleep(0.1)
                    sys.stdout.flush()
                print ""
                if page == 1:
                    # Page 1 carries the real total page count.
                    pages = payload['metadata']['pages']
                page += 1
            else:
                print "Can not communicate with Censys"
                return 0
        except KeyboardInterrupt:
            # Allow Ctrl-C to stop pagination but still report the file.
            break
    print "Results saved under: %s" % output_file
    print "Finished"
    return 1
Example #23
0
def create_default_attribute_list(num_attributes=2):
    """
    Creates a list with random attribute values and default type
    :param num_attributes: Number of attributes to be generated
    :return: A list of random attributes
    """
    return [
        {
            KEY: id_generator(10),
            DESCRIPTION: id_generator(10),
            VALUE: id_generator(10),
            ATTRIBUTE_TYPE: ATTRIBUTE_TYPE_PLAIN,
        }
        for _ in range(num_attributes)
    ]
Example #24
0
def create_default_attribute_list(num_attributes=2):
    """
    Creates a list with random attribute values and default type
    :param num_attributes: Number of attributes to be generated
    :return: A list of random attributes
    """
    return [
        {
            KEY: id_generator(10),
            DESCRIPTION: id_generator(10),
            VALUE: id_generator(10),
            ATTRIBUTE_TYPE: ATTRIBUTE_TYPE_PLAIN,
        }
        for _ in range(num_attributes)
    ]
Example #25
0
    def post(self, request):
        """Email verification endpoint.

        With a JSON body: verify the (email, token) pair against the token
        stored in redis and mark the user's email as verified.
        Without a body: generate a 128-char token for the authenticated
        user, store it in redis for 180s and mail a verification link.
        """
        data = request.data

        if data:
            if 'token' not in data:
                return JsonResponse({'error': 'no token'},
                                    status=HTTP_400_BAD_REQUEST)

            if 'email' not in data:
                return JsonResponse({'error': 'no email'},
                                    status=HTTP_400_BAD_REQUEST)

            email = data['email']

            user = User.objects.filter(email=email).first()

            if not user:
                return JsonResponse({'error': 'no user'},
                                    status=HTTP_404_NOT_FOUND)

            key = f'VERIFY:{email}'
            token = redis.get(key)

            if not token:
                return JsonResponse({'error': 'no token'},
                                    status=HTTP_404_NOT_FOUND)

            # NOTE(review): redis.get may return bytes while data['token']
            # is str — confirm the client is configured with
            # decode_responses=True, otherwise this comparison never matches.
            if token != data.get('token'):
                return JsonResponse({'error': 'incorrect'},
                                    status=HTTP_400_BAD_REQUEST)

            user.email_verified = True
            user.save()

            # One-shot token: remove it once consumed.
            del redis[key]

        else:
            if not request.user.is_authenticated:
                return JsonResponse({}, status=HTTP_401_UNAUTHORIZED)

            key = f'VERIFY:{request.user.email}'
            token = id_generator(size=128)
            redis.set(key, token, 180)  # expires after 3 minutes

            # `verify_url` is presumably a module-level constant — confirm.
            html = loader.render_to_string(
                'email_verify_template.html', {
                    'token': token,
                    'email': request.user.email,
                    'verify_url': verify_url
                })

            send_mail('[FreeChart] 이메일 인증 링크',
                      '',
                      from_email=EMAIL_HOST_SENDER,
                      recipient_list=[request.user.email],
                      html_message=html)

        return JsonResponse({})
Example #26
0
 def _read_file(filename, length):
     """Return the contents of `filename` with newlines stripped; when the
     file cannot be read, create it holding a freshly generated key of the
     given length and return that key."""
     try:
         with open(filename, "r") as handle:
             contents = handle.read()
         return contents.replace("\n", "")
     except IOError:
         with open(filename, "w") as handle:
             fresh_key = id_generator(length)
             handle.write(fresh_key)
             return fresh_key
Example #27
0
 def _read_file(filename, length):
     """Return the contents of `filename` with newlines stripped; when the
     file cannot be read, create it holding a freshly generated key of the
     given length and return that key."""
     try:
         with open(filename, "r") as handle:
             contents = handle.read()
         return contents.replace("\n", "")
     except IOError:
         with open(filename, "w") as handle:
             fresh_key = id_generator(length)
             handle.write(fresh_key)
             return fresh_key
Example #28
0
 def insert(self, doc, key=None):
     """Insert `doc` into the LSH index under `key` (random id when absent).

     :param doc: document exposing k_shingles and get_minhash()
     :param key: optional identifier for the LSH entry
     :returns: an explanatory unicode message when the document has zero
               shingles and is rejected; otherwise None
     """
     if key is None:
         key = utils.id_generator()
     # NOTE(review): the minhash is computed before the empty-shingle guard
     # below — wasted work for rejected documents. Confirm get_minhash has
     # no required side effects before reordering.
     minhash = doc.get_minhash(doc.k_shingles,
                               config.MINHASH_CONFIG['num_permutation'])
     if len(doc.k_shingles) == 0:
         return u'Does not insert this document to database.\nDocument\'s shingle = 0.\nDocument need to contain at least %d word' \
                % (config.SHINGLE_CONFIG['k'])
     self.lsh.add(key, minhash)
Example #29
0
def create_scale_specific_rule(operation=None, name=RANDOM,
                               mem_value=1, mem_operand='less equal', cpu_value=0, cpu_operand='less',
                               hdd_value=0, hdd_operand='less', net_value=0, net_operand='less'):

    """Method to create a default scalability rule body used to create or update rules
    :param operation: operation performed by the Scalability manager; a fresh
                      random choice from RULE_ACTION_SCALE_LIST when not given
    :param name: Rule name; a fresh random name is generated for RANDOM (the default)
    :param mem_value/cpu_value/hdd_value/net_value: value of the parameter to match
    :param mem_operand/cpu_operand/hdd_operand/net_operand: operand of the parameter to match
    :returns json body (dict)
    """

    # Resolve randomized defaults per call. The original signature evaluated
    # random.choice(...) and Utils.id_generator() once at import time, so
    # every no-argument call reused the same "random" values.
    if operation is None:
        operation = random.choice(RULE_ACTION_SCALE_LIST)
    if name == RANDOM:
        name = Utils.id_generator()

    rule = {
            RULE_ACTION: {
                RULE_ACTION_NAME: RULE_ACTION_NAME_LIST[0],
                RULE_OPERATION: operation
            },
            RULE_NAME: name,
            RULE_CONDITION: {
                MEM: {
                    RULE_OPERAND: mem_operand,
                    RULE_VALUE: mem_value
                },
                CPU: {
                    RULE_OPERAND: cpu_operand,
                    RULE_VALUE: cpu_value
                },
                HDD: {
                    RULE_OPERAND: hdd_operand,
                    RULE_VALUE: hdd_value
                },
                NET: {
                    RULE_OPERAND: net_operand,
                    RULE_VALUE: net_value
                }
            }
    }

    return rule
Example #30
0
def create_notify_specific_rule(body=DEFAULT_BODY, email="*****@*****.**", name=None, mem_value=1,
                                mem_operand='less equal', cpu_value=0, cpu_operand='less', hdd_value=0,
                                hdd_operand='less', net_value=0, net_operand='less'):

    """Method to create a default notify rule body used to create or update rules
    :param body: body to be send to the user
    :param email: notification recipient address
    :param name: Rule name (a random name is generated when omitted or RANDOM)
    :param cpu_value mem_value hdd_value net_value: value of the parameter to match
    :param cpu_operand mem_operand hdd_operand net_operand: operand of the parameter to match
    :returns json body (dict)
    """

    # Generate the name at call time. The previous default,
    # `name=Utils.id_generator()`, was evaluated once at function-definition
    # time, so every call without a name reused the same "random" name.
    if name is None or name == RANDOM:
        name = Utils.id_generator()

    rule = {
        RULE_ACTION: {
            RULE_ACTION_NAME: RULE_ACTION_NAME_LIST[1],
            BODY: body,
            EMAIL: email
        },
        RULE_NAME: name,
        RULE_CONDITION: {
            MEM: {
                RULE_OPERAND: mem_operand,
                RULE_VALUE: mem_value
            },
            CPU: {
                RULE_OPERAND: cpu_operand,
                RULE_VALUE: cpu_value
            },
            HDD: {
                RULE_OPERAND: hdd_operand,
                RULE_VALUE: hdd_value
            },
            NET: {
                RULE_OPERAND: net_operand,
                RULE_VALUE: net_value
            }
        }
    }
    return rule
Example #31
0
 async def test_chats_view_post_with_non_existing_members(self):
     # Creating a chat whose members were never signed up must be
     # rejected with 422 and a UserDoesNotExist error in the body.
     request, session_id = await self.signup()
     assert request.status == 200

     payload = {
         "members": [
             utils.id_generator(10),
             utils.id_generator(10),
         ]
     }
     headers = {"Authorization": "SessionId %s" % session_id}
     request = await self.client.request("POST",
                                         "/chats",
                                         data=payload,
                                         headers=headers)
     assert request.status == 422
     text = await request.text()
     assert str(exceptions.UserDoesNotExist) in text
def test_change_password(engine_fixture, A_user):
    # After change_password(), the new password must validate for the
    # same (unchanged) user name.
    user_table = models.account.User_TB
    original_user_name = A_user.user_name

    with transaction.manager as tm:
        new_user_password = id_generator(size=8)
        A_user.change_password(new_user_password)
        # Re-fetch the row to make sure the change was actually persisted.
        found_user = DBSession.query(user_table).filter(user_table.user_name == original_user_name).scalar()
        success, msg = found_user.pwd_validate(original_user_name, new_user_password)
        print msg
        assert success == True
Example #33
0
def annotate_vcf(filename):
    """Annotate *filename* with ANNOVAR and return the CSV output name.

    :param filename: path of the VCF file to annotate
    :returns: name of the generated ``<prefix>.hg19_multianno.csv`` file
    """

    prefix = id_generator()

    # Build the command as an argument list and run it without a shell:
    # a filename containing spaces or shell metacharacters can no longer
    # break — or inject into — the command line.
    cmd = [
        "./table_annovar.pl", filename, "humandb/",
        "-buildver", "hg19",
        "-out", prefix,
        "-remove",
        "-protocol", "refGene,esp6500si_all,ljb26_pp2hvar,clinvar_20140902",
        "-operation", "g,f,f,f",
        "-nastring", ".",
        "-vcfinput", "-csvout",
    ]

    subprocess.call(cmd, cwd=settings.ANNOVAR_PATH)

    return '%s.hg19_multianno.csv' % (prefix,)
Example #34
0
def annotate_vcf(filename):
	"""Run ANNOVAR's table_annovar.pl on *filename* and return the name
	of the resulting CSV annotation file."""
	prefix = id_generator()

	template = ("./table_annovar.pl %s humandb/ -buildver hg19 -out %s"
	            " -remove -protocol refGene,esp6500si_all,ljb26_pp2hvar,clinvar_20140902"
	            " -operation g,f,f,f -nastring . -vcfinput -csvout")

	subprocess.call(template % (filename, prefix), cwd=settings.ANNOVAR_PATH, shell=True)

	return '%s.hg19_multianno.csv' % (prefix)
Example #35
0
def getbid():
    """Return this node's persistent id, creating /tmp/node_id on first use."""
    node_id_path = '/tmp/node_id'
    if not os.path.isfile(node_id_path):
        from utils import id_generator
        logger.info('can not find node_id, create a tmp node_id')
        node_id = id_generator(12)
        # Persist with a trailing newline so readers can strip it safely.
        with open(node_id_path, 'w') as f:
            f.write(node_id + '\n')
        return node_id

    with open(node_id_path) as f:
        # strip() instead of the old read()[:-1]: the slice chopped off the
        # LAST CHARACTER OF THE ID whenever the file had no trailing
        # newline — which is exactly how this function used to write it.
        return f.read().strip()
Example #36
0
    def send_client_ready(self):
        # Announce readiness for this pad over the socket.io connection.
        def on_response(self, *args):
            log.debug("[Connected:Response] %s" % args)

        payload = dict(component='pad',
                       type="CLIENT_READY",
                       padId=self.socketIO.params['padid'],
                       sessionID=None,
                       password=None,
                       token="t.%s" % (id_generator(20),),
                       protocolVersion=2)
        self.socketIO.emit('message', payload, on_response)
def test_modify_user(engine_fixture, A_user):
    # Renaming a user must keep the same primary key (user_id).
    user_table = models.account.User_TB

    # NOTE(review): original_user_id is never used below — verify intent.
    original_user_id = A_user.user_id
    new_user_name = id_generator(size=5).decode('utf-8')

    with transaction.manager as tm:
        A_user.user_name = new_user_name
        DBSession.flush()
        # Look the user up by the NEW name and confirm it is the same row.
        find_user = DBSession.query(user_table).filter(user_table.user_name == new_user_name).scalar()
        assert A_user.user_id == find_user.user_id
Example #38
0
def start_session():
    """Ensure the browser session has an id and register it with the study."""
    if 'sid' in session:
        session_id = session['sid']
        print("using session-id: %s" % session_id)
    else:
        # First visit: mint a study-scoped session id and remember it.
        session_id = "SemanticaStudy-" + utils.id_generator()
        print("created session-id: %s" % session_id)
        session['sid'] = session_id

    study.start_session(session_id, get_ip_address(request))
Example #39
0
def default_product(name=None, metadata=None, attributes=None):
    """Build a default product request body.

    Sections whose argument is None are stripped from the result.
    """
    product = {PRODUCT_DESCRIPTION: id_generator(20),
               PRODUCT_NAME: name,
               PRODUCT_METADATAS: metadata,
               PRODUCT_ATTRIBUTES: attributes}
    body_dict = {PRODUCT: product}

    if attributes is None:
        body_dict = delete_keys_from_dict(body_dict, PRODUCT_ATTRIBUTES)
    if metadata is None:
        body_dict = delete_keys_from_dict(body_dict, PRODUCT_METADATAS)

    return body_dict
Example #40
0
 def save_stories_to_file(self):
     # Dump every newly crawled story into result/<date>/<random-name>.
     print('save stories to file...')
     utils.mkdir('result')
     output_dir = os.path.join('result', self.date.strftime('%Y-%m-%d'))
     utils.mkdir(output_dir)
     for story in self.crawler.new_stories:
         target = os.path.join(output_dir, utils.id_generator())
         with open(target, 'w', encoding='utf-8') as f:
             f.write(story)
Example #41
0
 def __clone_steiner_vertices(self, h):
     # NOTE(review): appears to clone every hot-spot vertex found in
     # subtree *h* so the clone can be used independently of the original
     # — confirm against the surrounding Steiner-tree algorithm.
     new_candidates = []
     for sv in self.__vert_tree[h].intersection(self.__hot_spots):
         new_candidate_id = id_generator()
         # Copy the vertex tuple; element [1] and [2] are dicts, so they
         # are .copy()'d to keep the clone independent of the original.
         self.__graph[new_candidate_id] = \
             (self.__graph[sv][0], self.__graph[sv][1].copy(), self.__graph[sv][2].copy())
         # Mirror the clone into each neighbour's adjacency map.
         for w, dist in self.__graph[sv][1].iteritems():
             self.__graph[w][1][new_candidate_id] = dist
         self.__hot_spots.append(new_candidate_id)
         # The clone serves the same medoid as the vertex it was cloned from.
         self.__medoids.update({new_candidate_id: self.__medoids[sv]})
         new_candidates.append(new_candidate_id)
     # Refresh distances for the new nodes; full paths are not needed here.
     self.__graph.update_dist_paths(new_candidates, compute_paths=False)
Example #42
0
 def contract_suitable_regions(self, generator, excluded_nodes=None, get_centroid_medoid=False):
     """Collapse each suitable region of the graph into a single node.

     NOTE(review): relies on get_suitable_regions() returning, per region,
     a tuple (region, border_nodes, _, _, _, dist_paths) — confirm against
     that helper's implementation.
     """
     self.contracted_regions = self.get_suitable_regions(generator, excluded_nodes, get_border_internal_nodes=True,
                                                         get_centroid_medoid=get_centroid_medoid,
                                                         get_dist_paths_within_region=True)
     # When an adjacent node to a region has more than one neighbour within the region, an auxiliary node is created
     # for every edge between this adjacent node and the region.
     # For each region...
     for region_id, (region, border_nodes, _, _, _, dist_paths) in self.contracted_regions.iteritems():
         # Create a dictionary of adjacent nodes with their corresponding border nodes of the region.
         adj_nodes_to_region = {}
         for bn in border_nodes:
             for adj_node in self[bn][1].keys():
                 if adj_node not in region:
                     if adj_node not in adj_nodes_to_region:
                         adj_nodes_to_region[adj_node] = [bn]
                     else:
                         adj_nodes_to_region[adj_node].append(bn)
         # Create a new region node in the graph. Distances and paths between the nodes of the region are saved.
         self[region_id] = (
             generator.weights["VERY_SUITABLE"][0], {}, {'contracted': True, 'dist_paths': dist_paths})
         # Wire the adjacent nodes with the new nodes (region nodes or auxiliary nodes).
         dropped_edges = {}
         for adj_node, bns in adj_nodes_to_region.iteritems():
             # When an adjacent node has only one neighbour in the region...
             if len(bns) == 1:
                 # Wire the adjacent node with the new region node and drop the edge between the adjacent node and
                 # the only neighbour in the region.
                 self[adj_node][1][region_id] = self[adj_node][1][bns[0]]
                 self[region_id][1][adj_node] = self[adj_node][1][bns[0]]
                 del self[adj_node][1][bns[0]]
                 dropped_edges[adj_node] = bns[0]
             else:  # When it has more than one...
                 # For each neighbour in the region...
                 for bn in bns:
                     # Auxiliary nodes are needed when an adjacent node has more than one neighbour in the region.
                     new_node_id = id_generator()
                     self[new_node_id] = (generator.weights["WARNING"][0], {}, {})
                     # Wire the adjacent node with the new auxiliary node.
                     self[adj_node][1][new_node_id] = self[adj_node][1][bn]
                     self[new_node_id][1][adj_node] = self[adj_node][1][bn]
                     # Wire the auxiliary node with the region.
                     self[region_id][1][new_node_id] = 0
                     self[new_node_id][1][region_id] = 0
                     # A list of the new created auxiliary nodes is saved.
                     self.auxiliary_nodes.add(new_node_id)
                     # Drop the edge between the adjacent node and this neighbour in the region.
                     del self[adj_node][1][bn]
                     dropped_edges[new_node_id] = bn
         # Save dropped edges in new region node.
         self[region_id][2]['dropped_edges'] = dropped_edges
         # Drop nodes from the graph.
         for w in region:
             del self[w]
Example #43
0
    async def test_chat_view_send_message_via_post(self):
        # Posting a message into a freshly created chat must return 201.
        request, session_id = await self.signup()
        assert request.status == 200
        request, json_text = await self.register_chat(session_id=session_id)
        assert request.status == 201

        new_chat_id = json_text['id']
        text = utils.id_generator(100)
        request = await self.send_message(chat_id=new_chat_id,
                                          message=text,
                                          session_id=session_id)
        assert request.status == 201
Example #44
0
def generate_qrcode():
    """Render a page with a QR code for the ``text`` query parameter."""
    text = request.args.get('text')
    print("text: \"%s\"" % text)

    # create the login QR code
    session_id = "TrackingAdvisorQRCode-" + utils.id_generator()
    qrcode_filename = "qrcode-%s.svg" % session_id
    utils.generate_qr_code(url=text,
                           logo_path="./static/img/location-arrow.svg",
                           output_file="./static/img/" + qrcode_filename)

    return render_template('qrcode.html', qrcode=qrcode_filename)
def test_modify_group(engine_fixture, A_group):
    # Renaming a group must keep the same primary key (group_id).
    group_table = models.account.Group_TB
    model = DBSession.query(group_table).filter(group_table.group_name == A_group.group_name).scalar()
    assert model.group_id == A_group.group_id

    original_group_id = model.group_id
    new_group_name = id_generator(size=5).decode('utf-8')

    with transaction.manager as tm:
        A_group.group_name = new_group_name
        DBSession.flush()
        # Look the group up by the NEW name and confirm it is the same row.
        find_group = DBSession.query(group_table).filter(group_table.group_name == new_group_name).scalar()
        assert find_group.group_id == original_group_id
Example #46
0
def new(deckname, uId, desc):
    """Create a new deck in the database
    Creates a new deck in the 'decks' database
    This function is assumed to be called within a request context so the global
    variable `g` is available to it
    """
    # generate a unique id
    # TODO: fix collisions
    deck_id = id_generator(size=16)
    sql = """
    INSERT INTO decks (deck_id, name, creator, description, hash)
    VALUES(%s, %s, %s, %s, %s)
    """
    params = (deck_id, deckname, uId, desc, 0)  # hash to be updated later
    g.cur.execute(sql, params)
    g.db.commit()
    return (g.cur.lastrowid, deck_id)
def A_group(request, engine_fixture):
    # Pytest fixture: create a throw-away group and guarantee its cleanup.
    group_table = models.account.Group_TB
    with transaction.manager as tm:
        group = group_table(group_name=id_generator(size=25).decode('utf-8'))
        DBSession.add(group)
        DBSession.flush()
    print "(A_group fixture) created"
    def fin():
        # Finalizer: delete the group if it still exists, then commit any
        # remaining dirty state so nothing leaks between tests.
        model = DBSession.query(group_table).filter(group_table.group_id == group.group_id).scalar()
        if model:
            DBSession.delete(model)
            DBSession.flush()
            transaction.commit()
            print '(A_group fixture) delete'
        if DBSession.dirty:
            transaction.commit()
    request.addfinalizer(fin)
    return group
def test_delete_group(engine_fixture):
    """Create a group, delete it, and verify it is really gone."""
    group_table = models.account.Group_TB
    new_group_name = id_generator(size=25).decode('utf-8')
    with transaction.manager as tm:

        # create
        new_model = group_table(group_name=new_group_name)
        DBSession.add(new_model)
        DBSession.flush()

        model = DBSession.query(group_table).filter(group_table.group_name == new_group_name).scalar()
        assert model

        # delete
        DBSession.delete(model)
        DBSession.flush()

        model = DBSession.query(group_table).filter(group_table.group_name == new_group_name).first()
        # `is None` instead of `== None`: identity is the correct way to
        # test for absence (`==` can be overloaded, e.g. by SQLAlchemy
        # models, and PEP 8 mandates `is` for None comparisons).
        assert model is None
Example #49
0
def new(username, email, password):
    """Stage a new user awaiting email verification; return the verify id."""
    # get the verification id's already used
    g.cur.execute("""
    SELECT verifyId
    FROM tempusers""")
    exist = [x[0] for x in g.cur.fetchall()]

    # create a new verification_id
    verify_id = id_generator(size=64, existing=exist)

    hashed = bcrypt.hashpw(password, bcrypt.gensalt())
    row = (verify_id,
           username,
           hashed,
           email,
           datetime.now().isoformat())
    g.cur.execute("""
    INSERT INTO tempusers(verifyId, username, password, email, created)
    VALUES(%s, %s, %s, %s ,%s)""", row)
    g.db.commit()
    return verify_id
def test_delete_user(engine_fixture, A_group):
    # Creating and then deleting a user must leave no row behind.
    user_table = models.account.User_TB

    user_name=id_generator(size=25).decode('utf-8')
    # Python 2 only: str.encode('hex') hex-encodes the random bytes.
    # NOTE(review): `salt` is never used below — verify intent.
    salt = os.urandom(26).encode('hex')
    password = os.urandom(10).encode('hex')

    with transaction.manager as tm:
        success, user = user_table.create(
                            user_name = user_name,
                            password=password,
                            activated=True,
                            group_id=A_group.group_id
                            )
        assert success == True
        #delete
        DBSession.delete(user)
        DBSession.flush()
        model = DBSession.query(user_table).filter(user_table.user_id == user.user_id).first()
        assert model == None
Example #51
0
def link_extract(query, number):
    # Run a Google CSE "dork" query and append every result URL to a
    # per-discovery text file under assets/discovered/.
    try:
        print "Extracting URLs from Google for following dork: " + query
        discovery_id = id_generator()
        results = google_search(query, GOOGLE_API_KEY, GOOGLE_CSE_ID, num=number)
        i = 0
        l = len(results)
        for result in results:
            printProgressBar(i, l, prefix='Progress:', suffix='Complete', length=50)
            filename = os.path.join(BASE_DIR, 'assets', 'discovered', 'google_web_' + str(discovery_id) + '.txt')
            # Append mode: one output file accumulates all links of this run.
            with open(filename, "a") as links:
                links.write(result['link'])
                links.write("\n")
            i += 1
            sys.stdout.write(printProgressBar(i, l, prefix='Progress:', suffix='Complete', length=50))
            time.sleep(0.1)
            sys.stdout.flush()
        print "\nFinished"
    except Exception as e:
        # Most common failure mode in practice: API quota exhausted.
        print "Link extraction failed! Probably your API limit exceeded"
        print('Error on line {}'.format(sys.exc_info()[-1].tb_lineno), type(e), e)
Example #52
0
def new(f, filename, cId):
    """Store uploaded file *f* as a new resource and register it in the DB.

    :param f: file-like object with the resource contents
    :param filename: original file name (its extension is reused)
    :param cId: owning collection id
    :returns: (last DB row id, generated resource id)
    """
    # get existing resource ids & generate the id
    g.cur.execute("""
    SELECT resource_id
    FROM resources
    """)
    existing = ['00000000'] + [x[0] for x in g.cur.fetchall()]
    resource_id = id_generator(size=8, existing=existing)

    # save the file into the resource directory
    ext = os.path.splitext(filename)[1]
    dest = os.path.join(current_app.config['RESOURCE_DIRECTORY'], resource_id + ext)
    # `with` guarantees the handle is closed (the old code leaked it).
    with open(dest, mode="w") as outfile:
        outfile.write(f.read())

    # add the resource to the resources table
    # Fixed: the VALUES clause had only four placeholders for five
    # columns / five parameters, so this INSERT could never succeed.
    g.cur.execute("""
    INSERT
    INTO resources(cId, resource_id, name, path, hash)
    VALUES(%s, %s, %s, %s, %s)""", (cId, resource_id, filename, "http://www.flashyapp.com/resources/" + resource_id + ext, 0))
    g.db.commit()
    return (g.cur.lastrowid, resource_id)
Example #53
0
def shodan_search(query, protocol):
    # Query Shodan and dump the unique result IPs into a scanner-specific
    # output file. Returns 1 on success, 0 on failure.
    print "Extracting IPs for following query: " + query
    print "Please wait.."
    discovery_id = id_generator()
    api = shodan.Shodan(SHODAN_API_KEY)
    try:
        results = api.search(query)
    except:
        print "Cannot communicate with Shodan.io"
        return 0
    ip_list = []
    output_file = ''
    i = 0
    l = len(results['matches'])
    for result in results['matches']:
        try:
            # Initial call to print 0% progress
            printProgressBar(i, l, prefix='Progress:', suffix='Complete', length=50)
            ip = result['ip_str']
            # Skip IPs we have already written.
            if ip in ip_list:
                continue
            ip_list.append(ip)
            output_file = get_output_file_by_scanner('shodan', discovery_id, protocol)
            try:
                with open(output_file, "a") as ips:
                    ips.write(ip)
                    ips.write("\n")
            except IOError:
                print "There is no such file: %s" % output_file
                return 0
            i += 1
            sys.stdout.write(printProgressBar(i, l, prefix='Progress:', suffix='Complete', length=50))
            sleep(0.1)
            sys.stdout.flush()
        except KeyboardInterrupt:
            # Let the user abort a long scan with Ctrl-C; keep what we have.
            break
    print "\nResults saved under: %s" % output_file
    print "Finished"
    return 1
Example #54
0
def login(username, password):
    """Validate credentials and open a new session.

    :returns: the new session id, or None when the user is unknown or the
        password does not match.
    """
    # Parameters must be passed as a tuple: `(username)` is just a
    # parenthesised string, and DB-API drivers treat a bare string as a
    # sequence of characters rather than as a single parameter.
    g.cur.execute("""
    SELECT id, password
    FROM users
    WHERE username=%s""",
                  (username,))

    result = g.cur.fetchone()
    if result is None:
        return None

    uId, hashed = result

    # bcrypt.hashpw(candidate, stored_hash) reproduces the stored hash
    # only when the candidate password is correct.
    if bcrypt.hashpw(password, hashed) != hashed:
        return None

    # Get existing session ids
    g.cur.execute("""
    SELECT id
    FROM sessions
    WHERE uId=%s""",
                  (uId,))
    res = [x[0] for x in g.cur.fetchall()]

    # Create a new session id
    sId = id_generator(size=64, existing=set(res))

    g.cur.execute("""
    INSERT INTO sessions(id, uId, lastactive)
    VALUES(%s, %s, %s)""",
                  (sId,
                   uId,
                   datetime.now().isoformat()))
    g.db.commit()

    return sId
Example #55
0
def playAllFiles(items, monitor):
    """Queue every item in *items* into a fresh Kodi video playlist and
    start playback, recording per-item play-session data on *monitor*.
    """
    log.debug("playAllFiles called with items: {0}", items)
    server = download_utils.getServer()

    playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
    playlist.clear()

    for item in items:

        item_id = item.get("Id")
        sources = item.get("MediaSources")
        # NOTE(review): always uses the first media source — confirm that
        # multi-source items are not expected on this code path.
        selected_media_source = sources[0]

        listitem_props = []
        playback_type = "0"
        playurl = None
        play_session_id = id_generator()
        log.debug("play_session_id: {0}", play_session_id)

        # check if strm file, path will contain contain strm contents
        if selected_media_source.get('Container') == 'strm':
            playurl, listitem_props = PlayUtils().getStrmDetails(selected_media_source)
            if playurl is None:
                return

        if not playurl:
            playurl, playback_type = PlayUtils().getPlayUrl(item_id, selected_media_source, False, play_session_id)

        log.debug("Play URL: {0} ListItem Properties: {1}", playurl, listitem_props)

        # Map the numeric playback type to a human-readable label.
        playback_type_string = "DirectPlay"
        if playback_type == "2":
            playback_type_string = "Transcode"
        elif playback_type == "1":
            playback_type_string = "DirectStream"

        # add the playback type into the overview
        if item.get("Overview", None) is not None:
            item["Overview"] = playback_type_string + "\n" + item.get("Overview")
        else:
            item["Overview"] = playback_type_string

        # add title decoration is needed
        item_title = item.get("Name", i18n('missing_title'))
        list_item = xbmcgui.ListItem(label=item_title)

        # add playurl and data to the monitor
        data = {}
        data["item_id"] = item_id
        data["playback_type"] = playback_type_string
        data["play_session_id"] = play_session_id
        data["play_action_type"] = "play_all"
        monitor.played_information[playurl] = data
        log.debug("Add to played_information: {0}", monitor.played_information)

        list_item.setPath(playurl)
        list_item = setListItemProps(item_id, list_item, item, server, listitem_props, item_title)

        playlist.add(playurl, list_item)

    xbmc.Player().play(playlist)
Example #56
0
def external(json_data, algo_id, request):
	''' Make external algorithm run for request that came from trusted ip.
	
	First creates the InputFile object of the request.POST's json's data and saves
	it. Then executes the actual run.
	
	Returns AlgorithmRun id.
	
	TODO: Refactor me
	'''
	from Stemweb.files.models import InputFile
	algorithm = get_object_or_404(Algorithm, pk = algo_id)
	csv_file = tempfile.NamedTemporaryFile(mode = 'w', delete = False)
	ext = ""
	csv_data = None
	# Choose the on-disk format from the algorithm's expected file extension.
	if algorithm.file_extension == 'csv': 
		csv_data = json_data.pop('data')
		ext = ".csv"
	elif algorithm.file_extension == 'nex': 
		from csvtonexus import csv2nex
		csv_data = csv2nex(json_data.pop('data'))
		ext = ".nex"
		
	# First write the file in the temporary file and close it.
	with codecs.open(csv_file.name, mode = 'w', encoding = 'utf8') as f:
		f.write(csv_data.encode('ascii', 'replace'))	

	# Then construct a mock up InMemoryUploadedFile from it for the InputFile
	mock_file = None
	input_file_id = None
	with open(csv_file.name, "r") as f:
		name =  datetime.now().strftime("%Y%m%d-%H%M%S") + "-" + utils.id_generator() + ext
		mock_file = InMemoryUploadedFile(file = f, field_name = 'file', name = name, \
									content_type = 'utf8', size = os.path.getsize(csv_file.name), charset = 'utf-8')	
			
		input_file = InputFile(name = name, file = mock_file)  
		input_file.extension = ext
		input_file.save() # Save to be sure input_file.id is created 
		input_file_id = input_file.id
	
	# Re-fetch so we hold a fully persisted InputFile instance.
	input_file = InputFile.objects.get(pk = input_file_id)
	parameters = json_data['parameters']
	#print input_file
	
	# Find which argument key of the algorithm receives the input file.
	input_file_key = ''
	for arg in algorithm.args.all():
		if arg.value == 'input_file':
			input_file_key = arg.key
	
	run_args = utils.build_external_args(parameters, input_file_key, input_file,
			algorithm_name = algorithm.name)
	current_run = AlgorithmRun.objects.create(input_file = input_file,
										algorithm = algorithm, 
                                    	folder = os.path.join(algo_root, run_args['folder_url']),
                                    	external = True)
	
	current_run.extras = json.dumps(json_data, encoding = 'utf8')
	current_run.save()	# Save to ensure that id generation is not delayed.
	rid = current_run.id
	user_id = json_data['userid']
	return_host = json_data['return_host']
	return_path = json_data['return_path']
	kwargs = {'run_args': run_args, 'algorithm_run': rid}
	call = algorithm.get_callable(kwargs)
	# Run asynchronously; success/error callbacks report back to return_host.
	call.apply_async(kwargs = kwargs, link = external_algorithm_run_finished.s(rid, user_id, return_host, return_path), \
					link_error = external_algorithm_run_error.s(rid, user_id, return_host, return_path))
	return current_run.id
Example #57
0
 def screenshot_name(self):
     # Timestamped, collision-resistant file name for a screenshot.
     stamp = time.strftime("%Y_%m_%d-%H_%M_%S")
     return "%s_%s.png" % (stamp, id_generator(6))
Example #58
0
def playFile(play_info, monitor):

    id = play_info.get("item_id")

    # if this is a list of items them add them all to the play list
    if isinstance(id, list):
        return playListOfItems(id, monitor)

    auto_resume = play_info.get("auto_resume", "-1")
    force_transcode = play_info.get("force_transcode", False)
    media_source_id = play_info.get("media_source_id", "")
    use_default = play_info.get("use_default", False)

    log.debug("playFile id({0}) resume({1}) force_transcode({2})", id, auto_resume, force_transcode)

    settings = xbmcaddon.Addon()
    addon_path = settings.getAddonInfo('path')
    force_auto_resume = settings.getSetting('forceAutoResume') == 'true'
    jump_back_amount = int(settings.getSetting("jump_back_amount"))

    server = download_utils.getServer()

    url = "{server}/emby/Users/{userid}/Items/" + id + "?format=json"
    data_manager = DataManager()
    result = data_manager.GetContent(url)
    log.debug("Playfile item: {0}", result)

    if result is None:
        log.debug("Playfile item was None, so can not play!")
        return

    # if this is a season, tv show or album then play all items in that parent
    if result.get("Type") == "Season" or result.get("Type") == "MusicAlbum":
        log.debug("PlayAllFiles for parent item id: {0}", id)
        url = ('{server}/emby/Users/{userid}/items' +
               '?ParentId=' + id +
               '&Fields=MediaSources' +
               '&format=json')
        result = data_manager.GetContent(url)
        log.debug("PlayAllFiles items: {0}", result)

        # process each item
        items = result["Items"]
        if items is None:
            items = []
        return playAllFiles(items, monitor)

    # select the media source to use
    media_sources = result.get('MediaSources')
    selected_media_source = None

    if media_sources is None or len(media_sources) == 0:
        log.debug("Play Failed! There is no MediaSources data!")
        return

    elif len(media_sources) == 1:
        selected_media_source = media_sources[0]

    elif media_source_id != "":
        for source in media_sources:
            if source.get("Id", "na") == media_source_id:
                selected_media_source = source
                break

    elif len(media_sources) > 1:
        sourceNames = []
        for source in media_sources:
            sourceNames.append(source.get("Name", "na"))

        dialog = xbmcgui.Dialog()
        resp = dialog.select(i18n('select_source'), sourceNames)
        if resp > -1:
            selected_media_source = media_sources[resp]
        else:
            log.debug("Play Aborted, user did not select a MediaSource")
            return

    if selected_media_source is None:
        log.debug("Play Aborted, MediaSource was None")
        return

    seekTime = 0
    auto_resume = int(auto_resume)

    # process user data for resume points
    if auto_resume != -1:
        seekTime = (auto_resume / 1000) / 10000

    elif force_auto_resume:
        userData = result.get("UserData")
        reasonableTicks = int(userData.get("PlaybackPositionTicks")) / 1000
        seekTime = reasonableTicks / 10000

    else:
        userData = result.get("UserData")
        if userData.get("PlaybackPositionTicks") != 0:

            reasonableTicks = int(userData.get("PlaybackPositionTicks")) / 1000
            seekTime = reasonableTicks / 10000
            displayTime = str(timedelta(seconds=seekTime))

            resumeDialog = ResumeDialog("ResumeDialog.xml", addon_path, "default", "720p")
            resumeDialog.setResumeTime("Resume from " + displayTime)
            resumeDialog.doModal()
            resume_result = resumeDialog.getResumeAction()
            del resumeDialog
            log.debug("Resume Dialog Result: {0}", resume_result)

            # check system settings for play action
            # if prompt is set ask to set it to auto resume
            params = {"setting": "myvideos.selectaction"}
            setting_result = json_rpc('Settings.getSettingValue').execute(params)
            log.debug("Current Setting (myvideos.selectaction): {0}", setting_result)
            current_value = setting_result.get("result", None)
            if current_value is not None:
                current_value = current_value.get("value", -1)
            if current_value not in (2,3):
                return_value = xbmcgui.Dialog().yesno(i18n('extra_prompt'), i18n('turn_on_auto_resume?'))
                if return_value:
                    params = {"setting": "myvideos.selectaction", "value": 2}
                    json_rpc_result = json_rpc('Settings.setSettingValue').execute(params)
                    log.debug("Save Setting (myvideos.selectaction): {0}", json_rpc_result)

            if resume_result == 1:
                seekTime = 0
            elif resume_result == -1:
                return

    listitem_props = []
    playback_type = "0"
    playurl = None
    play_session_id = id_generator()
    log.debug("play_session_id: {0}", play_session_id)

    # check if strm file, path will contain contain strm contents
    if selected_media_source.get('Container') == 'strm':
        playurl, listitem_props = PlayUtils().getStrmDetails(selected_media_source)
        if playurl is None:
            return

    if not playurl:
        playurl, playback_type = PlayUtils().getPlayUrl(id, selected_media_source, force_transcode, play_session_id)

    log.debug("Play URL: {0} ListItem Properties: {1}", playurl, listitem_props)

    playback_type_string = "DirectPlay"
    if playback_type == "2":
        playback_type_string = "Transcode"
    elif playback_type == "1":
        playback_type_string = "DirectStream"

    # add the playback type into the overview
    if result.get("Overview", None) is not None:
        result["Overview"] = playback_type_string + "\n" + result.get("Overview")
    else:
        result["Overview"] = playback_type_string

    # add title decoration is needed
    item_title = result.get("Name", i18n('missing_title'))
    list_item = xbmcgui.ListItem(label=item_title)

    if playback_type == "2": # if transcoding then prompt for audio and subtitle
        playurl = audioSubsPref(playurl, list_item, selected_media_source, id, use_default)
        log.debug("New playurl for transcoding: {0}", playurl)

    elif playback_type == "1": # for direct stream add any streamable subtitles
        externalSubs(selected_media_source, list_item, id)

    # add playurl and data to the monitor
    data = {}
    data["item_id"] = id
    data["playback_type"] = playback_type_string
    data["play_session_id"] = play_session_id
    data["play_action_type"] = "play"
    monitor.played_information[playurl] = data
    log.debug("Add to played_information: {0}", monitor.played_information)

    list_item.setPath(playurl)
    list_item = setListItemProps(id, list_item, result, server, listitem_props, item_title)

    playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
    playlist.clear()
    playlist.add(playurl, list_item)
    xbmc.Player().play(playlist)

    send_next_episode_details(result)

    if seekTime == 0:
        return

    count = 0
    while not xbmc.Player().isPlaying():
        log.debug("Not playing yet...sleep for 1 sec")
        count = count + 1
        if count >= 10:
            return
        else:
            xbmc.Monitor().waitForAbort(1)

    seekTime = seekTime - jump_back_amount

    target_seek = (seekTime - 5)
    current_position = 0
    while current_position < target_seek:
        # xbmc.Player().pause()
        xbmc.sleep(100)
        xbmc.Player().seekTime(seekTime)
        xbmc.sleep(100)
        # xbmc.Player().play()
        current_position = xbmc.Player().getTime()
        log.debug("Playback_Start_Seek target:{0} current:{1}", target_seek, current_position)