Example No. 1
def verify_certificate(c, pc):
    # Map signature-algorithm OIDs to the digest applied to the TBS certificate.
    oid_to_hash = {
        '1.2.840.113549.1.1.4': hashlib.md5,       # md5WithRSAEncryption
        '1.2.840.113549.1.1.5': hashlib.sha1,      # sha1WithRSAEncryption
        '1.2.840.113549.1.1.11': hashlib.sha256,   # sha256WithRSAEncryption
        '1.2.840.113549.1.1.12': hashlib.sha384,   # sha384WithRSAEncryption
        '1.2.840.113549.1.1.13': hashlib.sha512,   # sha512WithRSAEncryption
        '1.2.840.10040.4.3': hashlib.sha1,         # dsa-with-sha1
        '2.16.840.1.101.3.4.3.2': hashlib.sha256,  # dsa-with-sha256
        '1.2.840.10045.4.1': hashlib.sha1,         # ecdsa-with-SHA1
        '1.2.840.10045.4.3.2': hashlib.sha256,     # ecdsa-with-SHA256
    }
    c_signature_algorithm = c['signature_algorithm']['algorithm'].dotted
    c_tbs_encoded = c['tbs_certificate'].dump()
    hash_func = oid_to_hash.get(c_signature_algorithm)
    tbs_hash_hex = hash_func(c_tbs_encoded).hexdigest() if hash_func else ''
    pub_key = pc.public_key
    return sig_verify(c.signature, pub_key, tbs_hash_hex)
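
A hypothetical invocation, as a sketch only: the dict-style field access above matches asn1crypto certificate objects, the file paths are placeholders, and sig_verify is assumed to be defined elsewhere.

from asn1crypto import x509  # assumed library, based on the access style above

with open('leaf.der', 'rb') as f:       # placeholder path
    cert = x509.Certificate.load(f.read())
with open('issuer.der', 'rb') as f:     # placeholder path
    issuer = x509.Certificate.load(f.read())
ok = verify_certificate(cert, issuer)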
Example No. 2
    def test_get(self):
        username = "******"
        email = "mailto:[email protected]"
        password = "******"
        auth = "Basic %s" % base64.b64encode("%s:%s" % (username, password))
        form = {'username':username,'email': email,'password':password,'password2':password}
        response = self.client.post(reverse(views.register),form, X_Experience_API_Version="1.0.0")        

        r = self.client.get(self.url, self.testparams1, X_Experience_API_Version="1.0.0", Authorization=self.auth)
        self.assertEqual(r.status_code, 200)
        robj = ast.literal_eval(r.content)
        self.assertEqual(robj['test'], self.teststate1['test'])
        self.assertEqual(robj['obj']['agent'], self.teststate1['obj']['agent'])
        self.assertEqual(r['etag'], '"%s"' % hashlib.sha1(r.content).hexdigest())

        r2 = self.client.get(self.url, self.testparams2, X_Experience_API_Version="1.0.0", Authorization=self.auth)
        self.assertEqual(r2.status_code, 200)
        robj2 = ast.literal_eval(r2.content)
        self.assertEqual(robj2['test'], self.teststate2['test'])
        self.assertEqual(robj2['obj']['agent'], self.teststate2['obj']['agent'])
        self.assertEqual(r2['etag'], '"%s"' % hashlib.sha1(r2.content).hexdigest())
        
        r3 = self.client.get(self.url, self.testparams3, X_Experience_API_Version="1.0.0", Authorization=self.auth)
        self.assertEqual(r3.status_code, 200)
        robj3 = ast.literal_eval(r3.content)
        self.assertEqual(robj3['test'], self.teststate3['test'])
        self.assertEqual(robj3['obj']['agent'], self.teststate3['obj']['agent'])
        self.assertEqual(r3['etag'], '"%s"' % hashlib.sha1(r3.content).hexdigest())

        r4 = self.client.get(self.url, self.testparams4, X_Experience_API_Version="1.0.0", Authorization=auth)
        self.assertEqual(r4.status_code, 200)
        robj4 = ast.literal_eval(r4.content)
        self.assertEqual(robj4['test'], self.teststate4['test'])
        self.assertEqual(robj4['obj']['agent'], self.teststate4['obj']['agent'])
        self.assertEqual(r4['etag'], '"%s"' % hashlib.sha1(r4.content).hexdigest())
Example No. 3
    def multi_file(self, basePath, check_md5=False):
        """
        Generate multi-file torrent
          check_md5: adds md5sum to the torrentlist
          basePath: path to folder
        Torrent name will automatically be basePath
        """
        if 'length' in self.tdict['info']:
            raise TypeError('Cannot add multi-file to single-file torrent')
        if basePath.endswith('/'):
            basePath = basePath[:-1]
        realPath = path.abspath(basePath)
        toGet = []
        fileList = []
        info_pieces = ''
        data = ''
        for root, subdirs, files in walk(realPath):
            for f in files:
                subPath = path.relpath(root, start=realPath).split('/')
                subPath.append(f)
                subPath = [str(p) for p in subPath]
                toGet.append(subPath)
        for pathList in toGet:
            length = 0
            filePath = '/'.join(pathList)
            if check_md5:
                md5sum = md5()
            fileDict = {
                'path': pathList,
                # use the file size directly instead of reading the whole file
                'length': path.getsize(path.join(basePath, filePath))
            }
            with open(path.join(basePath, filePath), "rb") as fn:
                while True:
                    filedata = fn.read(self.piece_length)

                    if len(filedata) == 0:
                        break
                    length += len(filedata)

                    data += filedata

                    if len(data) >= self.piece_length:
                        info_pieces += sha1(data[:self.piece_length]).digest()
                        data = data[self.piece_length:]

                    if check_md5:
                        md5sum.update(filedata)
                        fileDict['md5sum'] = md5sum.hexdigest()
            fileList.append(fileDict)
        if len(data) > 0:
            info_pieces += sha1(data).digest()
        self.tdict['info'].update(
            {
                'name': str(path.basename(realPath)),
                'files': fileList,
                'pieces': info_pieces
            }
        )
        info_hash = sha1(bencode(self.tdict['info'])).hexdigest()
        return({'Created': info_hash})
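
A minimal standalone sketch of the piece-hashing idea used above: concatenate the files' bytes and emit one SHA1 digest per fixed-size piece. The names here are illustrative, not taken from the class above.

import hashlib

def piece_hashes(chunks, piece_length):
    data, pieces = b'', b''
    for chunk in chunks:
        data += chunk
        while len(data) >= piece_length:
            pieces += hashlib.sha1(data[:piece_length]).digest()
            data = data[piece_length:]
    if data:  # the final short piece is hashed as-is
        pieces += hashlib.sha1(data).digest()
    return pieces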
Example No. 4
def showpaste(db, id, lang=None):
    """
    Show the highlighted entry in the browser. If the entry is password
    protected, display a password prompt and compare the submitted password
    against the stored hash in the database
    """

    paste = _get_paste(db, id)
    if not paste:
        return bottle.HTTPError(404, output='This paste does not exist')
    password = bottle.request.forms.password
    util.log.debug(
        '%s == %s ? %s' % (
            hashlib.sha1(password).hexdigest(), paste.password,
            hashlib.sha1(password).hexdigest() == paste.password,
        )
    )
    if paste.password:
        if not password:
            return template(
                'password_protect',
                url=get_url(),
                title=util.conf.get(util.cfg_section, 'title'),
                version=pasttle.__version__,
            )
        if hashlib.sha1(password).hexdigest() == paste.password:
            bottle.response.content_type = 'text/html'
            return _pygmentize(paste, lang)
        else:
            return bottle.HTTPError(401, output='Wrong password provided')
    else:
        return _pygmentize(paste, lang)
Example No. 5
    def process_data_line(self, line, duplicate):
        line_set = line.split(",")

        if len(line_set) != self.attr_size:
            raise Exception("feature size[%d] not match attribute size[%d]"
                            % (len(line_set), self.attr_size))

        if duplicate == 1:   # remove duplicate features with the same score
            hash = sha1("".join(line_set)).hexdigest()
            if hash in self.duplicate_set:
                self.duplicate_set[hash] += 1
                return

            else:
                self.duplicate_set[hash] = 1

        elif duplicate == 2:   # remove duplicate features, then average the score
            hash = sha1("".join(line_set[:-1])).hexdigest()
            if hash in self.duplicate_set:
                self.duplicate_set[hash]['count'] += 1
                self.duplicate_set[hash]['score'].append(int(line_set[-1]))
                return

            else:
                self.duplicate_set[hash] = {}
                self.duplicate_set[hash]['count'] = 1
                self.duplicate_set[hash]['score'] = [int(line_set[-1])]
                self.duplicate_set[hash]['record'] = line_set

        self.data_size += 1
        self.data.append(line_set)
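
A standalone sketch of the dedup scheme above (illustrative data): each row is fingerprinted by the SHA1 of its joined fields, all of them in mode 1, all but the trailing score in mode 2.

from hashlib import sha1

seen = {}
for row in [["f1", "f2", "3"], ["f1", "f2", "3"]]:
    key = sha1("".join(row).encode()).hexdigest()  # mode 1: include the score
    seen[key] = seen.get(key, 0) + 1
# seen now maps each fingerprint to its occurrence count ({...: 2} here)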
Example No. 6
def generate_doc(db, prefix, suffix_length=12, id_string="%s%s", max_retry=100, data={}):
    """Generate doc with unique ID, based on provided prefix and random suffix. Retries on duplicate.
    **Deprecated since couchdbcurl.client.Document.create() method.**
    """

    assert not "_id" in data
    assert not "_rev" in data
    assert max_retry > 0
    assert type(max_retry) == int

    # doc = data.copy()

    doc_id = id_string % (prefix, "")  # first attempt uses the bare prefix

    while True:

        try:
            db[doc_id] = data
            break
        except ResourceConflict:
            pass

        max_retry -= 1
        if max_retry < 0:
            raise Exception("Retry-limit reached during document generation")

        rand = hashlib.sha1(str(random.random())).hexdigest()[:suffix_length]
        doc_id = id_string % (prefix, "_%s" % rand)
Example No. 7
    def do_coffee_compile(self, opts, timestamp=False, ignore_errors=False):
        from calibre.utils.serve_coffee import compile_coffeescript
        src_files = {}
        for src in self.COFFEE_DIRS:
            for f in glob.glob(self.j(self.SRC, __appname__, src,
                '*.coffee')):
                bn = os.path.basename(f).rpartition('.')[0]
                arcname = src.replace('/', '.') + '.' + bn + '.js'
                try:
                    with open(f, 'rb') as fs:
                        src_files[arcname] = (f, hashlib.sha1(fs.read()).hexdigest())
                except EnvironmentError:
                    time.sleep(0.1)
                    with open(f, 'rb') as fs:
                        src_files[arcname] = (f, hashlib.sha1(fs.read()).hexdigest())

        existing = {}
        dest = self.j(self.RESOURCES, 'compiled_coffeescript.zip')
        if os.path.exists(dest):
            with zipfile.ZipFile(dest, 'r') as zf:
                existing_hashes = {}
                raw = zf.comment
                if raw:
                    existing_hashes = json.loads(raw)
                for info in zf.infolist():
                    if info.filename in existing_hashes and src_files.get(info.filename, (None, None))[1] == existing_hashes[info.filename]:
                        existing[info.filename] = (zf.read(info), info, existing_hashes[info.filename])

        todo = set(src_files) - set(existing)
        updated = {}
        for arcname in todo:
            name = arcname.rpartition('.')[0]
            print ('\t%sCompiling %s'%(time.strftime('[%H:%M:%S] ') if
                        timestamp else '', name))
            src, sig = src_files[arcname]
            js, errors = compile_coffeescript(open(src, 'rb').read(), filename=src)
            if errors:
                print ('\n\tCompilation of %s failed'%name)
                for line in errors:
                    print >>sys.stderr, line
                if ignore_errors:
                    js = u'# Compilation from coffeescript failed'
                else:
                    raise SystemExit(1)
            else:
                if opts.show_js:
                    self.show_js(js)
                    print ('#'*80)
                    print ('#'*80)
            zi = zipfile.ZipInfo()
            zi.filename = arcname
            zi.date_time = time.localtime()[:6]
            updated[arcname] = (js.encode('utf-8'), zi, sig)
        if updated:
            hashes = {}
            with zipfile.ZipFile(dest, 'w', zipfile.ZIP_STORED) as zf:
                for raw, zi, sig in sorted(chain(updated.itervalues(), existing.itervalues()), key=lambda x: x[1].filename):
                    zf.writestr(zi, raw)
                    hashes[zi.filename] = sig
                zf.comment = json.dumps(hashes)
Example No. 8
def signup(request):
	if request.method == 'POST':
		form = SignupForm(request.POST)
		print(form.data)
		print(form.is_valid())
		if form.is_valid():
			form.save()

			username = form.data['username']
			email = form.data['email']
			print(username,email)
			salt = hashlib.sha1(str(random.random()).encode('utf-8')).hexdigest()[:5]
			activation_key = hashlib.sha1((salt+email).encode('utf-8')).hexdigest()
			key_expires = datetime.datetime.today() + datetime.timedelta(1)

			# Create and save user profile
			user = User.objects.get(email=email)
			new_profile = UserProfile(user=user, activation_key=activation_key,
				key_expires=key_expires)
			new_profile.save()

			# Send email with activation key
			email_subject = 'Account confirmation'
			email_body = "Hi %s, thanks for signing up. To activate your account, click this link within \
			48hours http://127.0.0.1:8000/confirm/%s" % (username, activation_key)
			send_mail(email_subject, email_body, '*****@*****.**',
				[email], fail_silently = False)
			return render(request, 'sign_up_success.html')
		else:
			return render(request, 'confirm_expired.html')
	else:
		return render(request, 'sign_up.html')
Example No. 9
def copy_to_host(module):
    compress = module.params.get('compress')
    dest = module.params.get('dest')
    mode = int(module.params.get('mode'), 0)
    sha1 = module.params.get('sha1')
    src = module.params.get('src')

    data = base64.b64decode(src)
    raw_data = zlib.decompress(data) if compress else data

    if sha1:
        if os.path.exists(dest):
            if os.access(dest, os.R_OK):
                with open(dest, 'rb') as f:
                    if hashlib.sha1(f.read()).hexdigest() == sha1:
                        module.exit_json(changed=False)
            else:
                module.exit_json(failed=True, changed=False,
                                 msg='file is not accessible: {}'.format(dest))

        if sha1 != hashlib.sha1(raw_data).hexdigest():
            module.exit_json(failed=True, changed=False,
                             msg='sha1 sum does not match data')

    with os.fdopen(os.open(dest, os.O_WRONLY | os.O_CREAT, mode), 'wb') as f:
        f.write(raw_data)

    module.exit_json(changed=True)
Example No. 10
    def _auth_DATA(self, line):
        
        if self.authMech == 'EXTERNAL':
            self.sendAuthMessage('DATA')
            
        elif self.authMech == 'DBUS_COOKIE_SHA1':
            try:
                data = binascii.unhexlify(line.strip()).decode('ascii')
                
                cookie_context, cookie_id, server_challenge = data.split()

                server_cookie = self._authGetDBusCookie(cookie_context, cookie_id)

                client_challenge = hexlify(hashlib.sha1(os.urandom(8)).digest())

                response = '%s:%s:%s' % (server_challenge,
                                         client_challenge,
                                         server_cookie)

                response = hexlify(hashlib.sha1(response.encode('ascii')).digest())

                reply = client_challenge + ' ' + response
                
                self.sendAuthMessage('DATA ' + hexlify(reply))
            except Exception as e:
                log.msg('DBUS Cookie authentication failed: ' + str(e))
                self.sendAuthMessage('ERROR ' + str(e))
Example No. 11
		def wrapper(self, *args, **kwargs):
		
			#Get the session parameters
			auth_id = self.request.cookies.get('auth_id', '')
			session_id = self.request.cookies.get('session_id', '')
			service = urllib.unquote(args[0])
			
			#Check the db for the session
			session = Session.GetSession(session_id, auth_id)
			
			if session is None:
				logging.info("No Session")
				self.redirect(redirectTo)
				return
			else:
				if session.service is None:
					logging.info("Service is None")
					self.redirect(redirectTo)
					return
				
				if hashlib.sha1(service).hexdigest() != hashlib.sha1(session.service.email).hexdigest():
					# The service in the url is not the same as the sessioned service.
					self.redirect(redirectTo)
					return
				
				result = method(self, *args, **kwargs)
				
			return result
Example No. 12
    def post(self, template_variables={}):
        template_variables = {}

        # validate the fields

        form = SettingPasswordForm(self)

        if not form.validate():
            self.get({"errors": form.errors})
            return

        # validate the password

        user_info = self.current_user
        user_id = user_info["uid"]
        secure_password = hashlib.sha1(form.password_old.data).hexdigest()
        secure_new_password = hashlib.sha1(form.password.data).hexdigest()

        if not user_info["password"] == secure_password:
            template_variables["errors"] = {}
            template_variables["errors"]["error_password"] = [u"当前密码输入有误"]
            self.get(template_variables)
            return

        # continue while validate succeed

        update_result = self.user_model.set_user_password_by_uid(user_id, secure_new_password)
        template_variables["success_message"] = [u"您的用户密码已更新"]
        # update `updated`
        updated = self.user_model.set_user_base_info_by_uid(user_id, {"updated": time.strftime("%Y-%m-%d %H:%M:%S")})
        self.redirect("/setting")
Example No. 13
    def post(self, request, *args, **kwargs):
        serialized = UserSerializer(data=request.DATA)
        if serialized.is_valid():

            user = User.objects.create_user(
                    serialized.initial_data['username'],
                    serialized.initial_data['email'],
                    serialized.initial_data['password'],
                    )
            user.is_active = False
            user.save()

            username = serialized.initial_data['username']
            email = serialized.initial_data['email']

            salt = hashlib.sha1(str(random.random()).encode('utf-8')).hexdigest()[:5]
            activation_key = hashlib.sha1(str(salt).encode('utf-8')+str(email).encode('utf-8')).hexdigest()

            key_expires = datetime.datetime.today() + datetime.timedelta(2)
            user = User.objects.get(username=username)

            new_profile = UserProfile(user=user, activation_key=activation_key, key_expires=key_expires)
            new_profile.save()
            email_subject = "Подтверждение регистрации на instagram-mephi.ru"
            email_body = "Привет %s, спасибо, что зарегистрировался на нашем сайте! Чтобы активировать свой аккаунт зайди" \
                         " по ссылке ниже, у тебя есть 48 часов! \
                                 http://127.0.0.1:8000/confirm/%s" % (username, activation_key)

            send_mail(email_subject, email_body, '*****@*****.**', [email], fail_silently=False)
            return Response(status=status.HTTP_201_CREATED,)
        else:
            return Response(serialized._errors, status=status.HTTP_400_BAD_REQUEST)
Example No. 14
  def testWithoutModifiedPasskey(self):
    """authenticate() a user without a modified passkey"""
    nonce = 'a_nonce'
    nextnonce = 'a_nextnonce'
    response = hashlib.sha1(nonce).hexdigest()
    cnonce = hashlib.sha1(
      hashlib.sha1(nextnonce).hexdigest()).hexdigest()
    user = Prototype()
    user.username = '******'
    user.nonce = nonce
    user.nextnonce = nextnonce
    user.cnonce = cnonce
    user.response = response
    user.passkey = 'y'

    def callback(u):
      assert False, 'Callback called in MISSING_CREDS'

    user = pychap.authenticate(callback, user)

    # nonce
    self.assertEqual(user.nonce, nonce)

    # nextnonce
    self.assertEqual(user.nextnonce, nextnonce)

    # passkey
    self.assertEqual(user.passkey, 'y')

    # authenticated
    self.assertEqual(user.authenticated, False)

    # authmessage
    self.assertEqual(user.message, pychap.UNMODIFIED)
Example No. 15
def file_upload_view_verify(request):
    """
    Use the SHA1 digest hash to verify the uploaded contents.
    """
    form_data = request.POST.copy()
    form_data.update(request.FILES)

    for key, value in form_data.items():
        if key.endswith('_hash'):
            continue
        if key + '_hash' not in form_data:
            continue
        submitted_hash = form_data[key + '_hash']
        if isinstance(value, UploadedFile):
            new_hash = hashlib.sha1(value.read()).hexdigest()
        else:
            new_hash = hashlib.sha1(value.encode()).hexdigest()
        if new_hash != submitted_hash:
            return HttpResponseServerError()

    # Adding large file to the database should succeed
    largefile = request.FILES['file_field2']
    obj = FileModel()
    obj.testfile.save(largefile.name, largefile)

    return HttpResponse('')
Example No. 16
def send_activation_email(request, user):
    username = user.username
    email = user.email
    salt = hashlib.sha1(str(random.random())).hexdigest()[:5]
    activation_key = hashlib.sha1(salt + email).hexdigest()

    # Create and save user profile
    new_profile = UserProfile(user=user, activation_key=activation_key)
    new_profile.save()

    # Send email with activation key
    activation_link = request.META['HTTP_HOST'] + \
        reverse('users:confirm', kwargs={'activation_key': activation_key})
    email_subject = 'Account confirmation'
    email_body = render_to_string('index/activation_email.html',
                    {'username': username, 'activation_link': activation_link,
                    'active_time': new_profile.active_time})

    msg = EmailMultiAlternatives(email_subject, email_body, EMAIL_HOST_USER, [email])
    msg.attach_alternative(email_body, "text/html")

    try:
        Thread(target=msg.send, args=()).start()
    except Exception:
        logger.warning("There was an error sending email to %s's mailbox" % username)
Example No. 17
def register2(request):
    if request.method == "POST":
        form = RegisterForm(request.POST)
        if form.is_valid():
            ## TODO:
            ## 1) Redirect to profile settings
            ## 2) Do email authentication so that we don't let
            ## users do much without authentication. Otherwise
            ## bots could just flood the system.
            ## 3) After user creation, log them in automatically.
            kwargs = {}
            kwargs["username"] = form.cleaned_data.get("username")
            kwargs["password"] = form.cleaned_data.get("password")
            kwargs["email"] = form.cleaned_date.get("email")

            # creating authentication key
            salt = hashlib.sha1(str(random.random())).hexdigest()[:5]
            email = kwargs["email"]
            if isinstance(email, unicode):
                email = email.encode("utf-8")
            kwargs["activation_key"] = hashlib.sha1(salt + email).hexdigest()
            kwargs["email_subject"] = "Authenticate your account"

            form.sendEmail(kwargs)
            form.save(kwargs)

            return redirect("blog.views.post_list")
    else:
        # check if person is authenticated, if so redirect them to
        # Post_List
        if request.user.is_authenticated():
            return redirect("blog.views.post_list")
        else:
            form = RegisterForm()
    return render(request, "blog/register.html", {"form": form})
Example No. 18
    def createFileFeed():
        myChannelId = channelManager.channelcast_db.getMyChannelId()
        community = channelManager._disp_get_community_from_channel_id(myChannelId)

        print >> sys.stderr, "Using community:", community._cid.encode('HEX')

        items = json.load(open(opt.file, 'rb'))
        for item in items:
            try:
                infohash = sha1(item['name']).digest()
            except Exception:
                infohash = sha1(str(random.randint(0, 1000000))).digest()
            message = community._disp_create_torrent(infohash, long(time.time()), unicode(item['name']), ((u'fake.file', 10),), tuple(), update=False, forward=False)

            print >> sys.stderr, "Created a new torrent"

            latest_review = None
            for modification in item['modifications']:
                reviewmessage = community._disp_create_modification('description', unicode(modification['text']), long(time.time()), message, latest_review, update=False, forward=False)

                print >> sys.stderr, "Created a new modification"

                if modification['revert']:
                    community._disp_create_moderation('reverted', long(time.time()), 0, reviewmessage.packet_id, update=False, forward=False)

                    print >> sys.stderr, "Reverted the last modification"
                else:
                    latest_review = reviewmessage
Example No. 19
    def _update(self, hunk):
        if self.payload_sha1 is None:
            # convoluted handling of two newlines crossing hunks
            # XXX write tests for this
            if self._prev_hunk_last_two_bytes.endswith("\n"):
                if hunk.startswith("\n"):
                    self.payload_sha1 = hashlib.sha1()
                    self.payload_sha1.update(hunk[1:])
                elif hunk.startswith("\r\n"):
                    self.payload_sha1 = hashlib.sha1()
                    self.payload_sha1.update(hunk[2:])
            elif self._prev_hunk_last_two_bytes == "\n\r":
                if hunk.startswith("\n"):
                    self.payload_sha1 = hashlib.sha1()
                    self.payload_sha1.update(hunk[1:])
            else:
                m = re.search(r"\n\r?\n", hunk)
                if m is not None:
                    self.payload_sha1 = hashlib.sha1()
                    self.payload_sha1.update(hunk[m.end() :])

            # if we still haven't found start of payload hold on to these bytes
            if self.payload_sha1 is None:
                self._prev_hunk_last_two_bytes = hunk[-2:]
        else:
            self.payload_sha1.update(hunk)

        self.block_sha1.update(hunk)

        self.tempfile.write(hunk)
        self.proxy_dest.sendall(hunk)
        self.len += len(hunk)
Example No. 20
    def implementation_signature(self):
        self._generate_schema(self.t)
        schema_names = ([prop for prop in self.properties_schema] +
                        [at for at in self.attributes_schema])
        schema_hash = hashlib.sha1(';'.join(schema_names))
        templ_hash = hashlib.sha1(self.template_data())
        return (schema_hash.hexdigest(), templ_hash.hexdigest())
Example No. 21
    def send_confirmation(self, user):
        assert user.email

        self.filter(user=user).delete()

        salt = hashlib.sha1(str(random.random()) + settings.SECRET_KEY).hexdigest()[:5]
        confirmation_key = hashlib.sha1(salt + user.email.encode('utf8')).hexdigest()
        try:
            current_site = Site.objects.get_current()
        except Site.DoesNotExist:
            return
        path = reverse('accounts:confirm_email', args=[confirmation_key])
        activate_url = 'http://%s%s' % (unicode(current_site.domain), path)
        context = {
            'user': user,
            'activate_url': activate_url,
            'current_site': current_site,
            'confirmation_key': confirmation_key,
        }
        subject = _('Please confirm your email address for %(site)s') % {'site': current_site.name}
        send_templated_email(user.email, subject, 'accounts/email_confirmation_message.html',
                             context, fail_silently=settings.DEBUG)
        return self.create(
            user=user,
            sent=timezone.now(),
            confirmation_key=confirmation_key)
Example No. 22
    def test_get(self):
        r = self.client.get(reverse(agent_profile), self.testparams1, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
        self.assertEqual(r.status_code, 200)
        
        robj = ast.literal_eval(r.content)
        self.assertEqual(robj['test'], self.testprofile1['test'])
        self.assertEqual(robj['obj']['agent'], self.testprofile1['obj']['agent'])
        self.assertEqual(r['etag'], '"%s"' % hashlib.sha1('%s' % self.testprofile1).hexdigest())

        r2 = self.client.get(reverse(agent_profile), self.testparams2, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
        self.assertEqual(r2.status_code, 200)
        robj2 = ast.literal_eval(r2.content)
        self.assertEqual(robj2['test'], self.testprofile2['test'])
        self.assertEqual(robj2['obj']['agent'], self.testprofile2['obj']['agent'])
        self.assertEqual(r2['etag'], '"%s"' % hashlib.sha1('%s' % self.testprofile2).hexdigest())
        
        r3 = self.client.get(reverse(agent_profile), self.testparams3, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
        self.assertEqual(r3.status_code, 200)
        robj3 = ast.literal_eval(r3.content)
        self.assertEqual(robj3['test'], self.testprofile3['test'])
        self.assertEqual(robj3['obj']['agent'], self.testprofile3['obj']['agent'])
        self.assertEqual(r3['etag'], '"%s"' % hashlib.sha1('%s' % self.testprofile3).hexdigest())

        r4 = self.client.get(reverse(agent_profile), self.testparams4, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
        self.assertEqual(r4.status_code, 200)
        robj4 = ast.literal_eval(r4.content)
        self.assertEqual(robj4['test'], self.otherprofile1['test'])
        self.assertEqual(robj4['obj']['agent'], self.otherprofile1['obj']['agent'])
        self.assertEqual(r4['etag'], '"%s"' % hashlib.sha1('%s' % self.otherprofile1).hexdigest())
Example No. 23
import hashlib

# Assumed module context: RFC 2104 SHA1 block size and XOR pad tables (Python 2 str)
blksize = 64
trans_5c = "".join(chr(x ^ 0x5C) for x in range(256))
trans_36 = "".join(chr(x ^ 0x36) for x in range(256))

def hmac_sha1(key, msg):
	if len(key) > blksize:
		key = hashlib.sha1(key).digest()
	key += chr(0) * (blksize - len(key))
	opad = key.translate(trans_5c)
	ipad = key.translate(trans_36)
	return hashlib.sha1(opad + hashlib.sha1(ipad + msg).digest())
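
For comparison, the standard library's hmac module computes the same MAC; a quick sketch (Python 2 byte strings assumed, matching the function above):

import hmac
import hashlib

mac = hmac.new('secret-key', 'message', hashlib.sha1)
assert mac.hexdigest() == hmac_sha1('secret-key', 'message').hexdigest()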
Example No. 24
    def post(self):
        # post only updates the apikey... it's the place to customize user preferences
        from hashlib import sha1
        from time import time
        user = users.get_current_user()
        r = {}
        if user:
            q = db.GqlQuery("SELECT * FROM ApiKeyStorage WHERE User = :1", user)
            userprefs = q.get()
            if userprefs is None:
                logging.info("Creating user info")
                apikey = sha1("%s-%s" % (user.user_id(), time())).hexdigest()
                ks = ApiKeyStorage(User=user, ApiKey=apikey)
                ks.put()
            else:
                logging.info("Updating user info")
                logging.info(q.count())
                apikey = sha1("%s-%s" % (user.user_id(), time())).hexdigest()
                userprefs.ApiKey = apikey
                userprefs.save()

            _tpl = {'username': user, 'apikey': apikey}
            path = os.path.join(os.path.dirname(__file__), 'admin.html')
            self.response.out.write(template.render(path, _tpl))
            return
        else:
            r['error'] = "No User"
            self.response.out.write(simplejson.dumps(r))
Example No. 25
    def _reset_activation_key(self, resend=False):
        """Resets the activation key

        `resend` whether this key is being reset as the result of a resend confirmation email

        If `resend`, then check the `key_expires` timestamp to see if we should reuse the existing activation key, or generate a new one
        """
        should_reset = True
        if resend and self.activation_key and self.key_expires:
            now = utcnow()
            if now < self.key_expires:
                # do not reset key if remaining time has not fallen below threshold
                remaining_time = self.key_expires - now
                threshold = datetime.timedelta(hours=EMAIL_ACTIVATION_KEY_REUSE_THRESHOLD_HOURS)
                should_reset = remaining_time < threshold

        if should_reset:
            user = self.user
            salt = sha1(str(random.random())).hexdigest()[:5]
            activation_key = sha1(salt + user.username).hexdigest()
            key_expires = utcnow() + datetime.timedelta(hours=EMAIL_ACTIVATION_KEY_EXPIRATION_HOURS)

            self.activation_key = activation_key
            self.key_expires = key_expires
            self.save()
        else:
            # no need to reset activation key, use the same one
            pass
Example No. 26
  def test_creation(self):

    h1 = hashlib.sha1('derp').hexdigest()
    h2 = hashlib.sha1('herp').hexdigest()
    now = nanotime.now()

    sr = serial.SerialRepresentation()
    sr['key'] = '/A'
    sr['hash'] = h1
    sr['parent'] = h2
    sr['created'] = now.nanoseconds()
    sr['committed'] = now.nanoseconds()
    sr['attributes'] = {'str' : {'value' : 'derp'} }
    sr['type'] = 'Hurr'

    v = Version(sr)
    self.assertEqual(v.type, 'Hurr')
    self.assertEqual(v.hash, h1)
    self.assertEqual(v.parent, h2)
    self.assertEqual(v.created, now)
    self.assertEqual(v.committed, now)
    self.assertEqual(v.shortHash(5), h1[0:5])
    self.assertEqual(v.attributeValue('str'), 'derp')
    self.assertEqual(v.attribute('str')['value'], 'derp')
    self.assertEqual(v['str']['value'], 'derp')
    self.assertEqual(hash(v), hash(fasthash.hash(h1)))
    self.assertEqual(v, Version(sr))
    self.assertFalse(v.isBlank)

    self.assertRaises(KeyError, v.attribute, 'fdsafda')
    self.assertRaises(TypeError, cmp, v, 'fdsafda')
Example No. 27
  def test_model(self):

    h1 = hashlib.sha1('derp').hexdigest()
    h2 = hashlib.sha1('herp').hexdigest()

    attrs = {'first' : {'value':'Herp'},
             'last' : {'value':'Derp'},
             'phone' : {'value': '123'},
             'age' : {'value': 19},
             'gender' : {'value' : 'Male'}}

    sr = serial.SerialRepresentation()
    sr['key'] = '/Person:PersonA'
    sr['hash'] = h1
    sr['parent'] = h2
    sr['created'] = nanotime.now().nanoseconds()
    sr['committed'] = sr['created']
    sr['attributes'] = attrs
    sr['type'] = 'Person'

    ver = Version(sr)
    instance = Person(ver)

    self.assertEqual(instance.__dstype__, ver.type)
    self.assertEqual(instance.version, ver)
    self.assertFalse(instance.isDirty())
    self.assertTrue(instance.isPersisted())
    self.assertTrue(instance.isCommitted())

    self.assertEqual(instance.key, Key('/Person:PersonA'))
    self.assertEqual(instance.first, 'Herp')
    self.assertEqual(instance.last, 'Derp')
    self.assertEqual(instance.phone, '123')
    self.assertEqual(instance.age, 19)
    self.assertEqual(instance.gender, 'Male')
Example No. 28
def build_stamp(pyxes):
    """ Cythonize files in `pyxes`, return pyx, C filenames, hashes

    Parameters
    ----------
    pyxes : sequence
        sequence of filenames of files on which to run Cython

    Returns
    -------
    pyx_defs : dict
        dict has key, value pairs of <pyx_filename>, <pyx_info>, where
        <pyx_info> is a dict with key, value pairs of "pyx_hash", <pyx file SHA1
        hash>; "c_filename", <c filemane>; "c_hash", <c file SHA1 hash>.
    """
    pyx_defs = {}
    for source in pyxes:
        base, ext = splitext(source)
        pyx_hash = sha1(open(source, 'rt').read()).hexdigest()
        c_filename = base + '.c'
        check_call('cython ' + source, shell=True)
        c_hash = sha1(open(c_filename, 'rt').read()).hexdigest()
        pyx_defs[source] = dict(pyx_hash=pyx_hash,
                                c_filename=c_filename,
                                c_hash=c_hash)
    return pyx_defs
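
A hypothetical call (the .pyx file names are invented; the cython executable must be on PATH):

pyx_defs = build_stamp(['_fast.pyx', '_utils.pyx'])
for pyx, info in pyx_defs.items():
    print(pyx, info['pyx_hash'][:8], '->', info['c_filename'], info['c_hash'][:8])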
Example No. 29
    def __add__(self, file_data1, file_data2):
        # file_data1's target is file_data2 and
        # file_data2's target is file_data1

        self.cursor.execute(
            "INSERT INTO patch (patch_id, patch_data) VALUES (?, ?)",
            [None,
             base64.b64encode(UndoDB.build_patch(file_data1,
                                                 file_data2)).decode('ascii')])
        patch_id = self.cursor.lastrowid
        try:
            self.cursor.execute("""INSERT INTO source_file (
source_checksum, source_size, target_size, patch_id) values (?, ?, ?, ?)""",
                                [sha1(file_data1).hexdigest().decode('ascii'),
                                 len(file_data1),
                                 len(file_data2),
                                 patch_id])
            self.cursor.execute("""INSERT INTO source_file (
source_checksum, source_size, target_size, patch_id) values (?, ?, ?, ?)""",
                                [sha1(file_data2).hexdigest().decode('ascii'),
                                 len(file_data2),
                                 len(file_data1),
                                 patch_id])
            self.db.commit()
        except sqlite3.IntegrityError:
            self.db.rollback()
Example No. 30
    def _makeRelayCrypto(self, secret_input):
        '''Derive shared key material using HKDF from secret_input.

        :returns: **oppy.crypto.relaycrypto.RelayCrypto** initialized with
            shared key data
        '''
        prk = hkdf.hkdf_extract(salt=T_KEY, input_key_material=secret_input,
                                hash=hashlib.sha256)
        km = hkdf.hkdf_expand(pseudo_random_key=prk, info=M_EXPAND,
                              length=72, hash=hashlib.sha256)

        df = km[: DIGEST_LEN]
        db = km[DIGEST_LEN : DIGEST_LEN * 2]
        kf = km[DIGEST_LEN * 2 : DIGEST_LEN * 2 + KEY_LEN]
        kb = km[DIGEST_LEN * 2 + KEY_LEN : DIGEST_LEN * 2 + KEY_LEN * 2]

        f_digest = hashlib.sha1(df)
        b_digest = hashlib.sha1(db)
        f_cipher = util.makeAES128CTRCipher(kf)
        b_cipher = util.makeAES128CTRCipher(kb)

        return RelayCrypto(forward_digest=f_digest,
                           backward_digest=b_digest,
                           forward_cipher=f_cipher,
                           backward_cipher=b_cipher)
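
For reference, the slicing above assumes Tor's usual sizes, so the 72 bytes of key material split cleanly into two digest seeds and two keys; a sanity-check sketch:

DIGEST_LEN, KEY_LEN = 20, 16  # assumed: SHA1 digest size, AES-128 key size
assert DIGEST_LEN * 2 + KEY_LEN * 2 == 72  # matches length=72 requested from HKDF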
Example No. 31
    cert_der = ssl.PEM_cert_to_DER_cert(cert_pem)
except socket.error as e:
    if str(e).find("[Errno 111]") is not -1:
        print("ERROR: Could not connect to %s:%s" % (args.host, args.port))
    elif str(e).find("[Errno 104]") is not -1:
        print(
            "ERROR: Mosquitto broker does not appear to be using TLS at %s:%s"
            % (args.host, args.port))
    print(e)
    sys.exit(1)

matches = []
for k in signatures:
    fingerprint = binascii.a2b_hex(signatures[k].replace(" ", ""))
    if cert_der.find(fingerprint) != -1:
        matches.append(k)
if not matches:
    print("WARNING: Couldn't identify signature algorithm")
else:
    print("INFO: Found signature algorithm: " + ", ".join(matches))
    for sig in ("rsa_sha384", "rsa_sha512", "ecdsa_sha384", "ecdsa_sha512"):
        if sig in matches:
            print(
                "ERROR: MQTT broker is using a %s signature which will not work with ESP8266"
                % (sig))

sha1 = hashlib.sha1(cert_der).hexdigest()

print("const uint8_t MQTT_FINGERPRINT[] = {0x" +
      ",0x".join([sha1[i:i + 2] for i in range(0, len(sha1), 2)]) + "};")
Example No. 32
def __sha1(f):
    sha1obj = hashlib.sha1()
    sha1obj.update(f.read())
    return sha1obj.hexdigest()
Example No. 33
def hash_influx_server(host, port, user, db, measurement, tags):
    hash_source = "%s%s%s%s%s%s" % (host, port, user, db, measurement, tags)
    h = hashlib.sha1(hash_source.encode('utf-8'))
    return h.hexdigest()
Example No. 34
    def _compute_checksum(self, bin_data):
        """ Compute the checksum for the given data
            :param bin_data : data in its binary form
        """
        # an empty file has a checksum too (for caching)
        return hashlib.sha1(bin_data or b'').hexdigest()
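
For reference, the empty-input case mentioned in the comment yields the well-known SHA1 of zero bytes:

import hashlib

hashlib.sha1(b'').hexdigest()  # 'da39a3ee5e6b4b0d3255bfef95601890afd80709'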
Example No. 35
    print('Updated %d localization entries' % (loccount,))
    
if opts.resetpw:
    if '@' in opts.resetpw:
        player = db.players.find_one({ 'email':opts.resetpw })
    else:
        player = db.players.find_one({ 'name':opts.resetpw })
        if not player:
            player = db.players.find_one({ 'namekey':opts.resetpw })
    if not player:
        raise Exception('No such player: ' + opts.resetpw)
    print('Enter password for %s (%s)' % (player['name'], player['email']))
    import getpass
    import hashlib
    
    newpw = getpass.getpass()
    newpw2 = getpass.getpass()
    if newpw != newpw2:
        raise Exception('Passwords do not match')

    password = unicodedata.normalize('NFKC', newpw)
    password = password.encode()  # to UTF8 bytes
    pwsalt = binascii.hexlify(os.urandom(8))
    saltedpw = pwsalt + b':' + password
    cryptpw = hashlib.sha1(saltedpw).hexdigest().encode()
    
    db.players.update({'_id':player['_id']},
                      {'$set':{'pwsalt': pwsalt, 'password': cryptpw}})
    print('Password set.')
    
Example No. 36
def create_image_lists(image_dir, testing_percentage, validation_percentage):
    """Builds a list of training images from the file system.

  Analyzes the sub folders in the image directory, splits them into stable
  training, testing, and validation sets, and returns a data structure
  describing the lists of images for each label and their paths.

  Args:
    image_dir: String path to a folder containing subfolders of images.
    testing_percentage: Integer percentage of the images to reserve for tests.
    validation_percentage: Integer percentage of images reserved for validation.

  Returns:
    A dictionary containing an entry for each label subfolder, with images split
    into training, testing, and validation sets within each label.
  """
    if not gfile.Exists(image_dir):
        print("Image directory '" + image_dir + "' not found.")
        return None
    result = {}
    sub_dirs = [x[0] for x in gfile.Walk(image_dir)]
    # The root directory comes first, so skip it.
    is_root_dir = True
    for sub_dir in sub_dirs:
        if is_root_dir:
            is_root_dir = False
            continue
        extensions = ['jpg', 'jpeg', 'JPG', 'JPEG']
        file_list = []
        dir_name = os.path.basename(sub_dir)
        if dir_name == image_dir:
            continue
        print("Looking for images in '" + dir_name + "'")
        for extension in extensions:
            file_glob = os.path.join(image_dir, dir_name, '*.' + extension)
            file_list.extend(gfile.Glob(file_glob))
        if not file_list:
            print('No files found')
            continue
        if len(file_list) < 20:
            print(
                'WARNING: Folder has less than 20 images, which may cause issues.'
            )
        elif len(file_list) > MAX_NUM_IMAGES_PER_CLASS:
            print(
                'WARNING: Folder {} has more than {} images. Some images will '
                'never be selected.'.format(dir_name,
                                            MAX_NUM_IMAGES_PER_CLASS))
        label_name = re.sub(r'[^a-z0-9]+', ' ', dir_name.lower())
        training_images = []
        testing_images = []
        validation_images = []
        for file_name in file_list:
            base_name = os.path.basename(file_name)
            # We want to ignore anything after '_nohash_' in the file name when
            # deciding which set to put an image in; this gives the data set creator
            # a way of grouping photos that are close variations of each other. For
            # example, this is used in the plant disease data set to group multiple
            # pictures of the same leaf.
            hash_name = re.sub(r'_nohash_.*$', '', file_name)
            # This looks a bit magical, but we need to decide whether this file should
            # go into the training, testing, or validation sets, and we want to keep
            # existing files in the same set even if more files are subsequently
            # added.
            # To do that, we need a stable way of deciding based on just the file name
            # itself, so we do a hash of that and then use that to generate a
            # probability value that we use to assign it.
            hash_name_hashed = hashlib.sha1(
                compat.as_bytes(hash_name)).hexdigest()
            percentage_hash = ((int(hash_name_hashed, 16) %
                                (MAX_NUM_IMAGES_PER_CLASS + 1)) *
                               (100.0 / MAX_NUM_IMAGES_PER_CLASS))
            if percentage_hash < validation_percentage:
                validation_images.append(base_name)
            elif percentage_hash < (testing_percentage +
                                    validation_percentage):
                testing_images.append(base_name)
            else:
                training_images.append(base_name)
        result[label_name] = {
            'dir': dir_name,
            'training': training_images,
            'testing': testing_images,
            'validation': validation_images,
        }
    return result
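
A standalone sketch of the stable bucketing step above (the file name is invented, and MAX_NUM_IMAGES_PER_CLASS is assumed to be 2 ** 27 - 1, the value such retraining scripts typically use):

import hashlib
import re

MAX_NUM_IMAGES_PER_CLASS = 2 ** 27 - 1  # assumption, see lead-in
file_name = 'roses/img_0042_nohash_1.jpg'  # invented example
hash_name = re.sub(r'_nohash_.*$', '', file_name)
hash_hex = hashlib.sha1(hash_name.encode('utf-8')).hexdigest()
percentage_hash = ((int(hash_hex, 16) % (MAX_NUM_IMAGES_PER_CLASS + 1)) *
                   (100.0 / MAX_NUM_IMAGES_PER_CLASS))
# percentage_hash is a stable value in [0, 100) for this file name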
Example No. 37
	def _createHash(self, url):
		"""Return a short numeric hash of the URL (at most 8 digits)."""
		url_hash = int(hashlib.sha1(url).hexdigest(), 16) % (10 ** 8)
		return url_hash
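
A quick illustration (placeholder URL): reducing the 160-bit SHA1 value modulo 10 ** 8 leaves an integer of at most 8 decimal digits.

import hashlib

url = b'https://example.com/some/page'  # placeholder
short_id = int(hashlib.sha1(url).hexdigest(), 16) % (10 ** 8)
assert 0 <= short_id < 10 ** 8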
Example No. 38
def main():
    argp = ArgumentParser(
            description="Crypt - Decrypt Tool",
            usage="./crypt-decrypt.py [options] [-w word/hash] \nSamples: ./crypt-decrypt.py")
    # argparse dropped the `version` constructor argument; register it as an action instead
    argp.add_argument('--version', action='version',
                      version="Crypt - Decrypt Tool v" + VERSION)

    argp.add_argument('-e', '--encrypt', dest='encrypt', action='store_true',
                      help='Encrypt word/s (offline mode)')

    argp.add_argument('-d', '--decrypt', dest='decrypt', action='store_true',
                      help='Decrypt a hash')

    argp.add_argument('-i', '--identify', dest='identify', action='store_true',
                      help='Identify type of hash')

    argp.add_argument('-t', '--hash-type', dest='type',
                      help='Hash type to encrypt/decrypt word/hash')

    argp.add_argument('-w', '--word', dest='word',
                      help='Word or hash to encrypt/decrypt/identify')

    argp.add_argument('-o', '--online', dest='online', action='store_true',
                      help='Decrypt online mode')

    argp.add_argument('-f', '--offline', dest='offline', action='store_true',
                      help='Decrypt offline mode')

    argp.add_argument('-l', '--wordlist', dest='wordlist',
                      help='Dictionary to decrypt hash (offline mode only)')

    argp.add_argument('-a', '--all', dest='all', action='store_true',
                      help='Encrypt word/s with all hash types')

    argp.add_argument('-F', '--file', dest='file',
                      help='File with hashes to decrypt/identify')

    args = argp.parse_args()

    if args.encrypt and not args.all and not args.decrypt and not args.identify:  # ENCRYPTER
        alg = args.type.lower()
        word = args.word
        print("\n [+]" + colors.INFO + " Word: " + colors.ENDC + word)
        print(" [+]" + colors.INFO + " Type: " + colors.ENDC + alg)
        if alg == "md5":
            encrypted = hashlib.md5(word).hexdigest()
            print(colors.GREEN + "   [i]" + colors.INFO + " Hash: " + colors.GREEN + encrypted + colors.ENDC + "\n")
        elif alg == "sha1":
            encrypted = hashlib.sha1(word).hexdigest()
            print(colors.GREEN + "   [i]" + colors.INFO + " Hash: " + colors.GREEN + encrypted + colors.ENDC + "\n")
        elif alg == "sha224":
            encrypted = hashlib.sha224(word).hexdigest()
            print(colors.GREEN + "   [i]" + colors.INFO + " Hash: " + colors.GREEN + encrypted + colors.ENDC + "\n")
        elif alg == "sha256":
            encrypted = hashlib.sha256(word).hexdigest()
            print(colors.GREEN + "   [i]" + colors.INFO + " Hash: " + colors.GREEN + encrypted + colors.ENDC + "\n")
        elif alg == "sha384":
            encrypted = hashlib.sha384(word).hexdigest()
            print(colors.GREEN + "   [i]" + colors.INFO + " Hash: " + colors.GREEN + encrypted + colors.ENDC + "\n")
        elif alg == "sha512":
            encrypted = hashlib.sha512(word).hexdigest()
            print(colors.GREEN + "   [i]" + colors.INFO + " Hash: " + colors.GREEN + encrypted + colors.ENDC + "\n")
        elif alg == "ntlm":
            encrypted = hashlib.new("md4", word.encode("utf-16le")).digest()
            print(colors.GREEN + "   [i]" + colors.INFO + " Hash: " + colors.GREEN + binascii.hexlify(encrypted).upper() + colors.ENDC + "\n")
        elif alg == "lm":
            encrypted = lmhash.encrypt(word).upper()
            print(colors.GREEN + "   [i]" + colors.INFO + " Hash: " + colors.GREEN + encrypted + colors.ENDC + "\n")
        else:
            print (colors.FAIL + colors.BOLD + "\n[!] Incorrect algorithm!!\n" + colors.ENDC)

    elif args.encrypt and args.all and not args.decrypt and not args.identify:  # ALL TYPES
        word = args.word
        print("\n [+]" + colors.INFO + " Word: " + colors.ENDC + word)
        print("\n [+]" + colors.INFO + " Type: " + colors.ENDC + "md5")
        print(colors.GREEN + "   [i]" + colors.INFO + " Hash: " + colors.GREEN + hashlib.md5(word).hexdigest() + colors.ENDC)
        print("\n [+]" + colors.INFO + " Type: " + colors.ENDC + "sha1")
        print(colors.GREEN + "   [i]" + colors.INFO + " Hash: " + colors.GREEN + hashlib.sha1(word).hexdigest() + colors.ENDC)
        print("\n [+]" + colors.INFO + " Type: " + colors.ENDC + "sha224")
        print(colors.GREEN + "   [i]" + colors.INFO + " Hash: " + colors.GREEN + hashlib.sha224(word).hexdigest() + colors.ENDC)
        print("\n [+]" + colors.INFO + " Type: " + colors.ENDC + "sha256")
        print(colors.GREEN + "   [i]" + colors.INFO + " Hash: " + colors.GREEN + hashlib.sha256(word).hexdigest() + colors.ENDC)
        print("\n [+]" + colors.INFO + " Type: " + colors.ENDC + "sha384")
        print(colors.GREEN + "   [i]" + colors.INFO + " Hash: " + colors.GREEN + hashlib.sha384(word).hexdigest() + colors.ENDC)
        print("\n [+]" + colors.INFO + " Type: " + colors.ENDC + "sha512")
        print(colors.GREEN + "   [i]" + colors.INFO + " Hash: " + colors.GREEN + hashlib.sha512(word).hexdigest() + colors.ENDC)
        print("\n [+]" + colors.INFO + " Type: " + colors.ENDC + "lm")
        print(colors.GREEN + "   [i]" + colors.INFO + " Hash: " + colors.GREEN + lmhash.encrypt(word).upper() + colors.ENDC)
        print("\n [+]" + colors.INFO + " Type: " + colors.ENDC + "ntlm")
        encrypted = hashlib.new("md4", word.encode("utf-16le")).digest()
        print(colors.GREEN + "   [i]" + colors.INFO + " Hash: " + colors.GREEN + binascii.hexlify(encrypted).upper() + colors.ENDC)

    elif args.decrypt and not args.encrypt and not args.identify and not args.file:  # DECRYPTER
        alg = args.type.lower()
        hashs = args.word
        if args.online and not args.offline:  # Online
            print("\n [+]" + colors.INFO + " Hash: " + colors.ENDC + hashs)
            print(" [+]" + colors.INFO + " Type: " + colors.ENDC + alg)
            if alg == "md5":
                md5OnDecrypt(hashs)
                md5OnDecrypt3(hashs)
                md5OnDecrypt4(hashs)
                MultiOnDecrypt2(hashs)
                MultiOnDecrypt3(hashs)
            elif alg == "sha1":
                MultiOnDecrypt2(hashs)
                MultiOnDecrypt3(hashs)
            elif alg == "sha224":
                MultiOnDecrypt3(hashs)
            elif alg == "sha256":
                web = "Sha256/"
                MultiOnDecrypt(hashs, web)
                MultiOnDecrypt2(hashs)
                MultiOnDecrypt3(hashs)
            elif alg == "sha384":
                web = "Sha384/"
                MultiOnDecrypt(hashs, web)
                MultiOnDecrypt2(hashs)
                MultiOnDecrypt3(hashs)
            elif alg == "sha512":
                web = "Sha512/"
                MultiOnDecrypt(hashs, web)
                MultiOnDecrypt2(hashs)
                MultiOnDecrypt3(hashs)
            elif alg == "ntlm":
                web = "Ntlm/"
                MultiOnDecrypt(hashs, web)
            elif alg == "lm":
                print(colors.FAIL + colors.BOLD + "\n[!] Hash type not supported in this mode" + colors.ENDC + "\n")
            else:
                print (colors.FAIL + colors.BOLD + "\n[!] Incorrect algorithm!!" + colors.ENDC)

        elif args.offline and not args.online:  # Offline
            print("\n [+]" + colors.INFO + " Hash: " + colors.ENDC + hashs)
            print(" [+]" + colors.INFO + " Type: " + colors.ENDC + alg)
            if alg == "md5":
                MultiOffDecrypt(hashs, args.wordlist, "md5")
            elif alg == "sha1":
                MultiOffDecrypt(hashs, args.wordlist, "sha1")
            elif alg == "sha224":
                MultiOffDecrypt(hashs, args.wordlist, "sha224")
            elif alg == "sha256":
                MultiOffDecrypt(hashs, args.wordlist, "sha256")
            elif alg == "sha384":
                MultiOffDecrypt(hashs, args.wordlist, "sha384")
            elif alg == "sha512":
                MultiOffDecrypt(hashs, args.wordlist, "sha512")
            elif alg == "ntlm":
                print(colors.FAIL + colors.BOLD + "\n[!] Hash type not supported in this mode" + colors.ENDC + "\n")
            elif alg == "lm":
                lmOffDecryptFile(hashs, args.wordlist)
            else:
                print (colors.FAIL + colors.BOLD + "\n[!] Incorrect algorithm!!" + colors.ENDC)

    elif args.identify and not args.encrypt and not args.decrypt:  # IDENTIFIER
        if args.file:
            with open(args.file, 'r') as f:
                for h in f.readlines():
                    print("\n [+]" + colors.INFO + " Hash: " + colors.GREEN + h.strip("\n") + colors.ENDC)
                    r = commands.getoutput('./hash-identifier.py %s' % h.strip('\n'))
                    for x in r.split("\n"):
                        if not "Least Possible" in x:
                            print(x + "\n"),
                        else:
                            break
        else:
            r = commands.getoutput('./hash-identifier.py %s' % args.word)
            print(r)

    elif args.file and args.decrypt and not args.encrypt and not args.identify:  # FILE
        alg = args.type
        with open(args.file, 'r') as myfile:
            if args.online:  # Online
                for line in myfile.readlines():
                    if alg == "md5":
                        md5OnDecrypt(line)
                        md5OnDecrypt3(line)
                        md5OnDecrypt4(line)
                        MultiOnDecrypt2(line)
                        MultiOnDecrypt3(line)
                    elif alg == "sha1":
                        MultiOnDecrypt2(line)
                        MultiOnDecrypt3(line)
                    elif alg == "sha224":
                        MultiOnDecrypt3(line)
                    elif alg == "sha256":
                        web = "Sha256/"
                        MultiOnDecrypt(line, web)
                        MultiOnDecrypt2(line)
                        MultiOnDecrypt3(line)
                    elif alg == "sha384":
                        web = "Sha384/"
                        MultiOnDecrypt(line, web)
                        MultiOnDecrypt2(line)
                        MultiOnDecrypt3(line)
                    elif alg == "sha512":
                        web = "Sha512/"
                        MultiOnDecrypt(line, web)
                        MultiOnDecrypt2(line)
                        MultiOnDecrypt3(line)
                    elif alg == "ntlm":
                        web = "Ntlm/"
                        MultiOnDecrypt(line, web)
                    elif alg == "lm":
                        print(colors.FAIL + colors.BOLD + "\n[!] Hash type not supported in this mode" + colors.ENDC + "\n")
                    else:
                        print (colors.FAIL + colors.BOLD + "\n[!] Incorrect algorithm!!" + colors.ENDC)

            elif args.offline:  # Offline
                for line in myfile.readlines():
                    line = line.strip("\n")
                    print("\n [+]" + colors.INFO + " Hash: " + colors.ENDC + line)
                    print(" [+]" + colors.INFO + " Type: " + colors.ENDC + args.type)
                    if alg == "md5":
                        MultiOffDecrypt(line, args.wordlist, "md5")
                    elif alg == "sha1":
                        MultiOffDecrypt(line, args.wordlist, "sha1")
                    elif alg == "sha224":
                        MultiOffDecrypt(line, args.wordlist, "sha224")
                    elif alg == "sha256":
                        MultiOffDecrypt(line, args.wordlist, "sha256")
                    elif alg == "sha384":
                        MultiOffDecrypt(line, args.wordlist, "sha384")
                    elif alg == "sha512":
                        MultiOffDecrypt(line, args.wordlist, "sha512")
                    elif alg == "ntlm":
                        print(colors.FAIL + colors.BOLD + "\n[!] Hash type not supported in this mode" + colors.ENDC + "\n")
                    elif alg == "lm":
                        lmOffDecryptFile(line, args.wordlist)
                    else:
                        print(colors.FAIL + colors.BOLD + "\n[!] Incorrect algorithm!!" + colors.ENDC)

    else:
        print(SAMPLES)
Ejemplo n.º 39
0
def hash_target(address, suffix):
    hasher = sha1()
    hasher.update(address.encode())
    hasher.update(suffix.encode())
    return hasher.hexdigest()
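A quick usage sketch for hash_target; the address and suffix values below are made up, and the function assumes sha1 was imported via "from hashlib import sha1":

ident = hash_target("10.0.0.1", "/api")          # hypothetical inputs
assert ident == hash_target("10.0.0.1", "/api")  # same inputs, same digest
print(len(ident))  # SHA-1 hexdigests are always 40 characters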
Ejemplo n.º 40
0
def checksum(fn):
    """Compute the SHA1 sum of a file"""
    sha1 = hashlib.sha1()
    with open(fn, 'rb') as f:  # binary mode: hashlib requires bytes
        sha1.update(f.read())
    return sha1.hexdigest()
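For large files it may be preferable to stream the input instead of reading it whole; a minimal chunked variant of the same checksum (checksum_chunked is a hypothetical name, assuming hashlib is imported as above):

def checksum_chunked(fn, chunk_size=1 << 16):
    """Like checksum(), but feeds the file to SHA-1 in 64 KiB chunks."""
    sha1 = hashlib.sha1()
    with open(fn, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            sha1.update(chunk)
    return sha1.hexdigest()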
Ejemplo n.º 41
0
def search(search_request):
    logger.info("Starting new search: %s" % search_request)
    if search_request.maxage is None and config.settings.searching.maxAge:
        search_request.maxage = config.settings.searching.maxAge
        logger.info("Will ignore results older than %d days" %
                    search_request.maxage)

    # Clean up cache
    for k in list(pseudo_cache.keys()):
        if pseudo_cache[k]["last_access"].shift(minutes=5) < arrow.utcnow():  # shift() is the supported arrow API for relative offsets
            pseudo_cache.pop(k)

    # Clean up old search results. We do this here because we don't have any background jobs and this is the function most regularly called
    keepFor = config.settings.main.keepSearchResultsForDays
    oldSearchResultsCount = countOldSearchResults(keepFor)
    if oldSearchResultsCount > 0:
        logger.info(
            "Deleting %d search results from database that are older than %d days"
            % (oldSearchResultsCount, keepFor))
        SearchResult.delete().where(SearchResult.firstFound < (
            datetime.date.today() -
            datetime.timedelta(days=keepFor))).execute()
    else:
        if logger.getEffectiveLevel() == logging.DEBUG:
            logger.debug("%d search results stored in database" %
                         SearchResult.select().count())

    limit = search_request.limit
    external_offset = int(search_request.offset)
    search_hash = search_request.search_hash
    categoryResult = categories.getCategoryByAnyInput(search_request.category)
    search_request.category = categoryResult
    if search_hash not in pseudo_cache or search_request.offset == 0:
        # If it's a new search (which starts with offset 0) do it again instead of using the cached results
        logger.debug(
            "Didn't find this query in cache or want to do a new search")
        cache_entry = {
            "results": [],
            "indexer_infos": {},
            "total": 0,
            "last_access": arrow.utcnow(),
            "offset": 0,
            "rejected": SearchModule.getRejectedCountDict()
        }
        category = categoryResult.category
        indexers_to_call = pick_indexers(search_request)
        for p in indexers_to_call:
            cache_entry["indexer_infos"][p] = {
                "has_more": True,
                "search_request": search_request,
                "total_included": False
            }

        dbsearch = Search(internal=search_request.internal,
                          query=search_request.query,
                          category=categoryResult.category.pretty,
                          identifier_key=search_request.identifier_key,
                          identifier_value=search_request.identifier_value,
                          season=search_request.season,
                          episode=search_request.episode,
                          type=search_request.type,
                          title=search_request.title,
                          author=search_request.author,
                          username=search_request.username)
        saveSearch(dbsearch)
        # dbsearch.save()
        cache_entry["dbsearch"] = dbsearch

        # Find ignored words and parse query for ignored words
        search_request.forbiddenWords = []
        search_request.requiredWords = []
        applyRestrictionsGlobal = config.settings.searching.applyRestrictions == "both" or (
            config.settings.searching.applyRestrictions == "internal"
            and search_request.internal) or (
                config.settings.searching.applyRestrictions == "external"
                and not search_request.internal)
        applyRestrictionsCategory = category.applyRestrictions == "both" or (
            category.applyRestrictions == "internal"
            and search_request.internal) or (
                category.applyRestrictions == "external"
                and not search_request.internal)
        if config.settings.searching.forbiddenWords and applyRestrictionsGlobal:
            logger.debug("Using configured global forbidden words: %s" %
                         config.settings.searching.forbiddenWords)
            search_request.forbiddenWords.extend([
                x.lower().strip() for x in list(
                    filter(bool,
                           config.settings.searching.forbiddenWords.split(
                               ",")))
            ])
        if config.settings.searching.requiredWords and applyRestrictionsGlobal:
            logger.debug("Using configured global required words: %s" %
                         config.settings.searching.requiredWords)
            search_request.requiredWords.extend([
                x.lower().strip() for x in list(
                    filter(bool,
                           config.settings.searching.requiredWords.split(",")))
            ])

        if category.forbiddenWords and applyRestrictionsCategory:
            logger.debug(
                "Using configured forbidden words for category %s: %s" %
                (category.pretty, category.forbiddenWords))
            search_request.forbiddenWords.extend([
                x.lower().strip()
                for x in list(filter(bool, category.forbiddenWords.split(",")))
            ])
        if category.requiredWords and applyRestrictionsCategory:
            logger.debug(
                "Using configured required words for category %s: %s" %
                (category.pretty, category.requiredWords))
            search_request.requiredWords.extend([
                x.lower().strip()
                for x in list(filter(bool, category.requiredWords.split(",")))
            ])

        if search_request.query:
            forbiddenWords = [
                str(x[1]) for x in re.findall(r"[\s|\b](\-\-|!)(?P<term>\w+)",
                                              search_request.query)
            ]
            if len(forbiddenWords) > 0:
                logger.debug("Query before removing NOT terms: %s" %
                             search_request.query)
                search_request.query = re.sub(r"[\s|\b](\-\-|!)(?P<term>\w+)",
                                              "", search_request.query)
                logger.debug("Query after removing NOT terms: %s" %
                             search_request.query)
                logger.debug("Found NOT terms: %s" % ",".join(forbiddenWords))

                search_request.forbiddenWords.extend(forbiddenWords)
        cache_entry["forbiddenWords"] = search_request.forbiddenWords
        cache_entry["requiredWords"] = search_request.requiredWords
        cache_entry["query"] = search_request.query

        pseudo_cache[search_hash] = cache_entry
    else:
        cache_entry = pseudo_cache[search_hash]
        indexers_to_call = [
            indexer for indexer, info in cache_entry["indexer_infos"].items()
            if info["has_more"]
        ]
        dbsearch = cache_entry["dbsearch"]
        search_request.forbiddenWords = cache_entry["forbiddenWords"]
        search_request.requiredWords = cache_entry["requiredWords"]
        search_request.query = cache_entry["query"]
        logger.debug("Found search in cache")

        logger.debug(
            "Will search at indexers as long as we don't have enough results for the current offset+limit and any indexer has more results."
        )
    if search_request.loadAll:
        logger.debug(
            "Requested to load all results. Will continue to search until all indexers are exhausted"
        )
    while (len(cache_entry["results"]) < external_offset + limit
           or search_request.loadAll) and len(indexers_to_call) > 0:
        if len(cache_entry["results"]) < external_offset + limit:
            logger.debug(
                "We want %d results but have only %d so far" %
                ((external_offset + limit), len(cache_entry["results"])))
        elif search_request.loadAll:
            logger.debug("All results requested. Continuing to search.")
        logger.debug("%d indexers still have results" % len(indexers_to_call))
        search_request.offset = cache_entry["offset"]

        logger.debug("Searching indexers with offset %d" %
                     search_request.offset)
        result = search_and_handle_db(
            dbsearch, {x: search_request
                       for x in indexers_to_call})
        logger.debug("All search calls to indexers completed")
        search_results = []
        indexers_to_call = []

        waslocked = False
        before = arrow.now()
        if databaseLock.locked():
            logger.debug(
                "Database accesses locked by other search. Will wait for our turn."
            )
            waslocked = True
        databaseLock.acquire()
        if waslocked:
            after = arrow.now()
            took = (after - before).seconds * 1000 + (
                after - before).microseconds / 1000
            logger.debug("Waited %dms for database lock" % took)
        for indexer, queries_execution_result in result["results"].items():
            with db.atomic():
                logger.info("%s returned %d results" %
                            (indexer, len(queries_execution_result.results)))
                for result in queries_execution_result.results:
                    if result.title is None or result.link is None or result.indexerguid is None:
                        logger.info("Skipping result with missing data: %s" %
                                    result)
                        continue
                    try:
                        searchResultId = hashlib.sha1(
                            (str(indexer.indexer.id) +
                             result.indexerguid).encode()).hexdigest()
                        tryGetOrCreateSearchResultDbEntry(
                            searchResultId, indexer.indexer.id, result)
                        result.searchResultId = searchResultId
                        search_results.append(result)
                    except (IntegrityError, OperationalError) as e:
                        logger.error(
                            "Error while trying to save search result to database. Skipping it. Error: %s"
                            % e)

            cache_entry["indexer_infos"][indexer].update({
                "did_search":
                queries_execution_result.didsearch,
                "indexer":
                indexer.name,
                "search_request":
                search_request,
                "has_more":
                queries_execution_result.has_more,
                "total":
                queries_execution_result.total,
                "total_known":
                queries_execution_result.total_known,
                "indexer_search":
                queries_execution_result.indexerSearchEntry,
                "rejected":
                queries_execution_result.rejected,
                "processed_results":
                queries_execution_result.loaded_results
            })
            if queries_execution_result.has_more:
                indexers_to_call.append(indexer)
                logger.debug(
                    "%s still has more results so we could use it the next round"
                    % indexer)

            if queries_execution_result.total_known:
                if not cache_entry["indexer_infos"][indexer]["total_included"]:
                    cache_entry["total"] += queries_execution_result.total
                    logger.debug(
                        "%s reports %d total results. We'll include in the total this time only"
                        % (indexer, queries_execution_result.total))
                    cache_entry["indexer_infos"][indexer][
                        "total_included"] = True
            elif queries_execution_result.has_more:
                logger.debug(
                    "%s doesn't report an exact number of results so let's just add another 100 to the total"
                    % indexer)
                cache_entry["total"] += 100
            for rejectKey in cache_entry["rejected"].keys():
                if rejectKey in cache_entry["indexer_infos"][indexer][
                        "rejected"].keys():
                    cache_entry["rejected"][rejectKey] += cache_entry[
                        "indexer_infos"][indexer]["rejected"][rejectKey]

        databaseLock.release()

        logger.debug("Searching for duplicates")
        numberResultsBeforeDuplicateRemoval = len(search_results)
        grouped_by_sameness, uniqueResultsPerIndexer = find_duplicates(
            search_results)
        allresults = []
        for group in grouped_by_sameness:
            if search_request.internal:
                for i in group:
                    # We give each group of results a unique value by which they can be identified later
                    i.hash = hash(group[0].details_link)
                    allresults.append(i)

            else:
                # We sort by age first and then by indexerscore so the newest result with the highest indexer score is chosen
                group = sorted(group, key=lambda x: x.epoch, reverse=True)
                group = sorted(group,
                               key=lambda x: x.indexerscore,
                               reverse=True)
                allresults.append(group[0])
        search_results = allresults

        with databaseLock:
            for indexer, infos in cache_entry["indexer_infos"].items():
                if indexer.name in uniqueResultsPerIndexer.keys(
                ):  # If the search failed it isn't contained in the duplicates list
                    uniqueResultsCount = uniqueResultsPerIndexer[
                        infos["indexer"]]
                    processedResults = infos["processed_results"]
                    logger.debug(
                        "Indexer %s had a unique results share of %d%% (%d of %d total results were only provided by this indexer)"
                        % (indexer.name,
                           100 * uniqueResultsCount /
                           numberResultsBeforeDuplicateRemoval,
                           uniqueResultsCount,
                           numberResultsBeforeDuplicateRemoval))
                    infos["indexer_search"].uniqueResults = uniqueResultsCount
                    infos["indexer_search"].processedResults = processedResults
                    infos["indexer_search"].save()

        if not search_request.internal:
            countAfter = len(search_results)
            countRemoved = numberResultsBeforeDuplicateRemoval - countAfter
            logger.info("Removed %d duplicates from %d results" %
                        (countRemoved, numberResultsBeforeDuplicateRemoval))

        search_results = sorted(search_results,
                                key=lambda x: x.epoch,
                                reverse=True)

        cache_entry["results"].extend(search_results)
        cache_entry["offset"] += limit

    if len(indexers_to_call) == 0:
        logger.info("All indexers exhausted")
    elif len(cache_entry["results"]) >= external_offset + limit:
        logger.debug(
            "Loaded a total of %d results which is enough for the %d requested. Stopping search."
            % (len(cache_entry["results"]), (external_offset + limit)))

    if search_request.internal:
        logger.debug(
            "We have %d cached results and return them all because we search internally"
            % len(cache_entry["results"]))
        nzb_search_results = copy.deepcopy(
            cache_entry["results"][external_offset:])
    else:
        logger.debug(
            "We have %d cached results and return %d-%d of %d total available accounting for the limit set for the API search"
            % (len(cache_entry["results"]), external_offset,
               external_offset + limit, cache_entry["total"]))
        nzb_search_results = copy.deepcopy(
            cache_entry["results"][external_offset:(external_offset + limit)])
    cache_entry["last_access"] = arrow.utcnow()
    for k, v in cache_entry["rejected"].items():
        if v > 0:
            logger.info("Rejected %d because: %s" % (v, k))
    logger.info("Returning %d results" % len(nzb_search_results))
    return {
        "results": nzb_search_results,
        "indexer_infos": cache_entry["indexer_infos"],
        "dbsearchid": cache_entry["dbsearch"].id,
        "total": cache_entry["total"],
        "offset": external_offset,
        "rejected": cache_entry["rejected"].items()
    }
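Stripped of the indexer plumbing, the paging logic above reduces to a keyed cache that accumulates results across calls and serves one slice per call. A minimal sketch of that pattern (all names here are illustrative, not the module's API):

cache = {}

def paged_fetch(key, offset, limit, fetch_more):
    # Accumulate results for this search key until the requested slice is covered.
    entry = cache.setdefault(key, {"results": [], "exhausted": False})
    while len(entry["results"]) < offset + limit and not entry["exhausted"]:
        batch = fetch_more()  # next page from the backends
        if not batch:
            entry["exhausted"] = True
        entry["results"].extend(batch)
    return entry["results"][offset:offset + limit]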
Ejemplo n.º 42
0
    def hash_password(self, password):
        # NOTE: a single unsalted SHA-1 is unsuitable for real password storage;
        # prefer hashlib.pbkdf2_hmac, bcrypt or scrypt.
        return hashlib.sha1(password.encode('utf-8')).hexdigest()
Ejemplo n.º 43
0
def lib_sha1(string):
    sha = hashlib.sha1()
    sha.update(string.encode())
    val = sha.hexdigest()
    return val
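Since lib_sha1 encodes its input itself, it only accepts text; the SHA-1 of "hello" is a well-known test vector that makes a handy sanity check:

assert lib_sha1("hello") == "aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d"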
Ejemplo n.º 44
0
def generate_hash():
    import os
    import hashlib as h
    entry = [[
        e['entry']['id'],
        str(e['entry']['datetime']), e['entry']['title'], '|'.join([
            h.sha1(e['entry']['description']).hexdigest(),
            h.md5(e['entry']['description']).hexdigest(),
            h.sha224(e['entry']['description']).hexdigest(),
            h.sha256(e['entry']['description']).hexdigest(),
            h.sha384(e['entry']['description']).hexdigest(),
            h.sha512(e['entry']['description']).hexdigest()
        ])
    ] for e in cynotedb(cynotedb.entry.id > 0).select(
        cynotedb.entry.id, cynotedb.entry.title, cynotedb.entry.datetime,
        cynotedb.entry.description).records]
    comment = [[
        e['comment']['id'],
        str(e['comment']['datetime']), e['comment']['entry_id'], '|'.join([
            h.sha1(e['comment']['body']).hexdigest(),
            h.md5(e['comment']['body']).hexdigest(),
            h.sha224(e['comment']['body']).hexdigest(),
            h.sha256(e['comment']['body']).hexdigest(),
            h.sha384(e['comment']['body']).hexdigest(),
            h.sha512(e['comment']['body']).hexdigest()
        ])
    ] for e in cynotedb(cynotedb.comment.id > 0).select(
        cynotedb.comment.id, cynotedb.comment.entry_id,
        cynotedb.comment.datetime, cynotedb.comment.body).records]
    tentry = [[
        e['entry']['id'],
        str(e['entry']['datetime']), e['entry']['title'], '|'.join([
            h.sha1(e['entry']['description']).hexdigest(),
            h.md5(e['entry']['description']).hexdigest(),
            h.sha224(e['entry']['description']).hexdigest(),
            h.sha256(e['entry']['description']).hexdigest(),
            h.sha384(e['entry']['description']).hexdigest(),
            h.sha512(e['entry']['description']).hexdigest()
        ])
    ] for e in cynotedb(cynotedb.track_entry.id > 0).select(
        cynotedb.track_entry.id, cynotedb.track_entry.title, cynotedb.
        track_entry.datetime, cynotedb.track_entry.description).records]
    tcomment = [[
        e['comment']['id'],
        str(e['comment']['datetime']), e['comment']['entry_id'], '|'.join([
            h.sha1(e['comment']['body']).hexdigest(),
            h.md5(e['comment']['body']).hexdigest(),
            h.sha224(e['comment']['body']).hexdigest(),
            h.sha256(e['comment']['body']).hexdigest(),
            h.sha384(e['comment']['body']).hexdigest(),
            h.sha512(e['comment']['body']).hexdigest()
        ])
    ] for e in cynotedb(cynotedb.track_comment.id > 0).select(
        cynotedb.track_comment.id, cynotedb.track_comment.entry_id,
        cynotedb.track_comment.datetime, cynotedb.track_comment.body).records]
    upload_dir = os.sep.join(
        [os.getcwd(), 'applications', request.application, 'uploads'])
    upfile = [[f, filehash(open(os.sep.join([upload_dir, f])))]
              for f in [t for t in os.walk(upload_dir)][0][2]]
    for e in entry:
        db.entry_hash.insert(eid=e[0], edatetime=e[1], etitle=e[2], ehash=e[3])
    for c in comment:
        db.comment_hash.insert(cid=c[0], cdatetime=c[1], eid=c[2], chash=c[3])
    for e in tentry:
        db.track_entry_hash.insert(eid=e[0],
                                   edatetime=e[1],
                                   etitle=e[2],
                                   ehash=e[3])
    for c in tcomment:
        db.track_comment_hash.insert(cid=c[0],
                                     cdatetime=c[1],
                                     eid=c[2],
                                     chash=c[3])
    for f in upfile:
        db.file_hash.insert(filename=f[0], fhash=f[1])
    db.log.insert(event='Notebook Entry hash generation. n=' + str(len(entry)),
                  user=session.username)
    db.log.insert(event='Notebook Comment hash generation. n=' +
                  str(len(comment)),
                  user=session.username)
    db.log.insert(event='Trackbook Entry hash generation. n=' +
                  str(len(tentry)),
                  user=session.username)
    db.log.insert(event='Trackbook Comment hash generation. n=' +
                  str(len(tcomment)),
                  user=session.username)
    db.log.insert(event='Uploaded file hash generation. n=' + str(len(upfile)),
                  user=session.username)
    return dict(entry=entry,
                comment=comment,
                tentry=tentry,
                tcomment=tcomment,
                upfile=upfile)
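The '|'.join of six digests is written out four times above; if this were being refactored, the repetition could collapse into a small helper along these lines (multihash is a hypothetical name, reusing the h alias for hashlib and assuming byte-string input like the original):

def multihash(data):
    # Same six algorithms, same order, same '|' separator as the original.
    algos = (h.sha1, h.md5, h.sha224, h.sha256, h.sha384, h.sha512)
    return '|'.join(a(data).hexdigest() for a in algos)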
Ejemplo n.º 45
0
Archivo: mp.py Proyecto: Jetzzzzz/luda
def mphook():
    applist = ['全市通办']
    if request.method == 'GET':
        token = '69b3f633cd9e4136bfdd8be812a34e28'  # token required by the WeChat server configuration
        data = request.args
        signature = data.get('signature', '')
        timestamp = data.get('timestamp', '')
        nonce = data.get('nonce', '')
        echostr = data.get('echostr', '')
        s = [timestamp, nonce, token]
        s.sort()
        s = ''.join(s).encode()
        if (hashlib.sha1(s).hexdigest() == signature):
            return make_response(echostr)
    else:
        rec = request.stream.read()
        xml_rec = ET.fromstring(rec)
        tou = xml_rec.find('ToUserName').text
        fromu = xml_rec.find('FromUserName').text
        app.logger.error(xml_rec.find('Content').text)
        content = xml_rec.find('Content').text + " -- from luda"
        xml_rep = "<xml><ToUserName><![CDATA[%s]]></ToUserName><FromUserName><![CDATA[%s]]></FromUserName><CreateTime>%s</CreateTime><MsgType><![CDATA[text]]></MsgType><Content><![CDATA[%s]]></Content><FuncFlag>0</FuncFlag></xml>"

        xml_rep_mutiimg = '''<xml>
                                <ToUserName><![CDATA[%s]]></ToUserName>
                                <FromUserName><![CDATA[%s]]></FromUserName>
                                <CreateTime>%s</CreateTime>
                                <MsgType><![CDATA[news]]></MsgType>
                                <ArticleCount>1</ArticleCount>
                                <Articles>
                                    <item>
                                        <Title><![CDATA[%s]]></Title>
                                        <Description><![CDATA[%s]]></Description>
                                        <PicUrl><![CDATA[%s]]></PicUrl>
                                        <Url><![CDATA[%s]]></Url>
                                    </item>                                    
                                </Articles>
                            </xml>'''
        if xml_rec.find('Content').text in applist:
            app.logger.error(
                xml_rec.find('Content').text + " is in the applist...")
            home_title = '您好\n' + "以下是您所属分局的全市通办统计情况:"
            my_imag_url = "http://47.92.37.219/static/img/234979-1404251G15249.jpg"
            application = Application.query.filter_by(id=3).first()
            ret = requests.post(application.url, data={})
            returnList = ret.json()['result']

            det = []
            count = []
            for ret in returnList:
                if '上海市徐汇区税务局' == ret['yyslfjmc']:
                    count.append(ret['jjqphj'])
                    count.append(ret['qthj'])
                    det.append(
                        ".%s:%s笔,发票申领%s笔,其他%s笔\n" %
                        (ret['nsrzgfjmc'], str(ret['jjqphj'] + ret['qthj']),
                         str(ret['jjqphj']), str(ret['qthj'])))

            des = "截止%s,共收到外区申请事项%s笔,其中\n" % (time.strftime(
                "%Y-%m-%d", time.localtime(time.time())), str(sum(count)))

            response = make_response(
                xml_rep_mutiimg %
                (fromu, tou, str(int(time.time())), home_title, des +
                 ''.join(det), my_imag_url, "http://47.92.37.219/qstb/3"))
            response.content_type = 'application/xml'
            return response
        else:
            ret_ask = ask_session.get_answer(xml_rec.find('Content').text)
            response = make_response(
                xml_rep % (fromu, tou, str(int(
                    time.time())), ret_ask.http_body.decode()))
            response.content_type = 'application/xml'
            return response
    return 'Hello weixin!'
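The GET branch above implements the standard WeChat server-verification handshake: sort token, timestamp and nonce lexicographically, concatenate, SHA-1 the result, and compare it with the signature parameter. Isolated as a helper, the check looks like this sketch (the token is whatever was configured in the WeChat console):

import hashlib

def check_wechat_signature(token, timestamp, nonce, signature):
    # WeChat signs ''.join(sorted([token, timestamp, nonce])) with SHA-1.
    s = ''.join(sorted([token, timestamp, nonce])).encode()
    return hashlib.sha1(s).hexdigest() == signature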
Ejemplo n.º 46
0
def hashArray(x):
    return hashlib.sha1(np.ascontiguousarray(x).view(np.uint8)).hexdigest()
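# Note: hashArray returns equal digests for arrays with equal contents regardless
# of memory layout: np.ascontiguousarray forces a C-contiguous copy first, because
# .view(np.uint8) raises on non-contiguous input such as strided slices.
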
def moderatechallenge():
    global commentHashesAndComments
    global submission
    commentHashesAndComments = {}
    stringio = StringIO()
    stringio.write('<html>\n<head>\n</head>\n\n')

    # redditSession = loginAndReturnRedditSession()
    redditSession = loginOAuthAndReturnRedditSession()
    submission = getSubmissionForRedditSession(redditSession)
    flat_comments = getCommentsForSubmission(submission)
    retiredHashes = retiredCommentHashes()
    i = 1
    stringio.write('<iframe name="invisibleiframe" style="display:none;"></iframe>\n')
    stringio.write("<h3>")
    stringio.write(os.getcwd())
    stringio.write("<br>\n")
    stringio.write(submission.title)
    stringio.write("</h3>\n\n")
    stringio.write('<form action="copydisplaytoclipboard.html" method="post" target="invisibleiframe">')
    stringio.write('<input type="submit" name="actiontotake" value="Copy display.py stdout to clipboard">')
    stringio.write('<input type="submit" name="actiontotake" value="Automatically post display.py stdout">')
    stringio.write('</form>')
    stringio.write('<form action="updategooglechart.html" method="post" target="invisibleiframe">')
    stringio.write('<input type="submit" value="update-google-chart.py">')
    stringio.write('</form>')
    for comment in flat_comments:
        # print comment.is_root
        # print comment.score
        i += 1
        commentHash = sha1()
        commentHash.update(comment.permalink.encode('utf-8'))
        commentHash.update(comment.body.encode('utf-8'))
        commentHash = commentHash.hexdigest()
        if commentHash not in retiredHashes:
            commentHashesAndComments[commentHash] = comment
            authorName = str(comment.author)  # can be None if author was deleted.  So check for that and skip if it's None.
            participant = ParticipantCollection().participantNamed(authorName)
            stringio.write("<hr>\n")
            stringio.write('<font color="blue"><b>')
            stringio.write(authorName)
            stringio.write('</b></font><br>')
            if ParticipantCollection().hasParticipantNamed(authorName):
                stringio.write(' <small><font color="green">(member)</font></small>')
                if participant.isStillIn:
                    stringio.write(' <small><font color="green">(still in)</font></small>')
                else:
                    stringio.write(' <small><font color="red">(out)</font></small>')
                if participant.hasCheckedIn:
                    stringio.write(' <small><font color="green">(checked in)</font></small>')
                else:
                    stringio.write(' <small><font color="orange">(not checked in)</font></small>')
                if participant.hasRelapsed:
                    stringio.write(' <small><font color="red">(relapsed)</font></small>')
                else:
                    stringio.write(' <small><font color="green">(not relapsed)</font></small>')
            else:
                stringio.write(' <small><font color="red">(not a member)</font></small>')
            stringio.write('<form action="takeaction.html" method="post" target="invisibleiframe">')
            # stringio.write('<input type="submit" name="actiontotake" value="Checkin">')
            stringio.write('<input type="submit" name="actiontotake" value="Checkin" style="color:white;background-color:green">')
            stringio.write('<input type="submit" name="actiontotake" value="Signup and checkin">')
            # stringio.write('<input type="submit" name="actiontotake" value="Signup and checkin" style="color:white;background-color:green">')
            stringio.write('<input type="submit" name="actiontotake" value="Relapse" style="color:white;background-color:red">')
            stringio.write('<input type="submit" name="actiontotake" value="Reinstate with automatic comment">')
            stringio.write('<input type="submit" name="actiontotake" value="Reply with sorry-too-late comment">')
            stringio.write('<input type="submit" name="actiontotake" value="Skip comment">')
            stringio.write('<input type="submit" name="actiontotake" value="Skip comment and don\'t upvote">')
            stringio.write('<input type="hidden" name="username" value="' + b64encode(authorName) + '">')
            stringio.write('<input type="hidden" name="commenthash" value="' + commentHash + '">')
            stringio.write('<input type="hidden" name="commentpermalink" value="' + comment.permalink + '">')
            stringio.write('</form>')
            stringio.write(bleach.clean(markdown.markdown(comment.body.encode('utf-8')), tags=['p']))
            stringio.write("\n<br><br>\n\n")
    stringio.write('</html>')
    pageString = stringio.getvalue()
    stringio.close()
    return Response(pageString, mimetype='text/html')
Ejemplo n.º 48
0
def main():
    # TODO(tmandry): Remove all hardcoded paths and replace with args.
    parser = argparse.ArgumentParser()
    parser.add_argument("--root_build_dir", required=True)
    parser.add_argument("--fuchsia_dir", required=True)
    parser.add_argument("json_path")
    args = parser.parse_args()
    json_path = args.json_path

    project = None
    try:
        with open(json_path, "r") as json_file:
            project = json.loads(json_file.read())
    except IOError:
        print("Failed to generate Cargo.toml files")
        print("No project.json in the root of your out directory!")
        print("Run gn with the --ide=json flag set")
        # returns 0 so that CQ doesn't fail if this isn't set properly
        return 0

    project = Project(project)
    root_path = os.path.abspath(args.fuchsia_dir)
    root_build_dir = os.path.abspath(args.root_build_dir)

    rust_crates_path = os.path.join(root_path, "third_party/rust_crates")

    # this will be removed eventually?
    with open(rust_crates_path + "/Cargo.toml", "r") as f:
        cargo_toml = toml.load(f)
    project.patches = cargo_toml["patch"]["crates-io"]

    # Map from crate name to FeatureSpec. We don't include the version because we don't directly
    # depend on more than one version of the same crate.
    def collect_features(deps):
        for dep, info in deps.iteritems():
            if isinstance(info, str) or isinstance(info, unicode):
                continue
            project.third_party_features[dep] = FeatureSpec(
                info.get("features", []), info.get("default-features", True))

    collect_features(cargo_toml["dependencies"])
    for target_info in cargo_toml["target"].itervalues():
        collect_features(target_info.get("dependencies", {}))

    host_binaries = []
    target_binaries = []

    lookup = {}
    for idx, target in enumerate(project.rust_targets()):
        # hash is the GN target name without the prefixed //
        lookup[target] = hashlib.sha1(target[2:].encode("utf-8")).hexdigest()

    # remove the priorly generated rust crates
    gn_cargo_dir = os.path.join(root_build_dir, "cargo")
    shutil.rmtree(gn_cargo_dir, ignore_errors=True)
    os.makedirs(gn_cargo_dir)
    # Write a stamp file with a predictable name so the build system knows the
    # step ran successfully.
    with open(os.path.join(gn_cargo_dir, "generate_cargo.stamp"), "w") as f:
        f.truncate()

    for target in project.rust_targets():
        cargo_toml_dir = os.path.join(gn_cargo_dir, str(lookup[target]))
        try:
            os.makedirs(cargo_toml_dir)
        except OSError:
            print("Failed to create directory for Cargo: %s" % cargo_toml_dir)

        metadata = project.targets[target]
        with open(os.path.join(cargo_toml_dir, "Cargo.toml"), "w") as fout:
            write_toml_file(
                fout, metadata, project, target, lookup, root_path,
                root_build_dir)
    return 0
Ejemplo n.º 49
0
def sha1(text):
    # hashlib requires bytes in Python 3, so encode str input first.
    return hashlib.sha1(text.encode('utf-8') if isinstance(text, str) else text).hexdigest()
Ejemplo n.º 50
0
    def _algorithm(cls, data): return hashlib.sha1(data)


class sha224(HashUnit):
    def __hash__(self):
        if self.__hash_value__ is None:
            item = np.ascontiguousarray(self.__array__).view(np.uint8)
            digest = hashlib.sha1(item).hexdigest()
            self.__hash_value__ = int(digest, 16)
        return self.__hash_value__
Ejemplo n.º 52
0
    def get_serial(self):
        dat = self._handle.controlRead(Panda.REQUEST_IN, 0xd0, 0, 0, 0x20)
        hashsig, calc_hash = dat[0x1c:], hashlib.sha1(
            dat[0:0x1c]).digest()[0:4]
        assert (hashsig == calc_hash)
        return [dat[0:0x10], dat[0x10:0x10 + 10]]
Ejemplo n.º 53
0
def user2cookie(user, max_age):
    expires = str(int(time.time() + max_age))  # expiration time
    s = '%s-%s-%s-%s' % (user.id, user.passwd, expires, _COOKIE_KEY)
    L = [user.id, expires, hashlib.sha1(s.encode('utf-8')).hexdigest()]
    return '-'.join(L)
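Verification runs the same construction in reverse: split the cookie, recompute the SHA-1 over (id, passwd, expires, _COOKIE_KEY), and compare. A sketch of the matching check (the user object, time/hashlib imports and _COOKIE_KEY are assumed to exist as in the original):

def cookie_is_valid(user, cookie_str):
    # Reject expired cookies, then recompute and compare the digest.
    uid, expires, sha1_hex = cookie_str.rsplit('-', 2)  # rsplit in case the id contains '-'
    if int(expires) < time.time():
        return False
    s = '%s-%s-%s-%s' % (uid, user.passwd, expires, _COOKIE_KEY)
    return hashlib.sha1(s.encode('utf-8')).hexdigest() == sha1_hex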
Ejemplo n.º 54
0
        zipName = 'locale_' + lang_code + '.zip'
        zipPath = outputDir + zipName
        myzip = zipfile.ZipFile(zipPath, mode='w')
        qmFilePath = outputDir + 'mscore_' + lang_code + ".qm"
        myzip.write(qmFilePath, 'mscore_' + lang_code + ".qm")
        qmFilePath = outputDir + 'instruments_' + lang_code + ".qm"
        myzip.write(qmFilePath, 'instruments_' + lang_code + ".qm")
        myzip.close()

        # get zip file size
        file_size = os.path.getsize(zipPath)
        file_size = "%.2f" % (file_size / 1024)

        # compute zip file hash
        hash_file = hashlib.sha1()
        with open(zipPath, 'rb') as f:
            hash_file.update(f.read())

        data[lang_code]["file_name"] = zipName
        data[lang_code]["name"] = langCodeNameDict[lang_code]
        data[lang_code]["hash"] = str(hash_file.hexdigest())
        data[lang_code]["file_size"] = file_size
        for s3Url in s3Urls:
            push_zip = subprocess.Popen([
                's3cmd', 'put', '--acl-public', '--guess-mime-type', zipPath,
                s3Url + zipName
            ])
            push_zip.communicate()

json_file = open(outputDir + "details.json", "w")
Ejemplo n.º 55
0
    def request_the_third(self, file1, names=None):
        '''处理case里的各项参数,并request'''
        names = names or []
        headers = {'content-type': 'application/x-www-form-urlencoded'}
        list1 = self.excel_table_byindex(file1)
        nonce = str(time.time()).split('.')[0] + '000'
        nonce_ex = 'nonce=' + str(time.time()).split('.')[0] + '000'
        n = 1
        for i in list1:
            i = eval(str(i))
            #print i
            method = i['method']
            url = str(i['url'])
            if 'http://apitest.lyancafe.com/third/v1/citys' == i['url']:
                for name in names:
                    print u'========================now running apitest of %s' % (
                        name), ',the %sst case' % n, '======================'
                    #print noncenonce_ex = 'nonce='+str(time.time()).split('.')[0]+'000'
                    sign_ex = 'appid=' + names[name][
                        0] + '&' + nonce_ex + '&' + 'appkey=' + names[name][1]
                    sign = hashlib.sha1(sign_ex).hexdigest()
                    #print sign
                    getparams = {
                        'appid': names[name][0],
                        'nonce': nonce,
                        'sign': sign
                    }
                    print getparams
                    postparams = ''
                    putparams = ''
                    response = self.apicall(method, url, getparams, postparams,
                                            putparams, headers)
                    try:
                        if response['status'] == '0':
                            print 'pass'
                            print response

                        else:
                            print 'case fail'
                            print response
                            print response['message']

                    #print data['message'].encode('gbk')
                    except Exception, e:
                        print Exception, ':', e
                        print 'wrong!please check!!!'
                        print 'status:', response
                        continue
                n = n + 1

            if 'http://apitest.lyancafe.com/third/v1/provider/goods' == i[
                    'url']:
                for name in names:
                    print u'========================now running apitest of %s' % (
                        name), ',the %sst case' % n, '======================'
                    #print noncenonce_ex = 'nonce='+str(time.time()).split(='.')[0]+'000'
                    sign_ex = 'appid=' + names[name][
                        0] + '&' + nonce_ex + '&' + 'providerId=' + str(
                            int(i['provider_id'])
                        ) + '&' + 'appkey=' + names[name][1]
                    print sign_ex
                    sign = hashlib.sha1(sign_ex).hexdigest()
                    #print sign
                    getparams = {
                        'appid': names[name][0],
                        'nonce': nonce,
                        'providerId': int(i['provider_id']),
                        'sign': sign
                    }
                    #print getparams
                    postparams = ''
                    putparams = ''
                    response = self.apicall(method, url, getparams, postparams,
                                            putparams, headers)
                    try:
                        if response['status'] == '0':
                            print 'pass'
                            print response
                            #print response['data'][0]['subcats'][0]['items']
                            #for i in response['data'][0]['subcats'][0]['items']:
                            #print i['title']

                        else:
                            print 'case fail'
                            print response
                            print response['message']

                    #print data['message'].encode('gbk')
                    except Exception, e:
                        print Exception, ':', e
                        print 'wrong!please check!!!'
                        print 'status:', response
                        continue
                n = n + 1
Ejemplo n.º 56
0
    def etag(self):
        etag = hashlib.sha1()
        etag.update(six.text_type(self.id).encode('utf-8'))
        etag.update(six.text_type(
            self.revision_start.isoformat()).encode('utf-8'))
        return etag.hexdigest()
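A digest like this is typically compared against the If-None-Match request header to short-circuit unchanged responses; a framework-neutral sketch (the headers dict and resource object here are assumptions, not this module's API):

def maybe_not_modified(request_headers, resource):
    # Serve 304 Not Modified when the client already has the current revision.
    current = resource.etag()
    if request_headers.get('If-None-Match') == current:
        return 304, {}
    return 200, {'ETag': current}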
Ejemplo n.º 57
0
def parse_filing_document(document_buffer: Union[bytes, str], extract: bool = False):
    """
    Parse a document buffer into metadata and contents.
    :param document_buffer: raw document buffer
    :param extract: whether to pass to Tika for text extraction
    :return:
    """
    # Parse segment
    doc_type = re.findall("<TYPE>(.+)", document_buffer)
    doc_sequence = re.findall("<SEQUENCE>(.+)", document_buffer)
    doc_file_name = re.findall("<FILENAME>(.+)", document_buffer)
    doc_description = re.findall("<DESCRIPTION>(.+)", document_buffer)

    # Start and end tags
    content_p0 = document_buffer.rfind("</", 0, document_buffer.rfind("</"))
    content_p1 = document_buffer.find(">", content_p0)
    doc_tag_type = document_buffer[content_p0 + len("</"):content_p1]
    content_start_tag = "<{0}>".format(doc_tag_type)
    content_end_tag = "</{0}>".format(doc_tag_type)

    doc_content_p0 = document_buffer.find(content_start_tag) + len(content_start_tag)
    doc_content_p1 = document_buffer.find(content_end_tag)
    doc_content = document_buffer[doc_content_p0:doc_content_p1]

    # Check content types
    is_uuencoded = False
    doc_text_head = doc_content[0:100]
    doc_text_head_upper = doc_text_head.upper()

    if "<PDF>" in doc_text_head_upper:
        is_uuencoded = True
        content_type = "application/pdf"
    elif "<HTML" in doc_text_head_upper:
        content_type = "text/html"
    elif "<XML" in doc_text_head_upper:
        content_type = "application/xml"
    elif "<?XML" in doc_text_head_upper:
        content_type = "application/xml"
    elif doc_text_head.startswith("\nbegin "):
        is_uuencoded = True
        if len(doc_file_name) > 0:
            # guess_type returns a (type, encoding) tuple; the type slot may be None
            content_type = mimetypes.guess_type(
                os.path.basename(doc_file_name[0]))[0]
            if content_type is None:
                content_type = "application/octet-stream"
        else:
            content_type = "application/octet-stream"
    else:
        content_type = "text/plain"

    # uudecode if required and calculate hash for sharding/dedupe
    doc_content = doc_content.encode("utf-8")
    if is_uuencoded:
        doc_content = uudecode(doc_content)
    doc_sha1 = hashlib.sha1(doc_content).hexdigest()

    # extract text from tika if requested
    if extract:
        doc_content_text = extract_text(doc_content)
    else:
        doc_content_text = None

    return {"type": doc_type[0] if len(doc_type) > 0 else None,
            "sequence": doc_sequence[0] if len(doc_sequence) > 0 else None,
            "file_name": doc_file_name[0] if len(doc_file_name) > 0 else None,
            "description": doc_description[0] if len(doc_description) > 0 else None,
            "content_type": content_type,
            "sha1": doc_sha1,
            "content": doc_content,
            "content_text": doc_content_text}
Ejemplo n.º 58
0
    def request_the_third_response(self, file1, names=None):
        '''计算不同接口的sign签名并调用接口'''
        response = {}
        names = names or {}
        headers = {'content-type': 'application/x-www-form-urlencoded'}

        list1 = self.excel_table_byindex(file1)

        nonce = str(time.time()).split('.')[0] + '000'
        nonce_ex = 'nonce=' + str(time.time()).split('.')[0] + '000'
        n = 1
        caselist2 = []
        caselist3 = []
        for i in list1:
            i = eval(str(i))
            caselist = i
            method = i['method']
            url = str(i['url'])
            print type(caselist)
            for i in caselist.keys():
                print type(i)
                print 'i:', i
                if caselist[i] == '':
                    caselist.pop(i)
                if i == 'method':
                    caselist.pop(i)
                if i == 'url':
                    caselist.pop(i)
                if i == 'status':
                    caselist.pop(i)
                if i == 'description':
                    caselist.pop(i)
                caselist1 = caselist
                #print 'caselist1:',caselist1
            if caselist1 != {}:
                print 111
                for i in caselist1.keys():
                    eachparam = str(i) + '=' + str(caselist1[i])
                    caselist2.append(eachparam)
                    print 222
                    print caselist2
                for name in names:
                    print 444
                    print u'========================now running apitest of %s' % (
                        name), ',the %sst case' % n, '======================'
                    appid = 'appid' + '=' + str(names[name][0])
                    appkey = 'appkey' + '=' + str(names[name][1])
                    print 333
                    caselist2.append(nonce_ex)
                    caselist2.append(appid)
                    print 'caselist2:', caselist2
                    caselist2.sort()
                    caselist2.append(appkey)
                    print 'caselist2:', caselist2

                    sign_ex = '&'.join(caselist2)
                    print sign_ex
                    sign = hashlib.sha1(sign_ex).hexdigest()
                    print sign
                    params_ex = {
                        'appid': names[name][0],
                        'nonce': nonce,
                        'sign': sign
                    }
                    print 'params_ex:', params_ex
                    #print 'caselist1:',caselist1
                    caselist1.update(params_ex)
                    params = caselist1
                    print 'params:', params
                    if method == 'GET':
                        getparams = params
                        postparams = ''
                        putparams = ''
                        response = self.apicall(method, url, getparams,
                                                postparams, putparams, headers)
                    if method == 'POST':
                        postparams = params
                        getparams = ''
                        putparams = ''
                        response = self.apicall(method, url, getparams,
                                                postparams, putparams, headers)
                    if method == 'PUT':
                        putparams = params
                        getparams = ''
                        postparams = ''
                        response = self.apicall(method, url, getparams,
                                                postparams, putparams, headers)
                    return response
                    n += 1
                print 555

            else:
                print 2222
                for name in names.keys():
                    print u'========================now running apitest of %s' % (
                        name), ',the %sst case' % n, '======================'
                    appid = 'appid' + '=' + str(names[name][0])
                    appkey = 'appkey' + '=' + str(names[name][1])
                    caselist3.append(nonce_ex)
                    caselist3.append(appid)

                    caselist3.sort()
                    caselist3.append(appkey)
                    sign_ex = '&'.join(caselist3)
                    print sign_ex
                    sign = hashlib.sha1(sign_ex).hexdigest()
                    print sign
                    params_ex = {
                        'appid': names[name][0],
                        'nonce': nonce,
                        'sign': sign
                    }
                    caselist1.update(params_ex)
                    params = caselist1
                    if method == 'GET':
                        getparams = params
                        postparams = ''
                        putparams = ''
                        response = self.apicall(method, url, getparams,
                                                postparams, putparams, headers)
                    if method == 'POST':
                        postparams = params
                        getparams = ''
                        putparams = ''
                        response = self.apicall(method, url, getparams,
                                                postparams, putparams, headers)
                    if method == 'PUT':
                        putparams = params
                        getparams = ''
                        postparams = ''
                        response = self.apicall(method, url, getparams,
                                                postparams, putparams, headers)
Ejemplo n.º 59
0
    def _spawn_from_binary_external(cls, binary):
        def create_interpreter(stdout, check_binary=False):
            identity = stdout.decode("utf-8").strip()
            if not identity:
                raise cls.IdentificationError(
                    "Could not establish identity of {}.".format(binary))
            interpreter = cls(PythonIdentity.decode(identity))
            # We should not need to check this since binary == interpreter.binary should always be
            # true, but historically this could be untrue as noted in `PythonIdentity.get`.
            if check_binary and not os.path.exists(interpreter.binary):
                raise cls.InterpreterNotFound(
                    "Cached interpreter for {} reports a binary of {}, which could not be found"
                    .format(binary, interpreter.binary))
            return interpreter

        # Part of the PythonInterpreter data are environment markers that depend on the current OS
        # release. That data can change when the OS is upgraded but (some of) the installed interpreters
        # remain the same. As such, include the OS in the hash structure for cached interpreters.
        os_digest = hashlib.sha1()
        for os_identifier in platform.release(), platform.version():
            os_digest.update(os_identifier.encode("utf-8"))
        os_hash = os_digest.hexdigest()

        interpreter_cache_dir = os.path.join(ENV.PEX_ROOT, "interpreters")
        os_cache_dir = os.path.join(interpreter_cache_dir, os_hash)
        if os.path.isdir(
                interpreter_cache_dir) and not os.path.isdir(os_cache_dir):
            with TRACER.timed("GCing interpreter cache from prior OS version"):
                safe_rmtree(interpreter_cache_dir)

        interpreter_hash = CacheHelper.hash(binary)

        # Some distributions include more than one copy of the same interpreter via a hard link (e.g.:
        # python3.7 is a hardlink to python3.7m). To ensure a deterministic INTERP-INFO file we must
        # emit a separate INTERP-INFO for each link since INTERP-INFO contains the interpreter path and
        # would otherwise be unstable.
        #
        # See cls._REGEXEN for a related affordance.
        path_id = binary.replace(os.sep, ".").lstrip(".")

        cache_dir = os.path.join(os_cache_dir, interpreter_hash, path_id)
        cache_file = os.path.join(cache_dir, cls.INTERP_INFO_FILE)
        if os.path.isfile(cache_file):
            try:
                with open(cache_file, "rb") as fp:
                    return SpawnedJob.completed(
                        create_interpreter(fp.read(), check_binary=True))
            except (IOError, OSError, cls.Error, PythonIdentity.Error):
                safe_rmtree(cache_dir)
                return cls._spawn_from_binary_external(binary)
        else:
            pythonpath = third_party.expose(["pex"])
            cmd, env = cls._create_isolated_cmd(
                binary,
                args=[
                    "-c",
                    dedent("""\
                        import os
                        import sys

                        from pex.common import atomic_directory, safe_open
                        from pex.interpreter import PythonIdentity


                        encoded_identity = PythonIdentity.get(binary={binary!r}).encode()
                        sys.stdout.write(encoded_identity)
                        with atomic_directory({cache_dir!r}, exclusive=False) as cache_dir:
                            if cache_dir:
                                with safe_open(os.path.join(cache_dir, {info_file!r}), 'w') as fp:
                                    fp.write(encoded_identity)
                        """.format(binary=binary,
                                   cache_dir=cache_dir,
                                   info_file=cls.INTERP_INFO_FILE)),
                ],
                pythonpath=pythonpath,
            )
            process = Executor.open_process(cmd,
                                            env=env,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
            job = Job(command=cmd, process=process)
            return SpawnedJob.stdout(job, result_func=create_interpreter)
Ejemplo n.º 60
0
                n = n + 1
            if 'http://apitest.lyancafe.com/third/v1/expectedtimes' == i[
                    'url']:
                for name in names:
                    print u'========================now running apitest of %s' % (
                        name), ',the %sst case' % n, '======================'
                    #print noncenonce_ex = 'nonce='+str(time.time()).split('.')[0]+'000'
                    sign_ex = 'address=' + str(
                        i['address']
                    ) + '&' + 'appid=' + names[name][0] + '&' + 'city=' + str(
                        i['city']
                    ) + '&' + nonce_ex + '&' + 'providerid=' + str(
                        int(i['provider_id'])
                    ) + '&' + 'appkey=' + names[name][1]
                    print sign_ex
                    sign = hashlib.sha1(sign_ex).hexdigest()
                    print sign
                    print nonce
                    getparams = {
                        'appid': names[name][0],
                        'nonce': nonce,
                        'address': str(i['address']),
                        'sign': sign,
                        'city': str(i['city']),
                        'providerid': str(int(i['provider_id']))
                    }
                    #print getparams
                    postparams = ''
                    putparams = ''
                    response = self.apicall(method, url, getparams, postparams,
                                            putparams, headers)